commit
stringlengths 40
40
| old_file
stringlengths 4
118
| new_file
stringlengths 4
118
| old_contents
stringlengths 10
2.94k
| new_contents
stringlengths 21
3.18k
| subject
stringlengths 16
444
| message
stringlengths 17
2.63k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 5
43k
| ndiff
stringlengths 51
3.32k
| instruction
stringlengths 16
444
| content
stringlengths 133
4.32k
|
---|---|---|---|---|---|---|---|---|---|---|---|---|
932ee2737b822742996f234c90b715771fb876bf | tests/functional/api/view_pdf_test.py | tests/functional/api/view_pdf_test.py | import pytest
from tests.conftest import assert_cache_control
class TestViewPDFAPI:
def test_caching_is_disabled(self, test_app):
response = test_app.get("/pdf?url=http://example.com/foo.pdf")
assert_cache_control(
response.headers, ["max-age=0", "must-revalidate", "no-cache", "no-store"]
)
| from tests.conftest import assert_cache_control
class TestViewPDFAPI:
def test_caching_is_disabled(self, test_app):
response = test_app.get("/pdf?url=http://example.com/foo.pdf")
assert_cache_control(
response.headers, ["max-age=0", "must-revalidate", "no-cache", "no-store"]
)
| Fix lint errors after adding missing __init__ files | Fix lint errors after adding missing __init__ files
| Python | bsd-2-clause | hypothesis/via,hypothesis/via,hypothesis/via | - import pytest
-
from tests.conftest import assert_cache_control
class TestViewPDFAPI:
def test_caching_is_disabled(self, test_app):
response = test_app.get("/pdf?url=http://example.com/foo.pdf")
assert_cache_control(
response.headers, ["max-age=0", "must-revalidate", "no-cache", "no-store"]
)
| Fix lint errors after adding missing __init__ files | ## Code Before:
import pytest
from tests.conftest import assert_cache_control
class TestViewPDFAPI:
def test_caching_is_disabled(self, test_app):
response = test_app.get("/pdf?url=http://example.com/foo.pdf")
assert_cache_control(
response.headers, ["max-age=0", "must-revalidate", "no-cache", "no-store"]
)
## Instruction:
Fix lint errors after adding missing __init__ files
## Code After:
from tests.conftest import assert_cache_control
class TestViewPDFAPI:
def test_caching_is_disabled(self, test_app):
response = test_app.get("/pdf?url=http://example.com/foo.pdf")
assert_cache_control(
response.headers, ["max-age=0", "must-revalidate", "no-cache", "no-store"]
)
|
50f2cd076aae183376ab14d31594c104ac210738 | shivyc.py | shivyc.py |
import argparse
def get_arguments():
"""Set up the argument parser and return an object storing the
argument values.
return - An object storing argument values, as returned by
argparse.parse_args()
"""
parser = argparse.ArgumentParser(description="Compile C files.")
# The C file to compile
parser.add_argument("file_name")
return parser.parse_args()
def main():
"""Run the compiler
"""
arguments = get_arguments()
print(arguments)
if __name__ == "__main__":
main()
|
import argparse
def get_arguments():
"""Set up the argument parser and return an object storing the
argument values.
return - An object storing argument values, as returned by
argparse.parse_args()
"""
parser = argparse.ArgumentParser(description="Compile C files.")
# The file name of the C file to compile. The file name gets saved to the
# file_name attribute of the returned object, but this parameter appears as
# "filename" (no underscore) on the command line.
parser.add_argument("file_name", metavar="filename")
return parser.parse_args()
def main():
"""Run the compiler
"""
arguments = get_arguments()
print(arguments)
if __name__ == "__main__":
main()
| Rename file_name argument on command line | Rename file_name argument on command line
| Python | mit | ShivamSarodia/ShivyC,ShivamSarodia/ShivyC,ShivamSarodia/ShivyC |
import argparse
def get_arguments():
"""Set up the argument parser and return an object storing the
argument values.
return - An object storing argument values, as returned by
argparse.parse_args()
"""
parser = argparse.ArgumentParser(description="Compile C files.")
- # The C file to compile
+ # The file name of the C file to compile. The file name gets saved to the
+ # file_name attribute of the returned object, but this parameter appears as
+ # "filename" (no underscore) on the command line.
- parser.add_argument("file_name")
+ parser.add_argument("file_name", metavar="filename")
return parser.parse_args()
def main():
"""Run the compiler
"""
arguments = get_arguments()
print(arguments)
if __name__ == "__main__":
main()
| Rename file_name argument on command line | ## Code Before:
import argparse
def get_arguments():
"""Set up the argument parser and return an object storing the
argument values.
return - An object storing argument values, as returned by
argparse.parse_args()
"""
parser = argparse.ArgumentParser(description="Compile C files.")
# The C file to compile
parser.add_argument("file_name")
return parser.parse_args()
def main():
"""Run the compiler
"""
arguments = get_arguments()
print(arguments)
if __name__ == "__main__":
main()
## Instruction:
Rename file_name argument on command line
## Code After:
import argparse
def get_arguments():
"""Set up the argument parser and return an object storing the
argument values.
return - An object storing argument values, as returned by
argparse.parse_args()
"""
parser = argparse.ArgumentParser(description="Compile C files.")
# The file name of the C file to compile. The file name gets saved to the
# file_name attribute of the returned object, but this parameter appears as
# "filename" (no underscore) on the command line.
parser.add_argument("file_name", metavar="filename")
return parser.parse_args()
def main():
"""Run the compiler
"""
arguments = get_arguments()
print(arguments)
if __name__ == "__main__":
main()
|
66a9d140feb3a0bd332031853fb1038622fd5c5b | oidc_apis/utils.py | oidc_apis/utils.py | from collections import OrderedDict
def combine_uniquely(iterable1, iterable2):
"""
Combine unique items of two sequences preserving order.
:type seq1: Iterable[Any]
:type seq2: Iterable[Any]
:rtype: list[Any]
"""
result = OrderedDict.fromkeys(iterable1)
for item in iterable2:
result[item] = None
return list(result.keys())
def after_userlogin_hook(request, user, client):
"""Marks Django session modified
The purpose of this function is to keep the session used by the
oidc-provider fresh. This is achieved by pointing
'OIDC_AFTER_USERLOGIN_HOOK' setting to this."""
request.session.modified = True
# Return None to continue the login flow
return None
| from collections import OrderedDict
import django
from oidc_provider import settings
from django.contrib.auth import BACKEND_SESSION_KEY
from django.contrib.auth import logout as django_user_logout
from users.models import LoginMethod, OidcClientOptions
from django.contrib.auth.views import redirect_to_login
def combine_uniquely(iterable1, iterable2):
"""
Combine unique items of two sequences preserving order.
:type seq1: Iterable[Any]
:type seq2: Iterable[Any]
:rtype: list[Any]
"""
result = OrderedDict.fromkeys(iterable1)
for item in iterable2:
result[item] = None
return list(result.keys())
def after_userlogin_hook(request, user, client):
"""Marks Django session modified
The purpose of this function is to keep the session used by the
oidc-provider fresh. This is achieved by pointing
'OIDC_AFTER_USERLOGIN_HOOK' setting to this."""
request.session.modified = True
last_login_backend = request.session.get('social_auth_last_login_backend')
client_options = OidcClientOptions.objects.get(oidc_client=client)
allowed_methods = client_options.login_methods.all()
if allowed_methods is None:
raise django.core.exceptions.PermissionDenied
allowed_providers = set((x.provider_id for x in allowed_methods))
if last_login_backend is not None:
active_backend = user.social_auth.filter(provider=last_login_backend)
if ((last_login_backend is None and user is not None)
or (active_backend.exists() and active_backend.first().provider not in allowed_providers)):
django_user_logout(request)
next_page = request.get_full_path()
return redirect_to_login(next_page, settings.get('OIDC_LOGIN_URL'))
# Return None to continue the login flow
return None
| Implement current session auth method check | Implement current session auth method check
| Python | mit | mikkokeskinen/tunnistamo,mikkokeskinen/tunnistamo | from collections import OrderedDict
+ import django
+ from oidc_provider import settings
+ from django.contrib.auth import BACKEND_SESSION_KEY
+ from django.contrib.auth import logout as django_user_logout
+ from users.models import LoginMethod, OidcClientOptions
+ from django.contrib.auth.views import redirect_to_login
def combine_uniquely(iterable1, iterable2):
"""
Combine unique items of two sequences preserving order.
:type seq1: Iterable[Any]
:type seq2: Iterable[Any]
:rtype: list[Any]
"""
result = OrderedDict.fromkeys(iterable1)
for item in iterable2:
result[item] = None
return list(result.keys())
def after_userlogin_hook(request, user, client):
"""Marks Django session modified
The purpose of this function is to keep the session used by the
oidc-provider fresh. This is achieved by pointing
'OIDC_AFTER_USERLOGIN_HOOK' setting to this."""
request.session.modified = True
+ last_login_backend = request.session.get('social_auth_last_login_backend')
+ client_options = OidcClientOptions.objects.get(oidc_client=client)
+
+ allowed_methods = client_options.login_methods.all()
+ if allowed_methods is None:
+ raise django.core.exceptions.PermissionDenied
+
+ allowed_providers = set((x.provider_id for x in allowed_methods))
+ if last_login_backend is not None:
+ active_backend = user.social_auth.filter(provider=last_login_backend)
+
+ if ((last_login_backend is None and user is not None)
+ or (active_backend.exists() and active_backend.first().provider not in allowed_providers)):
+ django_user_logout(request)
+ next_page = request.get_full_path()
+ return redirect_to_login(next_page, settings.get('OIDC_LOGIN_URL'))
+
# Return None to continue the login flow
return None
| Implement current session auth method check | ## Code Before:
from collections import OrderedDict
def combine_uniquely(iterable1, iterable2):
"""
Combine unique items of two sequences preserving order.
:type seq1: Iterable[Any]
:type seq2: Iterable[Any]
:rtype: list[Any]
"""
result = OrderedDict.fromkeys(iterable1)
for item in iterable2:
result[item] = None
return list(result.keys())
def after_userlogin_hook(request, user, client):
"""Marks Django session modified
The purpose of this function is to keep the session used by the
oidc-provider fresh. This is achieved by pointing
'OIDC_AFTER_USERLOGIN_HOOK' setting to this."""
request.session.modified = True
# Return None to continue the login flow
return None
## Instruction:
Implement current session auth method check
## Code After:
from collections import OrderedDict
import django
from oidc_provider import settings
from django.contrib.auth import BACKEND_SESSION_KEY
from django.contrib.auth import logout as django_user_logout
from users.models import LoginMethod, OidcClientOptions
from django.contrib.auth.views import redirect_to_login
def combine_uniquely(iterable1, iterable2):
"""
Combine unique items of two sequences preserving order.
:type seq1: Iterable[Any]
:type seq2: Iterable[Any]
:rtype: list[Any]
"""
result = OrderedDict.fromkeys(iterable1)
for item in iterable2:
result[item] = None
return list(result.keys())
def after_userlogin_hook(request, user, client):
"""Marks Django session modified
The purpose of this function is to keep the session used by the
oidc-provider fresh. This is achieved by pointing
'OIDC_AFTER_USERLOGIN_HOOK' setting to this."""
request.session.modified = True
last_login_backend = request.session.get('social_auth_last_login_backend')
client_options = OidcClientOptions.objects.get(oidc_client=client)
allowed_methods = client_options.login_methods.all()
if allowed_methods is None:
raise django.core.exceptions.PermissionDenied
allowed_providers = set((x.provider_id for x in allowed_methods))
if last_login_backend is not None:
active_backend = user.social_auth.filter(provider=last_login_backend)
if ((last_login_backend is None and user is not None)
or (active_backend.exists() and active_backend.first().provider not in allowed_providers)):
django_user_logout(request)
next_page = request.get_full_path()
return redirect_to_login(next_page, settings.get('OIDC_LOGIN_URL'))
# Return None to continue the login flow
return None
|
a36fe5002bbf5dfcf27a3251cfed85c341e2156d | cbcollections.py | cbcollections.py | class defaultdict(dict):
"""Poor man's implementation of defaultdict for Python 2.4
"""
def __init__(self, default_factory=None, **kwargs):
self.default_factory = default_factory
super(defaultdict, self).__init__(**kwargs)
def __getitem__(self, key):
if self.default_factory is None:
return super(defaultdict, self).__getitem__(key)
else:
try:
return super(defaultdict, self).__getitem__(key)
except KeyError:
return self.default_factory()
| class defaultdict(dict):
"""Poor man's implementation of defaultdict for Python 2.4
"""
def __init__(self, default_factory=None, **kwargs):
self.default_factory = default_factory
super(defaultdict, self).__init__(**kwargs)
def __getitem__(self, key):
if self.default_factory is None:
return super(defaultdict, self).__getitem__(key)
else:
try:
return super(defaultdict, self).__getitem__(key)
except KeyError:
self[key] = self.default_factory()
return self[key]
| Save generated value for defaultdict | MB-6867: Save generated value for defaultdict
Instead of just returning value, keep it in dict.
Change-Id: I2a9862503b71f2234a4a450c48998b5f53a951bc
Reviewed-on: http://review.couchbase.org/21602
Tested-by: Bin Cui <[email protected]>
Reviewed-by: Pavel Paulau <[email protected]>
| Python | apache-2.0 | couchbase/couchbase-cli,couchbaselabs/couchbase-cli,membase/membase-cli,membase/membase-cli,couchbase/couchbase-cli,membase/membase-cli,couchbaselabs/couchbase-cli,couchbaselabs/couchbase-cli | class defaultdict(dict):
"""Poor man's implementation of defaultdict for Python 2.4
"""
def __init__(self, default_factory=None, **kwargs):
self.default_factory = default_factory
super(defaultdict, self).__init__(**kwargs)
def __getitem__(self, key):
if self.default_factory is None:
return super(defaultdict, self).__getitem__(key)
else:
try:
return super(defaultdict, self).__getitem__(key)
except KeyError:
- return self.default_factory()
+ self[key] = self.default_factory()
+ return self[key]
| Save generated value for defaultdict | ## Code Before:
class defaultdict(dict):
"""Poor man's implementation of defaultdict for Python 2.4
"""
def __init__(self, default_factory=None, **kwargs):
self.default_factory = default_factory
super(defaultdict, self).__init__(**kwargs)
def __getitem__(self, key):
if self.default_factory is None:
return super(defaultdict, self).__getitem__(key)
else:
try:
return super(defaultdict, self).__getitem__(key)
except KeyError:
return self.default_factory()
## Instruction:
Save generated value for defaultdict
## Code After:
class defaultdict(dict):
"""Poor man's implementation of defaultdict for Python 2.4
"""
def __init__(self, default_factory=None, **kwargs):
self.default_factory = default_factory
super(defaultdict, self).__init__(**kwargs)
def __getitem__(self, key):
if self.default_factory is None:
return super(defaultdict, self).__getitem__(key)
else:
try:
return super(defaultdict, self).__getitem__(key)
except KeyError:
self[key] = self.default_factory()
return self[key]
|
b27a51f19ea3f9d13672a0db51f7d2b05f9539f0 | kitten/validation.py | kitten/validation.py | import jsonschema
CORE_SCHEMA = {
'type': 'object',
'properties': {
'paradigm': {
'type': 'string',
},
'method': {
'type': 'string',
},
},
'additionalProperties': False,
}
VALIDATORS = {
'core': CORE_SCHEMA
}
def validate(request, schema_name):
jsonschema.validate(request, VALIDATORS[schema_name])
| import jsonschema
CORE_SCHEMA = {
'type': 'object',
'properties': {
'paradigm': {
'type': 'string',
},
'method': {
'type': 'string',
},
'address': {
'type': 'string',
},
},
'additionalProperties': False,
}
VALIDATORS = {
'core': CORE_SCHEMA
}
def validate(request, schema_name):
jsonschema.validate(request, VALIDATORS[schema_name])
| Add 'address' field to core schema | Add 'address' field to core schema
| Python | mit | thiderman/network-kitten | import jsonschema
CORE_SCHEMA = {
'type': 'object',
'properties': {
'paradigm': {
'type': 'string',
},
'method': {
'type': 'string',
},
+ 'address': {
+ 'type': 'string',
+ },
},
'additionalProperties': False,
}
VALIDATORS = {
'core': CORE_SCHEMA
}
+
def validate(request, schema_name):
jsonschema.validate(request, VALIDATORS[schema_name])
| Add 'address' field to core schema | ## Code Before:
import jsonschema
CORE_SCHEMA = {
'type': 'object',
'properties': {
'paradigm': {
'type': 'string',
},
'method': {
'type': 'string',
},
},
'additionalProperties': False,
}
VALIDATORS = {
'core': CORE_SCHEMA
}
def validate(request, schema_name):
jsonschema.validate(request, VALIDATORS[schema_name])
## Instruction:
Add 'address' field to core schema
## Code After:
import jsonschema
CORE_SCHEMA = {
'type': 'object',
'properties': {
'paradigm': {
'type': 'string',
},
'method': {
'type': 'string',
},
'address': {
'type': 'string',
},
},
'additionalProperties': False,
}
VALIDATORS = {
'core': CORE_SCHEMA
}
def validate(request, schema_name):
jsonschema.validate(request, VALIDATORS[schema_name])
|
fb0b956563efbcd22af8300fd4341e3cb277b80a | app/models/user.py | app/models/user.py | from app import db
from flask import Flask
from datetime import datetime
class User(db.Model):
id = db.Column(db.Integer, primary_key=True)
username = db.Column(db.String(80), unique=True)
email = db.Column(db.String(120), unique=True)
name = db.Column(db.String(80))
bio = db.Column(db.String(180))
github_id = db.Column(db.Integer, unique=True)
github_username = db.Column(db.String(64), unique=True)
github_token = db.Column(db.String(300), unique=True)
password = db.Column(db.String(300))
created_at = db.Column(db.DateTime)
def __init__(self, username, email, password, name=None):
self.email = email
self.username = username
self.password = password
if name is None:
self.name = username
else:
self.name = name
self.created_at = datetime.now()
is_authenticated = True
is_anonymous = False
is_active = True
def get_id(self):
return unicode(self.id)
def __repr__(self):
return '<User %r>' % self.username
| from app import db
from flask import Flask
from datetime import datetime
class User(db.Model):
id = db.Column(db.Integer, primary_key=True)
username = db.Column(db.String(80), unique=True)
email = db.Column(db.String(120), unique=True)
name = db.Column(db.String(80))
bio = db.Column(db.String(180))
avatar_url = db.Column(db.String(256))
owner = db.Column(db.String(32), nullable=False, default='user')
github_id = db.Column(db.Integer, unique=True)
github_username = db.Column(db.String(64), unique=True)
github_token = db.Column(db.String(300), unique=True)
password = db.Column(db.String(300))
created_at = db.Column(db.DateTime)
def __init__(self, username, email, password, name=None):
self.email = email
self.username = username
self.password = password
if name is None:
self.name = username
else:
self.name = name
self.created_at = datetime.now()
is_authenticated = True
is_anonymous = False
is_active = True
def get_id(self):
return unicode(self.id)
def __repr__(self):
return '<User %r>' % self.username
| Add avatar_url and owner field for User | Add avatar_url and owner field for User
| Python | agpl-3.0 | lc-soft/GitDigger,lc-soft/GitDigger,lc-soft/GitDigger,lc-soft/GitDigger | from app import db
from flask import Flask
from datetime import datetime
class User(db.Model):
id = db.Column(db.Integer, primary_key=True)
username = db.Column(db.String(80), unique=True)
email = db.Column(db.String(120), unique=True)
name = db.Column(db.String(80))
bio = db.Column(db.String(180))
+ avatar_url = db.Column(db.String(256))
+ owner = db.Column(db.String(32), nullable=False, default='user')
github_id = db.Column(db.Integer, unique=True)
github_username = db.Column(db.String(64), unique=True)
github_token = db.Column(db.String(300), unique=True)
password = db.Column(db.String(300))
created_at = db.Column(db.DateTime)
def __init__(self, username, email, password, name=None):
self.email = email
self.username = username
self.password = password
if name is None:
self.name = username
else:
self.name = name
self.created_at = datetime.now()
is_authenticated = True
is_anonymous = False
is_active = True
def get_id(self):
return unicode(self.id)
def __repr__(self):
return '<User %r>' % self.username
| Add avatar_url and owner field for User | ## Code Before:
from app import db
from flask import Flask
from datetime import datetime
class User(db.Model):
id = db.Column(db.Integer, primary_key=True)
username = db.Column(db.String(80), unique=True)
email = db.Column(db.String(120), unique=True)
name = db.Column(db.String(80))
bio = db.Column(db.String(180))
github_id = db.Column(db.Integer, unique=True)
github_username = db.Column(db.String(64), unique=True)
github_token = db.Column(db.String(300), unique=True)
password = db.Column(db.String(300))
created_at = db.Column(db.DateTime)
def __init__(self, username, email, password, name=None):
self.email = email
self.username = username
self.password = password
if name is None:
self.name = username
else:
self.name = name
self.created_at = datetime.now()
is_authenticated = True
is_anonymous = False
is_active = True
def get_id(self):
return unicode(self.id)
def __repr__(self):
return '<User %r>' % self.username
## Instruction:
Add avatar_url and owner field for User
## Code After:
from app import db
from flask import Flask
from datetime import datetime
class User(db.Model):
id = db.Column(db.Integer, primary_key=True)
username = db.Column(db.String(80), unique=True)
email = db.Column(db.String(120), unique=True)
name = db.Column(db.String(80))
bio = db.Column(db.String(180))
avatar_url = db.Column(db.String(256))
owner = db.Column(db.String(32), nullable=False, default='user')
github_id = db.Column(db.Integer, unique=True)
github_username = db.Column(db.String(64), unique=True)
github_token = db.Column(db.String(300), unique=True)
password = db.Column(db.String(300))
created_at = db.Column(db.DateTime)
def __init__(self, username, email, password, name=None):
self.email = email
self.username = username
self.password = password
if name is None:
self.name = username
else:
self.name = name
self.created_at = datetime.now()
is_authenticated = True
is_anonymous = False
is_active = True
def get_id(self):
return unicode(self.id)
def __repr__(self):
return '<User %r>' % self.username
|
9f6d4d9e82ef575164535a8fb9ea80417458dd6b | website/files/models/dataverse.py | website/files/models/dataverse.py | import requests
from framework.auth.core import _get_current_user
from website.files.models.base import File, Folder, FileNode, FileVersion
__all__ = ('DataverseFile', 'DataverseFolder', 'DataverseFileNode')
class DataverseFileNode(FileNode):
provider = 'dataverse'
class DataverseFolder(DataverseFileNode, Folder):
pass
class DataverseFile(DataverseFileNode, File):
def touch(self, version=None, revision=None, **kwargs):
"""Note: Dataverse only has psuedo versions, don't save them"""
version = revision or version # Use revision or version
resp = requests.get(self.generate_waterbutler_url(meta=True, version=version, **kwargs))
if resp.status_code != 200:
return None
data = resp.json()
self.name = data['data']['name']
self.materialized_path = data['data']['materialized']
version = FileVersion(identifier=version)
version.update_metadata(data['data'], save=False)
user = _get_current_user()
if not user or not self.node.can_edit(user=user):
try:
# Users without edit permission can only see published files
if not data['data']['extra']['hasPublishedVersion']:
# Blank out name and path for the render
# Dont save because there's no reason to persist the change
self.name = ''
self.materialized_path = ''
return (version, '<div class="alert alert-info" role="alert">This file does not exist.</div>')
except (KeyError, IndexError):
pass
| from framework.auth.core import _get_current_user
from website.files.models.base import File, Folder, FileNode, FileVersion
__all__ = ('DataverseFile', 'DataverseFolder', 'DataverseFileNode')
class DataverseFileNode(FileNode):
provider = 'dataverse'
class DataverseFolder(DataverseFileNode, Folder):
pass
class DataverseFile(DataverseFileNode, File):
version_identifier = 'version'
def update(self, revision, data):
"""Note: Dataverse only has psuedo versions, don't save them"""
self.name = data['name']
self.materialized_path = data['materialized']
version = FileVersion(identifier=revision)
version.update_metadata(data, save=False)
user = _get_current_user()
if not user or not self.node.can_edit(user=user):
try:
# Users without edit permission can only see published files
if not data['extra']['hasPublishedVersion']:
# Blank out name and path for the render
# Dont save because there's no reason to persist the change
self.name = ''
self.materialized_path = ''
return (version, '<div class="alert alert-info" role="alert">This file does not exist.</div>')
except (KeyError, IndexError):
pass
return version
| Move override logic into update rather than touch | Move override logic into update rather than touch
| Python | apache-2.0 | Johnetordoff/osf.io,mluke93/osf.io,SSJohns/osf.io,chrisseto/osf.io,hmoco/osf.io,caseyrygt/osf.io,GageGaskins/osf.io,acshi/osf.io,alexschiller/osf.io,caseyrollins/osf.io,ZobairAlijan/osf.io,wearpants/osf.io,GageGaskins/osf.io,brandonPurvis/osf.io,CenterForOpenScience/osf.io,SSJohns/osf.io,alexschiller/osf.io,adlius/osf.io,samchrisinger/osf.io,sloria/osf.io,hmoco/osf.io,erinspace/osf.io,DanielSBrown/osf.io,samanehsan/osf.io,haoyuchen1992/osf.io,Ghalko/osf.io,crcresearch/osf.io,brandonPurvis/osf.io,CenterForOpenScience/osf.io,danielneis/osf.io,doublebits/osf.io,zamattiac/osf.io,baylee-d/osf.io,Johnetordoff/osf.io,KAsante95/osf.io,wearpants/osf.io,caneruguz/osf.io,petermalcolm/osf.io,pattisdr/osf.io,mattclark/osf.io,brianjgeiger/osf.io,cwisecarver/osf.io,samchrisinger/osf.io,billyhunt/osf.io,asanfilippo7/osf.io,chrisseto/osf.io,Johnetordoff/osf.io,caneruguz/osf.io,rdhyee/osf.io,abought/osf.io,RomanZWang/osf.io,mluke93/osf.io,monikagrabowska/osf.io,TomHeatwole/osf.io,mfraezz/osf.io,cosenal/osf.io,saradbowman/osf.io,felliott/osf.io,aaxelb/osf.io,jnayak1/osf.io,cwisecarver/osf.io,danielneis/osf.io,kch8qx/osf.io,GageGaskins/osf.io,aaxelb/osf.io,aaxelb/osf.io,TomHeatwole/osf.io,crcresearch/osf.io,ZobairAlijan/osf.io,zamattiac/osf.io,caneruguz/osf.io,monikagrabowska/osf.io,brianjgeiger/osf.io,zachjanicki/osf.io,rdhyee/osf.io,samchrisinger/osf.io,kch8qx/osf.io,Johnetordoff/osf.io,chennan47/osf.io,TomBaxter/osf.io,jnayak1/osf.io,felliott/osf.io,binoculars/osf.io,acshi/osf.io,Nesiehr/osf.io,kch8qx/osf.io,Nesiehr/osf.io,Ghalko/osf.io,mfraezz/osf.io,GageGaskins/osf.io,petermalcolm/osf.io,arpitar/osf.io,samanehsan/osf.io,abought/osf.io,laurenrevere/osf.io,emetsger/osf.io,petermalcolm/osf.io,mattclark/osf.io,RomanZWang/osf.io,jnayak1/osf.io,kwierman/osf.io,leb2dg/osf.io,doublebits/osf.io,cslzchen/osf.io,zachjanicki/osf.io,njantrania/osf.io,chennan47/osf.io,kwierman/osf.io,brianjgeiger/osf.io,hmoco/osf.io,GageGaskins/osf.io,zamattiac/osf.io,emetsger/osf.io,cas
eyrollins/osf.io,binoculars/osf.io,Nesiehr/osf.io,monikagrabowska/osf.io,cslzchen/osf.io,brandonPurvis/osf.io,acshi/osf.io,rdhyee/osf.io,petermalcolm/osf.io,cosenal/osf.io,asanfilippo7/osf.io,adlius/osf.io,mluke93/osf.io,adlius/osf.io,mluo613/osf.io,kch8qx/osf.io,Ghalko/osf.io,haoyuchen1992/osf.io,HalcyonChimera/osf.io,abought/osf.io,pattisdr/osf.io,haoyuchen1992/osf.io,brandonPurvis/osf.io,mluo613/osf.io,saradbowman/osf.io,mattclark/osf.io,jnayak1/osf.io,felliott/osf.io,arpitar/osf.io,caseyrollins/osf.io,amyshi188/osf.io,acshi/osf.io,asanfilippo7/osf.io,emetsger/osf.io,RomanZWang/osf.io,njantrania/osf.io,sloria/osf.io,SSJohns/osf.io,cwisecarver/osf.io,leb2dg/osf.io,leb2dg/osf.io,felliott/osf.io,CenterForOpenScience/osf.io,TomBaxter/osf.io,billyhunt/osf.io,adlius/osf.io,ticklemepierce/osf.io,chrisseto/osf.io,haoyuchen1992/osf.io,erinspace/osf.io,Ghalko/osf.io,acshi/osf.io,doublebits/osf.io,wearpants/osf.io,alexschiller/osf.io,icereval/osf.io,amyshi188/osf.io,doublebits/osf.io,KAsante95/osf.io,alexschiller/osf.io,wearpants/osf.io,samchrisinger/osf.io,njantrania/osf.io,njantrania/osf.io,cwisecarver/osf.io,KAsante95/osf.io,zamattiac/osf.io,kch8qx/osf.io,TomBaxter/osf.io,ZobairAlijan/osf.io,HalcyonChimera/osf.io,KAsante95/osf.io,DanielSBrown/osf.io,aaxelb/osf.io,rdhyee/osf.io,mluke93/osf.io,mfraezz/osf.io,zachjanicki/osf.io,cslzchen/osf.io,hmoco/osf.io,cosenal/osf.io,zachjanicki/osf.io,chennan47/osf.io,doublebits/osf.io,caneruguz/osf.io,monikagrabowska/osf.io,billyhunt/osf.io,KAsante95/osf.io,brandonPurvis/osf.io,brianjgeiger/osf.io,samanehsan/osf.io,binoculars/osf.io,monikagrabowska/osf.io,crcresearch/osf.io,danielneis/osf.io,laurenrevere/osf.io,arpitar/osf.io,kwierman/osf.io,billyhunt/osf.io,kwierman/osf.io,danielneis/osf.io,ticklemepierce/osf.io,baylee-d/osf.io,HalcyonChimera/osf.io,ZobairAlijan/osf.io,cslzchen/osf.io,caseyrygt/osf.io,laurenrevere/osf.io,Nesiehr/osf.io,mluo613/osf.io,TomHeatwole/osf.io,amyshi188/osf.io,caseyrygt/osf.io,mfraezz/osf.io,SSJohns/osf.io,e
rinspace/osf.io,leb2dg/osf.io,DanielSBrown/osf.io,arpitar/osf.io,baylee-d/osf.io,RomanZWang/osf.io,billyhunt/osf.io,mluo613/osf.io,mluo613/osf.io,amyshi188/osf.io,abought/osf.io,pattisdr/osf.io,ticklemepierce/osf.io,chrisseto/osf.io,emetsger/osf.io,caseyrygt/osf.io,cosenal/osf.io,RomanZWang/osf.io,HalcyonChimera/osf.io,sloria/osf.io,icereval/osf.io,alexschiller/osf.io,samanehsan/osf.io,icereval/osf.io,TomHeatwole/osf.io,ticklemepierce/osf.io,DanielSBrown/osf.io,asanfilippo7/osf.io,CenterForOpenScience/osf.io | - import requests
-
from framework.auth.core import _get_current_user
from website.files.models.base import File, Folder, FileNode, FileVersion
__all__ = ('DataverseFile', 'DataverseFolder', 'DataverseFileNode')
class DataverseFileNode(FileNode):
provider = 'dataverse'
class DataverseFolder(DataverseFileNode, Folder):
pass
class DataverseFile(DataverseFileNode, File):
+ version_identifier = 'version'
- def touch(self, version=None, revision=None, **kwargs):
+ def update(self, revision, data):
"""Note: Dataverse only has psuedo versions, don't save them"""
- version = revision or version # Use revision or version
+ self.name = data['name']
+ self.materialized_path = data['materialized']
- resp = requests.get(self.generate_waterbutler_url(meta=True, version=version, **kwargs))
- if resp.status_code != 200:
- return None
-
- data = resp.json()
- self.name = data['data']['name']
- self.materialized_path = data['data']['materialized']
-
- version = FileVersion(identifier=version)
+ version = FileVersion(identifier=revision)
- version.update_metadata(data['data'], save=False)
+ version.update_metadata(data, save=False)
user = _get_current_user()
if not user or not self.node.can_edit(user=user):
try:
# Users without edit permission can only see published files
- if not data['data']['extra']['hasPublishedVersion']:
+ if not data['extra']['hasPublishedVersion']:
# Blank out name and path for the render
# Dont save because there's no reason to persist the change
self.name = ''
self.materialized_path = ''
return (version, '<div class="alert alert-info" role="alert">This file does not exist.</div>')
except (KeyError, IndexError):
pass
+ return version
| Move override logic into update rather than touch | ## Code Before:
import requests
from framework.auth.core import _get_current_user
from website.files.models.base import File, Folder, FileNode, FileVersion
__all__ = ('DataverseFile', 'DataverseFolder', 'DataverseFileNode')
class DataverseFileNode(FileNode):
provider = 'dataverse'
class DataverseFolder(DataverseFileNode, Folder):
pass
class DataverseFile(DataverseFileNode, File):
def touch(self, version=None, revision=None, **kwargs):
"""Note: Dataverse only has psuedo versions, don't save them"""
version = revision or version # Use revision or version
resp = requests.get(self.generate_waterbutler_url(meta=True, version=version, **kwargs))
if resp.status_code != 200:
return None
data = resp.json()
self.name = data['data']['name']
self.materialized_path = data['data']['materialized']
version = FileVersion(identifier=version)
version.update_metadata(data['data'], save=False)
user = _get_current_user()
if not user or not self.node.can_edit(user=user):
try:
# Users without edit permission can only see published files
if not data['data']['extra']['hasPublishedVersion']:
# Blank out name and path for the render
# Dont save because there's no reason to persist the change
self.name = ''
self.materialized_path = ''
return (version, '<div class="alert alert-info" role="alert">This file does not exist.</div>')
except (KeyError, IndexError):
pass
## Instruction:
Move override logic into update rather than touch
## Code After:
from framework.auth.core import _get_current_user
from website.files.models.base import File, Folder, FileNode, FileVersion
__all__ = ('DataverseFile', 'DataverseFolder', 'DataverseFileNode')
class DataverseFileNode(FileNode):
provider = 'dataverse'
class DataverseFolder(DataverseFileNode, Folder):
pass
class DataverseFile(DataverseFileNode, File):
version_identifier = 'version'
def update(self, revision, data):
"""Note: Dataverse only has psuedo versions, don't save them"""
self.name = data['name']
self.materialized_path = data['materialized']
version = FileVersion(identifier=revision)
version.update_metadata(data, save=False)
user = _get_current_user()
if not user or not self.node.can_edit(user=user):
try:
# Users without edit permission can only see published files
if not data['extra']['hasPublishedVersion']:
# Blank out name and path for the render
# Dont save because there's no reason to persist the change
self.name = ''
self.materialized_path = ''
return (version, '<div class="alert alert-info" role="alert">This file does not exist.</div>')
except (KeyError, IndexError):
pass
return version
|
06d210cdc811f0051a489f335cc94a604e99a35d | werobot/session/mongodbstorage.py | werobot/session/mongodbstorage.py |
from werobot.session import SessionStorage
from werobot.utils import json_loads, json_dumps
class MongoDBStorage(SessionStorage):
"""
MongoDBStorage 会把你的 Session 数据储存在一个 MongoDB Collection 中 ::
import pymongo
import werobot
from werobot.session.mongodbstorage import MongoDBStorage
collection = pymongo.MongoClient()["wechat"]["session"]
session_storage = MongoDBStorage(collection)
robot = werobot.WeRoBot(token="token", enable_session=True,
session_storage=session_storage)
你需要安装 ``pymongo`` 才能使用 MongoDBStorage 。
:param collection: 一个 MongoDB Collection。
"""
def __init__(self, collection):
import pymongo
assert isinstance(collection,
pymongo.collection.Collection)
self.collection = collection
collection.create_index("wechat_id")
def _get_document(self, id):
return self.collection.find_one({"wechat_id": id})
def get(self, id):
document = self._get_document(id)
if document:
session_json = document["session"]
return json_loads(session_json)
return {}
def set(self, id, value):
document = self._get_document(id)
session = json_dumps(value)
if document:
document["session"] = session
self.collection.save(document)
else:
self.collection.insert({
"wechat_id": id,
"session": session
})
def delete(self, id):
document = self._get_document(id)
if document:
self.collection.remove(document["_id"])
|
from werobot.session import SessionStorage
from werobot.utils import json_loads, json_dumps
class MongoDBStorage(SessionStorage):
"""
MongoDBStorage 会把你的 Session 数据储存在一个 MongoDB Collection 中 ::
import pymongo
import werobot
from werobot.session.mongodbstorage import MongoDBStorage
collection = pymongo.MongoClient()["wechat"]["session"]
session_storage = MongoDBStorage(collection)
robot = werobot.WeRoBot(token="token", enable_session=True,
session_storage=session_storage)
你需要安装 ``pymongo`` 才能使用 MongoDBStorage 。
:param collection: 一个 MongoDB Collection。
"""
def __init__(self, collection):
self.collection = collection
collection.create_index("wechat_id")
def _get_document(self, id):
return self.collection.find_one({"wechat_id": id})
def get(self, id):
document = self._get_document(id)
if document:
session_json = document["session"]
return json_loads(session_json)
return {}
def set(self, id, value):
session = json_dumps(value)
self.collection.replace_one({
"wechat_id": id
}, {
"wechat_id": id,
"session": session
}, upsert=True)
def delete(self, id):
self.collection.delete_one({
"wechat_id": id
})
| Use new pymongo API in MongoDBStorage | Use new pymongo API in MongoDBStorage
| Python | mit | whtsky/WeRoBot,whtsky/WeRoBot,adam139/WeRobot,adam139/WeRobot,whtsky/WeRoBot,weberwang/WeRoBot,weberwang/WeRoBot |
from werobot.session import SessionStorage
from werobot.utils import json_loads, json_dumps
class MongoDBStorage(SessionStorage):
"""
MongoDBStorage 会把你的 Session 数据储存在一个 MongoDB Collection 中 ::
import pymongo
import werobot
from werobot.session.mongodbstorage import MongoDBStorage
collection = pymongo.MongoClient()["wechat"]["session"]
session_storage = MongoDBStorage(collection)
robot = werobot.WeRoBot(token="token", enable_session=True,
session_storage=session_storage)
你需要安装 ``pymongo`` 才能使用 MongoDBStorage 。
:param collection: 一个 MongoDB Collection。
"""
def __init__(self, collection):
- import pymongo
- assert isinstance(collection,
- pymongo.collection.Collection)
self.collection = collection
collection.create_index("wechat_id")
def _get_document(self, id):
return self.collection.find_one({"wechat_id": id})
def get(self, id):
document = self._get_document(id)
if document:
session_json = document["session"]
return json_loads(session_json)
return {}
def set(self, id, value):
- document = self._get_document(id)
session = json_dumps(value)
- if document:
- document["session"] = session
- self.collection.save(document)
- else:
- self.collection.insert({
+ self.collection.replace_one({
+ "wechat_id": id
+ }, {
- "wechat_id": id,
+ "wechat_id": id,
- "session": session
+ "session": session
- })
+ }, upsert=True)
def delete(self, id):
- document = self._get_document(id)
- if document:
- self.collection.remove(document["_id"])
+ self.collection.delete_one({
+ "wechat_id": id
+ })
| Use new pymongo API in MongoDBStorage | ## Code Before:
from werobot.session import SessionStorage
from werobot.utils import json_loads, json_dumps
class MongoDBStorage(SessionStorage):
"""
MongoDBStorage 会把你的 Session 数据储存在一个 MongoDB Collection 中 ::
import pymongo
import werobot
from werobot.session.mongodbstorage import MongoDBStorage
collection = pymongo.MongoClient()["wechat"]["session"]
session_storage = MongoDBStorage(collection)
robot = werobot.WeRoBot(token="token", enable_session=True,
session_storage=session_storage)
你需要安装 ``pymongo`` 才能使用 MongoDBStorage 。
:param collection: 一个 MongoDB Collection。
"""
def __init__(self, collection):
import pymongo
assert isinstance(collection,
pymongo.collection.Collection)
self.collection = collection
collection.create_index("wechat_id")
def _get_document(self, id):
return self.collection.find_one({"wechat_id": id})
def get(self, id):
document = self._get_document(id)
if document:
session_json = document["session"]
return json_loads(session_json)
return {}
def set(self, id, value):
document = self._get_document(id)
session = json_dumps(value)
if document:
document["session"] = session
self.collection.save(document)
else:
self.collection.insert({
"wechat_id": id,
"session": session
})
def delete(self, id):
document = self._get_document(id)
if document:
self.collection.remove(document["_id"])
## Instruction:
Use new pymongo API in MongoDBStorage
## Code After:
from werobot.session import SessionStorage
from werobot.utils import json_loads, json_dumps
class MongoDBStorage(SessionStorage):
"""
MongoDBStorage 会把你的 Session 数据储存在一个 MongoDB Collection 中 ::
import pymongo
import werobot
from werobot.session.mongodbstorage import MongoDBStorage
collection = pymongo.MongoClient()["wechat"]["session"]
session_storage = MongoDBStorage(collection)
robot = werobot.WeRoBot(token="token", enable_session=True,
session_storage=session_storage)
你需要安装 ``pymongo`` 才能使用 MongoDBStorage 。
:param collection: 一个 MongoDB Collection。
"""
def __init__(self, collection):
self.collection = collection
collection.create_index("wechat_id")
def _get_document(self, id):
return self.collection.find_one({"wechat_id": id})
def get(self, id):
document = self._get_document(id)
if document:
session_json = document["session"]
return json_loads(session_json)
return {}
def set(self, id, value):
session = json_dumps(value)
self.collection.replace_one({
"wechat_id": id
}, {
"wechat_id": id,
"session": session
}, upsert=True)
def delete(self, id):
self.collection.delete_one({
"wechat_id": id
})
|
841ca9cfbdb8faac9d8deb47b65717b5fb7c8eb4 | mfh.py | mfh.py | import os
import sys
import time
from multiprocessing import Process, Event
import mfhclient
import server
import update
from arguments import parse
from settings import HONEYPORT, HIVEPORT
def main():
update_event = Event()
mfhclient_process = Process(
args=(args, update_event,),
name="mfhclient_process",
target=mfhclient.main,
)
server_process = Process(
args=(args, update_event,),
name="server_process",
target=server.main,
)
if args.client is not None:
mfhclient_process.start()
if args.client is not None:
server_process.start()
if args.updater:
trigger_process = Process(
args=(update_event,),
name="trigger_process",
target=update.trigger,
)
trigger_process.start()
trigger_process.join()
while mfhclient_process.is_alive() or server_process.is_alive():
time.sleep(5)
else:
if args.updater:
# update.pull("origin", "master")
sys.stdout.flush()
os.execl(sys.executable, sys.executable, *sys.argv)
if __name__ == '__main__':
# Parse arguments
args = parse()
if args.c:
args.client = HONEYPORT
if args.s:
args.server = HIVEPORT
main()
| import os
import sys
import time
from multiprocessing import Process, Event
import mfhclient
import server
import update
from arguments import parse
from settings import HONEYPORT, HIVEPORT
def main():
update_event = Event()
client = create_process("client", mfhclient.main, args, update_event)
serv = create_process("server", server.main, args, update_event)
if args.client is not None:
client.start()
if args.client is not None:
serv.start()
if args.updater:
trigger = create_process("trigger", update.trigger, update_event)
trigger.start()
trigger.join()
while client.is_alive() or serv.is_alive():
time.sleep(5)
else:
if args.updater:
update.pull("origin", "master")
sys.stdout.flush()
os.execl(sys.executable, sys.executable, *sys.argv)
def create_process(name, function, *arguments):
process = Process(
args=arguments,
name="{0}_process".format(name),
target=function,
)
return process
if __name__ == '__main__':
# Parse arguments
args = parse()
if args.c:
args.client = HONEYPORT
if args.s:
args.server = HIVEPORT
processes = {}
main()
| Move all the process creation in a new function | Move all the process creation in a new function
This reduces the size of code.
| Python | mit | Zloool/manyfaced-honeypot | import os
import sys
import time
from multiprocessing import Process, Event
import mfhclient
import server
import update
from arguments import parse
from settings import HONEYPORT, HIVEPORT
def main():
update_event = Event()
+ client = create_process("client", mfhclient.main, args, update_event)
+ serv = create_process("server", server.main, args, update_event)
- mfhclient_process = Process(
- args=(args, update_event,),
- name="mfhclient_process",
- target=mfhclient.main,
- )
- server_process = Process(
- args=(args, update_event,),
- name="server_process",
- target=server.main,
- )
if args.client is not None:
- mfhclient_process.start()
+ client.start()
if args.client is not None:
- server_process.start()
+ serv.start()
if args.updater:
+ trigger = create_process("trigger", update.trigger, update_event)
- trigger_process = Process(
- args=(update_event,),
- name="trigger_process",
- target=update.trigger,
- )
- trigger_process.start()
+ trigger.start()
- trigger_process.join()
+ trigger.join()
- while mfhclient_process.is_alive() or server_process.is_alive():
+ while client.is_alive() or serv.is_alive():
time.sleep(5)
else:
if args.updater:
- # update.pull("origin", "master")
+ update.pull("origin", "master")
sys.stdout.flush()
os.execl(sys.executable, sys.executable, *sys.argv)
+
+
+ def create_process(name, function, *arguments):
+ process = Process(
+ args=arguments,
+ name="{0}_process".format(name),
+ target=function,
+ )
+ return process
if __name__ == '__main__':
# Parse arguments
args = parse()
if args.c:
args.client = HONEYPORT
if args.s:
args.server = HIVEPORT
+ processes = {}
main()
| Move all the process creation in a new function | ## Code Before:
import os
import sys
import time
from multiprocessing import Process, Event
import mfhclient
import server
import update
from arguments import parse
from settings import HONEYPORT, HIVEPORT
def main():
update_event = Event()
mfhclient_process = Process(
args=(args, update_event,),
name="mfhclient_process",
target=mfhclient.main,
)
server_process = Process(
args=(args, update_event,),
name="server_process",
target=server.main,
)
if args.client is not None:
mfhclient_process.start()
if args.client is not None:
server_process.start()
if args.updater:
trigger_process = Process(
args=(update_event,),
name="trigger_process",
target=update.trigger,
)
trigger_process.start()
trigger_process.join()
while mfhclient_process.is_alive() or server_process.is_alive():
time.sleep(5)
else:
if args.updater:
# update.pull("origin", "master")
sys.stdout.flush()
os.execl(sys.executable, sys.executable, *sys.argv)
if __name__ == '__main__':
# Parse arguments
args = parse()
if args.c:
args.client = HONEYPORT
if args.s:
args.server = HIVEPORT
main()
## Instruction:
Move all the process creation in a new function
## Code After:
import os
import sys
import time
from multiprocessing import Process, Event
import mfhclient
import server
import update
from arguments import parse
from settings import HONEYPORT, HIVEPORT
def main():
update_event = Event()
client = create_process("client", mfhclient.main, args, update_event)
serv = create_process("server", server.main, args, update_event)
if args.client is not None:
client.start()
if args.client is not None:
serv.start()
if args.updater:
trigger = create_process("trigger", update.trigger, update_event)
trigger.start()
trigger.join()
while client.is_alive() or serv.is_alive():
time.sleep(5)
else:
if args.updater:
update.pull("origin", "master")
sys.stdout.flush()
os.execl(sys.executable, sys.executable, *sys.argv)
def create_process(name, function, *arguments):
process = Process(
args=arguments,
name="{0}_process".format(name),
target=function,
)
return process
if __name__ == '__main__':
# Parse arguments
args = parse()
if args.c:
args.client = HONEYPORT
if args.s:
args.server = HIVEPORT
processes = {}
main()
|
5f128bbfc61169ac6b5f0e9f4dc6bcd05092382c | requests_cache/serializers/pipeline.py | requests_cache/serializers/pipeline.py | from typing import Any, List, Union
from ..models import CachedResponse
class Stage:
"""Generic class to wrap serialization steps with consistent ``dumps()`` and ``loads()`` methods"""
def __init__(self, obj: Any, dumps: str = "dumps", loads: str = "loads"):
self.obj = obj
self.dumps = getattr(obj, dumps)
self.loads = getattr(obj, loads)
class SerializerPipeline:
"""A sequence of steps used to serialize and deserialize response objects"""
def __init__(self, steps: List):
self.steps = steps
self.dump_steps = [step.dumps for step in steps]
self.load_steps = [step.loads for step in reversed(steps)]
def dumps(self, value) -> Union[str, bytes]:
for step in self.dump_steps:
value = step(value)
return value
def loads(self, value) -> CachedResponse:
for step in self.load_steps:
value = step(value)
return value
| from typing import Any, Callable, List, Union
from ..models import CachedResponse
class Stage:
"""Generic class to wrap serialization steps with consistent ``dumps()`` and ``loads()`` methods
Args:
obj: Serializer object or module, if applicable
dumps: Serialization function, or name of method on ``obj``
loads: Deserialization function, or name of method on ``obj``
"""
def __init__(
self,
obj: Any = None,
dumps: Union[str, Callable] = 'dumps',
loads: Union[str, Callable] = 'loads',
):
self.obj = obj
self.dumps = getattr(obj, dumps) if isinstance(dumps, str) else dumps
self.loads = getattr(obj, loads) if isinstance(loads, str) else loads
class SerializerPipeline:
"""A sequence of steps used to serialize and deserialize response objects.
This can be initialized with :py:class:`Stage` objects, or any objects with ``dumps()`` and
``loads()`` methods
"""
def __init__(self, stages: List):
self.steps = stages
self.dump_steps = [step.dumps for step in stages]
self.load_steps = [step.loads for step in reversed(stages)]
def dumps(self, value) -> Union[str, bytes]:
for step in self.dump_steps:
value = step(value)
return value
def loads(self, value) -> CachedResponse:
for step in self.load_steps:
value = step(value)
return value
| Allow Stage objects to take functions instead of object + method names | Allow Stage objects to take functions instead of object + method names
| Python | bsd-2-clause | reclosedev/requests-cache | - from typing import Any, List, Union
+ from typing import Any, Callable, List, Union
from ..models import CachedResponse
class Stage:
- """Generic class to wrap serialization steps with consistent ``dumps()`` and ``loads()`` methods"""
+ """Generic class to wrap serialization steps with consistent ``dumps()`` and ``loads()`` methods
- def __init__(self, obj: Any, dumps: str = "dumps", loads: str = "loads"):
+ Args:
+ obj: Serializer object or module, if applicable
+ dumps: Serialization function, or name of method on ``obj``
+ loads: Deserialization function, or name of method on ``obj``
+ """
+
+ def __init__(
+ self,
+ obj: Any = None,
+ dumps: Union[str, Callable] = 'dumps',
+ loads: Union[str, Callable] = 'loads',
+ ):
self.obj = obj
- self.dumps = getattr(obj, dumps)
- self.loads = getattr(obj, loads)
+ self.dumps = getattr(obj, dumps) if isinstance(dumps, str) else dumps
+ self.loads = getattr(obj, loads) if isinstance(loads, str) else loads
class SerializerPipeline:
- """A sequence of steps used to serialize and deserialize response objects"""
+ """A sequence of steps used to serialize and deserialize response objects.
+ This can be initialized with :py:class:`Stage` objects, or any objects with ``dumps()`` and
+ ``loads()`` methods
+ """
- def __init__(self, steps: List):
+ def __init__(self, stages: List):
- self.steps = steps
+ self.steps = stages
- self.dump_steps = [step.dumps for step in steps]
+ self.dump_steps = [step.dumps for step in stages]
- self.load_steps = [step.loads for step in reversed(steps)]
+ self.load_steps = [step.loads for step in reversed(stages)]
def dumps(self, value) -> Union[str, bytes]:
for step in self.dump_steps:
value = step(value)
return value
def loads(self, value) -> CachedResponse:
for step in self.load_steps:
value = step(value)
return value
| Allow Stage objects to take functions instead of object + method names | ## Code Before:
from typing import Any, List, Union
from ..models import CachedResponse
class Stage:
"""Generic class to wrap serialization steps with consistent ``dumps()`` and ``loads()`` methods"""
def __init__(self, obj: Any, dumps: str = "dumps", loads: str = "loads"):
self.obj = obj
self.dumps = getattr(obj, dumps)
self.loads = getattr(obj, loads)
class SerializerPipeline:
"""A sequence of steps used to serialize and deserialize response objects"""
def __init__(self, steps: List):
self.steps = steps
self.dump_steps = [step.dumps for step in steps]
self.load_steps = [step.loads for step in reversed(steps)]
def dumps(self, value) -> Union[str, bytes]:
for step in self.dump_steps:
value = step(value)
return value
def loads(self, value) -> CachedResponse:
for step in self.load_steps:
value = step(value)
return value
## Instruction:
Allow Stage objects to take functions instead of object + method names
## Code After:
from typing import Any, Callable, List, Union
from ..models import CachedResponse
class Stage:
"""Generic class to wrap serialization steps with consistent ``dumps()`` and ``loads()`` methods
Args:
obj: Serializer object or module, if applicable
dumps: Serialization function, or name of method on ``obj``
loads: Deserialization function, or name of method on ``obj``
"""
def __init__(
self,
obj: Any = None,
dumps: Union[str, Callable] = 'dumps',
loads: Union[str, Callable] = 'loads',
):
self.obj = obj
self.dumps = getattr(obj, dumps) if isinstance(dumps, str) else dumps
self.loads = getattr(obj, loads) if isinstance(loads, str) else loads
class SerializerPipeline:
"""A sequence of steps used to serialize and deserialize response objects.
This can be initialized with :py:class:`Stage` objects, or any objects with ``dumps()`` and
``loads()`` methods
"""
def __init__(self, stages: List):
self.steps = stages
self.dump_steps = [step.dumps for step in stages]
self.load_steps = [step.loads for step in reversed(stages)]
def dumps(self, value) -> Union[str, bytes]:
for step in self.dump_steps:
value = step(value)
return value
def loads(self, value) -> CachedResponse:
for step in self.load_steps:
value = step(value)
return value
|
657741f3d4df734afef228e707005dc21d540e34 | post-refunds-back.py | post-refunds-back.py | from __future__ import absolute_import, division, print_function, unicode_literals
import csv
from gratipay import wireup
from gratipay.models.exchange_route import ExchangeRoute
from gratipay.models.participant import Participant
from gratipay.billing.exchanges import record_exchange
db = wireup.db(wireup.env())
inp = csv.reader(open('balanced/refund/refunds.completed.csv'))
note = 'refund of advance payment; see https://medium.com/gratipay-blog/charging-in-arrears-18cacf779bee'
for ts, id, amount, username, route_id, status_code, content in inp:
if status_code != '201': continue
amount = '-' + amount[:-2] + '.' + amount[-2:]
print('posting {} back for {}'.format(amount, username))
route = ExchangeRoute.from_id(route_id)
rp = route.participant
participant = Participant.from_id(rp) if type(rp) is long else rp # Such a hack. :(
route.set_attributes(participant=participant)
record_exchange(db, route, amount, 0, participant, 'pending', note)
| from __future__ import absolute_import, division, print_function, unicode_literals
import csv
from decimal import Decimal as D
from gratipay import wireup
from gratipay.models.exchange_route import ExchangeRoute
from gratipay.models.participant import Participant
from gratipay.billing.exchanges import record_exchange
db = wireup.db(wireup.env())
inp = csv.reader(open('refunds.completed.csv'))
note = 'refund of advance payment; see https://medium.com/gratipay-blog/18cacf779bee'
total = N = 0
for ts, id, amount, username, route_id, success, ref in inp:
print('posting {} back for {}'.format(amount, username))
assert success == 'True'
total += D(amount)
N += 1
amount = D('-' + amount)
route = ExchangeRoute.from_id(route_id)
# Such a hack. :(
rp = route.participant
participant = Participant.from_id(rp) if type(rp) is long else rp
route.set_attributes(participant=participant)
exchange_id = record_exchange(db, route, amount, 0, participant, 'pending', note)
db.run("update exchanges set ref=%s where id=%s", (ref, exchange_id))
print('posted {} back for {}'.format(total, N))
| Update post-back script for Braintree | Update post-back script for Braintree
| Python | mit | gratipay/gratipay.com,gratipay/gratipay.com,gratipay/gratipay.com,gratipay/gratipay.com | from __future__ import absolute_import, division, print_function, unicode_literals
import csv
+ from decimal import Decimal as D
from gratipay import wireup
from gratipay.models.exchange_route import ExchangeRoute
from gratipay.models.participant import Participant
from gratipay.billing.exchanges import record_exchange
db = wireup.db(wireup.env())
- inp = csv.reader(open('balanced/refund/refunds.completed.csv'))
+ inp = csv.reader(open('refunds.completed.csv'))
- note = 'refund of advance payment; see https://medium.com/gratipay-blog/charging-in-arrears-18cacf779bee'
+ note = 'refund of advance payment; see https://medium.com/gratipay-blog/18cacf779bee'
+ total = N = 0
- for ts, id, amount, username, route_id, status_code, content in inp:
+ for ts, id, amount, username, route_id, success, ref in inp:
- if status_code != '201': continue
- amount = '-' + amount[:-2] + '.' + amount[-2:]
print('posting {} back for {}'.format(amount, username))
+ assert success == 'True'
+ total += D(amount)
+ N += 1
+
+ amount = D('-' + amount)
route = ExchangeRoute.from_id(route_id)
+
+ # Such a hack. :(
rp = route.participant
- participant = Participant.from_id(rp) if type(rp) is long else rp # Such a hack. :(
+ participant = Participant.from_id(rp) if type(rp) is long else rp
route.set_attributes(participant=participant)
- record_exchange(db, route, amount, 0, participant, 'pending', note)
+ exchange_id = record_exchange(db, route, amount, 0, participant, 'pending', note)
+ db.run("update exchanges set ref=%s where id=%s", (ref, exchange_id))
+
+ print('posted {} back for {}'.format(total, N))
+ | Update post-back script for Braintree | ## Code Before:
from __future__ import absolute_import, division, print_function, unicode_literals
import csv
from gratipay import wireup
from gratipay.models.exchange_route import ExchangeRoute
from gratipay.models.participant import Participant
from gratipay.billing.exchanges import record_exchange
db = wireup.db(wireup.env())
inp = csv.reader(open('balanced/refund/refunds.completed.csv'))
note = 'refund of advance payment; see https://medium.com/gratipay-blog/charging-in-arrears-18cacf779bee'
for ts, id, amount, username, route_id, status_code, content in inp:
if status_code != '201': continue
amount = '-' + amount[:-2] + '.' + amount[-2:]
print('posting {} back for {}'.format(amount, username))
route = ExchangeRoute.from_id(route_id)
rp = route.participant
participant = Participant.from_id(rp) if type(rp) is long else rp # Such a hack. :(
route.set_attributes(participant=participant)
record_exchange(db, route, amount, 0, participant, 'pending', note)
## Instruction:
Update post-back script for Braintree
## Code After:
from __future__ import absolute_import, division, print_function, unicode_literals
import csv
from decimal import Decimal as D
from gratipay import wireup
from gratipay.models.exchange_route import ExchangeRoute
from gratipay.models.participant import Participant
from gratipay.billing.exchanges import record_exchange
db = wireup.db(wireup.env())
inp = csv.reader(open('refunds.completed.csv'))
note = 'refund of advance payment; see https://medium.com/gratipay-blog/18cacf779bee'
total = N = 0
for ts, id, amount, username, route_id, success, ref in inp:
print('posting {} back for {}'.format(amount, username))
assert success == 'True'
total += D(amount)
N += 1
amount = D('-' + amount)
route = ExchangeRoute.from_id(route_id)
# Such a hack. :(
rp = route.participant
participant = Participant.from_id(rp) if type(rp) is long else rp
route.set_attributes(participant=participant)
exchange_id = record_exchange(db, route, amount, 0, participant, 'pending', note)
db.run("update exchanges set ref=%s where id=%s", (ref, exchange_id))
print('posted {} back for {}'.format(total, N))
|
022062c409ee06a719b5687ea1feb989c5cad627 | app/grandchallenge/pages/sitemaps.py | app/grandchallenge/pages/sitemaps.py | from grandchallenge.core.sitemaps import SubdomainSitemap
from grandchallenge.pages.models import Page
class PagesSitemap(SubdomainSitemap):
priority = 0.8
def items(self):
return Page.objects.filter(
permission_level=Page.ALL, challenge__hidden=False
)
| from grandchallenge.core.sitemaps import SubdomainSitemap
from grandchallenge.pages.models import Page
class PagesSitemap(SubdomainSitemap):
priority = 0.8
def items(self):
return Page.objects.filter(
permission_level=Page.ALL, challenge__hidden=False, hidden=False,
)
| Remove hidden public pages from sitemap | Remove hidden public pages from sitemap
| Python | apache-2.0 | comic/comic-django,comic/comic-django,comic/comic-django,comic/comic-django,comic/comic-django | from grandchallenge.core.sitemaps import SubdomainSitemap
from grandchallenge.pages.models import Page
class PagesSitemap(SubdomainSitemap):
priority = 0.8
def items(self):
return Page.objects.filter(
- permission_level=Page.ALL, challenge__hidden=False
+ permission_level=Page.ALL, challenge__hidden=False, hidden=False,
)
| Remove hidden public pages from sitemap | ## Code Before:
from grandchallenge.core.sitemaps import SubdomainSitemap
from grandchallenge.pages.models import Page
class PagesSitemap(SubdomainSitemap):
priority = 0.8
def items(self):
return Page.objects.filter(
permission_level=Page.ALL, challenge__hidden=False
)
## Instruction:
Remove hidden public pages from sitemap
## Code After:
from grandchallenge.core.sitemaps import SubdomainSitemap
from grandchallenge.pages.models import Page
class PagesSitemap(SubdomainSitemap):
priority = 0.8
def items(self):
return Page.objects.filter(
permission_level=Page.ALL, challenge__hidden=False, hidden=False,
)
|
c5239c6bbb40ede4279b33b965c5ded26a78b2ae | app/tests/manual/test_twitter_api.py | app/tests/manual/test_twitter_api.py | from __future__ import absolute_import
from unittest import TestCase
from lib.twitter_api import authentication
class TestAuth(TestCase):
def test_generateAppAccessToken(self):
auth = authentication._generateAppAccessToken()
def test_getTweepyConnection(self):
auth = authentication._generateAppAccessToken()
api = authentication._getTweepyConnection(auth)
def test_getAPIConnection(self):
"""
Test that App Access token can be used to connect to Twitter API.
"""
api = authentication.getAPIConnection(userFlow=False)
def test_getAppOnlyConnection(self):
"""
Test App-only token.
"""
api = authentication.getAppOnlyConnection()
| from __future__ import absolute_import
import os
import sys
import unittest
from unittest import TestCase
# Allow imports to be done when executing this file directly.
sys.path.insert(0, os.path.abspath(os.path.join(
os.path.dirname(__file__), os.path.pardir, os.path.pardir)
))
from lib.twitter_api import authentication
class TestAuth(TestCase):
def test_generateAppAccessToken(self):
auth = authentication._generateAppAccessToken()
def test_getTweepyConnection(self):
auth = authentication._generateAppAccessToken()
api = authentication._getTweepyConnection(auth)
def test_getAPIConnection(self):
"""
Test that App Access token can be used to connect to Twitter API.
"""
api = authentication.getAPIConnection(userFlow=False)
def test_getAppOnlyConnection(self):
"""
Test App-only token.
"""
api = authentication.getAppOnlyConnection()
if __name__ == '__main__':
unittest.main()
| Update Twitter auth test to run directly | test: Update Twitter auth test to run directly
| Python | mit | MichaelCurrin/twitterverse,MichaelCurrin/twitterverse | from __future__ import absolute_import
+ import os
+ import sys
+ import unittest
from unittest import TestCase
+
+ # Allow imports to be done when executing this file directly.
+ sys.path.insert(0, os.path.abspath(os.path.join(
+ os.path.dirname(__file__), os.path.pardir, os.path.pardir)
+ ))
+
from lib.twitter_api import authentication
class TestAuth(TestCase):
def test_generateAppAccessToken(self):
auth = authentication._generateAppAccessToken()
def test_getTweepyConnection(self):
auth = authentication._generateAppAccessToken()
api = authentication._getTweepyConnection(auth)
def test_getAPIConnection(self):
"""
Test that App Access token can be used to connect to Twitter API.
"""
api = authentication.getAPIConnection(userFlow=False)
def test_getAppOnlyConnection(self):
"""
Test App-only token.
"""
api = authentication.getAppOnlyConnection()
+
+ if __name__ == '__main__':
+ unittest.main()
+ | Update Twitter auth test to run directly | ## Code Before:
from __future__ import absolute_import
from unittest import TestCase
from lib.twitter_api import authentication
class TestAuth(TestCase):
def test_generateAppAccessToken(self):
auth = authentication._generateAppAccessToken()
def test_getTweepyConnection(self):
auth = authentication._generateAppAccessToken()
api = authentication._getTweepyConnection(auth)
def test_getAPIConnection(self):
"""
Test that App Access token can be used to connect to Twitter API.
"""
api = authentication.getAPIConnection(userFlow=False)
def test_getAppOnlyConnection(self):
"""
Test App-only token.
"""
api = authentication.getAppOnlyConnection()
## Instruction:
Update Twitter auth test to run directly
## Code After:
from __future__ import absolute_import
import os
import sys
import unittest
from unittest import TestCase
# Allow imports to be done when executing this file directly.
sys.path.insert(0, os.path.abspath(os.path.join(
os.path.dirname(__file__), os.path.pardir, os.path.pardir)
))
from lib.twitter_api import authentication
class TestAuth(TestCase):
def test_generateAppAccessToken(self):
auth = authentication._generateAppAccessToken()
def test_getTweepyConnection(self):
auth = authentication._generateAppAccessToken()
api = authentication._getTweepyConnection(auth)
def test_getAPIConnection(self):
"""
Test that App Access token can be used to connect to Twitter API.
"""
api = authentication.getAPIConnection(userFlow=False)
def test_getAppOnlyConnection(self):
"""
Test App-only token.
"""
api = authentication.getAppOnlyConnection()
if __name__ == '__main__':
unittest.main()
|
c6862c5f864db4e77dd835f074efdd284667e6fd | util/ldjpp.py | util/ldjpp.py |
from __future__ import print_function
import argparse
import json
parser = argparse.ArgumentParser(description='Pretty-print LDJSON.')
parser.add_argument('--indent', metavar='N', type=int, default=2,
dest='indent', help='indentation for pretty-printing')
parser.add_argument('--file', metavar='FILE', required=True, dest='file',
type=argparse.FileType('r'), help='input LDJSON file')
parser.add_argument('--sort', action='store_true', dest='sortkeys',
help='sort object keys')
args = parser.parse_args()
for line in args.file:
record = json.loads(line)
print(json.dumps(record, indent=args.indent, sort_keys=args.sortkeys))
|
from __future__ import print_function
import click
import json
from collections import OrderedDict
def json_loader(sortkeys):
def _loader(line):
if sortkeys:
return json.loads(line)
else:
# if --no-sortkeys, let's preserve file order
return json.JSONDecoder(object_pairs_hook=OrderedDict).decode(line)
return _loader
@click.command()
@click.option('indent', '-i', '--indent', default=2,
help='indentation for pretty-printing')
@click.option('--sortkeys/--no-sortkeys', default=False,
help='sort object keys')
@click.argument('infile', type=click.File())
def cli(indent, sortkeys, infile):
"""Pretty-print LDJSON."""
loader = json_loader(sortkeys)
for line in infile:
record = loader(line)
print(json.dumps(record, indent=indent, sort_keys=sortkeys))
if __name__ == '__main__':
cli()
| Use click instead of argparse | Use click instead of argparse
| Python | mit | mhyfritz/goontools,mhyfritz/goontools,mhyfritz/goontools |
from __future__ import print_function
- import argparse
+ import click
import json
+ from collections import OrderedDict
- parser = argparse.ArgumentParser(description='Pretty-print LDJSON.')
- parser.add_argument('--indent', metavar='N', type=int, default=2,
- dest='indent', help='indentation for pretty-printing')
- parser.add_argument('--file', metavar='FILE', required=True, dest='file',
- type=argparse.FileType('r'), help='input LDJSON file')
- parser.add_argument('--sort', action='store_true', dest='sortkeys',
- help='sort object keys')
- args = parser.parse_args()
+ def json_loader(sortkeys):
+ def _loader(line):
+ if sortkeys:
+ return json.loads(line)
+ else:
+ # if --no-sortkeys, let's preserve file order
+ return json.JSONDecoder(object_pairs_hook=OrderedDict).decode(line)
+ return _loader
- for line in args.file:
- record = json.loads(line)
- print(json.dumps(record, indent=args.indent, sort_keys=args.sortkeys))
+
+ @click.command()
+ @click.option('indent', '-i', '--indent', default=2,
+ help='indentation for pretty-printing')
+ @click.option('--sortkeys/--no-sortkeys', default=False,
+ help='sort object keys')
+ @click.argument('infile', type=click.File())
+ def cli(indent, sortkeys, infile):
+ """Pretty-print LDJSON."""
+ loader = json_loader(sortkeys)
+ for line in infile:
+ record = loader(line)
+ print(json.dumps(record, indent=indent, sort_keys=sortkeys))
+
+ if __name__ == '__main__':
+ cli()
+ | Use click instead of argparse | ## Code Before:
from __future__ import print_function
import argparse
import json
parser = argparse.ArgumentParser(description='Pretty-print LDJSON.')
parser.add_argument('--indent', metavar='N', type=int, default=2,
dest='indent', help='indentation for pretty-printing')
parser.add_argument('--file', metavar='FILE', required=True, dest='file',
type=argparse.FileType('r'), help='input LDJSON file')
parser.add_argument('--sort', action='store_true', dest='sortkeys',
help='sort object keys')
args = parser.parse_args()
for line in args.file:
record = json.loads(line)
print(json.dumps(record, indent=args.indent, sort_keys=args.sortkeys))
## Instruction:
Use click instead of argparse
## Code After:
from __future__ import print_function
import click
import json
from collections import OrderedDict
def json_loader(sortkeys):
def _loader(line):
if sortkeys:
return json.loads(line)
else:
# if --no-sortkeys, let's preserve file order
return json.JSONDecoder(object_pairs_hook=OrderedDict).decode(line)
return _loader
@click.command()
@click.option('indent', '-i', '--indent', default=2,
help='indentation for pretty-printing')
@click.option('--sortkeys/--no-sortkeys', default=False,
help='sort object keys')
@click.argument('infile', type=click.File())
def cli(indent, sortkeys, infile):
"""Pretty-print LDJSON."""
loader = json_loader(sortkeys)
for line in infile:
record = loader(line)
print(json.dumps(record, indent=indent, sort_keys=sortkeys))
if __name__ == '__main__':
cli()
|
b7decb588f5b6e4d15fb04fa59aa571e5570cbfe | djangae/contrib/contenttypes/apps.py | djangae/contrib/contenttypes/apps.py | from django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _
from django.contrib.contenttypes.management import update_contenttypes as django_update_contenttypes
from django.db.models.signals import post_migrate
from .management import update_contenttypes
from .models import SimulatedContentTypeManager
class ContentTypesConfig(AppConfig):
name = 'djangae.contrib.contenttypes'
verbose_name = _("Djangae Content Types")
label = "djangae_contenttypes"
def ready(self):
if django_update_contenttypes != update_contenttypes:
post_migrate.disconnect(django_update_contenttypes)
from django.db import models
from django.contrib.contenttypes import models as django_models
if not isinstance(django_models.ContentType.objects, SimulatedContentTypeManager):
django_models.ContentType.objects = SimulatedContentTypeManager()
django_models.ContentType.objects.auto_created = True
# Really force the default manager to use the Simulated one
meta = django_models.ContentType._meta
meta.local_managers[0] = SimulatedContentTypeManager()
meta._expire_cache()
# Our generated IDs take up a 64 bit range (signed) but aren't auto
# incrementing so update the field to reflect that (for validation)
meta.pk.__class__ = models.BigIntegerField
| from django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _
from django.contrib.contenttypes.management import update_contenttypes as django_update_contenttypes
from django.db.models.signals import post_migrate
from .management import update_contenttypes
from .models import SimulatedContentTypeManager
class ContentTypesConfig(AppConfig):
name = 'djangae.contrib.contenttypes'
verbose_name = _("Djangae Content Types")
label = "djangae_contenttypes"
def ready(self):
if django_update_contenttypes != update_contenttypes:
post_migrate.disconnect(django_update_contenttypes)
from django.db import models
from django.contrib.contenttypes import models as django_models
if not isinstance(django_models.ContentType.objects, SimulatedContentTypeManager):
django_models.ContentType.objects = SimulatedContentTypeManager(django_models.ContentType)
django_models.ContentType.objects.auto_created = True
# Really force the default manager to use the Simulated one
meta = django_models.ContentType._meta
if hasattr(meta, "local_managers"):
# Django >= 1.10
meta.local_managers[0] = SimulatedContentTypeManager()
else:
django_models.ContentType._default_manager = SimulatedContentTypeManager(django_models.ContentType)
meta._expire_cache()
# Our generated IDs take up a 64 bit range (signed) but aren't auto
# incrementing so update the field to reflect that (for validation)
meta.pk.__class__ = models.BigIntegerField
| Fix up for Django 1.9 | Fix up for Django 1.9
| Python | bsd-3-clause | grzes/djangae,potatolondon/djangae,grzes/djangae,potatolondon/djangae,grzes/djangae | from django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _
from django.contrib.contenttypes.management import update_contenttypes as django_update_contenttypes
from django.db.models.signals import post_migrate
from .management import update_contenttypes
from .models import SimulatedContentTypeManager
class ContentTypesConfig(AppConfig):
name = 'djangae.contrib.contenttypes'
verbose_name = _("Djangae Content Types")
label = "djangae_contenttypes"
def ready(self):
if django_update_contenttypes != update_contenttypes:
post_migrate.disconnect(django_update_contenttypes)
from django.db import models
from django.contrib.contenttypes import models as django_models
if not isinstance(django_models.ContentType.objects, SimulatedContentTypeManager):
- django_models.ContentType.objects = SimulatedContentTypeManager()
+ django_models.ContentType.objects = SimulatedContentTypeManager(django_models.ContentType)
django_models.ContentType.objects.auto_created = True
# Really force the default manager to use the Simulated one
meta = django_models.ContentType._meta
+ if hasattr(meta, "local_managers"):
+ # Django >= 1.10
- meta.local_managers[0] = SimulatedContentTypeManager()
+ meta.local_managers[0] = SimulatedContentTypeManager()
+ else:
+ django_models.ContentType._default_manager = SimulatedContentTypeManager(django_models.ContentType)
+
meta._expire_cache()
# Our generated IDs take up a 64 bit range (signed) but aren't auto
# incrementing so update the field to reflect that (for validation)
meta.pk.__class__ = models.BigIntegerField
| Fix up for Django 1.9 | ## Code Before:
from django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _
from django.contrib.contenttypes.management import update_contenttypes as django_update_contenttypes
from django.db.models.signals import post_migrate
from .management import update_contenttypes
from .models import SimulatedContentTypeManager
class ContentTypesConfig(AppConfig):
name = 'djangae.contrib.contenttypes'
verbose_name = _("Djangae Content Types")
label = "djangae_contenttypes"
def ready(self):
if django_update_contenttypes != update_contenttypes:
post_migrate.disconnect(django_update_contenttypes)
from django.db import models
from django.contrib.contenttypes import models as django_models
if not isinstance(django_models.ContentType.objects, SimulatedContentTypeManager):
django_models.ContentType.objects = SimulatedContentTypeManager()
django_models.ContentType.objects.auto_created = True
# Really force the default manager to use the Simulated one
meta = django_models.ContentType._meta
meta.local_managers[0] = SimulatedContentTypeManager()
meta._expire_cache()
# Our generated IDs take up a 64 bit range (signed) but aren't auto
# incrementing so update the field to reflect that (for validation)
meta.pk.__class__ = models.BigIntegerField
## Instruction:
Fix up for Django 1.9
## Code After:
from django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _
from django.contrib.contenttypes.management import update_contenttypes as django_update_contenttypes
from django.db.models.signals import post_migrate
from .management import update_contenttypes
from .models import SimulatedContentTypeManager
class ContentTypesConfig(AppConfig):
name = 'djangae.contrib.contenttypes'
verbose_name = _("Djangae Content Types")
label = "djangae_contenttypes"
def ready(self):
if django_update_contenttypes != update_contenttypes:
post_migrate.disconnect(django_update_contenttypes)
from django.db import models
from django.contrib.contenttypes import models as django_models
if not isinstance(django_models.ContentType.objects, SimulatedContentTypeManager):
django_models.ContentType.objects = SimulatedContentTypeManager(django_models.ContentType)
django_models.ContentType.objects.auto_created = True
# Really force the default manager to use the Simulated one
meta = django_models.ContentType._meta
if hasattr(meta, "local_managers"):
# Django >= 1.10
meta.local_managers[0] = SimulatedContentTypeManager()
else:
django_models.ContentType._default_manager = SimulatedContentTypeManager(django_models.ContentType)
meta._expire_cache()
# Our generated IDs take up a 64 bit range (signed) but aren't auto
# incrementing so update the field to reflect that (for validation)
meta.pk.__class__ = models.BigIntegerField
|
dfd3bff4560d1711624b8508795eb3debbaafa40 | changes/api/snapshotimage_details.py | changes/api/snapshotimage_details.py | from __future__ import absolute_import
from flask.ext.restful import reqparse
from changes.api.base import APIView
from changes.config import db
from changes.models import SnapshotImage, SnapshotStatus
class SnapshotImageDetailsAPIView(APIView):
parser = reqparse.RequestParser()
parser.add_argument('status', choices=SnapshotStatus._member_names_)
def get(self, image_id):
image = SnapshotImage.query.get(image_id)
if image is None:
return '', 404
return self.respond(image)
def post(self, image_id):
image = SnapshotImage.query.get(image_id)
if image is None:
return '', 404
args = self.parser.parse_args()
if args.status:
image.status = SnapshotStatus[args.status]
db.session.add(image)
db.session.flush()
if image.status == SnapshotStatus.active:
snapshot = image.snapshot
inactive_image_query = SnapshotImage.query.filter(
SnapshotImage.status != SnapshotStatus.active,
SnapshotImage.snapshot_id == snapshot.id,
).exists()
if not db.session.query(inactive_image_query).scalar():
snapshot.status = SnapshotStatus.active
db.session.add(snapshot)
db.session.commit()
return self.respond(image)
| from __future__ import absolute_import
from flask.ext.restful import reqparse
from changes.api.base import APIView
from changes.config import db
from changes.models import SnapshotImage, SnapshotStatus
class SnapshotImageDetailsAPIView(APIView):
parser = reqparse.RequestParser()
parser.add_argument('status', choices=SnapshotStatus._member_names_)
def get(self, image_id):
image = SnapshotImage.query.get(image_id)
if image is None:
return '', 404
return self.respond(image)
def post(self, image_id):
image = SnapshotImage.query.get(image_id)
if image is None:
return '', 404
args = self.parser.parse_args()
if args.status:
image.status = SnapshotStatus[args.status]
db.session.add(image)
db.session.flush()
if image.status == SnapshotStatus.active:
snapshot = image.snapshot
inactive_image_query = SnapshotImage.query.filter(
SnapshotImage.status != SnapshotStatus.active,
SnapshotImage.snapshot_id == snapshot.id,
).exists()
if not db.session.query(inactive_image_query).scalar():
snapshot.status = SnapshotStatus.active
db.session.add(snapshot)
elif snapshot.status == SnapshotStatus.active:
snapshot.status = SnapshotStatus.inactive
db.session.add(snapshot)
db.session.commit()
return self.respond(image)
| Mark snapshots as inactive if any are not valid | Mark snapshots as inactive if any are not valid
| Python | apache-2.0 | dropbox/changes,bowlofstew/changes,wfxiang08/changes,bowlofstew/changes,wfxiang08/changes,wfxiang08/changes,dropbox/changes,bowlofstew/changes,dropbox/changes,dropbox/changes,bowlofstew/changes,wfxiang08/changes | from __future__ import absolute_import
from flask.ext.restful import reqparse
from changes.api.base import APIView
from changes.config import db
from changes.models import SnapshotImage, SnapshotStatus
class SnapshotImageDetailsAPIView(APIView):
parser = reqparse.RequestParser()
parser.add_argument('status', choices=SnapshotStatus._member_names_)
def get(self, image_id):
image = SnapshotImage.query.get(image_id)
if image is None:
return '', 404
return self.respond(image)
def post(self, image_id):
image = SnapshotImage.query.get(image_id)
if image is None:
return '', 404
args = self.parser.parse_args()
if args.status:
image.status = SnapshotStatus[args.status]
db.session.add(image)
db.session.flush()
if image.status == SnapshotStatus.active:
snapshot = image.snapshot
inactive_image_query = SnapshotImage.query.filter(
SnapshotImage.status != SnapshotStatus.active,
SnapshotImage.snapshot_id == snapshot.id,
).exists()
if not db.session.query(inactive_image_query).scalar():
snapshot.status = SnapshotStatus.active
db.session.add(snapshot)
+ elif snapshot.status == SnapshotStatus.active:
+ snapshot.status = SnapshotStatus.inactive
+ db.session.add(snapshot)
db.session.commit()
return self.respond(image)
| Mark snapshots as inactive if any are not valid | ## Code Before:
from __future__ import absolute_import
from flask.ext.restful import reqparse
from changes.api.base import APIView
from changes.config import db
from changes.models import SnapshotImage, SnapshotStatus
class SnapshotImageDetailsAPIView(APIView):
parser = reqparse.RequestParser()
parser.add_argument('status', choices=SnapshotStatus._member_names_)
def get(self, image_id):
image = SnapshotImage.query.get(image_id)
if image is None:
return '', 404
return self.respond(image)
def post(self, image_id):
image = SnapshotImage.query.get(image_id)
if image is None:
return '', 404
args = self.parser.parse_args()
if args.status:
image.status = SnapshotStatus[args.status]
db.session.add(image)
db.session.flush()
if image.status == SnapshotStatus.active:
snapshot = image.snapshot
inactive_image_query = SnapshotImage.query.filter(
SnapshotImage.status != SnapshotStatus.active,
SnapshotImage.snapshot_id == snapshot.id,
).exists()
if not db.session.query(inactive_image_query).scalar():
snapshot.status = SnapshotStatus.active
db.session.add(snapshot)
db.session.commit()
return self.respond(image)
## Instruction:
Mark snapshots as inactive if any are not valid
## Code After:
from __future__ import absolute_import
from flask.ext.restful import reqparse
from changes.api.base import APIView
from changes.config import db
from changes.models import SnapshotImage, SnapshotStatus
class SnapshotImageDetailsAPIView(APIView):
parser = reqparse.RequestParser()
parser.add_argument('status', choices=SnapshotStatus._member_names_)
def get(self, image_id):
image = SnapshotImage.query.get(image_id)
if image is None:
return '', 404
return self.respond(image)
def post(self, image_id):
image = SnapshotImage.query.get(image_id)
if image is None:
return '', 404
args = self.parser.parse_args()
if args.status:
image.status = SnapshotStatus[args.status]
db.session.add(image)
db.session.flush()
if image.status == SnapshotStatus.active:
snapshot = image.snapshot
inactive_image_query = SnapshotImage.query.filter(
SnapshotImage.status != SnapshotStatus.active,
SnapshotImage.snapshot_id == snapshot.id,
).exists()
if not db.session.query(inactive_image_query).scalar():
snapshot.status = SnapshotStatus.active
db.session.add(snapshot)
elif snapshot.status == SnapshotStatus.active:
snapshot.status = SnapshotStatus.inactive
db.session.add(snapshot)
db.session.commit()
return self.respond(image)
|
f8b4b1a860b5c0a3ff16dbb8bbf83010bd9a1009 | feincms3/plugins/__init__.py | feincms3/plugins/__init__.py |
from . import html
from . import snippet
try:
from . import external
except ImportError: # pragma: no cover
pass
try:
from . import image
except ImportError: # pragma: no cover
pass
try:
from . import richtext
except ImportError: # pragma: no cover
pass
try:
from . import versatileimage
except ImportError: # pragma: no cover
pass
|
from . import html
from . import snippet
try:
import requests
except ImportError: # pragma: no cover
pass
else:
from . import external
try:
import imagefield
except ImportError: # pragma: no cover
pass
else:
from . import image
try:
import feincms3.cleanse
except ImportError: # pragma: no cover
pass
else:
from . import richtext
try:
import versatileimagefield
except ImportError: # pragma: no cover
pass
else:
from . import versatileimage
| Stop hiding local import errors | feincms3.plugins: Stop hiding local import errors
| Python | bsd-3-clause | matthiask/feincms3,matthiask/feincms3,matthiask/feincms3 |
from . import html
from . import snippet
try:
- from . import external
+ import requests
except ImportError: # pragma: no cover
pass
+ else:
+ from . import external
try:
- from . import image
+ import imagefield
except ImportError: # pragma: no cover
pass
+ else:
+ from . import image
try:
- from . import richtext
+ import feincms3.cleanse
except ImportError: # pragma: no cover
pass
+ else:
+ from . import richtext
try:
- from . import versatileimage
+ import versatileimagefield
except ImportError: # pragma: no cover
pass
+ else:
+ from . import versatileimage
| Stop hiding local import errors | ## Code Before:
from . import html
from . import snippet
try:
from . import external
except ImportError: # pragma: no cover
pass
try:
from . import image
except ImportError: # pragma: no cover
pass
try:
from . import richtext
except ImportError: # pragma: no cover
pass
try:
from . import versatileimage
except ImportError: # pragma: no cover
pass
## Instruction:
Stop hiding local import errors
## Code After:
from . import html
from . import snippet
try:
import requests
except ImportError: # pragma: no cover
pass
else:
from . import external
try:
import imagefield
except ImportError: # pragma: no cover
pass
else:
from . import image
try:
import feincms3.cleanse
except ImportError: # pragma: no cover
pass
else:
from . import richtext
try:
import versatileimagefield
except ImportError: # pragma: no cover
pass
else:
from . import versatileimage
|
b2eebbdcc14dd47d6ad8bb385966f13ed13890c1 | superdesk/coverages.py | superdesk/coverages.py | from superdesk.base_model import BaseModel
def init_app(app):
CoverageModel(app=app)
def rel(resource, embeddable=False):
return {
'type': 'objectid',
'data_relation': {'resource': resource, 'field': '_id', 'embeddable': embeddable}
}
class CoverageModel(BaseModel):
endpoint_name = 'coverages'
schema = {
'headline': {'type': 'string'},
'type': {'type': 'string'},
'ed_note': {'type': 'string'},
'scheduled': {'type': 'datetime'},
'delivery': rel('archive'),
'assigned_user': rel('users', True),
'assigned_desk': rel('desks', True),
'planning_item': rel('planning'),
}
| from superdesk.base_model import BaseModel
def init_app(app):
CoverageModel(app=app)
def rel(resource, embeddable=False):
return {
'type': 'objectid',
'data_relation': {'resource': resource, 'field': '_id', 'embeddable': embeddable}
}
class CoverageModel(BaseModel):
endpoint_name = 'coverages'
schema = {
'headline': {'type': 'string'},
'type': {'type': 'string'},
'ed_note': {'type': 'string'},
'scheduled': {'type': 'datetime'},
'delivery': {'type': 'string'},
'assigned_user': rel('users', True),
'assigned_desk': rel('desks', True),
'planning_item': {'type': 'string'},
}
| Fix data relation not working for custom Guids | Fix data relation not working for custom Guids
| Python | agpl-3.0 | plamut/superdesk,sivakuna-aap/superdesk,mdhaman/superdesk-aap,sivakuna-aap/superdesk,liveblog/superdesk,pavlovicnemanja/superdesk,petrjasek/superdesk,mugurrus/superdesk,ioanpocol/superdesk,pavlovicnemanja/superdesk,Aca-jov/superdesk,akintolga/superdesk,vied12/superdesk,gbbr/superdesk,fritzSF/superdesk,ancafarcas/superdesk,ioanpocol/superdesk-ntb,mdhaman/superdesk-aap,marwoodandrew/superdesk-aap,darconny/superdesk,akintolga/superdesk-aap,amagdas/superdesk,sivakuna-aap/superdesk,thnkloud9/superdesk,fritzSF/superdesk,akintolga/superdesk-aap,ancafarcas/superdesk,akintolga/superdesk,pavlovicnemanja92/superdesk,amagdas/superdesk,vied12/superdesk,verifiedpixel/superdesk,superdesk/superdesk-ntb,Aca-jov/superdesk,superdesk/superdesk,akintolga/superdesk,marwoodandrew/superdesk-aap,hlmnrmr/superdesk,verifiedpixel/superdesk,pavlovicnemanja/superdesk,petrjasek/superdesk-server,pavlovicnemanja/superdesk,liveblog/superdesk,thnkloud9/superdesk,superdesk/superdesk-aap,plamut/superdesk,vied12/superdesk,ioanpocol/superdesk-ntb,plamut/superdesk,darconny/superdesk,sjunaid/superdesk,superdesk/superdesk-aap,amagdas/superdesk,verifiedpixel/superdesk,vied12/superdesk,darconny/superdesk,fritzSF/superdesk,sivakuna-aap/superdesk,amagdas/superdesk,superdesk/superdesk-aap,gbbr/superdesk,marwoodandrew/superdesk-aap,mdhaman/superdesk,petrjasek/superdesk-ntb,sivakuna-aap/superdesk,akintolga/superdesk-aap,akintolga/superdesk,superdesk/superdesk-ntb,fritzSF/superdesk,marwoodandrew/superdesk,marwoodandrew/superdesk,verifiedpixel/superdesk,amagdas/superdesk,marwoodandrew/superdesk-aap,sjunaid/superdesk,petrjasek/superdesk-ntb,sjunaid/superdesk,Aca-jov/superdesk,ioanpocol/superdesk-ntb,petrjasek/superdesk-server,pavlovicnemanja92/superdesk,superdesk/superdesk,ancafarcas/superdesk,superdesk/superdesk,mdhaman/superdesk,fritzSF/superdesk,vied12/superdesk,pavlovicnemanja92/superdesk,petrjasek/superdesk-ntb,hlmnrmr/superdesk,marwoodandrew/superdesk,superdesk/superdesk-ntb,petrjasek/supe
rdesk,hlmnrmr/superdesk,petrjasek/superdesk,mugurrus/superdesk,gbbr/superdesk,pavlovicnemanja92/superdesk,plamut/superdesk,liveblog/superdesk,verifiedpixel/superdesk,petrjasek/superdesk,mdhaman/superdesk,petrjasek/superdesk-ntb,mugurrus/superdesk,mdhaman/superdesk-aap,superdesk/superdesk-aap,superdesk/superdesk-ntb,marwoodandrew/superdesk,pavlovicnemanja92/superdesk,ioanpocol/superdesk,thnkloud9/superdesk,marwoodandrew/superdesk,liveblog/superdesk,mdhaman/superdesk-aap,ioanpocol/superdesk,akintolga/superdesk,plamut/superdesk,liveblog/superdesk,superdesk/superdesk,akintolga/superdesk-aap | from superdesk.base_model import BaseModel
def init_app(app):
CoverageModel(app=app)
def rel(resource, embeddable=False):
return {
'type': 'objectid',
'data_relation': {'resource': resource, 'field': '_id', 'embeddable': embeddable}
}
class CoverageModel(BaseModel):
endpoint_name = 'coverages'
schema = {
'headline': {'type': 'string'},
'type': {'type': 'string'},
'ed_note': {'type': 'string'},
'scheduled': {'type': 'datetime'},
- 'delivery': rel('archive'),
+ 'delivery': {'type': 'string'},
'assigned_user': rel('users', True),
'assigned_desk': rel('desks', True),
- 'planning_item': rel('planning'),
+ 'planning_item': {'type': 'string'},
}
| Fix data relation not working for custom Guids | ## Code Before:
from superdesk.base_model import BaseModel
def init_app(app):
CoverageModel(app=app)
def rel(resource, embeddable=False):
return {
'type': 'objectid',
'data_relation': {'resource': resource, 'field': '_id', 'embeddable': embeddable}
}
class CoverageModel(BaseModel):
endpoint_name = 'coverages'
schema = {
'headline': {'type': 'string'},
'type': {'type': 'string'},
'ed_note': {'type': 'string'},
'scheduled': {'type': 'datetime'},
'delivery': rel('archive'),
'assigned_user': rel('users', True),
'assigned_desk': rel('desks', True),
'planning_item': rel('planning'),
}
## Instruction:
Fix data relation not working for custom Guids
## Code After:
from superdesk.base_model import BaseModel
def init_app(app):
CoverageModel(app=app)
def rel(resource, embeddable=False):
return {
'type': 'objectid',
'data_relation': {'resource': resource, 'field': '_id', 'embeddable': embeddable}
}
class CoverageModel(BaseModel):
endpoint_name = 'coverages'
schema = {
'headline': {'type': 'string'},
'type': {'type': 'string'},
'ed_note': {'type': 'string'},
'scheduled': {'type': 'datetime'},
'delivery': {'type': 'string'},
'assigned_user': rel('users', True),
'assigned_desk': rel('desks', True),
'planning_item': {'type': 'string'},
}
|
4147e6f560889c75abbfd9c8e85ea38ffe408550 | suelta/mechanisms/facebook_platform.py | suelta/mechanisms/facebook_platform.py | from suelta.util import bytes
from suelta.sasl import Mechanism, register_mechanism
try:
import urlparse
except ImportError:
import urllib.parse as urlparse
class X_FACEBOOK_PLATFORM(Mechanism):
def __init__(self, sasl, name):
super(X_FACEBOOK_PLATFORM, self).__init__(sasl, name)
self.check_values(['access_token', 'api_key'])
def process(self, challenge=None):
if challenge is not None:
values = {}
for kv in challenge.split('&'):
key, value = kv.split('=')
values[key] = value
resp_data = {
'method': values['method'],
'v': '1.0',
'call_id': '1.0',
'nonce': values['nonce'],
'access_token': self.values['access_token'],
'api_key': self.values['api_key']
}
resp = '&'.join(['%s=%s' % (k, v) for k, v in resp_data.items()])
return bytes(resp)
return bytes('')
def okay(self):
return True
register_mechanism('X-FACEBOOK-PLATFORM', 40, X_FACEBOOK_PLATFORM, use_hashes=False)
| from suelta.util import bytes
from suelta.sasl import Mechanism, register_mechanism
try:
import urlparse
except ImportError:
import urllib.parse as urlparse
class X_FACEBOOK_PLATFORM(Mechanism):
def __init__(self, sasl, name):
super(X_FACEBOOK_PLATFORM, self).__init__(sasl, name)
self.check_values(['access_token', 'api_key'])
def process(self, challenge=None):
if challenge is not None:
values = {}
for kv in challenge.split(b'&'):
key, value = kv.split(b'=')
values[key] = value
resp_data = {
b'method': values[b'method'],
b'v': b'1.0',
b'call_id': b'1.0',
b'nonce': values[b'nonce'],
b'access_token': self.values['access_token'],
b'api_key': self.values['api_key']
}
resp = '&'.join(['%s=%s' % (k, v) for k, v in resp_data.items()])
return bytes(resp)
return b''
def okay(self):
return True
register_mechanism('X-FACEBOOK-PLATFORM', 40, X_FACEBOOK_PLATFORM, use_hashes=False)
| Work around Python3's byte semantics. | Work around Python3's byte semantics.
| Python | mit | dwd/Suelta | from suelta.util import bytes
from suelta.sasl import Mechanism, register_mechanism
try:
import urlparse
except ImportError:
import urllib.parse as urlparse
class X_FACEBOOK_PLATFORM(Mechanism):
def __init__(self, sasl, name):
super(X_FACEBOOK_PLATFORM, self).__init__(sasl, name)
self.check_values(['access_token', 'api_key'])
def process(self, challenge=None):
if challenge is not None:
values = {}
- for kv in challenge.split('&'):
+ for kv in challenge.split(b'&'):
- key, value = kv.split('=')
+ key, value = kv.split(b'=')
values[key] = value
resp_data = {
- 'method': values['method'],
+ b'method': values[b'method'],
- 'v': '1.0',
+ b'v': b'1.0',
- 'call_id': '1.0',
+ b'call_id': b'1.0',
- 'nonce': values['nonce'],
+ b'nonce': values[b'nonce'],
- 'access_token': self.values['access_token'],
+ b'access_token': self.values['access_token'],
- 'api_key': self.values['api_key']
+ b'api_key': self.values['api_key']
}
resp = '&'.join(['%s=%s' % (k, v) for k, v in resp_data.items()])
return bytes(resp)
- return bytes('')
+ return b''
def okay(self):
return True
register_mechanism('X-FACEBOOK-PLATFORM', 40, X_FACEBOOK_PLATFORM, use_hashes=False)
| Work around Python3's byte semantics. | ## Code Before:
from suelta.util import bytes
from suelta.sasl import Mechanism, register_mechanism
try:
import urlparse
except ImportError:
import urllib.parse as urlparse
class X_FACEBOOK_PLATFORM(Mechanism):
def __init__(self, sasl, name):
super(X_FACEBOOK_PLATFORM, self).__init__(sasl, name)
self.check_values(['access_token', 'api_key'])
def process(self, challenge=None):
if challenge is not None:
values = {}
for kv in challenge.split('&'):
key, value = kv.split('=')
values[key] = value
resp_data = {
'method': values['method'],
'v': '1.0',
'call_id': '1.0',
'nonce': values['nonce'],
'access_token': self.values['access_token'],
'api_key': self.values['api_key']
}
resp = '&'.join(['%s=%s' % (k, v) for k, v in resp_data.items()])
return bytes(resp)
return bytes('')
def okay(self):
return True
register_mechanism('X-FACEBOOK-PLATFORM', 40, X_FACEBOOK_PLATFORM, use_hashes=False)
## Instruction:
Work around Python3's byte semantics.
## Code After:
from suelta.util import bytes
from suelta.sasl import Mechanism, register_mechanism
try:
import urlparse
except ImportError:
import urllib.parse as urlparse
class X_FACEBOOK_PLATFORM(Mechanism):
def __init__(self, sasl, name):
super(X_FACEBOOK_PLATFORM, self).__init__(sasl, name)
self.check_values(['access_token', 'api_key'])
def process(self, challenge=None):
if challenge is not None:
values = {}
for kv in challenge.split(b'&'):
key, value = kv.split(b'=')
values[key] = value
resp_data = {
b'method': values[b'method'],
b'v': b'1.0',
b'call_id': b'1.0',
b'nonce': values[b'nonce'],
b'access_token': self.values['access_token'],
b'api_key': self.values['api_key']
}
resp = '&'.join(['%s=%s' % (k, v) for k, v in resp_data.items()])
return bytes(resp)
return b''
def okay(self):
return True
register_mechanism('X-FACEBOOK-PLATFORM', 40, X_FACEBOOK_PLATFORM, use_hashes=False)
|
1dbe7acc945a545d3b18ec5025c19b26d1ed110f | test/test_sparql_construct_bindings.py | test/test_sparql_construct_bindings.py | from rdflib import Graph, URIRef, Literal, BNode
from rdflib.plugins.sparql import prepareQuery
from rdflib.compare import isomorphic
import unittest
class TestConstructInitBindings(unittest.TestCase):
def test_construct_init_bindings(self):
"""
This is issue https://github.com/RDFLib/rdflib/issues/1001
"""
g1 = Graph()
q_str = ("""
PREFIX : <urn:ns1:>
CONSTRUCT {
?uri :prop1 ?val1;
:prop2 ?c .
}
WHERE {
bind(uri(concat("urn:ns1:", ?a)) as ?uri)
bind(?b as ?val1)
}
""")
q_prepared = prepareQuery(q_str)
expected = [
(URIRef('urn:ns1:A'),URIRef('urn:ns1:prop1'), Literal('B')),
(URIRef('urn:ns1:A'),URIRef('urn:ns1:prop2'), Literal('C'))
]
results = g1.query(q_prepared, initBindings={
'a': Literal('A'),
'b': Literal('B'),
'c': Literal('C')
})
self.assertCountEqual(list(results), expected)
| from rdflib import Graph, URIRef, Literal, BNode
from rdflib.plugins.sparql import prepareQuery
from rdflib.compare import isomorphic
import unittest
from nose.tools import eq_
class TestConstructInitBindings(unittest.TestCase):
def test_construct_init_bindings(self):
"""
This is issue https://github.com/RDFLib/rdflib/issues/1001
"""
g1 = Graph()
q_str = ("""
PREFIX : <urn:ns1:>
CONSTRUCT {
?uri :prop1 ?val1;
:prop2 ?c .
}
WHERE {
bind(uri(concat("urn:ns1:", ?a)) as ?uri)
bind(?b as ?val1)
}
""")
q_prepared = prepareQuery(q_str)
expected = [
(URIRef('urn:ns1:A'),URIRef('urn:ns1:prop1'), Literal('B')),
(URIRef('urn:ns1:A'),URIRef('urn:ns1:prop2'), Literal('C'))
]
results = g1.query(q_prepared, initBindings={
'a': Literal('A'),
'b': Literal('B'),
'c': Literal('C')
})
eq_(sorted(results, key=lambda x: str(x[1])), expected)
| Fix unit tests for python2 | Fix unit tests for python2
| Python | bsd-3-clause | RDFLib/rdflib,RDFLib/rdflib,RDFLib/rdflib,RDFLib/rdflib | from rdflib import Graph, URIRef, Literal, BNode
from rdflib.plugins.sparql import prepareQuery
from rdflib.compare import isomorphic
import unittest
+ from nose.tools import eq_
class TestConstructInitBindings(unittest.TestCase):
def test_construct_init_bindings(self):
"""
This is issue https://github.com/RDFLib/rdflib/issues/1001
"""
g1 = Graph()
q_str = ("""
PREFIX : <urn:ns1:>
CONSTRUCT {
?uri :prop1 ?val1;
:prop2 ?c .
}
WHERE {
bind(uri(concat("urn:ns1:", ?a)) as ?uri)
bind(?b as ?val1)
}
""")
q_prepared = prepareQuery(q_str)
expected = [
(URIRef('urn:ns1:A'),URIRef('urn:ns1:prop1'), Literal('B')),
(URIRef('urn:ns1:A'),URIRef('urn:ns1:prop2'), Literal('C'))
]
results = g1.query(q_prepared, initBindings={
'a': Literal('A'),
'b': Literal('B'),
'c': Literal('C')
})
- self.assertCountEqual(list(results), expected)
+ eq_(sorted(results, key=lambda x: str(x[1])), expected)
| Fix unit tests for python2 | ## Code Before:
from rdflib import Graph, URIRef, Literal, BNode
from rdflib.plugins.sparql import prepareQuery
from rdflib.compare import isomorphic
import unittest
class TestConstructInitBindings(unittest.TestCase):
def test_construct_init_bindings(self):
"""
This is issue https://github.com/RDFLib/rdflib/issues/1001
"""
g1 = Graph()
q_str = ("""
PREFIX : <urn:ns1:>
CONSTRUCT {
?uri :prop1 ?val1;
:prop2 ?c .
}
WHERE {
bind(uri(concat("urn:ns1:", ?a)) as ?uri)
bind(?b as ?val1)
}
""")
q_prepared = prepareQuery(q_str)
expected = [
(URIRef('urn:ns1:A'),URIRef('urn:ns1:prop1'), Literal('B')),
(URIRef('urn:ns1:A'),URIRef('urn:ns1:prop2'), Literal('C'))
]
results = g1.query(q_prepared, initBindings={
'a': Literal('A'),
'b': Literal('B'),
'c': Literal('C')
})
self.assertCountEqual(list(results), expected)
## Instruction:
Fix unit tests for python2
## Code After:
from rdflib import Graph, URIRef, Literal, BNode
from rdflib.plugins.sparql import prepareQuery
from rdflib.compare import isomorphic
import unittest
from nose.tools import eq_
class TestConstructInitBindings(unittest.TestCase):
def test_construct_init_bindings(self):
"""
This is issue https://github.com/RDFLib/rdflib/issues/1001
"""
g1 = Graph()
q_str = ("""
PREFIX : <urn:ns1:>
CONSTRUCT {
?uri :prop1 ?val1;
:prop2 ?c .
}
WHERE {
bind(uri(concat("urn:ns1:", ?a)) as ?uri)
bind(?b as ?val1)
}
""")
q_prepared = prepareQuery(q_str)
expected = [
(URIRef('urn:ns1:A'),URIRef('urn:ns1:prop1'), Literal('B')),
(URIRef('urn:ns1:A'),URIRef('urn:ns1:prop2'), Literal('C'))
]
results = g1.query(q_prepared, initBindings={
'a': Literal('A'),
'b': Literal('B'),
'c': Literal('C')
})
eq_(sorted(results, key=lambda x: str(x[1])), expected)
|
2ebbe2f9f23621d10a70d0817d83da33b002299e | rest_surveys/urls.py | rest_surveys/urls.py | from __future__ import unicode_literals
from django.conf import settings
from django.conf.urls import include, url
from rest_framework_bulk.routes import BulkRouter
from rest_surveys.views import (
SurveyViewSet,
SurveyResponseViewSet,
)
# API
# With trailing slash appended:
router = BulkRouter()
router.register(r'surveys', SurveyViewSet, base_name='survey')
router.register(r'survey-responses', SurveyResponseViewSet,
base_name='survey-response')
slashless_router = BulkRouter(trailing_slash=False)
slashless_router.registry = router.registry[:]
urlpatterns = [
url(r'^{api_path}'.format(api_path=settings.REST_SURVEYS['API_PATH']),
include(router.urls)),
url(r'^{api_path}'.format(api_path=settings.REST_SURVEYS['API_PATH']),
include(slashless_router.urls)),
]
| from __future__ import unicode_literals
from django.conf import settings
from django.conf.urls import include, url
from rest_framework_bulk.routes import BulkRouter
from rest_surveys.views import (
SurveyViewSet,
SurveyResponseViewSet,
)
# API
# With trailing slash appended:
router = BulkRouter()
router.register(r'surveys', SurveyViewSet, base_name='survey')
router.register(r'survey-responses', SurveyResponseViewSet,
base_name='survey-response')
slashless_router = BulkRouter(trailing_slash=False)
slashless_router.registry = router.registry[:]
urlpatterns = [
url(r'^{api_path}'.format(
api_path=settings.REST_SURVEYS.get('API_PATH', 'api/')),
include(router.urls)),
url(r'^{api_path}'.format(
api_path=settings.REST_SURVEYS.get('API_PATH', 'api/')),
include(slashless_router.urls)),
]
| Set a default api path | Set a default api path
| Python | mit | danxshap/django-rest-surveys | from __future__ import unicode_literals
from django.conf import settings
from django.conf.urls import include, url
from rest_framework_bulk.routes import BulkRouter
from rest_surveys.views import (
SurveyViewSet,
SurveyResponseViewSet,
)
# API
# With trailing slash appended:
router = BulkRouter()
router.register(r'surveys', SurveyViewSet, base_name='survey')
router.register(r'survey-responses', SurveyResponseViewSet,
base_name='survey-response')
slashless_router = BulkRouter(trailing_slash=False)
slashless_router.registry = router.registry[:]
urlpatterns = [
- url(r'^{api_path}'.format(api_path=settings.REST_SURVEYS['API_PATH']),
+ url(r'^{api_path}'.format(
+ api_path=settings.REST_SURVEYS.get('API_PATH', 'api/')),
include(router.urls)),
- url(r'^{api_path}'.format(api_path=settings.REST_SURVEYS['API_PATH']),
+ url(r'^{api_path}'.format(
+ api_path=settings.REST_SURVEYS.get('API_PATH', 'api/')),
include(slashless_router.urls)),
]
| Set a default api path | ## Code Before:
from __future__ import unicode_literals
from django.conf import settings
from django.conf.urls import include, url
from rest_framework_bulk.routes import BulkRouter
from rest_surveys.views import (
SurveyViewSet,
SurveyResponseViewSet,
)
# API
# With trailing slash appended:
router = BulkRouter()
router.register(r'surveys', SurveyViewSet, base_name='survey')
router.register(r'survey-responses', SurveyResponseViewSet,
base_name='survey-response')
slashless_router = BulkRouter(trailing_slash=False)
slashless_router.registry = router.registry[:]
urlpatterns = [
url(r'^{api_path}'.format(api_path=settings.REST_SURVEYS['API_PATH']),
include(router.urls)),
url(r'^{api_path}'.format(api_path=settings.REST_SURVEYS['API_PATH']),
include(slashless_router.urls)),
]
## Instruction:
Set a default api path
## Code After:
from __future__ import unicode_literals
from django.conf import settings
from django.conf.urls import include, url
from rest_framework_bulk.routes import BulkRouter
from rest_surveys.views import (
SurveyViewSet,
SurveyResponseViewSet,
)
# API
# With trailing slash appended:
router = BulkRouter()
router.register(r'surveys', SurveyViewSet, base_name='survey')
router.register(r'survey-responses', SurveyResponseViewSet,
base_name='survey-response')
slashless_router = BulkRouter(trailing_slash=False)
slashless_router.registry = router.registry[:]
urlpatterns = [
url(r'^{api_path}'.format(
api_path=settings.REST_SURVEYS.get('API_PATH', 'api/')),
include(router.urls)),
url(r'^{api_path}'.format(
api_path=settings.REST_SURVEYS.get('API_PATH', 'api/')),
include(slashless_router.urls)),
]
|
a7437e657f55cd708baba83421941e67d474daf7 | tests/test_utilities.py | tests/test_utilities.py | from __future__ import (absolute_import, division, print_function)
from folium.utilities import camelize
def test_camelize():
assert camelize('variable_name') == 'variableName'
assert camelize('variableName') == 'variableName'
assert camelize('name') == 'name'
assert camelize('very_long_variable_name') == 'veryLongVariableName'
| from __future__ import (absolute_import, division, print_function)
from folium.utilities import camelize, deep_copy
from folium import Map, FeatureGroup, Marker
def test_camelize():
assert camelize('variable_name') == 'variableName'
assert camelize('variableName') == 'variableName'
assert camelize('name') == 'name'
assert camelize('very_long_variable_name') == 'veryLongVariableName'
def test_deep_copy():
m = Map()
fg = FeatureGroup().add_to(m)
Marker(location=(0, 0)).add_to(fg)
m_copy = deep_copy(m)
def check(item, item_copy):
assert type(item) is type(item_copy)
assert item._name == item_copy._name
for attr in item.__dict__.keys():
if not attr.startswith('_'):
assert getattr(item, attr) == getattr(item_copy, attr)
assert item is not item_copy
assert item._id != item_copy._id
for child, child_copy in zip(item._children.values(),
item_copy._children.values()):
check(child, child_copy)
check(m, m_copy)
| Add test for deep_copy function | Add test for deep_copy function
| Python | mit | python-visualization/folium,ocefpaf/folium,ocefpaf/folium,python-visualization/folium | from __future__ import (absolute_import, division, print_function)
- from folium.utilities import camelize
+ from folium.utilities import camelize, deep_copy
+ from folium import Map, FeatureGroup, Marker
def test_camelize():
assert camelize('variable_name') == 'variableName'
assert camelize('variableName') == 'variableName'
assert camelize('name') == 'name'
assert camelize('very_long_variable_name') == 'veryLongVariableName'
+
+ def test_deep_copy():
+ m = Map()
+ fg = FeatureGroup().add_to(m)
+ Marker(location=(0, 0)).add_to(fg)
+ m_copy = deep_copy(m)
+
+ def check(item, item_copy):
+ assert type(item) is type(item_copy)
+ assert item._name == item_copy._name
+ for attr in item.__dict__.keys():
+ if not attr.startswith('_'):
+ assert getattr(item, attr) == getattr(item_copy, attr)
+ assert item is not item_copy
+ assert item._id != item_copy._id
+ for child, child_copy in zip(item._children.values(),
+ item_copy._children.values()):
+ check(child, child_copy)
+
+ check(m, m_copy)
+ | Add test for deep_copy function | ## Code Before:
from __future__ import (absolute_import, division, print_function)
from folium.utilities import camelize
def test_camelize():
assert camelize('variable_name') == 'variableName'
assert camelize('variableName') == 'variableName'
assert camelize('name') == 'name'
assert camelize('very_long_variable_name') == 'veryLongVariableName'
## Instruction:
Add test for deep_copy function
## Code After:
from __future__ import (absolute_import, division, print_function)
from folium.utilities import camelize, deep_copy
from folium import Map, FeatureGroup, Marker
def test_camelize():
assert camelize('variable_name') == 'variableName'
assert camelize('variableName') == 'variableName'
assert camelize('name') == 'name'
assert camelize('very_long_variable_name') == 'veryLongVariableName'
def test_deep_copy():
m = Map()
fg = FeatureGroup().add_to(m)
Marker(location=(0, 0)).add_to(fg)
m_copy = deep_copy(m)
def check(item, item_copy):
assert type(item) is type(item_copy)
assert item._name == item_copy._name
for attr in item.__dict__.keys():
if not attr.startswith('_'):
assert getattr(item, attr) == getattr(item_copy, attr)
assert item is not item_copy
assert item._id != item_copy._id
for child, child_copy in zip(item._children.values(),
item_copy._children.values()):
check(child, child_copy)
check(m, m_copy)
|
fe05b5f694671a46dd3391b9cb6561923345c4b7 | rpi_gpio_http/app.py | rpi_gpio_http/app.py | from flask import Flask
import logging
import logging.config
import RPi.GPIO as GPIO
from .config import config, config_loader
from .channel import ChannelFactory
app = Flask('rpi_gpio_http')
logging.config.dictConfig(config['logger'])
logger = logging.getLogger(__name__)
logger.info("Config loaded from %s" % config_loader.filename)
channels = {}
GPIO.setmode(GPIO.BOARD)
for ch in config['channels']:
if ch['enabled'] != True:
continue
channel = ChannelFactory.create(ch)
if channel:
channels[channel.pin] = channel
import controllers
| from flask import Flask
import logging
import logging.config
import RPi.GPIO as GPIO
from .config import config, config_loader
from .channel import ChannelFactory
app = Flask('rpi_gpio_http')
logging.config.dictConfig(config['logger'])
logger = logging.getLogger(__name__)
logger.info("Config loaded from %s" % config_loader.filename)
channels = {}
GPIO.setwarnings(False)
GPIO.setmode(GPIO.BOARD)
for ch in config['channels']:
if ch['enabled'] != True:
continue
channel = ChannelFactory.create(ch)
if channel:
channels[channel.pin] = channel
import controllers
| Disable warnings in GPIO lib | Disable warnings in GPIO lib
| Python | mit | voidpp/rpi-gpio-http | from flask import Flask
import logging
import logging.config
import RPi.GPIO as GPIO
from .config import config, config_loader
from .channel import ChannelFactory
app = Flask('rpi_gpio_http')
logging.config.dictConfig(config['logger'])
logger = logging.getLogger(__name__)
logger.info("Config loaded from %s" % config_loader.filename)
channels = {}
+ GPIO.setwarnings(False)
GPIO.setmode(GPIO.BOARD)
for ch in config['channels']:
if ch['enabled'] != True:
continue
channel = ChannelFactory.create(ch)
if channel:
channels[channel.pin] = channel
import controllers
| Disable warnings in GPIO lib | ## Code Before:
from flask import Flask
import logging
import logging.config
import RPi.GPIO as GPIO
from .config import config, config_loader
from .channel import ChannelFactory
app = Flask('rpi_gpio_http')
logging.config.dictConfig(config['logger'])
logger = logging.getLogger(__name__)
logger.info("Config loaded from %s" % config_loader.filename)
channels = {}
GPIO.setmode(GPIO.BOARD)
for ch in config['channels']:
if ch['enabled'] != True:
continue
channel = ChannelFactory.create(ch)
if channel:
channels[channel.pin] = channel
import controllers
## Instruction:
Disable warnings in GPIO lib
## Code After:
from flask import Flask
import logging
import logging.config
import RPi.GPIO as GPIO
from .config import config, config_loader
from .channel import ChannelFactory
app = Flask('rpi_gpio_http')
logging.config.dictConfig(config['logger'])
logger = logging.getLogger(__name__)
logger.info("Config loaded from %s" % config_loader.filename)
channels = {}
GPIO.setwarnings(False)
GPIO.setmode(GPIO.BOARD)
for ch in config['channels']:
if ch['enabled'] != True:
continue
channel = ChannelFactory.create(ch)
if channel:
channels[channel.pin] = channel
import controllers
|
378f55687131324bb5c43e3b50f9db5fe3b39662 | zaqar_ui/__init__.py | zaqar_ui/__init__.py |
import pbr.version
__version__ = pbr.version.VersionInfo(
'neutron_lbaas_dashboard').version_string()
|
import pbr.version
__version__ = pbr.version.VersionInfo('zaqar_ui').version_string()
| Fix Zaqar-ui with wrong reference pbr version | Fix Zaqar-ui with wrong reference pbr version
Change-Id: I84cdb865478a232886ba1059febf56735a0d91ba
| Python | apache-2.0 | openstack/zaqar-ui,openstack/zaqar-ui,openstack/zaqar-ui,openstack/zaqar-ui |
import pbr.version
+ __version__ = pbr.version.VersionInfo('zaqar_ui').version_string()
- __version__ = pbr.version.VersionInfo(
- 'neutron_lbaas_dashboard').version_string()
| Fix Zaqar-ui with wrong reference pbr version | ## Code Before:
import pbr.version
__version__ = pbr.version.VersionInfo(
'neutron_lbaas_dashboard').version_string()
## Instruction:
Fix Zaqar-ui with wrong reference pbr version
## Code After:
import pbr.version
__version__ = pbr.version.VersionInfo('zaqar_ui').version_string()
|
d659c685f40de7eb7b2ccd007888177fb158e139 | tests/integration/players.py | tests/integration/players.py | import urllib.parse
import urllib.request
def create_player(username, password, email):
url = 'https://localhost:3000/players'
values = {'username' : username,
'password' : password,
'email' : email }
data = urllib.parse.urlencode(values)
data = data.encode('utf-8') # data should be bytes
req = urllib.request.Request(url, data)
response = urllib.request.urlopen(req)
the_page = response.read()
print("Created user \'{}\' with password \'{}\' and email \'{}\'".format(username, password, email))
if __name__ == '__main__':
create_player("chapmang", "password", "[email protected]")
create_player("idlee", "deadparrot", "[email protected]")
create_player("gilliamt", "lumberjack", "[email protected]")
create_player("jonest", "trojanrabbit", "[email protected]")
create_player("cleesej", "generaldirection", "[email protected]")
create_player("palinm", "fleshwound", "[email protected]")
|
import requests
def create_player(username, password, email):
url = 'https://localhost:3000/players'
values = {'username' : username,
'password' : password,
'email' : email }
r = requests.post(url, params=values, verify=False)
r.raise_for_status()
if (r.status_code == 201):
print("Created user \'{}\' with password \'{}\' and email \'{}\'".format(username,
password,
email))
if __name__ == '__main__':
create_player("chapmang", "password", "[email protected]")
create_player("idlee", "deadparrot", "[email protected]")
create_player("gilliamt", "lumberjack", "[email protected]")
create_player("jonest", "trojanrabbit", "[email protected]")
create_player("cleesej", "generaldirection", "[email protected]")
create_player("palinm", "fleshwound", "[email protected]")
| Switch to requests library instead of urllib | Switch to requests library instead of urllib
| Python | mit | dropshot/dropshot-server | - import urllib.parse
+
- import urllib.request
+ import requests
def create_player(username, password, email):
url = 'https://localhost:3000/players'
values = {'username' : username,
'password' : password,
'email' : email }
- data = urllib.parse.urlencode(values)
- data = data.encode('utf-8') # data should be bytes
- req = urllib.request.Request(url, data)
- response = urllib.request.urlopen(req)
- the_page = response.read()
+ r = requests.post(url, params=values, verify=False)
+
+ r.raise_for_status()
+
+ if (r.status_code == 201):
- print("Created user \'{}\' with password \'{}\' and email \'{}\'".format(username, password, email))
+ print("Created user \'{}\' with password \'{}\' and email \'{}\'".format(username,
+ password,
+ email))
+
if __name__ == '__main__':
create_player("chapmang", "password", "[email protected]")
create_player("idlee", "deadparrot", "[email protected]")
create_player("gilliamt", "lumberjack", "[email protected]")
create_player("jonest", "trojanrabbit", "[email protected]")
create_player("cleesej", "generaldirection", "[email protected]")
create_player("palinm", "fleshwound", "[email protected]")
| Switch to requests library instead of urllib | ## Code Before:
import urllib.parse
import urllib.request
def create_player(username, password, email):
url = 'https://localhost:3000/players'
values = {'username' : username,
'password' : password,
'email' : email }
data = urllib.parse.urlencode(values)
data = data.encode('utf-8') # data should be bytes
req = urllib.request.Request(url, data)
response = urllib.request.urlopen(req)
the_page = response.read()
print("Created user \'{}\' with password \'{}\' and email \'{}\'".format(username, password, email))
if __name__ == '__main__':
create_player("chapmang", "password", "[email protected]")
create_player("idlee", "deadparrot", "[email protected]")
create_player("gilliamt", "lumberjack", "[email protected]")
create_player("jonest", "trojanrabbit", "[email protected]")
create_player("cleesej", "generaldirection", "[email protected]")
create_player("palinm", "fleshwound", "[email protected]")
## Instruction:
Switch to requests library instead of urllib
## Code After:
import requests
def create_player(username, password, email):
url = 'https://localhost:3000/players'
values = {'username' : username,
'password' : password,
'email' : email }
r = requests.post(url, params=values, verify=False)
r.raise_for_status()
if (r.status_code == 201):
print("Created user \'{}\' with password \'{}\' and email \'{}\'".format(username,
password,
email))
if __name__ == '__main__':
create_player("chapmang", "password", "[email protected]")
create_player("idlee", "deadparrot", "[email protected]")
create_player("gilliamt", "lumberjack", "[email protected]")
create_player("jonest", "trojanrabbit", "[email protected]")
create_player("cleesej", "generaldirection", "[email protected]")
create_player("palinm", "fleshwound", "[email protected]")
|
eeeba609afe732b8e95aa535e70d4cdd2ae1aac7 | tests/unit/test_cufflinks.py | tests/unit/test_cufflinks.py | import os
import unittest
import shutil
from bcbio.rnaseq import cufflinks
from bcbio.utils import file_exists, safe_makedir
from nose.plugins.attrib import attr
DATA_DIR = os.path.join(os.path.dirname(__file__), "bcbio-nextgen-test-data", "data")
class TestCufflinks(unittest.TestCase):
merged_gtf = os.path.join(DATA_DIR, "cufflinks", "merged.gtf")
ref_gtf = os.path.join(DATA_DIR, "cufflinks", "ref-transcripts.gtf")
out_dir = "cufflinks-test"
def setUp(self):
safe_makedir(self.out_dir)
@attr("unit")
def test_cufflinks_clean(self):
clean_fn = os.path.join(self.out_dir, "clean.gtf")
dirty_fn = os.path.join(self.out_dir, "dirty.gtf")
clean, dirty = cufflinks.clean_assembly(self.merged_gtf, clean_fn,
dirty_fn)
# fixed_fn = os.path.join(self.out_dir, "fixed.gtf")
# fixed = cufflinks.fix_cufflinks_attributes(self.ref_gtf, clean, fixed_fn)
assert(file_exists(clean))
assert(os.path.exists(dirty))
# assert(file_exists(fixed))
def tearDown(self):
shutil.rmtree(self.out_dir)
| import os
import unittest
import shutil
from bcbio.rnaseq import cufflinks
from bcbio.utils import file_exists, safe_makedir
from nose.plugins.attrib import attr
DATA_DIR = os.path.join(os.path.dirname(__file__), "bcbio-nextgen-test-data", "data")
class TestCufflinks(unittest.TestCase):
merged_gtf = os.path.join(DATA_DIR, "cufflinks", "merged.gtf")
ref_gtf = os.path.join(DATA_DIR, "cufflinks", "ref-transcripts.gtf")
out_dir = "cufflinks-test"
def setUp(self):
safe_makedir(self.out_dir)
@attr("unit")
def test_cufflinks_clean(self):
clean_fn = os.path.join(self.out_dir, "clean.gtf")
dirty_fn = os.path.join(self.out_dir, "dirty.gtf")
clean, dirty = cufflinks.clean_assembly(self.merged_gtf, clean_fn,
dirty_fn)
assert(file_exists(clean))
assert(os.path.exists(dirty))
def tearDown(self):
shutil.rmtree(self.out_dir)
| Remove some cruft from the cufflinks test. | Remove some cruft from the cufflinks test.
| Python | mit | vladsaveliev/bcbio-nextgen,biocyberman/bcbio-nextgen,verdurin/bcbio-nextgen,fw1121/bcbio-nextgen,gifford-lab/bcbio-nextgen,chapmanb/bcbio-nextgen,Cyberbio-Lab/bcbio-nextgen,hjanime/bcbio-nextgen,verdurin/bcbio-nextgen,lbeltrame/bcbio-nextgen,verdurin/bcbio-nextgen,SciLifeLab/bcbio-nextgen,chapmanb/bcbio-nextgen,lpantano/bcbio-nextgen,vladsaveliev/bcbio-nextgen,elkingtonmcb/bcbio-nextgen,mjafin/bcbio-nextgen,brainstorm/bcbio-nextgen,lbeltrame/bcbio-nextgen,guillermo-carrasco/bcbio-nextgen,fw1121/bcbio-nextgen,a113n/bcbio-nextgen,brainstorm/bcbio-nextgen,SciLifeLab/bcbio-nextgen,mjafin/bcbio-nextgen,elkingtonmcb/bcbio-nextgen,mjafin/bcbio-nextgen,lbeltrame/bcbio-nextgen,biocyberman/bcbio-nextgen,Cyberbio-Lab/bcbio-nextgen,chapmanb/bcbio-nextgen,gifford-lab/bcbio-nextgen,lpantano/bcbio-nextgen,lpantano/bcbio-nextgen,elkingtonmcb/bcbio-nextgen,gifford-lab/bcbio-nextgen,fw1121/bcbio-nextgen,vladsaveliev/bcbio-nextgen,guillermo-carrasco/bcbio-nextgen,a113n/bcbio-nextgen,Cyberbio-Lab/bcbio-nextgen,hjanime/bcbio-nextgen,SciLifeLab/bcbio-nextgen,brainstorm/bcbio-nextgen,biocyberman/bcbio-nextgen,hjanime/bcbio-nextgen,a113n/bcbio-nextgen,guillermo-carrasco/bcbio-nextgen | import os
import unittest
import shutil
from bcbio.rnaseq import cufflinks
from bcbio.utils import file_exists, safe_makedir
from nose.plugins.attrib import attr
DATA_DIR = os.path.join(os.path.dirname(__file__), "bcbio-nextgen-test-data", "data")
class TestCufflinks(unittest.TestCase):
merged_gtf = os.path.join(DATA_DIR, "cufflinks", "merged.gtf")
ref_gtf = os.path.join(DATA_DIR, "cufflinks", "ref-transcripts.gtf")
out_dir = "cufflinks-test"
def setUp(self):
safe_makedir(self.out_dir)
@attr("unit")
def test_cufflinks_clean(self):
clean_fn = os.path.join(self.out_dir, "clean.gtf")
dirty_fn = os.path.join(self.out_dir, "dirty.gtf")
clean, dirty = cufflinks.clean_assembly(self.merged_gtf, clean_fn,
dirty_fn)
- # fixed_fn = os.path.join(self.out_dir, "fixed.gtf")
- # fixed = cufflinks.fix_cufflinks_attributes(self.ref_gtf, clean, fixed_fn)
assert(file_exists(clean))
assert(os.path.exists(dirty))
- # assert(file_exists(fixed))
def tearDown(self):
shutil.rmtree(self.out_dir)
| Remove some cruft from the cufflinks test. | ## Code Before:
import os
import unittest
import shutil
from bcbio.rnaseq import cufflinks
from bcbio.utils import file_exists, safe_makedir
from nose.plugins.attrib import attr
DATA_DIR = os.path.join(os.path.dirname(__file__), "bcbio-nextgen-test-data", "data")
class TestCufflinks(unittest.TestCase):
merged_gtf = os.path.join(DATA_DIR, "cufflinks", "merged.gtf")
ref_gtf = os.path.join(DATA_DIR, "cufflinks", "ref-transcripts.gtf")
out_dir = "cufflinks-test"
def setUp(self):
safe_makedir(self.out_dir)
@attr("unit")
def test_cufflinks_clean(self):
clean_fn = os.path.join(self.out_dir, "clean.gtf")
dirty_fn = os.path.join(self.out_dir, "dirty.gtf")
clean, dirty = cufflinks.clean_assembly(self.merged_gtf, clean_fn,
dirty_fn)
# fixed_fn = os.path.join(self.out_dir, "fixed.gtf")
# fixed = cufflinks.fix_cufflinks_attributes(self.ref_gtf, clean, fixed_fn)
assert(file_exists(clean))
assert(os.path.exists(dirty))
# assert(file_exists(fixed))
def tearDown(self):
shutil.rmtree(self.out_dir)
## Instruction:
Remove some cruft from the cufflinks test.
## Code After:
import os
import unittest
import shutil
from bcbio.rnaseq import cufflinks
from bcbio.utils import file_exists, safe_makedir
from nose.plugins.attrib import attr
DATA_DIR = os.path.join(os.path.dirname(__file__), "bcbio-nextgen-test-data", "data")
class TestCufflinks(unittest.TestCase):
merged_gtf = os.path.join(DATA_DIR, "cufflinks", "merged.gtf")
ref_gtf = os.path.join(DATA_DIR, "cufflinks", "ref-transcripts.gtf")
out_dir = "cufflinks-test"
def setUp(self):
safe_makedir(self.out_dir)
@attr("unit")
def test_cufflinks_clean(self):
clean_fn = os.path.join(self.out_dir, "clean.gtf")
dirty_fn = os.path.join(self.out_dir, "dirty.gtf")
clean, dirty = cufflinks.clean_assembly(self.merged_gtf, clean_fn,
dirty_fn)
assert(file_exists(clean))
assert(os.path.exists(dirty))
def tearDown(self):
shutil.rmtree(self.out_dir)
|
c956fbbbc6e4dbd713728c1feda6bce2956a0894 | runtime/Python3/src/antlr4/__init__.py | runtime/Python3/src/antlr4/__init__.py | from antlr4.Token import Token
from antlr4.InputStream import InputStream
from antlr4.FileStream import FileStream
from antlr4.BufferedTokenStream import TokenStream
from antlr4.CommonTokenStream import CommonTokenStream
from antlr4.Lexer import Lexer
from antlr4.Parser import Parser
from antlr4.dfa.DFA import DFA
from antlr4.atn.ATN import ATN
from antlr4.atn.ATNDeserializer import ATNDeserializer
from antlr4.atn.LexerATNSimulator import LexerATNSimulator
from antlr4.atn.ParserATNSimulator import ParserATNSimulator
from antlr4.atn.PredictionMode import PredictionMode
from antlr4.PredictionContext import PredictionContextCache
from antlr4.ParserRuleContext import RuleContext, ParserRuleContext
from antlr4.tree.Tree import ParseTreeListener, ParseTreeVisitor, ParseTreeWalker, TerminalNode, ErrorNode, RuleNode
from antlr4.error.Errors import RecognitionException, IllegalStateException, NoViableAltException
from antlr4.error.ErrorStrategy import BailErrorStrategy
from antlr4.error.DiagnosticErrorListener import DiagnosticErrorListener
from antlr4.Utils import str_list | from antlr4.Token import Token
from antlr4.InputStream import InputStream
from antlr4.FileStream import FileStream
from antlr4.StdinStream import StdinStream
from antlr4.BufferedTokenStream import TokenStream
from antlr4.CommonTokenStream import CommonTokenStream
from antlr4.Lexer import Lexer
from antlr4.Parser import Parser
from antlr4.dfa.DFA import DFA
from antlr4.atn.ATN import ATN
from antlr4.atn.ATNDeserializer import ATNDeserializer
from antlr4.atn.LexerATNSimulator import LexerATNSimulator
from antlr4.atn.ParserATNSimulator import ParserATNSimulator
from antlr4.atn.PredictionMode import PredictionMode
from antlr4.PredictionContext import PredictionContextCache
from antlr4.ParserRuleContext import RuleContext, ParserRuleContext
from antlr4.tree.Tree import ParseTreeListener, ParseTreeVisitor, ParseTreeWalker, TerminalNode, ErrorNode, RuleNode
from antlr4.error.Errors import RecognitionException, IllegalStateException, NoViableAltException
from antlr4.error.ErrorStrategy import BailErrorStrategy
from antlr4.error.DiagnosticErrorListener import DiagnosticErrorListener
from antlr4.Utils import str_list
| Allow importing StdinStream from antlr4 package | Allow importing StdinStream from antlr4 package
| Python | bsd-3-clause | parrt/antlr4,ericvergnaud/antlr4,antlr/antlr4,antlr/antlr4,ericvergnaud/antlr4,parrt/antlr4,ericvergnaud/antlr4,parrt/antlr4,antlr/antlr4,parrt/antlr4,parrt/antlr4,antlr/antlr4,antlr/antlr4,antlr/antlr4,ericvergnaud/antlr4,ericvergnaud/antlr4,ericvergnaud/antlr4,parrt/antlr4,antlr/antlr4,antlr/antlr4,ericvergnaud/antlr4,ericvergnaud/antlr4,antlr/antlr4,ericvergnaud/antlr4,antlr/antlr4,parrt/antlr4,parrt/antlr4,ericvergnaud/antlr4,parrt/antlr4,parrt/antlr4 | from antlr4.Token import Token
from antlr4.InputStream import InputStream
from antlr4.FileStream import FileStream
+ from antlr4.StdinStream import StdinStream
from antlr4.BufferedTokenStream import TokenStream
from antlr4.CommonTokenStream import CommonTokenStream
from antlr4.Lexer import Lexer
from antlr4.Parser import Parser
from antlr4.dfa.DFA import DFA
from antlr4.atn.ATN import ATN
from antlr4.atn.ATNDeserializer import ATNDeserializer
from antlr4.atn.LexerATNSimulator import LexerATNSimulator
from antlr4.atn.ParserATNSimulator import ParserATNSimulator
from antlr4.atn.PredictionMode import PredictionMode
from antlr4.PredictionContext import PredictionContextCache
from antlr4.ParserRuleContext import RuleContext, ParserRuleContext
from antlr4.tree.Tree import ParseTreeListener, ParseTreeVisitor, ParseTreeWalker, TerminalNode, ErrorNode, RuleNode
from antlr4.error.Errors import RecognitionException, IllegalStateException, NoViableAltException
from antlr4.error.ErrorStrategy import BailErrorStrategy
from antlr4.error.DiagnosticErrorListener import DiagnosticErrorListener
from antlr4.Utils import str_list
+ | Allow importing StdinStream from antlr4 package | ## Code Before:
from antlr4.Token import Token
from antlr4.InputStream import InputStream
from antlr4.FileStream import FileStream
from antlr4.BufferedTokenStream import TokenStream
from antlr4.CommonTokenStream import CommonTokenStream
from antlr4.Lexer import Lexer
from antlr4.Parser import Parser
from antlr4.dfa.DFA import DFA
from antlr4.atn.ATN import ATN
from antlr4.atn.ATNDeserializer import ATNDeserializer
from antlr4.atn.LexerATNSimulator import LexerATNSimulator
from antlr4.atn.ParserATNSimulator import ParserATNSimulator
from antlr4.atn.PredictionMode import PredictionMode
from antlr4.PredictionContext import PredictionContextCache
from antlr4.ParserRuleContext import RuleContext, ParserRuleContext
from antlr4.tree.Tree import ParseTreeListener, ParseTreeVisitor, ParseTreeWalker, TerminalNode, ErrorNode, RuleNode
from antlr4.error.Errors import RecognitionException, IllegalStateException, NoViableAltException
from antlr4.error.ErrorStrategy import BailErrorStrategy
from antlr4.error.DiagnosticErrorListener import DiagnosticErrorListener
from antlr4.Utils import str_list
## Instruction:
Allow importing StdinStream from antlr4 package
## Code After:
from antlr4.Token import Token
from antlr4.InputStream import InputStream
from antlr4.FileStream import FileStream
from antlr4.StdinStream import StdinStream
from antlr4.BufferedTokenStream import TokenStream
from antlr4.CommonTokenStream import CommonTokenStream
from antlr4.Lexer import Lexer
from antlr4.Parser import Parser
from antlr4.dfa.DFA import DFA
from antlr4.atn.ATN import ATN
from antlr4.atn.ATNDeserializer import ATNDeserializer
from antlr4.atn.LexerATNSimulator import LexerATNSimulator
from antlr4.atn.ParserATNSimulator import ParserATNSimulator
from antlr4.atn.PredictionMode import PredictionMode
from antlr4.PredictionContext import PredictionContextCache
from antlr4.ParserRuleContext import RuleContext, ParserRuleContext
from antlr4.tree.Tree import ParseTreeListener, ParseTreeVisitor, ParseTreeWalker, TerminalNode, ErrorNode, RuleNode
from antlr4.error.Errors import RecognitionException, IllegalStateException, NoViableAltException
from antlr4.error.ErrorStrategy import BailErrorStrategy
from antlr4.error.DiagnosticErrorListener import DiagnosticErrorListener
from antlr4.Utils import str_list
|
7947d474da8bb086493890d81a6788d76e00b108 | numba/cuda/tests/__init__.py | numba/cuda/tests/__init__.py | from numba.testing import SerialSuite
from numba.testing import load_testsuite
from numba import cuda
from os.path import dirname, join
def load_tests(loader, tests, pattern):
suite = SerialSuite()
this_dir = dirname(__file__)
suite.addTests(load_testsuite(loader, join(this_dir, 'nocuda')))
suite.addTests(load_testsuite(loader, join(this_dir, 'cudasim')))
if cuda.is_available():
gpus = cuda.list_devices()
if gpus and gpus[0].compute_capability >= (2, 0):
suite.addTests(load_testsuite(loader, join(this_dir, 'cudadrv')))
suite.addTests(load_testsuite(loader, join(this_dir, 'cudapy')))
else:
print("skipped CUDA tests because GPU CC < 2.0")
else:
print("skipped CUDA tests")
return suite
| from numba.testing import SerialSuite
from numba.testing import load_testsuite
from numba import cuda
from os.path import dirname, join
def load_tests(loader, tests, pattern):
suite = SerialSuite()
this_dir = dirname(__file__)
suite.addTests(load_testsuite(loader, join(this_dir, 'nocuda')))
if cuda.is_available():
suite.addTests(load_testsuite(loader, join(this_dir, 'cudasim')))
gpus = cuda.list_devices()
if gpus and gpus[0].compute_capability >= (2, 0):
suite.addTests(load_testsuite(loader, join(this_dir, 'cudadrv')))
suite.addTests(load_testsuite(loader, join(this_dir, 'cudapy')))
else:
print("skipped CUDA tests because GPU CC < 2.0")
else:
print("skipped CUDA tests")
return suite
| Fix tests on machine without CUDA | Fix tests on machine without CUDA
| Python | bsd-2-clause | sklam/numba,numba/numba,seibert/numba,IntelLabs/numba,jriehl/numba,stonebig/numba,gmarkall/numba,cpcloud/numba,IntelLabs/numba,gmarkall/numba,jriehl/numba,cpcloud/numba,sklam/numba,cpcloud/numba,numba/numba,stonebig/numba,stefanseefeld/numba,sklam/numba,cpcloud/numba,seibert/numba,sklam/numba,gmarkall/numba,stefanseefeld/numba,jriehl/numba,numba/numba,cpcloud/numba,stefanseefeld/numba,IntelLabs/numba,numba/numba,IntelLabs/numba,stuartarchibald/numba,jriehl/numba,sklam/numba,IntelLabs/numba,numba/numba,stonebig/numba,stuartarchibald/numba,stonebig/numba,jriehl/numba,gmarkall/numba,stefanseefeld/numba,stuartarchibald/numba,stuartarchibald/numba,stonebig/numba,stefanseefeld/numba,seibert/numba,seibert/numba,gmarkall/numba,stuartarchibald/numba,seibert/numba | from numba.testing import SerialSuite
from numba.testing import load_testsuite
from numba import cuda
from os.path import dirname, join
def load_tests(loader, tests, pattern):
suite = SerialSuite()
this_dir = dirname(__file__)
suite.addTests(load_testsuite(loader, join(this_dir, 'nocuda')))
- suite.addTests(load_testsuite(loader, join(this_dir, 'cudasim')))
if cuda.is_available():
+ suite.addTests(load_testsuite(loader, join(this_dir, 'cudasim')))
gpus = cuda.list_devices()
if gpus and gpus[0].compute_capability >= (2, 0):
suite.addTests(load_testsuite(loader, join(this_dir, 'cudadrv')))
suite.addTests(load_testsuite(loader, join(this_dir, 'cudapy')))
else:
print("skipped CUDA tests because GPU CC < 2.0")
else:
print("skipped CUDA tests")
return suite
| Fix tests on machine without CUDA | ## Code Before:
from numba.testing import SerialSuite
from numba.testing import load_testsuite
from numba import cuda
from os.path import dirname, join
def load_tests(loader, tests, pattern):
suite = SerialSuite()
this_dir = dirname(__file__)
suite.addTests(load_testsuite(loader, join(this_dir, 'nocuda')))
suite.addTests(load_testsuite(loader, join(this_dir, 'cudasim')))
if cuda.is_available():
gpus = cuda.list_devices()
if gpus and gpus[0].compute_capability >= (2, 0):
suite.addTests(load_testsuite(loader, join(this_dir, 'cudadrv')))
suite.addTests(load_testsuite(loader, join(this_dir, 'cudapy')))
else:
print("skipped CUDA tests because GPU CC < 2.0")
else:
print("skipped CUDA tests")
return suite
## Instruction:
Fix tests on machine without CUDA
## Code After:
from numba.testing import SerialSuite
from numba.testing import load_testsuite
from numba import cuda
from os.path import dirname, join
def load_tests(loader, tests, pattern):
suite = SerialSuite()
this_dir = dirname(__file__)
suite.addTests(load_testsuite(loader, join(this_dir, 'nocuda')))
if cuda.is_available():
suite.addTests(load_testsuite(loader, join(this_dir, 'cudasim')))
gpus = cuda.list_devices()
if gpus and gpus[0].compute_capability >= (2, 0):
suite.addTests(load_testsuite(loader, join(this_dir, 'cudadrv')))
suite.addTests(load_testsuite(loader, join(this_dir, 'cudapy')))
else:
print("skipped CUDA tests because GPU CC < 2.0")
else:
print("skipped CUDA tests")
return suite
|
910d1288adddd0c8dd500c1be5e488502c1ed335 | localflavor/nl/forms.py | localflavor/nl/forms.py | """NL-specific Form helpers."""
from __future__ import unicode_literals
from django import forms
from django.utils import six
from .nl_provinces import PROVINCE_CHOICES
from .validators import NLBSNFieldValidator, NLZipCodeFieldValidator
class NLZipCodeField(forms.CharField):
"""A Dutch zip code field."""
default_validators = [NLZipCodeFieldValidator()]
def clean(self, value):
if isinstance(value, six.string_types):
value = value.upper().replace(' ', '')
if len(value) == 6:
value = '%s %s' % (value[:4], value[4:])
return super(NLZipCodeField, self).clean(value)
class NLProvinceSelect(forms.Select):
"""A Select widget that uses a list of provinces of the Netherlands as it's choices."""
def __init__(self, attrs=None):
super(NLProvinceSelect, self).__init__(attrs, choices=PROVINCE_CHOICES)
class NLBSNFormField(forms.CharField):
"""
A Dutch social security number (BSN) field.
http://nl.wikipedia.org/wiki/Sofinummer
.. versionadded:: 1.6
"""
default_validators = [NLBSNFieldValidator()]
def __init__(self, *args, **kwargs):
kwargs['max_length'] = 9
super(NLBSNFormField, self).__init__(*args, **kwargs)
| """NL-specific Form helpers."""
from __future__ import unicode_literals
from django import forms
from django.utils import six
from .nl_provinces import PROVINCE_CHOICES
from .validators import NLBSNFieldValidator, NLZipCodeFieldValidator
class NLZipCodeField(forms.CharField):
"""A Dutch zip code field."""
default_validators = [NLZipCodeFieldValidator()]
def clean(self, value):
if isinstance(value, six.string_types):
value = value.upper().replace(' ', '')
if len(value) == 6:
value = '%s %s' % (value[:4], value[4:])
return super(NLZipCodeField, self).clean(value)
class NLProvinceSelect(forms.Select):
"""A Select widget that uses a list of provinces of the Netherlands as it's choices."""
def __init__(self, attrs=None):
super(NLProvinceSelect, self).__init__(attrs, choices=PROVINCE_CHOICES)
class NLBSNFormField(forms.CharField):
"""
A Dutch social security number (BSN) field.
https://nl.wikipedia.org/wiki/Burgerservicenummer
Note that you may only process the BSN if you have a legal basis to do so!
.. versionadded:: 1.6
"""
default_validators = [NLBSNFieldValidator()]
def __init__(self, *args, **kwargs):
kwargs['max_length'] = 9
super(NLBSNFormField, self).__init__(*args, **kwargs)
| Fix the wikipedia link and include a warning | Fix the wikipedia link and include a warning
| Python | bsd-3-clause | django/django-localflavor,rsalmaso/django-localflavor | """NL-specific Form helpers."""
from __future__ import unicode_literals
from django import forms
from django.utils import six
from .nl_provinces import PROVINCE_CHOICES
from .validators import NLBSNFieldValidator, NLZipCodeFieldValidator
class NLZipCodeField(forms.CharField):
"""A Dutch zip code field."""
default_validators = [NLZipCodeFieldValidator()]
def clean(self, value):
if isinstance(value, six.string_types):
value = value.upper().replace(' ', '')
if len(value) == 6:
value = '%s %s' % (value[:4], value[4:])
return super(NLZipCodeField, self).clean(value)
class NLProvinceSelect(forms.Select):
"""A Select widget that uses a list of provinces of the Netherlands as it's choices."""
def __init__(self, attrs=None):
super(NLProvinceSelect, self).__init__(attrs, choices=PROVINCE_CHOICES)
class NLBSNFormField(forms.CharField):
"""
A Dutch social security number (BSN) field.
- http://nl.wikipedia.org/wiki/Sofinummer
+ https://nl.wikipedia.org/wiki/Burgerservicenummer
+
+ Note that you may only process the BSN if you have a legal basis to do so!
.. versionadded:: 1.6
"""
default_validators = [NLBSNFieldValidator()]
def __init__(self, *args, **kwargs):
kwargs['max_length'] = 9
super(NLBSNFormField, self).__init__(*args, **kwargs)
| Fix the wikipedia link and include a warning | ## Code Before:
"""NL-specific Form helpers."""
from __future__ import unicode_literals
from django import forms
from django.utils import six
from .nl_provinces import PROVINCE_CHOICES
from .validators import NLBSNFieldValidator, NLZipCodeFieldValidator
class NLZipCodeField(forms.CharField):
"""A Dutch zip code field."""
default_validators = [NLZipCodeFieldValidator()]
def clean(self, value):
if isinstance(value, six.string_types):
value = value.upper().replace(' ', '')
if len(value) == 6:
value = '%s %s' % (value[:4], value[4:])
return super(NLZipCodeField, self).clean(value)
class NLProvinceSelect(forms.Select):
"""A Select widget that uses a list of provinces of the Netherlands as it's choices."""
def __init__(self, attrs=None):
super(NLProvinceSelect, self).__init__(attrs, choices=PROVINCE_CHOICES)
class NLBSNFormField(forms.CharField):
"""
A Dutch social security number (BSN) field.
http://nl.wikipedia.org/wiki/Sofinummer
.. versionadded:: 1.6
"""
default_validators = [NLBSNFieldValidator()]
def __init__(self, *args, **kwargs):
kwargs['max_length'] = 9
super(NLBSNFormField, self).__init__(*args, **kwargs)
## Instruction:
Fix the wikipedia link and include a warning
## Code After:
"""NL-specific Form helpers."""
from __future__ import unicode_literals
from django import forms
from django.utils import six
from .nl_provinces import PROVINCE_CHOICES
from .validators import NLBSNFieldValidator, NLZipCodeFieldValidator
class NLZipCodeField(forms.CharField):
"""A Dutch zip code field."""
default_validators = [NLZipCodeFieldValidator()]
def clean(self, value):
if isinstance(value, six.string_types):
value = value.upper().replace(' ', '')
if len(value) == 6:
value = '%s %s' % (value[:4], value[4:])
return super(NLZipCodeField, self).clean(value)
class NLProvinceSelect(forms.Select):
"""A Select widget that uses a list of provinces of the Netherlands as it's choices."""
def __init__(self, attrs=None):
super(NLProvinceSelect, self).__init__(attrs, choices=PROVINCE_CHOICES)
class NLBSNFormField(forms.CharField):
"""
A Dutch social security number (BSN) field.
https://nl.wikipedia.org/wiki/Burgerservicenummer
Note that you may only process the BSN if you have a legal basis to do so!
.. versionadded:: 1.6
"""
default_validators = [NLBSNFieldValidator()]
def __init__(self, *args, **kwargs):
kwargs['max_length'] = 9
super(NLBSNFormField, self).__init__(*args, **kwargs)
|
2e5ec8483930ad328b0a212ccc4b746f73b18c4c | pinax/ratings/tests/tests.py | pinax/ratings/tests/tests.py | from django.test import TestCase
from django.contrib.auth.models import User
from pinax.ratings.models import Rating
from .models import Car
class Tests(TestCase):
def setUp(self):
self.paltman = User.objects.create(username="paltman")
self.jtauber = User.objects.create(username="jtauber")
self.bronco = Car.objects.create(name="Ford Bronco")
self.blazer = Car.objects.create(name="Cheverolet Blazer")
self.expedition = Car.objects.create(name="Ford Expedition")
def test_rating(self):
overall = Rating.update(self.bronco, self.paltman, rating=5)
self.assertEquals(overall, 5)
overall = Rating.update(self.bronco, self.jtauber, rating=2)
self.assertEquals(overall, 3.5)
| from decimal import Decimal
from django.test import TestCase
from django.contrib.auth.models import User
from pinax.ratings.models import Rating
from .models import Car
class Tests(TestCase):
def setUp(self):
self.paltman = User.objects.create(username="paltman")
self.jtauber = User.objects.create(username="jtauber")
self.bronco = Car.objects.create(name="Ford Bronco")
self.blazer = Car.objects.create(name="Cheverolet Blazer")
self.expedition = Car.objects.create(name="Ford Expedition")
def test_rating(self):
overall = Rating.update(self.bronco, self.paltman, rating=5)
self.assertEquals(overall, Decimal("5"))
overall = Rating.update(self.bronco, self.jtauber, rating=2)
self.assertEquals(overall, Decimal("3.5"))
| Use explicit Decimal in test | Use explicit Decimal in test
| Python | mit | rizumu/pinax-ratings,pinax/pinax-ratings,arthur-wsw/pinax-ratings,arthur-wsw/pinax-ratings,pinax/pinax-ratings,arthur-wsw/pinax-ratings,pinax/pinax-ratings,rizumu/pinax-ratings,rizumu/pinax-ratings | + from decimal import Decimal
+
from django.test import TestCase
from django.contrib.auth.models import User
from pinax.ratings.models import Rating
from .models import Car
class Tests(TestCase):
def setUp(self):
self.paltman = User.objects.create(username="paltman")
self.jtauber = User.objects.create(username="jtauber")
self.bronco = Car.objects.create(name="Ford Bronco")
self.blazer = Car.objects.create(name="Cheverolet Blazer")
self.expedition = Car.objects.create(name="Ford Expedition")
def test_rating(self):
overall = Rating.update(self.bronco, self.paltman, rating=5)
- self.assertEquals(overall, 5)
+ self.assertEquals(overall, Decimal("5"))
overall = Rating.update(self.bronco, self.jtauber, rating=2)
- self.assertEquals(overall, 3.5)
+ self.assertEquals(overall, Decimal("3.5"))
| Use explicit Decimal in test | ## Code Before:
from django.test import TestCase
from django.contrib.auth.models import User
from pinax.ratings.models import Rating
from .models import Car
class Tests(TestCase):
def setUp(self):
self.paltman = User.objects.create(username="paltman")
self.jtauber = User.objects.create(username="jtauber")
self.bronco = Car.objects.create(name="Ford Bronco")
self.blazer = Car.objects.create(name="Cheverolet Blazer")
self.expedition = Car.objects.create(name="Ford Expedition")
def test_rating(self):
overall = Rating.update(self.bronco, self.paltman, rating=5)
self.assertEquals(overall, 5)
overall = Rating.update(self.bronco, self.jtauber, rating=2)
self.assertEquals(overall, 3.5)
## Instruction:
Use explicit Decimal in test
## Code After:
from decimal import Decimal
from django.test import TestCase
from django.contrib.auth.models import User
from pinax.ratings.models import Rating
from .models import Car
class Tests(TestCase):
def setUp(self):
self.paltman = User.objects.create(username="paltman")
self.jtauber = User.objects.create(username="jtauber")
self.bronco = Car.objects.create(name="Ford Bronco")
self.blazer = Car.objects.create(name="Cheverolet Blazer")
self.expedition = Car.objects.create(name="Ford Expedition")
def test_rating(self):
overall = Rating.update(self.bronco, self.paltman, rating=5)
self.assertEquals(overall, Decimal("5"))
overall = Rating.update(self.bronco, self.jtauber, rating=2)
self.assertEquals(overall, Decimal("3.5"))
|
41a0fa6412427dadfb33c77da45bc88c576fa67c | rdo/drivers/base.py | rdo/drivers/base.py | from subprocess import call
class BaseDriver(object):
def __init__(self, config):
self.config = config
def do(self, cmd):
cmd = self.command(cmd)
call(cmd)
def command(self):
raise NotImplementedError()
| from subprocess import call
class BaseDriver(object):
def __init__(self, config):
self.config = config
def working_dir(self, cmd):
command = ' '.join(cmd)
working_dir = self.config.get('directory')
if working_dir:
command = 'cd %s && %s' % (working_dir, command)
return command
def do(self, cmd):
cmd = self.command(cmd)
call(cmd)
def command(self):
raise NotImplementedError()
| Add a common function for deriving the working dir. | Add a common function for deriving the working dir.
| Python | bsd-3-clause | ionrock/rdo | from subprocess import call
class BaseDriver(object):
def __init__(self, config):
self.config = config
+ def working_dir(self, cmd):
+ command = ' '.join(cmd)
+ working_dir = self.config.get('directory')
+ if working_dir:
+ command = 'cd %s && %s' % (working_dir, command)
+ return command
+
def do(self, cmd):
cmd = self.command(cmd)
call(cmd)
def command(self):
raise NotImplementedError()
| Add a common function for deriving the working dir. | ## Code Before:
from subprocess import call
class BaseDriver(object):
def __init__(self, config):
self.config = config
def do(self, cmd):
cmd = self.command(cmd)
call(cmd)
def command(self):
raise NotImplementedError()
## Instruction:
Add a common function for deriving the working dir.
## Code After:
from subprocess import call
class BaseDriver(object):
def __init__(self, config):
self.config = config
def working_dir(self, cmd):
command = ' '.join(cmd)
working_dir = self.config.get('directory')
if working_dir:
command = 'cd %s && %s' % (working_dir, command)
return command
def do(self, cmd):
cmd = self.command(cmd)
call(cmd)
def command(self):
raise NotImplementedError()
|
e985163d189883a2419e34021971709c9c7498c0 | request/__init__.py | request/__init__.py | __version__ = 0.23
__copyright__ = 'Copyright (c) 2009, Kyle Fuller'
__licence__ = 'BSD'
__author__ = 'Kyle Fuller <[email protected]>, krisje8 <[email protected]>'
__URL__ = 'http://kylefuller.co.uk/project/django-request/'
| __version__ = 0.23
__copyright__ = 'Copyright (c) 2009, Kyle Fuller'
__licence__ = 'BSD'
__author__ = 'Kyle Fuller <[email protected]>, Jannis Leidel (jezdez), krisje8 <[email protected]>'
__URL__ = 'http://kylefuller.co.uk/project/django-request/'
| Add jezdez to the authors | Add jezdez to the authors
| Python | bsd-2-clause | gnublade/django-request,kylef/django-request,kylef/django-request,kylef/django-request,gnublade/django-request,gnublade/django-request | __version__ = 0.23
__copyright__ = 'Copyright (c) 2009, Kyle Fuller'
__licence__ = 'BSD'
- __author__ = 'Kyle Fuller <[email protected]>, krisje8 <[email protected]>'
+ __author__ = 'Kyle Fuller <[email protected]>, Jannis Leidel (jezdez), krisje8 <[email protected]>'
__URL__ = 'http://kylefuller.co.uk/project/django-request/'
| Add jezdez to the authors | ## Code Before:
__version__ = 0.23
__copyright__ = 'Copyright (c) 2009, Kyle Fuller'
__licence__ = 'BSD'
__author__ = 'Kyle Fuller <[email protected]>, krisje8 <[email protected]>'
__URL__ = 'http://kylefuller.co.uk/project/django-request/'
## Instruction:
Add jezdez to the authors
## Code After:
__version__ = 0.23
__copyright__ = 'Copyright (c) 2009, Kyle Fuller'
__licence__ = 'BSD'
__author__ = 'Kyle Fuller <[email protected]>, Jannis Leidel (jezdez), krisje8 <[email protected]>'
__URL__ = 'http://kylefuller.co.uk/project/django-request/'
|
5881436bea688ee49175192452dec18fad4ba9b2 | airflow/executors/__init__.py | airflow/executors/__init__.py | import logging
from airflow import configuration
from airflow.executors.base_executor import BaseExecutor
from airflow.executors.local_executor import LocalExecutor
from airflow.executors.sequential_executor import SequentialExecutor
# TODO Fix this emergency fix
try:
from airflow.executors.celery_executor import CeleryExecutor
except:
pass
try:
from airflow.contrib.executors.mesos_executor import MesosExecutor
except:
pass
from airflow.utils import AirflowException
_EXECUTOR = configuration.get('core', 'EXECUTOR')
if _EXECUTOR == 'LocalExecutor':
DEFAULT_EXECUTOR = LocalExecutor()
elif _EXECUTOR == 'CeleryExecutor':
DEFAULT_EXECUTOR = CeleryExecutor()
elif _EXECUTOR == 'SequentialExecutor':
DEFAULT_EXECUTOR = SequentialExecutor()
elif _EXECUTOR == 'MesosExecutor':
DEFAULT_EXECUTOR = MesosExecutor()
else:
# Loading plugins
from airflow.plugins_manager import executors as _executors
for _executor in _executors:
globals()[_executor.__name__] = _executor
if _EXECUTOR in globals():
DEFAULT_EXECUTOR = globals()[_EXECUTOR]()
else:
raise AirflowException("Executor {0} not supported.".format(_EXECUTOR))
logging.info("Using executor " + _EXECUTOR)
| import logging
from airflow import configuration
from airflow.executors.base_executor import BaseExecutor
from airflow.executors.local_executor import LocalExecutor
from airflow.executors.sequential_executor import SequentialExecutor
from airflow.utils import AirflowException
_EXECUTOR = configuration.get('core', 'EXECUTOR')
if _EXECUTOR == 'LocalExecutor':
DEFAULT_EXECUTOR = LocalExecutor()
elif _EXECUTOR == 'CeleryExecutor':
from airflow.executors.celery_executor import CeleryExecutor
DEFAULT_EXECUTOR = CeleryExecutor()
elif _EXECUTOR == 'SequentialExecutor':
DEFAULT_EXECUTOR = SequentialExecutor()
elif _EXECUTOR == 'MesosExecutor':
from airflow.contrib.executors.mesos_executor import MesosExecutor
DEFAULT_EXECUTOR = MesosExecutor()
else:
# Loading plugins
from airflow.plugins_manager import executors as _executors
for _executor in _executors:
globals()[_executor.__name__] = _executor
if _EXECUTOR in globals():
DEFAULT_EXECUTOR = globals()[_EXECUTOR]()
else:
raise AirflowException("Executor {0} not supported.".format(_EXECUTOR))
logging.info("Using executor " + _EXECUTOR)
| Remove hack by only importing when configured | Remove hack by only importing when configured
| Python | apache-2.0 | asnir/airflow,DEVELByte/incubator-airflow,yati-sagade/incubator-airflow,OpringaoDoTurno/airflow,yk5/incubator-airflow,spektom/incubator-airflow,owlabs/incubator-airflow,preete-dixit-ck/incubator-airflow,malmiron/incubator-airflow,alexvanboxel/airflow,wndhydrnt/airflow,bolkedebruin/airflow,dhuang/incubator-airflow,ledsusop/airflow,mylons/incubator-airflow,easytaxibr/airflow,lxneng/incubator-airflow,hgrif/incubator-airflow,modsy/incubator-airflow,vineet-rh/incubator-airflow,sergiohgz/incubator-airflow,andyxhadji/incubator-airflow,wooga/airflow,ProstoMaxim/incubator-airflow,artwr/airflow,jesusfcr/airflow,yoziru-desu/airflow,adrpar/incubator-airflow,Acehaidrey/incubator-airflow,adrpar/incubator-airflow,hgrif/incubator-airflow,sdiazb/airflow,forevernull/incubator-airflow,skudriashev/incubator-airflow,mtustin-handy/airflow,moritzpein/airflow,bolkedebruin/airflow,stverhae/incubator-airflow,dgies/incubator-airflow,apache/incubator-airflow,CloverHealth/airflow,mtdewulf/incubator-airflow,vineet-rh/incubator-airflow,brandsoulmates/incubator-airflow,andrewmchen/incubator-airflow,nathanielvarona/airflow,mrares/incubator-airflow,dud225/incubator-airflow,rishibarve/incubator-airflow,btallman/incubator-airflow,kerzhner/airflow,vijaysbhat/incubator-airflow,juvoinc/airflow,Twistbioscience/incubator-airflow,sdiazb/airflow,mistercrunch/airflow,malmiron/incubator-airflow,ledsusop/airflow,ronfung/incubator-airflow,d-lee/airflow,danielvdende/incubator-airflow,mtustin-handy/airflow,mylons/incubator-airflow,caseyching/incubator-airflow,asnir/airflow,DEVELByte/incubator-airflow,jfantom/incubator-airflow,moritzpein/airflow,holygits/incubator-airflow,skudriashev/incubator-airflow,asnir/airflow,lyft/incubator-airflow,ronfung/incubator-airflow,spektom/incubator-airflow,opensignal/airflow,bolkedebruin/airflow,owlabs/incubator-airflow,jwi078/incubator-airflow,jiwang576/incubator-airflow,andrewmchen/incubator-airflow,ProstoMaxim/incubator-airflow,jlowin/airflow,sid88in/incub
ator-airflow,OpringaoDoTurno/airflow,modsy/incubator-airflow,modsy/incubator-airflow,NielsZeilemaker/incubator-airflow,yk5/incubator-airflow,RealImpactAnalytics/airflow,wolfier/incubator-airflow,dgies/incubator-airflow,hamedhsn/incubator-airflow,N3da/incubator-airflow,janczak10/incubator-airflow,Chedi/airflow,apache/airflow,DinoCow/airflow,andyxhadji/incubator-airflow,apache/airflow,easytaxibr/airflow,mistercrunch/airflow,hamedhsn/incubator-airflow,danielvdende/incubator-airflow,hamedhsn/incubator-airflow,ronfung/incubator-airflow,wndhydrnt/airflow,gilt/incubator-airflow,Acehaidrey/incubator-airflow,plypaul/airflow,RealImpactAnalytics/airflow,yk5/incubator-airflow,yiqingj/airflow,janczak10/incubator-airflow,ProstoMaxim/incubator-airflow,mrkm4ntr/incubator-airflow,nathanielvarona/airflow,yoziru-desu/airflow,griffinqiu/airflow,dud225/incubator-airflow,sekikn/incubator-airflow,gtoonstra/airflow,alexvanboxel/airflow,jhsenjaliya/incubator-airflow,nathanielvarona/airflow,fenglu-g/incubator-airflow,DinoCow/airflow,Acehaidrey/incubator-airflow,N3da/incubator-airflow,danielvdende/incubator-airflow,Fokko/incubator-airflow,neovintage/airflow,Acehaidrey/incubator-airflow,cjqian/incubator-airflow,mylons/incubator-airflow,cfei18/incubator-airflow,ty707/airflow,ty707/airflow,mattuuh7/incubator-airflow,d-lee/airflow,Twistbioscience/incubator-airflow,wileeam/airflow,janczak10/incubator-airflow,N3da/incubator-airflow,NielsZeilemaker/incubator-airflow,yoziru-desu/airflow,apache/incubator-airflow,jwi078/incubator-airflow,vineet-rh/incubator-airflow,airbnb/airflow,danielvdende/incubator-airflow,opensignal/airflow,cjqian/incubator-airflow,fenglu-g/incubator-airflow,sergiohgz/incubator-airflow,wndhydrnt/airflow,mistercrunch/airflow,jgao54/airflow,gritlogic/incubator-airflow,fenglu-g/incubator-airflow,dgies/incubator-airflow,r39132/airflow,btallman/incubator-airflow,mattuuh7/incubator-airflow,dmitry-r/incubator-airflow,kerzhner/airflow,holygits/incubator-airflow,hamedhsn/incubator-airflow,
MortalViews/incubator-airflow,btallman/incubator-airflow,mrkm4ntr/incubator-airflow,zack3241/incubator-airflow,owlabs/incubator-airflow,cfei18/incubator-airflow,AllisonWang/incubator-airflow,adamhaney/airflow,dgies/incubator-airflow,wolfier/incubator-airflow,sergiohgz/incubator-airflow,r39132/airflow,jesusfcr/airflow,cfei18/incubator-airflow,MetrodataTeam/incubator-airflow,airbnb/airflow,andyxhadji/incubator-airflow,yiqingj/airflow,yati-sagade/incubator-airflow,preete-dixit-ck/incubator-airflow,cfei18/incubator-airflow,holygits/incubator-airflow,sid88in/incubator-airflow,holygits/incubator-airflow,forevernull/incubator-airflow,CloverHealth/airflow,yati-sagade/incubator-airflow,fenglu-g/incubator-airflow,ronfung/incubator-airflow,zoyahav/incubator-airflow,cjqian/incubator-airflow,cjqian/incubator-airflow,lxneng/incubator-airflow,forevernull/incubator-airflow,sergiohgz/incubator-airflow,DinoCow/airflow,akosel/incubator-airflow,bolkedebruin/airflow,nathanielvarona/airflow,AllisonWang/incubator-airflow,wndhydrnt/airflow,rishibarve/incubator-airflow,edgarRd/incubator-airflow,jhsenjaliya/incubator-airflow,jesusfcr/airflow,lxneng/incubator-airflow,neovintage/airflow,adamhaney/airflow,jgao54/airflow,sdiazb/airflow,cfei18/incubator-airflow,MortalViews/incubator-airflow,brandsoulmates/incubator-airflow,gritlogic/incubator-airflow,mtustin-handy/airflow,stverhae/incubator-airflow,saguziel/incubator-airflow,jfantom/incubator-airflow,malmiron/incubator-airflow,Chedi/airflow,saguziel/incubator-airflow,neovintage/airflow,KL-WLCR/incubator-airflow,OpringaoDoTurno/airflow,wileeam/airflow,aminghadersohi/airflow,jwi078/incubator-airflow,DinoCow/airflow,mrkm4ntr/incubator-airflow,jfantom/incubator-airflow,Tagar/incubator-airflow,wxiang7/airflow,kerzhner/airflow,dmitry-r/incubator-airflow,Fokko/incubator-airflow,jiwang576/incubator-airflow,andrewmchen/incubator-airflow,N3da/incubator-airflow,mrkm4ntr/incubator-airflow,mattuuh7/incubator-airflow,Chedi/airflow,jhsenjaliya/incubator-airflow
,NielsZeilemaker/incubator-airflow,d-lee/airflow,zack3241/incubator-airflow,vijaysbhat/incubator-airflow,nathanielvarona/airflow,wooga/airflow,preete-dixit-ck/incubator-airflow,skudriashev/incubator-airflow,wolfier/incubator-airflow,preete-dixit-ck/incubator-airflow,zodiac/incubator-airflow,MetrodataTeam/incubator-airflow,wooga/airflow,easytaxibr/airflow,caseyching/incubator-airflow,malmiron/incubator-airflow,dmitry-r/incubator-airflow,jhsenjaliya/incubator-airflow,ledsusop/airflow,KL-WLCR/incubator-airflow,DEVELByte/incubator-airflow,jiwang576/incubator-airflow,d-lee/airflow,bolkedebruin/airflow,griffinqiu/airflow,wxiang7/airflow,MetrodataTeam/incubator-airflow,sekikn/incubator-airflow,criccomini/airflow,akosel/incubator-airflow,Tagar/incubator-airflow,DEVELByte/incubator-airflow,airbnb/airflow,opensignal/airflow,mtustin-handy/airflow,danielvdende/incubator-airflow,subodhchhabra/airflow,yiqingj/airflow,plypaul/airflow,mtagle/airflow,rishibarve/incubator-airflow,caseyching/incubator-airflow,rishibarve/incubator-airflow,gtoonstra/airflow,wileeam/airflow,wxiang7/airflow,apache/airflow,plypaul/airflow,CloverHealth/airflow,biln/airflow,hgrif/incubator-airflow,AllisonWang/incubator-airflow,dhuang/incubator-airflow,gilt/incubator-airflow,sid88in/incubator-airflow,ty707/airflow,edgarRd/incubator-airflow,mrares/incubator-airflow,adrpar/incubator-airflow,KL-WLCR/incubator-airflow,zack3241/incubator-airflow,Tagar/incubator-airflow,Acehaidrey/incubator-airflow,ProstoMaxim/incubator-airflow,wolfier/incubator-airflow,edgarRd/incubator-airflow,mistercrunch/airflow,dud225/incubator-airflow,griffinqiu/airflow,zoyahav/incubator-airflow,spektom/incubator-airflow,mtdewulf/incubator-airflow,andrewmchen/incubator-airflow,sekikn/incubator-airflow,alexvanboxel/airflow,Fokko/incubator-airflow,jbhsieh/incubator-airflow,r39132/airflow,jwi078/incubator-airflow,mtagle/airflow,mrares/incubator-airflow,hgrif/incubator-airflow,btallman/incubator-airflow,yati-sagade/incubator-airflow,stverhae/incu
bator-airflow,apache/airflow,Acehaidrey/incubator-airflow,dhuang/incubator-airflow,jbhsieh/incubator-airflow,modsy/incubator-airflow,AllisonWang/incubator-airflow,Tagar/incubator-airflow,jgao54/airflow,gritlogic/incubator-airflow,opensignal/airflow,sekikn/incubator-airflow,yiqingj/airflow,zodiac/incubator-airflow,artwr/airflow,ledsusop/airflow,vijaysbhat/incubator-airflow,vineet-rh/incubator-airflow,caseyching/incubator-airflow,Twistbioscience/incubator-airflow,artwr/airflow,artwr/airflow,mattuuh7/incubator-airflow,easytaxibr/airflow,andyxhadji/incubator-airflow,adamhaney/airflow,jbhsieh/incubator-airflow,Twistbioscience/incubator-airflow,owlabs/incubator-airflow,lxneng/incubator-airflow,apache/airflow,vijaysbhat/incubator-airflow,adrpar/incubator-airflow,MetrodataTeam/incubator-airflow,sid88in/incubator-airflow,jfantom/incubator-airflow,dud225/incubator-airflow,brandsoulmates/incubator-airflow,nathanielvarona/airflow,gilt/incubator-airflow,KL-WLCR/incubator-airflow,subodhchhabra/airflow,cademarkegard/airflow,Fokko/incubator-airflow,stverhae/incubator-airflow,jlowin/airflow,gtoonstra/airflow,ty707/airflow,apache/incubator-airflow,criccomini/airflow,adamhaney/airflow,jesusfcr/airflow,MortalViews/incubator-airflow,yk5/incubator-airflow,mtagle/airflow,mtdewulf/incubator-airflow,airbnb/airflow,aminghadersohi/airflow,aminghadersohi/airflow,dhuang/incubator-airflow,lyft/incubator-airflow,juvoinc/airflow,gritlogic/incubator-airflow,juvoinc/airflow,wooga/airflow,sdiazb/airflow,zoyahav/incubator-airflow,juvoinc/airflow,mrares/incubator-airflow,subodhchhabra/airflow,brandsoulmates/incubator-airflow,cademarkegard/airflow,cademarkegard/airflow,saguziel/incubator-airflow,skudriashev/incubator-airflow,alexvanboxel/airflow,jiwang576/incubator-airflow,dmitry-r/incubator-airflow,subodhchhabra/airflow,zodiac/incubator-airflow,wxiang7/airflow,biln/airflow,cfei18/incubator-airflow,gtoonstra/airflow,mtdewulf/incubator-airflow,biln/airflow,lyft/incubator-airflow,spektom/incubator-airflow
,gilt/incubator-airflow,akosel/incubator-airflow,mtagle/airflow,saguziel/incubator-airflow,yoziru-desu/airflow,kerzhner/airflow,jlowin/airflow,griffinqiu/airflow,RealImpactAnalytics/airflow,asnir/airflow,wileeam/airflow,jbhsieh/incubator-airflow,cademarkegard/airflow,criccomini/airflow,apache/incubator-airflow,zodiac/incubator-airflow,plypaul/airflow,RealImpactAnalytics/airflow,akosel/incubator-airflow,mylons/incubator-airflow,aminghadersohi/airflow,r39132/airflow,NielsZeilemaker/incubator-airflow,apache/airflow,lyft/incubator-airflow,criccomini/airflow,zoyahav/incubator-airflow,moritzpein/airflow,MortalViews/incubator-airflow,forevernull/incubator-airflow,jgao54/airflow,OpringaoDoTurno/airflow,janczak10/incubator-airflow,edgarRd/incubator-airflow,neovintage/airflow,Chedi/airflow,jlowin/airflow,CloverHealth/airflow,biln/airflow,moritzpein/airflow,danielvdende/incubator-airflow,zack3241/incubator-airflow | import logging
from airflow import configuration
from airflow.executors.base_executor import BaseExecutor
from airflow.executors.local_executor import LocalExecutor
from airflow.executors.sequential_executor import SequentialExecutor
-
- # TODO Fix this emergency fix
- try:
- from airflow.executors.celery_executor import CeleryExecutor
- except:
- pass
- try:
- from airflow.contrib.executors.mesos_executor import MesosExecutor
- except:
- pass
from airflow.utils import AirflowException
_EXECUTOR = configuration.get('core', 'EXECUTOR')
if _EXECUTOR == 'LocalExecutor':
DEFAULT_EXECUTOR = LocalExecutor()
elif _EXECUTOR == 'CeleryExecutor':
+ from airflow.executors.celery_executor import CeleryExecutor
DEFAULT_EXECUTOR = CeleryExecutor()
elif _EXECUTOR == 'SequentialExecutor':
DEFAULT_EXECUTOR = SequentialExecutor()
elif _EXECUTOR == 'MesosExecutor':
+ from airflow.contrib.executors.mesos_executor import MesosExecutor
DEFAULT_EXECUTOR = MesosExecutor()
else:
# Loading plugins
from airflow.plugins_manager import executors as _executors
for _executor in _executors:
globals()[_executor.__name__] = _executor
if _EXECUTOR in globals():
DEFAULT_EXECUTOR = globals()[_EXECUTOR]()
else:
raise AirflowException("Executor {0} not supported.".format(_EXECUTOR))
logging.info("Using executor " + _EXECUTOR)
| Remove hack by only importing when configured | ## Code Before:
import logging
from airflow import configuration
from airflow.executors.base_executor import BaseExecutor
from airflow.executors.local_executor import LocalExecutor
from airflow.executors.sequential_executor import SequentialExecutor
# TODO Fix this emergency fix
try:
from airflow.executors.celery_executor import CeleryExecutor
except:
pass
try:
from airflow.contrib.executors.mesos_executor import MesosExecutor
except:
pass
from airflow.utils import AirflowException
_EXECUTOR = configuration.get('core', 'EXECUTOR')
if _EXECUTOR == 'LocalExecutor':
DEFAULT_EXECUTOR = LocalExecutor()
elif _EXECUTOR == 'CeleryExecutor':
DEFAULT_EXECUTOR = CeleryExecutor()
elif _EXECUTOR == 'SequentialExecutor':
DEFAULT_EXECUTOR = SequentialExecutor()
elif _EXECUTOR == 'MesosExecutor':
DEFAULT_EXECUTOR = MesosExecutor()
else:
# Loading plugins
from airflow.plugins_manager import executors as _executors
for _executor in _executors:
globals()[_executor.__name__] = _executor
if _EXECUTOR in globals():
DEFAULT_EXECUTOR = globals()[_EXECUTOR]()
else:
raise AirflowException("Executor {0} not supported.".format(_EXECUTOR))
logging.info("Using executor " + _EXECUTOR)
## Instruction:
Remove hack by only importing when configured
## Code After:
import logging
from airflow import configuration
from airflow.executors.base_executor import BaseExecutor
from airflow.executors.local_executor import LocalExecutor
from airflow.executors.sequential_executor import SequentialExecutor
from airflow.utils import AirflowException
_EXECUTOR = configuration.get('core', 'EXECUTOR')
if _EXECUTOR == 'LocalExecutor':
DEFAULT_EXECUTOR = LocalExecutor()
elif _EXECUTOR == 'CeleryExecutor':
from airflow.executors.celery_executor import CeleryExecutor
DEFAULT_EXECUTOR = CeleryExecutor()
elif _EXECUTOR == 'SequentialExecutor':
DEFAULT_EXECUTOR = SequentialExecutor()
elif _EXECUTOR == 'MesosExecutor':
from airflow.contrib.executors.mesos_executor import MesosExecutor
DEFAULT_EXECUTOR = MesosExecutor()
else:
# Loading plugins
from airflow.plugins_manager import executors as _executors
for _executor in _executors:
globals()[_executor.__name__] = _executor
if _EXECUTOR in globals():
DEFAULT_EXECUTOR = globals()[_EXECUTOR]()
else:
raise AirflowException("Executor {0} not supported.".format(_EXECUTOR))
logging.info("Using executor " + _EXECUTOR)
|
5ac310b7c5cee4a8c5f247ae117fda17fc4cb61a | pypocketexplore/jobs.py | pypocketexplore/jobs.py | from datetime import datetime
import requests as req
from pymongo import MongoClient
from pypocketexplore.config import MONGO_URI
from time import sleep
def extract_topic_items(topic):
db = MongoClient(MONGO_URI).get_default_database()
resp = req.get('http://localhost:5000/api/topic/{}'.format(topic))
data = resp.json()
related_topics = data.get('related_topics')
items = data.get('items')
if items:
res = db['items'].insert(items)
db['topics'].update_many({'topic': topic}, {'$set': {'topic': topic,
'is_scraped': True,
'datetime_scraped': datetime.utcnow(),
'queued': True}},
upsert=True)
for related_topic in related_topics:
req.get('http://localhost:5000/api/topic/{}?async=true'.format(related_topic)).json()
print("Rate limit! Going to sleep for 2 mins!")
sleep(2 * 60)
print("Wakey wakey eggs and bakey!")
return res
elif resp.ok and not items:
return
else:
raise Exception
if __name__ == '__main__':
extract_topic_items('finance')
| from datetime import datetime
import requests as req
from pymongo import MongoClient
from pypocketexplore.config import MONGO_URI
from time import sleep
from redis import StrictRedis
import rq
def extract_topic_items(topic):
r = StrictRedis()
def topic_in_queue(topic):
q = rq.Queue('topics', connection=StrictRedis())
if any(job.kwargs.get('topic') for job in q.get_jobs()):
return True
else:
return False
db = MongoClient(MONGO_URI).get_default_database()
resp = req.get('http://localhost:5000/api/topic/{}'.format(topic))
data = resp.json()
related_topics = data.get('related_topics')
items = data.get('items')
if resp.ok:
print('Inserting {} items for topic {}'.format(len(items)), topic)
res = db['items'].insert(items)
r.sadd('scraped_topics', topic)
for related_topic in related_topics:
if not topic_in_queue(related_topic) and not r.sismember('scraped_topics', related_topic):
print('Enqueuing related topic'.format(related_topic))
req.get('http://localhost:5000/api/topic/{}?async=true'.format(related_topic)).json()
print("Rate limit! Going to sleep for 2 mins!")
sleep(2 * 60)
print("Wakey wakey eggs and bakey!")
return res
else:
raise Exception('Something went wrong with topic {}. /api/explore/{} returned {}'.format(topic, topic, resp))
if __name__ == '__main__':
extract_topic_items('finance')
| Fix bug to avoid duplicating topics | Fix bug to avoid duplicating topics
| Python | mit | Florents-Tselai/PyPocketExplore | from datetime import datetime
import requests as req
from pymongo import MongoClient
from pypocketexplore.config import MONGO_URI
from time import sleep
+ from redis import StrictRedis
+ import rq
+
def extract_topic_items(topic):
+ r = StrictRedis()
+
+ def topic_in_queue(topic):
+ q = rq.Queue('topics', connection=StrictRedis())
+ if any(job.kwargs.get('topic') for job in q.get_jobs()):
+ return True
+ else:
+ return False
+
db = MongoClient(MONGO_URI).get_default_database()
resp = req.get('http://localhost:5000/api/topic/{}'.format(topic))
data = resp.json()
related_topics = data.get('related_topics')
items = data.get('items')
- if items:
+ if resp.ok:
+ print('Inserting {} items for topic {}'.format(len(items)), topic)
res = db['items'].insert(items)
+ r.sadd('scraped_topics', topic)
+
- db['topics'].update_many({'topic': topic}, {'$set': {'topic': topic,
- 'is_scraped': True,
- 'datetime_scraped': datetime.utcnow(),
- 'queued': True}},
- upsert=True)
for related_topic in related_topics:
+ if not topic_in_queue(related_topic) and not r.sismember('scraped_topics', related_topic):
+ print('Enqueuing related topic'.format(related_topic))
- req.get('http://localhost:5000/api/topic/{}?async=true'.format(related_topic)).json()
+ req.get('http://localhost:5000/api/topic/{}?async=true'.format(related_topic)).json()
print("Rate limit! Going to sleep for 2 mins!")
sleep(2 * 60)
print("Wakey wakey eggs and bakey!")
return res
- elif resp.ok and not items:
- return
else:
- raise Exception
+ raise Exception('Something went wrong with topic {}. /api/explore/{} returned {}'.format(topic, topic, resp))
if __name__ == '__main__':
extract_topic_items('finance')
| Fix bug to avoid duplicating topics | ## Code Before:
from datetime import datetime
import requests as req
from pymongo import MongoClient
from pypocketexplore.config import MONGO_URI
from time import sleep
def extract_topic_items(topic):
db = MongoClient(MONGO_URI).get_default_database()
resp = req.get('http://localhost:5000/api/topic/{}'.format(topic))
data = resp.json()
related_topics = data.get('related_topics')
items = data.get('items')
if items:
res = db['items'].insert(items)
db['topics'].update_many({'topic': topic}, {'$set': {'topic': topic,
'is_scraped': True,
'datetime_scraped': datetime.utcnow(),
'queued': True}},
upsert=True)
for related_topic in related_topics:
req.get('http://localhost:5000/api/topic/{}?async=true'.format(related_topic)).json()
print("Rate limit! Going to sleep for 2 mins!")
sleep(2 * 60)
print("Wakey wakey eggs and bakey!")
return res
elif resp.ok and not items:
return
else:
raise Exception
if __name__ == '__main__':
extract_topic_items('finance')
## Instruction:
Fix bug to avoid duplicating topics
## Code After:
from datetime import datetime
import requests as req
from pymongo import MongoClient
from pypocketexplore.config import MONGO_URI
from time import sleep
from redis import StrictRedis
import rq
def extract_topic_items(topic):
r = StrictRedis()
def topic_in_queue(topic):
q = rq.Queue('topics', connection=StrictRedis())
if any(job.kwargs.get('topic') for job in q.get_jobs()):
return True
else:
return False
db = MongoClient(MONGO_URI).get_default_database()
resp = req.get('http://localhost:5000/api/topic/{}'.format(topic))
data = resp.json()
related_topics = data.get('related_topics')
items = data.get('items')
if resp.ok:
print('Inserting {} items for topic {}'.format(len(items)), topic)
res = db['items'].insert(items)
r.sadd('scraped_topics', topic)
for related_topic in related_topics:
if not topic_in_queue(related_topic) and not r.sismember('scraped_topics', related_topic):
print('Enqueuing related topic'.format(related_topic))
req.get('http://localhost:5000/api/topic/{}?async=true'.format(related_topic)).json()
print("Rate limit! Going to sleep for 2 mins!")
sleep(2 * 60)
print("Wakey wakey eggs and bakey!")
return res
else:
raise Exception('Something went wrong with topic {}. /api/explore/{} returned {}'.format(topic, topic, resp))
if __name__ == '__main__':
extract_topic_items('finance')
|
edec2186f5a83789a5d6a5dbd112c9ff716c3d46 | src/python/datamodels/output_models.py | src/python/datamodels/output_models.py | import hashlib
class Store(object):
def __init__(self):
self.id = None
self.name = None
self.location = None
def __repr__(self):
return "%s,%s,%s" % (self.name, self.location.zipcode, self.location.coords)
class Customer(object):
def __init__(self):
self.id = None
self.name = None
self.location = None
def __repr__(self):
return "(%s, %s, %s)" % \
(self.id, self.name, self.location.zipcode)
class Transaction(object):
def __init__(self, customer=None, trans_time=None, purchased_items=None, store=None,
trans_count=None):
self.store = store
self.customer = customer
self.trans_time = trans_time
self.purchased_items = purchased_items
self.trans_count = trans_count
def transaction_id(self):
return hashlib.md5(repr(self)).hexdigest()
def __repr__(self):
return "(%s, %s, %s, %s)" % (self.store.id,
self.customer.id,
self.trans_time,
self.trans_count)
| import hashlib
class Store(object):
"""
Record for stores.
id -- integer
name -- string
location -- ZipcodeRecord
"""
def __init__(self):
self.id = None
self.name = None
self.location = None
def __repr__(self):
return "%s,%s,%s" % (self.name, self.location.zipcode, self.location.coords)
class Customer(object):
"""
Record for customers.
id -- integer
name -- string
location -- ZipcodeRecord
"""
def __init__(self):
self.id = None
self.name = None
self.location = None
def __repr__(self):
return "(%s, %s, %s)" % \
(self.id, self.name, self.location.zipcode)
class Transaction(object):
"""
Record for transactions
store -- Store
customer -- Customer
trans_time -- transaction time in days since start of simulation. int or long
purchased_items -- list of products purchased
trans_count -- hidden transaction id
"""
def __init__(self, customer=None, trans_time=None, purchased_items=None, store=None,
trans_count=None):
self.store = store
self.customer = customer
self.trans_time = trans_time
self.purchased_items = purchased_items
self.trans_count = trans_count
def transaction_id(self):
"""
Compute transaction id as a hash of the transaction.
Returns a string
"""
return hashlib.md5(repr(self)).hexdigest()
def __repr__(self):
return "(%s, %s, %s, %s)" % (self.store.id,
self.customer.id,
self.trans_time,
self.trans_count)
| Add docstrings to output models | Add docstrings to output models
| Python | apache-2.0 | rnowling/bigpetstore-data-generator,rnowling/bigpetstore-data-generator,rnowling/bigpetstore-data-generator | import hashlib
class Store(object):
+ """
+ Record for stores.
+
+ id -- integer
+ name -- string
+ location -- ZipcodeRecord
+ """
+
def __init__(self):
self.id = None
self.name = None
self.location = None
def __repr__(self):
return "%s,%s,%s" % (self.name, self.location.zipcode, self.location.coords)
class Customer(object):
+ """
+ Record for customers.
+
+ id -- integer
+ name -- string
+ location -- ZipcodeRecord
+ """
+
def __init__(self):
self.id = None
self.name = None
self.location = None
def __repr__(self):
return "(%s, %s, %s)" % \
(self.id, self.name, self.location.zipcode)
class Transaction(object):
+ """
+ Record for transactions
+
+ store -- Store
+ customer -- Customer
+ trans_time -- transaction time in days since start of simulation. int or long
+ purchased_items -- list of products purchased
+ trans_count -- hidden transaction id
+ """
+
def __init__(self, customer=None, trans_time=None, purchased_items=None, store=None,
trans_count=None):
self.store = store
self.customer = customer
self.trans_time = trans_time
self.purchased_items = purchased_items
self.trans_count = trans_count
def transaction_id(self):
+ """
+ Compute transaction id as a hash of the transaction.
+
+ Returns a string
+ """
return hashlib.md5(repr(self)).hexdigest()
def __repr__(self):
return "(%s, %s, %s, %s)" % (self.store.id,
self.customer.id,
self.trans_time,
self.trans_count)
| Add docstrings to output models | ## Code Before:
import hashlib
class Store(object):
def __init__(self):
self.id = None
self.name = None
self.location = None
def __repr__(self):
return "%s,%s,%s" % (self.name, self.location.zipcode, self.location.coords)
class Customer(object):
def __init__(self):
self.id = None
self.name = None
self.location = None
def __repr__(self):
return "(%s, %s, %s)" % \
(self.id, self.name, self.location.zipcode)
class Transaction(object):
def __init__(self, customer=None, trans_time=None, purchased_items=None, store=None,
trans_count=None):
self.store = store
self.customer = customer
self.trans_time = trans_time
self.purchased_items = purchased_items
self.trans_count = trans_count
def transaction_id(self):
return hashlib.md5(repr(self)).hexdigest()
def __repr__(self):
return "(%s, %s, %s, %s)" % (self.store.id,
self.customer.id,
self.trans_time,
self.trans_count)
## Instruction:
Add docstrings to output models
## Code After:
import hashlib
class Store(object):
"""
Record for stores.
id -- integer
name -- string
location -- ZipcodeRecord
"""
def __init__(self):
self.id = None
self.name = None
self.location = None
def __repr__(self):
return "%s,%s,%s" % (self.name, self.location.zipcode, self.location.coords)
class Customer(object):
"""
Record for customers.
id -- integer
name -- string
location -- ZipcodeRecord
"""
def __init__(self):
self.id = None
self.name = None
self.location = None
def __repr__(self):
return "(%s, %s, %s)" % \
(self.id, self.name, self.location.zipcode)
class Transaction(object):
"""
Record for transactions
store -- Store
customer -- Customer
trans_time -- transaction time in days since start of simulation. int or long
purchased_items -- list of products purchased
trans_count -- hidden transaction id
"""
def __init__(self, customer=None, trans_time=None, purchased_items=None, store=None,
trans_count=None):
self.store = store
self.customer = customer
self.trans_time = trans_time
self.purchased_items = purchased_items
self.trans_count = trans_count
def transaction_id(self):
"""
Compute transaction id as a hash of the transaction.
Returns a string
"""
return hashlib.md5(repr(self)).hexdigest()
def __repr__(self):
return "(%s, %s, %s, %s)" % (self.store.id,
self.customer.id,
self.trans_time,
self.trans_count)
|
fb4aa211f64ed6fdc0443d03dd02dc52fc882978 | server/dummy/dummy_server.py | server/dummy/dummy_server.py |
import BaseHTTPServer
ServerClass = BaseHTTPServer.HTTPServer
RequestHandlerClass = BaseHTTPServer.BaseHTTPRequestHandler
SERVER_NAME = ''
SERVER_PORT = 9000
class JsonPostResponder(RequestHandlerClass):
def _get_content_from_stream(self, length, stream):
return stream.read(length)
def do_POST(self):
content_length = int(self.headers['Content-Length'])
content = self._get_content_from_stream(content_length, self.rfile)
print('\n--- %s%s\n%s' % (self.command, self.path, self.headers))
print content, '\n'
self.send_response(200)
self.end_headers()
server_address = (SERVER_NAME, SERVER_PORT)
httpd = ServerClass(server_address, JsonPostResponder)
httpd.serve_forever()
|
import BaseHTTPServer
ServerClass = BaseHTTPServer.HTTPServer
RequestHandlerClass = BaseHTTPServer.BaseHTTPRequestHandler
SERVER_NAME = ''
SERVER_PORT = 9000
class JsonPostResponder(RequestHandlerClass):
def _get_content_from_stream(self, length, stream):
return stream.read(length)
def _transaction_string(self, command, path, headers, content):
return '%s %s\n%s%s\n' % (command, path, headers, content)
def _print_request(self, *request):
print('--> %s' % self._transaction_string(*request))
def do_POST(self):
content_length = int(self.headers['Content-Length'])
content = self._get_content_from_stream(content_length, self.rfile)
self._print_request(self.command, self.path, self.headers, content)
self.send_response(200)
self.end_headers()
server_address = (SERVER_NAME, SERVER_PORT)
httpd = ServerClass(server_address, JsonPostResponder)
httpd.serve_forever()
| Clean up and refactor printing of request | Clean up and refactor printing of request
| Python | mit | jonspeicher/Puddle,jonspeicher/Puddle,jonspeicher/Puddle |
import BaseHTTPServer
ServerClass = BaseHTTPServer.HTTPServer
RequestHandlerClass = BaseHTTPServer.BaseHTTPRequestHandler
SERVER_NAME = ''
SERVER_PORT = 9000
class JsonPostResponder(RequestHandlerClass):
def _get_content_from_stream(self, length, stream):
return stream.read(length)
+ def _transaction_string(self, command, path, headers, content):
+ return '%s %s\n%s%s\n' % (command, path, headers, content)
+
+ def _print_request(self, *request):
+ print('--> %s' % self._transaction_string(*request))
+
def do_POST(self):
content_length = int(self.headers['Content-Length'])
content = self._get_content_from_stream(content_length, self.rfile)
+ self._print_request(self.command, self.path, self.headers, content)
-
- print('\n--- %s%s\n%s' % (self.command, self.path, self.headers))
- print content, '\n'
self.send_response(200)
self.end_headers()
server_address = (SERVER_NAME, SERVER_PORT)
httpd = ServerClass(server_address, JsonPostResponder)
httpd.serve_forever()
| Clean up and refactor printing of request | ## Code Before:
import BaseHTTPServer
ServerClass = BaseHTTPServer.HTTPServer
RequestHandlerClass = BaseHTTPServer.BaseHTTPRequestHandler
SERVER_NAME = ''
SERVER_PORT = 9000
class JsonPostResponder(RequestHandlerClass):
def _get_content_from_stream(self, length, stream):
return stream.read(length)
def do_POST(self):
content_length = int(self.headers['Content-Length'])
content = self._get_content_from_stream(content_length, self.rfile)
print('\n--- %s%s\n%s' % (self.command, self.path, self.headers))
print content, '\n'
self.send_response(200)
self.end_headers()
server_address = (SERVER_NAME, SERVER_PORT)
httpd = ServerClass(server_address, JsonPostResponder)
httpd.serve_forever()
## Instruction:
Clean up and refactor printing of request
## Code After:
import BaseHTTPServer
ServerClass = BaseHTTPServer.HTTPServer
RequestHandlerClass = BaseHTTPServer.BaseHTTPRequestHandler
SERVER_NAME = ''
SERVER_PORT = 9000
class JsonPostResponder(RequestHandlerClass):
def _get_content_from_stream(self, length, stream):
return stream.read(length)
def _transaction_string(self, command, path, headers, content):
return '%s %s\n%s%s\n' % (command, path, headers, content)
def _print_request(self, *request):
print('--> %s' % self._transaction_string(*request))
def do_POST(self):
content_length = int(self.headers['Content-Length'])
content = self._get_content_from_stream(content_length, self.rfile)
self._print_request(self.command, self.path, self.headers, content)
self.send_response(200)
self.end_headers()
server_address = (SERVER_NAME, SERVER_PORT)
httpd = ServerClass(server_address, JsonPostResponder)
httpd.serve_forever()
|
3e3f7b827e226146ec7d3efe523f1f900ac4e99a | sjconfparts/type.py | sjconfparts/type.py | class Type:
@classmethod
def str_to_list(xcls, str_object):
list = map(str.strip, str_object.split(','))
try:
list.remove('')
except ValueError:
pass
return list
@classmethod
def list_to_str(xcls, list_object):
return ', '.join(list_object)
@classmethod
def str_to_bool(xcls, str_object):
if str_object == "yes" or str_object == "on" or str_object == "true":
return True
elif str_object == "no" or str_object == "off" or str_object == "false":
return False
else:
raise TypeError
@classmethod
def bool_to_str(xcls, bool_object):
if bool_object:
return "yes"
else:
return "no"
| class Type:
@classmethod
def str_to_list(xcls, str_object):
list = map(str.strip, str_object.split(','))
try:
list.remove('')
except ValueError:
pass
return list
@classmethod
def list_to_str(xcls, list_object):
return ', '.join(list_object)
@classmethod
def str_to_bool(xcls, str_object):
if str_object == "yes" or str_object == "on" or str_object == "true" or str_object == "enabled" or str_object == "enable":
return True
elif str_object == "no" or str_object == "off" or str_object == "false" or str_object == "disabled" or str_object == "disable":
return False
else:
raise TypeError
@classmethod
def bool_to_str(xcls, bool_object):
if bool_object:
return "yes"
else:
return "no"
| Allow “enabled“, “enable”, “disabled“, “disable” as boolean values | Allow “enabled“, “enable”, “disabled“, “disable” as boolean values
| Python | lgpl-2.1 | SmartJog/sjconf,SmartJog/sjconf | class Type:
@classmethod
def str_to_list(xcls, str_object):
list = map(str.strip, str_object.split(','))
try:
list.remove('')
except ValueError:
pass
return list
@classmethod
def list_to_str(xcls, list_object):
return ', '.join(list_object)
@classmethod
def str_to_bool(xcls, str_object):
- if str_object == "yes" or str_object == "on" or str_object == "true":
+ if str_object == "yes" or str_object == "on" or str_object == "true" or str_object == "enabled" or str_object == "enable":
return True
- elif str_object == "no" or str_object == "off" or str_object == "false":
+ elif str_object == "no" or str_object == "off" or str_object == "false" or str_object == "disabled" or str_object == "disable":
return False
else:
raise TypeError
@classmethod
def bool_to_str(xcls, bool_object):
if bool_object:
return "yes"
else:
return "no"
| Allow “enabled“, “enable”, “disabled“, “disable” as boolean values | ## Code Before:
class Type:
@classmethod
def str_to_list(xcls, str_object):
list = map(str.strip, str_object.split(','))
try:
list.remove('')
except ValueError:
pass
return list
@classmethod
def list_to_str(xcls, list_object):
return ', '.join(list_object)
@classmethod
def str_to_bool(xcls, str_object):
if str_object == "yes" or str_object == "on" or str_object == "true":
return True
elif str_object == "no" or str_object == "off" or str_object == "false":
return False
else:
raise TypeError
@classmethod
def bool_to_str(xcls, bool_object):
if bool_object:
return "yes"
else:
return "no"
## Instruction:
Allow “enabled“, “enable”, “disabled“, “disable” as boolean values
## Code After:
class Type:
@classmethod
def str_to_list(xcls, str_object):
list = map(str.strip, str_object.split(','))
try:
list.remove('')
except ValueError:
pass
return list
@classmethod
def list_to_str(xcls, list_object):
return ', '.join(list_object)
@classmethod
def str_to_bool(xcls, str_object):
if str_object == "yes" or str_object == "on" or str_object == "true" or str_object == "enabled" or str_object == "enable":
return True
elif str_object == "no" or str_object == "off" or str_object == "false" or str_object == "disabled" or str_object == "disable":
return False
else:
raise TypeError
@classmethod
def bool_to_str(xcls, bool_object):
if bool_object:
return "yes"
else:
return "no"
|
00aad9bc179aa4a090f703db9669e8ba49ff8f3c | bibliopixel/main/arguments.py | bibliopixel/main/arguments.py | from .. project import project
"""Common command line arguments for run and demo."""
def add_to_parser(parser):
parser.add_argument(
'-d', '--driver', default='simpixel',
help='Default driver type if no driver is specified')
parser.add_argument(
'-l', '--layout', default='matrix',
help='Default layout class if no layout is specified')
parser.add_argument(
'-t', '--ledtype', default=None,
help='Default LED type if no LED type is specified')
parser.add_argument(
'-a', '--animation', default=None,
help='Default animation type if no animation is specified')
parser.add_argument(
'-s', action='store_true', help='Run SimPixel at the default URL')
parser.add_argument('--simpixel', help='Run SimPixel at a specific URL')
def get_dict(args):
result = {}
for name in 'driver', 'layout', 'animation':
value = args and getattr(args, name)
result[name] = {'typename': value} if value else {}
if args and args.ledtype:
result['driver']['ledtype'] = args.ledtype
return result
def make_animation(args, desc):
return project.project_to_animation(desc, get_dict(args))
| import json
from .. project import project
"""Common command line arguments for run and demo."""
COMPONENTS = 'driver', 'layout', 'animation'
def add_to_parser(parser):
parser.add_argument(
'-d', '--driver', default='simpixel',
help='Default driver type if no driver is specified')
parser.add_argument(
'-l', '--layout', default='matrix',
help='Default layout class if no layout is specified')
parser.add_argument(
'-t', '--ledtype', default=None,
help='Default LED type if no LED type is specified')
parser.add_argument(
'-a', '--animation', default=None,
help='Default animation type if no animation is specified')
parser.add_argument(
'-s', action='store_true', help='Run SimPixel at the default URL')
parser.add_argument('--simpixel', help='Run SimPixel at a specific URL')
def get_dict(args):
def get_value(name):
value = args and getattr(args, name)
if not value:
return {}
if '{' in value:
return json.loads(value)
return {'typename': value}
result = {name: get_value(name) for name in COMPONENTS}
if args and args.ledtype:
result['driver']['ledtype'] = args.ledtype
return result
def make_animation(args, desc):
return project.project_to_animation(desc, get_dict(args))
| Allow json in component flags. | Allow json in component flags.
| Python | mit | ManiacalLabs/BiblioPixel,ManiacalLabs/BiblioPixel,rec/BiblioPixel,ManiacalLabs/BiblioPixel,ManiacalLabs/BiblioPixel,rec/BiblioPixel,rec/BiblioPixel,rec/BiblioPixel | + import json
from .. project import project
"""Common command line arguments for run and demo."""
+
+ COMPONENTS = 'driver', 'layout', 'animation'
def add_to_parser(parser):
parser.add_argument(
'-d', '--driver', default='simpixel',
help='Default driver type if no driver is specified')
parser.add_argument(
'-l', '--layout', default='matrix',
help='Default layout class if no layout is specified')
parser.add_argument(
'-t', '--ledtype', default=None,
help='Default LED type if no LED type is specified')
parser.add_argument(
'-a', '--animation', default=None,
help='Default animation type if no animation is specified')
parser.add_argument(
'-s', action='store_true', help='Run SimPixel at the default URL')
parser.add_argument('--simpixel', help='Run SimPixel at a specific URL')
def get_dict(args):
+ def get_value(name):
- result = {}
- for name in 'driver', 'layout', 'animation':
value = args and getattr(args, name)
- result[name] = {'typename': value} if value else {}
+ if not value:
+ return {}
+ if '{' in value:
+ return json.loads(value)
+
+ return {'typename': value}
+
+ result = {name: get_value(name) for name in COMPONENTS}
if args and args.ledtype:
result['driver']['ledtype'] = args.ledtype
return result
def make_animation(args, desc):
return project.project_to_animation(desc, get_dict(args))
| Allow json in component flags. | ## Code Before:
from .. project import project
"""Common command line arguments for run and demo."""
def add_to_parser(parser):
parser.add_argument(
'-d', '--driver', default='simpixel',
help='Default driver type if no driver is specified')
parser.add_argument(
'-l', '--layout', default='matrix',
help='Default layout class if no layout is specified')
parser.add_argument(
'-t', '--ledtype', default=None,
help='Default LED type if no LED type is specified')
parser.add_argument(
'-a', '--animation', default=None,
help='Default animation type if no animation is specified')
parser.add_argument(
'-s', action='store_true', help='Run SimPixel at the default URL')
parser.add_argument('--simpixel', help='Run SimPixel at a specific URL')
def get_dict(args):
result = {}
for name in 'driver', 'layout', 'animation':
value = args and getattr(args, name)
result[name] = {'typename': value} if value else {}
if args and args.ledtype:
result['driver']['ledtype'] = args.ledtype
return result
def make_animation(args, desc):
return project.project_to_animation(desc, get_dict(args))
## Instruction:
Allow json in component flags.
## Code After:
import json
from .. project import project
"""Common command line arguments for run and demo."""
COMPONENTS = 'driver', 'layout', 'animation'
def add_to_parser(parser):
parser.add_argument(
'-d', '--driver', default='simpixel',
help='Default driver type if no driver is specified')
parser.add_argument(
'-l', '--layout', default='matrix',
help='Default layout class if no layout is specified')
parser.add_argument(
'-t', '--ledtype', default=None,
help='Default LED type if no LED type is specified')
parser.add_argument(
'-a', '--animation', default=None,
help='Default animation type if no animation is specified')
parser.add_argument(
'-s', action='store_true', help='Run SimPixel at the default URL')
parser.add_argument('--simpixel', help='Run SimPixel at a specific URL')
def get_dict(args):
def get_value(name):
value = args and getattr(args, name)
if not value:
return {}
if '{' in value:
return json.loads(value)
return {'typename': value}
result = {name: get_value(name) for name in COMPONENTS}
if args and args.ledtype:
result['driver']['ledtype'] = args.ledtype
return result
def make_animation(args, desc):
return project.project_to_animation(desc, get_dict(args))
|
802d030087d7f15add5ccfa5d305555632575642 | changes/jobs/cleanup_tasks.py | changes/jobs/cleanup_tasks.py | from __future__ import absolute_import
from datetime import datetime, timedelta
from changes.config import queue
from changes.constants import Status
from changes.experimental.stats import RCount
from changes.models import Task
from changes.queue.task import TrackedTask, tracked_task
CHECK_TIME = timedelta(minutes=60)
@tracked_task
def cleanup_tasks():
with RCount('cleanup_tasks'):
"""
Find any tasks which haven't checked in within a reasonable time period and
requeue them if necessary.
"""
now = datetime.utcnow()
cutoff = now - CHECK_TIME
pending_tasks = Task.query.filter(
Task.status != Status.finished,
Task.date_modified < cutoff,
)
for task in pending_tasks:
task_func = TrackedTask(queue.get_task(task.task_name))
task_func.delay(
task_id=task.task_id.hex,
parent_task_id=task.parent_id.hex if task.parent_id else None,
**task.data['kwargs']
)
| from __future__ import absolute_import
from datetime import datetime, timedelta
from changes.config import queue
from changes.constants import Status
from changes.experimental.stats import RCount, incr
from changes.models import Task
from changes.queue.task import TrackedTask, tracked_task
CHECK_TIME = timedelta(minutes=60)
@tracked_task
def cleanup_tasks():
with RCount('cleanup_tasks'):
"""
Find any tasks which haven't checked in within a reasonable time period and
requeue them if necessary.
"""
now = datetime.utcnow()
cutoff = now - CHECK_TIME
pending_tasks = Task.query.filter(
Task.status != Status.finished,
Task.date_modified < cutoff,
)
for task in pending_tasks:
incr('cleanup_unfinished')
task_func = TrackedTask(queue.get_task(task.task_name))
task_func.delay(
task_id=task.task_id.hex,
parent_task_id=task.parent_id.hex if task.parent_id else None,
**task.data['kwargs']
)
| Add counter for cleanup tasks not following the decorator | Add counter for cleanup tasks not following the decorator
| Python | apache-2.0 | bowlofstew/changes,wfxiang08/changes,dropbox/changes,wfxiang08/changes,bowlofstew/changes,wfxiang08/changes,dropbox/changes,bowlofstew/changes,bowlofstew/changes,wfxiang08/changes,dropbox/changes,dropbox/changes | from __future__ import absolute_import
from datetime import datetime, timedelta
from changes.config import queue
from changes.constants import Status
- from changes.experimental.stats import RCount
+ from changes.experimental.stats import RCount, incr
from changes.models import Task
from changes.queue.task import TrackedTask, tracked_task
CHECK_TIME = timedelta(minutes=60)
@tracked_task
def cleanup_tasks():
with RCount('cleanup_tasks'):
"""
Find any tasks which haven't checked in within a reasonable time period and
requeue them if necessary.
"""
now = datetime.utcnow()
cutoff = now - CHECK_TIME
pending_tasks = Task.query.filter(
Task.status != Status.finished,
Task.date_modified < cutoff,
)
for task in pending_tasks:
+ incr('cleanup_unfinished')
task_func = TrackedTask(queue.get_task(task.task_name))
task_func.delay(
task_id=task.task_id.hex,
parent_task_id=task.parent_id.hex if task.parent_id else None,
**task.data['kwargs']
)
| Add counter for cleanup tasks not following the decorator | ## Code Before:
from __future__ import absolute_import
from datetime import datetime, timedelta
from changes.config import queue
from changes.constants import Status
from changes.experimental.stats import RCount
from changes.models import Task
from changes.queue.task import TrackedTask, tracked_task
CHECK_TIME = timedelta(minutes=60)
@tracked_task
def cleanup_tasks():
with RCount('cleanup_tasks'):
"""
Find any tasks which haven't checked in within a reasonable time period and
requeue them if necessary.
"""
now = datetime.utcnow()
cutoff = now - CHECK_TIME
pending_tasks = Task.query.filter(
Task.status != Status.finished,
Task.date_modified < cutoff,
)
for task in pending_tasks:
task_func = TrackedTask(queue.get_task(task.task_name))
task_func.delay(
task_id=task.task_id.hex,
parent_task_id=task.parent_id.hex if task.parent_id else None,
**task.data['kwargs']
)
## Instruction:
Add counter for cleanup tasks not following the decorator
## Code After:
from __future__ import absolute_import
from datetime import datetime, timedelta
from changes.config import queue
from changes.constants import Status
from changes.experimental.stats import RCount, incr
from changes.models import Task
from changes.queue.task import TrackedTask, tracked_task
CHECK_TIME = timedelta(minutes=60)
@tracked_task
def cleanup_tasks():
    """
    Find any tasks which haven't checked in within a reasonable time period and
    requeue them if necessary.
    """
    # NOTE: this text previously sat inside the ``with RCount`` block, where
    # it was a bare no-op expression rather than the function's docstring.
    with RCount('cleanup_tasks'):
        now = datetime.utcnow()
        cutoff = now - CHECK_TIME

        # Any unfinished task not modified since the cutoff is considered
        # stalled and is re-queued below.
        pending_tasks = Task.query.filter(
            Task.status != Status.finished,
            Task.date_modified < cutoff,
        )

        for task in pending_tasks:
            # Count every stalled task we re-queue, for monitoring.
            incr('cleanup_unfinished')
            task_func = TrackedTask(queue.get_task(task.task_name))
            task_func.delay(
                task_id=task.task_id.hex,
                parent_task_id=task.parent_id.hex if task.parent_id else None,
                **task.data['kwargs']
            )
|
b6db7abfd59a1b97fbb4d1b867e3316c029c94ff | spec/Report_S06_spec.py | spec/Report_S06_spec.py | from expects import expect, equal
from primestg.report import Report
from ast import literal_eval
with description('Report S06 example'):
with before.all:
self.data_filenames = [
'spec/data/S06.xml',
# 'spec/data/S06_empty.xml'
]
self.report = []
for data_filename in self.data_filenames:
with open(data_filename) as data_file:
self.report.append(Report(data_file))
with it('generates the expected results for the whole report'):
result_filenames = []
for data_filename in self.data_filenames:
result_filenames.append('{}_result.txt'.format(data_filename))
for key, result_filename in enumerate(result_filenames):
with open(result_filename) as result_file:
result_string = result_file.read()
expected_result = literal_eval(result_string)
result = self.report[key].values
expect(result).to(equal(expected_result))
# result_filename = '{}_result.txt'.format(self.data_filename)
#
# with open(result_filename) as result_file:
# result_string = result_file.read()
# self.expected_result = literal_eval(result_string)
#
# result = self.report.values
#
# expect(result).to(equal(self.expected_result))
| from expects import expect, equal
from primestg.report import Report
from ast import literal_eval
with description('Report S06 example'):
with before.all:
self.data_filenames = [
'spec/data/S06.xml',
'spec/data/S06_with_error.xml',
# 'spec/data/S06_empty.xml'
]
self.report = []
for data_filename in self.data_filenames:
with open(data_filename) as data_file:
self.report.append(Report(data_file))
with it('generates the expected results for the whole report'):
result_filenames = []
warnings = []
for data_filename in self.data_filenames:
result_filenames.append('{}_result.txt'.format(data_filename))
for key, result_filename in enumerate(result_filenames):
result = []
with open(result_filename) as result_file:
result_string = result_file.read()
expected_result = literal_eval(result_string)
for cnc in self.report[key].concentrators:
if cnc.meters:
for meter in cnc.meters:
for value in meter.values:
result.append(value)
warnings.append(meter.warnings)
print('Result: {} \n Expected result: {} \n Warnings: {}'.format(
result, expected_result, warnings))
expect(result).to(equal(expected_result))
expected_warnings = [[], ["ERROR: Cnc(CIR4621704174), "
"Meter(ZIV42553686). Thrown exception: "
"object of type 'NoneType' has no len()"], []]
expect(warnings).to(equal(expected_warnings))
| TEST for correct an with errors S06 report | TEST for correct an with errors S06 report
| Python | agpl-3.0 | gisce/primestg | from expects import expect, equal
from primestg.report import Report
from ast import literal_eval
with description('Report S06 example'):
with before.all:
self.data_filenames = [
'spec/data/S06.xml',
+ 'spec/data/S06_with_error.xml',
# 'spec/data/S06_empty.xml'
]
self.report = []
for data_filename in self.data_filenames:
with open(data_filename) as data_file:
self.report.append(Report(data_file))
with it('generates the expected results for the whole report'):
result_filenames = []
+ warnings = []
for data_filename in self.data_filenames:
result_filenames.append('{}_result.txt'.format(data_filename))
for key, result_filename in enumerate(result_filenames):
+ result = []
with open(result_filename) as result_file:
result_string = result_file.read()
expected_result = literal_eval(result_string)
+ for cnc in self.report[key].concentrators:
+ if cnc.meters:
+ for meter in cnc.meters:
+ for value in meter.values:
+ result.append(value)
+ warnings.append(meter.warnings)
+ print('Result: {} \n Expected result: {} \n Warnings: {}'.format(
+ result, expected_result, warnings))
- result = self.report[key].values
- expect(result).to(equal(expected_result))
- # result_filename = '{}_result.txt'.format(self.data_filename)
- #
- # with open(result_filename) as result_file:
- # result_string = result_file.read()
- # self.expected_result = literal_eval(result_string)
- #
- # result = self.report.values
- #
- # expect(result).to(equal(self.expected_result))
+ expect(result).to(equal(expected_result))
+ expected_warnings = [[], ["ERROR: Cnc(CIR4621704174), "
+ "Meter(ZIV42553686). Thrown exception: "
+ "object of type 'NoneType' has no len()"], []]
+ expect(warnings).to(equal(expected_warnings))
+ | TEST for correct an with errors S06 report | ## Code Before:
from expects import expect, equal
from primestg.report import Report
from ast import literal_eval
with description('Report S06 example'):
with before.all:
self.data_filenames = [
'spec/data/S06.xml',
# 'spec/data/S06_empty.xml'
]
self.report = []
for data_filename in self.data_filenames:
with open(data_filename) as data_file:
self.report.append(Report(data_file))
with it('generates the expected results for the whole report'):
result_filenames = []
for data_filename in self.data_filenames:
result_filenames.append('{}_result.txt'.format(data_filename))
for key, result_filename in enumerate(result_filenames):
with open(result_filename) as result_file:
result_string = result_file.read()
expected_result = literal_eval(result_string)
result = self.report[key].values
expect(result).to(equal(expected_result))
# result_filename = '{}_result.txt'.format(self.data_filename)
#
# with open(result_filename) as result_file:
# result_string = result_file.read()
# self.expected_result = literal_eval(result_string)
#
# result = self.report.values
#
# expect(result).to(equal(self.expected_result))
## Instruction:
Test S06 reports that are correct and ones that contain errors
## Code After:
from expects import expect, equal
from primestg.report import Report
from ast import literal_eval
# Mamba-style spec: exercises Report against a clean S06 file and one that
# triggers a parsing error inside a meter.
with description('Report S06 example'):
    with before.all:
        # Parse every fixture once up front; the reports are inspected below.
        self.data_filenames = [
            'spec/data/S06.xml',
            'spec/data/S06_with_error.xml',
            # 'spec/data/S06_empty.xml'
        ]

        self.report = []
        for data_filename in self.data_filenames:
            with open(data_filename) as data_file:
                self.report.append(Report(data_file))

    with it('generates the expected results for the whole report'):
        result_filenames = []
        warnings = []
        # Each fixture has a companion "<name>_result.txt" holding the literal
        # Python value the report is expected to produce.
        for data_filename in self.data_filenames:
            result_filenames.append('{}_result.txt'.format(data_filename))

        for key, result_filename in enumerate(result_filenames):
            result = []
            with open(result_filename) as result_file:
                result_string = result_file.read()
                expected_result = literal_eval(result_string)
            # Flatten every meter value of every concentrator, collecting the
            # per-meter warnings alongside.
            for cnc in self.report[key].concentrators:
                if cnc.meters:
                    for meter in cnc.meters:
                        for value in meter.values:
                            result.append(value)
                        warnings.append(meter.warnings)
            print('Result: {} \n Expected result: {} \n Warnings: {}'.format(
                result, expected_result, warnings))
            expect(result).to(equal(expected_result))
        # Warnings accumulate one entry per meter; only the meter in the
        # errored fixture is expected to contribute a non-empty entry.
        expected_warnings = [[], ["ERROR: Cnc(CIR4621704174), "
                                  "Meter(ZIV42553686). Thrown exception: "
                                  "object of type 'NoneType' has no len()"], []]
        expect(warnings).to(equal(expected_warnings))
|
d7ea1e9c7728b5e98e6c798ab3d5ef5b9066463c | barrage/basetestcases.py | barrage/basetestcases.py | from .baselauncher import BaseLauncher
class BaseTestCases(BaseLauncher):
def handle_problem_set(self, name, problems):
for i, prob in enumerate(problems):
answer_got = self.get_answer(prob, name, i, len(problems))
if not answer_got:
return False
if not prob.validate(answer_got):
try:
answer_expected = prob.Answer().for_problem(prob)
except NotImplementedError:
print("\nFAILED. STDIN:\n{}\nGOT:\n{}"
.format(prob.to_stdin(), stdout))
else:
print("\nFAILED. STDIN:\n{}\nEXPECTED:\n{}\nGOT:\n{}"
.format(prob.to_stdin(), answer_expected.to_stdout(), stdout))
return False
print("")
return True
| from .baselauncher import BaseLauncher
class BaseTestCases(BaseLauncher):
def handle_problem_set(self, name, problems):
for i, prob in enumerate(problems):
answer_got = self.get_answer(prob, name, i, len(problems))
if not answer_got:
return False
if not prob.validate(answer_got):
try:
answer_expected = prob.Answer().for_problem(prob)
except NotImplementedError:
print("\nFAILED. STDIN:\n{}\nGOT:\n{}"
.format(prob.to_stdin(), answer_got.to_stdout()))
else:
print("\nFAILED. STDIN:\n{}\nEXPECTED:\n{}\nGOT:\n{}"
.format(prob.to_stdin(), answer_expected.to_stdout(), answer_got.to_stdout()))
return False
print("")
return True
| Fix a bug with application stdout print | Fix a bug with application stdout print
| Python | mit | vnetserg/barrage | from .baselauncher import BaseLauncher
class BaseTestCases(BaseLauncher):
def handle_problem_set(self, name, problems):
for i, prob in enumerate(problems):
answer_got = self.get_answer(prob, name, i, len(problems))
if not answer_got:
return False
if not prob.validate(answer_got):
try:
answer_expected = prob.Answer().for_problem(prob)
except NotImplementedError:
print("\nFAILED. STDIN:\n{}\nGOT:\n{}"
- .format(prob.to_stdin(), stdout))
+ .format(prob.to_stdin(), answer_got.to_stdout()))
else:
print("\nFAILED. STDIN:\n{}\nEXPECTED:\n{}\nGOT:\n{}"
- .format(prob.to_stdin(), answer_expected.to_stdout(), stdout))
+ .format(prob.to_stdin(), answer_expected.to_stdout(), answer_got.to_stdout()))
return False
print("")
return True
| Fix a bug with application stdout print | ## Code Before:
from .baselauncher import BaseLauncher
class BaseTestCases(BaseLauncher):
def handle_problem_set(self, name, problems):
for i, prob in enumerate(problems):
answer_got = self.get_answer(prob, name, i, len(problems))
if not answer_got:
return False
if not prob.validate(answer_got):
try:
answer_expected = prob.Answer().for_problem(prob)
except NotImplementedError:
print("\nFAILED. STDIN:\n{}\nGOT:\n{}"
.format(prob.to_stdin(), stdout))
else:
print("\nFAILED. STDIN:\n{}\nEXPECTED:\n{}\nGOT:\n{}"
.format(prob.to_stdin(), answer_expected.to_stdout(), stdout))
return False
print("")
return True
## Instruction:
Fix a bug printing the application's stdout: use answer_got.to_stdout() instead of the undefined variable stdout
## Code After:
from .baselauncher import BaseLauncher
class BaseTestCases(BaseLauncher):
    """Launcher that runs a problem set and reports the first failure."""

    def handle_problem_set(self, name, problems):
        """Run every problem; return True only if all answers validate."""
        total = len(problems)
        for index, prob in enumerate(problems):
            answer_got = self.get_answer(prob, name, index, total)
            if not answer_got:
                return False
            if prob.validate(answer_got):
                continue
            # Validation failed: show the stdin that provoked it, plus the
            # expected answer when the problem can produce one.
            try:
                answer_expected = prob.Answer().for_problem(prob)
            except NotImplementedError:
                print("\nFAILED. STDIN:\n{}\nGOT:\n{}"
                      .format(prob.to_stdin(), answer_got.to_stdout()))
            else:
                print("\nFAILED. STDIN:\n{}\nEXPECTED:\n{}\nGOT:\n{}"
                      .format(prob.to_stdin(), answer_expected.to_stdout(), answer_got.to_stdout()))
            return False
        print("")
        return True
|
8a6bc4a46141b42d4457fdc4d63df234f788253d | django_nose/plugin.py | django_nose/plugin.py |
class ResultPlugin(object):
"""
Captures the TestResult object for later inspection.
nose doesn't return the full test result object from any of its runner
methods. Pass an instance of this plugin to the TestProgram and use
``result`` after running the tests to get the TestResult object.
"""
name = "result"
enabled = True
def finalize(self, result):
self.result = result
class DjangoSetUpPlugin(object):
"""
Configures Django to setup and tear down the environment.
This allows coverage to report on all code imported and used during the
initialisation of the test runner.
"""
name = "django setup"
enabled = True
# We need this to run before the coverage plugin (which has a score
# of 500), so that we still have a stdout for the user interaction
# Django sometimes wants to do during test database setup.
score = 700
def __init__(self, runner):
super(DjangoSetUpPlugin, self).__init__()
self.runner = runner
def begin(self):
"""Setup the environment"""
self.runner.setup_test_environment()
self.old_names = self.runner.setup_databases()
def finalize(self, result):
"""Destroy the environment"""
self.runner.teardown_databases(self.old_names)
self.runner.teardown_test_environment()
| import sys
class ResultPlugin(object):
"""
Captures the TestResult object for later inspection.
nose doesn't return the full test result object from any of its runner
methods. Pass an instance of this plugin to the TestProgram and use
``result`` after running the tests to get the TestResult object.
"""
name = "result"
enabled = True
def finalize(self, result):
self.result = result
class DjangoSetUpPlugin(object):
"""
Configures Django to setup and tear down the environment.
This allows coverage to report on all code imported and used during the
initialisation of the test runner.
"""
name = "django setup"
enabled = True
def __init__(self, runner):
super(DjangoSetUpPlugin, self).__init__()
self.runner = runner
self.sys_stdout = sys.stdout
def begin(self):
"""Setup the environment"""
sys_stdout = sys.stdout
sys.stdout = self.sys_stdout
self.runner.setup_test_environment()
self.old_names = self.runner.setup_databases()
sys.stdout = sys_stdout
def finalize(self, result):
"""Destroy the environment"""
self.runner.teardown_databases(self.old_names)
self.runner.teardown_test_environment()
| Allow coverage to work and keep stdout and be activated before initial imports. | Allow coverage to work and keep stdout and be activated before initial imports.
| Python | bsd-3-clause | aristiden7o/django-nose,harukaeru/django-nose,disqus/django-nose,dgladkov/django-nose,mzdaniel/django-nose,sociateru/django-nose,krinart/django-nose,alexhayes/django-nose,daineX/django-nose,harukaeru/django-nose,mzdaniel/django-nose,Deepomatic/django-nose,krinart/django-nose,fabiosantoscode/django-nose-123-fix,alexhayes/django-nose,daineX/django-nose,dgladkov/django-nose,sociateru/django-nose,aristiden7o/django-nose,millerdev/django-nose,Deepomatic/django-nose,franciscoruiz/django-nose,360youlun/django-nose,disqus/django-nose,franciscoruiz/django-nose,fabiosantoscode/django-nose-123-fix,millerdev/django-nose,brilliant-org/django-nose,360youlun/django-nose,brilliant-org/django-nose | -
+ import sys
class ResultPlugin(object):
"""
Captures the TestResult object for later inspection.
nose doesn't return the full test result object from any of its runner
methods. Pass an instance of this plugin to the TestProgram and use
``result`` after running the tests to get the TestResult object.
"""
name = "result"
enabled = True
def finalize(self, result):
self.result = result
class DjangoSetUpPlugin(object):
"""
Configures Django to setup and tear down the environment.
This allows coverage to report on all code imported and used during the
initialisation of the test runner.
"""
-
name = "django setup"
enabled = True
-
- # We need this to run before the coverage plugin (which has a score
- # of 500), so that we still have a stdout for the user interaction
- # Django sometimes wants to do during test database setup.
- score = 700
def __init__(self, runner):
super(DjangoSetUpPlugin, self).__init__()
self.runner = runner
+ self.sys_stdout = sys.stdout
def begin(self):
"""Setup the environment"""
+ sys_stdout = sys.stdout
+ sys.stdout = self.sys_stdout
+
self.runner.setup_test_environment()
self.old_names = self.runner.setup_databases()
+
+ sys.stdout = sys_stdout
def finalize(self, result):
"""Destroy the environment"""
self.runner.teardown_databases(self.old_names)
self.runner.teardown_test_environment()
| Allow coverage to work and keep stdout and be activated before initial imports. | ## Code Before:
class ResultPlugin(object):
"""
Captures the TestResult object for later inspection.
nose doesn't return the full test result object from any of its runner
methods. Pass an instance of this plugin to the TestProgram and use
``result`` after running the tests to get the TestResult object.
"""
name = "result"
enabled = True
def finalize(self, result):
self.result = result
class DjangoSetUpPlugin(object):
"""
Configures Django to setup and tear down the environment.
This allows coverage to report on all code imported and used during the
initialisation of the test runner.
"""
name = "django setup"
enabled = True
# We need this to run before the coverage plugin (which has a score
# of 500), so that we still have a stdout for the user interaction
# Django sometimes wants to do during test database setup.
score = 700
def __init__(self, runner):
super(DjangoSetUpPlugin, self).__init__()
self.runner = runner
def begin(self):
"""Setup the environment"""
self.runner.setup_test_environment()
self.old_names = self.runner.setup_databases()
def finalize(self, result):
"""Destroy the environment"""
self.runner.teardown_databases(self.old_names)
self.runner.teardown_test_environment()
## Instruction:
Allow coverage to be activated before the initial imports while keeping the real stdout available for Django's interactive prompts.
## Code After:
import sys
class ResultPlugin(object):
    """Capture the TestResult object for later inspection.

    nose doesn't return the full test result object from any of its runner
    methods.  Pass an instance of this plugin to the TestProgram and read
    ``result`` after the run to obtain the TestResult object.
    """
    name = "result"
    enabled = True

    def finalize(self, result):
        # Stash the result so callers can inspect it once the run is over.
        self.result = result
class DjangoSetUpPlugin(object):
    """Configure Django to set up and tear down the test environment.

    Doing the setup from a plugin lets coverage report on all code imported
    and used while the test runner initialises.
    """
    name = "django setup"
    enabled = True

    def __init__(self, runner):
        super(DjangoSetUpPlugin, self).__init__()
        self.runner = runner
        # Remember the real stdout captured at construction time, so Django's
        # interactive database-setup prompts still reach the user even if
        # another plugin replaces sys.stdout later.
        self.sys_stdout = sys.stdout

    def begin(self):
        """Set up the Django test environment and databases."""
        current_stdout = sys.stdout
        sys.stdout = self.sys_stdout

        self.runner.setup_test_environment()
        self.old_names = self.runner.setup_databases()

        sys.stdout = current_stdout

    def finalize(self, result):
        """Tear down the databases and the test environment."""
        self.runner.teardown_databases(self.old_names)
        self.runner.teardown_test_environment()
|
8ae27080b8ff9fe124733005a8006261a3d22266 | migrate/crud/versions/001_create_initial_tables.py | migrate/crud/versions/001_create_initial_tables.py | from sqlalchemy import *
from migrate import *
metadata = MetaData()
table = Table('crud_versions', metadata,
Column('id', Integer, primary_key=True),
Column('object_type', Text, nullable=False),
Column('object_id', Integer, nullable=False),
Column('commit_time', DateTime, nullable=False),
Column('data', Blob, nullable=False),
Column('blame', Text, nullable=False),
Column('comment', Text, nullable=False),
)
def upgrade(migrate_engine):
# Upgrade operations go here. Don't create your own engine; bind migrate_engine
# to your metadata
metadata.bind = migrate_engine
table.create()
def downgrade(migrate_engine):
# Operations to reverse the above upgrade go here.
table.drop()
| from sqlalchemy import *
from migrate import *
metadata = MetaData()
table = Table('crud_versions', metadata,
Column('id', Integer, primary_key=True),
Column('object_type', Text, nullable=False),
Column('object_id', Integer, nullable=False),
Column('commit_time', DateTime, nullable=False),
Column('data', LargeBinary, nullable=False),
Column('blame', Text, nullable=False),
Column('comment', Text, nullable=False),
)
def upgrade(migrate_engine):
# Upgrade operations go here. Don't create your own engine; bind migrate_engine
# to your metadata
metadata.bind = migrate_engine
table.create()
def downgrade(migrate_engine):
# Operations to reverse the above upgrade go here.
table.drop()
| Fix some of the schema. | Fix some of the schema. | Python | bsd-3-clause | mikeboers/Nitrogen,mikeboers/Nitrogen,mikeboers/Nitrogen,mikeboers/Nitrogen,mikeboers/Nitrogen,mikeboers/Nitrogen | from sqlalchemy import *
from migrate import *
metadata = MetaData()
table = Table('crud_versions', metadata,
Column('id', Integer, primary_key=True),
Column('object_type', Text, nullable=False),
Column('object_id', Integer, nullable=False),
Column('commit_time', DateTime, nullable=False),
- Column('data', Blob, nullable=False),
+ Column('data', LargeBinary, nullable=False),
Column('blame', Text, nullable=False),
Column('comment', Text, nullable=False),
)
def upgrade(migrate_engine):
# Upgrade operations go here. Don't create your own engine; bind migrate_engine
# to your metadata
metadata.bind = migrate_engine
table.create()
def downgrade(migrate_engine):
# Operations to reverse the above upgrade go here.
table.drop()
| Fix some of the schema. | ## Code Before:
from sqlalchemy import *
from migrate import *
metadata = MetaData()
table = Table('crud_versions', metadata,
Column('id', Integer, primary_key=True),
Column('object_type', Text, nullable=False),
Column('object_id', Integer, nullable=False),
Column('commit_time', DateTime, nullable=False),
Column('data', Blob, nullable=False),
Column('blame', Text, nullable=False),
Column('comment', Text, nullable=False),
)
def upgrade(migrate_engine):
# Upgrade operations go here. Don't create your own engine; bind migrate_engine
# to your metadata
metadata.bind = migrate_engine
table.create()
def downgrade(migrate_engine):
# Operations to reverse the above upgrade go here.
table.drop()
## Instruction:
Fix the schema: use SQLAlchemy's LargeBinary type for the data column.
## Code After:
from sqlalchemy import *
from migrate import *
metadata = MetaData()

# Version-history table: one row per committed revision of a CRUD object.
# `data` presumably holds the serialized object snapshot — TODO confirm;
# `blame` and `comment` record who made the change and why.
table = Table('crud_versions', metadata,
    Column('id', Integer, primary_key=True),
    Column('object_type', Text, nullable=False),
    Column('object_id', Integer, nullable=False),
    Column('commit_time', DateTime, nullable=False),
    Column('data', LargeBinary, nullable=False),
    Column('blame', Text, nullable=False),
    Column('comment', Text, nullable=False),
)
def upgrade(migrate_engine):
    """Create the crud_versions table on the given engine."""
    # Upgrade operations go here. Don't create your own engine; bind migrate_engine
    # to your metadata
    metadata.bind = migrate_engine
    table.create()
def downgrade(migrate_engine):
    """Drop the crud_versions table, reversing upgrade()."""
    # Operations to reverse the above upgrade go here.
    table.drop()
|
91acec032abeb942bf90d6522a4d9d38ad624d46 | tests/test_buffs.py | tests/test_buffs.py | import unittest
from buffs import *
class StatusEffectTests(unittest.TestCase):
"""
StatusEffect is the base class for buffs
"""
def test_init(self):
test_name = 'testman'
test_duration = 10
st_ef = StatusEffect(name=test_name, duration=test_duration)
self.assertEqual(st_ef.name, test_name)
self.assertEqual(st_ef.duration, test_duration)
def test_str(self):
test_name = 'testman'
test_duration = 10
st_ef = StatusEffect(name=test_name, duration=test_duration)
expected_str = "Default Status Effect"
self.assertEqual(str(st_ef), "Default Status Effect")
if __name__ == '__main__':
unittest.main()
| import unittest
from buffs import *
class StatusEffectTests(unittest.TestCase):
"""
StatusEffect is the base class for buffs
"""
def test_init(self):
test_name = 'testman'
test_duration = 10
st_ef = StatusEffect(name=test_name, duration=test_duration)
self.assertEqual(st_ef.name, test_name)
self.assertEqual(st_ef.duration, test_duration)
def test_str(self):
test_name = 'testman'
test_duration = 10
st_ef = StatusEffect(name=test_name, duration=test_duration)
expected_str = "Default Status Effect"
self.assertEqual(str(st_ef), "Default Status Effect")
class BeneficialBuffTests(unittest.TestCase):
def test_init(self):
name = 'BMW'
stats_amounts = [('strength', 10), ('armor', 20), ('health', 30)]
duration = 10
buff = BeneficialBuff(name=name, buff_stats_and_amounts=stats_amounts, duration=duration)
self.assertEqual(buff.name, name)
self.assertEqual(buff.buff_stats_and_amounts, stats_amounts)
self.assertEqual(buff.duration, duration)
if __name__ == '__main__':
unittest.main()
| Test for the BeneficialBuff class | Test for the BeneficialBuff class
| Python | mit | Enether/python_wow | import unittest
from buffs import *
class StatusEffectTests(unittest.TestCase):
"""
StatusEffect is the base class for buffs
"""
def test_init(self):
test_name = 'testman'
test_duration = 10
st_ef = StatusEffect(name=test_name, duration=test_duration)
self.assertEqual(st_ef.name, test_name)
self.assertEqual(st_ef.duration, test_duration)
def test_str(self):
test_name = 'testman'
test_duration = 10
st_ef = StatusEffect(name=test_name, duration=test_duration)
expected_str = "Default Status Effect"
self.assertEqual(str(st_ef), "Default Status Effect")
+ class BeneficialBuffTests(unittest.TestCase):
+ def test_init(self):
+ name = 'BMW'
+ stats_amounts = [('strength', 10), ('armor', 20), ('health', 30)]
+ duration = 10
+
+ buff = BeneficialBuff(name=name, buff_stats_and_amounts=stats_amounts, duration=duration)
+
+ self.assertEqual(buff.name, name)
+ self.assertEqual(buff.buff_stats_and_amounts, stats_amounts)
+ self.assertEqual(buff.duration, duration)
+
+
if __name__ == '__main__':
unittest.main()
| Test for the BeneficialBuff class | ## Code Before:
import unittest
from buffs import *
class StatusEffectTests(unittest.TestCase):
"""
StatusEffect is the base class for buffs
"""
def test_init(self):
test_name = 'testman'
test_duration = 10
st_ef = StatusEffect(name=test_name, duration=test_duration)
self.assertEqual(st_ef.name, test_name)
self.assertEqual(st_ef.duration, test_duration)
def test_str(self):
test_name = 'testman'
test_duration = 10
st_ef = StatusEffect(name=test_name, duration=test_duration)
expected_str = "Default Status Effect"
self.assertEqual(str(st_ef), "Default Status Effect")
if __name__ == '__main__':
unittest.main()
## Instruction:
Test for the BeneficialBuff class
## Code After:
import unittest
from buffs import *
class StatusEffectTests(unittest.TestCase):
    """
    StatusEffect is the base class for buffs
    """
    def test_init(self):
        """A StatusEffect keeps the name and duration it was built with."""
        test_name = 'testman'
        test_duration = 10
        st_ef = StatusEffect(name=test_name, duration=test_duration)

        self.assertEqual(st_ef.name, test_name)
        self.assertEqual(st_ef.duration, test_duration)

    def test_str(self):
        """str() of the base class yields the generic default message."""
        test_name = 'testman'
        test_duration = 10
        st_ef = StatusEffect(name=test_name, duration=test_duration)
        expected_str = "Default Status Effect"

        # Use the named expectation instead of repeating the literal
        # (previously `expected_str` was assigned but never used).
        self.assertEqual(str(st_ef), expected_str)
class BeneficialBuffTests(unittest.TestCase):
    """Construction tests for BeneficialBuff."""

    def test_init(self):
        """The buff keeps its name, stat/amount pairs and duration."""
        buff_name = 'BMW'
        boosted_stats = [('strength', 10), ('armor', 20), ('health', 30)]
        buff_duration = 10

        buff = BeneficialBuff(name=buff_name,
                              buff_stats_and_amounts=boosted_stats,
                              duration=buff_duration)

        self.assertEqual(buff.name, buff_name)
        self.assertEqual(buff.buff_stats_and_amounts, boosted_stats)
        self.assertEqual(buff.duration, buff_duration)
# Allow running this module directly as well as via a test runner.
if __name__ == '__main__':
    unittest.main()
|
6430785e60fcef9bbac3cf4e7c70981f5af6affa | fluent_contents/plugins/sharedcontent/models.py | fluent_contents/plugins/sharedcontent/models.py | from django.db import models
from django.utils.translation import ugettext_lazy as _
from parler.models import TranslatableModel, TranslatedFields
from fluent_contents.models import ContentItem, PlaceholderField
class SharedContent(TranslatableModel):
"""
The parent hosting object for shared content
"""
translations = TranslatedFields(
title = models.CharField(_("Title"), max_length=200)
)
slug = models.SlugField(_("Template code"), unique=True, help_text=_("This unique name can be used refer to this content in in templates."))
contents = PlaceholderField("shared_content", verbose_name=_("Contents"))
# NOTE: settings such as "template_name", and which plugins are allowed can be added later.
class Meta:
verbose_name = _("Shared content")
verbose_name_plural = _("Shared content")
def __unicode__(self):
return self.title
class SharedContentItem(ContentItem):
"""
The contentitem to include in a page.
"""
shared_content = models.ForeignKey(SharedContent, verbose_name=_('Shared content'), related_name='shared_content_items')
class Meta:
verbose_name = _('Shared content')
verbose_name_plural = _('Shared content')
def __unicode__(self):
return unicode(self.shared_content)
| from django.db import models
from django.utils.translation import ugettext_lazy as _
from parler.models import TranslatableModel, TranslatedFields
from fluent_contents.models import ContentItem, PlaceholderField, ContentItemRelation
class SharedContent(TranslatableModel):
    """
    The parent hosting object for shared content
    """
    translations = TranslatedFields(
        title = models.CharField(_("Title"), max_length=200)
    )

    # Unique, template-facing identifier for this shared block.
    slug = models.SlugField(_("Template code"), unique=True, help_text=_("This unique name can be used refer to this content in in templates."))

    # The actual contents live in a placeholder slot named "shared_content".
    contents = PlaceholderField("shared_content", verbose_name=_("Contents"))

    # NOTE: settings such as "template_name", and which plugins are allowed can be added later.

    # Adding the reverse relation for ContentItem objects
    # causes the admin to list these objects when moving the shared content
    contentitem_set = ContentItemRelation()

    class Meta:
        verbose_name = _("Shared content")
        verbose_name_plural = _("Shared content")

    def __unicode__(self):
        return self.title
class SharedContentItem(ContentItem):
"""
The contentitem to include in a page.
"""
shared_content = models.ForeignKey(SharedContent, verbose_name=_('Shared content'), related_name='shared_content_items')
class Meta:
verbose_name = _('Shared content')
verbose_name_plural = _('Shared content')
def __unicode__(self):
return unicode(self.shared_content)
| Add ContentItemRelation to SharedContent model | Add ContentItemRelation to SharedContent model
Displays objects in the admin delete screen.
| Python | apache-2.0 | jpotterm/django-fluent-contents,django-fluent/django-fluent-contents,django-fluent/django-fluent-contents,ixc/django-fluent-contents,edoburu/django-fluent-contents,jpotterm/django-fluent-contents,django-fluent/django-fluent-contents,pombredanne/django-fluent-contents,jpotterm/django-fluent-contents,pombredanne/django-fluent-contents,ixc/django-fluent-contents,pombredanne/django-fluent-contents,ixc/django-fluent-contents,edoburu/django-fluent-contents,edoburu/django-fluent-contents | from django.db import models
from django.utils.translation import ugettext_lazy as _
from parler.models import TranslatableModel, TranslatedFields
- from fluent_contents.models import ContentItem, PlaceholderField
+ from fluent_contents.models import ContentItem, PlaceholderField, ContentItemRelation
class SharedContent(TranslatableModel):
"""
The parent hosting object for shared content
"""
translations = TranslatedFields(
title = models.CharField(_("Title"), max_length=200)
)
slug = models.SlugField(_("Template code"), unique=True, help_text=_("This unique name can be used refer to this content in in templates."))
contents = PlaceholderField("shared_content", verbose_name=_("Contents"))
# NOTE: settings such as "template_name", and which plugins are allowed can be added later.
+
+ # Adding the reverse relation for ContentItem objects
+ # causes the admin to list these objects when moving the shared content
+ contentitem_set = ContentItemRelation()
class Meta:
verbose_name = _("Shared content")
verbose_name_plural = _("Shared content")
def __unicode__(self):
return self.title
class SharedContentItem(ContentItem):
"""
The contentitem to include in a page.
"""
shared_content = models.ForeignKey(SharedContent, verbose_name=_('Shared content'), related_name='shared_content_items')
class Meta:
verbose_name = _('Shared content')
verbose_name_plural = _('Shared content')
def __unicode__(self):
return unicode(self.shared_content)
| Add ContentItemRelation to SharedContent model | ## Code Before:
from django.db import models
from django.utils.translation import ugettext_lazy as _
from parler.models import TranslatableModel, TranslatedFields
from fluent_contents.models import ContentItem, PlaceholderField
class SharedContent(TranslatableModel):
"""
The parent hosting object for shared content
"""
translations = TranslatedFields(
title = models.CharField(_("Title"), max_length=200)
)
slug = models.SlugField(_("Template code"), unique=True, help_text=_("This unique name can be used refer to this content in in templates."))
contents = PlaceholderField("shared_content", verbose_name=_("Contents"))
# NOTE: settings such as "template_name", and which plugins are allowed can be added later.
class Meta:
verbose_name = _("Shared content")
verbose_name_plural = _("Shared content")
def __unicode__(self):
return self.title
class SharedContentItem(ContentItem):
"""
The contentitem to include in a page.
"""
shared_content = models.ForeignKey(SharedContent, verbose_name=_('Shared content'), related_name='shared_content_items')
class Meta:
verbose_name = _('Shared content')
verbose_name_plural = _('Shared content')
def __unicode__(self):
return unicode(self.shared_content)
## Instruction:
Add ContentItemRelation to SharedContent model
## Code After:
from django.db import models
from django.utils.translation import ugettext_lazy as _
from parler.models import TranslatableModel, TranslatedFields
from fluent_contents.models import ContentItem, PlaceholderField, ContentItemRelation
class SharedContent(TranslatableModel):
"""
The parent hosting object for shared content
"""
translations = TranslatedFields(
title = models.CharField(_("Title"), max_length=200)
)
slug = models.SlugField(_("Template code"), unique=True, help_text=_("This unique name can be used refer to this content in in templates."))
contents = PlaceholderField("shared_content", verbose_name=_("Contents"))
# NOTE: settings such as "template_name", and which plugins are allowed can be added later.
# Adding the reverse relation for ContentItem objects
# causes the admin to list these objects when moving the shared content
contentitem_set = ContentItemRelation()
class Meta:
verbose_name = _("Shared content")
verbose_name_plural = _("Shared content")
def __unicode__(self):
return self.title
class SharedContentItem(ContentItem):
"""
The contentitem to include in a page.
"""
shared_content = models.ForeignKey(SharedContent, verbose_name=_('Shared content'), related_name='shared_content_items')
class Meta:
verbose_name = _('Shared content')
verbose_name_plural = _('Shared content')
def __unicode__(self):
return unicode(self.shared_content)
|
fc21802b68cf9a907218dab5b0e22cd8f1dc75d0 | djcelery/backends/database.py | djcelery/backends/database.py | from celery.backends.base import BaseDictBackend
from djcelery.models import TaskMeta, TaskSetMeta
class DatabaseBackend(BaseDictBackend):
"""The database backends. Using Django models to store task metadata."""
def _store_result(self, task_id, result, status, traceback=None):
"""Store return value and status of an executed task."""
TaskMeta.objects.store_result(task_id, result, status,
traceback=traceback)
return result
def _save_taskset(self, taskset_id, result):
"""Store the result of an executed taskset."""
TaskSetMeta.objects.store_result(taskset_id, result)
return result
def _get_task_meta_for(self, task_id):
"""Get task metadata for a task by id."""
meta = TaskMeta.objects.get_task(task_id)
if meta:
return meta.to_dict()
def _restore_taskset(self, taskset_id):
"""Get taskset metadata for a taskset by id."""
meta = TaskSetMeta.objects.restore_taskset(taskset_id)
if meta:
return meta.to_dict()
def cleanup(self):
"""Delete expired metadata."""
TaskMeta.objects.delete_expired()
TaskSetMeta.objects.delete_expired()
| from celery.backends.base import BaseDictBackend
from djcelery.models import TaskMeta, TaskSetMeta
class DatabaseBackend(BaseDictBackend):
"""The database backends. Using Django models to store task metadata."""
TaskModel = TaskMeta
TaskSetModel = TaskSetMeta
def _store_result(self, task_id, result, status, traceback=None):
"""Store return value and status of an executed task."""
self.TaskModel._default_manager.store_result(task_id, result, status,
traceback=traceback)
return result
def _save_taskset(self, taskset_id, result):
"""Store the result of an executed taskset."""
self.TaskModel._default_manager.store_result(taskset_id, result)
return result
def _get_task_meta_for(self, task_id):
"""Get task metadata for a task by id."""
meta = self.TaskModel._default_manager.get_task(task_id)
if meta:
return meta.to_dict()
def _restore_taskset(self, taskset_id):
"""Get taskset metadata for a taskset by id."""
meta = self.TaskSetModel._default_manager.restore_taskset(taskset_id)
if meta:
return meta.to_dict()
def cleanup(self):
"""Delete expired metadata."""
for model in self.TaskModel, self.TaskSetModel:
model._default_manager.delete_expired()
| Make it possible to override the models used to store task/taskset state | DatabaseBackend: Make it possible to override the models used to store task/taskset state
| Python | bsd-3-clause | Amanit/django-celery,kanemra/django-celery,axiom-data-science/django-celery,celery/django-celery,alexhayes/django-celery,digimarc/django-celery,tkanemoto/django-celery,iris-edu-int/django-celery,CloudNcodeInc/django-celery,Amanit/django-celery,CloudNcodeInc/django-celery,iris-edu-int/django-celery,CloudNcodeInc/django-celery,celery/django-celery,planorama/django-celery,nadios/django-celery,ask/django-celery,nadios/django-celery,georgewhewell/django-celery,iris-edu-int/django-celery,celery/django-celery,digimarc/django-celery,planorama/django-celery,georgewhewell/django-celery,ask/django-celery,digimarc/django-celery,tkanemoto/django-celery,georgewhewell/django-celery,alexhayes/django-celery,Amanit/django-celery,tkanemoto/django-celery,kanemra/django-celery,axiom-data-science/django-celery,axiom-data-science/django-celery,kanemra/django-celery | from celery.backends.base import BaseDictBackend
from djcelery.models import TaskMeta, TaskSetMeta
class DatabaseBackend(BaseDictBackend):
"""The database backends. Using Django models to store task metadata."""
+ TaskModel = TaskMeta
+ TaskSetModel = TaskSetMeta
def _store_result(self, task_id, result, status, traceback=None):
"""Store return value and status of an executed task."""
- TaskMeta.objects.store_result(task_id, result, status,
+ self.TaskModel._default_manager.store_result(task_id, result, status,
- traceback=traceback)
+ traceback=traceback)
return result
def _save_taskset(self, taskset_id, result):
"""Store the result of an executed taskset."""
- TaskSetMeta.objects.store_result(taskset_id, result)
+ self.TaskModel._default_manager.store_result(taskset_id, result)
return result
def _get_task_meta_for(self, task_id):
"""Get task metadata for a task by id."""
- meta = TaskMeta.objects.get_task(task_id)
+ meta = self.TaskModel._default_manager.get_task(task_id)
if meta:
return meta.to_dict()
def _restore_taskset(self, taskset_id):
"""Get taskset metadata for a taskset by id."""
- meta = TaskSetMeta.objects.restore_taskset(taskset_id)
+ meta = self.TaskSetModel._default_manager.restore_taskset(taskset_id)
if meta:
return meta.to_dict()
def cleanup(self):
"""Delete expired metadata."""
- TaskMeta.objects.delete_expired()
- TaskSetMeta.objects.delete_expired()
+ for model in self.TaskModel, self.TaskSetModel:
+ model._default_manager.delete_expired()
| Make it possible to override the models used to store task/taskset state | ## Code Before:
from celery.backends.base import BaseDictBackend
from djcelery.models import TaskMeta, TaskSetMeta
class DatabaseBackend(BaseDictBackend):
"""The database backends. Using Django models to store task metadata."""
def _store_result(self, task_id, result, status, traceback=None):
"""Store return value and status of an executed task."""
TaskMeta.objects.store_result(task_id, result, status,
traceback=traceback)
return result
def _save_taskset(self, taskset_id, result):
"""Store the result of an executed taskset."""
TaskSetMeta.objects.store_result(taskset_id, result)
return result
def _get_task_meta_for(self, task_id):
"""Get task metadata for a task by id."""
meta = TaskMeta.objects.get_task(task_id)
if meta:
return meta.to_dict()
def _restore_taskset(self, taskset_id):
"""Get taskset metadata for a taskset by id."""
meta = TaskSetMeta.objects.restore_taskset(taskset_id)
if meta:
return meta.to_dict()
def cleanup(self):
"""Delete expired metadata."""
TaskMeta.objects.delete_expired()
TaskSetMeta.objects.delete_expired()
## Instruction:
Make it possible to override the models used to store task/taskset state
## Code After:
from celery.backends.base import BaseDictBackend
from djcelery.models import TaskMeta, TaskSetMeta
class DatabaseBackend(BaseDictBackend):
"""The database backends. Using Django models to store task metadata."""
TaskModel = TaskMeta
TaskSetModel = TaskSetMeta
def _store_result(self, task_id, result, status, traceback=None):
"""Store return value and status of an executed task."""
self.TaskModel._default_manager.store_result(task_id, result, status,
traceback=traceback)
return result
def _save_taskset(self, taskset_id, result):
"""Store the result of an executed taskset."""
self.TaskModel._default_manager.store_result(taskset_id, result)
return result
def _get_task_meta_for(self, task_id):
"""Get task metadata for a task by id."""
meta = self.TaskModel._default_manager.get_task(task_id)
if meta:
return meta.to_dict()
def _restore_taskset(self, taskset_id):
"""Get taskset metadata for a taskset by id."""
meta = self.TaskSetModel._default_manager.restore_taskset(taskset_id)
if meta:
return meta.to_dict()
def cleanup(self):
"""Delete expired metadata."""
for model in self.TaskModel, self.TaskSetModel:
model._default_manager.delete_expired()
|
97535245f7da3d7e54d64dc384d6cd81caa9a689 | tests/test_story.py | tests/test_story.py | from py101 import Story
from py101 import variables
from py101 import lists
import unittest
class TestStory(unittest.TestCase):
def test_name(self):
self.assertEqual(Story().name, 'py101', "name should be py101")
class TestAdventureVariables(unittest.TestCase):
good_solution = """
myinteger = 4
mystring = 'Python String Here'
print(myinteger)
print(mystring)
"""
def test_solution(self):
test = variables.TestOutput(self.good_solution)
test.setUp()
try:
test.runTest()
finally:
test.tearDown()
class TestAdventureLists(unittest.TestCase):
good_solution = """
languages = ["ADA", "Pascal", "Fortran", "Smalltalk"]
print(languages)
"""
def test_solution(self):
test = lists.TestOutput(self.good_solution)
test.setUp()
try:
test.runTest()
finally:
test.tearDown()
| import py101
import py101.boilerplate
import py101.introduction
import py101.lists
import py101.variables
import unittest
class TestStory(unittest.TestCase):
def test_name(self):
self.assertEqual(py101.Story().name, 'py101', "name should be py101")
class AdventureData(object):
def __init__(self, test_module, good_solution):
self.module = test_module
self.good_solution = good_solution
class TestAdventures(unittest.TestCase):
adventures = [
AdventureData(
py101.boilerplate,
""
),
AdventureData(
py101.introduction,
"""print('Hello World')"""
),
AdventureData(
py101.variables,
"""myinteger = 4; mystring = 'Python String Here'; print(myinteger); print(mystring)"""
),
AdventureData(
py101.lists,
"""languages = ["ADA", "Pascal", "Fortran", "Smalltalk"]; print(languages)"""
)
]
def test_solution(self):
for adventure in self.adventures:
with self.subTest(adventure=adventure.module.__name__):
test = adventure.module.TestOutput(adventure.good_solution)
test.setUp()
try:
test.runTest()
finally:
test.tearDown()
| Refactor tests to remove duplicate code | Refactor tests to remove duplicate code
| Python | mit | sophilabs/py101 | - from py101 import Story
- from py101 import variables
- from py101 import lists
+ import py101
+ import py101.boilerplate
+ import py101.introduction
+ import py101.lists
+ import py101.variables
import unittest
class TestStory(unittest.TestCase):
def test_name(self):
- self.assertEqual(Story().name, 'py101', "name should be py101")
+ self.assertEqual(py101.Story().name, 'py101', "name should be py101")
+ class AdventureData(object):
+ def __init__(self, test_module, good_solution):
+ self.module = test_module
+ self.good_solution = good_solution
+
+
- class TestAdventureVariables(unittest.TestCase):
+ class TestAdventures(unittest.TestCase):
- good_solution = """
- myinteger = 4
- mystring = 'Python String Here'
- print(myinteger)
- print(mystring)
- """
+ adventures = [
+ AdventureData(
+ py101.boilerplate,
+ ""
+ ),
+ AdventureData(
+ py101.introduction,
+ """print('Hello World')"""
+ ),
+ AdventureData(
+ py101.variables,
+ """myinteger = 4; mystring = 'Python String Here'; print(myinteger); print(mystring)"""
+ ),
+ AdventureData(
+ py101.lists,
+ """languages = ["ADA", "Pascal", "Fortran", "Smalltalk"]; print(languages)"""
+ )
+ ]
def test_solution(self):
- test = variables.TestOutput(self.good_solution)
+ for adventure in self.adventures:
+ with self.subTest(adventure=adventure.module.__name__):
+ test = adventure.module.TestOutput(adventure.good_solution)
- test.setUp()
+ test.setUp()
- try:
+ try:
- test.runTest()
+ test.runTest()
- finally:
+ finally:
- test.tearDown()
+ test.tearDown()
-
- class TestAdventureLists(unittest.TestCase):
- good_solution = """
- languages = ["ADA", "Pascal", "Fortran", "Smalltalk"]
- print(languages)
- """
-
- def test_solution(self):
- test = lists.TestOutput(self.good_solution)
- test.setUp()
- try:
- test.runTest()
- finally:
- test.tearDown()
-
- | Refactor tests to remove duplicate code | ## Code Before:
from py101 import Story
from py101 import variables
from py101 import lists
import unittest
class TestStory(unittest.TestCase):
def test_name(self):
self.assertEqual(Story().name, 'py101', "name should be py101")
class TestAdventureVariables(unittest.TestCase):
good_solution = """
myinteger = 4
mystring = 'Python String Here'
print(myinteger)
print(mystring)
"""
def test_solution(self):
test = variables.TestOutput(self.good_solution)
test.setUp()
try:
test.runTest()
finally:
test.tearDown()
class TestAdventureLists(unittest.TestCase):
good_solution = """
languages = ["ADA", "Pascal", "Fortran", "Smalltalk"]
print(languages)
"""
def test_solution(self):
test = lists.TestOutput(self.good_solution)
test.setUp()
try:
test.runTest()
finally:
test.tearDown()
## Instruction:
Refactor tests to remove duplicate code
## Code After:
import py101
import py101.boilerplate
import py101.introduction
import py101.lists
import py101.variables
import unittest
class TestStory(unittest.TestCase):
def test_name(self):
self.assertEqual(py101.Story().name, 'py101', "name should be py101")
class AdventureData(object):
def __init__(self, test_module, good_solution):
self.module = test_module
self.good_solution = good_solution
class TestAdventures(unittest.TestCase):
adventures = [
AdventureData(
py101.boilerplate,
""
),
AdventureData(
py101.introduction,
"""print('Hello World')"""
),
AdventureData(
py101.variables,
"""myinteger = 4; mystring = 'Python String Here'; print(myinteger); print(mystring)"""
),
AdventureData(
py101.lists,
"""languages = ["ADA", "Pascal", "Fortran", "Smalltalk"]; print(languages)"""
)
]
def test_solution(self):
for adventure in self.adventures:
with self.subTest(adventure=adventure.module.__name__):
test = adventure.module.TestOutput(adventure.good_solution)
test.setUp()
try:
test.runTest()
finally:
test.tearDown()
|
510afd0c93c333e86511fb6f6b9e96a434d54d00 | zerver/migrations/0174_userprofile_delivery_email.py | zerver/migrations/0174_userprofile_delivery_email.py | from __future__ import unicode_literals
from django.db import migrations, models
from django.apps import apps
from django.db.models import F
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
def copy_email_field(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
UserProfile = apps.get_model('zerver', 'UserProfile')
UserProfile.objects.all().update(delivery_email=F('email'))
class Migration(migrations.Migration):
dependencies = [
('zerver', '0173_support_seat_based_plans'),
]
operations = [
migrations.AddField(
model_name='userprofile',
name='delivery_email',
field=models.EmailField(db_index=True, default='', max_length=254),
preserve_default=False,
),
migrations.RunPython(copy_email_field,
reverse_code=migrations.RunPython.noop),
]
| from __future__ import unicode_literals
from django.db import migrations, models
from django.apps import apps
from django.db.models import F
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
def copy_email_field(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
UserProfile = apps.get_model('zerver', 'UserProfile')
UserProfile.objects.all().update(delivery_email=F('email'))
class Migration(migrations.Migration):
atomic = False
dependencies = [
('zerver', '0173_support_seat_based_plans'),
]
operations = [
migrations.AddField(
model_name='userprofile',
name='delivery_email',
field=models.EmailField(db_index=True, default='', max_length=254),
preserve_default=False,
),
migrations.RunPython(copy_email_field,
reverse_code=migrations.RunPython.noop),
]
| Disable atomic for delivery_email migration. | migrations: Disable atomic for delivery_email migration.
I'm not sure theoretically why this should be required only for some
installations, but these articles all suggest the root problem is
doing these two migrations together atomically (creating the field and
setting a value for it), so the right answer is to declare the
migration as not atomic:
https://stackoverflow.com/questions/12838111/django-db-migrations-cannot-alter-table-because-it-has-pending-trigger-events
https://confluence.atlassian.com/confkb/upgrade-failed-with-the-error-message-error-cannot-alter-table-content-because-it-has-pending-trigger-events-747606853.html
| Python | apache-2.0 | dhcrzf/zulip,zulip/zulip,zulip/zulip,showell/zulip,dhcrzf/zulip,hackerkid/zulip,jackrzhang/zulip,eeshangarg/zulip,tommyip/zulip,brainwane/zulip,tommyip/zulip,synicalsyntax/zulip,tommyip/zulip,shubhamdhama/zulip,rht/zulip,dhcrzf/zulip,timabbott/zulip,shubhamdhama/zulip,rht/zulip,brainwane/zulip,hackerkid/zulip,synicalsyntax/zulip,andersk/zulip,dhcrzf/zulip,brainwane/zulip,timabbott/zulip,dhcrzf/zulip,punchagan/zulip,rht/zulip,zulip/zulip,shubhamdhama/zulip,rishig/zulip,kou/zulip,showell/zulip,rht/zulip,jackrzhang/zulip,zulip/zulip,hackerkid/zulip,punchagan/zulip,andersk/zulip,rishig/zulip,kou/zulip,punchagan/zulip,brainwane/zulip,dhcrzf/zulip,rishig/zulip,synicalsyntax/zulip,timabbott/zulip,hackerkid/zulip,jackrzhang/zulip,timabbott/zulip,timabbott/zulip,zulip/zulip,jackrzhang/zulip,hackerkid/zulip,kou/zulip,kou/zulip,shubhamdhama/zulip,brainwane/zulip,kou/zulip,rishig/zulip,zulip/zulip,synicalsyntax/zulip,zulip/zulip,eeshangarg/zulip,shubhamdhama/zulip,andersk/zulip,timabbott/zulip,rishig/zulip,brainwane/zulip,shubhamdhama/zulip,rht/zulip,punchagan/zulip,punchagan/zulip,timabbott/zulip,brainwane/zulip,eeshangarg/zulip,showell/zulip,showell/zulip,tommyip/zulip,rishig/zulip,punchagan/zulip,jackrzhang/zulip,eeshangarg/zulip,punchagan/zulip,rht/zulip,jackrzhang/zulip,andersk/zulip,eeshangarg/zulip,tommyip/zulip,hackerkid/zulip,tommyip/zulip,synicalsyntax/zulip,rishig/zulip,synicalsyntax/zulip,kou/zulip,andersk/zulip,andersk/zulip,kou/zulip,andersk/zulip,synicalsyntax/zulip,eeshangarg/zulip,showell/zulip,dhcrzf/zulip,showell/zulip,eeshangarg/zulip,tommyip/zulip,shubhamdhama/zulip,hackerkid/zulip,showell/zulip,jackrzhang/zulip,rht/zulip | from __future__ import unicode_literals
from django.db import migrations, models
from django.apps import apps
from django.db.models import F
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
def copy_email_field(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
UserProfile = apps.get_model('zerver', 'UserProfile')
UserProfile.objects.all().update(delivery_email=F('email'))
class Migration(migrations.Migration):
+ atomic = False
dependencies = [
('zerver', '0173_support_seat_based_plans'),
]
operations = [
migrations.AddField(
model_name='userprofile',
name='delivery_email',
field=models.EmailField(db_index=True, default='', max_length=254),
preserve_default=False,
),
migrations.RunPython(copy_email_field,
reverse_code=migrations.RunPython.noop),
]
| Disable atomic for delivery_email migration. | ## Code Before:
from __future__ import unicode_literals
from django.db import migrations, models
from django.apps import apps
from django.db.models import F
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
def copy_email_field(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
UserProfile = apps.get_model('zerver', 'UserProfile')
UserProfile.objects.all().update(delivery_email=F('email'))
class Migration(migrations.Migration):
dependencies = [
('zerver', '0173_support_seat_based_plans'),
]
operations = [
migrations.AddField(
model_name='userprofile',
name='delivery_email',
field=models.EmailField(db_index=True, default='', max_length=254),
preserve_default=False,
),
migrations.RunPython(copy_email_field,
reverse_code=migrations.RunPython.noop),
]
## Instruction:
Disable atomic for delivery_email migration.
## Code After:
from __future__ import unicode_literals
from django.db import migrations, models
from django.apps import apps
from django.db.models import F
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
def copy_email_field(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
UserProfile = apps.get_model('zerver', 'UserProfile')
UserProfile.objects.all().update(delivery_email=F('email'))
class Migration(migrations.Migration):
atomic = False
dependencies = [
('zerver', '0173_support_seat_based_plans'),
]
operations = [
migrations.AddField(
model_name='userprofile',
name='delivery_email',
field=models.EmailField(db_index=True, default='', max_length=254),
preserve_default=False,
),
migrations.RunPython(copy_email_field,
reverse_code=migrations.RunPython.noop),
]
|
50fa164c4b09845bfa262c2f6959a3c5dfd6f76b | fluentcheck/classes/is_cls.py | fluentcheck/classes/is_cls.py | from typing import Any
from ..assertions_is.booleans import __IsBool
from ..assertions_is.collections import __IsCollections
from ..assertions_is.dicts import __IsDicts
from ..assertions_is.emptiness import __IsEmptiness
from ..assertions_is.geo import __IsGeo
from ..assertions_is.numbers import __IsNumbers
from ..assertions_is.sequences import __IsSequences
from ..assertions_is.strings import __IsStrings
from ..assertions_is.types import __IsTypes
from ..assertions_is.uuids import __IsUUIDs
class Is(__IsBool, __IsCollections, __IsDicts, __IsEmptiness, __IsGeo,
__IsNumbers, __IsSequences, __IsStrings, __IsTypes, __IsUUIDs):
def __init__(self, object_under_test: Any):
super().__init__(object_under_test)
| from typing import Any
from ..assertions_is.booleans import __IsBool
from ..assertions_is.collections import __IsCollections
from ..assertions_is.dicts import __IsDicts
from ..assertions_is.emptiness import __IsEmptiness
from ..assertions_is.geo import __IsGeo
from ..assertions_is.numbers import __IsNumbers
from ..assertions_is.sequences import __IsSequences
from ..assertions_is.strings import __IsStrings
from ..assertions_is.types import __IsTypes
from ..assertions_is.uuids import __IsUUIDs
class Is(__IsBool, __IsCollections, __IsDicts, __IsEmptiness, __IsGeo,
__IsNumbers, __IsSequences, __IsStrings, __IsTypes, __IsUUIDs):
pass
| Remove methods with unnecessary super delegation. | Remove methods with unnecessary super delegation. | Python | mit | csparpa/check | from typing import Any
from ..assertions_is.booleans import __IsBool
from ..assertions_is.collections import __IsCollections
from ..assertions_is.dicts import __IsDicts
from ..assertions_is.emptiness import __IsEmptiness
from ..assertions_is.geo import __IsGeo
from ..assertions_is.numbers import __IsNumbers
from ..assertions_is.sequences import __IsSequences
from ..assertions_is.strings import __IsStrings
from ..assertions_is.types import __IsTypes
from ..assertions_is.uuids import __IsUUIDs
class Is(__IsBool, __IsCollections, __IsDicts, __IsEmptiness, __IsGeo,
__IsNumbers, __IsSequences, __IsStrings, __IsTypes, __IsUUIDs):
+ pass
- def __init__(self, object_under_test: Any):
- super().__init__(object_under_test)
- | Remove methods with unnecessary super delegation. | ## Code Before:
from typing import Any
from ..assertions_is.booleans import __IsBool
from ..assertions_is.collections import __IsCollections
from ..assertions_is.dicts import __IsDicts
from ..assertions_is.emptiness import __IsEmptiness
from ..assertions_is.geo import __IsGeo
from ..assertions_is.numbers import __IsNumbers
from ..assertions_is.sequences import __IsSequences
from ..assertions_is.strings import __IsStrings
from ..assertions_is.types import __IsTypes
from ..assertions_is.uuids import __IsUUIDs
class Is(__IsBool, __IsCollections, __IsDicts, __IsEmptiness, __IsGeo,
__IsNumbers, __IsSequences, __IsStrings, __IsTypes, __IsUUIDs):
def __init__(self, object_under_test: Any):
super().__init__(object_under_test)
## Instruction:
Remove methods with unnecessary super delegation.
## Code After:
from typing import Any
from ..assertions_is.booleans import __IsBool
from ..assertions_is.collections import __IsCollections
from ..assertions_is.dicts import __IsDicts
from ..assertions_is.emptiness import __IsEmptiness
from ..assertions_is.geo import __IsGeo
from ..assertions_is.numbers import __IsNumbers
from ..assertions_is.sequences import __IsSequences
from ..assertions_is.strings import __IsStrings
from ..assertions_is.types import __IsTypes
from ..assertions_is.uuids import __IsUUIDs
class Is(__IsBool, __IsCollections, __IsDicts, __IsEmptiness, __IsGeo,
__IsNumbers, __IsSequences, __IsStrings, __IsTypes, __IsUUIDs):
pass
|
a15d2956cfd48e0d46d5d4cf567af05641b4c8e6 | yunity/api/utils.py | yunity/api/utils.py | from django.http import JsonResponse
class ApiBase(object):
@classmethod
def success(cls, data, status=200):
"""
:type data: dict
:type status: int
:rtype JsonResponse
"""
return JsonResponse(data, status=status)
@classmethod
def error(cls, error, status=400):
"""
:type error: str
:type status: int
:rtype JsonResponse
"""
return JsonResponse({'error': error}, status=status)
| from functools import wraps
from json import loads as load_json
from django.http import JsonResponse
class ApiBase(object):
@classmethod
def validation_failure(cls, message, status=400):
"""
:type message: str
:type status: int
:rtype JsonResponse
"""
return JsonResponse({'validation_failure': message}, status=status)
@classmethod
def success(cls, data, status=200):
"""
:type data: dict
:type status: int
:rtype JsonResponse
"""
return JsonResponse(data, status=status)
@classmethod
def error(cls, error, status=400):
"""
:type error: str
:type status: int
:rtype JsonResponse
"""
return JsonResponse({'error': error}, status=status)
def json_request(expected_keys=None):
"""Decorator to validate that a request is in JSON and (optionally) has some specific keys in the JSON object.
"""
expected_keys = expected_keys or []
def decorator(func):
@wraps(func)
def wrapper(cls, request, *args, **kwargs):
data = load_json(request.body.decode('utf8'))
for expected_key in expected_keys:
value = data.get(expected_key)
if not value:
return ApiBase.validation_failure('missing key: {}'.format(expected_key))
return func(cls, data, request, *args, **kwargs)
return wrapper
return decorator
| Implement JSON request validation decorator | Implement JSON request validation decorator
with @NerdyProjects
| Python | agpl-3.0 | yunity/foodsaving-backend,yunity/foodsaving-backend,yunity/yunity-core,yunity/foodsaving-backend,yunity/yunity-core | + from functools import wraps
+ from json import loads as load_json
+
from django.http import JsonResponse
class ApiBase(object):
+ @classmethod
+ def validation_failure(cls, message, status=400):
+ """
+ :type message: str
+ :type status: int
+ :rtype JsonResponse
+
+ """
+ return JsonResponse({'validation_failure': message}, status=status)
+
@classmethod
def success(cls, data, status=200):
"""
:type data: dict
:type status: int
:rtype JsonResponse
"""
return JsonResponse(data, status=status)
@classmethod
def error(cls, error, status=400):
"""
:type error: str
:type status: int
:rtype JsonResponse
"""
return JsonResponse({'error': error}, status=status)
+
+ def json_request(expected_keys=None):
+ """Decorator to validate that a request is in JSON and (optionally) has some specific keys in the JSON object.
+
+ """
+ expected_keys = expected_keys or []
+
+ def decorator(func):
+ @wraps(func)
+ def wrapper(cls, request, *args, **kwargs):
+ data = load_json(request.body.decode('utf8'))
+
+ for expected_key in expected_keys:
+ value = data.get(expected_key)
+ if not value:
+ return ApiBase.validation_failure('missing key: {}'.format(expected_key))
+
+ return func(cls, data, request, *args, **kwargs)
+
+ return wrapper
+ return decorator
+ | Implement JSON request validation decorator | ## Code Before:
from django.http import JsonResponse
class ApiBase(object):
@classmethod
def success(cls, data, status=200):
"""
:type data: dict
:type status: int
:rtype JsonResponse
"""
return JsonResponse(data, status=status)
@classmethod
def error(cls, error, status=400):
"""
:type error: str
:type status: int
:rtype JsonResponse
"""
return JsonResponse({'error': error}, status=status)
## Instruction:
Implement JSON request validation decorator
## Code After:
from functools import wraps
from json import loads as load_json
from django.http import JsonResponse
class ApiBase(object):
@classmethod
def validation_failure(cls, message, status=400):
"""
:type message: str
:type status: int
:rtype JsonResponse
"""
return JsonResponse({'validation_failure': message}, status=status)
@classmethod
def success(cls, data, status=200):
"""
:type data: dict
:type status: int
:rtype JsonResponse
"""
return JsonResponse(data, status=status)
@classmethod
def error(cls, error, status=400):
"""
:type error: str
:type status: int
:rtype JsonResponse
"""
return JsonResponse({'error': error}, status=status)
def json_request(expected_keys=None):
"""Decorator to validate that a request is in JSON and (optionally) has some specific keys in the JSON object.
"""
expected_keys = expected_keys or []
def decorator(func):
@wraps(func)
def wrapper(cls, request, *args, **kwargs):
data = load_json(request.body.decode('utf8'))
for expected_key in expected_keys:
value = data.get(expected_key)
if not value:
return ApiBase.validation_failure('missing key: {}'.format(expected_key))
return func(cls, data, request, *args, **kwargs)
return wrapper
return decorator
|
31f887979d2129bec80311e94b91cf0f77772f26 | zou/app/utils/fs.py | zou/app/utils/fs.py | import os
import shutil
import errno
def mkdir_p(path):
try:
os.makedirs(path)
except OSError as exception:
if exception.errno == errno.EEXIST and os.path.isdir(path):
pass
else:
raise
def rm_rf(path):
if os.path.exists(path):
shutil.rmtree(path)
| import os
import shutil
import errno
def mkdir_p(path):
try:
os.makedirs(path)
except OSError as exception:
if exception.errno == errno.EEXIST and os.path.isdir(path):
pass
else:
raise
def rm_rf(path):
if os.path.exists(path):
shutil.rmtree(path)
def copyfile(src, dest):
shutil.copyfile(src, dest)
| Add a new copy file util function | Add a new copy file util function
| Python | agpl-3.0 | cgwire/zou | import os
import shutil
import errno
def mkdir_p(path):
try:
os.makedirs(path)
except OSError as exception:
if exception.errno == errno.EEXIST and os.path.isdir(path):
pass
else:
raise
def rm_rf(path):
if os.path.exists(path):
shutil.rmtree(path)
+
+ def copyfile(src, dest):
+ shutil.copyfile(src, dest)
+ | Add a new copy file util function | ## Code Before:
import os
import shutil
import errno
def mkdir_p(path):
try:
os.makedirs(path)
except OSError as exception:
if exception.errno == errno.EEXIST and os.path.isdir(path):
pass
else:
raise
def rm_rf(path):
if os.path.exists(path):
shutil.rmtree(path)
## Instruction:
Add a new copy file util function
## Code After:
import os
import shutil
import errno
def mkdir_p(path):
try:
os.makedirs(path)
except OSError as exception:
if exception.errno == errno.EEXIST and os.path.isdir(path):
pass
else:
raise
def rm_rf(path):
if os.path.exists(path):
shutil.rmtree(path)
def copyfile(src, dest):
shutil.copyfile(src, dest)
|
463fa89c143cd4493ea3704f177c5aba0ebb2af7 | idiokit/xmpp/_resolve.py | idiokit/xmpp/_resolve.py | from __future__ import absolute_import
from .. import idiokit, dns
DEFAULT_XMPP_PORT = 5222
@idiokit.stream
def _add_port_and_count(port):
count = 0
while True:
try:
family, ip = yield idiokit.next()
except StopIteration:
idiokit.stop(count)
yield idiokit.send(family, ip, port)
count += 1
def _resolve_host(host, port):
return dns.host_lookup(host) | _add_port_and_count(port)
@idiokit.stream
def resolve(domain, forced_host=None, forced_port=None):
if forced_host is not None:
port = DEFAULT_XMPP_PORT if forced_port is None else forced_port
yield _resolve_host(forced_host, port)
return
try:
srv_records = yield dns.srv("_xmpp-client._tcp." + domain)
except dns.ResponseError:
srv_records = []
srv_count = 0
for srv_record in dns.ordered_srv_records(srv_records):
port = srv_record.port if forced_port is None else forced_port
srv_count += yield _resolve_host(srv_record.target, port)
if srv_count == 0:
port = DEFAULT_XMPP_PORT if forced_port is None else forced_port
yield _resolve_host(domain, port)
| from __future__ import absolute_import
from .. import idiokit, dns
DEFAULT_XMPP_PORT = 5222
@idiokit.stream
def _add_port(port):
while True:
family, ip = yield idiokit.next()
yield idiokit.send(family, ip, port)
def _resolve_host(host, port):
return dns.host_lookup(host) | _add_port(port)
@idiokit.stream
def resolve(domain, forced_host=None, forced_port=None):
if forced_host is not None:
port = DEFAULT_XMPP_PORT if forced_port is None else forced_port
yield _resolve_host(forced_host, port)
return
try:
srv_records = yield dns.srv("_xmpp-client._tcp." + domain)
except (dns.ResponseError, dns.DNSTimeout):
srv_records = []
if not srv_records:
port = DEFAULT_XMPP_PORT if forced_port is None else forced_port
yield _resolve_host(domain, port)
return
for srv_record in dns.ordered_srv_records(srv_records):
port = srv_record.port if forced_port is None else forced_port
yield _resolve_host(srv_record.target, port)
| Fix SRV logic. RFC 6120 states that the fallback logic shouldn't be applied when the entity (client in this case) receives an answer to the SRV query but fails to establish a connection using the answer data. | idiokit.xmpp: Fix SRV logic. RFC 6120 states that the fallback logic shouldn't be applied when the entity (client in this case) receives an answer to the SRV query but fails to establish a connection using the answer data.
| Python | mit | abusesa/idiokit | from __future__ import absolute_import
from .. import idiokit, dns
DEFAULT_XMPP_PORT = 5222
@idiokit.stream
- def _add_port_and_count(port):
+ def _add_port(port):
- count = 0
-
while True:
- try:
- family, ip = yield idiokit.next()
+ family, ip = yield idiokit.next()
- except StopIteration:
- idiokit.stop(count)
-
yield idiokit.send(family, ip, port)
- count += 1
def _resolve_host(host, port):
- return dns.host_lookup(host) | _add_port_and_count(port)
+ return dns.host_lookup(host) | _add_port(port)
@idiokit.stream
def resolve(domain, forced_host=None, forced_port=None):
if forced_host is not None:
port = DEFAULT_XMPP_PORT if forced_port is None else forced_port
yield _resolve_host(forced_host, port)
return
try:
srv_records = yield dns.srv("_xmpp-client._tcp." + domain)
- except dns.ResponseError:
+ except (dns.ResponseError, dns.DNSTimeout):
srv_records = []
- srv_count = 0
+ if not srv_records:
+ port = DEFAULT_XMPP_PORT if forced_port is None else forced_port
+ yield _resolve_host(domain, port)
+ return
+
for srv_record in dns.ordered_srv_records(srv_records):
port = srv_record.port if forced_port is None else forced_port
- srv_count += yield _resolve_host(srv_record.target, port)
+ yield _resolve_host(srv_record.target, port)
- if srv_count == 0:
- port = DEFAULT_XMPP_PORT if forced_port is None else forced_port
- yield _resolve_host(domain, port)
- | Fix SRV logic. RFC 6120 states that the fallback logic shouldn't be applied when the entity (client in this case) receives an answer to the SRV query but fails to establish a connection using the answer data. | ## Code Before:
from __future__ import absolute_import
from .. import idiokit, dns
DEFAULT_XMPP_PORT = 5222
@idiokit.stream
def _add_port_and_count(port):
count = 0
while True:
try:
family, ip = yield idiokit.next()
except StopIteration:
idiokit.stop(count)
yield idiokit.send(family, ip, port)
count += 1
def _resolve_host(host, port):
return dns.host_lookup(host) | _add_port_and_count(port)
@idiokit.stream
def resolve(domain, forced_host=None, forced_port=None):
if forced_host is not None:
port = DEFAULT_XMPP_PORT if forced_port is None else forced_port
yield _resolve_host(forced_host, port)
return
try:
srv_records = yield dns.srv("_xmpp-client._tcp." + domain)
except dns.ResponseError:
srv_records = []
srv_count = 0
for srv_record in dns.ordered_srv_records(srv_records):
port = srv_record.port if forced_port is None else forced_port
srv_count += yield _resolve_host(srv_record.target, port)
if srv_count == 0:
port = DEFAULT_XMPP_PORT if forced_port is None else forced_port
yield _resolve_host(domain, port)
## Instruction:
Fix SRV logic. RFC 6120 states that the fallback logic shouldn't be applied when the entity (client in this case) receives an answer to the SRV query but fails to establish a connection using the answer data.
## Code After:
from __future__ import absolute_import
from .. import idiokit, dns
DEFAULT_XMPP_PORT = 5222
@idiokit.stream
def _add_port(port):
while True:
family, ip = yield idiokit.next()
yield idiokit.send(family, ip, port)
def _resolve_host(host, port):
return dns.host_lookup(host) | _add_port(port)
@idiokit.stream
def resolve(domain, forced_host=None, forced_port=None):
if forced_host is not None:
port = DEFAULT_XMPP_PORT if forced_port is None else forced_port
yield _resolve_host(forced_host, port)
return
try:
srv_records = yield dns.srv("_xmpp-client._tcp." + domain)
except (dns.ResponseError, dns.DNSTimeout):
srv_records = []
if not srv_records:
port = DEFAULT_XMPP_PORT if forced_port is None else forced_port
yield _resolve_host(domain, port)
return
for srv_record in dns.ordered_srv_records(srv_records):
port = srv_record.port if forced_port is None else forced_port
yield _resolve_host(srv_record.target, port)
|
7e71e21734abb2b12e309ea37910c90f7b837651 | go/base/tests/test_decorators.py | go/base/tests/test_decorators.py | """Test for go.base.decorators."""
from go.vumitools.tests.helpers import djangotest_imports
with djangotest_imports(globals()):
from go.base.tests.helpers import GoDjangoTestCase
from go.base.decorators import render_exception
from django.template.response import TemplateResponse
class CatchableDummyError(Exception):
"""Error that will be caught by DummyView.post."""
class UncatchableDummyError(Exception):
"""Error that will not be caught by DummyView.post."""
class DummyView(object):
@render_exception(CatchableDummyError, 400, "Meep.")
def post(self, request, err=None):
if err is None:
return "Success"
raise err
class TestRenderException(GoDjangoTestCase):
def test_no_exception(self):
d = DummyView()
self.assertEqual(d.post("request"), "Success")
def test_expected_exception(self):
d = DummyView()
self.assertRaises(
UncatchableDummyError, d.post, "request", UncatchableDummyError())
def test_other_exception(self):
d = DummyView()
response = d.post("request", CatchableDummyError("foo"))
self.assertTrue(isinstance(response, TemplateResponse))
self.assertEqual(response.template_name, 'error.html')
self.assertEqual(response.status_code, 400)
| """Test for go.base.decorators."""
from go.vumitools.tests.helpers import djangotest_imports
with djangotest_imports(globals()):
from go.base.tests.helpers import GoDjangoTestCase
from go.base.decorators import render_exception
from django.template.response import TemplateResponse
class CatchableDummyError(Exception):
"""Error that will be caught by DummyView.post."""
class UncatchableDummyError(Exception):
"""Error that will not be caught by DummyView.post."""
class DummyView(object):
@render_exception(CatchableDummyError, 400, "Meep.")
def post(self, request, err=None):
if err is None:
return "Success"
raise err
class TestRenderException(GoDjangoTestCase):
def test_no_exception(self):
d = DummyView()
self.assertEqual(d.post("request"), "Success")
def test_expected_exception(self):
d = DummyView()
self.assertRaises(
UncatchableDummyError, d.post, "request", UncatchableDummyError())
def test_other_exception(self):
d = DummyView()
response = d.post("request", CatchableDummyError("foo"))
self.assertTrue(isinstance(response, TemplateResponse))
self.assertEqual(response.template_name, 'error.html')
self.assertEqual(response.status_code, 400)
| Move Django-specific pieces into the django_imports block. | Move Django-specific pieces into the django_imports block.
| Python | bsd-3-clause | praekelt/vumi-go,praekelt/vumi-go,praekelt/vumi-go,praekelt/vumi-go | """Test for go.base.decorators."""
from go.vumitools.tests.helpers import djangotest_imports
with djangotest_imports(globals()):
from go.base.tests.helpers import GoDjangoTestCase
from go.base.decorators import render_exception
from django.template.response import TemplateResponse
+ class CatchableDummyError(Exception):
+ """Error that will be caught by DummyView.post."""
- class CatchableDummyError(Exception):
+ class UncatchableDummyError(Exception):
- """Error that will be caught by DummyView.post."""
+ """Error that will not be caught by DummyView.post."""
-
- class UncatchableDummyError(Exception):
- """Error that will not be caught by DummyView.post."""
-
-
- class DummyView(object):
+ class DummyView(object):
- @render_exception(CatchableDummyError, 400, "Meep.")
+ @render_exception(CatchableDummyError, 400, "Meep.")
- def post(self, request, err=None):
+ def post(self, request, err=None):
- if err is None:
+ if err is None:
- return "Success"
+ return "Success"
- raise err
+ raise err
class TestRenderException(GoDjangoTestCase):
def test_no_exception(self):
d = DummyView()
self.assertEqual(d.post("request"), "Success")
def test_expected_exception(self):
d = DummyView()
self.assertRaises(
UncatchableDummyError, d.post, "request", UncatchableDummyError())
def test_other_exception(self):
d = DummyView()
response = d.post("request", CatchableDummyError("foo"))
self.assertTrue(isinstance(response, TemplateResponse))
self.assertEqual(response.template_name, 'error.html')
self.assertEqual(response.status_code, 400)
| Move Django-specific pieces into the django_imports block. | ## Code Before:
"""Test for go.base.decorators."""
from go.vumitools.tests.helpers import djangotest_imports
with djangotest_imports(globals()):
from go.base.tests.helpers import GoDjangoTestCase
from go.base.decorators import render_exception
from django.template.response import TemplateResponse
class CatchableDummyError(Exception):
"""Error that will be caught by DummyView.post."""
class UncatchableDummyError(Exception):
"""Error that will not be caught by DummyView.post."""
class DummyView(object):
@render_exception(CatchableDummyError, 400, "Meep.")
def post(self, request, err=None):
if err is None:
return "Success"
raise err
class TestRenderException(GoDjangoTestCase):
def test_no_exception(self):
d = DummyView()
self.assertEqual(d.post("request"), "Success")
def test_expected_exception(self):
d = DummyView()
self.assertRaises(
UncatchableDummyError, d.post, "request", UncatchableDummyError())
def test_other_exception(self):
d = DummyView()
response = d.post("request", CatchableDummyError("foo"))
self.assertTrue(isinstance(response, TemplateResponse))
self.assertEqual(response.template_name, 'error.html')
self.assertEqual(response.status_code, 400)
## Instruction:
Move Django-specific pieces into the django_imports block.
## Code After:
"""Test for go.base.decorators."""
from go.vumitools.tests.helpers import djangotest_imports
with djangotest_imports(globals()):
from go.base.tests.helpers import GoDjangoTestCase
from go.base.decorators import render_exception
from django.template.response import TemplateResponse
class CatchableDummyError(Exception):
"""Error that will be caught by DummyView.post."""
class UncatchableDummyError(Exception):
"""Error that will not be caught by DummyView.post."""
class DummyView(object):
@render_exception(CatchableDummyError, 400, "Meep.")
def post(self, request, err=None):
if err is None:
return "Success"
raise err
class TestRenderException(GoDjangoTestCase):
def test_no_exception(self):
d = DummyView()
self.assertEqual(d.post("request"), "Success")
def test_expected_exception(self):
d = DummyView()
self.assertRaises(
UncatchableDummyError, d.post, "request", UncatchableDummyError())
def test_other_exception(self):
d = DummyView()
response = d.post("request", CatchableDummyError("foo"))
self.assertTrue(isinstance(response, TemplateResponse))
self.assertEqual(response.template_name, 'error.html')
self.assertEqual(response.status_code, 400)
|
a50aeb81a588f8297f194d793cb8f8cf0e15a411 | lambda/list_member.py | lambda/list_member.py | from __future__ import print_function
from enum import IntEnum
import yaml
MemberFlag = IntEnum('MemberFlag', [
'digest',
'digest2',
'modPost',
'preapprove',
'noPost',
'diagnostic',
'moderator',
'myopic',
'superadmin',
'admin',
'protected',
'ccErrors',
'reports',
'vacation',
'ackPost',
'echoPost',
'hidden',
])
def member_flag_representer(dumper, data):
return dumper.represent_scalar(u'!flag', data.name)
yaml.add_representer(MemberFlag, member_flag_representer)
def member_flag_constructor(loader, node):
value = loader.construct_scalar(node)
return MemberFlag[value]
yaml.SafeLoader.add_constructor(u'!flag', member_flag_constructor)
class ListMember(yaml.YAMLObject):
yaml_tag = u'!Member'
yaml_loader = yaml.SafeLoader
def __init__(self, address, *args, **kwargs):
self.address = address
self.flags = set(a for a in args if isinstance(a, MemberFlag))
def __repr__(self):
return u'{}({}, flags: {})'.format(
self.__class__.__name__,
self.address,
', '.join(
map(lambda f: f.name,
self.flags)
),
)
| from __future__ import print_function
from enum import IntEnum
import yaml
MemberFlag = IntEnum('MemberFlag', [
'digest',
'digest2',
'modPost',
'preapprove',
'noPost',
'diagnostic',
'moderator',
'myopic',
'superadmin',
'admin',
'protected',
'ccErrors',
'reports',
'vacation',
'ackPost',
'echoPost',
'hidden',
])
def member_flag_representer(dumper, data):
return dumper.represent_scalar(u'!flag', data.name)
yaml.add_representer(MemberFlag, member_flag_representer)
def member_flag_constructor(loader, node):
value = loader.construct_scalar(node)
return MemberFlag[value]
yaml.SafeLoader.add_constructor(u'!flag', member_flag_constructor)
class ListMember(yaml.YAMLObject):
yaml_tag = u'!Member'
yaml_loader = yaml.SafeLoader
def __init__(self, address, *args, **kwargs):
if isinstance(address, unicode):
# Attempt to down-convert unicode-string addresses to plain strings
try:
address = str(address)
except UnicodeEncodeError:
pass
self.address = address
self.flags = set(a for a in args if isinstance(a, MemberFlag))
def __repr__(self):
return u'{}({}, flags: {})'.format(
self.__class__.__name__,
self.address,
', '.join(
map(lambda f: f.name,
self.flags)
),
)
| Convert list member addresses to non-unicode strings when possible. | Convert list member addresses to non-unicode strings when possible.
| Python | mit | ilg/LambdaMLM | from __future__ import print_function
from enum import IntEnum
import yaml
MemberFlag = IntEnum('MemberFlag', [
'digest',
'digest2',
'modPost',
'preapprove',
'noPost',
'diagnostic',
'moderator',
'myopic',
'superadmin',
'admin',
'protected',
'ccErrors',
'reports',
'vacation',
'ackPost',
'echoPost',
'hidden',
])
def member_flag_representer(dumper, data):
return dumper.represent_scalar(u'!flag', data.name)
yaml.add_representer(MemberFlag, member_flag_representer)
def member_flag_constructor(loader, node):
value = loader.construct_scalar(node)
return MemberFlag[value]
yaml.SafeLoader.add_constructor(u'!flag', member_flag_constructor)
class ListMember(yaml.YAMLObject):
yaml_tag = u'!Member'
yaml_loader = yaml.SafeLoader
def __init__(self, address, *args, **kwargs):
+ if isinstance(address, unicode):
+ # Attempt to down-convert unicode-string addresses to plain strings
+ try:
+ address = str(address)
+ except UnicodeEncodeError:
+ pass
self.address = address
self.flags = set(a for a in args if isinstance(a, MemberFlag))
def __repr__(self):
return u'{}({}, flags: {})'.format(
self.__class__.__name__,
self.address,
', '.join(
map(lambda f: f.name,
self.flags)
),
)
| Convert list member addresses to non-unicode strings when possible. | ## Code Before:
from __future__ import print_function
from enum import IntEnum
import yaml
MemberFlag = IntEnum('MemberFlag', [
'digest',
'digest2',
'modPost',
'preapprove',
'noPost',
'diagnostic',
'moderator',
'myopic',
'superadmin',
'admin',
'protected',
'ccErrors',
'reports',
'vacation',
'ackPost',
'echoPost',
'hidden',
])
def member_flag_representer(dumper, data):
return dumper.represent_scalar(u'!flag', data.name)
yaml.add_representer(MemberFlag, member_flag_representer)
def member_flag_constructor(loader, node):
value = loader.construct_scalar(node)
return MemberFlag[value]
yaml.SafeLoader.add_constructor(u'!flag', member_flag_constructor)
class ListMember(yaml.YAMLObject):
yaml_tag = u'!Member'
yaml_loader = yaml.SafeLoader
def __init__(self, address, *args, **kwargs):
self.address = address
self.flags = set(a for a in args if isinstance(a, MemberFlag))
def __repr__(self):
return u'{}({}, flags: {})'.format(
self.__class__.__name__,
self.address,
', '.join(
map(lambda f: f.name,
self.flags)
),
)
## Instruction:
Convert list member addresses to non-unicode strings when possible.
## Code After:
from __future__ import print_function
from enum import IntEnum
import yaml
MemberFlag = IntEnum('MemberFlag', [
'digest',
'digest2',
'modPost',
'preapprove',
'noPost',
'diagnostic',
'moderator',
'myopic',
'superadmin',
'admin',
'protected',
'ccErrors',
'reports',
'vacation',
'ackPost',
'echoPost',
'hidden',
])
def member_flag_representer(dumper, data):
return dumper.represent_scalar(u'!flag', data.name)
yaml.add_representer(MemberFlag, member_flag_representer)
def member_flag_constructor(loader, node):
value = loader.construct_scalar(node)
return MemberFlag[value]
yaml.SafeLoader.add_constructor(u'!flag', member_flag_constructor)
class ListMember(yaml.YAMLObject):
yaml_tag = u'!Member'
yaml_loader = yaml.SafeLoader
def __init__(self, address, *args, **kwargs):
if isinstance(address, unicode):
# Attempt to down-convert unicode-string addresses to plain strings
try:
address = str(address)
except UnicodeEncodeError:
pass
self.address = address
self.flags = set(a for a in args if isinstance(a, MemberFlag))
def __repr__(self):
return u'{}({}, flags: {})'.format(
self.__class__.__name__,
self.address,
', '.join(
map(lambda f: f.name,
self.flags)
),
)
|
bd59db76bb81218d04224e44773eae9d3d9dfc21 | rplugin/python3/denite/source/toc.py | rplugin/python3/denite/source/toc.py |
from .base import Base
class Source(Base):
def __init__(self, vim):
super().__init__(vim)
self.name = 'vimtex_toc'
self.kind = 'file'
@staticmethod
def format_number(n):
if not n or n['frontmatter'] or n['backmatter']:
return ''
num = [str(n[k]) for k in [
'part',
'chapter',
'section',
'subsection',
'subsubsection',
'subsubsubsection'] if n[k] is not 0]
if n['appendix']:
num[0] = chr(int(num[0]) + 64)
fnum = '.'.join(num)
return fnum
@staticmethod
def create_candidate(e, depth):
indent = (' ' * 2*(depth - e['level']) + e['title'])[:60]
number = Source.format_number(e['number'])
abbr = '{:65}{:10}'.format(indent, number)
return {'word': e['title'],
'abbr': abbr,
'action__path': e['file'],
'action__line': e.get('line', 0)}
def gather_candidates(self, context):
entries = self.vim.eval('vimtex#toc#get_entries()')
depth = max([e['level'] for e in entries])
return [Source.create_candidate(e, depth) for e in entries]
|
from .base import Base
class Source(Base):
def __init__(self, vim):
super().__init__(vim)
self.name = 'vimtex_toc'
self.kind = 'file'
@staticmethod
def format_number(n):
if not n or not type(n) is dict or n['frontmatter'] or n['backmatter']:
return ''
num = [str(n[k]) for k in [
'part',
'chapter',
'section',
'subsection',
'subsubsection',
'subsubsubsection'] if n[k] is not 0]
if n['appendix']:
num[0] = chr(int(num[0]) + 64)
fnum = '.'.join(num)
return fnum
@staticmethod
def create_candidate(e, depth):
indent = (' ' * 2*(int(depth) - int(e['level'])) + e['title'])[:60]
number = Source.format_number(e['number'])
abbr = '{:65}{:10}'.format(indent, number)
return {'word': e['title'],
'abbr': abbr,
'action__path': e['file'],
'action__line': e.get('line', 0)}
def gather_candidates(self, context):
entries = self.vim.eval('vimtex#toc#get_entries()')
depth = max([int(e['level']) for e in entries])
return [Source.create_candidate(e, depth) for e in entries]
| Fix Denite support for vim8. | Fix Denite support for vim8.
| Python | mit | lervag/vimtex,Aster89/vimtex,Aster89/vimtex,kmarius/vimtex,lervag/vimtex,kmarius/vimtex |
from .base import Base
class Source(Base):
def __init__(self, vim):
super().__init__(vim)
self.name = 'vimtex_toc'
self.kind = 'file'
@staticmethod
def format_number(n):
- if not n or n['frontmatter'] or n['backmatter']:
+ if not n or not type(n) is dict or n['frontmatter'] or n['backmatter']:
return ''
num = [str(n[k]) for k in [
'part',
'chapter',
'section',
'subsection',
'subsubsection',
'subsubsubsection'] if n[k] is not 0]
if n['appendix']:
num[0] = chr(int(num[0]) + 64)
fnum = '.'.join(num)
return fnum
@staticmethod
def create_candidate(e, depth):
- indent = (' ' * 2*(depth - e['level']) + e['title'])[:60]
+ indent = (' ' * 2*(int(depth) - int(e['level'])) + e['title'])[:60]
number = Source.format_number(e['number'])
abbr = '{:65}{:10}'.format(indent, number)
return {'word': e['title'],
'abbr': abbr,
'action__path': e['file'],
'action__line': e.get('line', 0)}
def gather_candidates(self, context):
entries = self.vim.eval('vimtex#toc#get_entries()')
- depth = max([e['level'] for e in entries])
+ depth = max([int(e['level']) for e in entries])
return [Source.create_candidate(e, depth) for e in entries]
| Fix Denite support for vim8. | ## Code Before:
from .base import Base
class Source(Base):
def __init__(self, vim):
super().__init__(vim)
self.name = 'vimtex_toc'
self.kind = 'file'
@staticmethod
def format_number(n):
if not n or n['frontmatter'] or n['backmatter']:
return ''
num = [str(n[k]) for k in [
'part',
'chapter',
'section',
'subsection',
'subsubsection',
'subsubsubsection'] if n[k] is not 0]
if n['appendix']:
num[0] = chr(int(num[0]) + 64)
fnum = '.'.join(num)
return fnum
@staticmethod
def create_candidate(e, depth):
indent = (' ' * 2*(depth - e['level']) + e['title'])[:60]
number = Source.format_number(e['number'])
abbr = '{:65}{:10}'.format(indent, number)
return {'word': e['title'],
'abbr': abbr,
'action__path': e['file'],
'action__line': e.get('line', 0)}
def gather_candidates(self, context):
entries = self.vim.eval('vimtex#toc#get_entries()')
depth = max([e['level'] for e in entries])
return [Source.create_candidate(e, depth) for e in entries]
## Instruction:
Fix Denite support for vim8.
## Code After:
from .base import Base
class Source(Base):
def __init__(self, vim):
super().__init__(vim)
self.name = 'vimtex_toc'
self.kind = 'file'
@staticmethod
def format_number(n):
if not n or not type(n) is dict or n['frontmatter'] or n['backmatter']:
return ''
num = [str(n[k]) for k in [
'part',
'chapter',
'section',
'subsection',
'subsubsection',
'subsubsubsection'] if n[k] is not 0]
if n['appendix']:
num[0] = chr(int(num[0]) + 64)
fnum = '.'.join(num)
return fnum
@staticmethod
def create_candidate(e, depth):
indent = (' ' * 2*(int(depth) - int(e['level'])) + e['title'])[:60]
number = Source.format_number(e['number'])
abbr = '{:65}{:10}'.format(indent, number)
return {'word': e['title'],
'abbr': abbr,
'action__path': e['file'],
'action__line': e.get('line', 0)}
def gather_candidates(self, context):
entries = self.vim.eval('vimtex#toc#get_entries()')
depth = max([int(e['level']) for e in entries])
return [Source.create_candidate(e, depth) for e in entries]
|
dc461956408ffa35e2391fccf4231d60144985f7 | yunity/groups/api.py | yunity/groups/api.py | from rest_framework import filters
from rest_framework import status, viewsets
from rest_framework.decorators import detail_route
from rest_framework.permissions import IsAuthenticated, IsAuthenticatedOrReadOnly
from rest_framework.response import Response
from yunity.groups.serializers import GroupSerializer
from yunity.groups.models import Group as GroupModel
class GroupViewSet(viewsets.ModelViewSet):
queryset = GroupModel.objects.all()
serializer_class = GroupSerializer
filter_fields = ('members',)
filter_backends = (filters.SearchFilter,)
search_fields = ('name', 'description')
permission_classes = (IsAuthenticatedOrReadOnly,)
@detail_route(methods=['POST', 'GET'],
permission_classes=(IsAuthenticated,))
def join(self, request, pk=None):
group = self.get_object()
group.members.add(request.user)
return Response(status=status.HTTP_200_OK)
@detail_route(methods=['POST', 'GET'],
permission_classes=(IsAuthenticated,))
def leave(self, request, pk=None):
group = self.get_object()
if not group.members.filter(id=request.user.id).exists():
return Response("User not member of group",
status=status.HTTP_400_BAD_REQUEST)
group.members.remove(request.user)
return Response(status=status.HTTP_200_OK)
| from rest_framework import filters
from rest_framework import status, viewsets
from rest_framework.decorators import detail_route
from rest_framework.permissions import IsAuthenticated, IsAuthenticatedOrReadOnly, BasePermission
from rest_framework.response import Response
from yunity.groups.serializers import GroupSerializer
from yunity.groups.models import Group as GroupModel
class IsMember(BasePermission):
message = 'You are not a member.'
def has_object_permission(self, request, view, obj):
return request.user in obj.members.all()
class GroupViewSet(viewsets.ModelViewSet):
queryset = GroupModel.objects.all()
serializer_class = GroupSerializer
filter_fields = ('members',)
filter_backends = (filters.SearchFilter,)
search_fields = ('name', 'description')
def get_permissions(self):
if self.action in ('update', 'partial_update', 'destroy'):
self.permission_classes = (IsMember,)
else:
self.permission_classes = (IsAuthenticatedOrReadOnly,)
return super().get_permissions()
@detail_route(methods=['POST', 'GET'],
permission_classes=(IsAuthenticated,))
def join(self, request, pk=None):
group = self.get_object()
group.members.add(request.user)
return Response(status=status.HTTP_200_OK)
@detail_route(methods=['POST', 'GET'],
permission_classes=(IsAuthenticated,))
def leave(self, request, pk=None):
group = self.get_object()
if not group.members.filter(id=request.user.id).exists():
return Response("User not member of group",
status=status.HTTP_400_BAD_REQUEST)
group.members.remove(request.user)
return Response(status=status.HTTP_200_OK)
| Fix permissions for groups endpoint | Fix permissions for groups endpoint
| Python | agpl-3.0 | yunity/yunity-core,yunity/yunity-core,yunity/foodsaving-backend,yunity/foodsaving-backend,yunity/foodsaving-backend | from rest_framework import filters
from rest_framework import status, viewsets
from rest_framework.decorators import detail_route
- from rest_framework.permissions import IsAuthenticated, IsAuthenticatedOrReadOnly
+ from rest_framework.permissions import IsAuthenticated, IsAuthenticatedOrReadOnly, BasePermission
from rest_framework.response import Response
from yunity.groups.serializers import GroupSerializer
from yunity.groups.models import Group as GroupModel
+
+
+ class IsMember(BasePermission):
+ message = 'You are not a member.'
+
+ def has_object_permission(self, request, view, obj):
+ return request.user in obj.members.all()
class GroupViewSet(viewsets.ModelViewSet):
queryset = GroupModel.objects.all()
serializer_class = GroupSerializer
filter_fields = ('members',)
filter_backends = (filters.SearchFilter,)
search_fields = ('name', 'description')
+
+ def get_permissions(self):
+ if self.action in ('update', 'partial_update', 'destroy'):
+ self.permission_classes = (IsMember,)
+ else:
- permission_classes = (IsAuthenticatedOrReadOnly,)
+ self.permission_classes = (IsAuthenticatedOrReadOnly,)
+
+ return super().get_permissions()
@detail_route(methods=['POST', 'GET'],
permission_classes=(IsAuthenticated,))
def join(self, request, pk=None):
group = self.get_object()
group.members.add(request.user)
return Response(status=status.HTTP_200_OK)
@detail_route(methods=['POST', 'GET'],
permission_classes=(IsAuthenticated,))
def leave(self, request, pk=None):
group = self.get_object()
if not group.members.filter(id=request.user.id).exists():
return Response("User not member of group",
status=status.HTTP_400_BAD_REQUEST)
group.members.remove(request.user)
return Response(status=status.HTTP_200_OK)
| Fix permissions for groups endpoint | ## Code Before:
from rest_framework import filters
from rest_framework import status, viewsets
from rest_framework.decorators import detail_route
from rest_framework.permissions import IsAuthenticated, IsAuthenticatedOrReadOnly
from rest_framework.response import Response
from yunity.groups.serializers import GroupSerializer
from yunity.groups.models import Group as GroupModel
class GroupViewSet(viewsets.ModelViewSet):
queryset = GroupModel.objects.all()
serializer_class = GroupSerializer
filter_fields = ('members',)
filter_backends = (filters.SearchFilter,)
search_fields = ('name', 'description')
permission_classes = (IsAuthenticatedOrReadOnly,)
@detail_route(methods=['POST', 'GET'],
permission_classes=(IsAuthenticated,))
def join(self, request, pk=None):
group = self.get_object()
group.members.add(request.user)
return Response(status=status.HTTP_200_OK)
@detail_route(methods=['POST', 'GET'],
permission_classes=(IsAuthenticated,))
def leave(self, request, pk=None):
group = self.get_object()
if not group.members.filter(id=request.user.id).exists():
return Response("User not member of group",
status=status.HTTP_400_BAD_REQUEST)
group.members.remove(request.user)
return Response(status=status.HTTP_200_OK)
## Instruction:
Fix permissions for groups endpoint
## Code After:
from rest_framework import filters
from rest_framework import status, viewsets
from rest_framework.decorators import detail_route
from rest_framework.permissions import IsAuthenticated, IsAuthenticatedOrReadOnly, BasePermission
from rest_framework.response import Response
from yunity.groups.serializers import GroupSerializer
from yunity.groups.models import Group as GroupModel
class IsMember(BasePermission):
message = 'You are not a member.'
def has_object_permission(self, request, view, obj):
return request.user in obj.members.all()
class GroupViewSet(viewsets.ModelViewSet):
queryset = GroupModel.objects.all()
serializer_class = GroupSerializer
filter_fields = ('members',)
filter_backends = (filters.SearchFilter,)
search_fields = ('name', 'description')
def get_permissions(self):
if self.action in ('update', 'partial_update', 'destroy'):
self.permission_classes = (IsMember,)
else:
self.permission_classes = (IsAuthenticatedOrReadOnly,)
return super().get_permissions()
@detail_route(methods=['POST', 'GET'],
permission_classes=(IsAuthenticated,))
def join(self, request, pk=None):
group = self.get_object()
group.members.add(request.user)
return Response(status=status.HTTP_200_OK)
@detail_route(methods=['POST', 'GET'],
permission_classes=(IsAuthenticated,))
def leave(self, request, pk=None):
group = self.get_object()
if not group.members.filter(id=request.user.id).exists():
return Response("User not member of group",
status=status.HTTP_400_BAD_REQUEST)
group.members.remove(request.user)
return Response(status=status.HTTP_200_OK)
|
0f7ebec0442da08b12cd88f2558146d5c5a551ad | K2fov/tests/test_plot.py | K2fov/tests/test_plot.py | """Tests K2fov.plot"""
from .. import plot
def test_basics():
"""Make sure this runs without exception."""
try:
import matplotlib
plot.create_context_plot(180, 0)
plot.create_context_plot_zoomed(180, 0)
except ImportError:
pass
| """Tests K2fov.plot"""
from .. import plot
"""
def test_basics():
# Make sure this runs without exception.
try:
import matplotlib
plot.create_context_plot(180, 0)
plot.create_context_plot_zoomed(180, 0)
except ImportError:
pass
"""
| Simplify plot test for now | Simplify plot test for now
| Python | mit | KeplerGO/K2fov,mrtommyb/K2fov | """Tests K2fov.plot"""
from .. import plot
-
+ """
def test_basics():
- """Make sure this runs without exception."""
+ # Make sure this runs without exception.
try:
import matplotlib
plot.create_context_plot(180, 0)
plot.create_context_plot_zoomed(180, 0)
except ImportError:
pass
+ """
| Simplify plot test for now | ## Code Before:
"""Tests K2fov.plot"""
from .. import plot
def test_basics():
"""Make sure this runs without exception."""
try:
import matplotlib
plot.create_context_plot(180, 0)
plot.create_context_plot_zoomed(180, 0)
except ImportError:
pass
## Instruction:
Simplify plot test for now
## Code After:
"""Tests K2fov.plot"""
from .. import plot
"""
def test_basics():
# Make sure this runs without exception.
try:
import matplotlib
plot.create_context_plot(180, 0)
plot.create_context_plot_zoomed(180, 0)
except ImportError:
pass
"""
|
3427b2583c38ed7ec5239c36faa82536f3f95a3b | automata/pda/stack.py | automata/pda/stack.py | """Classes and methods for working with PDA stacks."""
class PDAStack(object):
"""A PDA stack."""
def __init__(self, stack, **kwargs):
"""Initialize the new PDA stack."""
if isinstance(stack, PDAStack):
self._init_from_stack_obj(stack)
else:
self.stack = list(stack)
def _init_from_stack_obj(self, stack_obj):
"""Initialize this Stack as a deep copy of the given Stack."""
self.__init__(stack_obj.stack)
def top(self):
"""Return the symbol at the top of the stack."""
if self.stack:
return self.stack[-1]
else:
return ''
def pop(self):
"""Pop the stack top from the stack."""
self.stack.pop()
def replace(self, symbols):
"""
Replace the top of the stack with the given symbols.
The first symbol in the given sequence becomes the new stack top.
"""
self.stack.pop()
self.stack.extend(reversed(symbols))
def copy(self):
"""Return a deep copy of the stack."""
return self.__class__(self)
def __len__(self):
"""Return the number of symbols on the stack."""
return len(self.stack)
def __iter__(self):
"""Return an interator for the stack."""
return iter(self.stack)
def __repr__(self):
"""Return a string representation of the stack."""
return '{}({})'.format(self.__class__.__name__, self.stack)
def __eq__(self, other):
"""Check if two stacks are equal."""
return self.__dict__ == other.__dict__
| """Classes and methods for working with PDA stacks."""
class PDAStack(object):
"""A PDA stack."""
def __init__(self, stack):
"""Initialize the new PDA stack."""
self.stack = list(stack)
def top(self):
"""Return the symbol at the top of the stack."""
if self.stack:
return self.stack[-1]
else:
return ''
def pop(self):
"""Pop the stack top from the stack."""
self.stack.pop()
def replace(self, symbols):
"""
Replace the top of the stack with the given symbols.
The first symbol in the given sequence becomes the new stack top.
"""
self.stack.pop()
self.stack.extend(reversed(symbols))
def copy(self):
"""Return a deep copy of the stack."""
return self.__class__(**self.__dict__)
def __len__(self):
"""Return the number of symbols on the stack."""
return len(self.stack)
def __iter__(self):
"""Return an interator for the stack."""
return iter(self.stack)
def __repr__(self):
"""Return a string representation of the stack."""
return '{}({})'.format(self.__class__.__name__, self.stack)
def __eq__(self, other):
"""Check if two stacks are equal."""
return self.__dict__ == other.__dict__
| Remove copy constructor for PDAStack | Remove copy constructor for PDAStack
The copy() method is already sufficient.
| Python | mit | caleb531/automata | """Classes and methods for working with PDA stacks."""
class PDAStack(object):
"""A PDA stack."""
- def __init__(self, stack, **kwargs):
+ def __init__(self, stack):
"""Initialize the new PDA stack."""
- if isinstance(stack, PDAStack):
- self._init_from_stack_obj(stack)
- else:
- self.stack = list(stack)
+ self.stack = list(stack)
-
- def _init_from_stack_obj(self, stack_obj):
- """Initialize this Stack as a deep copy of the given Stack."""
- self.__init__(stack_obj.stack)
def top(self):
"""Return the symbol at the top of the stack."""
if self.stack:
return self.stack[-1]
else:
return ''
def pop(self):
"""Pop the stack top from the stack."""
self.stack.pop()
def replace(self, symbols):
"""
Replace the top of the stack with the given symbols.
The first symbol in the given sequence becomes the new stack top.
"""
self.stack.pop()
self.stack.extend(reversed(symbols))
def copy(self):
"""Return a deep copy of the stack."""
- return self.__class__(self)
+ return self.__class__(**self.__dict__)
def __len__(self):
"""Return the number of symbols on the stack."""
return len(self.stack)
def __iter__(self):
"""Return an interator for the stack."""
return iter(self.stack)
def __repr__(self):
"""Return a string representation of the stack."""
return '{}({})'.format(self.__class__.__name__, self.stack)
def __eq__(self, other):
"""Check if two stacks are equal."""
return self.__dict__ == other.__dict__
| Remove copy constructor for PDAStack | ## Code Before:
"""Classes and methods for working with PDA stacks."""
class PDAStack(object):
"""A PDA stack."""
def __init__(self, stack, **kwargs):
"""Initialize the new PDA stack."""
if isinstance(stack, PDAStack):
self._init_from_stack_obj(stack)
else:
self.stack = list(stack)
def _init_from_stack_obj(self, stack_obj):
"""Initialize this Stack as a deep copy of the given Stack."""
self.__init__(stack_obj.stack)
def top(self):
"""Return the symbol at the top of the stack."""
if self.stack:
return self.stack[-1]
else:
return ''
def pop(self):
"""Pop the stack top from the stack."""
self.stack.pop()
def replace(self, symbols):
"""
Replace the top of the stack with the given symbols.
The first symbol in the given sequence becomes the new stack top.
"""
self.stack.pop()
self.stack.extend(reversed(symbols))
def copy(self):
"""Return a deep copy of the stack."""
return self.__class__(self)
def __len__(self):
"""Return the number of symbols on the stack."""
return len(self.stack)
def __iter__(self):
"""Return an interator for the stack."""
return iter(self.stack)
def __repr__(self):
"""Return a string representation of the stack."""
return '{}({})'.format(self.__class__.__name__, self.stack)
def __eq__(self, other):
"""Check if two stacks are equal."""
return self.__dict__ == other.__dict__
## Instruction:
Remove copy constructor for PDAStack
## Code After:
"""Classes and methods for working with PDA stacks."""
class PDAStack(object):
"""A PDA stack."""
def __init__(self, stack):
"""Initialize the new PDA stack."""
self.stack = list(stack)
def top(self):
"""Return the symbol at the top of the stack."""
if self.stack:
return self.stack[-1]
else:
return ''
def pop(self):
"""Pop the stack top from the stack."""
self.stack.pop()
def replace(self, symbols):
"""
Replace the top of the stack with the given symbols.
The first symbol in the given sequence becomes the new stack top.
"""
self.stack.pop()
self.stack.extend(reversed(symbols))
def copy(self):
"""Return a deep copy of the stack."""
return self.__class__(**self.__dict__)
def __len__(self):
"""Return the number of symbols on the stack."""
return len(self.stack)
def __iter__(self):
"""Return an interator for the stack."""
return iter(self.stack)
def __repr__(self):
"""Return a string representation of the stack."""
return '{}({})'.format(self.__class__.__name__, self.stack)
def __eq__(self, other):
"""Check if two stacks are equal."""
return self.__dict__ == other.__dict__
|
3990e3aa64cff288def07ee36e24026cc15282c0 | taiga/projects/issues/serializers.py | taiga/projects/issues/serializers.py |
from rest_framework import serializers
from taiga.base.serializers import PickleField, NeighborsSerializerMixin
from . import models
class IssueSerializer(serializers.ModelSerializer):
tags = PickleField(required=False)
comment = serializers.SerializerMethodField("get_comment")
is_closed = serializers.Field(source="is_closed")
class Meta:
model = models.Issue
def get_comment(self, obj):
return ""
class IssueNeighborsSerializer(NeighborsSerializerMixin, IssueSerializer):
def serialize_neighbor(self, neighbor):
return NeighborIssueSerializer(neighbor).data
class NeighborIssueSerializer(serializers.ModelSerializer):
class Meta:
model = models.Issue
fields = ("id", "ref", "subject")
depth = 0
|
from rest_framework import serializers
from taiga.base.serializers import PickleField, NeighborsSerializerMixin
from . import models
class IssueSerializer(serializers.ModelSerializer):
tags = PickleField(required=False)
is_closed = serializers.Field(source="is_closed")
class Meta:
model = models.Issue
class IssueNeighborsSerializer(NeighborsSerializerMixin, IssueSerializer):
def serialize_neighbor(self, neighbor):
return NeighborIssueSerializer(neighbor).data
class NeighborIssueSerializer(serializers.ModelSerializer):
class Meta:
model = models.Issue
fields = ("id", "ref", "subject")
depth = 0
| Remove unnecessary field from IssueSerializer | Remove unnecessary field from IssueSerializer
| Python | agpl-3.0 | forging2012/taiga-back,EvgeneOskin/taiga-back,xdevelsistemas/taiga-back-community,seanchen/taiga-back,bdang2012/taiga-back-casting,Rademade/taiga-back,crr0004/taiga-back,dayatz/taiga-back,rajiteh/taiga-back,dycodedev/taiga-back,crr0004/taiga-back,obimod/taiga-back,Zaneh-/bearded-tribble-back,seanchen/taiga-back,gauravjns/taiga-back,joshisa/taiga-back,19kestier/taiga-back,jeffdwyatt/taiga-back,taigaio/taiga-back,WALR/taiga-back,joshisa/taiga-back,astronaut1712/taiga-back,taigaio/taiga-back,coopsource/taiga-back,gam-phon/taiga-back,Rademade/taiga-back,obimod/taiga-back,obimod/taiga-back,CMLL/taiga-back,frt-arch/taiga-back,dycodedev/taiga-back,bdang2012/taiga-back-casting,Tigerwhit4/taiga-back,19kestier/taiga-back,EvgeneOskin/taiga-back,EvgeneOskin/taiga-back,astagi/taiga-back,bdang2012/taiga-back-casting,Zaneh-/bearded-tribble-back,dayatz/taiga-back,CoolCloud/taiga-back,astronaut1712/taiga-back,jeffdwyatt/taiga-back,crr0004/taiga-back,WALR/taiga-back,gam-phon/taiga-back,CMLL/taiga-back,seanchen/taiga-back,astagi/taiga-back,gauravjns/taiga-back,gam-phon/taiga-back,WALR/taiga-back,jeffdwyatt/taiga-back,Tigerwhit4/taiga-back,Zaneh-/bearded-tribble-back,seanchen/taiga-back,xdevelsistemas/taiga-back-community,coopsource/taiga-back,astagi/taiga-back,EvgeneOskin/taiga-back,obimod/taiga-back,gam-phon/taiga-back,coopsource/taiga-back,CoolCloud/taiga-back,rajiteh/taiga-back,dycodedev/taiga-back,bdang2012/taiga-back-casting,19kestier/taiga-back,astronaut1712/taiga-back,forging2012/taiga-back,CMLL/taiga-back,frt-arch/taiga-back,astagi/taiga-back,WALR/taiga-back,forging2012/taiga-back,rajiteh/taiga-back,frt-arch/taiga-back,Rademade/taiga-back,xdevelsistemas/taiga-back-community,taigaio/taiga-back,joshisa/taiga-back,gauravjns/taiga-back,Rademade/taiga-back,crr0004/taiga-back,forging2012/taiga-back,joshisa/taiga-back,CMLL/taiga-back,dycodedev/taiga-back,coopsource/taiga-back,CoolCloud/taiga-back,Rademade/taiga-back,astronaut1712/taiga-back,jeffdwyatt/taiga-back
,CoolCloud/taiga-back,gauravjns/taiga-back,rajiteh/taiga-back,dayatz/taiga-back,Tigerwhit4/taiga-back,Tigerwhit4/taiga-back |
from rest_framework import serializers
from taiga.base.serializers import PickleField, NeighborsSerializerMixin
from . import models
class IssueSerializer(serializers.ModelSerializer):
tags = PickleField(required=False)
- comment = serializers.SerializerMethodField("get_comment")
is_closed = serializers.Field(source="is_closed")
class Meta:
model = models.Issue
-
- def get_comment(self, obj):
- return ""
class IssueNeighborsSerializer(NeighborsSerializerMixin, IssueSerializer):
def serialize_neighbor(self, neighbor):
return NeighborIssueSerializer(neighbor).data
class NeighborIssueSerializer(serializers.ModelSerializer):
class Meta:
model = models.Issue
fields = ("id", "ref", "subject")
depth = 0
| Remove unnecessary field from IssueSerializer | ## Code Before:
from rest_framework import serializers
from taiga.base.serializers import PickleField, NeighborsSerializerMixin
from . import models
class IssueSerializer(serializers.ModelSerializer):
tags = PickleField(required=False)
comment = serializers.SerializerMethodField("get_comment")
is_closed = serializers.Field(source="is_closed")
class Meta:
model = models.Issue
def get_comment(self, obj):
return ""
class IssueNeighborsSerializer(NeighborsSerializerMixin, IssueSerializer):
def serialize_neighbor(self, neighbor):
return NeighborIssueSerializer(neighbor).data
class NeighborIssueSerializer(serializers.ModelSerializer):
class Meta:
model = models.Issue
fields = ("id", "ref", "subject")
depth = 0
## Instruction:
Remove unnecessary field from IssueSerializer
## Code After:
from rest_framework import serializers
from taiga.base.serializers import PickleField, NeighborsSerializerMixin
from . import models
class IssueSerializer(serializers.ModelSerializer):
tags = PickleField(required=False)
is_closed = serializers.Field(source="is_closed")
class Meta:
model = models.Issue
class IssueNeighborsSerializer(NeighborsSerializerMixin, IssueSerializer):
def serialize_neighbor(self, neighbor):
return NeighborIssueSerializer(neighbor).data
class NeighborIssueSerializer(serializers.ModelSerializer):
class Meta:
model = models.Issue
fields = ("id", "ref", "subject")
depth = 0
|
85e853a63d7fed79b931b337bb9e6678077cf8d5 | tests/integration/ssh/test_grains.py | tests/integration/ssh/test_grains.py | from __future__ import absolute_import
# Import Salt Testing Libs
from tests.support.case import SSHCase
from tests.support.unit import skipIf
# Import Salt Libs
import salt.utils
@skipIf(salt.utils.is_windows(), 'salt-ssh not available on Windows')
class SSHGrainsTest(SSHCase):
'''
testing grains with salt-ssh
'''
def test_grains_items(self):
'''
test grains.items with salt-ssh
'''
ret = self.run_function('grains.items')
self.assertEqual(ret['kernel'], 'Linux')
self.assertTrue(isinstance(ret, dict))
| from __future__ import absolute_import
# Import Salt Testing Libs
from tests.support.case import SSHCase
from tests.support.unit import skipIf
# Import Salt Libs
import salt.utils
@skipIf(salt.utils.is_windows(), 'salt-ssh not available on Windows')
class SSHGrainsTest(SSHCase):
'''
testing grains with salt-ssh
'''
def test_grains_items(self):
'''
test grains.items with salt-ssh
'''
ret = self.run_function('grains.items')
grain = 'Linux'
if salt.utils.platform.is_darwin():
grain = 'Darwin'
self.assertEqual(ret['kernel'], grain)
self.assertTrue(isinstance(ret, dict))
| Add darwin value for ssh grain items tests on MacOSX | Add darwin value for ssh grain items tests on MacOSX
| Python | apache-2.0 | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt | from __future__ import absolute_import
# Import Salt Testing Libs
from tests.support.case import SSHCase
from tests.support.unit import skipIf
# Import Salt Libs
import salt.utils
@skipIf(salt.utils.is_windows(), 'salt-ssh not available on Windows')
class SSHGrainsTest(SSHCase):
'''
testing grains with salt-ssh
'''
def test_grains_items(self):
'''
test grains.items with salt-ssh
'''
ret = self.run_function('grains.items')
+ grain = 'Linux'
+ if salt.utils.platform.is_darwin():
+ grain = 'Darwin'
- self.assertEqual(ret['kernel'], 'Linux')
+ self.assertEqual(ret['kernel'], grain)
self.assertTrue(isinstance(ret, dict))
| Add darwin value for ssh grain items tests on MacOSX | ## Code Before:
from __future__ import absolute_import
# Import Salt Testing Libs
from tests.support.case import SSHCase
from tests.support.unit import skipIf
# Import Salt Libs
import salt.utils
@skipIf(salt.utils.is_windows(), 'salt-ssh not available on Windows')
class SSHGrainsTest(SSHCase):
'''
testing grains with salt-ssh
'''
def test_grains_items(self):
'''
test grains.items with salt-ssh
'''
ret = self.run_function('grains.items')
self.assertEqual(ret['kernel'], 'Linux')
self.assertTrue(isinstance(ret, dict))
## Instruction:
Add darwin value for ssh grain items tests on MacOSX
## Code After:
from __future__ import absolute_import
# Import Salt Testing Libs
from tests.support.case import SSHCase
from tests.support.unit import skipIf
# Import Salt Libs
import salt.utils
@skipIf(salt.utils.is_windows(), 'salt-ssh not available on Windows')
class SSHGrainsTest(SSHCase):
'''
testing grains with salt-ssh
'''
def test_grains_items(self):
'''
test grains.items with salt-ssh
'''
ret = self.run_function('grains.items')
grain = 'Linux'
if salt.utils.platform.is_darwin():
grain = 'Darwin'
self.assertEqual(ret['kernel'], grain)
self.assertTrue(isinstance(ret, dict))
|
79bbc95abd2c1b41bcbd19d9ce1ffa330bd76b7a | source/views.py | source/views.py | from multiprocessing.pool import ThreadPool
from django.shortcuts import render
from .forms import SearchForm
from source import view_models
def index(request):
if request.method == 'GET':
form = SearchForm(request.GET)
if form.is_valid():
title = request.GET.__getitem__('movie_title').__str__()
pool = ThreadPool(processes=5)
async_rt_rating = pool.apply_async(view_models.get_rt_rating, (title,))
async_bluray_rating = pool.apply_async(view_models.get_bluray_rating, (title,))
async_tech_specs = pool.apply_async(view_models.get_tech_spec, (title,))
async_price = pool.apply_async(view_models.get_price, (title,))
async_artwork = pool.apply_async(view_models.get_artwork, (title,))
rt_rating = async_rt_rating.get()
bluray_rating = async_bluray_rating.get()
tech_specs = async_tech_specs.get()
price = async_price.get()
artwork = async_artwork.get()
return render(request, 'index.html', {'form': form, 'rt_rating': rt_rating, 'bluray_rating': bluray_rating, 'tech_specs': tech_specs, 'price': price, 'artwork': artwork})
else:
form = SearchForm()
return render(request, 'index.html', {'form': form})
| from multiprocessing.pool import ThreadPool
from django.shortcuts import render
from .forms import SearchForm
from source import view_models
def index(request):
if request.method == 'GET':
form = SearchForm(request.GET)
if form.is_valid():
title = request.GET.__getitem__('movie_title').__str__()
pool = ThreadPool(processes=5)
async_rt_rating = pool.apply_async(view_models.get_rt_rating, (title,))
async_bluray_rating = pool.apply_async(view_models.get_bluray_rating, (title,))
async_tech_specs = pool.apply_async(view_models.get_tech_spec, (title,))
async_price = pool.apply_async(view_models.get_price, (title,))
async_artwork = pool.apply_async(view_models.get_artwork, (title,))
pool.close()
rt_rating = async_rt_rating.get()
bluray_rating = async_bluray_rating.get()
tech_specs = async_tech_specs.get()
price = async_price.get()
artwork = async_artwork.get()
pool.join()
return render(request, 'index.html', {'form': form, 'rt_rating': rt_rating, 'bluray_rating': bluray_rating, 'tech_specs': tech_specs, 'price': price, 'artwork': artwork})
else:
form = SearchForm()
return render(request, 'index.html', {'form': form})
| Join threads or else the number of running threads increments by 5 at each request and will never stop until main process is killed | Join threads or else the number of running threads increments by 5 at each request and will never stop until main process is killed
| Python | mit | jeremyrea/caterblu,jeremyrea/caterblu,jeremyrea/caterblu,jeremyrea/caterblu | from multiprocessing.pool import ThreadPool
from django.shortcuts import render
from .forms import SearchForm
from source import view_models
def index(request):
if request.method == 'GET':
form = SearchForm(request.GET)
if form.is_valid():
title = request.GET.__getitem__('movie_title').__str__()
pool = ThreadPool(processes=5)
async_rt_rating = pool.apply_async(view_models.get_rt_rating, (title,))
async_bluray_rating = pool.apply_async(view_models.get_bluray_rating, (title,))
async_tech_specs = pool.apply_async(view_models.get_tech_spec, (title,))
async_price = pool.apply_async(view_models.get_price, (title,))
async_artwork = pool.apply_async(view_models.get_artwork, (title,))
+ pool.close()
rt_rating = async_rt_rating.get()
bluray_rating = async_bluray_rating.get()
tech_specs = async_tech_specs.get()
price = async_price.get()
artwork = async_artwork.get()
+ pool.join()
return render(request, 'index.html', {'form': form, 'rt_rating': rt_rating, 'bluray_rating': bluray_rating, 'tech_specs': tech_specs, 'price': price, 'artwork': artwork})
else:
form = SearchForm()
return render(request, 'index.html', {'form': form})
| Join threads or else the number of running threads increments by 5 at each request and will never stop until main process is killed | ## Code Before:
from multiprocessing.pool import ThreadPool
from django.shortcuts import render
from .forms import SearchForm
from source import view_models
def index(request):
if request.method == 'GET':
form = SearchForm(request.GET)
if form.is_valid():
title = request.GET.__getitem__('movie_title').__str__()
pool = ThreadPool(processes=5)
async_rt_rating = pool.apply_async(view_models.get_rt_rating, (title,))
async_bluray_rating = pool.apply_async(view_models.get_bluray_rating, (title,))
async_tech_specs = pool.apply_async(view_models.get_tech_spec, (title,))
async_price = pool.apply_async(view_models.get_price, (title,))
async_artwork = pool.apply_async(view_models.get_artwork, (title,))
rt_rating = async_rt_rating.get()
bluray_rating = async_bluray_rating.get()
tech_specs = async_tech_specs.get()
price = async_price.get()
artwork = async_artwork.get()
return render(request, 'index.html', {'form': form, 'rt_rating': rt_rating, 'bluray_rating': bluray_rating, 'tech_specs': tech_specs, 'price': price, 'artwork': artwork})
else:
form = SearchForm()
return render(request, 'index.html', {'form': form})
## Instruction:
Join threads or else the number of running threads increments by 5 at each request and will never stop until main process is killed
## Code After:
from multiprocessing.pool import ThreadPool
from django.shortcuts import render
from .forms import SearchForm
from source import view_models
def index(request):
if request.method == 'GET':
form = SearchForm(request.GET)
if form.is_valid():
title = request.GET.__getitem__('movie_title').__str__()
pool = ThreadPool(processes=5)
async_rt_rating = pool.apply_async(view_models.get_rt_rating, (title,))
async_bluray_rating = pool.apply_async(view_models.get_bluray_rating, (title,))
async_tech_specs = pool.apply_async(view_models.get_tech_spec, (title,))
async_price = pool.apply_async(view_models.get_price, (title,))
async_artwork = pool.apply_async(view_models.get_artwork, (title,))
pool.close()
rt_rating = async_rt_rating.get()
bluray_rating = async_bluray_rating.get()
tech_specs = async_tech_specs.get()
price = async_price.get()
artwork = async_artwork.get()
pool.join()
return render(request, 'index.html', {'form': form, 'rt_rating': rt_rating, 'bluray_rating': bluray_rating, 'tech_specs': tech_specs, 'price': price, 'artwork': artwork})
else:
form = SearchForm()
return render(request, 'index.html', {'form': form})
|
93926a9986ab4ba7704cd564d0052b6e60ff38cb | casepro/pods/base.py | casepro/pods/base.py | import json
from confmodel import fields, Config as ConfmodelConfig
from django.apps import AppConfig
class PodConfig(ConfmodelConfig):
'''
This is the config that all pods should use as the base for their own
config.
'''
index = fields.ConfigInt(
"A unique identifier for the specific instance of this pod."
"Automatically determined and set in the pod registry.",
required=True)
title = fields.ConfigText(
"The title to show in the UI for this pod",
default=None)
class Pod(object):
'''
The base class for all pod plugins.
'''
def __init__(self, pod_type, config):
self.pod_type = pod_type
self.config = config
@property
def config_json(self):
return json.dumps(self.config._config_data)
def read_data(self, params):
'''Should return the data that should be used to create the display
for the pod.'''
return {}
def perform_action(self, params):
'''Should perform the action specified by params.'''
return {}
class PodPlugin(AppConfig):
name = 'casepro.pods'
label = 'base_pod'
pod_class = Pod
config_class = PodConfig
title = 'Pod'
controller = None
directive = None
| import json
from confmodel import fields, Config as ConfmodelConfig
from django.apps import AppConfig
class PodConfig(ConfmodelConfig):
'''
This is the config that all pods should use as the base for their own
config.
'''
index = fields.ConfigInt(
"A unique identifier for the specific instance of this pod."
"Automatically determined and set in the pod registry.",
required=True)
title = fields.ConfigText(
"The title to show in the UI for this pod",
default=None)
class Pod(object):
'''
The base class for all pod plugins.
'''
def __init__(self, pod_type, config):
self.pod_type = pod_type
self.config = config
@property
def config_json(self):
return json.dumps(self.config._config_data)
def read_data(self, params):
'''Should return the data that should be used to create the display
for the pod.'''
return {}
def perform_action(self, params):
'''Should perform the action specified by params.'''
return {}
class PodPlugin(AppConfig):
name = 'casepro.pods'
pod_class = Pod
config_class = PodConfig
# django application label, used to determine which pod type to use when
# loading pods configured in `settings.PODS`
label = 'base_pod'
# default title to use when configuring each pod
title = 'Pod'
# override to use a different angular controller
controller = 'PodController'
# override to use a different angular directive
directive = 'pod'
# override with paths to custom scripts that the pod needs
scripts = ()
# override with paths to custom styles that the pod needs
styles = ()
| Add the class-level vars we need for pod angular components to PodPlugin | Add the class-level vars we need for pod angular components to PodPlugin
| Python | bsd-3-clause | rapidpro/casepro,praekelt/casepro,xkmato/casepro,rapidpro/casepro,praekelt/casepro,xkmato/casepro,praekelt/casepro,rapidpro/casepro | import json
from confmodel import fields, Config as ConfmodelConfig
from django.apps import AppConfig
class PodConfig(ConfmodelConfig):
'''
This is the config that all pods should use as the base for their own
config.
'''
index = fields.ConfigInt(
"A unique identifier for the specific instance of this pod."
"Automatically determined and set in the pod registry.",
required=True)
title = fields.ConfigText(
"The title to show in the UI for this pod",
default=None)
class Pod(object):
'''
The base class for all pod plugins.
'''
def __init__(self, pod_type, config):
self.pod_type = pod_type
self.config = config
@property
def config_json(self):
return json.dumps(self.config._config_data)
def read_data(self, params):
'''Should return the data that should be used to create the display
for the pod.'''
return {}
def perform_action(self, params):
'''Should perform the action specified by params.'''
return {}
class PodPlugin(AppConfig):
name = 'casepro.pods'
- label = 'base_pod'
pod_class = Pod
config_class = PodConfig
+ # django application label, used to determine which pod type to use when
+ # loading pods configured in `settings.PODS`
+ label = 'base_pod'
+
+ # default title to use when configuring each pod
title = 'Pod'
+ # override to use a different angular controller
- controller = None
+ controller = 'PodController'
+ # override to use a different angular directive
- directive = None
+ directive = 'pod'
+ # override with paths to custom scripts that the pod needs
+ scripts = ()
+
+ # override with paths to custom styles that the pod needs
+ styles = ()
+ | Add the class-level vars we need for pod angular components to PodPlugin | ## Code Before:
import json
from confmodel import fields, Config as ConfmodelConfig
from django.apps import AppConfig
class PodConfig(ConfmodelConfig):
'''
This is the config that all pods should use as the base for their own
config.
'''
index = fields.ConfigInt(
"A unique identifier for the specific instance of this pod."
"Automatically determined and set in the pod registry.",
required=True)
title = fields.ConfigText(
"The title to show in the UI for this pod",
default=None)
class Pod(object):
'''
The base class for all pod plugins.
'''
def __init__(self, pod_type, config):
self.pod_type = pod_type
self.config = config
@property
def config_json(self):
return json.dumps(self.config._config_data)
def read_data(self, params):
'''Should return the data that should be used to create the display
for the pod.'''
return {}
def perform_action(self, params):
'''Should perform the action specified by params.'''
return {}
class PodPlugin(AppConfig):
name = 'casepro.pods'
label = 'base_pod'
pod_class = Pod
config_class = PodConfig
title = 'Pod'
controller = None
directive = None
## Instruction:
Add the class-level vars we need for pod angular components to PodPlugin
## Code After:
import json
from confmodel import fields, Config as ConfmodelConfig
from django.apps import AppConfig
class PodConfig(ConfmodelConfig):
'''
This is the config that all pods should use as the base for their own
config.
'''
index = fields.ConfigInt(
"A unique identifier for the specific instance of this pod."
"Automatically determined and set in the pod registry.",
required=True)
title = fields.ConfigText(
"The title to show in the UI for this pod",
default=None)
class Pod(object):
'''
The base class for all pod plugins.
'''
def __init__(self, pod_type, config):
self.pod_type = pod_type
self.config = config
@property
def config_json(self):
return json.dumps(self.config._config_data)
def read_data(self, params):
'''Should return the data that should be used to create the display
for the pod.'''
return {}
def perform_action(self, params):
'''Should perform the action specified by params.'''
return {}
class PodPlugin(AppConfig):
name = 'casepro.pods'
pod_class = Pod
config_class = PodConfig
# django application label, used to determine which pod type to use when
# loading pods configured in `settings.PODS`
label = 'base_pod'
# default title to use when configuring each pod
title = 'Pod'
# override to use a different angular controller
controller = 'PodController'
# override to use a different angular directive
directive = 'pod'
# override with paths to custom scripts that the pod needs
scripts = ()
# override with paths to custom styles that the pod needs
styles = ()
|
aceeac7e9dd2735add937bc7141cfdb29b6201c7 | pywatson/watson.py | pywatson/watson.py | from pywatson.answer.answer import Answer
from pywatson.question.question import Question
import requests
class Watson:
"""The Watson API adapter class"""
def __init__(self, url, username, password):
self.url = url
self.username = username
self.password = password
def ask_question(self, question_text, question=None):
"""Ask Watson a question via the Question and Answer API
:param question_text: question to ask Watson
:type question_text: str
:param question: if question_text is not provided, a Question object
representing the question to ask Watson
:type question: Question
:return: Answer
"""
if question is not None:
q = question.to_dict()
else:
q = Question(question_text).to_dict()
r = requests.post(self.url + '/question', json=q)
return Answer(r.json())
| from pywatson.answer.answer import Answer
from pywatson.question.question import Question
import requests
class Watson(object):
"""The Watson API adapter class"""
def __init__(self, url, username, password):
self.url = url
self.username = username
self.password = password
def ask_question(self, question_text, question=None):
"""Ask Watson a question via the Question and Answer API
:param question_text: question to ask Watson
:type question_text: str
:param question: if question_text is not provided, a Question object
representing the question to ask Watson
:type question: Question
:return: Answer
"""
if question is not None:
q = question.__dict__
else:
q = Question(question_text).__dict__
r = requests.post(self.url + '/question', json=q)
return Answer(r.json())
| Use __dict__ instead of to_dict() | Use __dict__ instead of to_dict()
| Python | mit | sherlocke/pywatson | from pywatson.answer.answer import Answer
from pywatson.question.question import Question
import requests
- class Watson:
+ class Watson(object):
"""The Watson API adapter class"""
def __init__(self, url, username, password):
self.url = url
self.username = username
self.password = password
def ask_question(self, question_text, question=None):
"""Ask Watson a question via the Question and Answer API
:param question_text: question to ask Watson
:type question_text: str
:param question: if question_text is not provided, a Question object
representing the question to ask Watson
:type question: Question
:return: Answer
"""
if question is not None:
- q = question.to_dict()
+ q = question.__dict__
else:
- q = Question(question_text).to_dict()
+ q = Question(question_text).__dict__
r = requests.post(self.url + '/question', json=q)
return Answer(r.json())
| Use __dict__ instead of to_dict() | ## Code Before:
from pywatson.answer.answer import Answer
from pywatson.question.question import Question
import requests
class Watson:
"""The Watson API adapter class"""
def __init__(self, url, username, password):
self.url = url
self.username = username
self.password = password
def ask_question(self, question_text, question=None):
"""Ask Watson a question via the Question and Answer API
:param question_text: question to ask Watson
:type question_text: str
:param question: if question_text is not provided, a Question object
representing the question to ask Watson
:type question: Question
:return: Answer
"""
if question is not None:
q = question.to_dict()
else:
q = Question(question_text).to_dict()
r = requests.post(self.url + '/question', json=q)
return Answer(r.json())
## Instruction:
Use __dict__ instead of to_dict()
## Code After:
from pywatson.answer.answer import Answer
from pywatson.question.question import Question
import requests
class Watson(object):
"""The Watson API adapter class"""
def __init__(self, url, username, password):
self.url = url
self.username = username
self.password = password
def ask_question(self, question_text, question=None):
"""Ask Watson a question via the Question and Answer API
:param question_text: question to ask Watson
:type question_text: str
:param question: if question_text is not provided, a Question object
representing the question to ask Watson
:type question: Question
:return: Answer
"""
if question is not None:
q = question.__dict__
else:
q = Question(question_text).__dict__
r = requests.post(self.url + '/question', json=q)
return Answer(r.json())
|
d7c9bcbf25a6b45a462216f426608474aa66ceb0 | mysite/missions/models.py | mysite/missions/models.py | from django.db import models
class MissionStep(models.Model):
pass
class MissionStepCompletion(models.Model):
person = models.ForeignKey('profile.Person')
step = models.ForeignKey('MissionStep')
class Meta:
unique_together = ('person', 'step')
| from django.db import models
class Step(models.Model):
pass
class StepCompletion(models.Model):
person = models.ForeignKey('profile.Person')
step = models.ForeignKey('Step')
class Meta:
unique_together = ('person', 'step')
| Remove the redundant "Mission" prefix from the mission model names. | Remove the redundant "Mission" prefix from the mission model names.
| Python | agpl-3.0 | heeraj123/oh-mainline,vipul-sharma20/oh-mainline,sudheesh001/oh-mainline,willingc/oh-mainline,jledbetter/openhatch,jledbetter/openhatch,moijes12/oh-mainline,openhatch/oh-mainline,mzdaniel/oh-mainline,openhatch/oh-mainline,jledbetter/openhatch,waseem18/oh-mainline,waseem18/oh-mainline,SnappleCap/oh-mainline,Changaco/oh-mainline,eeshangarg/oh-mainline,ehashman/oh-mainline,mzdaniel/oh-mainline,mzdaniel/oh-mainline,Changaco/oh-mainline,Changaco/oh-mainline,SnappleCap/oh-mainline,jledbetter/openhatch,onceuponatimeforever/oh-mainline,nirmeshk/oh-mainline,mzdaniel/oh-mainline,ojengwa/oh-mainline,ehashman/oh-mainline,openhatch/oh-mainline,eeshangarg/oh-mainline,nirmeshk/oh-mainline,nirmeshk/oh-mainline,campbe13/openhatch,heeraj123/oh-mainline,waseem18/oh-mainline,sudheesh001/oh-mainline,waseem18/oh-mainline,heeraj123/oh-mainline,vipul-sharma20/oh-mainline,campbe13/openhatch,willingc/oh-mainline,SnappleCap/oh-mainline,campbe13/openhatch,vipul-sharma20/oh-mainline,campbe13/openhatch,moijes12/oh-mainline,eeshangarg/oh-mainline,onceuponatimeforever/oh-mainline,moijes12/oh-mainline,SnappleCap/oh-mainline,willingc/oh-mainline,heeraj123/oh-mainline,mzdaniel/oh-mainline,mzdaniel/oh-mainline,moijes12/oh-mainline,ehashman/oh-mainline,moijes12/oh-mainline,Changaco/oh-mainline,vipul-sharma20/oh-mainline,Changaco/oh-mainline,onceuponatimeforever/oh-mainline,ojengwa/oh-mainline,onceuponatimeforever/oh-mainline,onceuponatimeforever/oh-mainline,sudheesh001/oh-mainline,heeraj123/oh-mainline,willingc/oh-mainline,SnappleCap/oh-mainline,sudheesh001/oh-mainline,jledbetter/openhatch,ehashman/oh-mainline,eeshangarg/oh-mainline,openhatch/oh-mainline,sudheesh001/oh-mainline,waseem18/oh-mainline,mzdaniel/oh-mainline,eeshangarg/oh-mainline,willingc/oh-mainline,nirmeshk/oh-mainline,campbe13/openhatch,vipul-sharma20/oh-mainline,ojengwa/oh-mainline,ojengwa/oh-mainline,ehashman/oh-mainline,ojengwa/oh-mainline,nirmeshk/oh-mainline,openhatch/oh-mainline | from django.db import models
- class MissionStep(models.Model):
+ class Step(models.Model):
pass
- class MissionStepCompletion(models.Model):
+ class StepCompletion(models.Model):
person = models.ForeignKey('profile.Person')
- step = models.ForeignKey('MissionStep')
+ step = models.ForeignKey('Step')
class Meta:
unique_together = ('person', 'step')
| Remove the redundant "Mission" prefix from the mission model names. | ## Code Before:
from django.db import models
class MissionStep(models.Model):
pass
class MissionStepCompletion(models.Model):
person = models.ForeignKey('profile.Person')
step = models.ForeignKey('MissionStep')
class Meta:
unique_together = ('person', 'step')
## Instruction:
Remove the redundant "Mission" prefix from the mission model names.
## Code After:
from django.db import models
class Step(models.Model):
pass
class StepCompletion(models.Model):
person = models.ForeignKey('profile.Person')
step = models.ForeignKey('Step')
class Meta:
unique_together = ('person', 'step')
|
a2e3f0590d5bd25993be5291c058c722896aa773 | tests/test_utils.py | tests/test_utils.py | import sys
import unittest
import numpy as np
import torch
sys.path.append("../metal")
from metal.utils import (
rargmax,
hard_to_soft,
recursive_merge_dicts
)
class UtilsTest(unittest.TestCase):
def test_rargmax(self):
x = np.array([2, 1, 2])
self.assertEqual(sorted(list(set(rargmax(x) for _ in range(10)))), [0, 2])
def test_hard_to_soft(self):
x = torch.tensor([1,2,2,1])
target = torch.tensor([
[1, 0],
[0, 1],
[0, 1],
[1, 0],
], dtype=torch.float)
self.assertTrue(((hard_to_soft(x, 2) == target).sum() == 8))
def test_recursive_merge_dicts(self):
x = {
'foo': {'Foo': {'FOO': 1}},
'bar': 2,
'baz': 3,
}
y = {
'FOO': 4,
'bar': 5,
}
z = {
'foo': 6
}
recursive_merge_dicts(x, y, verbose=False)
self.assertEqual(x['bar'], 5)
self.assertEqual(x['foo']['Foo']['FOO'], 4)
with self.assertRaises(ValueError):
recursive_merge_dicts(x, z, verbose=False)
if __name__ == '__main__':
unittest.main() | import sys
import unittest
import numpy as np
import torch
sys.path.append("../metal")
from metal.utils import (
rargmax,
hard_to_soft,
recursive_merge_dicts
)
class UtilsTest(unittest.TestCase):
def test_rargmax(self):
x = np.array([2, 1, 2])
np.random.seed(1)
self.assertEqual(sorted(list(set(rargmax(x) for _ in range(10)))), [0, 2])
def test_hard_to_soft(self):
x = torch.tensor([1,2,2,1])
target = torch.tensor([
[1, 0],
[0, 1],
[0, 1],
[1, 0],
], dtype=torch.float)
self.assertTrue(((hard_to_soft(x, 2) == target).sum() == 8))
def test_recursive_merge_dicts(self):
x = {
'foo': {'Foo': {'FOO': 1}},
'bar': 2,
'baz': 3,
}
y = {
'FOO': 4,
'bar': 5,
}
z = {
'foo': 6
}
recursive_merge_dicts(x, y, verbose=False)
self.assertEqual(x['bar'], 5)
self.assertEqual(x['foo']['Foo']['FOO'], 4)
with self.assertRaises(ValueError):
recursive_merge_dicts(x, z, verbose=False)
if __name__ == '__main__':
unittest.main() | Fix broken utils test with seed | Fix broken utils test with seed
| Python | apache-2.0 | HazyResearch/metal,HazyResearch/metal | import sys
import unittest
import numpy as np
import torch
sys.path.append("../metal")
from metal.utils import (
rargmax,
hard_to_soft,
recursive_merge_dicts
)
class UtilsTest(unittest.TestCase):
def test_rargmax(self):
x = np.array([2, 1, 2])
+ np.random.seed(1)
self.assertEqual(sorted(list(set(rargmax(x) for _ in range(10)))), [0, 2])
def test_hard_to_soft(self):
x = torch.tensor([1,2,2,1])
target = torch.tensor([
[1, 0],
[0, 1],
[0, 1],
[1, 0],
], dtype=torch.float)
self.assertTrue(((hard_to_soft(x, 2) == target).sum() == 8))
def test_recursive_merge_dicts(self):
x = {
'foo': {'Foo': {'FOO': 1}},
'bar': 2,
'baz': 3,
}
y = {
'FOO': 4,
'bar': 5,
}
z = {
'foo': 6
}
recursive_merge_dicts(x, y, verbose=False)
self.assertEqual(x['bar'], 5)
self.assertEqual(x['foo']['Foo']['FOO'], 4)
with self.assertRaises(ValueError):
recursive_merge_dicts(x, z, verbose=False)
if __name__ == '__main__':
unittest.main() | Fix broken utils test with seed | ## Code Before:
import sys
import unittest
import numpy as np
import torch
sys.path.append("../metal")
from metal.utils import (
rargmax,
hard_to_soft,
recursive_merge_dicts
)
class UtilsTest(unittest.TestCase):
def test_rargmax(self):
x = np.array([2, 1, 2])
self.assertEqual(sorted(list(set(rargmax(x) for _ in range(10)))), [0, 2])
def test_hard_to_soft(self):
x = torch.tensor([1,2,2,1])
target = torch.tensor([
[1, 0],
[0, 1],
[0, 1],
[1, 0],
], dtype=torch.float)
self.assertTrue(((hard_to_soft(x, 2) == target).sum() == 8))
def test_recursive_merge_dicts(self):
x = {
'foo': {'Foo': {'FOO': 1}},
'bar': 2,
'baz': 3,
}
y = {
'FOO': 4,
'bar': 5,
}
z = {
'foo': 6
}
recursive_merge_dicts(x, y, verbose=False)
self.assertEqual(x['bar'], 5)
self.assertEqual(x['foo']['Foo']['FOO'], 4)
with self.assertRaises(ValueError):
recursive_merge_dicts(x, z, verbose=False)
if __name__ == '__main__':
unittest.main()
## Instruction:
Fix broken utils test with seed
## Code After:
import sys
import unittest
import numpy as np
import torch
sys.path.append("../metal")
from metal.utils import (
rargmax,
hard_to_soft,
recursive_merge_dicts
)
class UtilsTest(unittest.TestCase):
def test_rargmax(self):
x = np.array([2, 1, 2])
np.random.seed(1)
self.assertEqual(sorted(list(set(rargmax(x) for _ in range(10)))), [0, 2])
def test_hard_to_soft(self):
x = torch.tensor([1,2,2,1])
target = torch.tensor([
[1, 0],
[0, 1],
[0, 1],
[1, 0],
], dtype=torch.float)
self.assertTrue(((hard_to_soft(x, 2) == target).sum() == 8))
def test_recursive_merge_dicts(self):
x = {
'foo': {'Foo': {'FOO': 1}},
'bar': 2,
'baz': 3,
}
y = {
'FOO': 4,
'bar': 5,
}
z = {
'foo': 6
}
recursive_merge_dicts(x, y, verbose=False)
self.assertEqual(x['bar'], 5)
self.assertEqual(x['foo']['Foo']['FOO'], 4)
with self.assertRaises(ValueError):
recursive_merge_dicts(x, z, verbose=False)
if __name__ == '__main__':
unittest.main() |
df5e6bdd03ad666afdd9b61745eec95afc08e9cb | tests/test_views.py | tests/test_views.py | """ Tests for the main server file. """
from unittest import TestCase
from unittest.mock import patch
from app import views
class ViewsTestCase(TestCase):
""" Our main server testcase. """
def test_ping(self):
self.assertEqual(views.ping(None, None), 'pong')
@patch('app.views.notify_recipient')
@patch('app.views.is_valid_pull_request')
def test_valid_pull_request(self, validator, notifier):
validator.return_value = True
notifier.return_value = True
result = views.pull_request({}, None)
self.assertEqual(result, 'Recipient Notified')
@patch('app.views.is_valid_pull_request')
def test_invalid_pull_request(self, validator):
validator.return_value = False
result = views.pull_request({}, None)
self.assertRegex(result, 'ignored')
| """ Tests for the main server file. """
from unittest import TestCase
from unittest.mock import patch
from app import views
class ViewsTestCase(TestCase):
""" Our main server testcase. """
def test_ping(self):
self.assertEqual(views.ping(None, None), 'pong')
@patch('app.views.notify_recipient')
@patch('app.views.is_valid_pull_request')
def test_valid_pull_request(self, validator, notifier):
""" Should notify upon a valid pull request. """
validator.return_value = True
notifier.return_value = True
result = views.pull_request({}, None)
self.assertEqual(result, 'Recipient Notified')
@patch('app.views.is_valid_pull_request')
def test_invalid_pull_request(self, validator):
""" Should ignore an invalid pull request. """
validator.return_value = False
result = views.pull_request({}, None)
self.assertRegex(result, 'ignored')
| Fix last code quality issues | Fix last code quality issues
| Python | mit | DobaTech/github-review-slack-notifier | """ Tests for the main server file. """
from unittest import TestCase
from unittest.mock import patch
from app import views
class ViewsTestCase(TestCase):
""" Our main server testcase. """
def test_ping(self):
self.assertEqual(views.ping(None, None), 'pong')
@patch('app.views.notify_recipient')
@patch('app.views.is_valid_pull_request')
def test_valid_pull_request(self, validator, notifier):
+ """ Should notify upon a valid pull request. """
validator.return_value = True
notifier.return_value = True
result = views.pull_request({}, None)
self.assertEqual(result, 'Recipient Notified')
@patch('app.views.is_valid_pull_request')
def test_invalid_pull_request(self, validator):
+ """ Should ignore an invalid pull request. """
validator.return_value = False
result = views.pull_request({}, None)
self.assertRegex(result, 'ignored')
| Fix last code quality issues | ## Code Before:
""" Tests for the main server file. """
from unittest import TestCase
from unittest.mock import patch
from app import views
class ViewsTestCase(TestCase):
""" Our main server testcase. """
def test_ping(self):
self.assertEqual(views.ping(None, None), 'pong')
@patch('app.views.notify_recipient')
@patch('app.views.is_valid_pull_request')
def test_valid_pull_request(self, validator, notifier):
validator.return_value = True
notifier.return_value = True
result = views.pull_request({}, None)
self.assertEqual(result, 'Recipient Notified')
@patch('app.views.is_valid_pull_request')
def test_invalid_pull_request(self, validator):
validator.return_value = False
result = views.pull_request({}, None)
self.assertRegex(result, 'ignored')
## Instruction:
Fix last code quality issues
## Code After:
""" Tests for the main server file. """
from unittest import TestCase
from unittest.mock import patch
from app import views
class ViewsTestCase(TestCase):
""" Our main server testcase. """
def test_ping(self):
self.assertEqual(views.ping(None, None), 'pong')
@patch('app.views.notify_recipient')
@patch('app.views.is_valid_pull_request')
def test_valid_pull_request(self, validator, notifier):
""" Should notify upon a valid pull request. """
validator.return_value = True
notifier.return_value = True
result = views.pull_request({}, None)
self.assertEqual(result, 'Recipient Notified')
@patch('app.views.is_valid_pull_request')
def test_invalid_pull_request(self, validator):
""" Should ignore an invalid pull request. """
validator.return_value = False
result = views.pull_request({}, None)
self.assertRegex(result, 'ignored')
|
39091c3390d121d48097d64526f40d0a09702673 | src/zeit/today/tests.py | src/zeit/today/tests.py | import pkg_resources
import zeit.cms.testing
product_config = """\
<product-config zeit.today>
today-xml-url file://{base}/today.xml
</product-config>
""".format(base=pkg_resources.resource_filename(__name__, '.'))
TodayLayer = zeit.cms.testing.ZCMLLayer('ftesting.zcml', product_config=(
product_config +
zeit.cms.testing.cms_product_config))
def test_suite():
return zeit.cms.testing.FunctionalDocFileSuite(
'README.txt',
'yesterday.txt',
layer=TodayLayer
)
| import pkg_resources
import zeit.cms.testing
product_config = """\
<product-config zeit.today>
today-xml-url file://{base}/today.xml
</product-config>
""".format(base=pkg_resources.resource_filename(__name__, '.'))
CONFIG_LAYER = zeit.cms.testing.ProductConfigLayer(product_config, bases=(
zeit.cms.testing.CONFIG_LAYER,))
ZCML_LAYER = zeit.cms.testing.ZCMLLayer(bases=(CONFIG_LAYER,))
ZOPE_LAYER = zeit.cms.testing.ZopeLayer(bases=(ZCML_LAYER,))
def test_suite():
return zeit.cms.testing.FunctionalDocFileSuite(
'README.txt',
'yesterday.txt',
layer=ZOPE_LAYER)
| Update to new testlayer API | ZON-5241: Update to new testlayer API
| Python | bsd-3-clause | ZeitOnline/zeit.today | import pkg_resources
import zeit.cms.testing
product_config = """\
<product-config zeit.today>
today-xml-url file://{base}/today.xml
</product-config>
""".format(base=pkg_resources.resource_filename(__name__, '.'))
- TodayLayer = zeit.cms.testing.ZCMLLayer('ftesting.zcml', product_config=(
- product_config +
- zeit.cms.testing.cms_product_config))
+ CONFIG_LAYER = zeit.cms.testing.ProductConfigLayer(product_config, bases=(
+ zeit.cms.testing.CONFIG_LAYER,))
+ ZCML_LAYER = zeit.cms.testing.ZCMLLayer(bases=(CONFIG_LAYER,))
+ ZOPE_LAYER = zeit.cms.testing.ZopeLayer(bases=(ZCML_LAYER,))
def test_suite():
return zeit.cms.testing.FunctionalDocFileSuite(
'README.txt',
'yesterday.txt',
+ layer=ZOPE_LAYER)
- layer=TodayLayer
- )
| Update to new testlayer API | ## Code Before:
import pkg_resources
import zeit.cms.testing
product_config = """\
<product-config zeit.today>
today-xml-url file://{base}/today.xml
</product-config>
""".format(base=pkg_resources.resource_filename(__name__, '.'))
TodayLayer = zeit.cms.testing.ZCMLLayer('ftesting.zcml', product_config=(
product_config +
zeit.cms.testing.cms_product_config))
def test_suite():
return zeit.cms.testing.FunctionalDocFileSuite(
'README.txt',
'yesterday.txt',
layer=TodayLayer
)
## Instruction:
Update to new testlayer API
## Code After:
import pkg_resources
import zeit.cms.testing
product_config = """\
<product-config zeit.today>
today-xml-url file://{base}/today.xml
</product-config>
""".format(base=pkg_resources.resource_filename(__name__, '.'))
CONFIG_LAYER = zeit.cms.testing.ProductConfigLayer(product_config, bases=(
zeit.cms.testing.CONFIG_LAYER,))
ZCML_LAYER = zeit.cms.testing.ZCMLLayer(bases=(CONFIG_LAYER,))
ZOPE_LAYER = zeit.cms.testing.ZopeLayer(bases=(ZCML_LAYER,))
def test_suite():
return zeit.cms.testing.FunctionalDocFileSuite(
'README.txt',
'yesterday.txt',
layer=ZOPE_LAYER)
|
81f7b2bdd0e916a001b954ce9bac24ebe4600150 | roboime/options.py | roboime/options.py |
#Position Log filename. Use None to disable.
position_log_filename = "math/pos_log.txt"
#position_log_filename = None
#Position Log with Noise filename. Use None to disable.
position_log_noise_filename = "math/pos_log_noise.txt"
#position_log_filename = None
#Command and Update Log filename. Use None to disable.
cmdupd_filename = "math/commands.txt"
#cmdupd_filename = None
#Gaussian noise addition variances
noise_var_x = 3.
noise_var_y = 3.
noise_var_angle = 0.05 |
#Position Log filename. Use None to disable.
position_log_filename = "math/pos_log.txt"
#position_log_filename = None
#Command and Update Log filename. Use None to disable.
cmdupd_filename = "math/commands.txt"
#cmdupd_filename = None
#Gaussian noise addition variances
noise_var_x = 3.E-5
noise_var_y = 3.E-5
noise_var_angle = 1.
# Process error estimate. The lower (higher negative exponent), more the filter
# becomes like a Low-Pass Filter (higher confidence in the model prediction).
Q = 1e-5
# Measurement error variances (for the R matrix).
# The higher (lower negative exponent), more the filter becomes like a
# Low-Pass Filter (higher possible measurement error).
R_var_x = 3.E-5
R_var_y = 3.E-5
R_var_angle = 3
| Add Q (generic) and R (3 values) to get more precise Kalman results | Add Q (generic) and R (3 values) to get more precise Kalman results
| Python | agpl-3.0 | roboime/pyroboime |
#Position Log filename. Use None to disable.
position_log_filename = "math/pos_log.txt"
- #position_log_filename = None
-
- #Position Log with Noise filename. Use None to disable.
- position_log_noise_filename = "math/pos_log_noise.txt"
#position_log_filename = None
#Command and Update Log filename. Use None to disable.
cmdupd_filename = "math/commands.txt"
#cmdupd_filename = None
#Gaussian noise addition variances
- noise_var_x = 3.
+ noise_var_x = 3.E-5
- noise_var_y = 3.
+ noise_var_y = 3.E-5
- noise_var_angle = 0.05
+ noise_var_angle = 1.
+
+ # Process error estimate. The lower (higher negative exponent), more the filter
+ # becomes like a Low-Pass Filter (higher confidence in the model prediction).
+ Q = 1e-5
+
+ # Measurement error variances (for the R matrix).
+ # The higher (lower negative exponent), more the filter becomes like a
+ # Low-Pass Filter (higher possible measurement error).
+ R_var_x = 3.E-5
+ R_var_y = 3.E-5
+ R_var_angle = 3
+ | Add Q (generic) and R (3 values) to get more precise Kalman results | ## Code Before:
#Position Log filename. Use None to disable.
position_log_filename = "math/pos_log.txt"
#position_log_filename = None
#Position Log with Noise filename. Use None to disable.
position_log_noise_filename = "math/pos_log_noise.txt"
#position_log_filename = None
#Command and Update Log filename. Use None to disable.
cmdupd_filename = "math/commands.txt"
#cmdupd_filename = None
#Gaussian noise addition variances
noise_var_x = 3.
noise_var_y = 3.
noise_var_angle = 0.05
## Instruction:
Add Q (generic) and R (3 values) to get more precise Kalman results
## Code After:
#Position Log filename. Use None to disable.
position_log_filename = "math/pos_log.txt"
#position_log_filename = None
#Command and Update Log filename. Use None to disable.
cmdupd_filename = "math/commands.txt"
#cmdupd_filename = None
#Gaussian noise addition variances
noise_var_x = 3.E-5
noise_var_y = 3.E-5
noise_var_angle = 1.
# Process error estimate. The lower (higher negative exponent), more the filter
# becomes like a Low-Pass Filter (higher confidence in the model prediction).
Q = 1e-5
# Measurement error variances (for the R matrix).
# The higher (lower negative exponent), more the filter becomes like a
# Low-Pass Filter (higher possible measurement error).
R_var_x = 3.E-5
R_var_y = 3.E-5
R_var_angle = 3
|
d6ce218b0da869f6b4319751c1fe59ef02fba6b6 | kremlin/imgutils.py | kremlin/imgutils.py | import os
from PIL import Image
def mkthumb(fp, h=128, w=128):
"""docstring for mkthumb"""
size = (h, w)
f, ext = os.path.splitext(fp)
im = Image.open(fp)
im.thumbnail(size, Image.ANTIALIAS)
im.save(f + ".thumbnail" + ext)
| import os
from PIL import Image
def mkthumb(fp, h=128, w=128):
"""docstring for mkthumb"""
size = (h, w)
f, ext = os.path.splitext(fp)
im = Image.open(fp)
im.thumbnail(size, Image.ANTIALIAS)
im.save('.thumbnail'.join([f, ext]))
| Use better string concatenation in mkthumb() | Use better string concatenation in mkthumb()
| Python | bsd-2-clause | glasnost/kremlin,glasnost/kremlin,glasnost/kremlin | import os
from PIL import Image
def mkthumb(fp, h=128, w=128):
"""docstring for mkthumb"""
size = (h, w)
f, ext = os.path.splitext(fp)
im = Image.open(fp)
im.thumbnail(size, Image.ANTIALIAS)
- im.save(f + ".thumbnail" + ext)
+ im.save('.thumbnail'.join([f, ext]))
| Use better string concatenation in mkthumb() | ## Code Before:
import os
from PIL import Image
def mkthumb(fp, h=128, w=128):
"""docstring for mkthumb"""
size = (h, w)
f, ext = os.path.splitext(fp)
im = Image.open(fp)
im.thumbnail(size, Image.ANTIALIAS)
im.save(f + ".thumbnail" + ext)
## Instruction:
Use better string concatenation in mkthumb()
## Code After:
import os
from PIL import Image
def mkthumb(fp, h=128, w=128):
"""docstring for mkthumb"""
size = (h, w)
f, ext = os.path.splitext(fp)
im = Image.open(fp)
im.thumbnail(size, Image.ANTIALIAS)
im.save('.thumbnail'.join([f, ext]))
|
09f65ff2a21cd00355193bcdee22a2289ead2d24 | tests/test_arguments.py | tests/test_arguments.py | from __future__ import print_function
import unittest
import wrapt
class TestArguments(unittest.TestCase):
def test_getcallargs(self):
def function(a, b=2, c=3, d=4, e=5, *args, **kwargs):
pass
expected = {'a': 10, 'c': 3, 'b': 20, 'e': 5, 'd': 40,
'args': (), 'kwargs': {'f': 50}}
calculated = wrapt.getcallargs(function, 10, 20, d=40, f=50)
self.assertEqual(expected, calculated)
expected = {'a': 10, 'c': 30, 'b': 20, 'e': 50, 'd': 40,
'args': (60,), 'kwargs': {}}
calculated = wrapt.getcallargs(function, 10, 20, 30, 40, 50, 60)
self.assertEqual(expected, calculated)
| from __future__ import print_function
import unittest
import wrapt
class TestArguments(unittest.TestCase):
def test_getcallargs(self):
def function(a, b=2, c=3, d=4, e=5, *args, **kwargs):
pass
expected = {'a': 10, 'c': 3, 'b': 20, 'e': 5, 'd': 40,
'args': (), 'kwargs': {'f': 50}}
calculated = wrapt.getcallargs(function, 10, 20, d=40, f=50)
self.assertEqual(expected, calculated)
expected = {'a': 10, 'c': 30, 'b': 20, 'e': 50, 'd': 40,
'args': (60,), 'kwargs': {}}
calculated = wrapt.getcallargs(function, 10, 20, 30, 40, 50, 60)
self.assertEqual(expected, calculated)
def test_unexpected_unicode_keyword(self):
def function(a=2):
pass
kwargs = { u'b': 40 }
self.assertRaises(TypeError, wrapt.getcallargs, function, **kwargs)
| Add test for unexpected unicode kwargs. | Add test for unexpected unicode kwargs.
| Python | bsd-2-clause | GrahamDumpleton/wrapt,GrahamDumpleton/wrapt | from __future__ import print_function
import unittest
import wrapt
class TestArguments(unittest.TestCase):
def test_getcallargs(self):
def function(a, b=2, c=3, d=4, e=5, *args, **kwargs):
pass
expected = {'a': 10, 'c': 3, 'b': 20, 'e': 5, 'd': 40,
'args': (), 'kwargs': {'f': 50}}
calculated = wrapt.getcallargs(function, 10, 20, d=40, f=50)
self.assertEqual(expected, calculated)
expected = {'a': 10, 'c': 30, 'b': 20, 'e': 50, 'd': 40,
'args': (60,), 'kwargs': {}}
calculated = wrapt.getcallargs(function, 10, 20, 30, 40, 50, 60)
self.assertEqual(expected, calculated)
+ def test_unexpected_unicode_keyword(self):
+ def function(a=2):
+ pass
+
+ kwargs = { u'b': 40 }
+ self.assertRaises(TypeError, wrapt.getcallargs, function, **kwargs)
+ | Add test for unexpected unicode kwargs. | ## Code Before:
from __future__ import print_function
import unittest
import wrapt
class TestArguments(unittest.TestCase):
def test_getcallargs(self):
def function(a, b=2, c=3, d=4, e=5, *args, **kwargs):
pass
expected = {'a': 10, 'c': 3, 'b': 20, 'e': 5, 'd': 40,
'args': (), 'kwargs': {'f': 50}}
calculated = wrapt.getcallargs(function, 10, 20, d=40, f=50)
self.assertEqual(expected, calculated)
expected = {'a': 10, 'c': 30, 'b': 20, 'e': 50, 'd': 40,
'args': (60,), 'kwargs': {}}
calculated = wrapt.getcallargs(function, 10, 20, 30, 40, 50, 60)
self.assertEqual(expected, calculated)
## Instruction:
Add test for unexpected unicode kwargs.
## Code After:
from __future__ import print_function
import unittest
import wrapt
class TestArguments(unittest.TestCase):
def test_getcallargs(self):
def function(a, b=2, c=3, d=4, e=5, *args, **kwargs):
pass
expected = {'a': 10, 'c': 3, 'b': 20, 'e': 5, 'd': 40,
'args': (), 'kwargs': {'f': 50}}
calculated = wrapt.getcallargs(function, 10, 20, d=40, f=50)
self.assertEqual(expected, calculated)
expected = {'a': 10, 'c': 30, 'b': 20, 'e': 50, 'd': 40,
'args': (60,), 'kwargs': {}}
calculated = wrapt.getcallargs(function, 10, 20, 30, 40, 50, 60)
self.assertEqual(expected, calculated)
def test_unexpected_unicode_keyword(self):
def function(a=2):
pass
kwargs = { u'b': 40 }
self.assertRaises(TypeError, wrapt.getcallargs, function, **kwargs)
|
397eb3ee376acec005a8d7b5a4c2b2e0193a938d | tests/test_bookmarks.py | tests/test_bookmarks.py | import bookmarks
import unittest
class FlaskrTestCase(unittest.TestCase):
def setUp(self):
self.app = bookmarks.app.test_client()
# with bookmarks.app.app_context():
bookmarks.database.init_db()
def tearDown(self):
# with bookmarks.app.app_context():
bookmarks.database.db_session.remove()
bookmarks.database.Base.metadata.drop_all(
bind=bookmarks.database.engine)
def test_empty_db(self):
rv = self.app.get('/')
assert b'There aren\'t any bookmarks yet.' in rv.data
def register(self, username, name, email, password):
return self.app.post('/register_user/', data=dict(
username=username,
name=name,
email=email,
password=password,
confirm=password
), follow_redirects=True)
def login(self, username, password):
return self.app.post('/login', data=dict(
username=username,
password=password,
confirm=password
), follow_redirects=True)
def logout(self):
return self.app.get('/logout', follow_redirects=True)
def test_register(self):
username = 'byanofsky'
name = 'Brandon Yanofsky'
email = '[email protected]'
password = 'Brandon123'
rv = self.register(username, name, email, password)
# print(rv.data)
assert (b'Successfully registered ' in rv.data)
if __name__ == '__main__':
unittest.main()
| import bookmarks
import unittest
class FlaskrTestCase(unittest.TestCase):
def setUp(self):
self.app = bookmarks.app.test_client()
# with bookmarks.app.app_context():
bookmarks.database.init_db()
def tearDown(self):
# with bookmarks.app.app_context():
bookmarks.database.db_session.remove()
bookmarks.database.Base.metadata.drop_all(
bind=bookmarks.database.engine)
def test_empty_db(self):
rv = self.app.get('/')
assert b'There aren\'t any bookmarks yet.' in rv.data
def register(self, username, name, email, password, confirm=None):
return self.app.post('/register_user/', data=dict(
username=username,
name=name,
email=email,
password=password,
confirm=confirm
), follow_redirects=True)
def login(self, username, password):
return self.app.post('/login', data=dict(
username=username,
password=password,
confirm=password
), follow_redirects=True)
def logout(self):
return self.app.get('/logout', follow_redirects=True)
def test_register(self):
username = 'byanofsky'
name = 'Brandon Yanofsky'
email = '[email protected]'
password = 'Brandon123'
rv = self.register(username, name, email, password)
# print(rv.data)
assert (b'Successfully registered ' in rv.data)
if __name__ == '__main__':
unittest.main()
| Add param for confirm field on register test func | Add param for confirm field on register test func
| Python | apache-2.0 | byanofsky/bookmarks,byanofsky/bookmarks,byanofsky/bookmarks | import bookmarks
import unittest
class FlaskrTestCase(unittest.TestCase):
def setUp(self):
self.app = bookmarks.app.test_client()
# with bookmarks.app.app_context():
bookmarks.database.init_db()
def tearDown(self):
# with bookmarks.app.app_context():
bookmarks.database.db_session.remove()
bookmarks.database.Base.metadata.drop_all(
bind=bookmarks.database.engine)
def test_empty_db(self):
rv = self.app.get('/')
assert b'There aren\'t any bookmarks yet.' in rv.data
- def register(self, username, name, email, password):
+ def register(self, username, name, email, password, confirm=None):
return self.app.post('/register_user/', data=dict(
username=username,
name=name,
email=email,
password=password,
- confirm=password
+ confirm=confirm
), follow_redirects=True)
def login(self, username, password):
return self.app.post('/login', data=dict(
username=username,
password=password,
confirm=password
), follow_redirects=True)
def logout(self):
return self.app.get('/logout', follow_redirects=True)
def test_register(self):
username = 'byanofsky'
name = 'Brandon Yanofsky'
email = '[email protected]'
password = 'Brandon123'
rv = self.register(username, name, email, password)
# print(rv.data)
assert (b'Successfully registered ' in rv.data)
if __name__ == '__main__':
unittest.main()
| Add param for confirm field on register test func | ## Code Before:
import bookmarks
import unittest
class FlaskrTestCase(unittest.TestCase):
def setUp(self):
self.app = bookmarks.app.test_client()
# with bookmarks.app.app_context():
bookmarks.database.init_db()
def tearDown(self):
# with bookmarks.app.app_context():
bookmarks.database.db_session.remove()
bookmarks.database.Base.metadata.drop_all(
bind=bookmarks.database.engine)
def test_empty_db(self):
rv = self.app.get('/')
assert b'There aren\'t any bookmarks yet.' in rv.data
def register(self, username, name, email, password):
return self.app.post('/register_user/', data=dict(
username=username,
name=name,
email=email,
password=password,
confirm=password
), follow_redirects=True)
def login(self, username, password):
return self.app.post('/login', data=dict(
username=username,
password=password,
confirm=password
), follow_redirects=True)
def logout(self):
return self.app.get('/logout', follow_redirects=True)
def test_register(self):
username = 'byanofsky'
name = 'Brandon Yanofsky'
email = '[email protected]'
password = 'Brandon123'
rv = self.register(username, name, email, password)
# print(rv.data)
assert (b'Successfully registered ' in rv.data)
if __name__ == '__main__':
unittest.main()
## Instruction:
Add param for confirm field on register test func
## Code After:
import bookmarks
import unittest
class FlaskrTestCase(unittest.TestCase):
def setUp(self):
self.app = bookmarks.app.test_client()
# with bookmarks.app.app_context():
bookmarks.database.init_db()
def tearDown(self):
# with bookmarks.app.app_context():
bookmarks.database.db_session.remove()
bookmarks.database.Base.metadata.drop_all(
bind=bookmarks.database.engine)
def test_empty_db(self):
rv = self.app.get('/')
assert b'There aren\'t any bookmarks yet.' in rv.data
def register(self, username, name, email, password, confirm=None):
return self.app.post('/register_user/', data=dict(
username=username,
name=name,
email=email,
password=password,
confirm=confirm
), follow_redirects=True)
def login(self, username, password):
return self.app.post('/login', data=dict(
username=username,
password=password,
confirm=password
), follow_redirects=True)
def logout(self):
return self.app.get('/logout', follow_redirects=True)
def test_register(self):
username = 'byanofsky'
name = 'Brandon Yanofsky'
email = '[email protected]'
password = 'Brandon123'
rv = self.register(username, name, email, password)
# print(rv.data)
assert (b'Successfully registered ' in rv.data)
if __name__ == '__main__':
unittest.main()
|
95fbbe9bac94e171424cb8ee23a675a70607fb62 | tests/test_constants.py | tests/test_constants.py | from __future__ import absolute_import, unicode_literals
import unittest
from draftjs_exporter.constants import Enum, BLOCK_TYPES, ENTITY_TYPES, INLINE_STYLES
class EnumConstants(unittest.TestCase):
def test_enum_returns_the_key_if_valid(self):
foo_value = 'foo'
e = Enum(foo_value)
self.assertEqual(e.foo, foo_value)
def test_enum_raises_an_error_for_invalid_keys(self):
e = Enum('foo', 'bar')
with self.assertRaises(AttributeError):
e.invalid_key
class TestConstants(unittest.TestCase):
def test_block_types(self):
self.assertIsInstance(BLOCK_TYPES, object)
self.assertEqual(BLOCK_TYPES.UNSTYLED, 'unstyled')
def test_entity_types(self):
self.assertIsInstance(ENTITY_TYPES, object)
self.assertEqual(ENTITY_TYPES.LINK, 'LINK')
def test_inline_styles(self):
self.assertIsInstance(INLINE_STYLES, object)
self.assertEqual(INLINE_STYLES.BOLD, 'BOLD')
| from __future__ import absolute_import, unicode_literals
import unittest
from draftjs_exporter.constants import BLOCK_TYPES, ENTITY_TYPES, INLINE_STYLES, Enum
class EnumConstants(unittest.TestCase):
def test_enum_returns_the_key_if_valid(self):
foo_value = 'foo'
e = Enum(foo_value)
self.assertEqual(e.foo, foo_value)
def test_enum_raises_an_error_for_invalid_keys(self):
e = Enum('foo', 'bar')
with self.assertRaises(AttributeError):
e.invalid_key
class TestConstants(unittest.TestCase):
def test_block_types(self):
self.assertIsInstance(BLOCK_TYPES, object)
self.assertEqual(BLOCK_TYPES.UNSTYLED, 'unstyled')
def test_entity_types(self):
self.assertIsInstance(ENTITY_TYPES, object)
self.assertEqual(ENTITY_TYPES.LINK, 'LINK')
def test_inline_styles(self):
self.assertIsInstance(INLINE_STYLES, object)
self.assertEqual(INLINE_STYLES.BOLD, 'BOLD')
| Fix import order picked up by isort | Fix import order picked up by isort
| Python | mit | springload/draftjs_exporter,springload/draftjs_exporter,springload/draftjs_exporter | from __future__ import absolute_import, unicode_literals
import unittest
- from draftjs_exporter.constants import Enum, BLOCK_TYPES, ENTITY_TYPES, INLINE_STYLES
+ from draftjs_exporter.constants import BLOCK_TYPES, ENTITY_TYPES, INLINE_STYLES, Enum
class EnumConstants(unittest.TestCase):
def test_enum_returns_the_key_if_valid(self):
foo_value = 'foo'
e = Enum(foo_value)
self.assertEqual(e.foo, foo_value)
def test_enum_raises_an_error_for_invalid_keys(self):
e = Enum('foo', 'bar')
with self.assertRaises(AttributeError):
e.invalid_key
class TestConstants(unittest.TestCase):
def test_block_types(self):
self.assertIsInstance(BLOCK_TYPES, object)
self.assertEqual(BLOCK_TYPES.UNSTYLED, 'unstyled')
def test_entity_types(self):
self.assertIsInstance(ENTITY_TYPES, object)
self.assertEqual(ENTITY_TYPES.LINK, 'LINK')
def test_inline_styles(self):
self.assertIsInstance(INLINE_STYLES, object)
self.assertEqual(INLINE_STYLES.BOLD, 'BOLD')
| Fix import order picked up by isort | ## Code Before:
from __future__ import absolute_import, unicode_literals
import unittest
from draftjs_exporter.constants import Enum, BLOCK_TYPES, ENTITY_TYPES, INLINE_STYLES
class EnumConstants(unittest.TestCase):
def test_enum_returns_the_key_if_valid(self):
foo_value = 'foo'
e = Enum(foo_value)
self.assertEqual(e.foo, foo_value)
def test_enum_raises_an_error_for_invalid_keys(self):
e = Enum('foo', 'bar')
with self.assertRaises(AttributeError):
e.invalid_key
class TestConstants(unittest.TestCase):
def test_block_types(self):
self.assertIsInstance(BLOCK_TYPES, object)
self.assertEqual(BLOCK_TYPES.UNSTYLED, 'unstyled')
def test_entity_types(self):
self.assertIsInstance(ENTITY_TYPES, object)
self.assertEqual(ENTITY_TYPES.LINK, 'LINK')
def test_inline_styles(self):
self.assertIsInstance(INLINE_STYLES, object)
self.assertEqual(INLINE_STYLES.BOLD, 'BOLD')
## Instruction:
Fix import order picked up by isort
## Code After:
from __future__ import absolute_import, unicode_literals
import unittest
from draftjs_exporter.constants import BLOCK_TYPES, ENTITY_TYPES, INLINE_STYLES, Enum
class EnumConstants(unittest.TestCase):
def test_enum_returns_the_key_if_valid(self):
foo_value = 'foo'
e = Enum(foo_value)
self.assertEqual(e.foo, foo_value)
def test_enum_raises_an_error_for_invalid_keys(self):
e = Enum('foo', 'bar')
with self.assertRaises(AttributeError):
e.invalid_key
class TestConstants(unittest.TestCase):
def test_block_types(self):
self.assertIsInstance(BLOCK_TYPES, object)
self.assertEqual(BLOCK_TYPES.UNSTYLED, 'unstyled')
def test_entity_types(self):
self.assertIsInstance(ENTITY_TYPES, object)
self.assertEqual(ENTITY_TYPES.LINK, 'LINK')
def test_inline_styles(self):
self.assertIsInstance(INLINE_STYLES, object)
self.assertEqual(INLINE_STYLES.BOLD, 'BOLD')
|
aa6a74abc382bb6be86fa4a91132a9be51f365a5 | tests/test_data_checksums.py | tests/test_data_checksums.py | """ test data_checksums"""
from nose.tools import assert_equal
def test_data_checksums():
from pyne.data import data_checksums
assert_equal(len(data_checksums), 6)
assert_equal(data_checksums['/neutron/simple_xs'], '3d6e086977783dcdf07e5c6b0c2416be') | """ test data_checksums and hashing functions"""
import os
from nose.tools import assert_equal, assert_true
import pyne
# These tests require nuc_data
if not os.path.isfile(pyne.nuc_data):
raise RuntimeError("Tests require nuc_data.h5. Please run nuc_data_make.")
def test_data_checksums():
from pyne.data import data_checksums
assert_equal(len(data_checksums), 6)
assert_equal(data_checksums['/neutron/simple_xs'], '3d6e086977783dcdf07e5c6b0c2416be')
def test_internal_hashes():
from pyne.dbgen import hashtools
hashtools.set_internal_hashes(pyne.nuc_data)
for item, val in hashtools.check_internal_hashes(pyne.nuc_data):
assert_true(val)
| Add test of internal hashes and guarded pyne.nuc_data use | Add test of internal hashes and guarded pyne.nuc_data use
| Python | bsd-3-clause | pyne/simplesim | - """ test data_checksums"""
- from nose.tools import assert_equal
+ """ test data_checksums and hashing functions"""
+ import os
+ from nose.tools import assert_equal, assert_true
+
+ import pyne
+
+ # These tests require nuc_data
+ if not os.path.isfile(pyne.nuc_data):
+ raise RuntimeError("Tests require nuc_data.h5. Please run nuc_data_make.")
def test_data_checksums():
from pyne.data import data_checksums
assert_equal(len(data_checksums), 6)
assert_equal(data_checksums['/neutron/simple_xs'], '3d6e086977783dcdf07e5c6b0c2416be')
+
+ def test_internal_hashes():
+ from pyne.dbgen import hashtools
+ hashtools.set_internal_hashes(pyne.nuc_data)
+ for item, val in hashtools.check_internal_hashes(pyne.nuc_data):
+ assert_true(val)
+
+ | Add test of internal hashes and guarded pyne.nuc_data use | ## Code Before:
""" test data_checksums"""
from nose.tools import assert_equal
def test_data_checksums():
from pyne.data import data_checksums
assert_equal(len(data_checksums), 6)
assert_equal(data_checksums['/neutron/simple_xs'], '3d6e086977783dcdf07e5c6b0c2416be')
## Instruction:
Add test of internal hashes and guarded pyne.nuc_data use
## Code After:
""" test data_checksums and hashing functions"""
import os
from nose.tools import assert_equal, assert_true
import pyne
# These tests require nuc_data
if not os.path.isfile(pyne.nuc_data):
raise RuntimeError("Tests require nuc_data.h5. Please run nuc_data_make.")
def test_data_checksums():
from pyne.data import data_checksums
assert_equal(len(data_checksums), 6)
assert_equal(data_checksums['/neutron/simple_xs'], '3d6e086977783dcdf07e5c6b0c2416be')
def test_internal_hashes():
from pyne.dbgen import hashtools
hashtools.set_internal_hashes(pyne.nuc_data)
for item, val in hashtools.check_internal_hashes(pyne.nuc_data):
assert_true(val)
|
698732f1276f92a94143b0531906caf37e885c28 | trello_notifications.py | trello_notifications.py | try:
from trello import TrelloCommand
from output import Output
except ImportError:
from .trello import TrelloCommand
from .output import Output
class TrelloNotificationsCommand(TrelloCommand):
def work(self, connection):
self.options = [
{ 'name': "Unread", 'action': self.show_unread },
{ 'name': "Read all", 'action': self.read_all },
{ 'name': "Exit", 'action': self.noop }
]
self.show_quick_panel(self.items(), self.callback)
def items(self):
return [option['name'] for option in self.options]
def callback(self, index):
option = self.options[index]
if not option is None:
option['action']()
def show_unread(self):
self.view.run_command("trello_unread_notifications")
def read_all():
pass
def noop():
pass
class TrelloUnreadNotificationsCommand(TrelloCommand):
def work(self, connection):
member = connection.me
output = Output.notifications(member.unread_notifications())
self.show_output_panel(output) | try:
from trello import TrelloCommand
from output import Output
except ImportError:
from .trello import TrelloCommand
from .output import Output
class TrelloNotificationsCommand(TrelloCommand):
def work(self, connection):
self.options = [
{ 'name': "Unread", 'action': self.show_unread },
{ 'name': "Read all", 'action': self.read_all },
{ 'name': "Exit", 'action': self.noop }
]
self.show_quick_panel(self.items(), self.callback)
self.connection = connection
def items(self):
return [option['name'] for option in self.options]
def callback(self, index):
option = self.options[index]
if not option is None:
option['action']()
def show_unread(self):
self.view.run_command("trello_unread_notifications")
def read_all(self):
pass
def noop(self):
pass
class TrelloUnreadNotificationsCommand(TrelloCommand):
def work(self, connection):
member = connection.me
output = Output.notifications(member.unread_notifications())
self.show_output_panel(output) | Store connection and missing self | Store connection and missing self
| Python | mit | NicoSantangelo/sublime-text-trello | try:
from trello import TrelloCommand
from output import Output
except ImportError:
from .trello import TrelloCommand
from .output import Output
class TrelloNotificationsCommand(TrelloCommand):
def work(self, connection):
self.options = [
{ 'name': "Unread", 'action': self.show_unread },
{ 'name': "Read all", 'action': self.read_all },
{ 'name': "Exit", 'action': self.noop }
]
self.show_quick_panel(self.items(), self.callback)
+ self.connection = connection
def items(self):
return [option['name'] for option in self.options]
def callback(self, index):
option = self.options[index]
if not option is None:
option['action']()
def show_unread(self):
self.view.run_command("trello_unread_notifications")
- def read_all():
+ def read_all(self):
pass
- def noop():
+ def noop(self):
pass
class TrelloUnreadNotificationsCommand(TrelloCommand):
def work(self, connection):
member = connection.me
output = Output.notifications(member.unread_notifications())
self.show_output_panel(output) | Store connection and missing self | ## Code Before:
try:
from trello import TrelloCommand
from output import Output
except ImportError:
from .trello import TrelloCommand
from .output import Output
class TrelloNotificationsCommand(TrelloCommand):
def work(self, connection):
self.options = [
{ 'name': "Unread", 'action': self.show_unread },
{ 'name': "Read all", 'action': self.read_all },
{ 'name': "Exit", 'action': self.noop }
]
self.show_quick_panel(self.items(), self.callback)
def items(self):
return [option['name'] for option in self.options]
def callback(self, index):
option = self.options[index]
if not option is None:
option['action']()
def show_unread(self):
self.view.run_command("trello_unread_notifications")
def read_all():
pass
def noop():
pass
class TrelloUnreadNotificationsCommand(TrelloCommand):
def work(self, connection):
member = connection.me
output = Output.notifications(member.unread_notifications())
self.show_output_panel(output)
## Instruction:
Store connection and missing self
## Code After:
try:
from trello import TrelloCommand
from output import Output
except ImportError:
from .trello import TrelloCommand
from .output import Output
class TrelloNotificationsCommand(TrelloCommand):
def work(self, connection):
self.options = [
{ 'name': "Unread", 'action': self.show_unread },
{ 'name': "Read all", 'action': self.read_all },
{ 'name': "Exit", 'action': self.noop }
]
self.show_quick_panel(self.items(), self.callback)
self.connection = connection
def items(self):
return [option['name'] for option in self.options]
def callback(self, index):
option = self.options[index]
if not option is None:
option['action']()
def show_unread(self):
self.view.run_command("trello_unread_notifications")
def read_all(self):
pass
def noop(self):
pass
class TrelloUnreadNotificationsCommand(TrelloCommand):
def work(self, connection):
member = connection.me
output = Output.notifications(member.unread_notifications())
self.show_output_panel(output) |
9796e60975474006940af723a6cb8b16bc632ae0 | tz_app/context_processors.py | tz_app/context_processors.py | from django.conf import settings
from django.utils import timezone
try:
import pytz
except ImportError:
pytz = None
def timezones(request):
alt_timezone = request.session.get('alt_timezone', pytz.utc)
return {
'pytz': pytz,
'default_timezone_name': settings.TIME_ZONE,
'timezones': pytz.common_timezones if pytz else [],
'alt_timezone': alt_timezone if pytz else timezone.utc,
'alt_timezone_name': alt_timezone.zone if pytz else 'UTC',
}
| from django.conf import settings
from django.utils import timezone
try:
import pytz
except ImportError:
pytz = None
def timezones(request):
alt_timezone = request.session.get('alt_timezone', (pytz or timezone).utc)
return {
'pytz': pytz,
'default_timezone_name': settings.TIME_ZONE,
'timezones': pytz.common_timezones if pytz else [],
'alt_timezone': alt_timezone if pytz else timezone.utc,
'alt_timezone_name': alt_timezone.zone if pytz else 'UTC',
}
| Fix a bug when pytz isn't installed. | Fix a bug when pytz isn't installed.
| Python | bsd-3-clause | aaugustin/django-tz-demo | from django.conf import settings
from django.utils import timezone
try:
import pytz
except ImportError:
pytz = None
def timezones(request):
- alt_timezone = request.session.get('alt_timezone', pytz.utc)
+ alt_timezone = request.session.get('alt_timezone', (pytz or timezone).utc)
return {
'pytz': pytz,
'default_timezone_name': settings.TIME_ZONE,
'timezones': pytz.common_timezones if pytz else [],
'alt_timezone': alt_timezone if pytz else timezone.utc,
'alt_timezone_name': alt_timezone.zone if pytz else 'UTC',
}
| Fix a bug when pytz isn't installed. | ## Code Before:
from django.conf import settings
from django.utils import timezone
try:
import pytz
except ImportError:
pytz = None
def timezones(request):
alt_timezone = request.session.get('alt_timezone', pytz.utc)
return {
'pytz': pytz,
'default_timezone_name': settings.TIME_ZONE,
'timezones': pytz.common_timezones if pytz else [],
'alt_timezone': alt_timezone if pytz else timezone.utc,
'alt_timezone_name': alt_timezone.zone if pytz else 'UTC',
}
## Instruction:
Fix a bug when pytz isn't installed.
## Code After:
from django.conf import settings
from django.utils import timezone
try:
import pytz
except ImportError:
pytz = None
def timezones(request):
alt_timezone = request.session.get('alt_timezone', (pytz or timezone).utc)
return {
'pytz': pytz,
'default_timezone_name': settings.TIME_ZONE,
'timezones': pytz.common_timezones if pytz else [],
'alt_timezone': alt_timezone if pytz else timezone.utc,
'alt_timezone_name': alt_timezone.zone if pytz else 'UTC',
}
|
c8b86afc53af25c845c8303111a6e7b17d8c26b4 | ciscripts/check/psqcppconan/check.py | ciscripts/check/psqcppconan/check.py | """Run tests and static analysis checks on a polysquare conan c++ project."""
import argparse
import os
def run(cont, util, shell, argv=None):
"""Run checks on this conan project."""
parser = argparse.ArgumentParser(description="""Run conan checks""")
parser.add_argument("--run-test-binaries",
nargs="*",
type=str,
help="""Files relative to the build dir to run""")
result, remainder = parser.parse_known_args(argv or list())
conan_check_script = "check/conan/check.py"
conan_check = cont.fetch_and_import(conan_check_script)
def _during_test(cont, executor, util, build):
"""Run the specified test binaries with the --tap switch.
We then pipe the output into tap-mocha-reporter.
"""
del build
for binary in result.run_test_binaries or list():
executor(cont,
util.running_output,
os.path.join(os.getcwd(), binary))
util.print_message(binary)
kwargs = {
"kind": "polysquare conan c++",
"during_test": _during_test
}
return conan_check.run(cont,
util,
shell,
argv=remainder,
override_kwargs=kwargs)
| """Run tests and static analysis checks on a polysquare conan c++ project."""
import argparse
import os
def run(cont, util, shell, argv=None):
"""Run checks on this conan project."""
parser = argparse.ArgumentParser(description="""Run conan checks""")
parser.add_argument("--run-test-binaries",
nargs="*",
type=str,
help="""Files relative to the build dir to run""")
result, remainder = parser.parse_known_args(argv or list())
conan_check_script = "check/conan/check.py"
conan_check = cont.fetch_and_import(conan_check_script)
def _during_test(cont, executor, util, build):
"""Run the specified test binaries with the --tap switch.
We then pipe the output into tap-mocha-reporter.
"""
del build
for binary in result.run_test_binaries or list():
if not os.path.exists(binary) and os.path.exists(binary + ".exe"):
binary = binary + ".exe"
executor(cont,
util.running_output,
os.path.join(os.getcwd(), binary))
util.print_message(binary)
kwargs = {
"kind": "polysquare conan c++",
"during_test": _during_test
}
return conan_check.run(cont,
util,
shell,
argv=remainder,
override_kwargs=kwargs)
| Allow the use of .exe | psqcppconan: Allow the use of .exe
| Python | mit | polysquare/polysquare-ci-scripts,polysquare/polysquare-ci-scripts | """Run tests and static analysis checks on a polysquare conan c++ project."""
import argparse
import os
def run(cont, util, shell, argv=None):
"""Run checks on this conan project."""
parser = argparse.ArgumentParser(description="""Run conan checks""")
parser.add_argument("--run-test-binaries",
nargs="*",
type=str,
help="""Files relative to the build dir to run""")
result, remainder = parser.parse_known_args(argv or list())
conan_check_script = "check/conan/check.py"
conan_check = cont.fetch_and_import(conan_check_script)
def _during_test(cont, executor, util, build):
"""Run the specified test binaries with the --tap switch.
We then pipe the output into tap-mocha-reporter.
"""
del build
for binary in result.run_test_binaries or list():
+ if not os.path.exists(binary) and os.path.exists(binary + ".exe"):
+ binary = binary + ".exe"
+
executor(cont,
util.running_output,
os.path.join(os.getcwd(), binary))
util.print_message(binary)
kwargs = {
"kind": "polysquare conan c++",
"during_test": _during_test
}
return conan_check.run(cont,
util,
shell,
argv=remainder,
override_kwargs=kwargs)
| Allow the use of .exe | ## Code Before:
"""Run tests and static analysis checks on a polysquare conan c++ project."""
import argparse
import os
def run(cont, util, shell, argv=None):
"""Run checks on this conan project."""
parser = argparse.ArgumentParser(description="""Run conan checks""")
parser.add_argument("--run-test-binaries",
nargs="*",
type=str,
help="""Files relative to the build dir to run""")
result, remainder = parser.parse_known_args(argv or list())
conan_check_script = "check/conan/check.py"
conan_check = cont.fetch_and_import(conan_check_script)
def _during_test(cont, executor, util, build):
"""Run the specified test binaries with the --tap switch.
We then pipe the output into tap-mocha-reporter.
"""
del build
for binary in result.run_test_binaries or list():
executor(cont,
util.running_output,
os.path.join(os.getcwd(), binary))
util.print_message(binary)
kwargs = {
"kind": "polysquare conan c++",
"during_test": _during_test
}
return conan_check.run(cont,
util,
shell,
argv=remainder,
override_kwargs=kwargs)
## Instruction:
Allow the use of .exe
## Code After:
"""Run tests and static analysis checks on a polysquare conan c++ project."""
import argparse
import os
def run(cont, util, shell, argv=None):
"""Run checks on this conan project."""
parser = argparse.ArgumentParser(description="""Run conan checks""")
parser.add_argument("--run-test-binaries",
nargs="*",
type=str,
help="""Files relative to the build dir to run""")
result, remainder = parser.parse_known_args(argv or list())
conan_check_script = "check/conan/check.py"
conan_check = cont.fetch_and_import(conan_check_script)
def _during_test(cont, executor, util, build):
"""Run the specified test binaries with the --tap switch.
We then pipe the output into tap-mocha-reporter.
"""
del build
for binary in result.run_test_binaries or list():
if not os.path.exists(binary) and os.path.exists(binary + ".exe"):
binary = binary + ".exe"
executor(cont,
util.running_output,
os.path.join(os.getcwd(), binary))
util.print_message(binary)
kwargs = {
"kind": "polysquare conan c++",
"during_test": _during_test
}
return conan_check.run(cont,
util,
shell,
argv=remainder,
override_kwargs=kwargs)
|
e3cb7ad226e3c26cbfa6f9f322ebdb4fde7e7d60 | coop_cms/apps/coop_bootstrap/templatetags/coop_bs.py | coop_cms/apps/coop_bootstrap/templatetags/coop_bs.py |
from __future__ import unicode_literals
from django import template
from coop_cms.templatetags.coop_utils import is_checkbox as _is_checkbox
from coop_cms.templatetags.coop_navigation import NavigationAsNestedUlNode
register = template.Library()
# Just for compatibility
@register.filter(name='is_checkbox')
def is_checkbox(field):
"""returns true if field is a checkbox"""
return _is_checkbox(field)
@register.tag
def navigation_bootstrap(parser, token):
"""returns the bootstrap-friendly navigation"""
return NavigationAsNestedUlNode(li_node="coop_bootstrap/li_node.html")
|
from __future__ import unicode_literals
from django import template
from coop_cms.templatetags.coop_utils import is_checkbox as _is_checkbox
from coop_cms.templatetags.coop_navigation import NavigationAsNestedUlNode, extract_kwargs
register = template.Library()
# Just for compatibility
@register.filter(name='is_checkbox')
def is_checkbox(field):
"""returns true if field is a checkbox"""
return _is_checkbox(field)
@register.tag
def navigation_bootstrap(parser, token):
"""returns the bootstrap-friendly navigation"""
kwargs = dict(li_node="coop_bootstrap/li_node.html")
args = token.contents.split()
kwargs.update(extract_kwargs(args))
return NavigationAsNestedUlNode(**kwargs)
| Fix "navigation_bootstrap" templatetag : arguments were ignored | Fix "navigation_bootstrap" templatetag : arguments were ignored
| Python | bsd-3-clause | ljean/coop_cms,ljean/coop_cms,ljean/coop_cms |
from __future__ import unicode_literals
from django import template
from coop_cms.templatetags.coop_utils import is_checkbox as _is_checkbox
- from coop_cms.templatetags.coop_navigation import NavigationAsNestedUlNode
+ from coop_cms.templatetags.coop_navigation import NavigationAsNestedUlNode, extract_kwargs
register = template.Library()
# Just for compatibility
@register.filter(name='is_checkbox')
def is_checkbox(field):
"""returns true if field is a checkbox"""
return _is_checkbox(field)
+
@register.tag
def navigation_bootstrap(parser, token):
"""returns the bootstrap-friendly navigation"""
- return NavigationAsNestedUlNode(li_node="coop_bootstrap/li_node.html")
+ kwargs = dict(li_node="coop_bootstrap/li_node.html")
+ args = token.contents.split()
+ kwargs.update(extract_kwargs(args))
+ return NavigationAsNestedUlNode(**kwargs)
| Fix "navigation_bootstrap" templatetag : arguments were ignored | ## Code Before:
from __future__ import unicode_literals
from django import template
from coop_cms.templatetags.coop_utils import is_checkbox as _is_checkbox
from coop_cms.templatetags.coop_navigation import NavigationAsNestedUlNode
register = template.Library()
# Just for compatibility
@register.filter(name='is_checkbox')
def is_checkbox(field):
"""returns true if field is a checkbox"""
return _is_checkbox(field)
@register.tag
def navigation_bootstrap(parser, token):
"""returns the bootstrap-friendly navigation"""
return NavigationAsNestedUlNode(li_node="coop_bootstrap/li_node.html")
## Instruction:
Fix "navigation_bootstrap" templatetag : arguments were ignored
## Code After:
from __future__ import unicode_literals
from django import template
from coop_cms.templatetags.coop_utils import is_checkbox as _is_checkbox
from coop_cms.templatetags.coop_navigation import NavigationAsNestedUlNode, extract_kwargs
register = template.Library()
# Just for compatibility
@register.filter(name='is_checkbox')
def is_checkbox(field):
"""returns true if field is a checkbox"""
return _is_checkbox(field)
@register.tag
def navigation_bootstrap(parser, token):
"""returns the bootstrap-friendly navigation"""
kwargs = dict(li_node="coop_bootstrap/li_node.html")
args = token.contents.split()
kwargs.update(extract_kwargs(args))
return NavigationAsNestedUlNode(**kwargs)
|
8a4b576d6df4ef1f174c8698ff9a86dbf2f5bd4a | workshops/models.py | workshops/models.py | from django.db import models
from django.db.models.deletion import PROTECT
from django_extensions.db.fields import AutoSlugField
class Workshop(models.Model):
event = models.ForeignKey('events.Event', PROTECT, related_name='workshops')
applicant = models.ForeignKey('cfp.Applicant', related_name='workshops')
title = models.CharField(max_length=80)
slug = AutoSlugField(populate_from="title", unique=True)
about = models.TextField()
abstract = models.TextField()
extra_info = models.TextField(blank=True)
skill_level = models.ForeignKey('cfp.AudienceSkillLevel', PROTECT)
starts_at = models.DateTimeField()
duration_hours = models.DecimalField(max_digits=3, decimal_places=1)
tickets_link = models.URLField(blank=True)
price = models.PositiveIntegerField(blank=True, null=True)
@property
def approximate_euro_price(self):
return int(self.price / 7.5)
| from django.db import models
from django.db.models.deletion import PROTECT
from django_extensions.db.fields import AutoSlugField
class Workshop(models.Model):
event = models.ForeignKey('events.Event', PROTECT, related_name='workshops')
applicant = models.ForeignKey('cfp.Applicant', related_name='workshops')
title = models.CharField(max_length=80)
slug = AutoSlugField(populate_from="title", unique=True)
about = models.TextField()
abstract = models.TextField()
extra_info = models.TextField(blank=True)
skill_level = models.ForeignKey('cfp.AudienceSkillLevel', PROTECT)
starts_at = models.DateTimeField()
duration_hours = models.DecimalField(max_digits=3, decimal_places=1)
tickets_link = models.URLField(blank=True)
price = models.PositiveIntegerField(blank=True, null=True)
@property
def approximate_euro_price(self):
return int(self.price / 7.5) if self.price else None
| Check price exists before using it | Check price exists before using it
| Python | bsd-3-clause | WebCampZg/conference-web,WebCampZg/conference-web,WebCampZg/conference-web | from django.db import models
from django.db.models.deletion import PROTECT
from django_extensions.db.fields import AutoSlugField
class Workshop(models.Model):
event = models.ForeignKey('events.Event', PROTECT, related_name='workshops')
applicant = models.ForeignKey('cfp.Applicant', related_name='workshops')
title = models.CharField(max_length=80)
slug = AutoSlugField(populate_from="title", unique=True)
about = models.TextField()
abstract = models.TextField()
extra_info = models.TextField(blank=True)
skill_level = models.ForeignKey('cfp.AudienceSkillLevel', PROTECT)
starts_at = models.DateTimeField()
duration_hours = models.DecimalField(max_digits=3, decimal_places=1)
tickets_link = models.URLField(blank=True)
price = models.PositiveIntegerField(blank=True, null=True)
@property
def approximate_euro_price(self):
- return int(self.price / 7.5)
+ return int(self.price / 7.5) if self.price else None
| Check price exists before using it | ## Code Before:
from django.db import models
from django.db.models.deletion import PROTECT
from django_extensions.db.fields import AutoSlugField
class Workshop(models.Model):
event = models.ForeignKey('events.Event', PROTECT, related_name='workshops')
applicant = models.ForeignKey('cfp.Applicant', related_name='workshops')
title = models.CharField(max_length=80)
slug = AutoSlugField(populate_from="title", unique=True)
about = models.TextField()
abstract = models.TextField()
extra_info = models.TextField(blank=True)
skill_level = models.ForeignKey('cfp.AudienceSkillLevel', PROTECT)
starts_at = models.DateTimeField()
duration_hours = models.DecimalField(max_digits=3, decimal_places=1)
tickets_link = models.URLField(blank=True)
price = models.PositiveIntegerField(blank=True, null=True)
@property
def approximate_euro_price(self):
return int(self.price / 7.5)
## Instruction:
Check price exists before using it
## Code After:
from django.db import models
from django.db.models.deletion import PROTECT
from django_extensions.db.fields import AutoSlugField
class Workshop(models.Model):
event = models.ForeignKey('events.Event', PROTECT, related_name='workshops')
applicant = models.ForeignKey('cfp.Applicant', related_name='workshops')
title = models.CharField(max_length=80)
slug = AutoSlugField(populate_from="title", unique=True)
about = models.TextField()
abstract = models.TextField()
extra_info = models.TextField(blank=True)
skill_level = models.ForeignKey('cfp.AudienceSkillLevel', PROTECT)
starts_at = models.DateTimeField()
duration_hours = models.DecimalField(max_digits=3, decimal_places=1)
tickets_link = models.URLField(blank=True)
price = models.PositiveIntegerField(blank=True, null=True)
@property
def approximate_euro_price(self):
return int(self.price / 7.5) if self.price else None
|
ea3660bcc1a9f7be619def8e26dd7b0ab4a873cf | estmator_project/est_client/forms.py | estmator_project/est_client/forms.py | from django.forms import ModelForm, Select, TextInput
from .models import Client, Company
class ClientCreateForm(ModelForm):
class Meta:
model = Client
fields = [
'company',
'first_name',
'last_name',
'title',
'cell',
'desk',
'email'
]
widgets = {
'company': Select(attrs={'required': True}),
}
class CompanyCreateForm(ModelForm):
class Meta:
model = Company
fields = [
'company_name',
'phone',
'address',
'address2',
'city',
'state',
'postal',
'st_rate',
'ot_rate'
]
widgets = {
'company_name': TextInput(attrs={'required': True}),
}
class CompanyListForm(ModelForm):
class Meta:
model = Client
fields = ['company']
| from django.forms import ModelForm, Select, TextInput
from .models import Client, Company
class ClientCreateForm(ModelForm):
class Meta:
model = Client
fields = [
'company',
'first_name',
'last_name',
'title',
'cell',
'desk',
'email'
]
widgets = {
'company': Select(attrs={'required': True}),
'first_name': TextInput(attrs={'required': True}),
'last_name': TextInput(attrs={'required': True}),
'title': TextInput(attrs={'required': True}),
'cell': TextInput(attrs={'required': True}),
'email': TextInput(attrs={'required': True}),
}
class CompanyCreateForm(ModelForm):
class Meta:
model = Company
fields = [
'company_name',
'phone',
'address',
'address2',
'city',
'state',
'postal',
'st_rate',
'ot_rate'
]
widgets = {
'company_name': TextInput(attrs={'required': True}),
'phone': TextInput(attrs={'required': True}),
'address': TextInput(attrs={'required': True}),
'city': TextInput(attrs={'required': True}),
'postal': TextInput(attrs={'required': True}),
}
class CompanyListForm(ModelForm):
class Meta:
model = Client
fields = ['company']
| Make fields required on new client and company | Make fields required on new client and company
| Python | mit | Estmator/EstmatorApp,Estmator/EstmatorApp,Estmator/EstmatorApp | from django.forms import ModelForm, Select, TextInput
from .models import Client, Company
class ClientCreateForm(ModelForm):
class Meta:
model = Client
fields = [
'company',
'first_name',
'last_name',
'title',
'cell',
'desk',
'email'
]
widgets = {
'company': Select(attrs={'required': True}),
+ 'first_name': TextInput(attrs={'required': True}),
+ 'last_name': TextInput(attrs={'required': True}),
+ 'title': TextInput(attrs={'required': True}),
+ 'cell': TextInput(attrs={'required': True}),
+ 'email': TextInput(attrs={'required': True}),
}
class CompanyCreateForm(ModelForm):
class Meta:
model = Company
fields = [
'company_name',
'phone',
'address',
'address2',
'city',
'state',
'postal',
'st_rate',
'ot_rate'
]
widgets = {
'company_name': TextInput(attrs={'required': True}),
+ 'phone': TextInput(attrs={'required': True}),
+ 'address': TextInput(attrs={'required': True}),
+ 'city': TextInput(attrs={'required': True}),
+ 'postal': TextInput(attrs={'required': True}),
}
class CompanyListForm(ModelForm):
class Meta:
model = Client
fields = ['company']
| Make fields required on new client and company | ## Code Before:
from django.forms import ModelForm, Select, TextInput
from .models import Client, Company
class ClientCreateForm(ModelForm):
class Meta:
model = Client
fields = [
'company',
'first_name',
'last_name',
'title',
'cell',
'desk',
'email'
]
widgets = {
'company': Select(attrs={'required': True}),
}
class CompanyCreateForm(ModelForm):
class Meta:
model = Company
fields = [
'company_name',
'phone',
'address',
'address2',
'city',
'state',
'postal',
'st_rate',
'ot_rate'
]
widgets = {
'company_name': TextInput(attrs={'required': True}),
}
class CompanyListForm(ModelForm):
class Meta:
model = Client
fields = ['company']
## Instruction:
Make fields required on new client and company
## Code After:
from django.forms import ModelForm, Select, TextInput
from .models import Client, Company
class ClientCreateForm(ModelForm):
class Meta:
model = Client
fields = [
'company',
'first_name',
'last_name',
'title',
'cell',
'desk',
'email'
]
widgets = {
'company': Select(attrs={'required': True}),
'first_name': TextInput(attrs={'required': True}),
'last_name': TextInput(attrs={'required': True}),
'title': TextInput(attrs={'required': True}),
'cell': TextInput(attrs={'required': True}),
'email': TextInput(attrs={'required': True}),
}
class CompanyCreateForm(ModelForm):
class Meta:
model = Company
fields = [
'company_name',
'phone',
'address',
'address2',
'city',
'state',
'postal',
'st_rate',
'ot_rate'
]
widgets = {
'company_name': TextInput(attrs={'required': True}),
'phone': TextInput(attrs={'required': True}),
'address': TextInput(attrs={'required': True}),
'city': TextInput(attrs={'required': True}),
'postal': TextInput(attrs={'required': True}),
}
class CompanyListForm(ModelForm):
class Meta:
model = Client
fields = ['company']
|
b7c52258d39e5c0ee8fba2be87e8e671e0c583c3 | xclib/postfix_io.py | xclib/postfix_io.py | import sys
import re
import logging
# Message formats described in `../doc/Protocol.md`
class postfix_io:
@classmethod
def read_request(cls, infd, outfd):
# "for line in sys.stdin:" would be more concise but adds unwanted buffering
while True:
line = infd.readline()
if not line:
break
match = re.match('^get ([^\000- @%]+)@([^\000- @%]+)\r?\n$', line)
if match:
yield ('isuser',) + match.group(1,2)
else:
logging.error('Illegal request format: ' + line)
outfd.write('500 Illegal request format\n')
outfd.flush()
@classmethod
def write_response(cls, flag, outfd):
if flag == None:
outfd.write('400 Trouble connecting to backend\n')
elif flag:
outfd.write('200 OK\n')
else:
outfd.write('500 No such user\n')
outfd.flush()
| import sys
import re
import logging
# Message formats described in `../doc/Protocol.md`
class postfix_io:
@classmethod
def read_request(cls, infd, outfd):
# "for line in sys.stdin:" would be more concise but adds unwanted buffering
while True:
line = infd.readline()
if not line:
break
match = re.match('^get ([^\000- @%]+)@([^\000- @%]+)\r?\n$', line)
if match:
yield ('isuser',) + match.group(1,2)
elif line == 'quit':
yield ('quit',)
else:
logging.error('Illegal request format: ' + line)
outfd.write('500 Illegal request format\n')
outfd.flush()
@classmethod
def write_response(cls, flag, outfd):
if flag == None:
outfd.write('400 Trouble connecting to backend\n')
elif flag:
outfd.write('200 OK\n')
else:
outfd.write('500 No such user\n')
outfd.flush()
| Add quit command to postfix | Add quit command to postfix
| Python | mit | jsxc/xmpp-cloud-auth,jsxc/xmpp-cloud-auth,jsxc/xmpp-cloud-auth,jsxc/xmpp-cloud-auth | import sys
import re
import logging
# Message formats described in `../doc/Protocol.md`
class postfix_io:
@classmethod
def read_request(cls, infd, outfd):
# "for line in sys.stdin:" would be more concise but adds unwanted buffering
while True:
line = infd.readline()
if not line:
break
match = re.match('^get ([^\000- @%]+)@([^\000- @%]+)\r?\n$', line)
if match:
yield ('isuser',) + match.group(1,2)
+ elif line == 'quit':
+ yield ('quit',)
else:
logging.error('Illegal request format: ' + line)
outfd.write('500 Illegal request format\n')
outfd.flush()
@classmethod
def write_response(cls, flag, outfd):
if flag == None:
outfd.write('400 Trouble connecting to backend\n')
elif flag:
outfd.write('200 OK\n')
else:
outfd.write('500 No such user\n')
outfd.flush()
| Add quit command to postfix | ## Code Before:
import sys
import re
import logging
# Message formats described in `../doc/Protocol.md`
class postfix_io:
@classmethod
def read_request(cls, infd, outfd):
# "for line in sys.stdin:" would be more concise but adds unwanted buffering
while True:
line = infd.readline()
if not line:
break
match = re.match('^get ([^\000- @%]+)@([^\000- @%]+)\r?\n$', line)
if match:
yield ('isuser',) + match.group(1,2)
else:
logging.error('Illegal request format: ' + line)
outfd.write('500 Illegal request format\n')
outfd.flush()
@classmethod
def write_response(cls, flag, outfd):
if flag == None:
outfd.write('400 Trouble connecting to backend\n')
elif flag:
outfd.write('200 OK\n')
else:
outfd.write('500 No such user\n')
outfd.flush()
## Instruction:
Add quit command to postfix
## Code After:
import sys
import re
import logging
# Message formats described in `../doc/Protocol.md`
class postfix_io:
@classmethod
def read_request(cls, infd, outfd):
# "for line in sys.stdin:" would be more concise but adds unwanted buffering
while True:
line = infd.readline()
if not line:
break
match = re.match('^get ([^\000- @%]+)@([^\000- @%]+)\r?\n$', line)
if match:
yield ('isuser',) + match.group(1,2)
elif line == 'quit':
yield ('quit',)
else:
logging.error('Illegal request format: ' + line)
outfd.write('500 Illegal request format\n')
outfd.flush()
@classmethod
def write_response(cls, flag, outfd):
if flag == None:
outfd.write('400 Trouble connecting to backend\n')
elif flag:
outfd.write('200 OK\n')
else:
outfd.write('500 No such user\n')
outfd.flush()
|
3be9ef4c2ec4c2b10503633c55fd1634f4d5debb | comics/search/indexes.py | comics/search/indexes.py | from django.template.loader import get_template
from django.template import Context
from haystack import indexes
from haystack import site
from comics.core.models import Image
class ImageIndex(indexes.SearchIndex):
document = indexes.CharField(document=True, use_template=True)
rendered = indexes.CharField(indexed=False)
def prepare_rendered(self, obj):
template = get_template('search/results.html')
context = Context({'release': obj.get_first_release()})
return template.render(context)
site.register(Image, ImageIndex)
| from django.template.loader import get_template
from django.template import Context
from haystack import indexes
from haystack import site
from comics.core.models import Image
class ImageIndex(indexes.SearchIndex):
document = indexes.CharField(document=True, use_template=True)
rendered = indexes.CharField(indexed=False)
def get_updated_field(self):
return 'fetched'
def prepare_rendered(self, obj):
template = get_template('search/results.html')
context = Context({'release': obj.get_first_release()})
return template.render(context)
site.register(Image, ImageIndex)
| Add get_updated_field to search index | Add get_updated_field to search index
| Python | agpl-3.0 | jodal/comics,jodal/comics,klette/comics,datagutten/comics,jodal/comics,datagutten/comics,jodal/comics,datagutten/comics,klette/comics,klette/comics,datagutten/comics | from django.template.loader import get_template
from django.template import Context
from haystack import indexes
from haystack import site
from comics.core.models import Image
class ImageIndex(indexes.SearchIndex):
document = indexes.CharField(document=True, use_template=True)
rendered = indexes.CharField(indexed=False)
+ def get_updated_field(self):
+ return 'fetched'
+
def prepare_rendered(self, obj):
template = get_template('search/results.html')
context = Context({'release': obj.get_first_release()})
return template.render(context)
site.register(Image, ImageIndex)
| Add get_updated_field to search index | ## Code Before:
from django.template.loader import get_template
from django.template import Context
from haystack import indexes
from haystack import site
from comics.core.models import Image
class ImageIndex(indexes.SearchIndex):
document = indexes.CharField(document=True, use_template=True)
rendered = indexes.CharField(indexed=False)
def prepare_rendered(self, obj):
template = get_template('search/results.html')
context = Context({'release': obj.get_first_release()})
return template.render(context)
site.register(Image, ImageIndex)
## Instruction:
Add get_updated_field to search index
## Code After:
from django.template.loader import get_template
from django.template import Context
from haystack import indexes
from haystack import site
from comics.core.models import Image
class ImageIndex(indexes.SearchIndex):
document = indexes.CharField(document=True, use_template=True)
rendered = indexes.CharField(indexed=False)
def get_updated_field(self):
return 'fetched'
def prepare_rendered(self, obj):
template = get_template('search/results.html')
context = Context({'release': obj.get_first_release()})
return template.render(context)
site.register(Image, ImageIndex)
|
b0701205f0b96645d3643bab5188f349cd604603 | binaries/streamer_binaries/__init__.py | binaries/streamer_binaries/__init__.py | import os
__version__ = '0.5.0'
# Module level variables.
ffmpeg = ''
"""The path to the installed FFmpeg binary."""
ffprobe = ''
"""The path to the installed FFprobe binary."""
packager = ''
"""The path to the installed Shaka Packager binary."""
# Get the directory path where this __init__.py file resides.
_dir_path = os.path.abspath(os.path.dirname(__file__))
# This will be executed at import time.
for _file in os.listdir(_dir_path):
if _file.startswith('ffmpeg'):
ffmpeg = os.path.join(_dir_path, _file)
elif _file.startswith('ffprobe'):
ffprobe = os.path.join(_dir_path, _file)
elif _file.startswith('packager'):
packager = os.path.join(_dir_path, _file)
| import os
import platform
__version__ = '0.5.0'
# Get the directory path where this __init__.py file resides.
_dir_path = os.path.abspath(os.path.dirname(__file__))
# Compute the part of the file name that indicates the OS.
_os = {
'Linux': 'linux',
'Windows': 'win',
'Darwin': 'osx',
}[platform.system()]
# Compute the part of the file name that indicates the CPU architecture.
_cpu = {
'x86_64': 'x64', # Linux/Mac report this key
'AMD64': 'x64', # Windows reports this key
'aarch64': 'arm64',
}[platform.machine()]
# Module level variables.
ffmpeg = os.path.join(_dir_path, 'ffmpeg-{}-{}'.format(_os, _cpu))
"""The path to the installed FFmpeg binary."""
ffprobe = os.path.join(_dir_path, 'ffprobe-{}-{}'.format(_os, _cpu))
"""The path to the installed FFprobe binary."""
packager = os.path.join(_dir_path, 'packager-{}-{}'.format(_os, _cpu))
"""The path to the installed Shaka Packager binary."""
| Fix usage of local streamer_binaries module | build: Fix usage of local streamer_binaries module
The old code would search the directory for the binary to use. This
worked fine if the package were installed, but when adding the module
path to PYTHONPATH, this technique would fail because the folder would
have executables for all architetures.
Now we will compute the exact filename we expect for each exectuable,
allowing the module to be used locally without installation. This is
useful for testing pre-release versions of the module.
Change-Id: I35d3a1009b677ef9d29379147312abe3d0a7f8b2
| Python | apache-2.0 | shaka-project/shaka-streamer,shaka-project/shaka-streamer | import os
+ import platform
__version__ = '0.5.0'
-
-
- # Module level variables.
- ffmpeg = ''
- """The path to the installed FFmpeg binary."""
- ffprobe = ''
- """The path to the installed FFprobe binary."""
- packager = ''
- """The path to the installed Shaka Packager binary."""
# Get the directory path where this __init__.py file resides.
_dir_path = os.path.abspath(os.path.dirname(__file__))
+ # Compute the part of the file name that indicates the OS.
+ _os = {
+ 'Linux': 'linux',
+ 'Windows': 'win',
+ 'Darwin': 'osx',
+ }[platform.system()]
- # This will be executed at import time.
- for _file in os.listdir(_dir_path):
- if _file.startswith('ffmpeg'):
- ffmpeg = os.path.join(_dir_path, _file)
- elif _file.startswith('ffprobe'):
- ffprobe = os.path.join(_dir_path, _file)
- elif _file.startswith('packager'):
- packager = os.path.join(_dir_path, _file)
+ # Compute the part of the file name that indicates the CPU architecture.
+ _cpu = {
+ 'x86_64': 'x64', # Linux/Mac report this key
+ 'AMD64': 'x64', # Windows reports this key
+ 'aarch64': 'arm64',
+ }[platform.machine()]
+
+ # Module level variables.
+ ffmpeg = os.path.join(_dir_path, 'ffmpeg-{}-{}'.format(_os, _cpu))
+ """The path to the installed FFmpeg binary."""
+
+ ffprobe = os.path.join(_dir_path, 'ffprobe-{}-{}'.format(_os, _cpu))
+ """The path to the installed FFprobe binary."""
+
+ packager = os.path.join(_dir_path, 'packager-{}-{}'.format(_os, _cpu))
+ """The path to the installed Shaka Packager binary."""
+
+ | Fix usage of local streamer_binaries module | ## Code Before:
import os
__version__ = '0.5.0'
# Module level variables.
ffmpeg = ''
"""The path to the installed FFmpeg binary."""
ffprobe = ''
"""The path to the installed FFprobe binary."""
packager = ''
"""The path to the installed Shaka Packager binary."""
# Get the directory path where this __init__.py file resides.
_dir_path = os.path.abspath(os.path.dirname(__file__))
# This will be executed at import time.
for _file in os.listdir(_dir_path):
if _file.startswith('ffmpeg'):
ffmpeg = os.path.join(_dir_path, _file)
elif _file.startswith('ffprobe'):
ffprobe = os.path.join(_dir_path, _file)
elif _file.startswith('packager'):
packager = os.path.join(_dir_path, _file)
## Instruction:
Fix usage of local streamer_binaries module
## Code After:
import os
import platform
__version__ = '0.5.0'
# Get the directory path where this __init__.py file resides.
_dir_path = os.path.abspath(os.path.dirname(__file__))
# Compute the part of the file name that indicates the OS.
_os = {
'Linux': 'linux',
'Windows': 'win',
'Darwin': 'osx',
}[platform.system()]
# Compute the part of the file name that indicates the CPU architecture.
_cpu = {
'x86_64': 'x64', # Linux/Mac report this key
'AMD64': 'x64', # Windows reports this key
'aarch64': 'arm64',
}[platform.machine()]
# Module level variables.
ffmpeg = os.path.join(_dir_path, 'ffmpeg-{}-{}'.format(_os, _cpu))
"""The path to the installed FFmpeg binary."""
ffprobe = os.path.join(_dir_path, 'ffprobe-{}-{}'.format(_os, _cpu))
"""The path to the installed FFprobe binary."""
packager = os.path.join(_dir_path, 'packager-{}-{}'.format(_os, _cpu))
"""The path to the installed Shaka Packager binary."""
|
d57670995709ae60e9cbed575b1ac9e63cba113a | src/env.py | src/env.py | class Environment:
def __init__(self, par=None, bnd=None):
if bnd:
self.binds = bnd
else:
self.binds = {}
self.parent = par
if par:
self.level = self.parent.level + 1
else:
self.level = 0
def get(self, key):
if key in self.binds:
return self.binds[key]
elif self.parent:
return self.parent.get(key)
else:
return None
def set(self, key, value):
if key in self.binds:
self.binds[key] = value
elif self.parent:
self.parent.set(key,value)
else:
self.binds[key] = value
def __repr__( self):
ret = "\n%s:\n" % self.level
keys = self.binds.keys()
for key in keys:
ret = ret + " %5s: %s\n" % (key, self.binds[key])
return ret
| class Environment:
def __init__(self, par=None, bnd=None):
if bnd:
self.binds = bnd
else:
self.binds = {}
self.parent = par
if par:
self.level = self.parent.level + 1
else:
self.level = 0
def get(self, key):
if key in self.binds:
return self.binds[key]
elif self.parent:
return self.parent.get(key)
else:
raise ValueError("Invalid symbol " + key)
def set(self, key, value):
if key in self.binds:
self.binds[key] = value
elif self.parent:
self.parent.set(key,value)
else:
self.binds[key] = value
def __repr__( self):
ret = "\n%s:\n" % self.level
keys = self.binds.keys()
for key in keys:
ret = ret + " %5s: %s\n" % (key, self.binds[key])
return ret
| Raise an error when a symbol cannot be found | Raise an error when a symbol cannot be found
| Python | mit | readevalprintlove/lithp,fogus/lithp,fogus/lithp,readevalprintlove/lithp,magomsk/lithp,readevalprintlove/lithp,fogus/lithp,magomsk/lithp,magomsk/lithp | class Environment:
def __init__(self, par=None, bnd=None):
if bnd:
self.binds = bnd
else:
self.binds = {}
self.parent = par
if par:
self.level = self.parent.level + 1
else:
self.level = 0
def get(self, key):
if key in self.binds:
return self.binds[key]
elif self.parent:
return self.parent.get(key)
else:
- return None
+ raise ValueError("Invalid symbol " + key)
def set(self, key, value):
if key in self.binds:
self.binds[key] = value
elif self.parent:
self.parent.set(key,value)
else:
self.binds[key] = value
def __repr__( self):
ret = "\n%s:\n" % self.level
keys = self.binds.keys()
for key in keys:
ret = ret + " %5s: %s\n" % (key, self.binds[key])
return ret
| Raise an error when a symbol cannot be found | ## Code Before:
class Environment:
def __init__(self, par=None, bnd=None):
if bnd:
self.binds = bnd
else:
self.binds = {}
self.parent = par
if par:
self.level = self.parent.level + 1
else:
self.level = 0
def get(self, key):
if key in self.binds:
return self.binds[key]
elif self.parent:
return self.parent.get(key)
else:
return None
def set(self, key, value):
if key in self.binds:
self.binds[key] = value
elif self.parent:
self.parent.set(key,value)
else:
self.binds[key] = value
def __repr__( self):
ret = "\n%s:\n" % self.level
keys = self.binds.keys()
for key in keys:
ret = ret + " %5s: %s\n" % (key, self.binds[key])
return ret
## Instruction:
Raise an error when a symbol cannot be found
## Code After:
class Environment:
def __init__(self, par=None, bnd=None):
if bnd:
self.binds = bnd
else:
self.binds = {}
self.parent = par
if par:
self.level = self.parent.level + 1
else:
self.level = 0
def get(self, key):
if key in self.binds:
return self.binds[key]
elif self.parent:
return self.parent.get(key)
else:
raise ValueError("Invalid symbol " + key)
def set(self, key, value):
if key in self.binds:
self.binds[key] = value
elif self.parent:
self.parent.set(key,value)
else:
self.binds[key] = value
def __repr__( self):
ret = "\n%s:\n" % self.level
keys = self.binds.keys()
for key in keys:
ret = ret + " %5s: %s\n" % (key, self.binds[key])
return ret
|
d1d0576b94ce000a77e08bd8353f5c1c10b0839f | setup.py | setup.py | from distutils.core import setup
setup(
name = 'AudioTranscode',
version = '1.0',
packages = ['audioTranscode'],
scripts = ['transcode'],
author = 'Jeffrey Aylesworth',
author_email = '[email protected]',
license = 'MIT',
url = 'http://github.com/jeffayle/Transcode'
)
| from distutils.core import setup
setup(
name = 'AudioTranscode',
version = '1.0',
packages = ['audioTranscode','audioTranscode.encoders','audioTranscode.decoders'],
scripts = ['transcode'],
author = 'Jeffrey Aylesworth',
author_email = '[email protected]',
license = 'MIT',
url = 'http://github.com/jeffayle/Transcode'
)
| Include .encoders and .decoders packages with the distribution | Include .encoders and .decoders packages with the distribution | Python | isc | jeffayle/Transcode | from distutils.core import setup
setup(
name = 'AudioTranscode',
version = '1.0',
- packages = ['audioTranscode'],
+ packages = ['audioTranscode','audioTranscode.encoders','audioTranscode.decoders'],
scripts = ['transcode'],
author = 'Jeffrey Aylesworth',
author_email = '[email protected]',
license = 'MIT',
url = 'http://github.com/jeffayle/Transcode'
)
| Include .encoders and .decoders packages with the distribution | ## Code Before:
from distutils.core import setup
setup(
name = 'AudioTranscode',
version = '1.0',
packages = ['audioTranscode'],
scripts = ['transcode'],
author = 'Jeffrey Aylesworth',
author_email = '[email protected]',
license = 'MIT',
url = 'http://github.com/jeffayle/Transcode'
)
## Instruction:
Include .encoders and .decoders packages with the distribution
## Code After:
from distutils.core import setup
setup(
name = 'AudioTranscode',
version = '1.0',
packages = ['audioTranscode','audioTranscode.encoders','audioTranscode.decoders'],
scripts = ['transcode'],
author = 'Jeffrey Aylesworth',
author_email = '[email protected]',
license = 'MIT',
url = 'http://github.com/jeffayle/Transcode'
)
|
ef7f0090bfb7f37fa584123520b02f69a3a392a0 | setup.py | setup.py |
from distutils.core import setup
setup(
name="workout",
version="0.2.0",
description="Store and display workout-data from FIT-files in mezzanine.",
author="Arnold Krille",
author_email="[email protected]",
url="http://github.com/kampfschlaefer/mezzanine-workout",
license=open('LICENSE', 'r').read(),
packages=['workout'],
package_data={'workout': ['templates/workout/*']},
install_requires=['fitparse==0.0.1-dev'],
dependency_links=['git+https://github.com/kampfschlaefer/python-fitparse.git@ng#egg=fitparse-0.0.1-dev'],
)
|
from distutils.core import setup
setup(
name="workout",
version="0.2.1",
description="Store and display workout-data from FIT-files in mezzanine.",
author="Arnold Krille",
author_email="[email protected]",
url="http://github.com/kampfschlaefer/mezzanine-workout",
license=open('LICENSE', 'r').read(),
packages=['workout'],
package_data={'workout': ['templates/workout/*', 'static/*']},
install_requires=['fitparse==0.0.1-dev'],
dependency_links=['git+https://github.com/kampfschlaefer/python-fitparse.git@ng#egg=fitparse-0.0.1-dev'],
)
| Fix inclusion of static files into the package | Fix inclusion of static files into the package
and increase the version-number a bit.
| Python | apache-2.0 | kampfschlaefer/mezzanine-workout,kampfschlaefer/mezzanine-workout,kampfschlaefer/mezzanine-workout |
from distutils.core import setup
setup(
name="workout",
- version="0.2.0",
+ version="0.2.1",
description="Store and display workout-data from FIT-files in mezzanine.",
author="Arnold Krille",
author_email="[email protected]",
url="http://github.com/kampfschlaefer/mezzanine-workout",
license=open('LICENSE', 'r').read(),
packages=['workout'],
- package_data={'workout': ['templates/workout/*']},
+ package_data={'workout': ['templates/workout/*', 'static/*']},
install_requires=['fitparse==0.0.1-dev'],
dependency_links=['git+https://github.com/kampfschlaefer/python-fitparse.git@ng#egg=fitparse-0.0.1-dev'],
)
| Fix inclusion of static files into the package | ## Code Before:
from distutils.core import setup
setup(
name="workout",
version="0.2.0",
description="Store and display workout-data from FIT-files in mezzanine.",
author="Arnold Krille",
author_email="[email protected]",
url="http://github.com/kampfschlaefer/mezzanine-workout",
license=open('LICENSE', 'r').read(),
packages=['workout'],
package_data={'workout': ['templates/workout/*']},
install_requires=['fitparse==0.0.1-dev'],
dependency_links=['git+https://github.com/kampfschlaefer/python-fitparse.git@ng#egg=fitparse-0.0.1-dev'],
)
## Instruction:
Fix inclusion of static files into the package
## Code After:
from distutils.core import setup
setup(
name="workout",
version="0.2.1",
description="Store and display workout-data from FIT-files in mezzanine.",
author="Arnold Krille",
author_email="[email protected]",
url="http://github.com/kampfschlaefer/mezzanine-workout",
license=open('LICENSE', 'r').read(),
packages=['workout'],
package_data={'workout': ['templates/workout/*', 'static/*']},
install_requires=['fitparse==0.0.1-dev'],
dependency_links=['git+https://github.com/kampfschlaefer/python-fitparse.git@ng#egg=fitparse-0.0.1-dev'],
)
|
5c8754aefa0a0b2f9e49d95970475a66a6de9510 | start.py | start.py | from core.computer import Computer
from time import sleep
from console import start as start_console
# Initialize computer instance
computer = Computer()
computer.start_monitoring()
computer.processor.start_monitoring()
for mem in computer.nonvolatile_memory:
mem.start_monitoring()
computer.virtual_memory.start_monitoring()
# Start console interface
start_console(computer)
# Shutdown
computer.processor.stop_monitoring()
for mem in computer.nonvolatile_memory:
mem.stop_monitoring()
computer.virtual_memory.stop_monitoring()
sleep(1) | from core.computer import Computer
from time import sleep
from console import start as start_console
# Initialize computer instance
computer = Computer()
computer.start_monitoring()
computer.processor.start_monitoring()
for mem in computer.nonvolatile_memory:
mem.start_monitoring()
computer.virtual_memory.start_monitoring()
# Start console interface
start_console(computer)
# Shutdown
computer.processor.stop_monitoring()
for mem in computer.nonvolatile_memory:
mem.stop_monitoring()
computer.virtual_memory.stop_monitoring()
computer.stop_monitoring()
sleep(1) | Stop monitoring computer on shutdown. | Stop monitoring computer on shutdown.
| Python | bsd-3-clause | uzumaxy/pyspectator | from core.computer import Computer
from time import sleep
from console import start as start_console
# Initialize computer instance
computer = Computer()
computer.start_monitoring()
computer.processor.start_monitoring()
for mem in computer.nonvolatile_memory:
mem.start_monitoring()
computer.virtual_memory.start_monitoring()
# Start console interface
start_console(computer)
# Shutdown
computer.processor.stop_monitoring()
for mem in computer.nonvolatile_memory:
mem.stop_monitoring()
computer.virtual_memory.stop_monitoring()
+ computer.stop_monitoring()
sleep(1) | Stop monitoring computer on shutdown. | ## Code Before:
from core.computer import Computer
from time import sleep
from console import start as start_console
# Initialize computer instance
computer = Computer()
computer.start_monitoring()
computer.processor.start_monitoring()
for mem in computer.nonvolatile_memory:
mem.start_monitoring()
computer.virtual_memory.start_monitoring()
# Start console interface
start_console(computer)
# Shutdown
computer.processor.stop_monitoring()
for mem in computer.nonvolatile_memory:
mem.stop_monitoring()
computer.virtual_memory.stop_monitoring()
sleep(1)
## Instruction:
Stop monitoring computer on shutdown.
## Code After:
from core.computer import Computer
from time import sleep
from console import start as start_console
# Initialize computer instance
computer = Computer()
computer.start_monitoring()
computer.processor.start_monitoring()
for mem in computer.nonvolatile_memory:
mem.start_monitoring()
computer.virtual_memory.start_monitoring()
# Start console interface
start_console(computer)
# Shutdown
computer.processor.stop_monitoring()
for mem in computer.nonvolatile_memory:
mem.stop_monitoring()
computer.virtual_memory.stop_monitoring()
computer.stop_monitoring()
sleep(1) |
1d448b65840509c5f21abb7f5ad65a6ce20b139c | packs/travisci/actions/lib/action.py | packs/travisci/actions/lib/action.py | from st2actions.runners.pythonrunner import Action
import requests
class TravisCI(Action):
def __init__(self, config):
super(TravisCI, self).__init__(config)
def _init_header(self):
travis_header = {
'User_Agent': self.config['User-Agent'],
'Accept': self.config['Accept'],
'Host': self.config['Host'],
}
return travis_header
def _auth_header(self):
_HEADERS = self._init_header()
_HEADERS['Authorization'] = self.config["Authorization"]
_HEADERS['Content-Type'] = self.config["Content-Type"]
return _HEADERS
def _perform_request(self, uri, method, data=None, requires_auth=False):
if method == "GET":
if requires_auth:
_HEADERS = self._auth_header()
else:
_HEADERS = self._init_header()
response = requests.get(uri, headers=_HEADERS)
elif method == "POST":
_HEADERS = self._auth_header
response = requests.post(uri, headers=_HEADERS)
elif method == "PUT":
_HEADERS = self._auth_header()
_HEADERS['Authorization'] = self.config["Authorization"]
_HEADERS['Content-Type'] = self.config["Content-Type"]
response = requests.put(uri, data=data, headers=_HEADERS)
return response
| import requests
from st2actions.runners.pythonrunner import Action
API_URL = 'https://api.travis-ci.org'
HEADERS_ACCEPT = 'application/vnd.travis-ci.2+json'
HEADERS_HOST = ''
class TravisCI(Action):
def __init__(self, config):
super(TravisCI, self).__init__(config)
def _get_auth_headers(self):
headers = {}
headers['Authorization'] = self.config["Authorization"]
headers['Content-Type'] = self.config["Content-Type"]
return headers
def _perform_request(self, uri, method, data=None, requires_auth=False):
if method == "GET":
if requires_auth:
headers = self._get_auth_headers()
else:
headers = {}
response = requests.get(uri, headers=headers)
elif method == 'POST':
headers = self._get_auth_headers()
response = requests.post(uri, headers=headers)
elif method == 'PUT':
headers = self._get_auth_headers()
response = requests.put(uri, data=data, headers=headers)
return response
| Remove unnecessary values from the config - those should just be constants. | Remove unnecessary values from the config - those should just be constants.
| Python | apache-2.0 | StackStorm/st2contrib,StackStorm/st2contrib,pidah/st2contrib,pidah/st2contrib,pearsontechnology/st2contrib,StackStorm/st2contrib,pearsontechnology/st2contrib,tonybaloney/st2contrib,psychopenguin/st2contrib,digideskio/st2contrib,pearsontechnology/st2contrib,lmEshoo/st2contrib,tonybaloney/st2contrib,tonybaloney/st2contrib,pearsontechnology/st2contrib,lmEshoo/st2contrib,armab/st2contrib,armab/st2contrib,digideskio/st2contrib,pidah/st2contrib,armab/st2contrib,psychopenguin/st2contrib | + import requests
+
from st2actions.runners.pythonrunner import Action
- import requests
+
+ API_URL = 'https://api.travis-ci.org'
+ HEADERS_ACCEPT = 'application/vnd.travis-ci.2+json'
+ HEADERS_HOST = ''
class TravisCI(Action):
def __init__(self, config):
super(TravisCI, self).__init__(config)
- def _init_header(self):
- travis_header = {
- 'User_Agent': self.config['User-Agent'],
- 'Accept': self.config['Accept'],
- 'Host': self.config['Host'],
- }
- return travis_header
-
- def _auth_header(self):
+ def _get_auth_headers(self):
- _HEADERS = self._init_header()
+ headers = {}
- _HEADERS['Authorization'] = self.config["Authorization"]
+ headers['Authorization'] = self.config["Authorization"]
- _HEADERS['Content-Type'] = self.config["Content-Type"]
+ headers['Content-Type'] = self.config["Content-Type"]
- return _HEADERS
+ return headers
def _perform_request(self, uri, method, data=None, requires_auth=False):
if method == "GET":
if requires_auth:
- _HEADERS = self._auth_header()
+ headers = self._get_auth_headers()
else:
- _HEADERS = self._init_header()
+ headers = {}
- response = requests.get(uri, headers=_HEADERS)
+ response = requests.get(uri, headers=headers)
- elif method == "POST":
+ elif method == 'POST':
- _HEADERS = self._auth_header
+ headers = self._get_auth_headers()
- response = requests.post(uri, headers=_HEADERS)
+ response = requests.post(uri, headers=headers)
- elif method == "PUT":
+ elif method == 'PUT':
- _HEADERS = self._auth_header()
+ headers = self._get_auth_headers()
- _HEADERS['Authorization'] = self.config["Authorization"]
- _HEADERS['Content-Type'] = self.config["Content-Type"]
- response = requests.put(uri, data=data, headers=_HEADERS)
+ response = requests.put(uri, data=data, headers=headers)
return response
| Remove unnecessary values from the config - those should just be constants. | ## Code Before:
from st2actions.runners.pythonrunner import Action
import requests
class TravisCI(Action):
def __init__(self, config):
super(TravisCI, self).__init__(config)
def _init_header(self):
travis_header = {
'User_Agent': self.config['User-Agent'],
'Accept': self.config['Accept'],
'Host': self.config['Host'],
}
return travis_header
def _auth_header(self):
_HEADERS = self._init_header()
_HEADERS['Authorization'] = self.config["Authorization"]
_HEADERS['Content-Type'] = self.config["Content-Type"]
return _HEADERS
def _perform_request(self, uri, method, data=None, requires_auth=False):
if method == "GET":
if requires_auth:
_HEADERS = self._auth_header()
else:
_HEADERS = self._init_header()
response = requests.get(uri, headers=_HEADERS)
elif method == "POST":
_HEADERS = self._auth_header
response = requests.post(uri, headers=_HEADERS)
elif method == "PUT":
_HEADERS = self._auth_header()
_HEADERS['Authorization'] = self.config["Authorization"]
_HEADERS['Content-Type'] = self.config["Content-Type"]
response = requests.put(uri, data=data, headers=_HEADERS)
return response
## Instruction:
Remove unnecessary values from the config - those should just be constants.
## Code After:
import requests
from st2actions.runners.pythonrunner import Action
API_URL = 'https://api.travis-ci.org'
HEADERS_ACCEPT = 'application/vnd.travis-ci.2+json'
HEADERS_HOST = ''
class TravisCI(Action):
def __init__(self, config):
super(TravisCI, self).__init__(config)
def _get_auth_headers(self):
headers = {}
headers['Authorization'] = self.config["Authorization"]
headers['Content-Type'] = self.config["Content-Type"]
return headers
def _perform_request(self, uri, method, data=None, requires_auth=False):
if method == "GET":
if requires_auth:
headers = self._get_auth_headers()
else:
headers = {}
response = requests.get(uri, headers=headers)
elif method == 'POST':
headers = self._get_auth_headers()
response = requests.post(uri, headers=headers)
elif method == 'PUT':
headers = self._get_auth_headers()
response = requests.put(uri, data=data, headers=headers)
return response
|
68eb1bd58b84c1937f6f8d15bb9ea9f02a402e22 | tests/cdscommon.py | tests/cdscommon.py |
import hashlib
import os
import shutil
import cdsapi
SAMPLE_DATA_FOLDER = os.path.join(os.path.dirname(__file__), 'sample-data')
EXTENSIONS = {'grib': '.grib', 'netcdf': '.nc'}
def ensure_data(dataset, request, folder=SAMPLE_DATA_FOLDER, name='{uuid}.grib'):
request_text = str(sorted(request.items())).encode('utf-8')
uuid = hashlib.sha3_224(request_text).hexdigest()[:10]
format = request.get('format', 'grib')
ext = EXTENSIONS.get(format, '.bin')
name = name.format(**locals())
path = os.path.join(SAMPLE_DATA_FOLDER, name)
if not os.path.exists(path):
c = cdsapi.Client()
try:
c.retrieve(dataset, request, target=path + '.tmp')
shutil.move(path + '.tmp', path)
except:
os.unlink(path + '.tmp')
raise
return path
def message_count(dataset, request, count=1):
if dataset == 'reanalysis-era5-single-levels' \
and request.get('product_type') == 'ensemble_members':
count = 20
elif dataset == 'reanalysis-era5-pressure-levels' \
and request.get('product_type') == 'ensemble_members':
count = 20
elif dataset == 'seasonal-original-single-levels':
count = 51
elif dataset.startswith('seasonal-') and request.get('product_type') == 'monthly_mean':
count = 51
for key in ['year', 'month', 'day', 'time', 'leadtime_hour', 'leadtime_month']:
value = request.get(key)
if isinstance(value, list):
count *= len(value)
return count
|
import hashlib
import os
import shutil
import cdsapi
SAMPLE_DATA_FOLDER = os.path.join(os.path.dirname(__file__), 'sample-data')
EXTENSIONS = {'grib': '.grib', 'netcdf': '.nc'}
def ensure_data(dataset, request, folder=SAMPLE_DATA_FOLDER, name='{uuid}.grib'):
request_text = str(sorted(request.items())).encode('utf-8')
uuid = hashlib.sha3_224(request_text).hexdigest()[:10]
format = request.get('format', 'grib')
ext = EXTENSIONS.get(format, '.bin')
name = name.format(**locals())
path = os.path.join(SAMPLE_DATA_FOLDER, name)
if not os.path.exists(path):
c = cdsapi.Client()
try:
c.retrieve(dataset, request, target=path + '.tmp')
shutil.move(path + '.tmp', path)
except:
os.unlink(path + '.tmp')
raise
return path
| Drop impossible to get right code. | Drop impossible to get right code.
| Python | apache-2.0 | ecmwf/cfgrib |
import hashlib
import os
import shutil
import cdsapi
SAMPLE_DATA_FOLDER = os.path.join(os.path.dirname(__file__), 'sample-data')
EXTENSIONS = {'grib': '.grib', 'netcdf': '.nc'}
def ensure_data(dataset, request, folder=SAMPLE_DATA_FOLDER, name='{uuid}.grib'):
request_text = str(sorted(request.items())).encode('utf-8')
uuid = hashlib.sha3_224(request_text).hexdigest()[:10]
format = request.get('format', 'grib')
ext = EXTENSIONS.get(format, '.bin')
name = name.format(**locals())
path = os.path.join(SAMPLE_DATA_FOLDER, name)
if not os.path.exists(path):
c = cdsapi.Client()
try:
c.retrieve(dataset, request, target=path + '.tmp')
shutil.move(path + '.tmp', path)
except:
os.unlink(path + '.tmp')
raise
return path
-
- def message_count(dataset, request, count=1):
- if dataset == 'reanalysis-era5-single-levels' \
- and request.get('product_type') == 'ensemble_members':
- count = 20
- elif dataset == 'reanalysis-era5-pressure-levels' \
- and request.get('product_type') == 'ensemble_members':
- count = 20
- elif dataset == 'seasonal-original-single-levels':
- count = 51
- elif dataset.startswith('seasonal-') and request.get('product_type') == 'monthly_mean':
- count = 51
- for key in ['year', 'month', 'day', 'time', 'leadtime_hour', 'leadtime_month']:
- value = request.get(key)
- if isinstance(value, list):
- count *= len(value)
- return count
- | Drop impossible to get right code. | ## Code Before:
import hashlib
import os
import shutil
import cdsapi
SAMPLE_DATA_FOLDER = os.path.join(os.path.dirname(__file__), 'sample-data')
EXTENSIONS = {'grib': '.grib', 'netcdf': '.nc'}
def ensure_data(dataset, request, folder=SAMPLE_DATA_FOLDER, name='{uuid}.grib'):
request_text = str(sorted(request.items())).encode('utf-8')
uuid = hashlib.sha3_224(request_text).hexdigest()[:10]
format = request.get('format', 'grib')
ext = EXTENSIONS.get(format, '.bin')
name = name.format(**locals())
path = os.path.join(SAMPLE_DATA_FOLDER, name)
if not os.path.exists(path):
c = cdsapi.Client()
try:
c.retrieve(dataset, request, target=path + '.tmp')
shutil.move(path + '.tmp', path)
except:
os.unlink(path + '.tmp')
raise
return path
def message_count(dataset, request, count=1):
if dataset == 'reanalysis-era5-single-levels' \
and request.get('product_type') == 'ensemble_members':
count = 20
elif dataset == 'reanalysis-era5-pressure-levels' \
and request.get('product_type') == 'ensemble_members':
count = 20
elif dataset == 'seasonal-original-single-levels':
count = 51
elif dataset.startswith('seasonal-') and request.get('product_type') == 'monthly_mean':
count = 51
for key in ['year', 'month', 'day', 'time', 'leadtime_hour', 'leadtime_month']:
value = request.get(key)
if isinstance(value, list):
count *= len(value)
return count
## Instruction:
Drop impossible to get right code.
## Code After:
import hashlib
import os
import shutil
import cdsapi
SAMPLE_DATA_FOLDER = os.path.join(os.path.dirname(__file__), 'sample-data')
EXTENSIONS = {'grib': '.grib', 'netcdf': '.nc'}
def ensure_data(dataset, request, folder=SAMPLE_DATA_FOLDER, name='{uuid}.grib'):
request_text = str(sorted(request.items())).encode('utf-8')
uuid = hashlib.sha3_224(request_text).hexdigest()[:10]
format = request.get('format', 'grib')
ext = EXTENSIONS.get(format, '.bin')
name = name.format(**locals())
path = os.path.join(SAMPLE_DATA_FOLDER, name)
if not os.path.exists(path):
c = cdsapi.Client()
try:
c.retrieve(dataset, request, target=path + '.tmp')
shutil.move(path + '.tmp', path)
except:
os.unlink(path + '.tmp')
raise
return path
|
db6b869eae416e72fa30b1d7271b0ed1d7dc1a55 | sqlalchemy_json/__init__.py | sqlalchemy_json/__init__.py | from sqlalchemy.ext.mutable import (
Mutable,
MutableDict)
from sqlalchemy_utils.types.json import JSONType
from . track import (
TrackedDict,
TrackedList)
__all__ = 'MutableJson', 'NestedMutableJson'
class NestedMutableDict(TrackedDict, Mutable):
@classmethod
def coerce(cls, key, value):
if isinstance(value, cls):
return value
if isinstance(value, dict):
return cls(value)
return super(cls).coerce(key, value)
class NestedMutableList(TrackedList, Mutable):
@classmethod
def coerce(cls, key, value):
if isinstance(value, cls):
return value
if isinstance(value, list):
return cls(value)
return super(cls).coerce(key, value)
class NestedMutable(Mutable):
"""SQLAlchemy `mutable` extension with nested change tracking."""
@classmethod
def coerce(cls, key, value):
"""Convert plain dictionary to NestedMutable."""
if isinstance(value, cls):
return value
if isinstance(value, dict):
return NestedMutableDict.coerce(key, value)
if isinstance(value, list):
return NestedMutableList.coerce(key, value)
return super(cls).coerce(key, value)
class MutableJson(JSONType):
"""JSON type for SQLAlchemy with change tracking at top level."""
class NestedMutableJson(JSONType):
"""JSON type for SQLAlchemy with nested change tracking."""
MutableDict.associate_with(MutableJson)
NestedMutable.associate_with(NestedMutableJson)
| from sqlalchemy.ext.mutable import (
Mutable,
MutableDict)
from sqlalchemy_utils.types.json import JSONType
from . track import (
TrackedDict,
TrackedList)
__all__ = 'MutableJson', 'NestedMutableJson'
class NestedMutableDict(TrackedDict, Mutable):
@classmethod
def coerce(cls, key, value):
if isinstance(value, cls):
return value
if isinstance(value, dict):
return cls(value)
return super(cls).coerce(key, value)
class NestedMutableList(TrackedList, Mutable):
@classmethod
def coerce(cls, key, value):
if isinstance(value, cls):
return value
if isinstance(value, list):
return cls(value)
return super(cls).coerce(key, value)
class NestedMutable(Mutable):
"""SQLAlchemy `mutable` extension with nested change tracking."""
@classmethod
def coerce(cls, key, value):
"""Convert plain dictionary to NestedMutable."""
if value is None:
return value
if isinstance(value, cls):
return value
if isinstance(value, dict):
return NestedMutableDict.coerce(key, value)
if isinstance(value, list):
return NestedMutableList.coerce(key, value)
return super(cls).coerce(key, value)
class MutableJson(JSONType):
"""JSON type for SQLAlchemy with change tracking at top level."""
class NestedMutableJson(JSONType):
"""JSON type for SQLAlchemy with nested change tracking."""
MutableDict.associate_with(MutableJson)
NestedMutable.associate_with(NestedMutableJson)
| Fix error when setting JSON value to be `None` | Fix error when setting JSON value to be `None`
Previously this would raise an attribute error as `None` does not
have the `coerce` attribute.
| Python | bsd-2-clause | edelooff/sqlalchemy-json | from sqlalchemy.ext.mutable import (
Mutable,
MutableDict)
from sqlalchemy_utils.types.json import JSONType
from . track import (
TrackedDict,
TrackedList)
__all__ = 'MutableJson', 'NestedMutableJson'
class NestedMutableDict(TrackedDict, Mutable):
@classmethod
def coerce(cls, key, value):
if isinstance(value, cls):
return value
if isinstance(value, dict):
return cls(value)
return super(cls).coerce(key, value)
class NestedMutableList(TrackedList, Mutable):
@classmethod
def coerce(cls, key, value):
if isinstance(value, cls):
return value
if isinstance(value, list):
return cls(value)
return super(cls).coerce(key, value)
class NestedMutable(Mutable):
"""SQLAlchemy `mutable` extension with nested change tracking."""
@classmethod
def coerce(cls, key, value):
"""Convert plain dictionary to NestedMutable."""
+ if value is None:
+ return value
if isinstance(value, cls):
return value
if isinstance(value, dict):
return NestedMutableDict.coerce(key, value)
if isinstance(value, list):
return NestedMutableList.coerce(key, value)
return super(cls).coerce(key, value)
class MutableJson(JSONType):
"""JSON type for SQLAlchemy with change tracking at top level."""
class NestedMutableJson(JSONType):
"""JSON type for SQLAlchemy with nested change tracking."""
MutableDict.associate_with(MutableJson)
NestedMutable.associate_with(NestedMutableJson)
| Fix error when setting JSON value to be `None` | ## Code Before:
from sqlalchemy.ext.mutable import (
Mutable,
MutableDict)
from sqlalchemy_utils.types.json import JSONType
from . track import (
TrackedDict,
TrackedList)
__all__ = 'MutableJson', 'NestedMutableJson'
class NestedMutableDict(TrackedDict, Mutable):
@classmethod
def coerce(cls, key, value):
if isinstance(value, cls):
return value
if isinstance(value, dict):
return cls(value)
return super(cls).coerce(key, value)
class NestedMutableList(TrackedList, Mutable):
@classmethod
def coerce(cls, key, value):
if isinstance(value, cls):
return value
if isinstance(value, list):
return cls(value)
return super(cls).coerce(key, value)
class NestedMutable(Mutable):
"""SQLAlchemy `mutable` extension with nested change tracking."""
@classmethod
def coerce(cls, key, value):
"""Convert plain dictionary to NestedMutable."""
if isinstance(value, cls):
return value
if isinstance(value, dict):
return NestedMutableDict.coerce(key, value)
if isinstance(value, list):
return NestedMutableList.coerce(key, value)
return super(cls).coerce(key, value)
class MutableJson(JSONType):
"""JSON type for SQLAlchemy with change tracking at top level."""
class NestedMutableJson(JSONType):
"""JSON type for SQLAlchemy with nested change tracking."""
MutableDict.associate_with(MutableJson)
NestedMutable.associate_with(NestedMutableJson)
## Instruction:
Fix error when setting JSON value to be `None`
## Code After:
from sqlalchemy.ext.mutable import (
Mutable,
MutableDict)
from sqlalchemy_utils.types.json import JSONType
from . track import (
TrackedDict,
TrackedList)
__all__ = 'MutableJson', 'NestedMutableJson'
class NestedMutableDict(TrackedDict, Mutable):
@classmethod
def coerce(cls, key, value):
if isinstance(value, cls):
return value
if isinstance(value, dict):
return cls(value)
return super(cls).coerce(key, value)
class NestedMutableList(TrackedList, Mutable):
@classmethod
def coerce(cls, key, value):
if isinstance(value, cls):
return value
if isinstance(value, list):
return cls(value)
return super(cls).coerce(key, value)
class NestedMutable(Mutable):
"""SQLAlchemy `mutable` extension with nested change tracking."""
@classmethod
def coerce(cls, key, value):
"""Convert plain dictionary to NestedMutable."""
if value is None:
return value
if isinstance(value, cls):
return value
if isinstance(value, dict):
return NestedMutableDict.coerce(key, value)
if isinstance(value, list):
return NestedMutableList.coerce(key, value)
return super(cls).coerce(key, value)
class MutableJson(JSONType):
"""JSON type for SQLAlchemy with change tracking at top level."""
class NestedMutableJson(JSONType):
"""JSON type for SQLAlchemy with nested change tracking."""
MutableDict.associate_with(MutableJson)
NestedMutable.associate_with(NestedMutableJson)
|
edf95105b7522b115dd4d3882ed57e707126c6af | timepiece/admin.py | timepiece/admin.py | from django.contrib import admin
from django.contrib.auth.models import Permission
from django.contrib.contenttypes.models import ContentType
class PermissionAdmin(admin.ModelAdmin):
list_display = ['__unicode__', 'codename']
list_filter = ['content_type__app_label']
class ContentTypeAdmin(admin.ModelAdmin):
list_display = ['id', 'app_label', 'model']
list_filter = ['app_label']
admin.site.register(Permission, PermissionAdmin)
admin.site.register(ContentType, ContentTypeAdmin)
| from django.contrib import admin
from django.contrib.auth.models import Permission
from django.contrib.contenttypes.models import ContentType
class PermissionAdmin(admin.ModelAdmin):
list_display = ['content_type', 'codename', 'name']
list_filter = ['content_type__app_label']
class ContentTypeAdmin(admin.ModelAdmin):
list_display = ['id', 'app_label', 'model']
list_filter = ['app_label']
admin.site.register(Permission, PermissionAdmin)
admin.site.register(ContentType, ContentTypeAdmin)
| Update Python/Django: Remove unnecessary reference to __unicode__ | Update Python/Django: Remove unnecessary reference to __unicode__
| Python | mit | BocuStudio/django-timepiece,caktus/django-timepiece,arbitrahj/django-timepiece,caktus/django-timepiece,arbitrahj/django-timepiece,BocuStudio/django-timepiece,caktus/django-timepiece,BocuStudio/django-timepiece,arbitrahj/django-timepiece | from django.contrib import admin
from django.contrib.auth.models import Permission
from django.contrib.contenttypes.models import ContentType
class PermissionAdmin(admin.ModelAdmin):
- list_display = ['__unicode__', 'codename']
+ list_display = ['content_type', 'codename', 'name']
list_filter = ['content_type__app_label']
class ContentTypeAdmin(admin.ModelAdmin):
list_display = ['id', 'app_label', 'model']
list_filter = ['app_label']
admin.site.register(Permission, PermissionAdmin)
admin.site.register(ContentType, ContentTypeAdmin)
| Update Python/Django: Remove unnecessary reference to __unicode__ | ## Code Before:
from django.contrib import admin
from django.contrib.auth.models import Permission
from django.contrib.contenttypes.models import ContentType
class PermissionAdmin(admin.ModelAdmin):
list_display = ['__unicode__', 'codename']
list_filter = ['content_type__app_label']
class ContentTypeAdmin(admin.ModelAdmin):
list_display = ['id', 'app_label', 'model']
list_filter = ['app_label']
admin.site.register(Permission, PermissionAdmin)
admin.site.register(ContentType, ContentTypeAdmin)
## Instruction:
Update Python/Django: Remove unnecessary reference to __unicode__
## Code After:
from django.contrib import admin
from django.contrib.auth.models import Permission
from django.contrib.contenttypes.models import ContentType
class PermissionAdmin(admin.ModelAdmin):
list_display = ['content_type', 'codename', 'name']
list_filter = ['content_type__app_label']
class ContentTypeAdmin(admin.ModelAdmin):
list_display = ['id', 'app_label', 'model']
list_filter = ['app_label']
admin.site.register(Permission, PermissionAdmin)
admin.site.register(ContentType, ContentTypeAdmin)
|
20017da43fe1bf5287b33d9d2fc7f597850bb5b5 | readthedocs/settings/proxito/base.py | readthedocs/settings/proxito/base.py |
class CommunityProxitoSettingsMixin:
ROOT_URLCONF = 'readthedocs.proxito.urls'
USE_SUBDOMAIN = True
@property
def MIDDLEWARE(self): # noqa
# Use our new middleware instead of the old one
classes = super().MIDDLEWARE
classes = list(classes)
index = classes.index(
'readthedocs.core.middleware.SubdomainMiddleware'
)
classes[index] = 'readthedocs.proxito.middleware.ProxitoMiddleware'
middleware_to_remove = (
'readthedocs.core.middleware.SingleVersionMiddleware',
'csp.middleware.CSPMiddleware',
)
for mw in middleware_to_remove:
if mw in classes:
classes.remove(mw)
return classes
|
class CommunityProxitoSettingsMixin:
ROOT_URLCONF = 'readthedocs.proxito.urls'
USE_SUBDOMAIN = True
@property
def DATABASES(self):
# This keeps connections to the DB alive,
# which reduces latency with connecting to postgres
dbs = getattr(super(), 'DATABASES', {})
for db in dbs.keys():
dbs[db]['CONN_MAX_AGE'] = 86400
return dbs
@property
def MIDDLEWARE(self): # noqa
# Use our new middleware instead of the old one
classes = super().MIDDLEWARE
classes = list(classes)
index = classes.index(
'readthedocs.core.middleware.SubdomainMiddleware'
)
classes[index] = 'readthedocs.proxito.middleware.ProxitoMiddleware'
middleware_to_remove = (
'readthedocs.core.middleware.SingleVersionMiddleware',
'csp.middleware.CSPMiddleware',
)
for mw in middleware_to_remove:
if mw in classes:
classes.remove(mw)
return classes
| Expand the logic in our proxito mixin. | Expand the logic in our proxito mixin.
This makes proxito mixin match production for .com/.org
in the areas where we are overriding the same things.
| Python | mit | rtfd/readthedocs.org,rtfd/readthedocs.org,rtfd/readthedocs.org,rtfd/readthedocs.org | +
class CommunityProxitoSettingsMixin:
ROOT_URLCONF = 'readthedocs.proxito.urls'
USE_SUBDOMAIN = True
+
+ @property
+ def DATABASES(self):
+ # This keeps connections to the DB alive,
+ # which reduces latency with connecting to postgres
+ dbs = getattr(super(), 'DATABASES', {})
+ for db in dbs.keys():
+ dbs[db]['CONN_MAX_AGE'] = 86400
+ return dbs
@property
def MIDDLEWARE(self): # noqa
# Use our new middleware instead of the old one
classes = super().MIDDLEWARE
classes = list(classes)
index = classes.index(
'readthedocs.core.middleware.SubdomainMiddleware'
)
classes[index] = 'readthedocs.proxito.middleware.ProxitoMiddleware'
middleware_to_remove = (
'readthedocs.core.middleware.SingleVersionMiddleware',
'csp.middleware.CSPMiddleware',
)
for mw in middleware_to_remove:
if mw in classes:
classes.remove(mw)
return classes
| Expand the logic in our proxito mixin. | ## Code Before:
class CommunityProxitoSettingsMixin:
ROOT_URLCONF = 'readthedocs.proxito.urls'
USE_SUBDOMAIN = True
@property
def MIDDLEWARE(self): # noqa
# Use our new middleware instead of the old one
classes = super().MIDDLEWARE
classes = list(classes)
index = classes.index(
'readthedocs.core.middleware.SubdomainMiddleware'
)
classes[index] = 'readthedocs.proxito.middleware.ProxitoMiddleware'
middleware_to_remove = (
'readthedocs.core.middleware.SingleVersionMiddleware',
'csp.middleware.CSPMiddleware',
)
for mw in middleware_to_remove:
if mw in classes:
classes.remove(mw)
return classes
## Instruction:
Expand the logic in our proxito mixin.
## Code After:
class CommunityProxitoSettingsMixin:
ROOT_URLCONF = 'readthedocs.proxito.urls'
USE_SUBDOMAIN = True
@property
def DATABASES(self):
# This keeps connections to the DB alive,
# which reduces latency with connecting to postgres
dbs = getattr(super(), 'DATABASES', {})
for db in dbs.keys():
dbs[db]['CONN_MAX_AGE'] = 86400
return dbs
@property
def MIDDLEWARE(self): # noqa
# Use our new middleware instead of the old one
classes = super().MIDDLEWARE
classes = list(classes)
index = classes.index(
'readthedocs.core.middleware.SubdomainMiddleware'
)
classes[index] = 'readthedocs.proxito.middleware.ProxitoMiddleware'
middleware_to_remove = (
'readthedocs.core.middleware.SingleVersionMiddleware',
'csp.middleware.CSPMiddleware',
)
for mw in middleware_to_remove:
if mw in classes:
classes.remove(mw)
return classes
|
b4e5a284201d6d25607ff54aedcf6082e8a4d621 | st2client/st2client/models/reactor.py | st2client/st2client/models/reactor.py |
import logging
from st2client.models import core
LOG = logging.getLogger(__name__)
class Sensor(core.Resource):
_plural = 'Sensortypes'
_repr_attributes = ['name', 'pack']
class TriggerType(core.Resource):
_alias = 'Trigger'
_display_name = 'Trigger'
_plural = 'Triggertypes'
_plural_display_name = 'Triggers'
_repr_attributes = ['name', 'pack']
class Rule(core.Resource):
_plural = 'Rules'
|
import logging
from st2client.models import core
LOG = logging.getLogger(__name__)
class Sensor(core.Resource):
_plural = 'Sensortypes'
_repr_attributes = ['name', 'pack']
class TriggerType(core.Resource):
_alias = 'Trigger'
_display_name = 'Trigger'
_plural = 'Triggertypes'
_plural_display_name = 'Triggers'
_repr_attributes = ['name', 'pack']
class Trigger(core.Resource):
_alias = 'TriggerSpecification'
_display_name = 'TriggerSpecification'
_plural = 'Triggers'
_plural_display_name = 'Triggers'
_repr_attributes = ['name', 'pack']
class Rule(core.Resource):
_plural = 'Rules'
| Add Trigger model to client and alias it as TriggerSpecification. | Add Trigger model to client and alias it as TriggerSpecification.
| Python | apache-2.0 | pinterb/st2,peak6/st2,pixelrebel/st2,jtopjian/st2,pixelrebel/st2,alfasin/st2,pinterb/st2,Itxaka/st2,Plexxi/st2,lakshmi-kannan/st2,Itxaka/st2,grengojbo/st2,Plexxi/st2,jtopjian/st2,punalpatel/st2,punalpatel/st2,Plexxi/st2,nzlosh/st2,armab/st2,StackStorm/st2,punalpatel/st2,dennybaa/st2,nzlosh/st2,pixelrebel/st2,peak6/st2,armab/st2,jtopjian/st2,alfasin/st2,emedvedev/st2,tonybaloney/st2,emedvedev/st2,armab/st2,emedvedev/st2,pinterb/st2,dennybaa/st2,tonybaloney/st2,lakshmi-kannan/st2,dennybaa/st2,alfasin/st2,tonybaloney/st2,StackStorm/st2,nzlosh/st2,Plexxi/st2,nzlosh/st2,peak6/st2,Itxaka/st2,grengojbo/st2,lakshmi-kannan/st2,grengojbo/st2,StackStorm/st2,StackStorm/st2 |
import logging
from st2client.models import core
LOG = logging.getLogger(__name__)
class Sensor(core.Resource):
_plural = 'Sensortypes'
_repr_attributes = ['name', 'pack']
class TriggerType(core.Resource):
_alias = 'Trigger'
_display_name = 'Trigger'
_plural = 'Triggertypes'
_plural_display_name = 'Triggers'
_repr_attributes = ['name', 'pack']
+ class Trigger(core.Resource):
+ _alias = 'TriggerSpecification'
+ _display_name = 'TriggerSpecification'
+ _plural = 'Triggers'
+ _plural_display_name = 'Triggers'
+ _repr_attributes = ['name', 'pack']
+
+
class Rule(core.Resource):
_plural = 'Rules'
| Add Trigger model to client and alias it as TriggerSpecification. | ## Code Before:
import logging
from st2client.models import core
LOG = logging.getLogger(__name__)
class Sensor(core.Resource):
_plural = 'Sensortypes'
_repr_attributes = ['name', 'pack']
class TriggerType(core.Resource):
_alias = 'Trigger'
_display_name = 'Trigger'
_plural = 'Triggertypes'
_plural_display_name = 'Triggers'
_repr_attributes = ['name', 'pack']
class Rule(core.Resource):
_plural = 'Rules'
## Instruction:
Add Trigger model to client and alias it as TriggerSpecification.
## Code After:
import logging
from st2client.models import core
LOG = logging.getLogger(__name__)
class Sensor(core.Resource):
_plural = 'Sensortypes'
_repr_attributes = ['name', 'pack']
class TriggerType(core.Resource):
_alias = 'Trigger'
_display_name = 'Trigger'
_plural = 'Triggertypes'
_plural_display_name = 'Triggers'
_repr_attributes = ['name', 'pack']
class Trigger(core.Resource):
_alias = 'TriggerSpecification'
_display_name = 'TriggerSpecification'
_plural = 'Triggers'
_plural_display_name = 'Triggers'
_repr_attributes = ['name', 'pack']
class Rule(core.Resource):
_plural = 'Rules'
|
bd8901c18a6722660e7af742260ae4b8317a064b | youtube/tasks.py | youtube/tasks.py | import subprocess
import os
from pathlib import Path
from invoke import task
@task
def update(ctx):
"""
Update youtube-dl
"""
cmd = ['pipenv', 'update', 'youtube-dl']
subprocess.call(cmd)
@task
def clean(ctx):
"""
Clean up files
"""
import main
def rm(file_):
if file_.exists():
os.remove(file_)
rm(main.json_file)
for file_ in main.download_dir.iterdir():
if file_.name != '.gitkeep':
os.remove(file_)
@task
def playlist(ctx):
"""
Process YouTube playlist
"""
import main
main.process_playlist()
@task
def link(ctx, url):
"""
Process video link
"""
import main
main.process_link(url)
| import subprocess
import os
from pathlib import Path
from invoke import task
@task
def update(ctx):
"""
Update dependencies such as youtube-dl, etc.
"""
subprocess.call(['pipenv', 'update'])
@task
def clean(ctx):
"""
Clean up files
"""
import main
def rm(file_):
if file_.exists():
os.remove(file_)
rm(main.json_file)
for file_ in main.download_dir.iterdir():
if file_.name != '.gitkeep':
os.remove(file_)
@task
def playlist(ctx):
"""
Process YouTube playlist
"""
import main
main.process_playlist()
@task
def link(ctx, url):
"""
Process video link
"""
import main
main.process_link(url)
| Update task now updates all dependencies | Update task now updates all dependencies
| Python | apache-2.0 | feihong/chinese-music-processors,feihong/chinese-music-processors | import subprocess
import os
from pathlib import Path
from invoke import task
@task
def update(ctx):
"""
- Update youtube-dl
+ Update dependencies such as youtube-dl, etc.
"""
+ subprocess.call(['pipenv', 'update'])
- cmd = ['pipenv', 'update', 'youtube-dl']
- subprocess.call(cmd)
@task
def clean(ctx):
"""
Clean up files
"""
import main
def rm(file_):
if file_.exists():
os.remove(file_)
rm(main.json_file)
for file_ in main.download_dir.iterdir():
if file_.name != '.gitkeep':
os.remove(file_)
@task
def playlist(ctx):
"""
Process YouTube playlist
"""
import main
main.process_playlist()
@task
def link(ctx, url):
"""
Process video link
"""
import main
main.process_link(url)
| Update task now updates all dependencies | ## Code Before:
import subprocess
import os
from pathlib import Path
from invoke import task
@task
def update(ctx):
"""
Update youtube-dl
"""
cmd = ['pipenv', 'update', 'youtube-dl']
subprocess.call(cmd)
@task
def clean(ctx):
"""
Clean up files
"""
import main
def rm(file_):
if file_.exists():
os.remove(file_)
rm(main.json_file)
for file_ in main.download_dir.iterdir():
if file_.name != '.gitkeep':
os.remove(file_)
@task
def playlist(ctx):
"""
Process YouTube playlist
"""
import main
main.process_playlist()
@task
def link(ctx, url):
"""
Process video link
"""
import main
main.process_link(url)
## Instruction:
Update task now updates all dependencies
## Code After:
import subprocess
import os
from pathlib import Path
from invoke import task
@task
def update(ctx):
"""
Update dependencies such as youtube-dl, etc.
"""
subprocess.call(['pipenv', 'update'])
@task
def clean(ctx):
"""
Clean up files
"""
import main
def rm(file_):
if file_.exists():
os.remove(file_)
rm(main.json_file)
for file_ in main.download_dir.iterdir():
if file_.name != '.gitkeep':
os.remove(file_)
@task
def playlist(ctx):
"""
Process YouTube playlist
"""
import main
main.process_playlist()
@task
def link(ctx, url):
"""
Process video link
"""
import main
main.process_link(url)
|
532df8a669d7e54125c102ef4821272dc24aab23 | weasyprint/logger.py | weasyprint/logger.py |
from __future__ import division, unicode_literals
import logging
LOGGER = logging.getLogger('weasyprint')
# Default to logging to stderr.
if not LOGGER.handlers:
LOGGER.addHandler(logging.StreamHandler())
if LOGGER.level == logging.NOTSET:
LOGGER.setLevel(logging.INFO)
|
from __future__ import division, unicode_literals
import logging
LOGGER = logging.getLogger('weasyprint')
# Default to logging to stderr.
if not LOGGER.handlers:
handler = logging.StreamHandler()
formatter = logging.Formatter('%(levelname)s: %(message)s')
handler.setFormatter(formatter)
LOGGER.addHandler(handler)
if LOGGER.level == logging.NOTSET:
LOGGER.setLevel(logging.INFO)
| Add a better default formatter for logs | Add a better default formatter for logs
| Python | bsd-3-clause | Kozea/WeasyPrint,Kozea/WeasyPrint |
from __future__ import division, unicode_literals
import logging
LOGGER = logging.getLogger('weasyprint')
# Default to logging to stderr.
if not LOGGER.handlers:
- LOGGER.addHandler(logging.StreamHandler())
+ handler = logging.StreamHandler()
+ formatter = logging.Formatter('%(levelname)s: %(message)s')
+ handler.setFormatter(formatter)
+ LOGGER.addHandler(handler)
if LOGGER.level == logging.NOTSET:
LOGGER.setLevel(logging.INFO)
| Add a better default formatter for logs | ## Code Before:
from __future__ import division, unicode_literals
import logging
LOGGER = logging.getLogger('weasyprint')
# Default to logging to stderr.
if not LOGGER.handlers:
LOGGER.addHandler(logging.StreamHandler())
if LOGGER.level == logging.NOTSET:
LOGGER.setLevel(logging.INFO)
## Instruction:
Add a better default formatter for logs
## Code After:
from __future__ import division, unicode_literals
import logging
LOGGER = logging.getLogger('weasyprint')
# Default to logging to stderr.
if not LOGGER.handlers:
handler = logging.StreamHandler()
formatter = logging.Formatter('%(levelname)s: %(message)s')
handler.setFormatter(formatter)
LOGGER.addHandler(handler)
if LOGGER.level == logging.NOTSET:
LOGGER.setLevel(logging.INFO)
|
6049a916ea3adfe4ef8a7ae9dbfc918b69907ef4 | OnionLauncher/main.py | OnionLauncher/main.py | import sys
from PyQt5.QtWidgets import QApplication, QMainWindow
from PyQt5.uic import loadUi
class MainWindow(QMainWindow):
def __init__(self, *args):
super(MainWindow, self).__init__(*args)
loadUi("ui_files/main.ui", self)
self.tbAdd.clicked.connect(self.addRow)
self.tbRemove.clicked.connect(self.removeRow)
def addRow(self):
rowPos = self.twSettings.rowCount()
self.twSettings.insertRow(rowPos)
def removeRow(self):
rows = sorted(set(index.row() for index in self.twSettings.selectedIndexes()))
rows.reverse()
for row in rows:
self.twSettings.removeRow(row)
if __name__ == "__main__":
app = QApplication(sys.argv)
mw = MainWindow()
mw.show()
sys.exit(app.exec_())
| import sys
from PyQt5.QtWidgets import QApplication, QMainWindow
from PyQt5.uic import loadUi
class MainWindow(QMainWindow):
def __init__(self, *args):
super(MainWindow, self).__init__(*args)
loadUi("ui_files/main.ui", self)
buttons = {
self.tbAdd: self.addRow,
self.tbRemove: self.removeRow,
self.btnSwitchTor: self.switchTor,
}
self.evAddClick(buttons)
def evAddClick(self, obj_dict):
for obj in obj_dict:
obj.clicked.connect(obj_dict[obj])
def addRow(self):
rowPos = self.twSettings.rowCount()
self.twSettings.insertRow(rowPos)
def removeRow(self):
rows = sorted(set(index.row() for index in self.twSettings.selectedIndexes()))
rows.reverse()
for row in rows:
self.twSettings.removeRow(row)
def switchTor(self):
pass
if __name__ == "__main__":
app = QApplication(sys.argv)
mw = MainWindow()
mw.show()
sys.exit(app.exec_())
| Put mouse clicks in it's own dictionary | Put mouse clicks in it's own dictionary
| Python | bsd-2-clause | neelchauhan/OnionLauncher | import sys
from PyQt5.QtWidgets import QApplication, QMainWindow
from PyQt5.uic import loadUi
class MainWindow(QMainWindow):
def __init__(self, *args):
super(MainWindow, self).__init__(*args)
loadUi("ui_files/main.ui", self)
- self.tbAdd.clicked.connect(self.addRow)
- self.tbRemove.clicked.connect(self.removeRow)
+ buttons = {
+ self.tbAdd: self.addRow,
+ self.tbRemove: self.removeRow,
+ self.btnSwitchTor: self.switchTor,
+ }
+
+ self.evAddClick(buttons)
+
+ def evAddClick(self, obj_dict):
+ for obj in obj_dict:
+ obj.clicked.connect(obj_dict[obj])
def addRow(self):
rowPos = self.twSettings.rowCount()
self.twSettings.insertRow(rowPos)
def removeRow(self):
rows = sorted(set(index.row() for index in self.twSettings.selectedIndexes()))
rows.reverse()
for row in rows:
self.twSettings.removeRow(row)
+ def switchTor(self):
+ pass
+
if __name__ == "__main__":
app = QApplication(sys.argv)
mw = MainWindow()
mw.show()
sys.exit(app.exec_())
| Put mouse clicks in it's own dictionary | ## Code Before:
import sys
from PyQt5.QtWidgets import QApplication, QMainWindow
from PyQt5.uic import loadUi
class MainWindow(QMainWindow):
def __init__(self, *args):
super(MainWindow, self).__init__(*args)
loadUi("ui_files/main.ui", self)
self.tbAdd.clicked.connect(self.addRow)
self.tbRemove.clicked.connect(self.removeRow)
def addRow(self):
rowPos = self.twSettings.rowCount()
self.twSettings.insertRow(rowPos)
def removeRow(self):
rows = sorted(set(index.row() for index in self.twSettings.selectedIndexes()))
rows.reverse()
for row in rows:
self.twSettings.removeRow(row)
if __name__ == "__main__":
app = QApplication(sys.argv)
mw = MainWindow()
mw.show()
sys.exit(app.exec_())
## Instruction:
Put mouse clicks in it's own dictionary
## Code After:
import sys
from PyQt5.QtWidgets import QApplication, QMainWindow
from PyQt5.uic import loadUi
class MainWindow(QMainWindow):
def __init__(self, *args):
super(MainWindow, self).__init__(*args)
loadUi("ui_files/main.ui", self)
buttons = {
self.tbAdd: self.addRow,
self.tbRemove: self.removeRow,
self.btnSwitchTor: self.switchTor,
}
self.evAddClick(buttons)
def evAddClick(self, obj_dict):
for obj in obj_dict:
obj.clicked.connect(obj_dict[obj])
def addRow(self):
rowPos = self.twSettings.rowCount()
self.twSettings.insertRow(rowPos)
def removeRow(self):
rows = sorted(set(index.row() for index in self.twSettings.selectedIndexes()))
rows.reverse()
for row in rows:
self.twSettings.removeRow(row)
def switchTor(self):
pass
if __name__ == "__main__":
app = QApplication(sys.argv)
mw = MainWindow()
mw.show()
sys.exit(app.exec_())
|
8a827d3e86cf2f6b9d36812e7058560ae120d4b2 | tests/test_watson.py | tests/test_watson.py | from pywatson.watson import Watson
class TestWatson:
def test_init(self, config):
watson = Watson(url=config['url'], username=config['username'], password=config['password'])
| from pywatson.answer.answer import Answer
from pywatson.watson import Watson
class TestWatson:
def test_ask_question_basic(self, watson):
answer = watson.ask_question('What is the Labour Code?')
assert type(answer) is Answer
| Add failing test for ask_question | Add failing test for ask_question
| Python | mit | sherlocke/pywatson | + from pywatson.answer.answer import Answer
from pywatson.watson import Watson
class TestWatson:
- def test_init(self, config):
- watson = Watson(url=config['url'], username=config['username'], password=config['password'])
+ def test_ask_question_basic(self, watson):
+ answer = watson.ask_question('What is the Labour Code?')
+ assert type(answer) is Answer
| Add failing test for ask_question | ## Code Before:
from pywatson.watson import Watson
class TestWatson:
def test_init(self, config):
watson = Watson(url=config['url'], username=config['username'], password=config['password'])
## Instruction:
Add failing test for ask_question
## Code After:
from pywatson.answer.answer import Answer
from pywatson.watson import Watson
class TestWatson:
def test_ask_question_basic(self, watson):
answer = watson.ask_question('What is the Labour Code?')
assert type(answer) is Answer
|
de324cc798da8694bab510efd51de4bfda528df7 | zinnia/views/entries.py | zinnia/views/entries.py | """Views for Zinnia entries"""
from django.views.generic.dates import BaseDateDetailView
from zinnia.models.entry import Entry
from zinnia.views.mixins.archives import ArchiveMixin
from zinnia.views.mixins.entry_protection import EntryProtectionMixin
from zinnia.views.mixins.callable_queryset import CallableQuerysetMixin
from zinnia.views.mixins.templates import EntryArchiveTemplateResponseMixin
class EntryDateDetail(ArchiveMixin,
EntryArchiveTemplateResponseMixin,
CallableQuerysetMixin,
BaseDateDetailView):
"""
Mixin combinating:
- ArchiveMixin configuration centralizing conf for archive views
- EntryArchiveTemplateResponseMixin to provide a
custom templates depending on the date
- BaseDateDetailView to retrieve the entry with date and slug
- CallableQueryMixin to defer the execution of the *queryset*
property when imported
"""
queryset = Entry.published.on_site
class EntryDetail(EntryProtectionMixin, EntryDateDetail):
"""
Detailled view archive view for an Entry
with password and login protections.
"""
| """Views for Zinnia entries"""
from django.views.generic.dates import BaseDateDetailView
from zinnia.models.entry import Entry
from zinnia.views.mixins.archives import ArchiveMixin
from zinnia.views.mixins.entry_preview import EntryPreviewMixin
from zinnia.views.mixins.entry_protection import EntryProtectionMixin
from zinnia.views.mixins.callable_queryset import CallableQuerysetMixin
from zinnia.views.mixins.templates import EntryArchiveTemplateResponseMixin
class EntryDateDetail(ArchiveMixin,
EntryArchiveTemplateResponseMixin,
CallableQuerysetMixin,
BaseDateDetailView):
"""
Mixin combinating:
- ArchiveMixin configuration centralizing conf for archive views
- EntryArchiveTemplateResponseMixin to provide a
custom templates depending on the date
- BaseDateDetailView to retrieve the entry with date and slug
- CallableQueryMixin to defer the execution of the *queryset*
property when imported
"""
queryset = Entry.published.on_site
class EntryDetail(EntryPreviewMixin,
EntryProtectionMixin,
EntryDateDetail):
"""
Detailled archive view for an Entry with password
and login protections and restricted preview.
"""
| Implement the EntryPreviewMixin in the EntryDetail view | Implement the EntryPreviewMixin in the EntryDetail view
| Python | bsd-3-clause | Maplecroft/django-blog-zinnia,ZuluPro/django-blog-zinnia,petecummings/django-blog-zinnia,Maplecroft/django-blog-zinnia,ZuluPro/django-blog-zinnia,petecummings/django-blog-zinnia,petecummings/django-blog-zinnia,aorzh/django-blog-zinnia,extertioner/django-blog-zinnia,Maplecroft/django-blog-zinnia,ghachey/django-blog-zinnia,bywbilly/django-blog-zinnia,ghachey/django-blog-zinnia,dapeng0802/django-blog-zinnia,Zopieux/django-blog-zinnia,dapeng0802/django-blog-zinnia,marctc/django-blog-zinnia,Fantomas42/django-blog-zinnia,aorzh/django-blog-zinnia,extertioner/django-blog-zinnia,Zopieux/django-blog-zinnia,bywbilly/django-blog-zinnia,Zopieux/django-blog-zinnia,dapeng0802/django-blog-zinnia,Fantomas42/django-blog-zinnia,1844144/django-blog-zinnia,Fantomas42/django-blog-zinnia,extertioner/django-blog-zinnia,marctc/django-blog-zinnia,aorzh/django-blog-zinnia,marctc/django-blog-zinnia,ghachey/django-blog-zinnia,bywbilly/django-blog-zinnia,ZuluPro/django-blog-zinnia,1844144/django-blog-zinnia,1844144/django-blog-zinnia | """Views for Zinnia entries"""
from django.views.generic.dates import BaseDateDetailView
from zinnia.models.entry import Entry
from zinnia.views.mixins.archives import ArchiveMixin
+ from zinnia.views.mixins.entry_preview import EntryPreviewMixin
from zinnia.views.mixins.entry_protection import EntryProtectionMixin
from zinnia.views.mixins.callable_queryset import CallableQuerysetMixin
from zinnia.views.mixins.templates import EntryArchiveTemplateResponseMixin
class EntryDateDetail(ArchiveMixin,
EntryArchiveTemplateResponseMixin,
CallableQuerysetMixin,
BaseDateDetailView):
"""
Mixin combinating:
- ArchiveMixin configuration centralizing conf for archive views
- EntryArchiveTemplateResponseMixin to provide a
custom templates depending on the date
- BaseDateDetailView to retrieve the entry with date and slug
- CallableQueryMixin to defer the execution of the *queryset*
property when imported
"""
queryset = Entry.published.on_site
- class EntryDetail(EntryProtectionMixin, EntryDateDetail):
+ class EntryDetail(EntryPreviewMixin,
+ EntryProtectionMixin,
+ EntryDateDetail):
"""
- Detailled view archive view for an Entry
+ Detailled archive view for an Entry with password
- with password and login protections.
+ and login protections and restricted preview.
"""
| Implement the EntryPreviewMixin in the EntryDetail view | ## Code Before:
"""Views for Zinnia entries"""
from django.views.generic.dates import BaseDateDetailView
from zinnia.models.entry import Entry
from zinnia.views.mixins.archives import ArchiveMixin
from zinnia.views.mixins.entry_protection import EntryProtectionMixin
from zinnia.views.mixins.callable_queryset import CallableQuerysetMixin
from zinnia.views.mixins.templates import EntryArchiveTemplateResponseMixin
class EntryDateDetail(ArchiveMixin,
EntryArchiveTemplateResponseMixin,
CallableQuerysetMixin,
BaseDateDetailView):
"""
Mixin combinating:
- ArchiveMixin configuration centralizing conf for archive views
- EntryArchiveTemplateResponseMixin to provide a
custom templates depending on the date
- BaseDateDetailView to retrieve the entry with date and slug
- CallableQueryMixin to defer the execution of the *queryset*
property when imported
"""
queryset = Entry.published.on_site
class EntryDetail(EntryProtectionMixin, EntryDateDetail):
"""
Detailled view archive view for an Entry
with password and login protections.
"""
## Instruction:
Implement the EntryPreviewMixin in the EntryDetail view
## Code After:
"""Views for Zinnia entries"""
from django.views.generic.dates import BaseDateDetailView
from zinnia.models.entry import Entry
from zinnia.views.mixins.archives import ArchiveMixin
from zinnia.views.mixins.entry_preview import EntryPreviewMixin
from zinnia.views.mixins.entry_protection import EntryProtectionMixin
from zinnia.views.mixins.callable_queryset import CallableQuerysetMixin
from zinnia.views.mixins.templates import EntryArchiveTemplateResponseMixin
class EntryDateDetail(ArchiveMixin,
EntryArchiveTemplateResponseMixin,
CallableQuerysetMixin,
BaseDateDetailView):
"""
Mixin combinating:
- ArchiveMixin configuration centralizing conf for archive views
- EntryArchiveTemplateResponseMixin to provide a
custom templates depending on the date
- BaseDateDetailView to retrieve the entry with date and slug
- CallableQueryMixin to defer the execution of the *queryset*
property when imported
"""
queryset = Entry.published.on_site
class EntryDetail(EntryPreviewMixin,
EntryProtectionMixin,
EntryDateDetail):
"""
Detailled archive view for an Entry with password
and login protections and restricted preview.
"""
|
e93a321e3d137fb21a42d0e0bfd257a537be05d3 | diy/parerga/config.py | diy/parerga/config.py |
import os
# directories constants
PARERGA_ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
PARERGA_ENTRY_DIR = os.path.join(PARERGA_ROOT_DIR, "p")
PARERGA_STATIC_DIR = os.path.join(PARERGA_ROOT_DIR, "static")
PARERGA_TEMPLATE_DIR = os.path.join(PARERGA_ROOT_DIR, "templates")
# database location
PARERGA_DB = os.path.join(PARERGA_ROOT_DIR, 'static', 'parerga.db')
|
import os
# directories constants
PARERGA_ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
PARERGA_ENTRY_DIR = os.path.join(PARERGA_ROOT_DIR, "p")
PARERGA_STATIC_DIR = os.path.join(PARERGA_ROOT_DIR, "static")
PARERGA_TEMPLATE_DIR = os.path.join(PARERGA_ROOT_DIR, "templates")
# database location
PARERGA_DB = os.path.join(PARERGA_ROOT_DIR, 'static', 'parerga.db')
| Update path vars for the new source location | Update path vars for the new source location
| Python | bsd-3-clause | nadirs/parerga,nadirs/parerga |
import os
# directories constants
- PARERGA_ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
+ PARERGA_ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
PARERGA_ENTRY_DIR = os.path.join(PARERGA_ROOT_DIR, "p")
PARERGA_STATIC_DIR = os.path.join(PARERGA_ROOT_DIR, "static")
PARERGA_TEMPLATE_DIR = os.path.join(PARERGA_ROOT_DIR, "templates")
# database location
PARERGA_DB = os.path.join(PARERGA_ROOT_DIR, 'static', 'parerga.db')
| Update path vars for the new source location | ## Code Before:
import os
# directories constants
PARERGA_ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
PARERGA_ENTRY_DIR = os.path.join(PARERGA_ROOT_DIR, "p")
PARERGA_STATIC_DIR = os.path.join(PARERGA_ROOT_DIR, "static")
PARERGA_TEMPLATE_DIR = os.path.join(PARERGA_ROOT_DIR, "templates")
# database location
PARERGA_DB = os.path.join(PARERGA_ROOT_DIR, 'static', 'parerga.db')
## Instruction:
Update path vars for the new source location
## Code After:
import os
# directories constants
PARERGA_ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
PARERGA_ENTRY_DIR = os.path.join(PARERGA_ROOT_DIR, "p")
PARERGA_STATIC_DIR = os.path.join(PARERGA_ROOT_DIR, "static")
PARERGA_TEMPLATE_DIR = os.path.join(PARERGA_ROOT_DIR, "templates")
# database location
PARERGA_DB = os.path.join(PARERGA_ROOT_DIR, 'static', 'parerga.db')
|
2724b4dd7ed350baeae0a8e0ef53475f40b1208b | project_generator/tools/makearmclang.py | project_generator/tools/makearmclang.py |
from copy import deepcopy
import logging
from .makefile import MakefileTool
logger = logging.getLogger('progen.tools.armclang')
class MakefileArmclang(MakefileTool):
def __init__(self, workspace, env_settings):
MakefileTool.__init__(self, workspace, env_settings, logger)
@staticmethod
def get_toolnames():
return ['make_armclang']
@staticmethod
def get_toolchain():
return 'armclang'
def export_project(self):
""" Processes misc options specific for AC6, and run generator """
generated_projects = deepcopy(self.generated_projects)
self.process_data_for_makefile(self.workspace)
generated_projects['path'], generated_projects['files']['makefile'] = \
self.gen_file_jinja('makefile_armclang.tmpl', self.workspace, 'Makefile',
self.workspace['output_dir']['path'])
return generated_projects
|
from copy import deepcopy
import logging
from .makefile import MakefileTool
logger = logging.getLogger('progen.tools.armclang')
class MakefileArmclang(MakefileTool):
def __init__(self, workspace, env_settings):
MakefileTool.__init__(self, workspace, env_settings, logger)
# enable preprocessing linker files for GCC ARM
self.workspace['preprocess_linker_file'] = True
self.workspace['linker_extension'] = '.sct'
@staticmethod
def get_toolnames():
return ['make_armclang']
@staticmethod
def get_toolchain():
return 'armclang'
def export_project(self):
""" Processes misc options specific for AC6, and run generator """
generated_projects = deepcopy(self.generated_projects)
self.process_data_for_makefile(self.workspace)
generated_projects['path'], generated_projects['files']['makefile'] = \
self.gen_file_jinja('makefile_armclang.tmpl', self.workspace, 'Makefile',
self.workspace['output_dir']['path'])
return generated_projects
| Enable linker preprocessing for armclang. | Enable linker preprocessing for armclang.
This should be temporary; for some reason the .sct cpp shebang isn't working for me. Same result in any case.
| Python | apache-2.0 | project-generator/project_generator |
from copy import deepcopy
import logging
from .makefile import MakefileTool
logger = logging.getLogger('progen.tools.armclang')
class MakefileArmclang(MakefileTool):
def __init__(self, workspace, env_settings):
MakefileTool.__init__(self, workspace, env_settings, logger)
+ # enable preprocessing linker files for GCC ARM
+ self.workspace['preprocess_linker_file'] = True
+ self.workspace['linker_extension'] = '.sct'
@staticmethod
def get_toolnames():
return ['make_armclang']
@staticmethod
def get_toolchain():
return 'armclang'
def export_project(self):
""" Processes misc options specific for AC6, and run generator """
generated_projects = deepcopy(self.generated_projects)
self.process_data_for_makefile(self.workspace)
generated_projects['path'], generated_projects['files']['makefile'] = \
self.gen_file_jinja('makefile_armclang.tmpl', self.workspace, 'Makefile',
self.workspace['output_dir']['path'])
return generated_projects
| Enable linker preprocessing for armclang. | ## Code Before:
from copy import deepcopy
import logging
from .makefile import MakefileTool
logger = logging.getLogger('progen.tools.armclang')
class MakefileArmclang(MakefileTool):
def __init__(self, workspace, env_settings):
MakefileTool.__init__(self, workspace, env_settings, logger)
@staticmethod
def get_toolnames():
return ['make_armclang']
@staticmethod
def get_toolchain():
return 'armclang'
def export_project(self):
""" Processes misc options specific for AC6, and run generator """
generated_projects = deepcopy(self.generated_projects)
self.process_data_for_makefile(self.workspace)
generated_projects['path'], generated_projects['files']['makefile'] = \
self.gen_file_jinja('makefile_armclang.tmpl', self.workspace, 'Makefile',
self.workspace['output_dir']['path'])
return generated_projects
## Instruction:
Enable linker preprocessing for armclang.
## Code After:
from copy import deepcopy
import logging
from .makefile import MakefileTool
logger = logging.getLogger('progen.tools.armclang')
class MakefileArmclang(MakefileTool):
def __init__(self, workspace, env_settings):
MakefileTool.__init__(self, workspace, env_settings, logger)
# enable preprocessing linker files for GCC ARM
self.workspace['preprocess_linker_file'] = True
self.workspace['linker_extension'] = '.sct'
@staticmethod
def get_toolnames():
return ['make_armclang']
@staticmethod
def get_toolchain():
return 'armclang'
def export_project(self):
""" Processes misc options specific for AC6, and run generator """
generated_projects = deepcopy(self.generated_projects)
self.process_data_for_makefile(self.workspace)
generated_projects['path'], generated_projects['files']['makefile'] = \
self.gen_file_jinja('makefile_armclang.tmpl', self.workspace, 'Makefile',
self.workspace['output_dir']['path'])
return generated_projects
|
9ae5ea3876fae6ef0bc092d87c71d9ea86040cf7 | InvenTree/company/api.py | InvenTree/company/api.py | from __future__ import unicode_literals
from django_filters.rest_framework import DjangoFilterBackend
from rest_framework import filters
from rest_framework import generics, permissions
from django.conf.urls import url
from .models import Company
from .serializers import CompanySerializer
class CompanyList(generics.ListCreateAPIView):
serializer_class = CompanySerializer
queryset = Company.objects.all()
permission_classes = [
permissions.IsAuthenticatedOrReadOnly,
]
filter_backends = [
DjangoFilterBackend,
filters.SearchFilter,
filters.OrderingFilter,
]
filter_fields = [
'name',
'is_customer',
'is_supplier',
]
search_fields = [
'name',
'description',
]
ordering_fields = [
'name',
]
ordering = 'name'
company_api_urls = [
url(r'^.*$', CompanyList.as_view(), name='api-company-list'),
]
| from __future__ import unicode_literals
from django_filters.rest_framework import DjangoFilterBackend
from rest_framework import filters
from rest_framework import generics, permissions
from django.conf.urls import url
from .models import Company
from .serializers import CompanySerializer
class CompanyList(generics.ListCreateAPIView):
serializer_class = CompanySerializer
queryset = Company.objects.all()
permission_classes = [
permissions.IsAuthenticatedOrReadOnly,
]
filter_backends = [
DjangoFilterBackend,
filters.SearchFilter,
filters.OrderingFilter,
]
filter_fields = [
'name',
'is_customer',
'is_supplier',
]
search_fields = [
'name',
'description',
]
ordering_fields = [
'name',
]
ordering = 'name'
class CompanyDetail(generics.RetrieveUpdateDestroyAPIView):
queryset = Company.objects.all()
serializer_class = CompanySerializer
permission_classes = [
permissions.IsAuthenticatedOrReadOnly,
]
company_api_urls = [
url(r'^(?P<pk>\d+)/?', CompanyDetail.as_view(), name='api-company-detail'),
url(r'^.*$', CompanyList.as_view(), name='api-company-list'),
]
| Add RUD endpoint for Company | Add RUD endpoint for Company
| Python | mit | SchrodingersGat/InvenTree,SchrodingersGat/InvenTree,inventree/InvenTree,inventree/InvenTree,inventree/InvenTree,SchrodingersGat/InvenTree,inventree/InvenTree,SchrodingersGat/InvenTree | from __future__ import unicode_literals
from django_filters.rest_framework import DjangoFilterBackend
from rest_framework import filters
from rest_framework import generics, permissions
from django.conf.urls import url
from .models import Company
from .serializers import CompanySerializer
class CompanyList(generics.ListCreateAPIView):
serializer_class = CompanySerializer
queryset = Company.objects.all()
permission_classes = [
permissions.IsAuthenticatedOrReadOnly,
]
filter_backends = [
DjangoFilterBackend,
filters.SearchFilter,
filters.OrderingFilter,
]
filter_fields = [
'name',
'is_customer',
'is_supplier',
]
search_fields = [
'name',
'description',
]
ordering_fields = [
'name',
]
ordering = 'name'
+ class CompanyDetail(generics.RetrieveUpdateDestroyAPIView):
+
+ queryset = Company.objects.all()
+ serializer_class = CompanySerializer
+
+ permission_classes = [
+ permissions.IsAuthenticatedOrReadOnly,
+ ]
+
+
company_api_urls = [
+
+ url(r'^(?P<pk>\d+)/?', CompanyDetail.as_view(), name='api-company-detail'),
url(r'^.*$', CompanyList.as_view(), name='api-company-list'),
]
| Add RUD endpoint for Company | ## Code Before:
from __future__ import unicode_literals
from django_filters.rest_framework import DjangoFilterBackend
from rest_framework import filters
from rest_framework import generics, permissions
from django.conf.urls import url
from .models import Company
from .serializers import CompanySerializer
class CompanyList(generics.ListCreateAPIView):
serializer_class = CompanySerializer
queryset = Company.objects.all()
permission_classes = [
permissions.IsAuthenticatedOrReadOnly,
]
filter_backends = [
DjangoFilterBackend,
filters.SearchFilter,
filters.OrderingFilter,
]
filter_fields = [
'name',
'is_customer',
'is_supplier',
]
search_fields = [
'name',
'description',
]
ordering_fields = [
'name',
]
ordering = 'name'
company_api_urls = [
url(r'^.*$', CompanyList.as_view(), name='api-company-list'),
]
## Instruction:
Add RUD endpoint for Company
## Code After:
from __future__ import unicode_literals
from django_filters.rest_framework import DjangoFilterBackend
from rest_framework import filters
from rest_framework import generics, permissions
from django.conf.urls import url
from .models import Company
from .serializers import CompanySerializer
class CompanyList(generics.ListCreateAPIView):
serializer_class = CompanySerializer
queryset = Company.objects.all()
permission_classes = [
permissions.IsAuthenticatedOrReadOnly,
]
filter_backends = [
DjangoFilterBackend,
filters.SearchFilter,
filters.OrderingFilter,
]
filter_fields = [
'name',
'is_customer',
'is_supplier',
]
search_fields = [
'name',
'description',
]
ordering_fields = [
'name',
]
ordering = 'name'
class CompanyDetail(generics.RetrieveUpdateDestroyAPIView):
queryset = Company.objects.all()
serializer_class = CompanySerializer
permission_classes = [
permissions.IsAuthenticatedOrReadOnly,
]
company_api_urls = [
url(r'^(?P<pk>\d+)/?', CompanyDetail.as_view(), name='api-company-detail'),
url(r'^.*$', CompanyList.as_view(), name='api-company-list'),
]
|
Subsets and Splits