Columns: input (string, lengths 0 to 2.17k) · instruction (string, lengths 18 to 2.94k) · output (string, lengths 47 to 3.36k)
""" Some extra picklers needed to gracefully dump and load a canvas. """ from future import standard_library standard_library.install_aliases() import copyreg # Allow instancemethod to be pickled: import types def construct_instancemethod(funcname, self, clazz): func = getattr(clazz, funcname) return types.MethodType(func, self, clazz) def reduce_instancemethod(im): return ( construct_instancemethod, (im.__func__.__name__, im.__self__, im.__self__.__class__), ) copyreg.pickle(types.MethodType, reduce_instancemethod, construct_instancemethod) # Allow cairo.Matrix to be pickled: import cairo def construct_cairo_matrix(*args): return cairo.Matrix(*args) def reduce_cairo_matrix(m): return construct_cairo_matrix, tuple(m) copyreg.pickle(cairo.Matrix, reduce_cairo_matrix, construct_cairo_matrix) # vim:sw=4:et:ai
Fix MethodType only takes two parameters Signed-off-by: Dan Yeaw <[email protected]>
""" Some extra picklers needed to gracefully dump and load a canvas. """ from future import standard_library standard_library.install_aliases() import copyreg # Allow instancemethod to be pickled: import types def construct_instancemethod(funcname, self, clazz): func = getattr(clazz, funcname) return types.MethodType(func, self) def reduce_instancemethod(im): return ( construct_instancemethod, (im.__func__.__name__, im.__self__, im.__self__.__class__), ) copyreg.pickle(types.MethodType, reduce_instancemethod, construct_instancemethod) # Allow cairo.Matrix to be pickled: import cairo def construct_cairo_matrix(*args): return cairo.Matrix(*args) def reduce_cairo_matrix(m): return construct_cairo_matrix, tuple(m) copyreg.pickle(cairo.Matrix, reduce_cairo_matrix, construct_cairo_matrix) # vim:sw=4:et:ai
input:
__version__ = (2014, 10, 0)

def get_version():
    """
    :rtype: str
    """
    return '.'.join(str(i) for i in __version__)

class Gignore(object):
    BASE_URL = 'https://raw.githubusercontent.com/github/gitignore/master/'

    name = None
    file_content = None

    def get_base_url(self):
        """
        :rtype: str
        """
        return self.BASE_URL

    def set_name(self, name):
        """
        :type name: str
        """
        self.name = name

    def get_name(self):
        """
        :rtype: str
        """
        return self.name

    def set_file_content(self, file_content):
        """
        :type file_content: str
        """
        self.file_content = file_content

    def get_file_content(self):
        """
        :rtype: str
        """
        return self.file_content

instruction:
Add valid attribute with setter/getter

output:
__version__ = (2014, 10, 0)

def get_version():
    """
    :rtype: str
    """
    return '.'.join(str(i) for i in __version__)

class Gignore(object):
    BASE_URL = 'https://raw.githubusercontent.com/github/gitignore/master/'

    name = None
    file_content = None
    valid = True

    def get_base_url(self):
        """
        :rtype: str
        """
        return self.BASE_URL

    def set_name(self, name):
        """
        :type name: str
        """
        self.name = name

    def get_name(self):
        """
        :rtype: str
        """
        return self.name

    def set_file_content(self, file_content):
        """
        :type file_content: str
        """
        self.file_content = file_content

    def get_file_content(self):
        """
        :rtype: str
        """
        return self.file_content

    def is_valid(self):
        """
        :rtype: bool
        """
        return self.valid

    def set_valid(self, valid):
        """
        :type valid: bool
        """
        self.valid = valid
input:
import re

class Presenters(object):
    def __init__(self):
        return None

    def present(self, value, question_content):
        if "type" in question_content:
            field_type = question_content["type"]
        else:
            return value
        if hasattr(self, "_" + field_type):
            return getattr(self, "_" + field_type)(value)
        else:
            return value

    def _service_id(self, value):
        if re.findall("[a-zA-Z]", value):
            return [value]
        else:
            return re.findall("....", str(value))

    def _upload(self, value):
        return {
            "url": value or "",
            "filename": value.split("/")[-1] or ""
        }

instruction:
Fix the thing that @quis broke.

output:
import re

class Presenters(object):
    def __init__(self):
        return None

    def present(self, value, question_content):
        if "type" in question_content:
            field_type = question_content["type"]
        else:
            return value
        if hasattr(self, "_" + field_type):
            return getattr(self, "_" + field_type)(value)
        else:
            return value

    def _service_id(self, value):
        if re.findall("[a-zA-Z]", str(value)):
            return [value]
        else:
            return re.findall("....", str(value))

    def _upload(self, value):
        return {
            "url": value or "",
            "filename": value.split("/")[-1] or ""
        }
input:
from __future__ import unicode_literals

import logging

from django.core.management.base import BaseCommand

from peering.models import InternetExchange

class Command(BaseCommand):
    help = ('Deploy configurations each IX having a router and a configuration'
            ' template attached.')
    logger = logging.getLogger('peering.manager.peering')

    def handle(self, *args, **options):
        self.logger.info('Deploying configurations...')

        for ix in InternetExchange.objects.all():
            if ix.configuration_template and ix.router:
                self.logger.info(
                    'Deploying configuration on {}'.format(ix.name))
                ix.router.set_napalm_configuration(ix.generate_configuration(),
                                                   commit=True)
            else:
                self.logger.info(
                    'No configuration to deploy on {}'.format(ix.name))

        self.logger.info('Configurations deployed')

instruction:
Check for router platform in auto-deploy script.

output:
from __future__ import unicode_literals

import logging

from django.core.management.base import BaseCommand

from peering.models import InternetExchange

class Command(BaseCommand):
    help = ('Deploy configurations each IX having a router and a configuration'
            ' template attached.')
    logger = logging.getLogger('peering.manager.peering')

    def handle(self, *args, **options):
        self.logger.info('Deploying configurations...')

        for ix in InternetExchange.objects.all():
            # Only deploy config if there are at least a configuration
            # template, a router and a platform for the router
            if ix.configuration_template and ix.router and ix.router.platform:
                self.logger.info(
                    'Deploying configuration on {}'.format(ix.name))
                ix.router.set_napalm_configuration(ix.generate_configuration(),
                                                   commit=True)
            else:
                self.logger.info(
                    'No configuration to deploy on {}'.format(ix.name))

        self.logger.info('Configurations deployed')
input:
# Namespace package: https://docs.python.org/2/library/pkgutil.html
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)

__VERSION__ = "1.0.0"
__AUTHOR__ = "GDS Developers"
__AUTHOR_EMAIL__ = ""

instruction:
Make version number match the latest tag

We have a 2.0.0 tag in github which points to code where the version
claims to be 1.0.0:
https://github.com/alphagov/backdrop-collector/blob/2.0.0/backdrop/collector/__init__.py

We definitely have code which specifies 2.0.0 as its dependency.

Upversion to 2.0.1 so we can make a new, correct, tag.

output:
# Namespace package: https://docs.python.org/2/library/pkgutil.html
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)

__VERSION__ = "2.0.1"
__AUTHOR__ = "GDS Developers"
__AUTHOR_EMAIL__ = ""
input:
from rest_framework import serializers

from .models import Animal

class AnimalSerializer(serializers.HyperlinkedModelSerializer):
    class Meta:
        model = Animal
        fields = ('id', 'name', 'dob', 'active', 'own')

instruction:
Add parents and gender to the list of values in serializer

output:
from rest_framework import serializers

from .models import Animal

class AnimalSerializer(serializers.HyperlinkedModelSerializer):
    class Meta:
        model = Animal
        fields = ('id', 'name', 'dob', 'gender', 'active', 'own',
                  'father', 'mother')
input:
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""A DAG consisting of a BashOperator that prints the result of a coin flip."""

import datetime

import airflow
from airflow.operators import bash_operator

# [START composer_dag_local_deps]
from .dependencies import coin_module
# [END composer_dag_local_deps]

default_args = {
    'start_date': datetime.datetime.combine(
        datetime.datetime.today() - datetime.timedelta(days=1),
        datetime.datetime.min.time()),
}

with airflow.DAG('dependencies_dag', default_args=default_args) as dag:
    t1 = bash_operator.BashOperator(
        task_id='print_coin_result',
        bash_command='echo "{0}"'.format(coin_module.flip_coin()),
        dag=dag)

instruction:
Use absolute / implicit relative imports for local deps

Since Composer is Python 2.7 only for now, this sample can use implicit
relative imports. Airflow doesn't seem to support explicit relative imports
when I try to run the use_local_deps.py file in Composer.

Aside: Airflow is using the imp.load_source method to load the DAG modules.
This will be problematic for Python 3 support, see:
https://issues.apache.org/jira/browse/AIRFLOW-2243.

output:
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""A DAG consisting of a BashOperator that prints the result of a coin flip."""

import datetime

import airflow
from airflow.operators import bash_operator

# [START composer_dag_local_deps]
from dependencies import coin_module
# [END composer_dag_local_deps]

default_args = {
    'start_date': datetime.datetime.combine(
        datetime.datetime.today() - datetime.timedelta(days=1),
        datetime.datetime.min.time()),
}

with airflow.DAG('dependencies_dag', default_args=default_args) as dag:
    t1 = bash_operator.BashOperator(
        task_id='print_coin_result',
        bash_command='echo "{0}"'.format(coin_module.flip_coin()),
        dag=dag)
input:
import os
import pickle

def load_data(path):
    """
    Load Dataset from File
    """
    input_file = os.path.join(path)
    with open(input_file, "r") as f:
        data = f.read()
    return data

def preprocess_and_save_data(dataset_path, token_lookup, create_lookup_tables):
    """
    Preprocess Text Data
    """
    text = load_data(dataset_path)

    token_dict = token_lookup()
    for key, token in token_dict.items():
        text = text.replace(key, ' {} '.format(token))

    text = text.lower()
    text = text.split()

    vocab_to_int, int_to_vocab = create_lookup_tables(text)
    int_text = [vocab_to_int[word] for word in text]
    pickle.dump((int_text, vocab_to_int, int_to_vocab, token_dict), open('preprocess.p', 'wb'))

def load_preprocess():
    """
    Load the Preprocessed Training data and return them in batches of <batch_size> or less
    """
    return pickle.load(open('preprocess.p', mode='rb'))

def save_params(params):
    """
    Save parameters to file
    """
    pickle.dump(params, open('params.p', 'wb'))

def load_params():
    """
    Load parameters from file
    """
    return pickle.load(open('params.p', mode='rb'))

instruction:
Remove copyright notice during preprocessing

output:
import os
import pickle

def load_data(path):
    """
    Load Dataset from File
    """
    input_file = os.path.join(path)
    with open(input_file, "r") as f:
        data = f.read()
    return data

def preprocess_and_save_data(dataset_path, token_lookup, create_lookup_tables):
    """
    Preprocess Text Data
    """
    text = load_data(dataset_path)

    # Ignore notice, since we don't use it for analysing the data
    text = text[81:]

    token_dict = token_lookup()
    for key, token in token_dict.items():
        text = text.replace(key, ' {} '.format(token))

    text = text.lower()
    text = text.split()

    vocab_to_int, int_to_vocab = create_lookup_tables(text)
    int_text = [vocab_to_int[word] for word in text]
    pickle.dump((int_text, vocab_to_int, int_to_vocab, token_dict), open('preprocess.p', 'wb'))

def load_preprocess():
    """
    Load the Preprocessed Training data and return them in batches of <batch_size> or less
    """
    return pickle.load(open('preprocess.p', mode='rb'))

def save_params(params):
    """
    Save parameters to file
    """
    pickle.dump(params, open('params.p', 'wb'))

def load_params():
    """
    Load parameters from file
    """
    return pickle.load(open('params.p', mode='rb'))
input:
from contextlib import contextmanager

from pyglet.gl import *

def ptr(*args):
    return (GLfloat * len(args))(*args)

@contextmanager
def gl_disable(*bits):
    glPushAttrib(GL_ENABLE_BIT)
    map(glDisable, bits)
    yield
    glPopAttrib(GL_ENABLE_BIT)

@contextmanager
def gl_ortho(window):
    # clobbers current modelview matrix
    glMatrixMode(GL_PROJECTION)
    glPushMatrix()
    glLoadIdentity()
    gluOrtho2D(0, window.width, 0, window.height, -1, 1)
    glMatrixMode(GL_MODELVIEW)
    glLoadIdentity()
    yield
    glMatrixMode(GL_PROJECTION)
    glPopMatrix()
    glMatrixMode(GL_MODELVIEW)

instruction:
Fix pyglet breakage by controlling exports.

output:
from contextlib import contextmanager

from pyglet.gl import *

__all__ = [
    'ptr',
    'gl_disable',
    'gl_ortho',
]

def ptr(*args):
    return (GLfloat * len(args))(*args)

@contextmanager
def gl_disable(*bits):
    glPushAttrib(GL_ENABLE_BIT)
    map(glDisable, bits)
    yield
    glPopAttrib(GL_ENABLE_BIT)

@contextmanager
def gl_ortho(window):
    # clobbers current modelview matrix
    glMatrixMode(GL_PROJECTION)
    glPushMatrix()
    glLoadIdentity()
    gluOrtho2D(0, window.width, 0, window.height, -1, 1)
    glMatrixMode(GL_MODELVIEW)
    glLoadIdentity()
    yield
    glMatrixMode(GL_PROJECTION)
    glPopMatrix()
    glMatrixMode(GL_MODELVIEW)
input:
from pythonforandroid.recipe import PythonRecipe

class PySDL2Recipe(PythonRecipe):
    version = '0.9.3'
    url = 'https://bitbucket.org/marcusva/py-sdl2/downloads/PySDL2-{version}.tar.gz'

    depends = ['sdl2']

recipe = PySDL2Recipe()

instruction:
Fix outdated PySDL2 version and non-PyPI install source

output:
from pythonforandroid.recipe import PythonRecipe

class PySDL2Recipe(PythonRecipe):
    version = '0.9.6'
    url = 'https://files.pythonhosted.org/packages/source/P/PySDL2/PySDL2-{version}.tar.gz'

    depends = ['sdl2']

recipe = PySDL2Recipe()
input:
from ..attribute_processor import AttributeProcessorError
from .base_processor import BaseProcessor

CONFIG_KEY_SCOPE = 'scope'
CONFIG_DEFAULT_SCOPE = ''

class ScopeProcessor(BaseProcessor):
    def process(self, internal_data, attribute, **kwargs):
        scope = kwargs.get(CONFIG_KEY_SCOPE, CONFIG_DEFAULT_SCOPE)
        if scope is None or scope == '':
            raise AttributeProcessorError("No scope set.")

        attributes = internal_data.attributes
        value = attributes.get(attribute, [None])[0]
        attributes[attribute][0] = value + '@' + scope

instruction:
Allow scope processor to handle multivalued attributes

output:
from ..attribute_processor import AttributeProcessorError
from .base_processor import BaseProcessor

CONFIG_KEY_SCOPE = 'scope'
CONFIG_DEFAULT_SCOPE = ''

class ScopeProcessor(BaseProcessor):
    def process(self, internal_data, attribute, **kwargs):
        scope = kwargs.get(CONFIG_KEY_SCOPE, CONFIG_DEFAULT_SCOPE)
        if scope is None or scope == '':
            raise AttributeProcessorError("No scope set.")

        attributes = internal_data.attributes
        values = attributes.get(attribute, [])
        if not isinstance(values, list):
            values = [values]
        if values:
            new_values = []
            for value in values:
                new_values.append(value + '@' + scope)
            attributes[attribute] = new_values
input:
import os
from abc import ABCMeta, abstractmethod

from gitfs import FuseMethodNotImplemented
from gitfs.filesystems.passthrough import PassthroughFuse

class View(PassthroughFuse):
    __metaclass__ = ABCMeta

    def __init__(self, *args, **kwargs):
        self.args = args

        for attr in kwargs:
            setattr(self, attr, kwargs[attr])

    def getxattr(self, path, name, position=0):
        """Get extended attributes"""
        raise FuseMethodNotImplemented

instruction:
Make View inherit from object instead of PassthroughFuse

output:
import os
from abc import ABCMeta, abstractmethod

from gitfs import FuseMethodNotImplemented

class View(object):
    __metaclass__ = ABCMeta

    def __init__(self, *args, **kwargs):
        self.args = args

        for attr in kwargs:
            setattr(self, attr, kwargs[attr])

    def getxattr(self, path, name, position=0):
        """Get extended attributes"""
        raise FuseMethodNotImplemented
input:
from markdown import Markdown
from markdown.inlinepatterns import ImagePattern, IMAGE_LINK_RE

from ..models import Image

class ImageLookupImagePattern(ImagePattern):
    def sanitize_url(self, url):
        if url.startswith("http"):
            return url
        else:
            try:
                image = Image.objects.get(pk=int(url))
                return image.image_path.url
            except Image.DoesNotExist:
                pass
            except ValueError:
                return url
        return ""

def parse(text):
    md = Markdown(extensions=["codehilite"])
    md.inlinePatterns["image_link"] = ImageLookupImagePattern(IMAGE_LINK_RE, md)
    html = md.convert(text)
    return html

instruction:
Add some extensions to the markdown parser

Ultimately we should make this a setting or hookset so it could be
overridden at the site level.

output:
from markdown import Markdown
from markdown.inlinepatterns import ImagePattern, IMAGE_LINK_RE

from ..models import Image

class ImageLookupImagePattern(ImagePattern):
    def sanitize_url(self, url):
        if url.startswith("http"):
            return url
        else:
            try:
                image = Image.objects.get(pk=int(url))
                return image.image_path.url
            except Image.DoesNotExist:
                pass
            except ValueError:
                return url
        return ""

def parse(text):
    md = Markdown(extensions=["codehilite", "tables", "smarty", "admonition", "toc"])
    md.inlinePatterns["image_link"] = ImageLookupImagePattern(IMAGE_LINK_RE, md)
    html = md.convert(text)
    return html
input:
from fractions import Fraction
from math import sqrt
from itertools import chain, cycle
from typing import Generator, Iterable, List, Tuple

def convergent_sequence(generator: Iterable[int]) -> \
        Generator[Fraction, None, None]:
    h = (0, 1)
    k = (1, 0)
    for a in generator:
        h = h[1], a * h[1] + h[0]
        k = k[1], a * k[1] + k[0]
        yield Fraction(h[-1], k[-1])

def continued_fraction_sqrt(n: int) -> Tuple[List[int], List[int]]:
    remainders = []
    continued_fraction = []
    remainder = (Fraction(1), Fraction(0))  # remainder is sqrt(n) + 0.
    sqrt_n = sqrt(n)
    while remainder not in remainders:
        remainders.append(remainder)
        a = int(remainder[0] * sqrt_n + remainder[1])
        continued_fraction.append(a)
        norm = (remainder[1] - a) ** 2 - remainder[0] ** 2 * n
        remainder = (-remainder[0] / norm, (remainder[1] - a) / norm)
    index = remainders.index(remainder)
    return continued_fraction[:index], continued_fraction[index:]

def convergents_sqrt(n: int) -> Generator[Fraction, None, None]:
    initial, repeat = continued_fraction_sqrt(n)
    convergents = convergent_sequence(chain(initial, cycle(repeat)))
    yield from convergents

instruction:
Make continued fractions sqrt much faster

output:
from fractions import Fraction
from math import sqrt
from itertools import chain, cycle
from typing import Generator, Iterable, List, Tuple

from .gcd import gcd
from ..sqrt import fsqrt

def convergent_sequence(generator: Iterable[int]) -> \
        Generator[Fraction, None, None]:
    h = (0, 1)
    k = (1, 0)
    for a in generator:
        h = h[1], a * h[1] + h[0]
        k = k[1], a * k[1] + k[0]
        yield Fraction(h[-1], k[-1])

def continued_fraction_sqrt(n: int) -> Tuple[List[int], List[int]]:
    sqrt_n = sqrt(n)
    remainders = []
    remainder = (0, 1)
    # remainder is an + (sqrt(n) - p) / q and these are initial.
    continued_fraction = []
    while remainder not in remainders:
        remainders.append(remainder)
        p, q = remainder
        q = (n - (p * p)) // q
        a = int((sqrt_n + p) / q)
        p = a * q - p
        continued_fraction.append(a)
        remainder = (p, q)
    index = remainders.index(remainder)
    return continued_fraction[1:index], continued_fraction[index:]

def convergents_sqrt(n: int) -> Generator[Fraction, None, None]:
    initial, repeat = continued_fraction_sqrt(n)
    convergents = convergent_sequence(chain(initial, cycle(repeat)))
    yield from convergents
input:
import numpy as np
from msmbuilder.msm import MarkovStateModel, BayesianMarkovStateModel
from matplotlib.axes import SubplotBase
from seaborn.apionly import JointGrid

from ..plots import plot_pop_resids, plot_msm_network, plot_timescales

rs = np.random.RandomState(42)
data = rs.randint(low=0, high=10, size=100000)
msm = MarkovStateModel()
msm.fit(data)
bmsm = BayesianMarkovStateModel()
bmsm.fit(data)

def test_plot_pop_resids():
    ax = plot_pop_resids(msm)
    assert isinstance(ax, JointGrid)

def test_plot_msm_network():
    ax = plot_msm_network(msm)
    assert isinstance(ax, SubplotBase)

def test_plot_timescales_msm():
    ax = plot_timescales(msm, n_timescales=3, xlabel='x', ylabel='y')
    assert isinstance(ax, SubplotBase)

def test_plot_timescales_bmsm():
    ax = plot_timescales(bmsm)
    assert isinstance(ax, SubplotBase)

instruction:
Add test for implied timescales plot

output:
import numpy as np
from msmbuilder.msm import MarkovStateModel, BayesianMarkovStateModel
from matplotlib.axes import SubplotBase
from seaborn.apionly import JointGrid

from ..plots import plot_pop_resids, plot_msm_network, plot_timescales, plot_implied_timescales

rs = np.random.RandomState(42)
data = rs.randint(low=0, high=10, size=100000)
msm = MarkovStateModel()
msm.fit(data)
bmsm = BayesianMarkovStateModel()
bmsm.fit(data)

def test_plot_pop_resids():
    ax = plot_pop_resids(msm)
    assert isinstance(ax, JointGrid)

def test_plot_msm_network():
    ax = plot_msm_network(msm)
    assert isinstance(ax, SubplotBase)

def test_plot_timescales_msm():
    ax = plot_timescales(msm, n_timescales=3, xlabel='x', ylabel='y')
    assert isinstance(ax, SubplotBase)

def test_plot_timescales_bmsm():
    ax = plot_timescales(bmsm)
    assert isinstance(ax, SubplotBase)

def test_plot_implied_timescales():
    lag_times = [1, 10, 50, 100, 200, 250, 500]
    msm_objs = []
    for lag in lag_times:
        # Construct MSM
        msm = MarkovStateModel(lag_time=lag, n_timescales=5)
        msm.fit(clustered_trajs)
        msm_objs.append(msm)
    ax = plot_implied_timescales(msm_objs)
    assert isinstance(ax, SubplotBase)
input:
# Copyright (c) 2020, Frappe Technologies Pvt. Ltd. and Contributors
from __future__ import unicode_literals

import shlex
import subprocess
import unittest

import frappe

def clean(value):
    if isinstance(value, (bytes, str)):
        value = value.decode().strip()
    return value

class BaseTestCommands:
    def execute(self, command):
        command = command.format(**{"site": frappe.local.site})
        command = shlex.split(command)
        self._proc = subprocess.run(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        self.stdout = clean(self._proc.stdout)
        self.stderr = clean(self._proc.stderr)
        self.returncode = clean(self._proc.returncode)

instruction:
test: Add tests for bench execute

output:
# Copyright (c) 2020, Frappe Technologies Pvt. Ltd. and Contributors
from __future__ import unicode_literals

import shlex
import subprocess
import unittest

import frappe

def clean(value):
    if isinstance(value, (bytes, str)):
        value = value.decode().strip()
    return value

class BaseTestCommands:
    def execute(self, command):
        command = command.format(**{"site": frappe.local.site})
        command = shlex.split(command)
        self._proc = subprocess.run(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        self.stdout = clean(self._proc.stdout)
        self.stderr = clean(self._proc.stderr)
        self.returncode = clean(self._proc.returncode)

class TestCommands(BaseTestCommands, unittest.TestCase):
    def test_execute(self):
        # execute a command expecting a numeric output
        self.execute("bench --site {site} execute frappe.db.get_database_size")
        self.assertEquals(self.returncode, 0)
        self.assertIsInstance(float(self.stdout), float)

        # execute a command expecting an errored output as local won't exist
        self.execute("bench --site {site} execute frappe.local.site")
        self.assertEquals(self.returncode, 1)
        self.assertIsNotNone(self.stderr)

        # execute a command with kwargs
        self.execute("""bench --site {site} execute frappe.bold --kwargs '{{"text": "DocType"}}'""")
        self.assertEquals(self.returncode, 0)
        self.assertEquals(self.stdout[1:-1], frappe.bold(text='DocType'))
input:
import smtplib, time

messages_sent = 0.0
start_time = time.time()
msg = file('examples/benchmarking/benchmark.eml').read()

while True:
    if (messages_sent % 10) == 0:
        current_time = time.time()
        print '%s msg-written/sec' % (messages_sent / (current_time - start_time))
    server = smtplib.SMTP('localhost', port=25)
    server.sendmail('foo@localhost', ['bar@localhost'], msg)
    server.quit()
    messages_sent += 1.0

instruction:
Switch to non-privileged port to make testing easier

output:
import smtplib, time

messages_sent = 0.0
start_time = time.time()
msg = file('examples/benchmarking/benchmark.eml').read()

while True:
    if (messages_sent % 10) == 0:
        current_time = time.time()
        print '%s msg-written/sec' % (messages_sent / (current_time - start_time))
    server = smtplib.SMTP('localhost', port=1025)
    server.sendmail('foo@localhost', ['bar@localhost'], msg)
    server.quit()
    messages_sent += 1.0
input:
from django.conf import settings

from airmozilla.main.models import Event

def sidebar(request):
    featured = Event.objects.approved().filter(public=True, featured=True)
    upcoming = Event.objects.upcoming().order_by('start_time')
    if not request.user.is_active:
        featured = featured.filter(public=True)
        upcoming = upcoming.filter(public=True)
    upcoming = upcoming[:settings.UPCOMING_SIDEBAR_COUNT]
    return {
        'upcoming': upcoming,
        'featured': featured
    }

instruction:
Fix context processor to correctly display internal featured videos.

output:
from django.conf import settings

from airmozilla.main.models import Event

def sidebar(request):
    featured = Event.objects.approved().filter(featured=True)
    upcoming = Event.objects.upcoming().order_by('start_time')
    if not request.user.is_active:
        featured = featured.filter(public=True)
        upcoming = upcoming.filter(public=True)
    upcoming = upcoming[:settings.UPCOMING_SIDEBAR_COUNT]
    return {
        'upcoming': upcoming,
        'featured': featured
    }
"""Test `osf clone` command.""" import os from mock import patch, mock_open, call from osfclient import OSF from osfclient.cli import clone from osfclient.tests.mocks import MockProject from osfclient.tests.mocks import MockArgs @patch.object(OSF, 'project', return_value=MockProject('1234')) def test_clone_project(OSF_project): # check that `osf clone` opens files with the right names and modes args = MockArgs(project='1234') mock_open_func = mock_open() with patch('osfclient.cli.open', mock_open_func): with patch('osfclient.cli.os.makedirs'): clone(args) OSF_project.assert_called_once_with('1234') # check that the project and the files have been accessed for store in OSF_project.return_value.storages: assert store._name_mock.called for f in store.files: assert f._path_mock.called fname = f._path_mock.return_value if fname.startswith('/'): fname = fname[1:] full_path = os.path.join('1234', store._name_mock.return_value, fname) assert call(full_path, 'wb') in mock_open_func.mock_calls
Fix osf clone test that was asking for a password
"""Test `osf clone` command.""" import os from mock import patch, mock_open, call from osfclient import OSF from osfclient.cli import clone from osfclient.tests.mocks import MockProject from osfclient.tests.mocks import MockArgs @patch.object(OSF, 'project', return_value=MockProject('1234')) def test_clone_project(OSF_project): # check that `osf clone` opens files with the right names and modes args = MockArgs(project='1234') mock_open_func = mock_open() with patch('osfclient.cli.open', mock_open_func): with patch('osfclient.cli.os.makedirs'): with patch('osfclient.cli.os.getenv', side_effect='SECRET'): clone(args) OSF_project.assert_called_once_with('1234') # check that the project and the files have been accessed for store in OSF_project.return_value.storages: assert store._name_mock.called for f in store.files: assert f._path_mock.called fname = f._path_mock.return_value if fname.startswith('/'): fname = fname[1:] full_path = os.path.join('1234', store._name_mock.return_value, fname) assert call(full_path, 'wb') in mock_open_func.mock_calls
input:
#!/usr/bin/python

import sys
import glob

sys.path.append("python_scripts/gen-py")
sys.path.append("gen-py/thrift_solr/")

from thrift.transport import TSocket
from thrift.server import TServer
#import thrift_solr

import ExtractorService
import sys
import readability
import readability

def extract_with_python_readability( raw_content ):
    doc = readability.Document( raw_content )

    return [ u'' + doc.short_title(),
             u'' + doc.summary() ]

class ExtractorHandler:
    def extract_html( self, raw_html ):
        print raw_html
        #raw_html = raw_html.encode( 'utf-8' )
        ret = extract_with_python_readability( raw_html )
        print ret[1]
        return ret

handler = ExtractorHandler()
processor = ExtractorService.Processor(handler)
listening_socket = TSocket.TServerSocket(port=9090)
server = TServer.TThreadPoolServer(processor, listening_socket)

print ("[Server] Started")
server.serve()

instruction:
Fix include path and ascii / utf8 errors.

output:
#!/usr/bin/python

import sys
import os
import glob

#sys.path.append(os.path.join(os.path.dirname(__file__), "gen-py"))
sys.path.append(os.path.join(os.path.dirname(__file__),"gen-py/thrift_solr/"))
sys.path.append(os.path.dirname(__file__) )

from thrift.transport import TSocket
from thrift.server import TServer
#import thrift_solr

import ExtractorService
import sys
import readability
import readability

def extract_with_python_readability( raw_content ):
    doc = readability.Document( raw_content )

    return [ u'' + doc.short_title(),
             u'' + doc.summary() ]

class ExtractorHandler:
    def extract_html( self, raw_html ):
        #print raw_html
        #raw_html = raw_html.encode( 'utf-8' )
        ret = extract_with_python_readability( raw_html )
        #print ret[1]
        return ret

handler = ExtractorHandler()
processor = ExtractorService.Processor(handler)
listening_socket = TSocket.TServerSocket(port=9090)
server = TServer.TThreadPoolServer(processor, listening_socket)

print ("[Server] Started")
server.serve()
input:
import warnings

from django.test.runner import DiscoverRunner
from django.utils.deprecation import RemovedInDjango20Warning, RemovedInDjango110Warning

class DevilryTestRunner(DiscoverRunner):
    def setup_test_environment(self, **kwargs):
        # warnings.filterwarnings('ignore', category=RemovedInDjango)
        super(DevilryTestRunner, self).setup_test_environment(**kwargs)
        warnings.filterwarnings('ignore', category=RemovedInDjango20Warning)
        warnings.filterwarnings('ignore', category=DeprecationWarning)
        warnings.filterwarnings('ignore', category=RemovedInDjango110Warning)

instruction:
project...DevilryTestRunner: Update warning ignores for Django 1.10.

output:
import warnings

from django.test.runner import DiscoverRunner
from django.utils.deprecation import RemovedInDjango20Warning

class DevilryTestRunner(DiscoverRunner):
    def setup_test_environment(self, **kwargs):
        # warnings.filterwarnings('ignore', category=RemovedInDjango)
        super(DevilryTestRunner, self).setup_test_environment(**kwargs)
        warnings.filterwarnings('ignore', category=RemovedInDjango20Warning)
        warnings.filterwarnings('ignore', category=DeprecationWarning)
input:
import hashlib

from datapackage_pipelines.utilities.extended_json import json

from ..errors import SpecError
from .dependency_resolver import resolve_dependencies

class HashCalculator(object):
    def __init__(self):
        self.all_pipeline_ids = {}

    def calculate_hash(self, spec):
        cache_hash = None
        if spec.pipeline_id in self.all_pipeline_ids:
            message = 'Duplicate key {0} in {1}' \
                .format(spec.pipeline_id, spec.abspath)
            spec.errors.append(SpecError('Duplicate Pipeline Id', message))
        else:
            cache_hash = resolve_dependencies(spec, self.all_pipeline_ids)

            if len(spec.errors) > 0:
                return cache_hash

            for step in spec.pipeline_details['pipeline']:
                m = hashlib.md5()
                m.update(cache_hash.encode('ascii'))
                with open(step['executor'], 'rb') as f:
                    m.update(f.read())
                m.update(json.dumps(step, ensure_ascii=True, sort_keys=True)
                         .encode('ascii'))
                cache_hash = m.hexdigest()
                step['_cache_hash'] = cache_hash

        self.all_pipeline_ids[spec.pipeline_id] = spec
        spec.cache_hash = cache_hash

instruction:
Fix error in error log

output:
import hashlib

from datapackage_pipelines.utilities.extended_json import json

from ..errors import SpecError
from .dependency_resolver import resolve_dependencies

class HashCalculator(object):
    def __init__(self):
        self.all_pipeline_ids = {}

    def calculate_hash(self, spec):
        cache_hash = None
        if spec.pipeline_id in self.all_pipeline_ids:
            message = 'Duplicate key {0} in {1}' \
                .format(spec.pipeline_id, spec.path)
            spec.errors.append(SpecError('Duplicate Pipeline Id', message))
        else:
            cache_hash = resolve_dependencies(spec, self.all_pipeline_ids)

            if len(spec.errors) > 0:
                return cache_hash

            for step in spec.pipeline_details['pipeline']:
                m = hashlib.md5()
                m.update(cache_hash.encode('ascii'))
                with open(step['executor'], 'rb') as f:
                    m.update(f.read())
                m.update(json.dumps(step, ensure_ascii=True, sort_keys=True)
                         .encode('ascii'))
                cache_hash = m.hexdigest()
                step['_cache_hash'] = cache_hash

        self.all_pipeline_ids[spec.pipeline_id] = spec
        spec.cache_hash = cache_hash
input:
import requests
from bs4 import BeautifulSoup
import json
from dotenv import load_dotenv
import os

load_dotenv(".env")

s = requests.Session()
r = s.get("https://kcls.bibliocommons.com/user/login", verify=False)

payload = {
    "name": os.environ.get("USER"),
    "user_pin": os.environ.get("PIN")
}

s.post("https://kcls.bibliocommons.com/user/login", data=payload)

r = s.get("https://kcls.bibliocommons.com/checkedout?display_quantity=50&page=1&view=medium")

soup = BeautifulSoup(r.text, "html.parser")

checkedOutList = soup.find("div", { "id": "bibList" })

checkedOutItems = []

for title in checkedOutList.find_all("div", { "class": "listItem" }):
    title_name = title.find("span", { "class": "title" })
    due_date = title.find("span", { "class": "item_due_date" })
    checkedOutItems.append({ "title": title_name.text.strip(), "due": due_date.text.strip() })

with open("checkedout.json", "w") as f:
    print "%d title(s) checked out" % len(checkedOutItems)
    f.write(json.dumps(checkedOutItems))

instruction:
Change .env variable to KCLS_USER

output:
import requests
from bs4 import BeautifulSoup
import json
from dotenv import load_dotenv
import os

load_dotenv(".env")

s = requests.Session()
r = s.get("https://kcls.bibliocommons.com/user/login", verify=False)

payload = {
    "name": os.environ.get("KCLS_USER"),
    "user_pin": os.environ.get("PIN")
}

p = s.post("https://kcls.bibliocommons.com/user/login", data=payload)

r = s.get("https://kcls.bibliocommons.com/checkedout?display_quantity=50&page=1&view=medium")

soup = BeautifulSoup(r.text, "html.parser")

checkedOutList = soup.find("div", { "id": "bibList" })

checkedOutItems = []

for title in checkedOutList.find_all("div", { "class": "listItem" }):
    title_name = title.find("span", { "class": "title" })
    due_date = title.find("span", { "class": "item_due_date" })
    checkedOutItems.append({ "title": title_name.text.strip(), "due": due_date.text.strip() })

with open("checkedout.json", "w") as f:
    print "%d title(s) checked out" % len(checkedOutItems)
    f.write(json.dumps(checkedOutItems))
input:
import itertools

import pytz

from babel.dates import format_time
from pylons import c

def pairwise(iterable):
    a, b = itertools.tee(iterable)
    next(b, None)
    return itertools.izip(a, b)

def pretty_time(dt):
    display_tz = pytz.timezone(c.liveupdate_event.timezone)

    return format_time(
        time=dt,
        tzinfo=display_tz,
        format="HH:mm z",
        locale=c.locale,
    )

instruction:
Make timestamps more specific as temporal context fades. Fixes #6.

output:
import datetime
import itertools

import pytz

from babel.dates import format_time, format_datetime
from pylons import c

def pairwise(iterable):
    a, b = itertools.tee(iterable)
    next(b, None)
    return itertools.izip(a, b)

def pretty_time(dt):
    display_tz = pytz.timezone(c.liveupdate_event.timezone)
    today = datetime.datetime.now(display_tz).date()
    date = dt.astimezone(display_tz).date()

    if date == today:
        return format_time(
            time=dt,
            tzinfo=display_tz,
            format="HH:mm z",
            locale=c.locale,
        )
    elif today - date < datetime.timedelta(days=365):
        return format_datetime(
            datetime=dt,
            tzinfo=display_tz,
            format="dd MMM HH:mm z",
            locale=c.locale,
        )
    else:
        return format_datetime(
            datetime=dt,
            tzinfo=display_tz,
            format="dd MMM YYYY HH:mm z",
            locale=c.locale,
        )
input:
from django.forms import ModelForm

from cyder.base.mixins import UsabilityFormMixin
from cyder.base.eav.forms import get_eav_form
from cyder.cydns.soa.models import SOA, SOAAV

class SOAForm(ModelForm, UsabilityFormMixin):
    class Meta:
        model = SOA
        fields = ('root_domain', 'primary', 'contact', 'expire',
                  'retry', 'refresh', 'minimum', 'ttl', 'description',
                  'is_signed', 'dns_enabled')
        exclude = ('serial', 'dirty',)

SOAAVForm = get_eav_form(SOAAV, SOA)

instruction:
Replace @ with . in soa form clean

output:
from django.forms import ModelForm

from cyder.base.mixins import UsabilityFormMixin
from cyder.base.eav.forms import get_eav_form
from cyder.cydns.soa.models import SOA, SOAAV

class SOAForm(ModelForm, UsabilityFormMixin):
    class Meta:
        model = SOA
        fields = ('root_domain', 'primary', 'contact', 'expire',
                  'retry', 'refresh', 'minimum', 'ttl', 'description',
                  'is_signed', 'dns_enabled')
        exclude = ('serial', 'dirty',)

    def clean(self, *args, **kwargs):
        contact = self.cleaned_data['contact']
        self.cleaned_data['contact'] = contact.replace('@', '.')
        return super(SOAForm, self).clean(*args, **kwargs)

SOAAVForm = get_eav_form(SOAAV, SOA)
input:
# -*- coding: utf-8 -*-
from django.utils.translation import ugettext_lazy as _

from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool

from . import models

class AnimationPlugin(CMSPluginBase):
    model = models.Animation
    name = _('Animation')
    render_template = 'djangocms_wow/animation.html'
    allow_children = True
    cache = True

    def render(self, context, instance, placeholder):
        context.update({'instance': instance})
        return context

plugin_pool.register_plugin(AnimationPlugin)

class WOWAnimationPlugin(CMSPluginBase):
    model = models.WOWAnimation
    name = _("Wow Animation")
    render_template = 'djangocms_wow/wow_animation.html'
    allow_children = True
    cache = True

    def render(self, context, instance, placeholder):
        context.update({'instance': instance})
        return context

plugin_pool.register_plugin(WOWAnimationPlugin)

instruction:
Allow WOW animations to be used in text plugin.

output:
# -*- coding: utf-8 -*-
from django.utils.translation import ugettext_lazy as _

from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool

from . import models

class AnimationPlugin(CMSPluginBase):
    model = models.Animation
    name = _('Animation')
    render_template = 'djangocms_wow/animation.html'
    allow_children = True
    text_enabled = True
    cache = True

    def render(self, context, instance, placeholder):
        context.update({'instance': instance})
        return context

plugin_pool.register_plugin(AnimationPlugin)

class WOWAnimationPlugin(CMSPluginBase):
    model = models.WOWAnimation
    name = _("Wow Animation")
    render_template = 'djangocms_wow/wow_animation.html'
    allow_children = True
    text_enabled = True
    cache = True

    def render(self, context, instance, placeholder):
        context.update({'instance': instance})
        return context

plugin_pool.register_plugin(WOWAnimationPlugin)
input:
# -*- coding: utf-8 -*-

import argparse
import sys

from os import path

from builder import DeconstJSONBuilder
from sphinx.application import Sphinx
from sphinx.builders import BUILTIN_BUILDERS

def build(argv):
    """
    Invoke Sphinx with locked arguments to generate JSON content.
    """

    parser = argparse.ArgumentParser()
    parser.add_argument("-s", "--submit",
                        help="Submit results to the content store.",
                        action="store_true")
    args = parser.parse_args(argv[1:])

    # I am a terrible person
    BUILTIN_BUILDERS['deconst'] = DeconstJSONBuilder

    # Lock source and destination to the same paths as the Makefile.
    srcdir, destdir = '.', '_build/deconst'
    doctreedir = path.join(destdir, '.doctrees')

    app = Sphinx(srcdir=srcdir, confdir=srcdir, outdir=destdir,
                 doctreedir=doctreedir, buildername="deconst",
                 confoverrides={}, status=sys.stdout, warning=sys.stderr,
                 freshenv=True, warningiserror=False, tags=[], verbosity=0,
                 parallel=1)
    app.build(True, [])

    if app.statuscode != 0 or not args.submit:
        return app.statuscode

    print("submit active")
    return 0

instruction:
Validate the presence of CONTENT_STORE.

output:
# -*- coding: utf-8 -*-

from __future__ import print_function

import argparse
import sys
import os

from builder import DeconstJSONBuilder
from sphinx.application import Sphinx
from sphinx.builders import BUILTIN_BUILDERS

def build(argv):
    """
    Invoke Sphinx with locked arguments to generate JSON content.
    """

    parser = argparse.ArgumentParser()
    parser.add_argument("-s", "--submit",
                        help="Submit results to the content store.",
                        action="store_true")
    args = parser.parse_args(argv[1:])

    content_store_url = os.getenv("CONTENT_STORE")
    if args.submit and not content_store_url:
        print("Please set CONTENT_STORE if submitting results.",
              file=sys.stderr)
        sys.exit(1)

    # I am a terrible person
    BUILTIN_BUILDERS['deconst'] = DeconstJSONBuilder

    # Lock source and destination to the same paths as the Makefile.
    srcdir, destdir = '.', '_build/deconst'
    doctreedir = os.path.join(destdir, '.doctrees')

    app = Sphinx(srcdir=srcdir, confdir=srcdir, outdir=destdir,
                 doctreedir=doctreedir, buildername="deconst",
                 confoverrides={}, status=sys.stdout, warning=sys.stderr,
                 freshenv=True, warningiserror=False, tags=[], verbosity=0,
                 parallel=1)
    app.build(True, [])

    if app.statuscode != 0 or not args.submit:
        return app.statuscode

    print("submit active")
    return 0
input:
#!/usr/bin/env python
################################################################################
#    Created by Oscar Martinez                                                 #
#    [email protected]                                                     #
################################################################################
from flask import Flask, request, jsonify
from TermSuggester import TermSuggester, SearchMethodAggregation
from elsearch import ELSearch
from wnsearch import WNSearch

app = Flask(__name__)

searchMethodClasses = (ELSearch, WNSearch)
initializeParameters = ((None, False),('/home/oscarr/concept-search-wd/data/wordnet', False))

ts = TermSuggester(searchMethodClasses, initializeParameters)

@app.route("/suggester", methods = ['GET',])
def api_term():
    if request.method == 'GET':
        if 'term' in request.args:
            data = ts.getSuggestions(str(request.args['term']), SearchMethodAggregation.SumMethod)
            resp = jsonify(data)
            resp.status_code = 200
            return resp
        else:
            return 'Error: Need to specif a term!'

if __name__ == "__main__":
    app.run(debug=True)

instruction:
Change init param of wordnet

output:
#!/usr/bin/env python
################################################################################
#    Created by Oscar Martinez                                                 #
#    [email protected]                                                     #
################################################################################
from flask import Flask, request, jsonify
from TermSuggester import TermSuggester, SearchMethodAggregation
from elsearch import ELSearch
from wnsearch import WNSearch

app = Flask(__name__)

searchMethodClasses = (ELSearch, WNSearch)
initializeParameters = ((None, False),([]))

ts = TermSuggester(searchMethodClasses, initializeParameters)

@app.route("/suggester", methods = ['GET',])
def api_term():
    if request.method == 'GET':
        if 'term' in request.args:
            data = ts.getSuggestions(str(request.args['term']), SearchMethodAggregation.SumMethod)
            resp = jsonify(data)
            resp.status_code = 200
            return resp
        else:
            return 'Error: Need to specif a term!'

if __name__ == "__main__":
    app.run(debug=True)
input:
import sys
import megatableau, data_prob
import scipy, scipy.optimize

# Argument parsing
assert len(sys.argv)==2
tableau_file_name = sys.argv[1]

# Read in data
mt = megatableau.MegaTableau(tableau_file_name)

w_0 = -scipy.rand(len(mt.weights))
nonpos_reals = [(-25,0) for wt in mt.weights]

def one_minus_probability(weights, tableau):
    return 1.0-data_prob.probability(weights, tableau)

def negative_probability(weights, tableau):
    return -data_prob.probability(weights, tableau)

learned_weights = scipy.optimize.fmin_tnc(data_prob.probability, w_0, args = (mt.tableau,), bounds=nonpos_reals, approx_grad=True)

print(learned_weights)

print("Probability given weights found by the original MEGT:")
print(data_prob.probability([-2.19,-0.43], mt.tableau))

instruction:
Comment out lines accidentally left in the last commit. Oops.

output:
import sys
import megatableau, data_prob
import scipy, scipy.optimize

# Argument parsing
assert len(sys.argv)==2
tableau_file_name = sys.argv[1]

# Read in data
mt = megatableau.MegaTableau(tableau_file_name)

w_0 = -scipy.rand(len(mt.weights))
nonpos_reals = [(-25,0) for wt in mt.weights]

def one_minus_probability(weights, tableau):
    return 1.0-data_prob.probability(weights, tableau)

def negative_probability(weights, tableau):
    return -data_prob.probability(weights, tableau)

learned_weights = scipy.optimize.fmin_tnc(data_prob.probability, w_0, args = (mt.tableau,), bounds=nonpos_reals, approx_grad=True)

print(learned_weights)

# print("Probability given weights found by the original MEGT:")
# print(data_prob.probability([-2.19,-0.43], mt.tableau))
input:
import os
import platform

from .generic import MeshScript
from ..constants import log

from distutils.spawn import find_executable

_search_path = os.environ['PATH']
if platform.system() == 'Windows':
    # split existing path by delimiter
    _search_path = [i for i in _search_path.split(';') if len(i) > 0]
    _search_path.append('C:\Program Files')
    _search_path.append('C:\Program Files (x86)')
    _search_path = ';'.join(_search_path)
    log.debug('searching for vhacd in: %s', _search_path)

_vhacd_executable = None
for _name in ['vhacd', 'testVHACD']:
    _vhacd_executable = find_executable(_name, path=_search_path)
    if _vhacd_executable is not None:
        break

exists = _vhacd_executable is not None

def convex_decomposition(mesh, **kwargs):
    if not exists:
        raise ValueError('No vhacd available!')

    argstring = ' --input $mesh_0 --output $mesh_post --log $script'

    # pass through extra arguments from the input dictionary
    for key, value in kwargs.items():
        argstring += ' --{} {}'.format(str(key), str(value))

    with MeshScript(meshes=[mesh], script='', tmpfile_ext='obj') as vhacd:
        result = vhacd.run(_vhacd_executable + argstring)
    return result

instruction:
Use raw string for Windows paths

This avoids:
DeprecationWarning: invalid escape sequence \P
  _search_path.append('C:\Program Files')

output:
import os
import platform

from .generic import MeshScript
from ..constants import log

from distutils.spawn import find_executable

_search_path = os.environ['PATH']
if platform.system() == 'Windows':
    # split existing path by delimiter
    _search_path = [i for i in _search_path.split(';') if len(i) > 0]
    _search_path.append(r'C:\Program Files')
    _search_path.append(r'C:\Program Files (x86)')
    _search_path = ';'.join(_search_path)
    log.debug('searching for vhacd in: %s', _search_path)

_vhacd_executable = None
for _name in ['vhacd', 'testVHACD']:
    _vhacd_executable = find_executable(_name, path=_search_path)
    if _vhacd_executable is not None:
        break

exists = _vhacd_executable is not None

def convex_decomposition(mesh, **kwargs):
    if not exists:
        raise ValueError('No vhacd available!')

    argstring = ' --input $mesh_0 --output $mesh_post --log $script'

    # pass through extra arguments from the input dictionary
    for key, value in kwargs.items():
        argstring += ' --{} {}'.format(str(key), str(value))

    with MeshScript(meshes=[mesh], script='', tmpfile_ext='obj') as vhacd:
        result = vhacd.run(_vhacd_executable + argstring)
    return result
input:
from django import template
from django.template import Template

register = template.Library()

@register.simple_tag(takes_context=True)
def sso_meta(context):
    return Template('<meta name="title" content="${ title }">' + ' ' +
                    '<meta name="description" content="${ subtitle }">' + ' ' +
                    ## OG (Open Graph) title and description added below to give social media info to display
                    ## (https://developers.facebook.com/docs/opengraph/howtos/maximizing-distribution-media-content#tags)
                    '<meta property="og:title" content="${ title }">' + ' ' +
                    '<meta property="og:description" content="${ subtitle }">' + ' ' +
                    '<meta prefix="og: http://ogp.me/ns#" name="image" property="og:image" content="${ banner_image[\'large\'][\'url\'] }">' + ' ' +
                    '<meta property="og:image:width" content="512">' + ' ' +
                    '<meta property="og:image:height" content="512">' + ' ' +
                    '<meta name="twitter:image" content="${ banner_image[\'large\'][\'url\'] }">' + ' ' +
                    '<meta name="twitter:card" content="${ banner_image[\'large\'][\'url\'] }">' + ' ' +
                    '<meta name="twitter:site" content="@PhilanthropyUni">' + ' ' +
                    '<meta name="twitter:title" content="${ title }">' + ' ' +
                    '<meta name="twitter:description" content="${ subtitle }">').render(context);

instruction:
Add Django Custom Tag SSO

output:
from django import template
from django.template.loader import get_template

register = template.Library()

@register.simple_tag(takes_context=True)
def sso_meta(context):
    return get_template('features/specializations/sso_meta_template.html').render(context.flatten())
input:
# -*- coding: utf-8 -*-
import django
from django.conf import settings
from django.utils.importlib import import_module
from django.utils.module_loading import module_has_submodule

def autodiscover():
    """Auto-discover INSTALLED_APPS mails.py modules."""

    for app in settings.INSTALLED_APPS:
        module = '%s.mails' % app

        # Attempt to import the app's 'mails' module
        try:
            import_module(module)
        except:
            # Decide whether to bubble up this error. If the app just
            # doesn't have a mails module, we can ignore the error
            # attempting to import it, otherwise we want it to bubble up.
            app_module = import_module(app)
            if module_has_submodule(app_module, 'mails'):
                raise

# If we're using Django >= 1.7, use the new app-loading mecanism which is way
# better.
if django.VERSION < (1, 7):
    autodiscover()

instruction:
Use standard library instead of django.utils.importlib

> django.utils.importlib is a compatibility library for when Python 2.6 was
> still supported. It has been obsolete since Django 1.7, which dropped support
> for Python 2.6, and is removed in 1.9 per the deprecation cycle.
> Use Python's import_module function instead
> — [1]

References:
[1] http://stackoverflow.com/a/32763639
[2] https://docs.djangoproject.com/en/1.9/internals/deprecation/#deprecation-removed-in-1-9

output:
# -*- coding: utf-8 -*-
import django
from django.conf import settings
from django.utils.module_loading import module_has_submodule

try:
    from importlib import import_module
except ImportError:
    # Compatibility for python-2.6
    from django.utils.importlib import import_module

def autodiscover():
    """Auto-discover INSTALLED_APPS mails.py modules."""

    for app in settings.INSTALLED_APPS:
        module = '%s.mails' % app

        # Attempt to import the app's 'mails' module
        try:
            import_module(module)
        except:
            # Decide whether to bubble up this error. If the app just
            # doesn't have a mails module, we can ignore the error
            # attempting to import it, otherwise we want it to bubble up.
            app_module = import_module(app)
            if module_has_submodule(app_module, 'mails'):
                raise

# If we're using Django >= 1.7, use the new app-loading mecanism which is way
# better.
if django.VERSION < (1, 7):
    autodiscover()
input:
# Copyright 2013 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals

from werkzeug.routing import Rule, EndpointPrefix

urls = [
    EndpointPrefix("warehouse.packaging.views.", [
        Rule(
            "/projects/<project_name>/",
            methods=["GET"],
            endpoint="project_detail",
        ),
        Rule(
            "/projects/<project_name>/<version>/",
            methods=["GET"],
            endpoint="project_detail",
        ),
    ]),
]

instruction:
Remove the plural from the url

output:
# Copyright 2013 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals

from werkzeug.routing import Rule, EndpointPrefix

urls = [
    EndpointPrefix("warehouse.packaging.views.", [
        Rule(
            "/project/<project_name>/",
            methods=["GET"],
            endpoint="project_detail",
        ),
        Rule(
            "/project/<project_name>/<version>/",
            methods=["GET"],
            endpoint="project_detail",
        ),
    ]),
]
input:
import kindred

def test_pubannotation():
    corpus = kindred.pubannotation.load('bionlp-st-gro-2013-development')

    assert isinstance(corpus, kindred.Corpus)

    fileCount = len(corpus.documents)
    entityCount = sum([ len(d.entities) for d in corpus.documents ])
    relationCount = sum([ len(d.relations) for d in corpus.documents ])

    assert fileCount == 50
    assert relationCount == 1454
    assert entityCount == 2657

if __name__ == '__main__':
    test_pubannotation()

instruction:
Simplify pubannotation test to not check exact numbers

output:
import kindred

def test_pubannotation():
    corpus = kindred.pubannotation.load('bionlp-st-gro-2013-development')

    assert isinstance(corpus, kindred.Corpus)

    fileCount = len(corpus.documents)
    entityCount = sum([ len(d.entities) for d in corpus.documents ])
    relationCount = sum([ len(d.relations) for d in corpus.documents ])

    assert fileCount > 0
    assert relationCount > 0
    assert entityCount > 0

if __name__ == '__main__':
    test_pubannotation()
""" django-email-bandit is a Django email backend for hijacking email sending in a test environment. """ __version_info__ = { 'major': 0, 'minor': 2, 'micro': 0, 'releaselevel': 'final', } def get_version(): """ Return the formatted version information """ vers = ["%(major)i.%(minor)i" % __version_info__, ] if __version_info__['micro']: vers.append(".%(micro)i" % __version_info__) if __version_info__['releaselevel'] != 'final': vers.append('%(releaselevel)s' % __version_info__) return ''.join(vers) __version__ = get_version()
Bump version number to reflect dev status.
""" django-email-bandit is a Django email backend for hijacking email sending in a test environment. """ __version_info__ = { 'major': 1, 'minor': 0, 'micro': 0, 'releaselevel': 'dev', } def get_version(): """ Return the formatted version information """ vers = ["%(major)i.%(minor)i" % __version_info__, ] if __version_info__['micro']: vers.append(".%(micro)i" % __version_info__) if __version_info__['releaselevel'] != 'final': vers.append('%(releaselevel)s' % __version_info__) return ''.join(vers) __version__ = get_version()
org = "servo" ignore_repos = ["skia", "skia-snapshots", "cairo", "libpng", "libcss", "libhubbub", "libparserutils", "libwapcaplet", "pixman"] count_forks = ["glutin","rust-openssl"] # Path to where we'll dump the bare checkouts. Must end in / clones_dir = "repos/" # Path to the concatenated log log_path = "log.txt" # Nuke the clones_dir afterwards? destroy_clones = True
Remove libfreetype2, which should have been omitted and was breaking the scripts
org = "servo" ignore_repos = ["skia", "skia-snapshots", "cairo", "libpng", "libcss", "libhubbub", "libparserutils", "libwapcaplet", "pixman", "libfreetype2"] count_forks = ["glutin","rust-openssl"] # Path to where we'll dump the bare checkouts. Must end in / clones_dir = "repos/" # Path to the concatenated log log_path = "log.txt" # Nuke the clones_dir afterwards? destroy_clones = True
input:
patches = [
    {
        "op": "move",
        "from": "/ResourceTypes/AWS::Transfer::Server/Properties/Protocols/ItemType",
        "path": "/ResourceTypes/AWS::Transfer::Server/Properties/Protocols/PrimitiveItemType",
    },
    {
        "op": "replace",
        "path": "/ResourceTypes/AWS::Transfer::Server/Properties/Protocols/PrimitiveItemType",
        "value": "String",
    },
    {
        "op": "move",
        "from": "/ResourceTypes/AWS::Transfer::User/Properties/SshPublicKeys/ItemType",
        "path": "/ResourceTypes/AWS::Transfer::User/Properties/SshPublicKeys/PrimitiveItemType",
    },
    {
        "op": "replace",
        "path": "/ResourceTypes/AWS::Transfer::User/Properties/SshPublicKeys/PrimitiveItemType",
        "value": "String",
    },
]

instruction:
Fix spec issue with Transfer::Server ProtocolDetails

output:
patches = [
    {
        "op": "move",
        "from": "/ResourceTypes/AWS::Transfer::Server/Properties/Protocols/ItemType",
        "path": "/ResourceTypes/AWS::Transfer::Server/Properties/Protocols/PrimitiveItemType",
    },
    {
        "op": "replace",
        "path": "/ResourceTypes/AWS::Transfer::Server/Properties/Protocols/PrimitiveItemType",
        "value": "String",
    },
    {
        "op": "move",
        "from": "/ResourceTypes/AWS::Transfer::User/Properties/SshPublicKeys/ItemType",
        "path": "/ResourceTypes/AWS::Transfer::User/Properties/SshPublicKeys/PrimitiveItemType",
    },
    {
        "op": "replace",
        "path": "/ResourceTypes/AWS::Transfer::User/Properties/SshPublicKeys/PrimitiveItemType",
        "value": "String",
    },
    {
        "op": "move",
        "from": "/PropertyTypes/AWS::Transfer::Server.ProtocolDetails/Properties/As2Transports/ItemType",
        "path": "/PropertyTypes/AWS::Transfer::Server.ProtocolDetails/Properties/As2Transports/PrimitiveItemType",
    },
    {
        "op": "replace",
        "path": "/PropertyTypes/AWS::Transfer::Server.ProtocolDetails/Properties/As2Transports/PrimitiveItemType",
        "value": "String",
    },
]
input:
from django.contrib.auth.forms import UserCreationForm, UserChangeForm

from .models import UniqueEmailUser

class UniqueEmailUserCreationForm(UserCreationForm):
    """
    A form that creates a UniqueEmailUser.
    """

    def __init__(self, *args, **kargs):
        super(UniqueEmailUserCreationForm, self).__init__(*args, **kargs)
        del self.fields['username']

    class Meta:
        model = UniqueEmailUser
        fields = ("email",)

class UniqueEmailUserChangeForm(UserChangeForm):
    """
    A form for updating a UniqueEmailUser.
    """

    def __init__(self, *args, **kargs):
        super(UniqueEmailUserChangeForm, self).__init__(*args, **kargs)
        del self.fields['username']

    class Meta:
        model = UniqueEmailUser
        fields = ("email",)

instruction:
Fix bug in admin user editing

Fixes KeyError when creating or editing a UniqueEmailUser in the admin
interface.

output:
from django.contrib.auth.forms import UserCreationForm, UserChangeForm

from .models import UniqueEmailUser

class UniqueEmailUserCreationForm(UserCreationForm):
    """
    A form that creates a UniqueEmailUser.
    """

    class Meta:
        model = UniqueEmailUser
        fields = ("email",)

class UniqueEmailUserChangeForm(UserChangeForm):
    """
    A form for updating a UniqueEmailUser.
    """

    class Meta:
        model = UniqueEmailUser
        fields = ("email",)
import random import time from datetime import datetime from django.core.cache import cache class MemcacheLock(object): def __init__(self, identifier, cache, unique_value): self.identifier = identifier self._cache = cache self.unique_value = unique_value @classmethod def acquire(cls, identifier, wait=True, steal_after_ms=None): start_time = datetime.utcnow() unique_value = random.randint(1, 100000) while True: acquired = cache.add(identifier, unique_value) if acquired: return cls(identifier, cache, unique_value) elif not wait: return None else: # We are waiting for the lock if steal_after_ms and (datetime.utcnow() - start_time).total_seconds() * 1000 > steal_after_ms: # Steal anyway cache.set(identifier, unique_value) return cls(identifier, cache, unique_value) time.sleep(0) def release(self): cache = self._cache # Delete the key if it was ours. There is a race condition here # if something steals the lock between the if and the delete... if cache.get(self.identifier) == self.unique_value: cache.delete(self.identifier)
Remove pointless `_cache` attribute on MemcacheLock class. If this was doing anything useful, I have no idea what it was.
import random import time from datetime import datetime from django.core.cache import cache class MemcacheLock(object): def __init__(self, identifier, unique_value): self.identifier = identifier self.unique_value = unique_value @classmethod def acquire(cls, identifier, wait=True, steal_after_ms=None): start_time = datetime.utcnow() unique_value = random.randint(1, 100000) while True: acquired = cache.add(identifier, unique_value) if acquired: return cls(identifier, unique_value) elif not wait: return None else: # We are waiting for the lock if steal_after_ms and (datetime.utcnow() - start_time).total_seconds() * 1000 > steal_after_ms: # Steal anyway cache.set(identifier, unique_value) return cls(identifier, unique_value) time.sleep(0) def release(self): # Delete the key if it was ours. There is a race condition here # if something steals the lock between the if and the delete... if cache.get(self.identifier) == self.unique_value: cache.delete(self.identifier)
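The lock above relies on Django's cache.add being atomic: it sets the key only if it is absent and returns whether it did. A minimal sketch of that primitive, assuming a configured Django cache backend:

from django.core.cache import cache

# Only the first caller gets True; later callers see the key already set.
if cache.add("my-lock", "token-123"):
    try:
        pass  # critical section
    finally:
        cache.delete("my-lock")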
"""Read events and parameters from your Axis device.""" import asyncio import argparse import logging import sys from axis import AxisDevice async def main(args): loop = asyncio.get_event_loop() device = AxisDevice( loop=loop, host=args.host, username=args.username, password=args.password, port=args.port) if args.params: await loop.run_in_executor(None, device.vapix.initialize_params) await loop.run_in_executor(None, device.vapix.initialize_ports) await loop.run_in_executor(None, device.vapix.initialize_users) if not args.events: return if args.events: device.start() try: while True: await asyncio.sleep(1) except KeyboardInterrupt: pass finally: device.stop() if __name__ == "__main__": logging.basicConfig(format='%(message)s', level=logging.DEBUG) parser = argparse.ArgumentParser() parser.add_argument('host', type=str) parser.add_argument('username', type=str) parser.add_argument('password', type=str) parser.add_argument('-p', '--port', type=int, default=80) parser.add_argument('--events', action='store_true') parser.add_argument('--params', action='store_true') args = parser.parse_args() asyncio.run(main(args))
Fix main failing when no event_callback is set
"""Read events and parameters from your Axis device.""" import asyncio import argparse import logging import sys from axis import AxisDevice async def main(args): loop = asyncio.get_event_loop() device = AxisDevice( loop=loop, host=args.host, username=args.username, password=args.password, port=args.port) if args.params: await loop.run_in_executor(None, device.vapix.initialize_params) await loop.run_in_executor(None, device.vapix.initialize_ports) await loop.run_in_executor(None, device.vapix.initialize_users) if not args.events: return if args.events: def event_handler(action, event): print(action, event) device.enable_events(event_callback=event_handler) device.start() try: while True: await asyncio.sleep(1) except KeyboardInterrupt: pass finally: device.stop() if __name__ == "__main__": logging.basicConfig(format='%(message)s', level=logging.DEBUG) parser = argparse.ArgumentParser() parser.add_argument('host', type=str) parser.add_argument('username', type=str) parser.add_argument('password', type=str) parser.add_argument('-p', '--port', type=int, default=80) parser.add_argument('--events', action='store_true') parser.add_argument('--params', action='store_true') args = parser.parse_args() asyncio.run(main(args))
import webquery from lxml import etree import inspect from expression import Expression from collections import defaultdict class Parser(object): registry = defaultdict(dict) @classmethod def __init_subclass__(cls): for name, member in inspect.getmembers(cls): if isinstance(member, Expression): cls.registry[cls.__name__][name] = member @property def fields(self): cls = self.__class__ return cls.registry[cls.__name__] def parse(self, url): content = webquery.urlcontent(url) root = etree.HTML(content, base_url=url) data = {name: expr.parse(root) for name, expr in self.fields.items()} data['url'] = url return data
Add ability to customize URL
import webquery from lxml import etree import inspect from expression import Expression from collections import defaultdict class Parser(object): registry = defaultdict(dict) @classmethod def __init_subclass__(cls): for name, member in inspect.getmembers(cls): if isinstance(member, Expression): cls.registry[cls.__name__][name] = member @property def fields(self): cls = self.__class__ return cls.registry[cls.__name__] def canonical_url(self, url): """By overriding this method canonical url can be used""" return url def parse(self, url): canonical_url = self.canonical_url(url) content = webquery.urlcontent(canonical_url) root = etree.HTML(content, base_url=canonical_url) data = {name: expr.parse(root) for name, expr in self.fields.items()} data['url'] = canonical_url return data
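A subclass opts in by overriding the hook; a hypothetical sketch (the query-string stripping below is invented for illustration):

class ExampleParser(Parser):
    def canonical_url(self, url):
        # Drop query parameters so equivalent URLs share one canonical form.
        return url.split('?', 1)[0]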
import os import dj_database_url from .base import * # noqa: F401,F403 DEBUG = False # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = os.environ['SECRET_KEY'] # Database # https://docs.djangoproject.com/en/1.11/ref/settings/#databases DATABASES = { 'default': dj_database_url.config(conn_max_age=500) } # Email SENDGRID_USERNAME = os.environ.get('SENDGRID_USERNAME', None) # noqa: F405 SENDGRID_PASSWORD = os.environ.get('SENDGRID_PASSWORD', None) # noqa: F405 # Use SendGrid if we have the addon installed, else just print to console which # is accessible via Heroku logs if SENDGRID_USERNAME and SENDGRID_PASSWORD: EMAIL_HOST = 'smtp.sendgrid.net' EMAIL_HOST_USER = SENDGRID_USERNAME EMAIL_HOST_PASSWORD = SENDGRID_PASSWORD EMAIL_PORT = 587 EMAIL_USE_TLS = True EMAIL_TIMEOUT = 60 else: EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
Add a DEBUG environment option to Heroku settings.
import os import dj_database_url from .base import * # noqa: F401,F403 DEBUG = os.environ.get('DEBUG', False) # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = os.environ['SECRET_KEY'] # Database # https://docs.djangoproject.com/en/1.11/ref/settings/#databases DATABASES = { 'default': dj_database_url.config(conn_max_age=500) } # Email SENDGRID_USERNAME = os.environ.get('SENDGRID_USERNAME', None) # noqa: F405 SENDGRID_PASSWORD = os.environ.get('SENDGRID_PASSWORD', None) # noqa: F405 # Use SendGrid if we have the addon installed, else just print to console which # is accessible via Heroku logs if SENDGRID_USERNAME and SENDGRID_PASSWORD: EMAIL_HOST = 'smtp.sendgrid.net' EMAIL_HOST_USER = SENDGRID_USERNAME EMAIL_HOST_PASSWORD = SENDGRID_PASSWORD EMAIL_PORT = 587 EMAIL_USE_TLS = True EMAIL_TIMEOUT = 60 else: EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
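One caveat with the DEBUG option above: environment variables are always strings, so even DEBUG=False in the environment is truthy. A stricter parse, shown as a sketch rather than as part of the change, might look like:

DEBUG = os.environ.get('DEBUG', '').lower() in ('1', 'true', 'yes')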
import rethinkdb as r from . import coerce, geo, operators, terms from .coerce import COERSIONS from .operators import BOOLEAN, EXPRESSIONS, MODIFIERS from .terms import TERMS from .exceptions import ReqonError, InvalidTypeError, InvalidFilterError def query(query): try: reql = r.db(query['$db']).table(query['$table']) except KeyError: try: reql = r.table(query['$table']) except KeyError: raise ReqonError('The query descriptor requires a $table key.') return build_terms(query['$query'], reql) def build_terms(reql, query): for sequence in query: term = sequence[0] try: reql = TERMS[term](reql, *sequence[1:]) except ReqonError: raise except r.ReqlError: message = 'Invalid values for {0} with args {1}' raise ReqonError(message.format(term, sequence[1:])) except Exception: message = 'Unknown exception, {0}: {1}' raise ReqonError(message.format(term, sequence[1:])) return reql
Fix argument order of reqon.deprecated.build_terms().
import rethinkdb as r from . import coerce, geo, operators, terms from .coerce import COERSIONS from .operators import BOOLEAN, EXPRESSIONS, MODIFIERS from .terms import TERMS from .exceptions import ReqonError, InvalidTypeError, InvalidFilterError def query(query): try: reql = r.db(query['$db']).table(query['$table']) except KeyError: try: reql = r.table(query['$table']) except KeyError: raise ReqonError('The query descriptor requires a $table key.') return build_terms(reql, query['$query']) def build_terms(reql, query): for sequence in query: term = sequence[0] try: reql = TERMS[term](reql, *sequence[1:]) except ReqonError: raise except r.ReqlError: message = 'Invalid values for {0} with args {1}' raise ReqonError(message.format(term, sequence[1:])) except Exception: message = 'Unknown exception, {0}: {1}' raise ReqonError(message.format(term, sequence[1:])) return reql
"""Notify Slack channel.""" import time from ..utils import get_properties, get_template, post_slack_message class SlackNotification: """Post slack notification. Inform users about infrastructure changes to prod* accounts. """ def __init__(self, app=None, env=None, prop_path=None): self.info = {'app': app, 'env': env, 'properties': prop_path} timestamp = time.strftime("%B %d, %Y %H:%M:%S %Z", time.gmtime()) self.info['timestamp'] = timestamp self.settings = get_properties(self.info['properties']) self.info['config_commit_short'] = self.settings['pipeline'][ 'config_commit'][0:11] def post_message(self): """Send templated message to **#deployments-{env}**.""" message = get_template( template_file='slack-templates/pipeline-prepare-ran.j2', info=self.info) channel = '#deployments-{}'.format(self.info['env'].lower()) post_slack_message(message, channel) def notify_slack_channel(self): """Post message to a defined Slack channel.""" message = get_template( template_file='slack-templates/pipeline-prepare-ran.j2', info=self.info) if self.settings['pipeline']['notifications']['slack']: post_slack_message( message, self.settings['pipeline']['notifications']['slack'])
fix: Compute the timestamp before building the dict so it can be inserted directly
"""Notify Slack channel.""" import time from ..utils import get_properties, get_template, post_slack_message class SlackNotification: """Post slack notification. Inform users about infrastructure changes to prod* accounts. """ def __init__(self, app=None, env=None, prop_path=None): timestamp = time.strftime("%B %d, %Y %H:%M:%S %Z", time.gmtime()) self.info = {'app': app, 'env': env, 'properties': prop_path, 'timestamp': timestamp} self.settings = get_properties(self.info['properties']) self.info['config_commit_short'] = self.settings['pipeline'][ 'config_commit'][0:11] def post_message(self): """Send templated message to **#deployments-{env}**.""" message = get_template( template_file='slack-templates/pipeline-prepare-ran.j2', info=self.info) channel = '#deployments-{}'.format(self.info['env'].lower()) post_slack_message(message, channel) def notify_slack_channel(self): """Post message to a defined Slack channel.""" message = get_template( template_file='slack-templates/pipeline-prepare-ran.j2', info=self.info) if self.settings['pipeline']['notifications']['slack']: post_slack_message( message, self.settings['pipeline']['notifications']['slack'])
from django.conf.urls import patterns, include, url from django.contrib import admin admin.autodiscover() urlpatterns = patterns('', # Examples: # url(r'^$', 'pysearch.views.home', name='home'), # url(r'^blog/', include('blog.urls')), url(r'^admin/', include(admin.site.urls)), url(r'^search/', include('search.urls')), )
Remove access to admin site
from django.conf.urls import patterns, include, url from django.contrib import admin admin.autodiscover() urlpatterns = patterns('', # Examples: # url(r'^$', 'pysearch.views.home', name='home'), # url(r'^blog/', include('blog.urls')), url(r'^search/', include('search.urls')), )
INSTALLED_APPS = ( 'oauth_tokens', 'taggit', 'vkontakte_groups', ) OAUTH_TOKENS_VKONTAKTE_CLIENT_ID = 3430034 OAUTH_TOKENS_VKONTAKTE_CLIENT_SECRET = 'b0FwzyKtO8QiQmgWQMTz' OAUTH_TOKENS_VKONTAKTE_SCOPE = ['ads,wall,photos,friends,stats'] OAUTH_TOKENS_VKONTAKTE_USERNAME = '+919665223715' OAUTH_TOKENS_VKONTAKTE_PASSWORD = 'githubovich' OAUTH_TOKENS_VKONTAKTE_PHONE_END = '96652237'
Fix RuntimeError: maximum recursion depth exceeded
INSTALLED_APPS = ( 'oauth_tokens', 'taggit', 'vkontakte_groups', ) OAUTH_TOKENS_VKONTAKTE_CLIENT_ID = 3430034 OAUTH_TOKENS_VKONTAKTE_CLIENT_SECRET = 'b0FwzyKtO8QiQmgWQMTz' OAUTH_TOKENS_VKONTAKTE_SCOPE = ['ads,wall,photos,friends,stats'] OAUTH_TOKENS_VKONTAKTE_USERNAME = '+919665223715' OAUTH_TOKENS_VKONTAKTE_PASSWORD = 'githubovich' OAUTH_TOKENS_VKONTAKTE_PHONE_END = '96652237' # Set VK API Timeout VKONTAKTE_API_REQUEST_TIMEOUT = 7
from models.base_model import BaseModel from datetime import datetime from models.user_model import UserModel from peewee import CharField, TextField, DateTimeField, IntegerField, ForeignKeyField WAIFU_SHARING_STATUS_PRIVATE = 1 WAIFU_SHARING_STATUS_PUBLIC_MODERATION = 2 WAIFU_SHARING_STATUS_PUBLIC = 3 class WaifuModel(BaseModel): class Meta: db_table = 'waifus' name = CharField(max_length=128, null=False) description = TextField(null=False) pic = CharField(max_length=128, null=False) created_at = DateTimeField(null=False, default=datetime.now) updated_at = DateTimeField(null=False, default=datetime.now) rating = IntegerField(null=False, default=0) sharing_status = IntegerField(null=False, default=WAIFU_SHARING_STATUS_PRIVATE) owner = ForeignKeyField(UserModel, related_name='waifus_created_by_me')
Add users count to JSON representation.
from models.base_model import BaseModel from datetime import datetime from models.user_model import UserModel from peewee import CharField, TextField, DateTimeField, IntegerField, ForeignKeyField WAIFU_SHARING_STATUS_PRIVATE = 1 WAIFU_SHARING_STATUS_PUBLIC_MODERATION = 2 WAIFU_SHARING_STATUS_PUBLIC = 3 class WaifuModel(BaseModel): class Meta: db_table = 'waifus' name = CharField(max_length=128, null=False) description = TextField(null=False) pic = CharField(max_length=128, null=False) created_at = DateTimeField(null=False, default=datetime.now) updated_at = DateTimeField(null=False, default=datetime.now) rating = IntegerField(null=False, default=0) sharing_status = IntegerField(null=False, default=WAIFU_SHARING_STATUS_PRIVATE) owner = ForeignKeyField(UserModel, related_name='waifus_created_by_me') def to_json(self): json = super(WaifuModel, self).to_json() json['users_count'] = self.users.count() return json
import sys from time import time class PID(object): def __init__(self): """initizes value for the PID""" self.kd = 0 self.ki = 0 self.kp = 1 self.previous_error = 0 self.integral_error = 0 def set_k_values(self, kp, kd, ki): self.kp = kp self.ki = ki self.kd = kd def pid(self, target, process_var, timestep): current_error = (target - process_var) p_error = self.kp * current_error d_error = self.kd * (current_error - self.previous_error) \ / timestep self.integral_error = ( current_error + self.previous_error) / 2 \ + self.integral_error i_error = self.ki * self.integral_error total_error = p_error + d_error + i_error self.previous_error = current_error return total_error
Update follower; reduce speed sent to motors.
import sys from time import time class PID(object): def __init__(self): """initizes value for the PID""" self.kd = 0 self.ki = 0 self.kp = 1 self.previous_error = 0 self.integral_error = 0 def set_k_values(self, kp, kd, ki): self.kp = kp self.ki = ki self.kd = kd def pid(self, target, process_var, timestep): current_error = (target + process_var) p_error = self.kp * current_error d_error = self.kd * (current_error - self.previous_error) \ / timestep self.integral_error = ( current_error + self.previous_error) / 2 \ + self.integral_error i_error = self.ki * self.integral_error total_error = p_error + d_error + i_error self.previous_error = current_error return total_error
from django.core.exceptions import ValidationError from django.core.validators import MinValueValidator, MaxValueValidator from django.db import models from netaddr import AddrFormatError, EUI, mac_unix_expanded class ASNField(models.BigIntegerField): description = "32-bit ASN field" default_validators = [ MinValueValidator(1), MaxValueValidator(4294967295), ] class mac_unix_expanded_uppercase(mac_unix_expanded): word_fmt = '%.2X' class MACAddressField(models.Field): description = "PostgreSQL MAC Address field" def python_type(self): return EUI def from_db_value(self, value, expression, connection, context): return self.to_python(value) def to_python(self, value): if value is None: return value try: return EUI(value, version=48, dialect=mac_unix_expanded_uppercase) except AddrFormatError as e: raise ValidationError("Invalid MAC address format: {}".format(value)) def db_type(self, connection): return 'macaddr' def get_prep_value(self, value): if not value: return None return str(self.to_python(value))
Remove deprecated context parameter from from_db_value
from django.core.exceptions import ValidationError from django.core.validators import MinValueValidator, MaxValueValidator from django.db import models from netaddr import AddrFormatError, EUI, mac_unix_expanded class ASNField(models.BigIntegerField): description = "32-bit ASN field" default_validators = [ MinValueValidator(1), MaxValueValidator(4294967295), ] class mac_unix_expanded_uppercase(mac_unix_expanded): word_fmt = '%.2X' class MACAddressField(models.Field): description = "PostgreSQL MAC Address field" def python_type(self): return EUI def from_db_value(self, value, expression, connection): return self.to_python(value) def to_python(self, value): if value is None: return value try: return EUI(value, version=48, dialect=mac_unix_expanded_uppercase) except AddrFormatError as e: raise ValidationError("Invalid MAC address format: {}".format(value)) def db_type(self, connection): return 'macaddr' def get_prep_value(self, value): if not value: return None return str(self.to_python(value))
""" InfluxDB Alchemy. """ from .client import InfluxAlchemy from .measurement import Measurement __version__ = "0.1.0"
Use package version for __version__
""" InfluxDB Alchemy. """ import pkg_resources from .client import InfluxAlchemy from .measurement import Measurement try: __version__ = pkg_resources.get_distribution(__package__).version except pkg_resources.DistributionNotFound: # pragma: no cover __version__ = None # pragma: no cover
"""Test that the clang modules cache directory can be controlled.""" from __future__ import print_function import unittest2 import os import time import platform import shutil import lldb from lldbsuite.test.decorators import * from lldbsuite.test.lldbtest import * from lldbsuite.test import lldbutil class ObjCModulesTestCase(TestBase): NO_DEBUG_INFO_TESTCASE = True mydir = TestBase.compute_mydir(__file__) def setUp(self): TestBase.setUp(self) def test_expr(self): self.build() self.main_source_file = lldb.SBFileSpec("main.m") self.runCmd("settings set target.auto-import-clang-modules true") mod_cache = self.getBuildArtifact("my-clang-modules-cache") if os.path.isdir(mod_cache): shutil.rmtree(mod_cache) self.assertFalse(os.path.isdir(mod_cache), "module cache should not exist") self.runCmd('settings set symbols.clang-modules-cache-path "%s"' % mod_cache) self.runCmd('settings set target.clang-module-search-paths "%s"' % self.getSourceDir()) (target, process, thread, bkpt) = lldbutil.run_to_source_breakpoint( self, "Set breakpoint here", self.main_source_file) self.runCmd("expr @import Darwin") self.assertTrue(os.path.isdir(mod_cache), "module cache exists")
Mark ObjC testcase as skipUnlessDarwin and fix a typo in test function. git-svn-id: 4c4cc70b1ef44ba2b7963015e681894188cea27e@326640 91177308-0d34-0410-b5e6-96231b3b80d8 (cherry picked from commit cb9b1a2163f960e34721f74bad30622fda71e43b)
"""Test that the clang modules cache directory can be controlled.""" from __future__ import print_function import unittest2 import os import time import platform import shutil import lldb from lldbsuite.test.decorators import * from lldbsuite.test.lldbtest import * from lldbsuite.test import lldbutil class ObjCModulesTestCase(TestBase): NO_DEBUG_INFO_TESTCASE = True mydir = TestBase.compute_mydir(__file__) def setUp(self): TestBase.setUp(self) @skipUnlessDarwin def test_expr(self): self.build() self.main_source_file = lldb.SBFileSpec("main.m") self.runCmd("settings set target.auto-import-clang-modules true") mod_cache = self.getBuildArtifact("my-clang-modules-cache") if os.path.isdir(mod_cache): shutil.rmtree(mod_cache) self.assertFalse(os.path.isdir(mod_cache), "module cache should not exist") self.runCmd('settings set symbols.clang-modules-cache-path "%s"' % mod_cache) self.runCmd('settings set target.clang-module-search-paths "%s"' % self.getSourceDir()) (target, process, thread, bkpt) = lldbutil.run_to_source_breakpoint( self, "Set breakpoint here", self.main_source_file) self.runCmd("expr @import Foo") self.assertTrue(os.path.isdir(mod_cache), "module cache exists")
import os import site # get site-packages into sys.path import sys # add local addons folder to sys.path so blender finds it sys.path = ( [os.path.join(os.path.dirname(__file__), '..', 'scripts', 'addons')] + sys.path ) # run sphinx builder # this assumes that the builder is called as # "blender --background --factory-startup --python blender-sphinx-build.py -- ..." # pass the correct arguments by dropping the arguments prior to -- import sphinx argv = ['blender-sphinx-build'] + sys.argv[6:] sphinx.main(argv=argv)
Correct sys.path when generating docs.
import os import site # get site-packages into sys.path import sys # add local addons folder to sys.path so blender finds it sys.path = ( [os.path.join(os.path.dirname(__file__), '..')] + sys.path ) # run sphinx builder # this assumes that the builder is called as # "blender --background --factory-startup --python blender-sphinx-build.py -- ..." # pass the correct arguments by dropping the arguments prior to -- import sphinx argv = ['blender-sphinx-build'] + sys.argv[6:] sphinx.main(argv=argv)
from django.db import models from django.urls import reverse class Document(models.Model): FILE_TYPES = ('md', 'txt') repo = models.ForeignKey('interface.Repo', related_name='documents') path = models.TextField() filename = models.TextField() body = models.TextField(blank=True) commit_date = models.DateTimeField() def __str__(self): return '{}/{}'.format(self.path, self.filename) @property def github_view_link(self): return 'https://github.com/{0}/blob/{1}{2}'.format(self.repo.full_name, self.repo.wiki_branch, str(self)) @property def github_edit_link(self): return 'https://github.com/{0}/edit/{1}{2}'.format(self.repo.full_name, self.repo.wiki_branch, str(self)) def get_absolute_url(self): return reverse('repo_detail', kwargs={'full_name': self.repo.full_name, 'path': str(self)}) class Meta: unique_together = ('repo', 'path', 'filename')
Move Document.__str__ to named method
from django.db import models from django.urls import reverse class Document(models.Model): FILE_TYPES = ('md', 'txt') repo = models.ForeignKey('interface.Repo', related_name='documents') path = models.TextField() filename = models.TextField() body = models.TextField(blank=True) commit_date = models.DateTimeField() def __str__(self): return self.full_path @property def full_path(self): return '{}/{}'.format(self.path, self.filename) @property def github_view_link(self): return 'https://github.com/{0}/blob/{1}{2}'.format(self.repo.full_name, self.repo.wiki_branch, self.full_path) @property def github_edit_link(self): return 'https://github.com/{0}/edit/{1}{2}'.format(self.repo.full_name, self.repo.wiki_branch, self.full_path) def get_absolute_url(self): return reverse('repo_detail', kwargs={'full_name': self.repo.full_name, 'path': self.full_path}) class Meta: unique_together = ('repo', 'path', 'filename')
# -*- coding: utf-8 -*- # Define your item pipelines here # # Don't forget to add your pipeline to the ITEM_PIPELINES setting # See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html from scrapy.exceptions import DropItem class DuplicatesPipeline(object): def __init__(self): self.ids_seen = set() def process_item(self, item, spider): ref = item['ref'] if ref in self.ids_seen: raise DropItem("Duplicate item found: %s" % item) else: self.ids_seen.add(ref) return item class ApplySpiderNamePipeline(object): def process_item(self, item, spider): existing_extras = item.get('extras', {}) existing_extras['@spider'] = spider.name item['extras'] = existing_extras return item
Include spider name in item dedupe pipeline
# -*- coding: utf-8 -*- # Define your item pipelines here # # Don't forget to add your pipeline to the ITEM_PIPELINES setting # See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html from scrapy.exceptions import DropItem class DuplicatesPipeline(object): def __init__(self): self.ids_seen = set() def process_item(self, item, spider): ref = (spider.name, item['ref']) if ref in self.ids_seen: raise DropItem("Duplicate item found: %s" % item) else: self.ids_seen.add(ref) return item class ApplySpiderNamePipeline(object): def process_item(self, item, spider): existing_extras = item.get('extras', {}) existing_extras['@spider'] = spider.name item['extras'] = existing_extras return item
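Keying the seen-set on a (spider, ref) tuple means identical refs from different spiders no longer collide; a quick sketch of the difference:

ids_seen = set()
ids_seen.add(("spider_a", "ref-1"))
print(("spider_a", "ref-1") in ids_seen)  # True: dropped as a duplicate
print(("spider_b", "ref-1") in ids_seen)  # False: kept, different spider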
# coding: utf-8 from Crypto.Cipher import AES import base64 import random import string from modoboa.lib import parameters def random_key(l=16): """Generate a random key :param integer l: the key's length :return: a string """ char_set = string.digits + string.letters + string.punctuation return ''.join(random.sample(char_set * l, l)) def encrypt(clear): key = parameters.get_admin("SECRET_KEY", app="core") obj = AES.new(key, AES.MODE_ECB) if type(clear) is unicode: clear = clear.encode("utf-8") if len(clear) % AES.block_size: clear += " " * (AES.block_size - len(clear) % AES.block_size) ciph = obj.encrypt(clear) ciph = base64.b64encode(ciph) return ciph def decrypt(ciph): obj = AES.new( parameters.get_admin("SECRET_KEY", app="core"), AES.MODE_ECB ) ciph = base64.b64decode(ciph) clear = obj.decrypt(ciph) return clear.rstrip(' ') def get_password(request): return decrypt(request.session["password"])
Make sure the key has the required size. See #867.
# coding: utf-8 """Crypto related utilities.""" import base64 import random import string from Crypto.Cipher import AES from modoboa.lib import parameters def random_key(l=16): """Generate a random key. :param integer l: the key's length :return: a string """ population = string.digits + string.letters + string.punctuation while True: key = "".join(random.sample(population * l, l)) if len(key) == l: return key def encrypt(clear): key = parameters.get_admin("SECRET_KEY", app="core") obj = AES.new(key, AES.MODE_ECB) if type(clear) is unicode: clear = clear.encode("utf-8") if len(clear) % AES.block_size: clear += " " * (AES.block_size - len(clear) % AES.block_size) ciph = obj.encrypt(clear) ciph = base64.b64encode(ciph) return ciph def decrypt(ciph): obj = AES.new( parameters.get_admin("SECRET_KEY", app="core"), AES.MODE_ECB ) ciph = base64.b64decode(ciph) clear = obj.decrypt(ciph) return clear.rstrip(' ') def get_password(request): return decrypt(request.session["password"])
import time from django.test import TestCase from django.contrib.auth.models import User from django.conf import settings from rest_framework.renderers import JSONRenderer from rest_framework.parsers import JSONParser from io import BytesIO import json from login.models import Profile, AmbulancePermission, HospitalPermission from login.serializers import ExtendedProfileSerializer from ambulance.models import Ambulance, \ AmbulanceStatus, AmbulanceCapability from ambulance.serializers import AmbulanceSerializer from hospital.models import Hospital, \ Equipment, HospitalEquipment, EquipmentType from hospital.serializers import EquipmentSerializer, \ HospitalSerializer, HospitalEquipmentSerializer from django.test import Client from .client import MQTTTestCase, MQTTTestClient from ..client import MQTTException from ..subscribe import SubscribeClient class TestMQTT1(MQTTTestCase): def test(self): self.assertEqual(True, True) class TestMQTT2(MQTTTestCase): def test(self): self.assertEqual(True, True)
Add more time to mqtt.test.client
import time from django.test import TestCase from django.contrib.auth.models import User from django.conf import settings from rest_framework.renderers import JSONRenderer from rest_framework.parsers import JSONParser from io import BytesIO import json from login.models import Profile, AmbulancePermission, HospitalPermission from login.serializers import ExtendedProfileSerializer from ambulance.models import Ambulance, \ AmbulanceStatus, AmbulanceCapability from ambulance.serializers import AmbulanceSerializer from hospital.models import Hospital, \ Equipment, HospitalEquipment, EquipmentType from hospital.serializers import EquipmentSerializer, \ HospitalSerializer, HospitalEquipmentSerializer from django.test import Client from .client import MQTTTestCase, MQTTTestClient from ..client import MQTTException from ..subscribe import SubscribeClient class TestMQTT1(MQTTTestCase): def test(self): import sys from django.core.management.base import OutputWrapper from django.core.management.color import color_style, no_style # seed from django.core import management management.call_command('mqttseed', verbosity=1) print('>> Processing messages...') self.assertEqual(True, True) class TestMQTT2(MQTTTestCase): def test(self): self.assertEqual(True, True)
import cassiopeia as cass from cassiopeia.core import Summoner def test_cass(): name = "Kalturi" masteries = cass.get_masteries() for mastery in masteries: print(mastery.name) if __name__ == "__main__": test_cass()
Remove redundant import, change function name.
import cassiopeia as cass def print_masteries(): for mastery in cass.get_masteries(): print(mastery.name) if __name__ == "__main__": print_masteries()
#!/usr/bin/env python3 # -*- coding: utf-8 -*- from wdom.log import configure_logger configure_logger()
Revert "configure logger at initialization"
#!/usr/bin/env python3 # -*- coding: utf-8 -*-
import sys, os myPath = os.path.dirname(os.path.abspath(__file__)) print(myPath) sys.path.insert(0, myPath + '/../SATSolver') from unittest import TestCase from individual import Individual from BitVector import BitVector from bitarray import bitarray class TestIndividual(TestCase): """ Testing class for Individual. """ def test_get(self): ind = Individual(9) ind.data = bitarray("011010100") self.assertEqual(ind.get(5), 1) self.assertEqual(ind.get(1), 0) self.assertEqual(ind.get(10), None) def test_set(self): ind = Individual(9) ind.data = bitarray("011010100") ind.set(2, 1) self.assertEqual(ind.get(2), 1) ind.set(7, 0) self.assertEqual(ind.get(7), 0) ind.set(6, 1) self.assertEqual(ind.get(6), 1) def test_flip(self): ind = Individual(9) ind.data = bitarray("011010100") ind.flip(1) self.assertEqual(ind.get(1), 1) ind.flip(8) self.assertEqual(ind.get(8), 1) ind.flip(4) self.assertEqual(ind.get(4), 1)
Remove BitVector import - Build fails
import sys, os myPath = os.path.dirname(os.path.abspath(__file__)) print(myPath) sys.path.insert(0, myPath + '/../SATSolver') from unittest import TestCase from individual import Individual from bitarray import bitarray class TestIndividual(TestCase): """ Testing class for Individual. """ def test_get(self): ind = Individual(9) ind.data = bitarray("011010100") self.assertEqual(ind.get(5), 1) self.assertEqual(ind.get(1), 0) self.assertEqual(ind.get(10), None) def test_set(self): ind = Individual(9) ind.data = bitarray("011010100") ind.set(2, 1) self.assertEqual(ind.get(2), 1) ind.set(7, 0) self.assertEqual(ind.get(7), 0) ind.set(6, 1) self.assertEqual(ind.get(6), 1) def test_flip(self): ind = Individual(9) ind.data = bitarray("011010100") ind.flip(1) self.assertEqual(ind.get(1), 1) ind.flip(8) self.assertEqual(ind.get(8), 1) ind.flip(4) self.assertEqual(ind.get(4), 1)
from __future__ import division, print_function, unicode_literals __version__ = '0.1.2' from hindkit.constants import paths, linguistics, styles, templates from hindkit.family import Family, Master, Style from hindkit.builder import Builder def confirm_version(required_version): if __version__ != required_version: message = templates.EXIT_MESSAGE.format(required_version, __version__) raise SystemExit(message)
Change the version number to 0.1.3
from __future__ import division, print_function, unicode_literals __version__ = '0.1.3' from hindkit.constants import paths, linguistics, styles, templates from hindkit.family import Family, Master, Style from hindkit.builder import Builder def confirm_version(required_version): if __version__ != required_version: message = templates.EXIT_MESSAGE.format(required_version, __version__) raise SystemExit(message)
import glob, os.path, sys version = sys.version.split(" ")[0] majorminor = version[0:3] # Add path to hiredis.so load path path = glob.glob("build/lib*-%s/hiredis/*.so" % majorminor)[0] sys.path.insert(0, os.path.dirname(path)) from unittest import * from . import reader def tests(): suite = TestSuite() suite.addTest(makeSuite(reader.ReaderTest)) return suite
Fix build path detection on SunOS Inside the hiredis directory there is another directory that contains the shared object. This is specific to the platform so we shouldn't care where the shared object itself is placed.
import glob, os.path, sys version = sys.version.split(" ")[0] majorminor = version[0:3] # Add path to hiredis.so load path path = glob.glob("build/lib*-%s/hiredis" % majorminor)[0] sys.path.insert(0, path) from unittest import * from . import reader def tests(): suite = TestSuite() suite.addTest(makeSuite(reader.ReaderTest)) return suite
from chainer.backends import chainerx # NOQA from chainer.backends import cpu # NOQA from chainer.backends import cuda # NOQA from chainer.backends import intel64 # NOQA
Add TODO to refactor backend registration
from chainer.backends import chainerx # NOQA from chainer.backends import cpu # NOQA from chainer.backends import cuda # NOQA from chainer.backends import intel64 # NOQA # TODO(niboshi): Refactor registration of backend modules for functions like # chainer.get_device().
"""Directives and roles for documenting traitlets config options. :: .. configtrait:: Application.log_datefmt Description goes here. Cross reference like this: :configtrait:`Application.log_datefmt`. """ from sphinx.locale import l_ from sphinx.util.docfields import Field def setup(app): app.add_object_type('configtrait', 'configtrait', objname='Config option') metadata = {'parallel_read_safe': True, 'parallel_write_safe': True} return metadata
Fix compatibility with the latest release of Sphinx `l_` from sphinx.locale has been deprecated for a long time. `_` is the new name for the same function but it seems that the imports there are useless. https://github.com/sphinx-doc/sphinx/commit/8d653a406dc0dc6c2632176ab4757ca15474b10f
"""Directives and roles for documenting traitlets config options. :: .. configtrait:: Application.log_datefmt Description goes here. Cross reference like this: :configtrait:`Application.log_datefmt`. """ def setup(app): app.add_object_type('configtrait', 'configtrait', objname='Config option') metadata = {'parallel_read_safe': True, 'parallel_write_safe': True} return metadata
""" nydus.db.routers ~~~~~~~~~~~~~~~~ :copyright: (c) 2011 DISQUS. :license: Apache License 2.0, see LICENSE for more details. """ from .base import BaseRouter, RoundRobinRouter
Add partition router to base
""" nydus.db.routers ~~~~~~~~~~~~~~~~ :copyright: (c) 2011 DISQUS. :license: Apache License 2.0, see LICENSE for more details. """ from .base import BaseRouter, RoundRobinRouter, PartitionRouter
import requests def register_asheesh(): return requests.post( 'http://localhost:3000/register', {'rawHostname': 'asheesh', 'email': '[email protected]', 'pubkey': open('snakeoil-sample-certs/ssl-cert-snakeoil.pubkey').read()}, ) def register_asheesh2_bad_key_type(): return requests.post( 'http://localhost:3000/register', {'rawHostname': 'asheesh2', 'email': '[email protected]', 'pubkey': open('snakeoil-sample-certs/ssl-cert-snakeoil.pem').read()}, )
Add non-working test for X-Forwarded-For
import requests def register_asheesh(): return requests.post( 'http://localhost:3000/register', {'rawHostname': 'asheesh', 'email': '[email protected]', 'pubkey': open('snakeoil-sample-certs/ssl-cert-snakeoil.pubkey').read()}, ) def register_asheesh2_bad_key_type(): return requests.post( 'http://localhost:3000/register', {'rawHostname': 'asheesh2', 'email': '[email protected]', 'pubkey': open('snakeoil-sample-certs/ssl-cert-snakeoil.pem').read()}, ) def register_asheesh3_x_forwarded_for(): # Provide the HTTP_FORWARDED_COUNT=1 environment variable to # Meteor before running this test. # # FIXME: This doesn't pass, but for now, I'm not *that* worried. return requests.post( 'http://localhost:3000/register', data={'rawHostname': 'asheesh3', 'email': '[email protected]', 'pubkey': open('snakeoil-sample-certs/ssl-cert-snakeoil.pubkey').read()}, headers={'X-Forwarded-For': '128.151.2.1'}, )
from django.conf import settings import stripe stripe.api_key = settings.STRIPE_API_KEY stripe.api_version = "2018-10-31" class StripeGateway: """A gateway to Stripe This insulates the rest of the system from Stripe errors and configures the Stripe module with the API key. """ def create_customer(self, user: settings.AUTH_USER_MODEL, stripe_token: str) -> str: """Add a user to Stripe and join them to the plan.""" # Let this fail on purpose. If it fails, the error monitoring system # will log it and I'll learn how to harden it for the conductor env. customer = stripe.Customer.create(email=user.email, source=stripe_token) stripe.Subscription.create( customer=customer.id, items=[{"plan": settings.STRIPE_PLAN}], trial_from_plan=True, ) return customer.id stripe_gateway = StripeGateway()
Remove pinned Stripe API version.
from django.conf import settings import stripe stripe.api_key = settings.STRIPE_API_KEY class StripeGateway: """A gateway to Stripe This insulates the rest of the system from Stripe errors and configures the Stripe module with the API key. """ def create_customer(self, user: settings.AUTH_USER_MODEL, stripe_token: str) -> str: """Add a user to Stripe and join them to the plan.""" # Let this fail on purpose. If it fails, the error monitoring system # will log it and I'll learn how to harden it for the conductor env. customer = stripe.Customer.create(email=user.email, source=stripe_token) stripe.Subscription.create( customer=customer.id, items=[{"plan": settings.STRIPE_PLAN}], trial_from_plan=True, ) return customer.id stripe_gateway = StripeGateway()
import pytest from parglare import Grammar from parglare.exceptions import GrammarError def test_terminal_nonterminal_conflict(): # Production A is a terminal ("a") and non-terminal at the same time. g = """ A = "a" | B; B = "b"; """ try: Grammar.from_string(g) assert False except GrammarError as e: assert 'Multiple definition' in str(e) def test_multiple_terminal_definition(): g = """ S = A A; A = "a"; A = "b"; """ try: Grammar.from_string(g) assert False except GrammarError as e: assert 'Multiple definition' in str(e)
Fix tests for terminal definitions.
import pytest from parglare import Grammar def test_terminal_nonterminal(): # Production A is a terminal ("a") and non-terminal at the same time. # Thus, it must be recognized as non-terminal. g = """ S = A B; A = "a" | B; B = "b"; """ Grammar.from_string(g) # Here A shoud be non-terminal while B will be terminal. g = """ S = A B; A = B; B = "b"; """ Grammar.from_string(g) def test_multiple_terminal_definition(): # A is defined multiple times as terminal thus it must be recognized # as non-terminal with alternative expansions. g = """ S = A A; A = "a"; A = "b"; """ Grammar.from_string(g)
# Copyright (C) 2016 University of Zurich. All rights reserved. # # This file is part of MSRegistry Backend. # # MSRegistry Backend is free software: you can redistribute it and/or # modify it under the terms of the version 3 of the GNU Affero General # Public License as published by the Free Software Foundation, or any # other later version. # # MSRegistry Backend is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the version # 3 of the GNU Affero General Public License for more details. # # You should have received a copy of the version 3 of the GNU Affero # General Public License along with MSRegistry Backend. If not, see # <http://www.gnu.org/licenses/>. __author__ = "Filippo Panessa <[email protected]>" __copyright__ = ("Copyright (c) 2016 S3IT, Zentrale Informatik," " University of Zurich") from . import auth from ..decorators import requires_auth @auth.route('/test') @requires_auth def authTest(): return "All good. You only get this message if you're authenticated."
Use JSON for API GET /auth/test response
# Copyright (C) 2016 University of Zurich. All rights reserved. # # This file is part of MSRegistry Backend. # # MSRegistry Backend is free software: you can redistribute it and/or # modify it under the terms of the version 3 of the GNU Affero General # Public License as published by the Free Software Foundation, or any # other later version. # # MSRegistry Backend is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the version # 3 of the GNU Affero General Public License for more details. # # You should have received a copy of the version 3 of the GNU Affero # General Public License along with MSRegistry Backend. If not, see # <http://www.gnu.org/licenses/>. __author__ = "Filippo Panessa <[email protected]>" __copyright__ = ("Copyright (c) 2016 S3IT, Zentrale Informatik," " University of Zurich") from flask import jsonify from . import auth from ..decorators import requires_auth @auth.route('/test') @requires_auth def authTest(): return jsonify({'code': 'authorization_success', 'description': "All good. You only get this message if you're authenticated."})
import datetime from django.core.management.base import BaseCommand, CommandError from pontoon.administration.files import ( update_from_repository, extract_to_database, ) from pontoon.base.models import Project class Command(BaseCommand): args = '<project_id project_id ...>' help = 'Update projects from repositories and store changes to database' def handle(self, *args, **options): projects = Project.objects.all() if args: projects = projects.filter(pk__in=args) else: self.stdout.write(self.help.upper()) for project in projects: try: update_from_repository(project) extract_to_database(project) now = datetime.datetime.now() self.stdout.write( '[%s]: Updated project %s\n' % (now, project)) except Exception as e: now = datetime.datetime.now() raise CommandError( '[%s]: Update error: %s\n' % (now, unicode(e)))
Print new line after operation title
import datetime from django.core.management.base import BaseCommand, CommandError from pontoon.administration.files import ( update_from_repository, extract_to_database, ) from pontoon.base.models import Project class Command(BaseCommand): args = '<project_id project_id ...>' help = 'Update projects from repositories and store changes to database' def handle(self, *args, **options): projects = Project.objects.all() if args: projects = projects.filter(pk__in=args) else: self.stdout.write('%s\n' % self.help.upper()) for project in projects: try: update_from_repository(project) extract_to_database(project) now = datetime.datetime.now() self.stdout.write( '[%s]: Updated project %s\n' % (now, project)) except Exception as e: now = datetime.datetime.now() raise CommandError( '[%s]: Update error: %s\n' % (now, unicode(e)))
#!/bin/env python # # cat_StartdLog.py # # Print out the StartdLog for a glidein output file # # Usage: cat_StartdLog.py logname # import sys STARTUP_DIR=sys.path[0] sys.path.append(os.path.join(STARTUP_DIR,"lib")) import gWftLogParser USAGE="Usage: cat_StartdLog.py <logname>" def main(): try: print gWftLogParser.get_CondorLog(sys.argv[1],"StartdLog") except: sys.stderr.write("%s\n"%USAGE) sys.exit(1) if __name__ == '__main__': main()
Allow for startup in a different dir
#!/bin/env python # # cat_StartdLog.py # # Print out the StartdLog for a glidein output file # # Usage: cat_StartdLog.py logname # import os.path import sys STARTUP_DIR=sys.path[0] sys.path.append(os.path.join(STARTUP_DIR,"lib")) import gWftLogParser USAGE="Usage: cat_StartdLog.py <logname>" def main(): try: print gWftLogParser.get_CondorLog(sys.argv[1],"StartdLog") except: sys.stderr.write("%s\n"%USAGE) sys.exit(1) if __name__ == '__main__': main()
""" Utility functions for dhcp2nest """ from queue import Queue from subprocess import Popen, PIPE from threading import Thread def follow_file(fn, max_lines=100): """ Return a Queue that is fed lines (up to max_lines) from the given file (fn) continuously The implementation given here was inspired by http://stackoverflow.com/questions/12523044/how-can-i-tail-a-log-file-in-python """ fq = Queue(maxsize=max_lines) def _follow_file_thread(fn, fq): """ Queue lines from the given file (fn) continuously, even as the file grows or is replaced WARNING: This generator will block forever on the tail subprocess--no timeouts are enforced. """ # Use system tail with name-based following and retry p = Popen(["tail", "-n0", "-F", fn], stdout=PIPE) # Loop forever on pulling data from tail line = True while line: line = p.stdout.readline().decode('utf-8') fq.put(line) # Spawn a thread to read data from tail Thread(target=_follow_file_thread, args=(fn, fq)).start() # Return the queue return fq
Use daemon threads for follow_file() Signed-off-by: Jason Bernardino Alonso <[email protected]>
""" Utility functions for dhcp2nest """ from queue import Queue from subprocess import Popen, PIPE from threading import Thread def follow_file(fn, max_lines=100): """ Return a Queue that is fed lines (up to max_lines) from the given file (fn) continuously The implementation given here was inspired by http://stackoverflow.com/questions/12523044/how-can-i-tail-a-log-file-in-python """ fq = Queue(maxsize=max_lines) def _follow_file_thread(fn, fq): """ Queue lines from the given file (fn) continuously, even as the file grows or is replaced WARNING: This generator will block forever on the tail subprocess--no timeouts are enforced. """ # Use system tail with name-based following and retry p = Popen(["tail", "-n0", "-F", fn], stdout=PIPE) # Loop forever on pulling data from tail line = True while line: line = p.stdout.readline().decode('utf-8') fq.put(line) # Spawn a thread to read data from tail Thread(target=_follow_file_thread, args=(fn, fq), daemon=True).start() # Return the queue return fq
""" TESTS is a dict with all you tests. Keys for this will be categories' names. Each test is dict with "input" -- input data for user function "answer" -- your right answer "explanation" -- not necessary key, it's using for additional info in animation. """ TESTS = { "Basics": [ { "input": [2, 3], "answer": 5, "explanation": "2+3=?" }, { "input": [2, 7], "answer": 9, "explanation": "2+7=?" } ], "Additional": [ { "input": [6, 3], "answer": 9, "explanation": "6+3=?" }, { "input": [6, 7], "answer": 13, "explanation": "6+7=?" } ] }
Rename test category in example for ordering
""" TESTS is a dict with all you tests. Keys for this will be categories' names. Each test is dict with "input" -- input data for user function "answer" -- your right answer "explanation" -- not necessary key, it's using for additional info in animation. """ TESTS = { "Basics": [ { "input": [2, 3], "answer": 5, "explanation": "2+3=?" }, { "input": [2, 7], "answer": 9, "explanation": "2+7=?" } ], "Extra": [ { "input": [6, 3], "answer": 9, "explanation": "6+3=?" }, { "input": [6, 7], "answer": 13, "explanation": "6+7=?" } ] }
from django.conf.urls import patterns from django.conf.urls import url from rest_framework_swagger.views import SwaggerResourcesView, SwaggerApiView, SwaggerUIView urlpatterns = patterns( '', url(r'^$', SwaggerUIView.as_view(), name="django.swagger.base.view"), url(r'^api-docs/$', SwaggerResourcesView.as_view(), name="django.swagger.resources.view"), url(r'^api-docs/(?P<path>.*)/?$', SwaggerApiView.as_view(), name='django.swagger.api.view'), )
Use the new style urlpatterns syntax to fix Django deprecation warnings The `patterns()` syntax is now deprecated: https://docs.djangoproject.com/en/1.8/releases/1.8/#django-conf-urls-patterns And so under Django 1.8 results in warnings: rest_framework_swagger/urls.py:10: RemovedInDjango110Warning: django.conf.urls.patterns() is deprecated and will be removed in Django 1.10. Update your urlpatterns to be a list of django.conf.urls.url() instances instead. Fixes #380.
from django.conf.urls import url from rest_framework_swagger.views import SwaggerResourcesView, SwaggerApiView, SwaggerUIView urlpatterns = [ url(r'^$', SwaggerUIView.as_view(), name="django.swagger.base.view"), url(r'^api-docs/$', SwaggerResourcesView.as_view(), name="django.swagger.resources.view"), url(r'^api-docs/(?P<path>.*)/?$', SwaggerApiView.as_view(), name='django.swagger.api.view'), ]
class Solution:
    # @param {string} s
    # @param {string} t
    # @return {boolean}
    def isIsomorphic(self, s, t):
        if len(s) != len(t):
            return False
        charDict = {}
        for i, c in enumerate(s):
            if c not in charDict.keys() and t[i] not in charDict.values():
                charDict[c] = t[i]
            elif t[i] in charDict.values() or charDict[c] != t[i]:
                return False
        return True

if __name__ == '__main__':
    test_list = [["ab","aa"],["aa", "bb"]]
    result_list = [False, True]
    success = True
    solution = Solution()
    for i, s in enumerate(test_list):
        result = solution.isIsomorphic(s[0], s[1])
        if result != result_list[i]:
            success = False
            print s
            print 'Expected value', result_list[i]
            print 'Actual value', result
    if success:
        print 'All the tests passed.'
    else:
        print 'Please fix the failed test'
Add solution for the isomorphic strings problem
class Solution: # @param {string} s # @param {string} t # @return {boolean} def isIsomorphic(self, s, t): if len(s) != len(t): return False charDict = {} for i, c in enumerate(s): if c not in charDict.keys() and t[i] not in charDict.values(): charDict[c] = t[i] elif c in charDict.keys() and charDict[c] != t[i]: return False elif t[i] in charDict.values(): if c not in charDict.keys(): return False elif charDict[c] != t[i]: return False return True if __name__ == '__main__': test_list = [["ab","aa"],["aa", "bb"], ["egg", "add"],["foo","bar"],["paper","title"]] result_list = [False, True, True, False, True] success = True solution = Solution() for i, s in enumerate(test_list): result = solution.isIsomorphic(s[0], s[1]) if result != result_list[i]: success = False print s print 'Expected value', result_list[i] print 'Actual value', result if success: print 'All the tests passed.' else: print 'Please fix the failed test'
from rq import Connection, Queue, Worker from worker.exceptions import WorkerException def worker_exc_handler(job, exc_type, exc_value, traceback): if isinstance(exc_type, WorkerException): job.meta['exc_code'] = exc_type.code job.meta['exc_message'] = exc_type.message return True def main(): with Connection(): q = Queue() worker = Worker([q]) worker.push_exc_handler(worker_exc_handler) worker.work() if __name__ == '__main__': main()
Fix exception subclass in worker exception handler
from rq import Connection, Queue, Worker from worker.exceptions import WorkerException def worker_exc_handler(job, exc_type, exc_value, traceback): if issubclass(exc_type, WorkerException): job.meta['exc_code'] = exc_type.code job.meta['exc_message'] = exc_type.message return True def main(): with Connection(): q = Queue() worker = Worker([q]) worker.push_exc_handler(worker_exc_handler) worker.work() if __name__ == '__main__': main()
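The distinction matters because RQ passes the handler an exception class, not an instance, so isinstance against that argument is always False. A minimal sketch:

class WorkerException(Exception):
    code = 1
    message = "boom"

exc_type = WorkerException                     # what the handler receives
print(isinstance(exc_type, WorkerException))   # False: it's a class
print(issubclass(exc_type, WorkerException))   # True: the correct check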
from django.forms import widgets from wagtail.admin.edit_handlers import RichTextFieldPanel from wagtail.admin.rich_text.converters.contentstate import ContentstateConverter from wagtail.core.rich_text import features class DraftailRichTextArea(widgets.Textarea): # this class's constructor accepts a 'features' kwarg accepts_features = True def get_panel(self): return RichTextFieldPanel def __init__(self, *args, **kwargs): self.options = kwargs.pop('options', None) self.features = kwargs.pop('features', None) if self.features is None: self.features = features.get_default_features() self.converter = ContentstateConverter(self.features) super().__init__(*args, **kwargs) def render(self, name, value, attrs=None): if value is None: translated_value = None else: translated_value = self.converter.from_database_format(value) return super().render(name, translated_value, attrs) def value_from_datadict(self, data, files, name): original_value = super().value_from_datadict(data, files, name) if original_value is None: return None return self.converter.to_database_format(original_value)
Integrate Draftail-related assets with Django widget
import json

from django.forms import Media, widgets

from wagtail.admin.edit_handlers import RichTextFieldPanel
from wagtail.admin.rich_text.converters.contentstate import ContentstateConverter
from wagtail.core.rich_text import features


class DraftailRichTextArea(widgets.Textarea):
    # this class's constructor accepts a 'features' kwarg
    accepts_features = True

    def get_panel(self):
        return RichTextFieldPanel

    def __init__(self, *args, **kwargs):
        self.options = kwargs.pop('options', None)

        self.features = kwargs.pop('features', None)
        if self.features is None:
            self.features = features.get_default_features()

        self.converter = ContentstateConverter(self.features)

        super().__init__(*args, **kwargs)

    def render(self, name, value, attrs=None):
        if value is None:
            translated_value = None
        else:
            translated_value = self.converter.from_database_format(value)
        return super().render(name, translated_value, attrs)

    def render_js_init(self, id_, name, value):
        return "window.draftail.initEditor('{name}', {opts})".format(
            name=name, opts=json.dumps(self.options))

    def value_from_datadict(self, data, files, name):
        original_value = super().value_from_datadict(data, files, name)
        if original_value is None:
            return None
        return self.converter.to_database_format(original_value)

    @property
    def media(self):
        return Media(js=[
            'wagtailadmin/js/draftail.js',
        ], css={
            'all': ['wagtailadmin/css/panels/draftail.css']
        })
from flask.ext.restful import fields from meta import BasicResource from config.pins import PinHttpManager from pi_gpio import app HTTP_MANAGER = PinHttpManager() class Pin(BasicResource): def __init__(self): super(Pin, self).__init__() self.fields = { "num": fields.Integer, "mode": fields.String, "value": fields.Integer } def pin_not_found(self): return {'message': 'Pin not found'}, 404 class PinList(Pin): def get(self): result = HTTP_MANAGER.read_all() return self.response(result, 200) class PinDetail(Pin): def get(self, pin_num): result = HTTP_MANAGER.read_one(pin_num) if not result: return self.pin_not_found() return self.response(result, 200) def patch(self, pin_num): self.parser.add_argument('value', type=int) args = self.parser.parse_args() result = HTTP_MANAGER.update_value(pin_num, args['value']) if not result: return self.pin_not_found() return self.response(HTTP_MANAGER.read_one(pin_num), 200)
Add new fields to response
from flask.ext.restful import fields from meta import BasicResource from config.pins import PinHttpManager from pi_gpio import app HTTP_MANAGER = PinHttpManager() class Pin(BasicResource): def __init__(self): super(Pin, self).__init__() self.fields = { "num": fields.Integer, "mode": fields.String, "value": fields.Integer, "resistor": fields.String, "initial": fields.String, "event": fields.String, "bounce": fields.Integer } def pin_not_found(self): return {'message': 'Pin not found'}, 404 class PinList(Pin): def get(self): result = HTTP_MANAGER.read_all() return self.response(result, 200) class PinDetail(Pin): def get(self, pin_num): result = HTTP_MANAGER.read_one(pin_num) if not result: return self.pin_not_found() return self.response(result, 200) def patch(self, pin_num): self.parser.add_argument('value', type=int) args = self.parser.parse_args() result = HTTP_MANAGER.update_value(pin_num, args['value']) if not result: return self.pin_not_found() return self.response(HTTP_MANAGER.read_one(pin_num), 200)
#!/usr/bin/env python import neukrill_net.utils as utils import neukrill_net.image_processing as image_processing import csv import pickle from sklearn.externals import joblib import numpy as np import glob import os def main(): settings = utils.Settings('settings.json') image_fname_dict = settings.image_fnames processing = lambda image: image_processing.resize_image(image, (48,48)) X, names = utils.load_data(image_fname_dict, processing=processing, verbose=True) clf = joblib.load('model.pkl') p = clf.predict_proba(X) with open('submission.csv', 'w') as csv_out: out_writer = csv.writer(csv_out, delimiter=',') out_writer.writerow(['image'] + list(settings.classes)) for index in range(len(names)): out_writer.writerow([names[index]] + list(p[index,])) if __name__ == '__main__': main()
Swap to using submission prediction writer function
#!/usr/bin/env python import neukrill_net.utils as utils import neukrill_net.image_processing as image_processing import csv import pickle from sklearn.externals import joblib import numpy as np import glob import os def main(): settings = utils.Settings('settings.json') image_fname_dict = settings.image_fnames processing = lambda image: image_processing.resize_image(image, (48,48)) X, names = utils.load_data(image_fname_dict, processing=processing, verbose=True) clf = joblib.load('model.pkl') p = clf.predict_proba(X) utils.write_predictions('submission.csv', p, names, settings) if __name__ == '__main__': main()
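The utils.write_predictions helper itself is not shown in this record. A plausible reconstruction, inferred from the inline CSV loop it replaces — the signature and body are assumptions, not the project's actual source:

import csv

def write_predictions(out_fname, p, names, settings):
    # Hypothetical sketch: one header row of class names, then one row of
    # class probabilities per image, mirroring the removed inline loop.
    with open(out_fname, 'w') as csv_out:
        out_writer = csv.writer(csv_out, delimiter=',')
        out_writer.writerow(['image'] + list(settings.classes))
        for index in range(len(names)):
            out_writer.writerow([names[index]] + list(p[index, ]))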
from alexandria import app, mongo from decorators import * from flask import render_template, request, jsonify, g, send_from_directory, redirect, url_for, session, flash import os import shutil import requests from pymongo import MongoClient from functools import wraps import bcrypt from bson.objectid import ObjectId @app.route('/', methods=['GET']) @authenticated def index(): return render_template('app.html') @app.route('/portal') def portal(): if not session.get('username'): return render_template('portal.html') else: return render_template('index.html') @app.route('/logout') def logout(): session.pop('username', None) session.pop('role', None) session.pop('realname', None) return redirect(url_for('index')) @app.route('/download/<id>/<format>') @authenticated def download(id, format): book = mongo.Books.find({'id':id})[0] response = send_from_directory(app.config['LIB_DIR'], id+'.'+format) response.headers.add('Content-Disposition', 'attachment; filename="' + book['title'] + '.' + format + '"') return response @app.route('/upload') @authenticated @administrator def upload(): return render_template('upload.html') if __name__ == "__main__": app.run()
Fix return on active user accessing the portal
from alexandria import app, mongo from decorators import * from flask import render_template, request, jsonify, g, send_from_directory, redirect, url_for, session, flash import os import shutil import requests from pymongo import MongoClient from functools import wraps import bcrypt from bson.objectid import ObjectId @app.route('/', methods=['GET']) @authenticated def index(): return render_template('app.html') @app.route('/portal') def portal(): if not session.get('username'): return render_template('portal.html') else: return redirect(url_for('index')) @app.route('/logout') def logout(): session.pop('username', None) session.pop('role', None) session.pop('realname', None) return redirect(url_for('index')) @app.route('/download/<id>/<format>') @authenticated def download(id, format): book = mongo.Books.find({'id':id})[0] response = send_from_directory(app.config['LIB_DIR'], id+'.'+format) response.headers.add('Content-Disposition', 'attachment; filename="' + book['title'] + '.' + format + '"') return response @app.route('/upload') @authenticated @administrator def upload(): return render_template('upload.html') if __name__ == "__main__": app.run()
import pytest from apispec import yaml_utils def test_load_yaml_from_docstring(): def f(): """ Foo bar baz quux --- herp: 1 derp: 2 """ result = yaml_utils.load_yaml_from_docstring(f.__doc__) assert result == {"herp": 1, "derp": 2} @pytest.mark.parametrize("docstring", (None, "", "---")) def test_load_yaml_from_docstring_empty_docstring(docstring): assert yaml_utils.load_yaml_from_docstring(docstring) == {} @pytest.mark.parametrize("docstring", (None, "", "---")) def test_load_operations_from_docstring_empty_docstring(docstring): assert yaml_utils.load_operations_from_docstring(docstring) == {}
Add regression test for generating yaml with unicode
import pytest from apispec import yaml_utils def test_load_yaml_from_docstring(): def f(): """ Foo bar baz quux --- herp: 1 derp: 2 """ result = yaml_utils.load_yaml_from_docstring(f.__doc__) assert result == {"herp": 1, "derp": 2} @pytest.mark.parametrize("docstring", (None, "", "---")) def test_load_yaml_from_docstring_empty_docstring(docstring): assert yaml_utils.load_yaml_from_docstring(docstring) == {} @pytest.mark.parametrize("docstring", (None, "", "---")) def test_load_operations_from_docstring_empty_docstring(docstring): assert yaml_utils.load_operations_from_docstring(docstring) == {} def test_dict_to_yaml_unicode(): assert yaml_utils.dict_to_yaml({"가": "나"}) == '"\\uAC00": "\\uB098"\n' assert yaml_utils.dict_to_yaml({"가": "나"}, {"allow_unicode": True}) == "가: 나\n"
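The expected strings pin down PyYAML's default escaping behaviour: without allow_unicode, non-ASCII scalars are written as escape sequences. Assuming dict_to_yaml is a thin wrapper over yaml.dump that forwards its second argument as keyword arguments (which the call shape in the test suggests), the behaviour under test reduces to:

import yaml

# Default: non-ASCII keys and values are escaped.
print(yaml.dump({"가": "나"}, default_flow_style=False))
# "\uAC00": "\uB098"

# With allow_unicode=True the characters pass through verbatim.
print(yaml.dump({"가": "나"}, allow_unicode=True, default_flow_style=False))
# 가: 나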
from django.test import TestCase from django.http import HttpRequest from campaigns.views import create_campaign from campaigns.models import Campaign from campaigns.forms import CampaignForm class HomePageTest(TestCase): def test_does_root_url_resolves_the_home_page(self): called = self.client.get('/') self.assertTemplateUsed(called, 'home.html') class CampaignsViewsTest(TestCase): def test_does_create_campaign_resolves_the_right_url(self): called = self.client.get('/campaigns/new') self.assertTemplateUsed(called, 'new_campaign.html') # Trying to do self.client.post was using GET request for some # reason so i made it that ugly def test_does_create_camapign_saves_objects_with_POST_requests(self): self.assertEqual(Campaign.objects.count(), 0) request = HttpRequest() request.method = 'POST' request.POST['title'] = 'C1' request.POST['description'] = 'C1Descr' create_campaign(request) campaign = Campaign.objects.first() self.assertEqual(Campaign.objects.count(), 1) self.assertEqual(campaign.title, 'C1') self.assertEqual(campaign.description, 'C1Descr')
Refactor some redundancy in the views tests
from django.test import TestCase from django.http import HttpRequest from campaigns.views import create_campaign from campaigns.models import Campaign from campaigns.forms import CampaignForm def make_POST_request(titleValue, descriptionValue): request = HttpRequest() request.method = 'POST' request.POST['title'] = titleValue request.POST['description'] = descriptionValue return request class HomePageTest(TestCase): def test_does_root_url_resolves_the_home_page(self): called = self.client.get('/') self.assertTemplateUsed(called, 'home.html') class CampaignsViewsTest(TestCase): def test_does_create_campaign_resolves_the_right_url(self): called = self.client.get('/campaigns/new') self.assertTemplateUsed(called, 'new_campaign.html') # Trying to do self.client.post was using GET request for some # reason so i made it that ugly def test_does_create_campaign_saves_objects_with_POST_requests(self): self.assertEqual(Campaign.objects.count(), 0) create_campaign(make_POST_request('C1', 'C1Descr')) campaign = Campaign.objects.first() self.assertEqual(Campaign.objects.count(), 1) self.assertEqual(campaign.title, 'C1') self.assertEqual(campaign.description, 'C1Descr') def test_create_campaign_dont_saves_empty_objects(self): self.assertEqual(Campaign.objects.count(), 0) create_campaign(make_POST_request('', '')) self.assertEqual(Campaign.objects.count(), 0)
import numpy as np from scipy import sparse from .sparselol_cy import extents_count def extents(labels): """Compute the extents of every integer value in ``arr``. Parameters ---------- labels : array of ints The array of values to be mapped. Returns ------- locs : sparse.csr_matrix A sparse matrix in which the nonzero elements of row i are the indices of value i in ``arr``. """ labels = labels.ravel() counts = np.bincount(labels) indptr = np.concatenate([[0], np.cumsum(counts)]) indices = np.empty(labels.size, int) extents_count(labels.ravel(), indptr.copy(), out=indices) locs = sparse.csr_matrix((indices, indices, indptr), dtype=int) return locs
Use stride tricks to save data memory
import numpy as np
from scipy import sparse
from .sparselol_cy import extents_count

def extents(labels):
    """Compute the extents of every integer value in ``arr``.

    Parameters
    ----------
    labels : array of ints
        The array of values to be mapped.

    Returns
    -------
    locs : sparse.csr_matrix
        A sparse matrix in which the nonzero elements of row i are the
        indices of value i in ``arr``.
    """
    labels = labels.ravel()
    counts = np.bincount(labels)
    indptr = np.concatenate([[0], np.cumsum(counts)])
    indices = np.empty(labels.size, int)
    extents_count(labels.ravel(), indptr.copy(), out=indices)
    one = np.ones((1,), dtype=int)
    data = np.lib.stride_tricks.as_strided(one, shape=indices.shape,
                                           strides=(0,))
    locs = sparse.csr_matrix((data, indices, indptr), dtype=int)
    return locs
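The memory saving comes from as_strided with a zero stride: every element of data aliases the same backing integer, so the CSR data array needs one element of storage rather than one per label. A standalone demonstration of the trick:

import numpy as np
from numpy.lib.stride_tricks import as_strided

one = np.ones((1,), dtype=int)
virtual_ones = as_strided(one, shape=(10,), strides=(0,))
print(virtual_ones)          # [1 1 1 1 1 1 1 1 1 1]
print(virtual_ones.strides)  # (0,) -- every element reads the same memory
print(one.nbytes)            # 8: a single element backs all ten views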
import sphinx.parsers import docutils.parsers.rst as rst class CStrip(sphinx.parsers.Parser): def __init__(self): self.rst_parser = rst.Parser() def parse(self, inputstring, document): stripped = [] for line in inputstring.split("\n"): line = line.strip() if line == "//|": stripped.append("") elif line.startswith("//| "): stripped.append(line[len("//| "):]) stripped = "\r\n".join(stripped) self.rst_parser.parse(stripped, document)
Switch away from sphinx.parsers which isn't available in sphinx 1.3.5 on Read The Docs.
import docutils.parsers import docutils.parsers.rst as rst class CStrip(docutils.parsers.Parser): def __init__(self): self.rst_parser = rst.Parser() def parse(self, inputstring, document): stripped = [] for line in inputstring.split("\n"): line = line.strip() if line == "//|": stripped.append("") elif line.startswith("//| "): stripped.append(line[len("//| "):]) stripped = "\r\n".join(stripped) self.rst_parser.parse(stripped, document)
import json import keyring from pyutrack import Credentials from tests import PyutrackTest class CredentialsTests(PyutrackTest): def test_empty(self): c = Credentials('root') self.assertIsNone(c.password) self.assertIsNone(c.cookies) def test_persistence(self): c = Credentials('root', 'passwd', {"key": "value"}) c.persist() self.assertEqual( keyring.get_password(Credentials.KEYRING_PASSWORD, 'root'), 'passwd' ) self.assertEqual( json.loads(keyring.get_password(Credentials.KEYRING_COOKIE, 'root')), {"key": "value"} )
Add test for credentials reload
import json import keyring from pyutrack import Credentials from tests import PyutrackTest class CredentialsTests(PyutrackTest): def test_empty(self): c = Credentials('root') self.assertIsNone(c.password) self.assertIsNone(c.cookies) def test_persistence(self): c = Credentials('root', 'passwd', {"key": "value"}) c.persist() self.assertEqual( keyring.get_password(Credentials.KEYRING_PASSWORD, 'root'), 'passwd' ) self.assertEqual( json.loads(keyring.get_password(Credentials.KEYRING_COOKIE, 'root')), {"key": "value"} ) def test_reload(self): Credentials('root', 'passwd', {"key": "value"}).persist() c = Credentials('root') self.assertEqual( keyring.get_password(Credentials.KEYRING_PASSWORD, 'root'), 'passwd' ) self.assertEqual( json.loads(keyring.get_password(Credentials.KEYRING_COOKIE, 'root')), {"key": "value"} )
from django import forms from .models import Config class ConfigForm(forms.ModelForm): class Meta: model = Config exclude = ('created', 'user') PAGES = ( ('base',), ('distribution',), ('media_type',), ('architecture',), ('installer',), ('locale', 'keyboard_layout'), ) WIZARD_FORMS = [] for fields in PAGES: meta = type('Meta', (), { 'model': Config, 'fields': fields, }) WIZARD_FORMS.append(type('', (forms.ModelForm,), {'Meta': meta}))
Use radio buttons for most of the interface. Signed-off-by: Chris Lamb <[email protected]>
from django import forms from .models import Config class ConfigForm(forms.ModelForm): class Meta: model = Config exclude = ('created', 'user') PAGES = ( ('base',), ('distribution',), ('media_type',), ('architecture',), ('installer',), ('locale', 'keyboard_layout'), ) WIZARD_FORMS = [] for fields in PAGES: meta = type('Meta', (), { 'model': Config, 'fields': fields, 'widgets': { 'base': forms.RadioSelect(), 'distribution': forms.RadioSelect(), 'media_type': forms.RadioSelect(), 'architecture': forms.RadioSelect(), 'installer': forms.RadioSelect(), }, }) WIZARD_FORMS.append(type('', (forms.ModelForm,), {'Meta': meta}))
#!/usr/bin/env python import os import sys import {{ project.repo_name }} try: from setuptools import setup except ImportError: from distutils.core import setup if sys.argv[-1] == 'publish': os.system('python setup.py sdist upload') sys.exit() readme = open('README.rst', 'rt').read() history = open('HISTORY.rst', 'rt').read() setup( name='{{ project.repo_name }}', version={{ project.repo_name }}.__version__, description='{{ project.project_short_description }}', long_description=readme + '\n\n' + history, author='{{ project.full_name }}', author_email='{{ project.email }}', url='https://github.com/{{ project.github_username }}/{{ project.repo_name }}', packages=[ '{{ project.repo_name }}', ], include_package_data=True, install_requires=[ ], license="BSD", zip_safe=False, keywords='{{ project.repo_name }}', classifiers=[ 'Development Status :: 2 - Pre-Alpha', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Natural Language :: English', "Programming Language :: Python :: 2", 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', ], test_suite='tests', )
Add package_dir which uses project.repo_name.
#!/usr/bin/env python import os import sys import {{ project.repo_name }} try: from setuptools import setup except ImportError: from distutils.core import setup if sys.argv[-1] == 'publish': os.system('python setup.py sdist upload') sys.exit() readme = open('README.rst', 'rt').read() history = open('HISTORY.rst', 'rt').read() setup( name='{{ project.repo_name }}', version={{ project.repo_name }}.__version__, description='{{ project.project_short_description }}', long_description=readme + '\n\n' + history, author='{{ project.full_name }}', author_email='{{ project.email }}', url='https://github.com/{{ project.github_username }}/{{ project.repo_name }}', packages=[ '{{ project.repo_name }}', ], package_dir={'{{ project.repo_name }}': '{{ project.repo_name }}'}, include_package_data=True, install_requires=[ ], license="BSD", zip_safe=False, keywords='{{ project.repo_name }}', classifiers=[ 'Development Status :: 2 - Pre-Alpha', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Natural Language :: English', "Programming Language :: Python :: 2", 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', ], test_suite='tests', )
# -*- coding: utf-8 -*- DEBUG = True LOCAL_DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql', 'NAME': 'circle_test', 'USER': 'circleci', 'PASSWORD': '', 'HOST': 'localhost', 'PORT': '5432', } } LOCALLY_INSTALLED_APPS = [ ] ENABLE_EMAILS = False LOCALLY_ALLOWED_HOSTS = [ ] ADMINS = []
Add missing secret key to circle ci settings
# -*- coding: utf-8 -*- DEBUG = True LOCAL_DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql', 'NAME': 'circle_test', 'USER': 'circleci', 'PASSWORD': '', 'HOST': 'localhost', 'PORT': '5432', } } LOCALLY_INSTALLED_APPS = [ ] ENABLE_EMAILS = False LOCALLY_ALLOWED_HOSTS = [ ] ADMINS = [] SECRET_KEY = 'CHANGE ME'
from django.shortcuts import render from datetime import date, datetime, timedelta from .models import Event def index(request): if date.today().isoweekday() in [5,6,7]: weekend_start = date.today() else: weekend_start = date.today() + timedelta((5 - date.today().isoweekday()) % 7 ) events = Event.objects.filter(start__lte = weekend_start, end__gte = weekend_start).order_by('-start') return render(request, 'index.html', {'events': events})
Index now only displays published articles.
from django.shortcuts import render from datetime import date, datetime, timedelta from .models import Event def index(request): if date.today().isoweekday() in [5,6,7]: weekend_start = date.today() else: weekend_start = date.today() + timedelta((5 - date.today().isoweekday()) % 7 ) events = Event.objects.filter(start__lte = weekend_start, end__gte = weekend_start, published = True).order_by('-start') return render(request, 'index.html', {'events': events})
#!/usr/bin/env python # Copyright 2015 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. # See https://github.com/domokit/sky_engine/wiki/Release-process import os import subprocess import sys def main(): engine_root = os.path.abspath('.') if not os.path.exists(os.path.join(engine_root, 'sky')): print "Cannot find //sky. Is %s the Flutter engine repository?" % engine_root return 1 pub_path = os.path.join(engine_root, 'third_party/dart-sdk/dart-sdk/bin/pub') if args.publish: subprocess.check_call([pub_path, 'publish', '--force'], cwd=os.path.join(engine_root, 'sky/packages/sky')) subprocess.check_call([pub_path, 'publish', '--force'], cwd=os.path.join(engine_root, 'skysprites')) if __name__ == '__main__': sys.exit(main())
Add FLX to the release train
#!/usr/bin/env python # Copyright 2015 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. # See https://github.com/domokit/sky_engine/wiki/Release-process import os import subprocess import sys def main(): engine_root = os.path.abspath('.') if not os.path.exists(os.path.join(engine_root, 'sky')): print "Cannot find //sky. Is %s the Flutter engine repository?" % engine_root return 1 pub_path = os.path.join(engine_root, 'third_party/dart-sdk/dart-sdk/bin/pub') if args.publish: subprocess.check_call([pub_path, 'publish', '--force'], cwd=os.path.join(engine_root, 'sky/packages/sky')) subprocess.check_call([pub_path, 'publish', '--force'], cwd=os.path.join(engine_root, 'sky/packages/flx')) subprocess.check_call([pub_path, 'publish', '--force'], cwd=os.path.join(engine_root, 'skysprites')) if __name__ == '__main__': sys.exit(main())
import ast import click from parsing.parser import FileVisitor @click.command() @click.argument('code', type=click.File('rb')) @click.option('--printed', default=False, is_flag=True, help='Pretty prints the call tree for each class in the file') def cli(code, printed): """ Parses a file. codegrapher [file_name] """ parsed_code = ast.parse(code.read(), filename='code.py') visitor = FileVisitor() visitor.visit(parsed_code) if printed: click.echo('Classes in file:') for class_object in visitor.classes: click.echo('=' * 80) click.echo(class_object.name) click.echo(class_object.pprint()) click.echo('')
Add builtin removal as an option to cli
import ast import click from parsing.parser import FileVisitor @click.command() @click.argument('code', type=click.File('rb')) @click.option('--printed', default=False, is_flag=True, help='Pretty prints the call tree for each class in the file') @click.option('--remove-builtins', default=False, is_flag=True, help='Removes builtin functions from call trees') def cli(code, printed, remove_builtins): """ Parses a file. codegrapher [file_name] """ parsed_code = ast.parse(code.read(), filename='code.py') visitor = FileVisitor() visitor.visit(parsed_code) if printed: click.echo('Classes in file:') for class_object in visitor.classes: if remove_builtins: class_object.remove_builtins() click.echo('=' * 80) click.echo(class_object.name) click.echo(class_object.pprint()) click.echo('')
# Copyright 2022 ACSONE SA/NV # License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl). from odoo import models class BasePartnerMergeAutomaticWizard(models.TransientModel): _inherit = "base.partner.merge.automatic.wizard" def _merge(self, partner_ids, dst_partner=None, extra_checks=True): partners = self.env["res.partner"].browse(partner_ids).exists() # remove dst_partner from partners to merge if dst_partner and dst_partner in partners: src_partners = partners - dst_partner else: ordered_partners = self._get_ordered_partner(partners.ids) src_partners = ordered_partners[:-1] src_partners.mapped("membership_line_ids")._close(force=True) return super(BasePartnerMergeAutomaticWizard, self)._merge( partner_ids, dst_partner, extra_checks )
Fix the security for the merge after closing memberships
# Copyright 2022 ACSONE SA/NV # License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl). from odoo import models class BasePartnerMergeAutomaticWizard(models.TransientModel): _inherit = "base.partner.merge.automatic.wizard" def _merge(self, partner_ids, dst_partner=None, extra_checks=True): partners = self.env["res.partner"].browse(partner_ids).exists() # remove dst_partner from partners to merge if dst_partner and dst_partner in partners: src_partners = partners - dst_partner else: ordered_partners = self._get_ordered_partner(partners.ids) dst_partner = ordered_partners[-1] src_partners = ordered_partners[:-1] # since we close the membership we need to keep an instance for the security for p in src_partners: p.force_int_instance_id = p.int_instance_id dst_force_int_instance_id = dst_partner.force_int_instance_id src_partners.mapped("membership_line_ids")._close(force=True) res = super(BasePartnerMergeAutomaticWizard, self)._merge( partner_ids, dst_partner, extra_checks ) # do not modify the force_int_instance_id since it should be empty if # there is a membership_line_id dst_partner.force_int_instance_id = dst_force_int_instance_id return res
import asyncore import util try: import simplejson as json except ImportError: import json class ChannelServer(asyncore.dispatcher): def __init__(self, sock, dest): asyncore.dispatcher.__init__(self, sock) self.dest = dest dest.register('close', self.closehook) def handle_accept(self): client = self.accept() SideChannel(client[0], self.dest) def closehook(self, hook, data): print 'HOOK-CLOSE' self.close() class SideChannel(asyncore.dispatcher): def __init__(self, sock, dest): asyncore.dispatcher.__init__(self, sock) self.dest = dest self.buffer = None def handle_close(self): self.close() def handle_read(self): raw = self.recv(8192) if raw: msg = util.json_decode(json.loads(raw)) self.dest.queue(msg) self.buffer = {'result': 'done'} def writable(self): return self.buffer def handle_write(self): self.send(json.dumps(self.buffer)) self.close()
Use a lambda as a proxy.
import asyncore import util try: import simplejson as json except ImportError: import json class ChannelServer(asyncore.dispatcher): def __init__(self, sock, dest): asyncore.dispatcher.__init__(self, sock) self.dest = dest dest.register('close', lambda x, y: self.close()) def handle_accept(self): client = self.accept() SideChannel(client[0], self.dest) class SideChannel(asyncore.dispatcher): def __init__(self, sock, dest): asyncore.dispatcher.__init__(self, sock) self.dest = dest self.buffer = None def handle_close(self): self.close() def handle_read(self): raw = self.recv(8192) if raw: msg = util.json_decode(json.loads(raw)) self.dest.queue(msg) self.buffer = {'result': 'done'} def writable(self): return self.buffer def handle_write(self): self.send(json.dumps(self.buffer)) self.close()
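The lambda takes two throwaway parameters because the hook mechanism evidently calls back with a hook name and a data payload, as the removed closehook(self, hook, data) signature shows. A minimal sketch of that assumed contract — the Dispatcher class here is hypothetical, for illustration only:

class Dispatcher(object):
    # Hypothetical event hub matching the register()/callback contract
    # implied above: callbacks receive the hook name and a data payload.
    def __init__(self):
        self.hooks = {}

    def register(self, name, callback):
        self.hooks.setdefault(name, []).append(callback)

    def fire(self, name, data=None):
        for callback in self.hooks.get(name, []):
            callback(name, data)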
from django.core.management import BaseCommand from django.db.models import get_app, get_models from django.conf import settings from common.utilities.search_utils import index_instance class Command(BaseCommand): def add_arguments(self, parser): parser.add_argument( '--test', action='store_true', dest='test', default=False, help='Provide this if you want to create a test index') def handle(self, *args, **options): # optimize this to index in bulk apps_lists = settings.LOCAL_APPS for app_name in apps_lists: app = get_app(app_name) for model in get_models(app): all_instances = model.objects.all()[0:3] \ if options.get('test') else model.objects.all() [index_instance(obj) for obj in all_instances] message = "Indexed {} {}".format( all_instances.count(), model._meta.verbose_name_plural.capitalize()) self.stdout.write(message) self.stdout.write("Finished indexing")
Check the model being indexed
from django.core.management import BaseCommand from django.db.models import get_app, get_models from django.conf import settings from common.utilities.search_utils import index_instance class Command(BaseCommand): def add_arguments(self, parser): parser.add_argument( '--test', action='store_true', dest='test', default=False, help='Provide this if you want to create a test index') def handle(self, *args, **options): # optimize this to index in bulk apps_lists = settings.LOCAL_APPS for app_name in apps_lists: app = get_app(app_name) for model in get_models(app): if model.__name__.lower() != 'testmodel': all_instances = model.objects.all()[0:3] \ if options.get('test') else model.objects.all() [index_instance(obj) for obj in all_instances] message = "Indexed {} {}".format( all_instances.count(), model._meta.verbose_name_plural.capitalize()) self.stdout.write(message) else: # relation "common_testmodel" does not exist # Will be fixed pass self.stdout.write("Finished indexing")
#!/usr/bin/env python # encoding: utf8 from __future__ import (absolute_import, print_function, division, unicode_literals) # See - http://www.python.org/dev/peps/pep-0440/ # See - http://semver.org __author__ = 'Roy Levien' __copyright__ = '(c) 2014-2015 Roy Levien' __release__ = '0.2.1' # N(.N)* __pre_release__ = 'b2' # aN | bN | cN | __suffix__ = ''#'.dev5' # .devN | | .postN __version__ = __release__ + __pre_release__ + __suffix__
Update test version following release
#!/usr/bin/env python # encoding: utf8 from __future__ import (absolute_import, print_function, division, unicode_literals) # See - http://www.python.org/dev/peps/pep-0440/ # See - http://semver.org __author__ = 'Roy Levien' __copyright__ = '(c) 2014-2015 Roy Levien' __release__ = '0.2.1' # N(.N)* __pre_release__ = 'b3' # aN | bN | cN | __suffix__ = '.dev1' # .devN | | .postN __version__ = __release__ + __pre_release__ + __suffix__
# import porter_paleale def write_pdb(aa_sequence, fragment_angles, gap_length, filepath): """ Generate pdb file with results :param aa_sequence: Amino acid sequence :param fragment_angles: Backbone torsion angles :param gap_length: Length of the gap at the sequence start and end :param filepath: Path to the file to save the pdb """ pass
Write pdb result to disk
from peptide import PeptideBuilder import Bio.PDB def write_pdb(aa_sequence, fragment_angles, gap_length, filepath): """ Generate pdb file with results :param aa_sequence: Amino acid sequence :param fragment_angles: Backbone torsion angles :param gap_length: Length of the gap at the sequence start and end :param filepath: Path to the file to save the pdb """ phi, psi = zip(*fragment_angles) structure = PeptideBuilder.make_structure(aa_sequence, phi, psi) out = Bio.PDB.PDBIO() out.set_structure(structure) out.save(filepath)
# Copyright 2014 IBM Corp. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslotest import base from oslotest import moxstubout class TestMoxStubout(base.BaseTestCase): def _stubable(self): pass def test_basic_stubout(self): f = self.useFixture(moxstubout.MoxStubout()) before = TestMoxStubout._stubable f.mox.StubOutWithMock(TestMoxStubout, '_stubable') after = TestMoxStubout._stubable self.assertNotEqual(before, after) f.cleanUp() after2 = TestMoxStubout._stubable self.assertEqual(before, after2)
Fix build break with Fixtures 1.3 Our explicit call to cleanUp messes things up in latest fixture, so we need to call _clear_cleanups to stop the test from breaking Change-Id: I8ce2309a94736b47fb347f37ab4027857e19c8a8
# Copyright 2014 IBM Corp. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslotest import base from oslotest import moxstubout class TestMoxStubout(base.BaseTestCase): def _stubable(self): pass def test_basic_stubout(self): f = self.useFixture(moxstubout.MoxStubout()) before = TestMoxStubout._stubable f.mox.StubOutWithMock(TestMoxStubout, '_stubable') after = TestMoxStubout._stubable self.assertNotEqual(before, after) f.cleanUp() after2 = TestMoxStubout._stubable self.assertEqual(before, after2) f._clear_cleanups()
import os import re from setuptools import ( find_packages, setup, ) version_re = re.compile(r"__version__\s*=\s*['\"](.*?)['\"]") def get_version(): base = os.path.abspath(os.path.dirname(__file__)) with open(os.path.join(base, 'curator/__init__.py')) as initf: for line in initf: m = version_re.match(line.strip()) if not m: continue return m.groups()[0] setup( name='redis-lua-curator', version=get_version(), description='Helper for working with lua scripts.', packages=find_packages(exclude=[ "*.tests", "*.tests.*", "tests.*", "tests", ]), author='Michael Hahn', author_email='[email protected]', url='https://github.com/mhahn/curator/', download_url='https://github.com/mhahn/curator/tarball/%s' % ( get_version(), ), setup_requires=[ 'nose>=1.0', 'coverage>=1.0', 'mock==1.0.1', 'unittest2==0.5.1', ], install_requires=[ 'redis==2.10.1', 'jinja2==2.7.2', ], keywords=['redis', 'lua'], )
Support a range of redis client versions
import os import re from setuptools import ( find_packages, setup, ) version_re = re.compile(r"__version__\s*=\s*['\"](.*?)['\"]") def get_version(): base = os.path.abspath(os.path.dirname(__file__)) with open(os.path.join(base, 'curator/__init__.py')) as initf: for line in initf: m = version_re.match(line.strip()) if not m: continue return m.groups()[0] setup( name='redis-lua-curator', version=get_version(), description='Helper for working with lua scripts.', packages=find_packages(exclude=[ "*.tests", "*.tests.*", "tests.*", "tests", ]), author='Michael Hahn', author_email='[email protected]', url='https://github.com/mhahn/curator/', download_url='https://github.com/mhahn/curator/tarball/%s' % ( get_version(), ), setup_requires=[ 'nose>=1.0', 'coverage>=1.0', 'mock==1.0.1', 'unittest2==0.5.1', ], install_requires=[ 'redis >= 2.8.0, <= 2.10.1', 'jinja2==2.7.2', ], keywords=['redis', 'lua'], )
#!/usr/bin/env python from setuptools import setup, find_packages setup( name='django-admin-extend', version='0.0.1', description=('Provides functionality for extending' 'ModelAdmin classes that have already' 'been registered by other apps'), author='Ioan Alexandru Cucu', author_email='[email protected]', url='https://github.com/kux/django-admin-extend', download_url='https://github.com/kux/django-admin-extend/tarball/0.1', install_requires=('Django>=1.3',), packages=find_packages(), include_package_data=True, )
Fix download url and bump version
#!/usr/bin/env python
from setuptools import setup, find_packages

setup(
    name='django-admin-extend',
    version='0.0.2',
    description=('Provides functionality for extending'
                 'ModelAdmin classes that have already'
                 'been registered by other apps'),
    author='Ioan Alexandru Cucu',
    author_email='[email protected]',
    url='https://github.com/kux/django-admin-extend',
    download_url='https://github.com/kux/django-admin-extend/archive/0.0.2.tar.gz',
    install_requires=('Django>=1.3',),
    packages=find_packages(),
    include_package_data=True,
)
#!/usr/bin/env python import os from distutils.core import setup def read(fname): return open(os.path.join(os.path.dirname(__file__), fname)).read() setup( name='whack', version='0.3.0', description='Utility for installing binaries from source with a single command', long_description=read("README"), author='Michael Williamson', url='http://github.com/mwilliamson/whack', scripts=["scripts/whack"], packages=['whack'], install_requires=['blah>=0.1.10,<0.2', 'requests', "catchy==0.1.0"], )
Update install_requires to be more accurate
#!/usr/bin/env python import os from distutils.core import setup def read(fname): return open(os.path.join(os.path.dirname(__file__), fname)).read() setup( name='whack', version='0.3.0', description='Utility for installing binaries from source with a single command', long_description=read("README"), author='Michael Williamson', url='http://github.com/mwilliamson/whack', scripts=["scripts/whack"], packages=['whack'], install_requires=['blah>=0.1.10,<0.2', 'requests>=1,<2', "catchy>=0.1.0,<0.2"], )
from setuptools import setup, find_packages setup( name = "biofloat", version = "0.3.0", packages = find_packages(), requires = ['Python (>=2.7)'], install_requires = [ 'beautifulsoup4>=4.4', 'coverage>=4', 'jupyter>=1.0.0', 'matplotlib', 'numpy>=1.10', 'pandas>=0.17', 'Pydap', 'requests>=2.8', 'seawater>=3.3', 'simpletable>=0.2', 'xray>=0.6' ], scripts = ['scripts/load_biofloat_cache.py'], # metadata for upload to PyPI author = "Mike McCann", author_email = "[email protected]", description = "Software for working with data from Bio-Argo floats", license = "MIT", keywords = "Oceanography Argo Bio-Argo drifting buoys floats", url = "https://github.com/biofloat/biofloat", )
Add 'scripts/woa_calibration.py' to scripts list
from setuptools import setup, find_packages setup( name = "biofloat", version = "0.3.0", packages = find_packages(), requires = ['Python (>=2.7)'], install_requires = [ 'beautifulsoup4>=4.4', 'coverage>=4', 'jupyter>=1.0.0', 'matplotlib', 'numpy>=1.10', 'pandas>=0.17', 'Pydap', 'requests>=2.8', 'seawater>=3.3', 'simpletable>=0.2', 'xray>=0.6' ], scripts = ['scripts/load_biofloat_cache.py', 'scripts/woa_calibration.py'], # metadata for upload to PyPI author = "Mike McCann", author_email = "[email protected]", description = "Software for working with data from Bio-Argo floats", license = "MIT", keywords = "Oceanography Argo Bio-Argo drifting buoys floats", url = "https://github.com/biofloat/biofloat", )