#!/usr/bin/env python
# -*- coding: utf-8 -*-
""" Provides ``mapping`` of url paths to request handlers.
"""
from bootstrap import Bootstrap
from fund import InstantPaymentNotificationHandler
from fund import ThankYouHandler
from view import *
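# Route order matters: more specific paths must precede the catch-all
# NotFound pattern at the end of the list.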
mapping = [
(r"/", Index),
(r"/ipn", InstantPaymentNotificationHandler),
(r"/thank-you", ThankYouHandler),
(r"/about\/?", About),
(r"/guide\/?", Guide),
(r"/guide/download\/?", Download),
(r"/guide/standards\/?", Standards),
(r"/community\/?", Community),
(r"/news\/?", News),
(r"/support\/?", Support),
(r"/contact\/?", Contact),
(r"/press\/?", Press),
(r"/legal/terms", Terms),
(r"/library\/?", Library),
(r"/library/sketchup\/?", Library),
(r"/library/series/(\w+)\/?", Library),
(r"/library/users\/?", Users),
(r"/library/users/([0-9]+)\/?", User),
(r"/library/designs/([0-9]+)\/?", Design),
(r"/library/designs/([0-9]+)/(edit)\/?", Design),
(r"/library/designs\/?", Design),
(r"/library/designs/add\/?", Design),
(r"/library/designs/add/sketchup\/?", Design),
(r"/redirect/success/([0-9]+)\/?", RedirectSuccess),
(r"/redirect/error\/?", RedirectError),
(r"/redirect/after/delete\/?", RedirectAfterDelete),
(r"/admin/moderate\/?", Moderate),
(r"/admin/bootstrap\/?", Bootstrap),
(r"/activity", ActivityScreen),
(r"/txns", TxnList),
(r"/blob64/([^/]+)/([^/]+)\/?", Base64Blob),
(r"/blob64/([^/]+)\/?", Base64Blob),
(r"/i18n/message_strings.json", MessageStrings),
(r"/.*", NotFound),
]

# -*- coding: utf-8 -*-
# Open Source Initiative OSI - The MIT License (MIT):Licensing
#
# The MIT License (MIT)
# Copyright (c) 2015 François-Xavier Bourlet ([email protected])
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
# of the Software, and to permit persons to whom the Software is furnished to do
# so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import msgpack
import gevent.pool
import gevent.queue
import gevent.event
import gevent.local
import gevent.lock
import logging
import sys
import gevent_zmq as zmq
from .exceptions import TimeoutExpired
from .context import Context
from .channel_base import ChannelBase
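# On Python < 2.7 a pyzmq frame's buffer is copied before being handed out,
# presumably to avoid lifetime issues with the old buffer interface; newer
# versions can return the buffer directly.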
if sys.version_info < (2, 7):
def get_pyzmq_frame_buffer(frame):
return frame.buffer[:]
else:
def get_pyzmq_frame_buffer(frame):
return frame.buffer
logger = logging.getLogger(__name__)
class SequentialSender(object):
def __init__(self, socket):
self._socket = socket
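    # _send writes the message frame by frame with SNDMORE; if the greenlet is
    # killed or times out mid-message, the current frame is re-sent so the
    # multipart message is fully flushed before the exception is re-raised
    # (aborting between frames would corrupt the socket's multipart state).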
    def _send(self, parts):
        e = None
        for i in range(len(parts) - 1):
            try:
                self._socket.send(parts[i], copy=False, flags=zmq.SNDMORE)
            except (gevent.GreenletExit, gevent.Timeout) as err:
                e = err  # keep a reference; "as" targets are unbound on Python 3
                if i == 0:
                    raise
                self._socket.send(parts[i], copy=False, flags=zmq.SNDMORE)
        try:
            self._socket.send(parts[-1], copy=False)
        except (gevent.GreenletExit, gevent.Timeout) as err:
            e = err
            self._socket.send(parts[-1], copy=False)
        if e:
            raise e
def __call__(self, parts, timeout=None):
if timeout:
with gevent.Timeout(timeout):
self._send(parts)
else:
self._send(parts)
class SequentialReceiver(object):
def __init__(self, socket):
self._socket = socket
    def _recv(self):
        e = None
        parts = []
        while True:
            try:
                part = self._socket.recv(copy=False)
            except (gevent.GreenletExit, gevent.Timeout) as err:
                e = err  # keep a reference; "as" targets are unbound on Python 3
                if len(parts) == 0:
                    raise
                part = self._socket.recv(copy=False)
            parts.append(part)
            if not part.more:
                break
        if e:
            raise e
        return parts
def __call__(self, timeout=None):
if timeout:
with gevent.Timeout(timeout):
return self._recv()
else:
return self._recv()
class Sender(SequentialSender):
def __init__(self, socket):
self._socket = socket
self._send_queue = gevent.queue.Channel()
self._send_task = gevent.spawn(self._sender)
def close(self):
if self._send_task:
self._send_task.kill()
def _sender(self):
for parts in self._send_queue:
super(Sender, self)._send(parts)
def __call__(self, parts, timeout=None):
try:
self._send_queue.put(parts, timeout=timeout)
except gevent.queue.Full:
raise TimeoutExpired(timeout)
class Receiver(SequentialReceiver):
def __init__(self, socket):
self._socket = socket
self._recv_queue = gevent.queue.Channel()
self._recv_task = gevent.spawn(self._recver)
def close(self):
if self._recv_task:
self._recv_task.kill()
self._recv_queue = None
def _recver(self):
while True:
parts = super(Receiver, self)._recv()
self._recv_queue.put(parts)
def __call__(self, timeout=None):
try:
return self._recv_queue.get(timeout=timeout)
except gevent.queue.Empty:
raise TimeoutExpired(timeout)
class Event(object):
__slots__ = ["_name", "_args", "_header", "_identity"]
def __init__(self, name, args, context, header=None):
self._name = name
self._args = args
if header is None:
self._header = {"message_id": context.new_msgid(), "v": 3}
else:
self._header = header
self._identity = None
@property
def header(self):
return self._header
@property
def name(self):
return self._name
@name.setter
def name(self, v):
self._name = v
@property
def args(self):
return self._args
@property
def identity(self):
return self._identity
@identity.setter
def identity(self, v):
self._identity = v
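    # Wire format: pack() serializes the event as a msgpack 3-tuple
    # (header, name, args); the default header carries a unique message_id
    # and the protocol version ("v": 3).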
def pack(self):
return msgpack.Packer(use_bin_type=True).pack(
(self._header, self._name, self._args)
)
@staticmethod
def unpack(blob):
unpacker = msgpack.Unpacker(encoding="utf-8")
unpacker.feed(blob)
unpacked_msg = unpacker.unpack()
try:
(header, name, args) = unpacked_msg
except Exception as e:
raise Exception('invalid msg format "{0}": {1}'.format(unpacked_msg, e))
# Backward compatibility
if not isinstance(header, dict):
header = {}
return Event(name, args, None, header)
def __str__(self, ignore_args=False):
if ignore_args:
args = "[...]"
else:
args = self._args
try:
args = "<<{0}>>".format(str(self.unpack(self._args)))
except Exception:
pass
if self._identity:
identity = ", ".join(repr(x.bytes) for x in self._identity)
return "<{0}> {1} {2} {3}".format(identity, self._name, self._header, args)
return "{0} {1} {2}".format(self._name, self._header, args)
class Events(ChannelBase):
def __init__(self, zmq_socket_type, context=None):
self._debug = False
self._zmq_socket_type = zmq_socket_type
self._context = context or Context.get_instance()
self._socket = self._context.socket(zmq_socket_type)
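        # Capability depends on the ZMQ socket type: PUSH/PUB are send-only,
        # PULL/SUB are recv-only, DEALER/ROUTER support both via background
        # greenlets, and REQ/REP use the strictly sequential variants.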
if zmq_socket_type in (zmq.PUSH, zmq.PUB, zmq.DEALER, zmq.ROUTER):
self._send = Sender(self._socket)
elif zmq_socket_type in (zmq.REQ, zmq.REP):
self._send = SequentialSender(self._socket)
else:
self._send = None
if zmq_socket_type in (zmq.PULL, zmq.SUB, zmq.DEALER, zmq.ROUTER):
self._recv = Receiver(self._socket)
elif zmq_socket_type in (zmq.REQ, zmq.REP):
self._recv = SequentialReceiver(self._socket)
else:
self._recv = None
@property
def recv_is_supported(self):
return self._recv is not None
@property
def emit_is_supported(self):
return self._send is not None
def __del__(self):
try:
if not self._socket.closed:
self.close()
except (AttributeError, TypeError):
pass
def close(self):
try:
self._send.close()
except AttributeError:
pass
try:
self._recv.close()
except AttributeError:
pass
self._socket.close()
@property
def debug(self):
return self._debug
@debug.setter
def debug(self, v):
if v != self._debug:
self._debug = v
if self._debug:
logger.debug("debug enabled")
else:
logger.debug("debug disabled")
def _resolve_endpoint(self, endpoint, resolve=True):
if resolve:
endpoint = self._context.hook_resolve_endpoint(endpoint)
if isinstance(endpoint, (tuple, list)):
r = []
for sub_endpoint in endpoint:
r.extend(self._resolve_endpoint(sub_endpoint, resolve))
return r
return [endpoint]
def connect(self, endpoint, resolve=True):
r = []
for endpoint_ in self._resolve_endpoint(endpoint, resolve):
r.append(self._socket.connect(endpoint_))
logger.debug("connected to %s (status=%s)", endpoint_, r[-1])
return r
def bind(self, endpoint, resolve=True):
r = []
for endpoint_ in self._resolve_endpoint(endpoint, resolve):
r.append(self._socket.bind(endpoint_))
logger.debug("bound to %s (status=%s)", endpoint_, r[-1])
return r
def disconnect(self, endpoint, resolve=True):
r = []
for endpoint_ in self._resolve_endpoint(endpoint, resolve):
r.append(self._socket.disconnect(endpoint_))
            logger.debug("disconnected from %s (status=%s)", endpoint_, r[-1])
return r
def new_event(self, name, args, xheader=None):
event = Event(name, args, context=self._context)
if xheader:
event.header.update(xheader)
return event
def emit_event(self, event, timeout=None):
if self._debug:
logger.debug("--> %s", event)
if event.identity:
parts = list(event.identity or list())
parts.extend(["", event.pack()])
elif self._zmq_socket_type in (zmq.DEALER, zmq.ROUTER):
parts = ("", event.pack())
else:
parts = (event.pack(),)
self._send(parts, timeout)
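    # Incoming multipart messages may carry a ZMQ routing envelope: any frames
    # before the final msgpack blob (identity frames and, for ROUTER/DEALER,
    # an empty delimiter) are preserved so replies can be routed back.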
def recv(self, timeout=None):
parts = self._recv(timeout=timeout)
if len(parts) > 2:
identity = parts[0:-2]
blob = parts[-1]
elif len(parts) == 2:
identity = parts[0:-1]
blob = parts[-1]
else:
identity = None
blob = parts[0]
event = Event.unpack(get_pyzmq_frame_buffer(blob))
event.identity = identity
if self._debug:
logger.debug("<-- %s", event)
return event
def setsockopt(self, *args):
return self._socket.setsockopt(*args)
@property
def context(self):
return self._context

#!/usr/bin/env python
"""Django's command line utility."""
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "project.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)

"""Installer for hippybot
"""
import os
cwd = os.path.dirname(__file__)
__version__ = open(os.path.join(cwd, "hippybot", "version.txt"), "r").read().strip()
try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(
name="hippybot",
description="Python Hipchat bot",
long_description=open("README.rst").read(),
version=__version__,
author="Wes Mason",
author_email="wes[at]1stvamp[dot]org",
url="http://github.com/1stvamp/hippybot",
packages=find_packages(exclude=["ez_setup"]),
install_requires=open("requirements.txt").readlines(),
package_data={"hippybot": ["version.txt"]},
include_package_data=True,
extras_require={
"plugins": open("extras_requirements.txt").readlines(),
},
entry_points={
"console_scripts": [
"hippybot = hippybot.bot:main",
],
},
license="BSD",
)

#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "twobuntu.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)

# -*- coding: utf-8 -*-
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = []
operations = [
migrations.CreateModel(
name="Category",
fields=[
(
"id",
models.AutoField(
verbose_name="ID",
serialize=False,
auto_created=True,
primary_key=True,
),
),
(
"name",
models.CharField(
help_text=b"The name of the category.", max_length=40
),
),
(
"image",
models.ImageField(
help_text=b"A representative image.",
null=True,
upload_to=b"categories",
blank=True,
),
),
],
options={
"ordering": ("name",),
"verbose_name_plural": "Categories",
},
bases=(models.Model,),
),
]

import twitter
from django.contrib import messages
from django.contrib.auth.decorators import user_passes_test
from django.db import transaction
from django.shortcuts import redirect, render
from twobuntu.news.forms import AddItemForm
@user_passes_test(lambda u: u.is_staff)
def add(request):
"""
Add news items to the home page.
"""
if request.method == "POST":
form = AddItemForm(data=request.POST)
if form.is_valid():
item = form.save(commit=False)
item.reporter = request.user
try:
with transaction.atomic():
item.save()
except twitter.TwitterError as e:
messages.error(
request,
'Twitter error: "%s" Please try again.' % e.message[0]["message"],
)
else:
messages.info(request, "Your news item has been published!")
return redirect("home")
else:
form = AddItemForm()
return render(
request,
"form.html",
{
"title": "Add Item",
"form": form,
"description": "Enter the details for the news item below.",
"action": "Add",
},
)

# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (c) 2010-2015, 2degrees Limited.
# All Rights Reserved.
#
# This file is part of django-wsgi <https://github.com/2degrees/django-wsgi/>,
# which is subject to the provisions of the BSD at
# <http://dev.2degreesnetwork.com/p/2degrees-license.html>. A copy of the
# license should accompany this distribution. THIS SOFTWARE IS PROVIDED "AS IS"
# AND ANY AND ALL EXPRESS OR IMPLIED WARRANTIES ARE DISCLAIMED, INCLUDING, BUT
# NOT LIMITED TO, THE IMPLIED WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST
# INFRINGEMENT, AND FITNESS FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""
Exceptions raised by :mod:`django_wsgi`.
"""
__all__ = ("DjangoWSGIException", "ApplicationCallError")
class DjangoWSGIException(Exception):
"""Base class for exceptions raised by :mod:`django_wsgi`."""
pass
class ApplicationCallError(DjangoWSGIException):
"""
Exception raised when an embedded WSGI application was not called properly.
"""
pass

import boto
import boto.s3.connection
from django.conf import settings
import logging
log = logging.getLogger(__name__)
def get_s3_connection():
if settings.S3_ACCESS_KEY and settings.S3_SECRET_KEY and settings.S3_HOST:
log.debug(
"Connecting to {}, with secure connection is {}".format(
settings.S3_HOST, settings.S3_SECURE_CONNECTION
)
)
return boto.connect_s3(
aws_access_key_id=settings.S3_ACCESS_KEY,
aws_secret_access_key=settings.S3_SECRET_KEY,
host=settings.S3_HOST,
is_secure=settings.S3_SECURE_CONNECTION,
calling_format=boto.s3.connection.OrdinaryCallingFormat(),
)
return None
def get_or_create_bucket(s3_connection):
    # lookup() returns None for a missing bucket instead of raising,
    # unlike get_bucket() with its default validation
    bucket = s3_connection.lookup(settings.S3_BUCKET_NAME)
    if bucket is None:
        bucket = s3_connection.create_bucket(settings.S3_BUCKET_NAME)
    return bucket

from django.db import models
import datetime
from common.models import Project
class Stage(models.Model):
name = models.CharField(max_length=128)
project = models.ForeignKey(Project)
text = models.TextField(default="", blank=True)
link = models.URLField(default=None, blank=True, null=True)
state = models.CharField(max_length=24, default="info", blank=True)
weight = models.IntegerField(default=0)
    # pass the callable, not its result, so the timestamp is evaluated per save
    updated = models.DateTimeField(default=datetime.datetime.now)
def save(self, *args, **kwargs):
self.updated = datetime.datetime.now()
return super(Stage, self).save(*args, **kwargs)
def __str__(self):
return self.name

# -*- coding: utf-8 -*-
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
("testreport", "0026_testresult_launch_item_id"),
]
operations = [
migrations.AddField(
model_name="testplan",
name="filter",
field=models.TextField(
default=b"",
max_length=128,
verbose_name="Started by filter",
blank=True,
),
preserve_default=True,
),
migrations.AddField(
model_name="testplan",
name="main",
field=models.BooleanField(
default=False, verbose_name="Show in short statistic"
),
preserve_default=True,
),
]

import gevent
from gevent import monkey
monkey.patch_all()
import time
import smtplib
TEST_MAIL = """
Date: Wed, 30 Jul 2014 03:29:50 +0800 (CST)
From: =?utf-8?B?6IGU5oOz?= <[email protected]>
To: [email protected]
Message-ID: <766215193.1675381406662190229.JavaMail.root@USS-01>
Subject: =?utf-8?B?6IGU5oOz56e75Yqo5LqS6IGU572R5pyN5Yqh5rOo5YaM56Gu6K6k6YKu5Lu2?=
MIME-Version: 1.0
Content-Type: multipart/mixed;
boundary="----=_Part_335076_1490382245.1406662190222"
------=_Part_335076_1490382245.1406662190222
Content-Type: multipart/related;
boundary="----=_Part_335077_605133107.1406662190222"
------=_Part_335077_605133107.1406662190222
Content-Type: text/html;charset=utf-8
Content-Transfer-Encoding: quoted-printable
<html><head></head><body>=E5=B0=8A=E6=95=AC=E7=9A=84=E7=94=A8=E6=88=B7=EF=
=BC=9A<br/>=E6=82=A8=E5=A5=BD=EF=BC=81<br/>=E8=AF=B7=E7=82=B9=E5=87=BB=E8=
=81=94=E6=83=B3=E5=B8=90=E5=8F=B7=E7=A1=AE=E8=AE=A4=E9=93=BE=E6=8E=A5=EF=BC=
=8C=E4=BB=A5=E6=A0=A1=E9=AA=8C=E6=82=A8=E7=9A=84=E8=81=94=E6=83=B3=E5=B8=90=
=E5=8F=B7=EF=BC=9A<br/><a href=3D"https://passport.lenovo.com/wauthen/verif=
yuser?username=3D&vc=3DuHwf&accountid=3D1358934&lenovoid.=
cb=3D&lenovoid.realm=3Dthinkworld.lenovo.com&lang=3Dzh_CN&display=3D&lenovo=
id.ctx=3D&lenovoid.action=3D&lenovoid.lang=3D&lenovoid.uinfo=3D&lenovoid.vp=
=3D&verifyFlag=3Dnull">https://passport.lenovo.com/wauthen/verifyuser?usern=
ame=3o.org&vc=3DuHwf&accountid=3&lenovoid.cb=3D&lenov=
oid.realm=3Dthinkworld.lenovo.com&lang=3Dzh_CN&display=3D&lenovoid.ctx=3D&l=
enovoid.action=3D&lenovoid.lang=3D&lenovoid.uinfo=3D&lenovoid.vp=3D&verifyF=
lag=3Dnull</a><br/>=EF=BC=88=E5=A6=82=E6=9E=9C=E4=B8=8A=E9=9D=A2=E7=9A=84=
=E9=93=BE=E6=8E=A5=E6=97=A0=E6=B3=95=E7=82=B9=E5=87=BB=EF=BC=8C=E6=82=A8=E4=
=B9=9F=E5=8F=AF=E4=BB=A5=E5=A4=8D=E5=88=B6=E9=93=BE=E6=8E=A5=EF=BC=8C=E7=B2=
=98=E8=B4=B4=E5=88=B0=E6=82=A8=E6=B5=8F=E8=A7=88=E5=99=A8=E7=9A=84=E5=9C=B0=
=E5=9D=80=E6=A0=8F=E5=86=85=EF=BC=8C=E7=84=B6=E5=90=8E=E6=8C=89=E2=80=9C=E5=
=9B=9E=E8=BD=A6=E2=80=9D=E9=94=AE)=E3=80=82<br/>=E6=9D=A5=E8=87=AA=E8=81=94=
=E6=83=B3=E5=B8=90=E5=8F=B7</body></html>
------=_Part_335077_605133107.1406662190222--
------=_Part_335076_1490382245.1406662190222--
"""
def timeit(func):
def wrap(num, port, *args, **kwargs):
max_rqs = 0
for _ in range(3):
conns = [smtplib.SMTP(port=port) for x in range(num)]
list(map(lambda x: x.connect("127.0.0.1", port), conns))
start_at = time.time()
func(num, conns, **kwargs)
interval = time.time() - start_at
for con in conns:
try:
con.quit()
con.close()
except Exception:
pass
gevent.sleep(3)
rqs = num / interval
max_rqs = max(rqs, max_rqs)
return max_rqs
return wrap
@timeit
def helo(num, conns):
tasks = [gevent.spawn(x.helo) for x in conns]
gevent.joinall(tasks)
@timeit
def send(num, conns):
tasks = [
gevent.spawn(x.sendmail, "[email protected]", ["[email protected]"], TEST_MAIL) for x in conns
]
gevent.joinall(tasks)
def main(port, num):
print("%d %s %s" % (num, helo(num, port), send(num, port)))
if __name__ == "__main__":
import sys
try:
main(int(sys.argv[1]), int(sys.argv[2]))
except IndexError:
print("python concurrency.py <port> <connection number>")

#!/usr/bin/env python
import sys
import json
if sys.version_info < (3,):
def b(x):
return x
def s(x):
return x
else:
def b(x):
return bytes(x, "utf-8")
def s(x):
return x.decode("utf-8")
def parse_payload(payload):
if not isinstance(payload, str):
payload = " ".join(payload)
try:
json.loads(payload)
except ValueError:
kv = payload.split(" ", 1)
if len(kv) > 1:
payload = '{"%s": "%s"}' % (kv[0], kv[1])
else:
payload = "%s" % kv[0]
return payload
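# Example: parse_payload("foo bar") yields '{"foo": "bar"}', while input that
# already parses as JSON is returned unchanged.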
def requires_elements(xs, dictionary):
missing_values = []
for x in xs:
if x not in dictionary:
missing_values.append(x)
if missing_values:
err_msg = ", ".join(missing_values)
raise KeyError("Missing values %s" % (err_msg))

from flask_resty import Api, GenericModelView
from marshmallow import fields, Schema
import pytest
from sqlalchemy import Column, Integer, String
import helpers
# -----------------------------------------------------------------------------
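# These tests exercise CRUD on a model with a composite primary key: Widget is
# keyed by (id_1, id_2) and addressed in URLs as /widgets/<id_1>/<id_2>.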
@pytest.yield_fixture
def models(db):
class Widget(db.Model):
__tablename__ = "widgets"
id_1 = Column(Integer, primary_key=True)
id_2 = Column(Integer, primary_key=True)
name = Column(String, nullable=False)
db.create_all()
yield {
"widget": Widget,
}
db.drop_all()
@pytest.fixture
def schemas():
class WidgetSchema(Schema):
id_1 = fields.Integer(as_string=True)
id_2 = fields.Integer(as_string=True)
name = fields.String(required=True)
return {
"widget": WidgetSchema(),
}
@pytest.fixture(autouse=True)
def routes(app, models, schemas):
class WidgetViewBase(GenericModelView):
model = models["widget"]
schema = schemas["widget"]
id_fields = ("id_1", "id_2")
class WidgetListView(WidgetViewBase):
def get(self):
return self.list()
def post(self):
return self.create(allow_client_id=True)
class WidgetView(WidgetViewBase):
def get(self, id_1, id_2):
return self.retrieve((id_1, id_2))
def patch(self, id_1, id_2):
return self.update((id_1, id_2), partial=True)
def delete(self, id_1, id_2):
return self.destroy((id_1, id_2))
api = Api(app)
api.add_resource(
"/widgets",
WidgetListView,
WidgetView,
id_rule="<int:id_1>/<int:id_2>",
)
@pytest.fixture(autouse=True)
def data(db, models):
db.session.add_all(
(
models["widget"](id_1=1, id_2=2, name="Foo"),
models["widget"](id_1=1, id_2=3, name="Bar"),
models["widget"](id_1=4, id_2=5, name="Baz"),
)
)
db.session.commit()
# -----------------------------------------------------------------------------
def test_list(client):
response = client.get("/widgets")
assert response.status_code == 200
assert helpers.get_data(response) == [
{
"id_1": "1",
"id_2": "2",
"name": "Foo",
},
{
"id_1": "1",
"id_2": "3",
"name": "Bar",
},
{
"id_1": "4",
"id_2": "5",
"name": "Baz",
},
]
def test_retrieve(client):
response = client.get("/widgets/1/2")
assert response.status_code == 200
assert helpers.get_data(response) == {
"id_1": "1",
"id_2": "2",
"name": "Foo",
}
def test_create(client):
response = helpers.request(
client,
"POST",
"/widgets",
{
"id_1": "4",
"id_2": "6",
"name": "Qux",
},
)
assert response.status_code == 201
assert response.headers["Location"] == "http://localhost/widgets/4/6"
assert helpers.get_data(response) == {
"id_1": "4",
"id_2": "6",
"name": "Qux",
}
def test_update(client):
update_response = helpers.request(
client,
"PATCH",
"/widgets/1/2",
{
"id_1": "1",
"id_2": "2",
"name": "Qux",
},
)
assert update_response.status_code == 204
retrieve_response = client.get("/widgets/1/2")
assert retrieve_response.status_code == 200
assert helpers.get_data(retrieve_response) == {
"id_1": "1",
"id_2": "2",
"name": "Qux",
}
def test_destroy(client):
destroy_response = client.delete("/widgets/1/2")
assert destroy_response.status_code == 204
retrieve_response = client.get("/widgets/1/2")
assert retrieve_response.status_code == 404

from .dogpile import Dogpile

"""
RPi-Tron-Radio
Raspberry Pi Web-Radio with 2.8" TFT Touchscreen and Tron-styled graphical interface
GitHub: http://github.com/5volt-junkie/RPi-Tron-Radio
Blog: http://5volt-junkie.net
MIT License: see license.txt
"""
import pygame
from pygame.locals import *
import time
import datetime
import sys
import os
import glob
import subprocess
os.environ["SDL_FBDEV"] = "/dev/fb1"
os.environ["SDL_MOUSEDEV"] = "/dev/input/touchscreen"
os.environ["SDL_MOUSEDRV"] = "TSLIB"
#colors R G B
white = (255, 255, 255)
red = (255, 0, 0)
green = ( 0, 255, 0)
blue = ( 0, 0, 255)
black = ( 0, 0, 0)
cyan = ( 50, 255, 255)
magenta = (255, 0, 255)
yellow = (255, 255, 0)
orange = (255, 127, 0)
#screen size
width = 320
height = 240
size = (width, height)
screen = pygame.display.set_mode(size)
pygame.init()
#disable mouse cursor
pygame.mouse.set_visible(False)
#define font
font = pygame.font.Font(None, 25)
#screensaver
screensaver_timer = 5 #time until screensaver will be enabled, in minutes
screensaver = False
#load default skin
menu = 1
skin_number = 1
max_skins = 8
font_color = cyan
skin1 = pygame.image.load("skins/skin_tron_m1.png")
skin2 = pygame.image.load("skins/skin_tron_m2.png")
skin = skin1
screen.blit(skin, (0, 0))
#initial volume settings
subprocess.call('mpc volume 100' , shell=True)
reboot_label = font.render("rebooting...", 1, (font_color))
poweroff_label = font.render("shutting down", 1, (font_color))
song_title = " "
playlist = " "
def reboot():
screen.fill(black)
screen.blit(reboot_label, (10, 100))
pygame.display.flip()
time.sleep(5)
subprocess.call('mpc stop' , shell=True)
subprocess.call('reboot' , shell=True)
def poweroff():
screen.fill(black)
screen.blit(poweroff_label, (10, 100))
pygame.display.flip()
time.sleep(5)
subprocess.call('mpc stop' , shell=True)
subprocess.call('poweroff' , shell=True)
#copy playing title to favorite.txt
def favorite():
print(song_title)
    f = open('/var/www/favorite.txt', 'a')
f.write('-' + song_title + '\n')
f.close()
#function runs if touchscreen was touched (and screensaver is disabled)
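#the skins draw a 4x2 grid of buttons on the lower half of the 320x240 screen;
#the coordinate ranges below map a raw touch position to buttons 1-8
#(top row 1-4, bottom row 5-8)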
def on_touch():
#x_min x_max y_min y_max
if 13 <= pos[0] <= 75 and 121 <= pos[1] <= 173:
#print "button1 was pressed"
button(1)
if 90 <= pos[0] <= 152 and 121 <= pos[1] <= 173:
#print "button2 was pressed"
button(2)
if 167 <= pos[0] <= 229 and 121 <= pos[1] <= 173:
#print "button3 was pressed"
button(3)
if 244 <= pos[0] <= 306 and 121 <= pos[1] <= 173:
#print "button4 was pressed"
button(4)
if 13 <= pos[0] <= 75 and 181 <= pos[1] <= 233:
#print "button5 was pressed"
button(5)
if 90 <= pos[0] <= 152 and 181 <= pos[1] <= 233:
#print "button6 was pressed"
button(6)
if 167 <= pos[0] <= 229 and 181 <= pos[1] <= 233:
#print "button7 was pressed"
button(7)
if 244 <= pos[0] <= 306 and 181 <= pos[1] <= 233:
#print "button8 was pressed"
button(8)
#which button (and which menu) was pressed on touch
def button(number):
global menu
if menu == 1:
if number == 1:
subprocess.call('mpc play' , shell=True)
#print "play"
if number == 2:
subprocess.call('mpc pause' , shell=True)
#print "pause"
if number == 3:
subprocess.call('mpc volume +5' , shell=True)
#print "vol +x"
if number == 4:
subprocess.call('mpc volume 0' , shell=True)
#print "vol 0"
if number == 5:
subprocess.call('mpc prev' , shell=True)
#print "prev"
if number == 6:
subprocess.call('mpc next' , shell=True)
#print "next"
if number == 7:
subprocess.call('mpc volume -5' , shell=True)
#print "vol -x"
if number == 8:
#print "go to menu 2"
menu = 2
update_screen()
return
if menu == 2:
if number == 1:
favorite()
if number == 2:
#print "switch skin"
global skin_number
skin_number = skin_number+1
#print skin_number
update_screen()
if number == 3:
#print "run in background"
pygame.quit()
sys.exit()
if number == 4:
#print "quit radio"
subprocess.call('mpc stop', shell=True)
pygame.quit()
sys.exit()
if number == 5:
print("power off")
poweroff()
if number == 6:
print("reboot")
reboot()
if number == 7:
#print "update screen"
update_screen()
if number == 8:
#print "go to menu 1"
menu = 1
update_screen()
return
#function to update screen
def update_screen():
global skin_number
if skin_number == 9:
skin_number = 1
if skin_number == 1:
skin1 = pygame.image.load("skins/skin_tron_m1.png")
skin2 = pygame.image.load("skins/skin_tron_m2.png")
font_color = cyan
if skin_number == 2:
skin1 = pygame.image.load("skins/skin_blue_m1.png")
skin2 = pygame.image.load("skins/skin_blue_m2.png")
font_color = blue
if skin_number == 3:
skin1 = pygame.image.load("skins/skin_green_m1.png")
skin2 = pygame.image.load("skins/skin_green_m2.png")
font_color = green
if skin_number == 4:
skin1 = pygame.image.load("skins/skin_magenta_m1.png")
skin2 = pygame.image.load("skins/skin_magenta_m2.png")
font_color = magenta
if skin_number == 5:
skin1 = pygame.image.load("skins/skin_orange_m1.png")
skin2 = pygame.image.load("skins/skin_orange_m2.png")
font_color = orange
if skin_number == 6:
skin1 = pygame.image.load("skins/skin_red_m1.png")
skin2 = pygame.image.load("skins/skin_red_m2.png")
font_color = red
if skin_number == 7:
skin1 = pygame.image.load("skins/skin_white_m1.png")
skin2 = pygame.image.load("skins/skin_white_m2.png")
font_color = white
if skin_number == 8:
skin1 = pygame.image.load("skins/skin_yellow_m1.png")
skin2 = pygame.image.load("skins/skin_yellow_m2.png")
font_color = yellow
global menu
if screensaver == False:
current_time = datetime.datetime.now().strftime('%H:%M %d.%m.%Y')
time_label = font.render(current_time, 1, (font_color))
if menu == 1:
skin = skin1
screen.blit(skin, (0, 0))
            lines = subprocess.check_output('mpc current', shell=True).decode("utf-8").split(":")
if len(lines) == 1:
line1 = lines[0]
line1 = line1[:-1]
station_label = font.render("Station: no data", 1, (font_color))
else:
line1 = lines[0]
line2 = lines[1]
line1 = line1[:30]
station_label = font.render('Station: ' + line1 + '.', 1, (font_color))
            lines = subprocess.check_output('mpc -f [%title%]', shell=True).decode("utf-8").split("\n")
line1 = lines[0]
if line1.startswith("volume"):
title_label = font.render("Title: no data! Try with PLAY!", 1, (font_color))
else:
line1 = lines[0]
line2 = lines[1]
global song_title
song_title = line1
line1 = line1[:30]
title_label = font.render(line1 + '.', 1, (font_color))
title = font.render("Now playing:", 1, (font_color))
screen.blit(skin, (0, 0))
screen.blit(station_label, (23, 15))
screen.blit(title, (23, 40))
screen.blit(title_label, (23, 60))
screen.blit(time_label, (160, 90))
            lines = subprocess.check_output('mpc volume', shell=True).decode("utf-8").split("\n")
line1 = lines[0]
volume_label = font.render(line1, 1, (font_color))
screen.blit(volume_label, (23, 90))
pygame.display.flip()
if menu == 2:
skin = skin2
screen.blit(skin, (0, 0))
#get and display ip
            ip = subprocess.check_output('hostname -I', shell=True).decode("utf-8").strip()
ip_label = font.render('IP: ' + ip, 1, (font_color))
screen.blit(ip_label, (23, 15))
#get and display cpu temp
            cpu_temp = subprocess.check_output('/opt/vc/bin/vcgencmd measure_temp', shell=True).decode("utf-8").strip()
temp = font.render('cpu ' + cpu_temp, 1, (font_color))
screen.blit(temp, (23, 35))
#get current time
screen.blit(time_label, (90, 90))
pygame.display.flip()
if screensaver == True:
screen.fill(white)
pygame.display.flip()
minutes = 0
#userevent fires every 60000 ms (one minute), used for the screensaver timer
pygame.time.set_timer(USEREVENT +1, 60000)
subprocess.call('mpc play' , shell=True)
update_screen()
running = True
while running:
for event in pygame.event.get():
if event.type == USEREVENT +1:
minutes += 1
if event.type == pygame.QUIT:
print("Quit radio")
pygame.quit()
sys.exit()
if event.type == pygame.KEYDOWN:
if event.key == K_ESCAPE:
print("Quit radio")
pygame.quit()
sys.exit()
#if screensaver is enabled and the screen was touched,
#just disable screensaver, reset timer and update screen
#no button state will be checked
if event.type == pygame.MOUSEBUTTONDOWN and screensaver == True:
minutes = 0
subprocess.call('echo 0 | sudo tee /sys/class/backlight/*/bl_power' , shell=True)
screensaver = False
update_screen()
break
#if screen was touched and screensaver is disabled,
#get position of touched button, call on_touch(), reset timer and update screen
if event.type == pygame.MOUSEBUTTONDOWN and screensaver == False:
pos = (pygame.mouse.get_pos() [0], pygame.mouse.get_pos() [1])
minutes = 0
on_touch()
update_screen()
#enable screensaver on timer overflow
if minutes > screensaver_timer:
screensaver = True
subprocess.call('echo 1 | sudo tee /sys/class/backlight/*/bl_power' , shell=True)
update_screen()
update_screen()
time.sleep(0.1)

# coding:utf8
"""
Created on 2013-7-10
memcached client
@author: lan (www.9miao.com)
"""
import memcache
class MemConnError(Exception):
""" """
def __str__(self):
return "memcache connect error"
class MemClient:
"""memcached"""
def __init__(self, timeout=0):
""" """
self._hostname = ""
self._urls = []
self.connection = None
def connect(self, urls, hostname):
"""memcached connect"""
self._hostname = hostname
self._urls = urls
self.connection = memcache.Client(self._urls, debug=0)
if not self.connection.set("__testkey__", 1):
raise MemConnError()
def produceKey(self, keyname):
""" """
if isinstance(keyname, str):
return "".join([self._hostname, ":", keyname])
else:
raise "type error"
def get(self, key):
""" """
key = self.produceKey(key)
return self.connection.get(key)
def get_multi(self, keys):
""" """
keynamelist = [self.produceKey(keyname) for keyname in keys]
olddict = self.connection.get_multi(keynamelist)
newdict = dict(
list(
zip(
[keyname.split(":")[-1] for keyname in list(olddict.keys())],
list(olddict.values()),
)
)
)
return newdict
def set(self, keyname, value):
""" """
key = self.produceKey(keyname)
result = self.connection.set(key, value)
        if not result:  # if the write failed
            self.connect(self._urls, self._hostname)  # reconnect and retry
return self.connection.set(key, value)
return result
def set_multi(self, mapping):
""" """
newmapping = dict(
list(
zip(
[self.produceKey(keyname) for keyname in list(mapping.keys())],
list(mapping.values()),
)
)
)
result = self.connection.set_multi(newmapping)
        if result:  # set_multi returns the keys that failed to store
            self.connect(self._urls, self._hostname)  # reconnect and retry
return self.connection.set_multi(newmapping)
return result
def incr(self, key, delta):
""" """
key = self.produceKey(key)
return self.connection.incr(key, delta)
def delete(self, key):
""" """
key = self.produceKey(key)
return self.connection.delete(key)
def delete_multi(self, keys):
""" """
keys = [self.produceKey(key) for key in keys]
return self.connection.delete_multi(keys)
def flush_all(self):
""" """
self.connection.flush_all()
mclient = MemClient()

# coding:utf8
"""
Created on 2013-7-31
@author: lan (www.9miao.com)
"""
from firefly.dbentrust.dbpool import dbpool
from firefly.dbentrust.madminanager import MAdminManager
from firefly.dbentrust import mmode
from firefly.dbentrust.memclient import mclient
import time
if __name__ == "__main__":
# CREATE TABLE `tb_register` (
# `id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'id',
    # `username` varchar(255) CHARACTER SET utf8 COLLATE utf8_bin NOT NULL DEFAULT '' COMMENT 'username',
    # `password` varchar(255) CHARACTER SET utf8 COLLATE utf8_bin NOT NULL DEFAULT '' COMMENT 'user password',
# PRIMARY KEY (`id`,`username`)
# ) ENGINE=MyISAM AUTO_INCREMENT=1 DEFAULT CHARSET=utf8
#
hostname = "localhost"
username = "root"
password = "111"
dbname = "test"
charset = "utf8"
tablename = "test1" #
aa = {
"host": "localhost",
"user": "root",
"passwd": "111",
"db": "test",
"port": 3306,
"charset": "utf8",
}
dbpool.initPool(**aa)
mclient.connect(["127.0.0.1:11211"], "test")
mmanager = MAdminManager()
m1 = mmode.MAdmin("test1", "id", incrkey="id")
m1.insert()
print(m1.get("_incrvalue"))
m2 = mmode.MAdmin("test1", "id", incrkey="id")
print(m2.get("_incrvalue"))

""" Really basic gatttool (BlueZ) wrapper
Based on https://github.com/stratosinc/pygatt
Part of https://github.com/ALPSquid/thebutton-monitor
"""
import pexpect
class connect:
"""Use to initiate a connection to a GATT device
Example: bt_device = gatt.connect('AB:CD:EF:01:23:45')
"""
def __init__(self, address):
self.address = "" # Connected bluetooth device address. Assigned from connect()
self.conn = None # pexpect.spawn() object for the gatttool command
self.connect(address)
def connect(self, address, adapter="hci0"):
"""Open an interactive connection to a bluetooth device
:param address: Bluetooth device address
:param adapter: Bluetooth adapter to use. Default: hci0
"""
if self.conn is None:
self.address = address
cmd = " ".join(["gatttool", "-b", address, "-i", adapter, "-I"])
self.conn = pexpect.spawn(cmd)
self.conn.expect(r"\[LE\]>", timeout=1)
self.conn.sendline("connect")
try:
self.conn.expect(r"Connection successful", timeout=10)
print(("Connected to " + address))
except pexpect.TIMEOUT:
raise Exception("Unable to connect to device")
else:
raise Exception(
"Device already connected! Call disconnect before attempting a new connection"
)
def reconnect(self):
"""Check and attempt to reconnect to device if necessary
:return: True if a reconnect was performed
"""
try:
self.conn.expect(r"Disconnected", timeout=0.1)
self.conn.sendline("connect")
try:
self.conn.expect(r"Connection successful", timeout=10)
print(("Reconnected to device: " + self.address))
except pexpect.TIMEOUT:
# Continue and try to reconnect next time
print(("Lost connection to device: " + self.address))
return True
except pexpect.TIMEOUT:
# No need to reconnect
return False
def disconnect(self):
"""Disconnect from current bluetooth device"""
if self.conn is not None:
self.conn.sendline("exit")
self.conn = None
print(("Disconnected from " + self.address))
def write(self, handle, value):
"""Write a value to the specified handle
:param handle: address to write to. e.g. 0016
:param value: value to write
"""
self.send(" ".join(["char-write-cmd", "0x" + handle, value]))
def read(self, handle):
"""Read from the specified handle
:param handle: address to read from. e.g. 0016
"""
self.send("char-read-hnd 0x" + handle, r"descriptor: .* \r", timeout=5)
val = " ".join(self.conn.after.decode("utf-8").split()[1:])
return val
def send(self, cmd, expect=None, timeout=5):
"""Send command to device. Attempt a reconnect if disconnected
:param cmd: Command to send
"""
self.conn.sendline(cmd)
if expect is not None:
try:
self.conn.expect(expect, timeout)
except pexpect.TIMEOUT:
if self.reconnect():
self.conn.sendline(cmd)
else:
if self.reconnect():
self.conn.sendline(cmd)

# -*- coding: utf-8 -*-
from django.db import models, migrations
import wagtail.wagtailcore.fields
class Migration(migrations.Migration):
dependencies = [
("puput", "0001_initial"),
]
operations = [
migrations.AlterField(
model_name="blogpage",
name="description",
field=models.CharField(
max_length=255,
help_text="The blog description that will appear under the title.",
verbose_name="Description",
blank=True,
),
),
migrations.AlterField(
model_name="category",
name="description",
field=models.CharField(
max_length=500, verbose_name="Description", blank=True
),
),
migrations.AlterField(
model_name="category",
name="name",
field=models.CharField(
max_length=80, unique=True, verbose_name="Category name"
),
),
migrations.AlterField(
model_name="category",
name="parent",
field=models.ForeignKey(
to="puput.Category",
related_name="children",
null=True,
verbose_name="Parent category",
blank=True,
),
),
migrations.AlterField(
model_name="entrypage",
name="excerpt",
field=wagtail.wagtailcore.fields.RichTextField(
help_text="Entry excerpt to be displayed on entries list. If this field is not filled, a truncate version of body text will be used.",
verbose_name="excerpt",
blank=True,
),
),
]

"""
==================================
Map two radars to a Cartesian grid
==================================
Map the reflectivity field of two nearby ARM XSARP radars from antenna
coordinates to a Cartesian grid.
"""
print(__doc__)
# Author: Jonathan J. Helmus ([email protected])
# License: BSD 3 clause
import matplotlib.pyplot as plt
import pyart
# read in the data from both XSAPR radars
XSAPR_SW_FILE = "swx_20120520_0641.nc"
XSAPR_SE_FILE = "sex_20120520_0641.nc"
radar_sw = pyart.io.read_cfradial(XSAPR_SW_FILE)
radar_se = pyart.io.read_cfradial(XSAPR_SE_FILE)
# filter out gates with reflectivity > 100 from both radars
gatefilter_se = pyart.filters.GateFilter(radar_se)
gatefilter_se.exclude_above("corrected_reflectivity_horizontal", 100)
gatefilter_sw = pyart.filters.GateFilter(radar_sw)
gatefilter_sw.exclude_above("corrected_reflectivity_horizontal", 100)
# perform Cartesian mapping, limit to the reflectivity field.
grid = pyart.map.grid_from_radars(
(radar_se, radar_sw),
gatefilters=(gatefilter_se, gatefilter_sw),
grid_shape=(1, 201, 201),
grid_limits=((1000, 1000), (-50000, 40000), (-60000, 40000)),
grid_origin=(36.57861, -97.363611),
fields=["corrected_reflectivity_horizontal"],
)
# create the plot
fig = plt.figure()
ax = fig.add_subplot(111)
ax.imshow(
grid.fields["corrected_reflectivity_horizontal"]["data"][0],
origin="lower",
extent=(-60, 40, -50, 40),
vmin=0,
vmax=48,
)
plt.show()

"""
pyart.aux_io.radx
=================
Reading files using Radx to first convert the file to Cf.Radial format
.. autosummary::
:toctree: generated/
read_radx
"""
import os
import tempfile
import subprocess
from ..io.cfradial import read_cfradial
from ..io.common import _test_arguments
def read_radx(filename, **kwargs):
"""
Read a file by first converting it to Cf/Radial using RadxConvert.
Parameters
----------
filename : str
Name of file to read using RadxConvert.
Returns
-------
radar : Radar
Radar object.
"""
# test for non empty kwargs
_test_arguments(kwargs)
tmpfile = tempfile.mkstemp(suffix=".nc", dir=".")[1]
head, tail = os.path.split(tmpfile)
try:
subprocess.check_call(
[
"RadxConvert",
"-const_ngates",
"-outdir",
head,
"-outname",
tail,
"-f",
filename,
]
)
if not os.path.isfile(tmpfile):
raise IOError(
"RadxConvert failed to create a file, upgrading to the "
" latest version of Radx may be necessary."
)
radar = read_cfradial(tmpfile)
finally:
os.remove(tmpfile)
return radar
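# Example usage (illustrative file name; requires the RadxConvert binary to be
# installed and on the system PATH):
#     radar = read_radx("cfrad.20110510_100000.000_to_20110510_100100.000_radar.nc")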

"""
pyart.exceptions
================
Custom Py-ART exceptions.
.. autosummary::
:toctree: generated/
MissingOptionalDependency
DeprecatedAttribute
DeprecatedFunctionName
_deprecated_alias
"""
import warnings
class MissingOptionalDependency(Exception):
"""Exception raised when a optional dependency is needed by not found."""
pass
class DeprecatedAttribute(DeprecationWarning):
"""Warning category for an attribute which has been renamed/moved."""
pass
class DeprecatedFunctionName(DeprecationWarning):
"""Warning category for a function which has been renamed/moved."""
pass
def _deprecated_alias(func, old_name, new_name):
"""
A function for creating an alias to a renamed or moved function.
Parameters
----------
func : func
The function which has been renamed or moved.
old_name, new_name : str
Name of the function before and after it was moved or renamed
(with namespace if changed).
Returns
-------
wrapper : func
        A wrapper version of func, which issues a DeprecatedFunctionName
        warning when called.
"""
def wrapper(*args, **kwargs):
warnings.warn(
(
"{0} has been deprecated and will be removed in future "
+ "versions of Py-ART, pleases use {1}. "
).format(old_name, new_name),
category=DeprecatedFunctionName,
)
return func(*args, **kwargs)
return wrapper
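# Example with hypothetical names, keeping an old import path working while
# warning callers:
#     read_legacy = _deprecated_alias(read_new, "pyart.io.read_legacy", "pyart.io.read_new")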

"""
pyart.io.nexrad_archive
=======================
Functions for reading NEXRAD Level II Archive files.
.. autosummary::
:toctree: generated/
:template: dev_template.rst
_NEXRADLevel2StagedField
.. autosummary::
:toctree: generated/
read_nexrad_archive
_find_range_params
_find_scans_to_interp
_interpolate_scan
"""
import warnings
import numpy as np
from ..config import FileMetadata, get_fillvalue
from ..core.radar import Radar
from .common import make_time_unit_str, _test_arguments, prepare_for_read
from .nexrad_level2 import NEXRADLevel2File
from ..lazydict import LazyLoadDict
from .nexrad_common import get_nexrad_location
def read_nexrad_archive(
filename,
field_names=None,
additional_metadata=None,
file_field_names=False,
exclude_fields=None,
delay_field_loading=False,
station=None,
scans=None,
linear_interp=True,
**kwargs
):
"""
Read a NEXRAD Level 2 Archive file.
Parameters
----------
filename : str
Filename of NEXRAD Level 2 Archive file. The files hosted by
        the NOAA National Climatic Data Center [1]_ as well as on the
UCAR THREDDS Data Server [2]_ have been tested. Other NEXRAD
        Level 2 Archive files may or may not work. Message type 1 files
and message type 31 files are supported.
field_names : dict, optional
Dictionary mapping NEXRAD moments to radar field names. If a
data type found in the file does not appear in this dictionary or has
a value of None it will not be placed in the radar.fields dictionary.
A value of None, the default, will use the mapping defined in the
metadata configuration file.
additional_metadata : dict of dicts, optional
Dictionary of dictionaries to retrieve metadata from during this read.
This metadata is not used during any successive file reads unless
explicitly included. A value of None, the default, will not
        introduce any additional metadata and the file specific or default
metadata as specified by the metadata configuration file will be used.
file_field_names : bool, optional
True to use the NEXRAD field names for the field names. If this
case the field_names parameter is ignored. The field dictionary will
likely only have a 'data' key, unless the fields are defined in
`additional_metadata`.
exclude_fields : list or None, optional
List of fields to exclude from the radar object. This is applied
after the `file_field_names` and `field_names` parameters.
delay_field_loading : bool, optional
True to delay loading of field data from the file until the 'data'
key in a particular field dictionary is accessed. In this case
the field attribute of the returned Radar object will contain
LazyLoadDict objects not dict objects.
station : str or None, optional
Four letter ICAO name of the NEXRAD station used to determine the
location in the returned radar object. This parameter is only
        used when the location is not contained in the file, which occurs
in older NEXRAD message 1 files.
scans : list or None, optional
Read only specified scans from the file. None (the default) will read
all scans.
linear_interp : bool, optional
True (the default) to perform linear interpolation between valid pairs
        of gates in low resolution rays in files with mixed resolution rays.
False will perform a nearest neighbor interpolation. This parameter is
not used if the resolution of all rays in the file or requested sweeps
is constant.
Returns
-------
radar : Radar
Radar object containing all moments and sweeps/cuts in the volume.
Gates not collected are masked in the field data.
References
----------
.. [1] http://www.ncdc.noaa.gov/
.. [2] http://thredds.ucar.edu/thredds/catalog.html
"""
# test for non empty kwargs
_test_arguments(kwargs)
# create metadata retrieval object
filemetadata = FileMetadata(
"nexrad_archive",
field_names,
additional_metadata,
file_field_names,
exclude_fields,
)
# open the file and retrieve scan information
nfile = NEXRADLevel2File(prepare_for_read(filename))
scan_info = nfile.scan_info(scans)
# time
time = filemetadata("time")
time_start, _time = nfile.get_times(scans)
time["data"] = _time
time["units"] = make_time_unit_str(time_start)
# range
_range = filemetadata("range")
first_gate, gate_spacing, last_gate = _find_range_params(scan_info, filemetadata)
_range["data"] = np.arange(first_gate, last_gate, gate_spacing, "float32")
_range["meters_to_center_of_first_gate"] = float(first_gate)
_range["meters_between_gates"] = float(gate_spacing)
# metadata
metadata = filemetadata("metadata")
metadata["original_container"] = "NEXRAD Level II"
# scan_type
scan_type = "ppi"
# latitude, longitude, altitude
latitude = filemetadata("latitude")
longitude = filemetadata("longitude")
altitude = filemetadata("altitude")
if nfile._msg_type == "1" and station is not None:
lat, lon, alt = get_nexrad_location(station)
else:
lat, lon, alt = nfile.location()
latitude["data"] = np.array([lat], dtype="float64")
longitude["data"] = np.array([lon], dtype="float64")
altitude["data"] = np.array([alt], dtype="float64")
# sweep_number, sweep_mode, fixed_angle, sweep_start_ray_index
# sweep_end_ray_index
sweep_number = filemetadata("sweep_number")
sweep_mode = filemetadata("sweep_mode")
sweep_start_ray_index = filemetadata("sweep_start_ray_index")
sweep_end_ray_index = filemetadata("sweep_end_ray_index")
if scans is None:
nsweeps = int(nfile.nscans)
else:
nsweeps = len(scans)
sweep_number["data"] = np.arange(nsweeps, dtype="int32")
sweep_mode["data"] = np.array(nsweeps * ["azimuth_surveillance"], dtype="S")
rays_per_scan = [s["nrays"] for s in scan_info]
sweep_end_ray_index["data"] = np.cumsum(rays_per_scan, dtype="int32") - 1
rays_per_scan.insert(0, 0)
sweep_start_ray_index["data"] = np.cumsum(rays_per_scan[:-1], dtype="int32")
# azimuth, elevation, fixed_angle
azimuth = filemetadata("azimuth")
elevation = filemetadata("elevation")
fixed_angle = filemetadata("fixed_angle")
azimuth["data"] = nfile.get_azimuth_angles(scans)
elevation["data"] = nfile.get_elevation_angles(scans).astype("float32")
fixed_angle["data"] = nfile.get_target_angles(scans)
# fields
max_ngates = len(_range["data"])
available_moments = set([m for scan in scan_info for m in scan["moments"]])
interpolate = _find_scans_to_interp(
scan_info, first_gate, gate_spacing, filemetadata
)
fields = {}
for moment in available_moments:
field_name = filemetadata.get_field_name(moment)
if field_name is None:
continue
dic = filemetadata(field_name)
dic["_FillValue"] = get_fillvalue()
if delay_field_loading and moment not in interpolate:
dic = LazyLoadDict(dic)
data_call = _NEXRADLevel2StagedField(nfile, moment, max_ngates, scans)
dic.set_lazy("data", data_call)
else:
mdata = nfile.get_data(moment, max_ngates, scans=scans)
if moment in interpolate:
interp_scans = interpolate[moment]
warnings.warn(
"Gate spacing is not constant, interpolating data in "
+ "scans %s for moment %s." % (interp_scans, moment),
UserWarning,
)
for scan in interp_scans:
idx = scan_info[scan]["moments"].index(moment)
moment_ngates = scan_info[scan]["ngates"][idx]
start = sweep_start_ray_index["data"][scan]
end = sweep_end_ray_index["data"][scan]
_interpolate_scan(mdata, start, end, moment_ngates, linear_interp)
dic["data"] = mdata
fields[field_name] = dic
# instrument_parameters
nyquist_velocity = filemetadata("nyquist_velocity")
unambiguous_range = filemetadata("unambiguous_range")
nyquist_velocity["data"] = nfile.get_nyquist_vel(scans).astype("float32")
unambiguous_range["data"] = nfile.get_unambigous_range(scans).astype("float32")
instrument_parameters = {
"unambiguous_range": unambiguous_range,
"nyquist_velocity": nyquist_velocity,
}
nfile.close()
return Radar(
time,
_range,
fields,
metadata,
scan_type,
latitude,
longitude,
altitude,
sweep_number,
sweep_mode,
fixed_angle,
sweep_start_ray_index,
sweep_end_ray_index,
azimuth,
elevation,
instrument_parameters=instrument_parameters,
)
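# Example usage (illustrative file and station names):
#     radar = read_nexrad_archive("KATX20130717_195021_V06", station="KATX",
#                                 delay_field_loading=True)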
def _find_range_params(scan_info, filemetadata):
"""Return range parameters, first_gate, gate_spacing, last_gate."""
min_first_gate = 999999
min_gate_spacing = 999999
max_last_gate = 0
for scan_params in scan_info:
ngates = scan_params["ngates"][0]
for i, moment in enumerate(scan_params["moments"]):
if filemetadata.get_field_name(moment) is None:
# moment is not read, skip
continue
first_gate = scan_params["first_gate"][i]
gate_spacing = scan_params["gate_spacing"][i]
last_gate = first_gate + gate_spacing * (ngates - 0.5)
min_first_gate = min(min_first_gate, first_gate)
min_gate_spacing = min(min_gate_spacing, gate_spacing)
max_last_gate = max(max_last_gate, last_gate)
return min_first_gate, min_gate_spacing, max_last_gate
def _find_scans_to_interp(scan_info, first_gate, gate_spacing, filemetadata):
"""Return a dict indicating what moments/scans need interpolation."""
moments = set([m for scan in scan_info for m in scan["moments"]])
interpolate = dict([(moment, []) for moment in moments])
for scan_num, scan in enumerate(scan_info):
for moment in moments:
if moment not in scan["moments"]:
continue
if filemetadata.get_field_name(moment) is None:
# moment is not read, skip
continue
index = scan["moments"].index(moment)
first = scan["first_gate"][index]
spacing = scan["gate_spacing"][index]
if first != first_gate or spacing != gate_spacing:
interpolate[moment].append(scan_num)
# for proper interpolation the gate spacing of the scan to be
# interpolated should be 1/4th the spacing of the radar
assert spacing == gate_spacing * 4
# and the first gate for the scan should be one and half times
# the radar spacing past the radar first gate
assert first_gate + 1.5 * gate_spacing == first
# remove moments with no scans needing interpolation
interpolate = dict([(k, v) for k, v in list(interpolate.items()) if len(v) != 0])
return interpolate
def _interpolate_scan(mdata, start, end, moment_ngates, linear_interp=True):
"""Interpolate a single NEXRAD moment scan from 1000 m to 250 m."""
# This interpolation scheme is only valid for NEXRAD data where a 4:1
# (1000 m : 250 m) interpolation is needed.
#
# The scheme here performs a linear interpolation between pairs of gates
# in a ray when the both of the gates are not masked (below threshold).
# When one of the gates is masked the interpolation changes to a nearest
# neighbor interpolation. Nearest neighbor is also performed at the end
# points until the new range bin would be centered beyond half of the range
# spacing of the original range.
#
# Nearest neighbor interpolation is performed when linear_interp is False,
# this is equivalent to repeating each gate four times in each ray.
#
# No transformation of the raw data is performed prior to interpolation, so
# reflectivity will be interpolated in dB units, velocity in m/s, etc,
# this may not be the best method for interpolation.
#
# This method was adapted from Radx
for ray_num in range(start, end + 1):
ray = mdata[ray_num].copy()
# repeat each gate value 4 times
interp_ngates = 4 * moment_ngates
ray[:interp_ngates] = np.repeat(ray[:moment_ngates], 4)
if linear_interp:
# linear interpolate
for i in range(2, interp_ngates - 4, 4):
gate_val = ray[i]
next_val = ray[i + 4]
if np.ma.is_masked(gate_val) or np.ma.is_masked(next_val):
continue
delta = (next_val - gate_val) / 4.0
ray[i + 0] = gate_val + delta * 0.5
ray[i + 1] = gate_val + delta * 1.5
ray[i + 2] = gate_val + delta * 2.5
ray[i + 3] = gate_val + delta * 3.5
mdata[ray_num] = ray[:]
class _NEXRADLevel2StagedField(object):
"""
A class to facilitate on demand loading of field data from a Level 2 file.
"""
def __init__(self, nfile, moment, max_ngates, scans):
"""initialize."""
self.nfile = nfile
self.moment = moment
self.max_ngates = max_ngates
self.scans = scans
def __call__(self):
"""Return the array containing the field data."""
return self.nfile.get_data(self.moment, self.max_ngates, scans=self.scans)

"""
pyart.io.uf
===========
Reading of Universal format (UF) files
.. autosummary::
:toctree: generated/
read_uf
_get_instrument_parameters
"""
import warnings
import numpy as np
from netCDF4 import date2num
from ..config import FileMetadata, get_fillvalue
from ..core.radar import Radar
from .common import make_time_unit_str, _test_arguments, prepare_for_read
from .uffile import UFFile
_LIGHT_SPEED = 2.99792458e8 # speed of light in meters per second
_UF_SWEEP_MODES = {
0: "calibration",
1: "ppi",
2: "coplane",
3: "rhi",
4: "vpt",
5: "target",
6: "manual",
7: "idle",
}
_SWEEP_MODE_STR = {
"calibration": "calibration",
"ppi": "azimuth_surveillance",
"coplane": "coplane",
"rhi": "rhi",
"vpt": "vertical_pointing",
"target": "pointing",
"manual": "manual",
"idle": "idle",
}
def read_uf(
filename,
field_names=None,
additional_metadata=None,
file_field_names=False,
exclude_fields=None,
delay_field_loading=False,
**kwargs
):
"""
Read a UF File.
Parameters
----------
filename : str or file-like
Name of Universal format file to read data from.
field_names : dict, optional
Dictionary mapping UF data type names to radar field names. If a
data type found in the file does not appear in this dictionary or has
a value of None it will not be placed in the radar.fields dictionary.
A value of None, the default, will use the mapping defined in the
Py-ART configuration file.
additional_metadata : dict of dicts, optional
Dictionary of dictionaries to retrieve metadata from during this read.
This metadata is not used during any successive file reads unless
explicitly included. A value of None, the default, will not
        introduce any additional metadata and the file specific or default
metadata as specified by the Py-ART configuration file will be used.
file_field_names : bool, optional
True to force the use of the field names from the file in which
        case the `field_names` parameter is ignored. False will use the
`field_names` parameter to rename fields.
exclude_fields : list or None, optional
List of fields to exclude from the radar object. This is applied
after the `file_field_names` and `field_names` parameters.
delay_field_loading : bool
This option is not implemented in the function but included for
compatibility.
Returns
-------
radar : Radar
Radar object.
"""
# test for non empty kwargs
_test_arguments(kwargs)
# create metadata retrieval object
filemetadata = FileMetadata(
"uf", field_names, additional_metadata, file_field_names, exclude_fields
)
# Open UF file and get handle
ufile = UFFile(prepare_for_read(filename))
first_ray = ufile.rays[0]
# time
dts = ufile.get_datetimes()
units = make_time_unit_str(min(dts))
time = filemetadata("time")
time["units"] = units
time["data"] = date2num(dts, units).astype("float32")
# range
_range = filemetadata("range")
# assume that the number of gates and spacing from the first ray is
# representative of the entire volume
field_header = first_ray.field_headers[0]
ngates = field_header["nbins"]
step = field_header["range_spacing_m"]
# this gives distances to the center of each gate, remove step/2 for start
start = (
field_header["range_start_km"] * 1000.0
+ field_header["range_start_m"]
+ step / 2.0
)
_range["data"] = np.arange(ngates, dtype="float32") * step + start
_range["meters_to_center_of_first_gate"] = start
_range["meters_between_gates"] = step
# latitude, longitude and altitude
latitude = filemetadata("latitude")
longitude = filemetadata("longitude")
altitude = filemetadata("altitude")
lat, lon, height = first_ray.get_location()
latitude["data"] = np.array([lat], dtype="float64")
longitude["data"] = np.array([lon], dtype="float64")
altitude["data"] = np.array([height], dtype="float64")
# metadata
metadata = filemetadata("metadata")
metadata["original_container"] = "UF"
metadata["site_name"] = first_ray.mandatory_header["site_name"]
metadata["radar_name"] = first_ray.mandatory_header["radar_name"]
# sweep_start_ray_index, sweep_end_ray_index
sweep_start_ray_index = filemetadata("sweep_start_ray_index")
sweep_end_ray_index = filemetadata("sweep_end_ray_index")
sweep_start_ray_index["data"] = ufile.first_ray_in_sweep
sweep_end_ray_index["data"] = ufile.last_ray_in_sweep
# sweep number
sweep_number = filemetadata("sweep_number")
sweep_number["data"] = np.arange(ufile.nsweeps, dtype="int32")
# sweep_type
scan_type = _UF_SWEEP_MODES[first_ray.mandatory_header["sweep_mode"]]
# sweep_mode
sweep_mode = filemetadata("sweep_mode")
sweep_mode["data"] = np.array(
ufile.nsweeps * [_SWEEP_MODE_STR[scan_type]], dtype="S"
)
# elevation
elevation = filemetadata("elevation")
elevation["data"] = ufile.get_elevations()
# azimuth
azimuth = filemetadata("azimuth")
azimuth["data"] = ufile.get_azimuths()
# fixed_angle
fixed_angle = filemetadata("fixed_angle")
fixed_angle["data"] = ufile.get_sweep_fixed_angles()
# fields
fields = {}
for uf_field_number, uf_field_dic in enumerate(first_ray.field_positions):
uf_field_name = uf_field_dic["data_type"].decode("ascii")
field_name = filemetadata.get_field_name(uf_field_name)
if field_name is None:
continue
field_dic = filemetadata(field_name)
field_dic["data"] = ufile.get_field_data(uf_field_number)
field_dic["_FillValue"] = get_fillvalue()
fields[field_name] = field_dic
# instrument_parameters
instrument_parameters = _get_instrument_parameters(ufile, filemetadata)
# scan rate
scan_rate = filemetadata("scan_rate")
scan_rate["data"] = ufile.get_sweep_rates()
ufile.close()
return Radar(
time,
_range,
fields,
metadata,
scan_type,
latitude,
longitude,
altitude,
sweep_number,
sweep_mode,
fixed_angle,
sweep_start_ray_index,
sweep_end_ray_index,
azimuth,
elevation,
scan_rate=scan_rate,
instrument_parameters=instrument_parameters,
)
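# Example usage of read_uf (a minimal sketch; the file name is hypothetical):
#
#     import pyart
#     radar = pyart.io.read_uf("sample.uf")
#     print(radar.nsweeps, list(radar.fields.keys()))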
def _get_instrument_parameters(ufile, filemetadata):
"""Return a dictionary containing instrument parameters."""
# pulse width
pulse_width = filemetadata("pulse_width")
pulse_width["data"] = ufile.get_pulse_widths() / _LIGHT_SPEED # m->sec
# assume that the parameters in the first ray represent the beam widths,
# bandwidth and frequency in the entire volume
first_ray = ufile.rays[0]
field_header = first_ray.field_headers[0]
beam_width_h = field_header["beam_width_h"] / 64.0
beam_width_v = field_header["beam_width_v"] / 64.0
bandwidth = field_header["bandwidth"] / 16.0 * 1.0e6
wavelength_cm = field_header["wavelength_cm"] / 64.0
    # the variable holds a frequency in Hz, not a wavelength
    if wavelength_cm == 0:
        warnings.warn("Invalid wavelength, frequency set to default value.")
        frequency_hz = 9999.0
    else:
        frequency_hz = _LIGHT_SPEED / (wavelength_cm / 100.0)
# radar_beam_width_h
radar_beam_width_h = filemetadata("radar_beam_width_h")
radar_beam_width_h["data"] = np.array([beam_width_h], dtype="float32")
# radar_beam_width_v
    radar_beam_width_v = filemetadata("radar_beam_width_v")
radar_beam_width_v["data"] = np.array([beam_width_v], dtype="float32")
# radar_receiver_bandwidth
radar_receiver_bandwidth = filemetadata("radar_receiver_bandwidth")
radar_receiver_bandwidth["data"] = np.array([bandwidth], dtype="float32")
# polarization_mode
polarization_mode = filemetadata("polarization_mode")
polarization_mode["data"] = ufile.get_sweep_polarizations()
# frequency
frequency = filemetadata("frequency")
frequency["data"] = np.array([wavelength_hz], dtype="float32")
# prt
prt = filemetadata("prt")
prt["data"] = ufile.get_prts() / 1e6 # us->sec
instrument_parameters = {
"pulse_width": pulse_width,
"radar_beam_width_h": radar_beam_width_h,
"radar_beam_width_v": radar_beam_width_v,
"radar_receiver_bandwidth": radar_receiver_bandwidth,
"polarization_mode": polarization_mode,
"frequency": frequency,
"prt": prt,
}
# nyquist velocity if defined
nyquist_velocity = filemetadata("nyquist_velocity")
nyquist_velocity["data"] = ufile.get_nyquists()
if nyquist_velocity["data"] is not None:
instrument_parameters["nyquist_velocity"] = nyquist_velocity
return instrument_parameters
<|endoftext|> |
<|endoftext|>#! /usr/bin/env python
"""
Make a small netCDF CF/Radial file containing a single RHI scan.
Single field and scan is converted from sigmet file XSW110520113537.RAW7HHL
"""
import pyart
radar = pyart.io.read_rsl("XSW110520113537.RAW7HHL")
time_slice = slice(None, 713, 18)
range_slice = slice(None, None, 12)
sweep_slice = slice(None, 1)
# remove all but the reflectivity_horizontal fields
rf_field = radar.fields["reflectivity"]
rf_data = rf_field["data"]
rf_field["data"] = rf_data[time_slice, range_slice]
radar.fields = {"reflectivity_horizontal": rf_field}
radar.nsweeps = 1
radar.nrays = 40
radar.ngates = 45
# truncate the range based variables
radar.range["data"] = radar.range["data"][range_slice]
# truncate the time based variables
radar.time["data"] = radar.time["data"][time_slice]
radar.azimuth["data"] = radar.azimuth["data"][time_slice]
radar.elevation["data"] = radar.elevation["data"][time_slice]
radar.instrument_parameters["prt"]["data"] = radar.instrument_parameters["prt"]["data"][
time_slice
]
radar.instrument_parameters["unambiguous_range"]["data"] = radar.instrument_parameters[
"unambiguous_range"
]["data"][time_slice]
radar.instrument_parameters["nyquist_velocity"]["data"] = radar.instrument_parameters[
"nyquist_velocity"
]["data"][time_slice]
# truncate the sweep based variables
radar.sweep_number["data"] = radar.sweep_number["data"][sweep_slice]
radar.fixed_angle["data"] = radar.fixed_angle["data"][sweep_slice]
radar.sweep_start_ray_index["data"] = radar.sweep_start_ray_index["data"][sweep_slice]
radar.sweep_end_ray_index["data"] = radar.sweep_end_ray_index["data"][sweep_slice]
radar.sweep_end_ray_index["data"][0] = 39
radar.sweep_mode["data"] = radar.sweep_mode["data"][sweep_slice]
radar.sweep_number["data"] = radar.sweep_number["data"][sweep_slice]
radar.instrument_parameters["prt_mode"]["data"] = radar.instrument_parameters[
"prt_mode"
]["data"][sweep_slice]
# adjust metadata
radar.metadata = {
"Conventions": "CF/Radial instrument_parameters",
"version": "1.2",
"title": "Py-ART Example RHI CF/Radial file",
"institution": (
"United States Department of Energy - Atmospheric "
"Radiation Measurement (ARM) program"
),
"references": "none",
"source": "ARM SGP XSAPR Radar",
"history": "created by jhelmus on evs348532 at 2013-05-22T12:34:56",
"comment": "none",
"instrument_name": "xsapr-sgp",
}
pyart.io.write_cfradial("example_cfradial_rhi.nc", radar)
<|endoftext|> |
<|endoftext|>"""
pyart.util.radar_utils
======================
Functions for working with radar instances.
.. autosummary::
:toctree: generated/
is_vpt
to_vpt
join_radar
"""
import copy
import numpy as np
from netCDF4 import date2num
from . import datetime_utils
def is_vpt(radar, offset=0.5):
"""
Determine if a Radar appears to be a vertical pointing scan.
    This function only verifies that the object appears to be a vertical
    pointing scan; use the :py:func:`to_vpt` function to convert the radar
    to a vpt scan if this function returns True.
Parameters
----------
    radar : Radar
        Radar object to check.
    offset : float
        Maximum offset of the elevation from 90 degrees to still consider
        the scan to be vertically pointing.
Returns
-------
flag : bool
        True if the radar appears to be vertically pointing, False if not.
"""
# check that the elevation is within offset of 90 degrees.
elev = radar.elevation["data"]
return np.all((elev < 90.0 + offset) & (elev > 90.0 - offset))
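# Example (a sketch): with the default offset of 0.5, is_vpt returns True only
# when every ray's elevation lies strictly between 89.5 and 90.5 degrees:
#
#     if is_vpt(radar):
#         print("radar appears to be vertically pointing")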
def to_vpt(radar, single_scan=True):
"""
Convert an existing Radar object to represent a vertical pointing scan.
This function does not verify that the Radar object contains a vertical
pointing scan. To perform such a check use :py:func:`is_vpt`.
Parameters
----------
radar : Radar
Mislabeled vertical pointing scan Radar object to convert to be
properly labeled. This object is converted in place, no copy of
the existing data is made.
single_scan : bool, optional
True to convert the volume to a single scan, any azimuth angle data
is lost. False will convert the scan to contain the same number of
scans as rays, azimuth angles are retained.
"""
if single_scan:
nsweeps = 1
radar.azimuth["data"][:] = 0.0
seri = np.array([radar.nrays - 1], dtype="int32")
radar.sweep_end_ray_index["data"] = seri
else:
nsweeps = radar.nrays
# radar.azimuth not adjusted
radar.sweep_end_ray_index["data"] = np.arange(nsweeps, dtype="int32")
radar.scan_type = "vpt"
radar.nsweeps = nsweeps
radar.target_scan_rate = None # no scanning
radar.elevation["data"][:] = 90.0
radar.sweep_number["data"] = np.arange(nsweeps, dtype="int32")
radar.sweep_mode["data"] = np.array(["vertical_pointing"] * nsweeps)
radar.fixed_angle["data"] = np.ones(nsweeps, dtype="float32") * 90.0
radar.sweep_start_ray_index["data"] = np.arange(nsweeps, dtype="int32")
if radar.instrument_parameters is not None:
for key in ["prt_mode", "follow_mode", "polarization_mode"]:
if key in radar.instrument_parameters:
ip_dic = radar.instrument_parameters[key]
ip_dic["data"] = np.array([ip_dic["data"][0]] * nsweeps)
# Attributes that do not need any changes
# radar.altitude
# radar.altitude_agl
# radar.latitude
# radar.longitude
# radar.range
# radar.ngates
# radar.nrays
# radar.metadata
# radar.radar_calibration
# radar.time
# radar.fields
# radar.antenna_transition
# radar.scan_rate
return
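# Example (a sketch): relabel a mislabeled vertically pointing radar in place,
# keeping one sweep per ray so the azimuth angles are retained:
#
#     to_vpt(radar, single_scan=False)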
def join_radar(radar1, radar2):
"""
Combine two radar instances into one.
Parameters
----------
radar1 : Radar
Radar object.
radar2 : Radar
        Radar object.
    Returns
    -------
    new_radar : Radar
        The combined radar instance.
    """
# must have same gate spacing
new_radar = copy.deepcopy(radar1)
new_radar.azimuth["data"] = np.append(
radar1.azimuth["data"], radar2.azimuth["data"]
)
new_radar.elevation["data"] = np.append(
radar1.elevation["data"], radar2.elevation["data"]
)
if len(radar1.range["data"]) >= len(radar2.range["data"]):
new_radar.range["data"] = radar1.range["data"]
else:
new_radar.range["data"] = radar2.range["data"]
# to combine times we need to reference them to a standard
# for this we'll use epoch time
    r1num = datetime_utils.datetimes_from_radar(radar1, epoch=True)
    r2num = datetime_utils.datetimes_from_radar(radar2, epoch=True)
    new_radar.time["data"] = date2num(
        np.append(r1num, r2num), datetime_utils.EPOCH_UNITS
    )
new_radar.time["units"] = datetime_utils.EPOCH_UNITS
for var in list(new_radar.fields.keys()):
sh1 = radar1.fields[var]["data"].shape
sh2 = radar2.fields[var]["data"].shape
new_field = np.ma.zeros([sh1[0] + sh2[0], max([sh1[1], sh2[1]])]) - 9999.0
new_field[0 : sh1[0], 0 : sh1[1]] = radar1.fields[var]["data"]
new_field[sh1[0] :, 0 : sh2[1]] = radar2.fields[var]["data"]
new_radar.fields[var]["data"] = new_field
# radar locations
# TODO moving platforms - any more?
    if (
        len(radar1.latitude["data"]) == 1
        and len(radar2.latitude["data"]) == 1
        and len(radar1.longitude["data"]) == 1
        and len(radar2.longitude["data"]) == 1
        and len(radar1.altitude["data"]) == 1
        and len(radar2.altitude["data"]) == 1
    ):
lat1 = float(radar1.latitude["data"])
lon1 = float(radar1.longitude["data"])
alt1 = float(radar1.altitude["data"])
lat2 = float(radar2.latitude["data"])
lon2 = float(radar2.longitude["data"])
alt2 = float(radar2.altitude["data"])
if (lat1 != lat2) or (lon1 != lon2) or (alt1 != alt2):
ones1 = np.ones(len(radar1.time["data"]), dtype="float32")
ones2 = np.ones(len(radar2.time["data"]), dtype="float32")
new_radar.latitude["data"] = np.append(ones1 * lat1, ones2 * lat2)
new_radar.longitude["data"] = np.append(ones1 * lon1, ones2 * lon2)
new_radar.latitude["data"] = np.append(ones1 * alt1, ones2 * alt2)
else:
new_radar.latitude["data"] = radar1.latitude["data"]
new_radar.longitude["data"] = radar1.longitude["data"]
new_radar.altitude["data"] = radar1.altitude["data"]
else:
new_radar.latitude["data"] = np.append(
radar1.latitude["data"], radar2.latitude["data"]
)
new_radar.longitude["data"] = np.append(
radar1.longitude["data"], radar2.longitude["data"]
)
new_radar.altitude["data"] = np.append(
radar1.altitude["data"], radar2.altitude["data"]
)
return new_radar
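# Example usage (a sketch; both radars must share gate spacing, as noted in the
# comment above):
#
#     combined = join_radar(radar_a, radar_b)
#     print(combined.time["units"])  # "seconds since 1970-01-01T00:00:00Z"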
<|endoftext|> |
<|endoftext|>"""
Default config for Workload Automation. DO NOT MODIFY this file. This file
gets copied to ~/.workload_automation/config.py on initial run of run_workloads.
Add your configuration to that file instead.
"""
# *** WARNING: ***
# Configuration listed in this file is NOT COMPLETE. This file sets the default
# configuration for WA and gives EXAMPLES of other configuration available. It
# is not supposed to be an exhaustive list.
# PLEASE REFER TO WA DOCUMENTATION FOR THE COMPLETE LIST OF AVAILABLE
# EXTENSIONS AND THEIR CONFIGURATION.
# This defines when the device will be rebooted during Workload Automation execution. #
# #
# Valid policies are: #
# never: The device will never be rebooted. #
# as_needed: The device will only be rebooted if the need arises (e.g. if it #
# becomes unresponsive). #
# initial: The device will be rebooted when the execution first starts, just before executing #
# the first workload spec. #
# each_spec: The device will be rebooted before running a new workload spec. #
# each_iteration: The device will be rebooted before each new iteration. #
# #
reboot_policy = "as_needed"
# Defines the order in which the agenda spec will be executed. At the moment, #
# the following execution orders are supported: #
# #
# by_iteration: The first iteration of each workload spec is executed one after the other, #
# so all workloads are executed before proceeding on to the second iteration. #
# This is the default if no order is explicitly specified. #
# If multiple sections were specified, this will also split them up, so that specs #
# in the same section are further apart in the execution order. #
# by_section: Same as "by_iteration", but runs specs from the same section one after the other #
# by_spec: All iterations of the first spec are executed before moving on to the next #
# spec. This may also be specified as ``"classic"``, as this was the way #
# workloads were executed in earlier versions of WA. #
# random: Randomises the order in which specs run. #
execution_order = "by_iteration"
# This indicates when a job will be re-run.
# Possible values:
# OK: This iteration has completed and no errors have been detected
# PARTIAL: One or more instruments have failed (the iteration may still be running).
# FAILED: The workload itself has failed.
# ABORTED: The user interrupted the workload
#
# If set to an empty list, a job will not be re-run ever.
retry_on_status = ["FAILED", "PARTIAL"]
# How many times a job will be re-run before giving up
max_retries = 3
####################################################################################################
######################################### Device Settings ##########################################
####################################################################################################
# Specify the device you want to run workload automation on. This must be a #
# string with the ID of the device. At the moment, only 'TC2' is supported. #
# #
device = "generic_android"
# Configuration options that will be passed onto the device. These are obviously device-specific, #
# so check the documentation for the particular device to find out which options and values are #
# valid. The settings listed below are common to all devices #
# #
device_config = dict(
# The name used by adb to identify the device. Use "adb devices" in bash to list
# the devices currently seen by adb.
# adb_name='10.109.173.2:5555',
# The directory on the device that WA will use to push files to
# working_directory='/sdcard/wa-working',
# This specifies the device's CPU cores. The order must match how they
# appear in cpufreq. The example below is for TC2.
# core_names = ['a7', 'a7', 'a7', 'a15', 'a15']
# Specifies cluster mapping for the device's cores.
# core_clusters = [0, 0, 0, 1, 1]
)
####################################################################################################
################################## Instrumentation Configuration ###################################
####################################################################################################
# This defines the additional instrumentation that will be enabled during workload execution, #
# which in turn determines what additional data (such as /proc/interrupts content or Streamline #
# traces) will be available in the results directory. #
# #
instrumentation = [
# Records the time it took to run the workload
"execution_time",
# Collects /proc/interrupts before and after execution and does a diff.
"interrupts",
    # Collects the contents of /sys/devices/system/cpu before and after execution and does a diff.
"cpufreq",
    # Gets energy usage for the workload from HWMON devices
# NOTE: the hardware needs to have the right sensors in order for this to work
#'hwmon',
# Run perf in the background during workload execution and then collect the results. perf is a
# standard Linux performance analysis tool.
#'perf',
# Collect Streamline traces during workload execution. Streamline is part of DS-5
#'streamline',
# Collects traces by interacting with Ftrace Linux kernel internal tracer
#'trace-cmd',
# Obtains the power consumption of the target device's core measured by National Instruments
    # Data Acquisition (DAQ) device.
#'daq',
# Collects CCI counter data.
#'cci_pmu_logger',
# Collects FPS (Frames Per Second) and related metrics (such as jank) from
# the View of the workload (Note: only a single View per workload is
# supported at the moment, so this is mainly useful for games).
#'fps',
]
####################################################################################################
################################# Result Processors Configuration ##################################
####################################################################################################
# Specifies how results will be processed and presented. #
# #
result_processors = [
# Creates a status.txt that provides a summary status for the run
"status",
# Creates a results.txt file for each iteration that lists all collected metrics
# in "name = value (units)" format
"standard",
# Creates a results.csv that contains metrics for all iterations of all workloads
# in the .csv format.
"csv",
# Creates a summary.csv that contains summary metrics for all iterations of all
    # workloads in the .csv format. Summary metrics are defined on a per-workload
    # basis and are typically things like overall scores. The contents of summary.csv are
# always a subset of the contents of results.csv (if it is generated).
#'summary_csv',
    # Creates a results.json that contains metrics for all iterations of all
    # workloads in the JSON format
#'json',
# Write results to an sqlite3 database. By default, a new database will be
# generated for each run, however it is possible to specify a path to an
# existing DB file (see result processor configuration below), in which
# case results from multiple runs may be stored in the one file.
#'sqlite',
]
####################################################################################################
################################### Logging output Configuration ###################################
####################################################################################################
# Specify the format of logging messages. The format uses the old formatting syntax: #
# #
# http://docs.python.org/2/library/stdtypes.html#string-formatting-operations #
# #
# The attributes that can be used in formats are listed here: #
# #
# http://docs.python.org/2/library/logging.html#logrecord-attributes #
# #
logging = {
# Log file format
"file format": "%(asctime)s %(levelname)-8s %(name)s: %(message)s",
# Verbose console output format
"verbose format": "%(asctime)s %(levelname)-8s %(name)s: %(message)s",
# Regular console output format
"regular format": "%(levelname)-8s %(message)s",
# Colouring the console output
"colour_enabled": True,
}
####################################################################################################
#################################### Instruments Configuration #####################################
####################################################################################################
# Instrumentation configuration is related to specific instruments' settings. Some of the #
# instruments require specific settings in order for them to work. These settings are #
# specified here. #
# Note that these settings only take effect if the corresponding instrument is
# enabled above.
####################################################################################################
######################################## perf configuration ########################################
# The hardware events such as instructions executed, cache-misses suffered, or branches
# mispredicted to be reported by perf. Events can be obtained from the device by typing
# 'perf list'.
# perf_events = ['migrations', 'cs']
# The perf options, which can be obtained from the man page for perf-record
# perf_options = '-a -i'
####################################################################################################
####################################### hwmon configuration ########################################
# The kinds of sensors the hwmon instrument will look for
# hwmon_sensors = ['energy', 'temp']
####################################################################################################
###################################### trace-cmd configuration #####################################
# trace-cmd events to be traced. The available events can be listed by running
# 'trace-cmd list -e' on a rooted device
# trace_events = ['power*']
####################################################################################################
######################################### DAQ configuration ########################################
# The host address of the machine that runs the daq server that the instrument communicates with
# daq_server_host = '10.1.17.56'
# The port number of the daq server that the daq instrument communicates with
# daq_server_port = 56788
# The values of resistors 1 and 2 (in Ohms) across which the voltages are measured
# daq_resistor_values = [0.002, 0.002]
####################################################################################################
################################### cci_pmu_logger configuration ###################################
# The events to be counted by PMU
# NOTE: The number of events must not exceed the number of counters available (which is 4 for CCI-400)
# cci_pmu_events = ['0x63', '0x83']
# The name of the events which will be used when reporting PMU counts
# cci_pmu_event_labels = ['event_0x63', 'event_0x83']
# The period (in jiffies) between counter reads
# cci_pmu_period = 15
####################################################################################################
################################### fps configuration ##############################################
# Data points below this FPS will be dropped as not constituting "real" gameplay. The assumption
# being that while actually running, the FPS in the game will not drop below X frames per second,
# except on loading screens, menus, etc, which should not contribute to FPS calculation.
# fps_drop_threshold=5
# If set to True, this will keep the raw dumpsys output in the results directory (this is mainly
# used for debugging). Note: frames.csv with collected frames data will always be generated
# regardless of this setting.
# fps_keep_raw=False
####################################################################################################
################################# Result Processor Configuration ###################################
####################################################################################################
# Specifies an alternative database to store results in. If the file does not
# exist, it will be created (the directory of the file must exist however). If
# the file does exist, the results will be added to the existing data set (each
# run has a UUID, so results won't clash even if identical agendas were used).
# Note that in order for this to work, the version of the schema used to generate
# the DB file must match that of the schema used for the current run. Please
# see "What's new" secition in WA docs to check if the schema has changed in
# recent releases of WA.
# sqlite_database = '/work/results/myresults.sqlite'
# If the file specified by sqlite_database exists, setting this to True will
# cause that file to be overwritten rather than updated -- existing results in
# the file will be lost.
# sqlite_overwrite = False
# distribution: internal
####################################################################################################
#################################### Resource Getter configuration #################################
####################################################################################################
# The location on your system where /arm/scratch is mounted. Used by
# Scratch resource getter.
# scratch_mount_point = '/arm/scratch'
# end distribution
<|endoftext|> |
<|endoftext|># Copyright 2014-2015 ARM Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Original implementation by Rene de Jong. Updated by Sascha Bischoff.
import logging
from wlauto import LinuxDevice, Parameter
from wlauto.common.gem5.device import BaseGem5Device
from wlauto.utils import types
class Gem5LinuxDevice(BaseGem5Device, LinuxDevice):
"""
Implements gem5 Linux device.
This class allows a user to connect WA to a simulation using gem5. The
connection to the device is made using the telnet connection of the
simulator, and is used for all commands. The simulator does not have ADB
support, and therefore we need to fall back to using standard shell
commands.
Files are copied into the simulation using a VirtIO 9P device in gem5. Files
are copied out of the simulated environment using the m5 writefile command
within the simulated system.
When starting the workload run, the simulator is automatically started by
Workload Automation, and a connection to the simulator is established. WA
    will then wait for Linux to boot on the simulated system (which can take
hours), prior to executing any other commands on the device. It is also
possible to resume from a checkpoint when starting the simulation. To do
this, please append the relevant checkpoint commands from the gem5
simulation script to the gem5_discription argument in the agenda.
Host system requirements:
* VirtIO support. We rely on diod on the host system. This can be
          installed on Ubuntu using the following command:
sudo apt-get install diod
Guest requirements:
* VirtIO support. We rely on VirtIO to move files into the simulation.
Please make sure that the following are set in the kernel
configuration:
CONFIG_NET_9P=y
CONFIG_NET_9P_VIRTIO=y
CONFIG_9P_FS=y
CONFIG_9P_FS_POSIX_ACL=y
CONFIG_9P_FS_SECURITY=y
CONFIG_VIRTIO_BLK=y
* m5 binary. Please make sure that the m5 binary is on the device and
      can be found in the path.
"""
name = "gem5_linux"
platform = "linux"
parameters = [
Parameter("core_names", default=[], override=True),
Parameter("core_clusters", default=[], override=True),
Parameter(
"host",
default="localhost",
override=True,
description="Host name or IP address for the device.",
),
Parameter(
"login_prompt",
kind=types.list_of_strs,
default=["login:", "AEL login:", "username:"],
mandatory=False,
),
Parameter(
"login_password_prompt",
kind=types.list_of_strs,
default=["password:"],
mandatory=False,
),
]
    # Overridden from Device. For documentation, see the corresponding method in
# Device.
def __init__(self, **kwargs):
self.logger = logging.getLogger("Gem5LinuxDevice")
LinuxDevice.__init__(self, **kwargs)
BaseGem5Device.__init__(self)
def login_to_device(self):
# Wait for the login prompt
prompt = self.login_prompt + [self.sckt.UNIQUE_PROMPT]
i = self.sckt.expect(prompt, timeout=10)
# Check if we are already at a prompt, or if we need to log in.
if i < len(prompt) - 1:
self.sckt.sendline("{}".format(self.username))
password_prompt = self.login_password_prompt + [
r"# ",
self.sckt.UNIQUE_PROMPT,
]
j = self.sckt.expect(password_prompt, timeout=self.delay)
if j < len(password_prompt) - 2:
self.sckt.sendline("{}".format(self.password))
self.sckt.expect([r"# ", self.sckt.UNIQUE_PROMPT], timeout=self.delay)
def capture_screen(self, filepath):
if BaseGem5Device.capture_screen(self, filepath):
return
# If we didn't manage to do the above, call the parent class.
self.logger.warning(
"capture_screen: falling back to parent class implementation"
)
LinuxDevice.capture_screen(self, filepath)
def initialize(self, context):
self.resize_shell()
self.deploy_m5(context, force=False)
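# Example WA config snippet selecting this device (a sketch; the parameter
# values are illustrative assumptions, not recommended defaults):
#
#     device = 'gem5_linux'
#     device_config = dict(
#         host='localhost',
#         login_prompt=['login:'],
#         login_password_prompt=['password:'],
#     )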
<|endoftext|> |
<|endoftext|>"""Louie version information."""
NAME = "Louie"
DESCRIPTION = "Signal dispatching mechanism"
VERSION = "1.1"
<|endoftext|> |
<|endoftext|># Copyright 2013-2015 ARM Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# pylint: disable=attribute-defined-outside-init
import os
import sqlite3
import json
import uuid
from datetime import datetime, timedelta
from contextlib import contextmanager
from wlauto import ResultProcessor, settings, Parameter
from wlauto.exceptions import ResultProcessorError
from wlauto.utils.types import boolean
# IMPORTANT: when updating this schema, make sure to bump the version!
SCHEMA_VERSION = "0.0.2"
SCHEMA = [
"""CREATE TABLE runs (
uuid text,
start_time datetime,
end_time datetime,
duration integer
)""",
"""CREATE TABLE workload_specs (
id text,
run_oid text,
number_of_iterations integer,
label text,
workload_name text,
boot_parameters text,
runtime_parameters text,
workload_parameters text
)""",
"""CREATE TABLE metrics (
spec_oid int,
iteration integer,
metric text,
value text,
units text,
lower_is_better integer
)""",
"""CREATE VIEW results AS
SELECT uuid as run_uuid, spec_id, label as workload, iteration, metric, value, units, lower_is_better
FROM metrics AS m INNER JOIN (
SELECT ws.OID as spec_oid, ws.id as spec_id, uuid, label
FROM workload_specs AS ws INNER JOIN runs AS r ON ws.run_oid = r.OID
) AS wsr ON wsr.spec_oid = m.spec_oid
""",
"""CREATE TABLE __meta (
schema_version text
)""",
"""INSERT INTO __meta VALUES ("{}")""".format(SCHEMA_VERSION),
]
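# Example (a sketch; the database path is hypothetical): once a run has been
# stored, per-iteration metrics can be queried back through the "results" view
# defined in the schema above:
#
#     import sqlite3
#     conn = sqlite3.connect("results.sqlite")
#     for workload, metric, value in conn.execute(
#             "SELECT workload, metric, value FROM results"):
#         print(workload, metric, value)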
sqlite3.register_adapter(datetime, lambda x: x.isoformat())
sqlite3.register_adapter(timedelta, lambda x: x.total_seconds())
sqlite3.register_adapter(uuid.UUID, str)
class SqliteResultProcessor(ResultProcessor):
name = "sqlite"
description = """
Stores results in an sqlite database.
    This may be used to accumulate results of multiple runs in a single file.
"""
name = "sqlite"
parameters = [
Parameter(
"database",
default=None,
global_alias="sqlite_database",
description=""" Full path to the sqlite database to be used. If this is not specified then
a new database file will be created in the output directory. This setting can be
used to accumulate results from multiple runs in a single database. If the
specified file does not exist, it will be created, however the directory of the
file must exist.
.. note:: The value must resolve to an absolute path,
relative paths are not allowed; however the
value may contain environment variables and/or
the home reference ~.
""",
),
Parameter(
"overwrite",
kind=boolean,
default=False,
global_alias="sqlite_overwrite",
description="""If ``True``, this will overwrite the database file
if it already exists. If ``False`` (the default) data
will be added to the existing file (provided schema
versions match -- otherwise an error will be raised).
""",
),
]
def initialize(self, context):
self._last_spec = None
self._run_oid = None
self._spec_oid = None
if not os.path.exists(self.database):
self._initdb()
elif self.overwrite: # pylint: disable=no-member
os.remove(self.database)
self._initdb()
else:
self._validate_schema_version()
self._update_run(context.run_info.uuid)
def process_iteration_result(self, result, context):
if self._last_spec != context.spec:
self._update_spec(context.spec)
metrics = [
(
self._spec_oid,
context.current_iteration,
m.name,
str(m.value),
m.units,
int(m.lower_is_better),
)
for m in result.metrics
]
        with self._open_connection() as conn:
conn.executemany("INSERT INTO metrics VALUES (?,?,?,?,?,?)", metrics)
def process_run_result(self, result, context):
info = context.run_info
        with self._open_connection() as conn:
conn.execute(
"""UPDATE runs SET start_time=?, end_time=?, duration=?
WHERE OID=?""",
(info.start_time, info.end_time, info.duration, self._run_oid),
)
def validate(self):
if not self.database: # pylint: disable=access-member-before-definition
self.database = os.path.join(settings.output_directory, "results.sqlite")
self.database = os.path.expandvars(os.path.expanduser(self.database))
def _initdb(self):
        with self._open_connection() as conn:
for command in SCHEMA:
conn.execute(command)
def _validate_schema_version(self):
        with self._open_connection() as conn:
try:
c = conn.execute("SELECT schema_version FROM __meta")
found_version = c.fetchone()[0]
except sqlite3.OperationalError:
message = (
"{} does not appear to be a valid WA results database.".format(
self.database
)
)
raise ResultProcessorError(message)
if found_version != SCHEMA_VERSION:
message = (
"Schema version in {} ({}) does not match current version ({})."
)
raise ResultProcessorError(
message.format(self.database, found_version, SCHEMA_VERSION)
)
def _update_run(self, run_uuid):
        with self._open_connection() as conn:
conn.execute("INSERT INTO runs (uuid) VALUES (?)", (run_uuid,))
conn.commit()
c = conn.execute("SELECT OID FROM runs WHERE uuid=?", (run_uuid,))
self._run_oid = c.fetchone()[0]
def _update_spec(self, spec):
self._last_spec = spec
spec_tuple = (
spec.id,
self._run_oid,
spec.number_of_iterations,
spec.label,
spec.workload_name,
json.dumps(spec.boot_parameters),
json.dumps(spec.runtime_parameters),
json.dumps(spec.workload_parameters),
)
        with self._open_connection() as conn:
conn.execute(
"INSERT INTO workload_specs VALUES (?,?,?,?,?,?,?,?)", spec_tuple
)
conn.commit()
c = conn.execute(
"SELECT OID FROM workload_specs WHERE run_oid=? AND id=?",
(self._run_oid, spec.id),
)
self._spec_oid = c.fetchone()[0]
@contextmanager
    def _open_connection(self):
conn = sqlite3.connect(self.database)
try:
yield conn
finally:
conn.commit()
<|endoftext|> |
<|endoftext|># Copyright 2014-2015 ARM Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
This module contains utilities for implementing device hard reset
using Netio 230 series power switches. This utilizes the KSHELL connection.
"""
import telnetlib
import socket
import re
import time
import logging
logger = logging.getLogger("NetIO")
class NetioError(Exception):
pass
class KshellConnection(object):
response_regex = re.compile(r"^(\d+) (.*?)\r\n")
delay = 0.5
def __init__(self, host="ippowerbar", port=1234, timeout=None):
"""Parameters are passed into ``telnetlib.Telnet`` -- see Python docs."""
self.host = host
self.port = port
self.conn = telnetlib.Telnet(host, port, timeout)
time.sleep(self.delay) # give time to respond
output = self.conn.read_very_eager()
if "HELLO" not in output:
raise NetioError(
"Could not connect: did not see a HELLO. Got: {}".format(output)
)
def login(self, user, password):
code, out = self.send_command("login {} {}\r\n".format(user, password))
if code != 250:
raise NetioError("Login failed. Got: {} {}".format(code, out))
def enable_port(self, port):
"""Enable the power supply at the specified port."""
self.set_port(port, 1)
def disable_port(self, port):
"""Enable the power supply at the specified port."""
self.set_port(port, 0)
def set_port(self, port, value):
code, out = self.send_command("port {} {}".format(port, value))
if code != 250:
raise NetioError(
"Could not set {} on port {}. Got: {} {}".format(value, port, code, out)
)
def send_command(self, command):
try:
if command.startswith("login"):
parts = command.split()
parts[2] = "*" * len(parts[2])
logger.debug(" ".join(parts))
else:
logger.debug(command)
self.conn.write("{}\n".format(command))
time.sleep(self.delay) # give time to respond
out = self.conn.read_very_eager()
match = self.response_regex.search(out)
if not match:
raise NetioError("Invalid response: {}".format(out.strip()))
logger.debug("response: {} {}".format(match.group(1), match.group(2)))
return int(match.group(1)), match.group(2)
except socket.error as err:
try:
time.sleep(self.delay) # give time to respond
out = self.conn.read_very_eager()
if out.startswith("130 CONNECTION TIMEOUT"):
raise NetioError("130 Timed out.")
except EOFError:
pass
raise err
def close(self):
self.conn.close()
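# Example usage (a sketch; host, port and credentials are assumptions):
#
#     conn = KshellConnection(host='10.1.2.3', timeout=5)
#     conn.login('admin', 'admin')
#     conn.disable_port(1)
#     conn.enable_port(1)  # power-cycle the device attached to port 1
#     conn.close()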
<|endoftext|> |
<|endoftext|># Copyright 2012-2015 ARM Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# pylint: disable=no-member
# pylint: disable=attribute-defined-outside-init
import os
import time
from wlauto import settings, Workload, Executable, Parameter
from wlauto.exceptions import ConfigError, WorkloadError
from wlauto.utils.types import boolean
TXT_RESULT_NAME = "cyclictest_result.txt"
RESULT_INTERPRETATION = {
"T": "Thread",
"P": "Priority",
"C": "Clock",
}
class Cyclictest(Workload):
name = "cyclictest"
description = """
Measures the amount of time that passes between when a timer expires and
when the thread which set the timer actually runs.
    Cyclictest works by taking a time snapshot just prior to waiting for a specific
    time interval (t1), then taking another time snapshot after the timer
    finishes (t2), then comparing the theoretical wakeup time with the actual
    wakeup time (t2 - (t1 + sleep_time)). This value is the latency for that
    timer's wakeup.
"""
parameters = [
Parameter(
"clock",
allowed_values=["monotonic", "realtime"],
default="realtime",
description=("specify the clock to be used during the test."),
),
Parameter(
"duration",
kind=int,
default=30,
description=("Specify the length for the test to run in seconds."),
),
Parameter(
"quiet",
kind=boolean,
default=True,
description=("Run the tests quiet and print only a summary on exit."),
),
Parameter(
"thread",
kind=int,
default=8,
description=("Set the number of test threads"),
),
Parameter(
"latency",
kind=int,
default=1000000,
description=("Write the value to /dev/cpu_dma_latency"),
),
Parameter(
"extra_parameters",
kind=str,
default="",
description=(
"Any additional command line parameters to append to the "
"existing parameters above. A list can be found at "
"https://rt.wiki.kernel.org/index.php/Cyclictest or "
"in the help page ``cyclictest -h``"
),
),
Parameter(
"clear_file_cache",
kind=boolean,
default=True,
description=("Clear file caches before starting test"),
),
Parameter(
"screen_off",
kind=boolean,
default=True,
description=(
"If true it will turn the screen off so that onscreen "
"graphics do not effect the score. This is predominantly "
"for devices without a GPU"
),
),
]
def setup(self, context):
self.cyclictest_on_device = "cyclictest"
self.cyclictest_result = os.path.join(
self.device.working_directory, TXT_RESULT_NAME
)
self.cyclictest_command = (
"{} --clock={} --duration={}s --thread={} --latency={} {} {} > {}"
)
self.device_binary = None
if not self.device.is_rooted:
raise WorkloadError(
"This workload requires a device with root premissions to run"
)
host_binary = context.resolver.get(
Executable(self, self.device.abi, "cyclictest")
)
self.device_binary = self.device.install(host_binary)
self.cyclictest_command = self.cyclictest_command.format(
self.device_binary,
0 if self.clock == "monotonic" else 1,
self.duration,
self.thread,
self.latency,
"--quiet" if self.quiet else "",
self.extra_parameters,
self.cyclictest_result,
)
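        # Example of a resulting command line (a sketch; the installed binary
        # path and working directory are device-specific assumptions):
        #   /data/local/tmp/cyclictest --clock=1 --duration=30s --thread=8
        #   --latency=1000000 --quiet > /sdcard/wa-working/cyclictest_result.txt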
if self.clear_file_cache:
self.device.execute("sync")
self.device.set_sysfile_value("/proc/sys/vm/drop_caches", 3)
if self.device.platform == "android":
if self.screen_off and self.device.is_screen_on:
self.device.execute("input keyevent 26")
def run(self, context):
self.device.execute(self.cyclictest_command, self.duration * 2, as_root=True)
def update_result(self, context):
self.device.pull_file(self.cyclictest_result, context.output_directory)
# Parsing the output
# Standard Cyclictest Output:
# T: 0 (31974) P:95 I:1000 C:4990 Min:9 Act:37 Avg:31 Max:59
with open(os.path.join(context.output_directory, TXT_RESULT_NAME)) as f:
for line in f:
if line.find("C:") is not -1:
# Key = T: 0 (31974) P:95 I:1000
# Remaing = 49990 Min:9 Act:37 Avg:31 Max:59
# sperator = C:
(key, sperator, remaing) = line.partition("C:")
index = key.find("T")
key = key.replace(key[index], RESULT_INTERPRETATION["T"])
index = key.find("P")
key = key.replace(key[index], RESULT_INTERPRETATION["P"])
index = sperator.find("C")
sperator = sperator.replace(
sperator[index], RESULT_INTERPRETATION["C"]
)
metrics = (sperator + remaing).split()
# metrics is now in the from of ['Min:', '9', 'Act:', '37', 'Avg:', '31' , 'Max', '59']
for i in range(0, len(metrics), 2):
full_key = key + " " + metrics[i][:-1]
value = int(metrics[i + 1])
context.result.add_metric(full_key, value, "microseconds")
def teardown(self, context):
if self.device.platform == "android":
if self.screen_off:
self.device.ensure_screen_is_on()
self.device.execute("rm -f {}".format(self.cyclictest_result))
<|endoftext|> |
<|endoftext|># Copyright 2013-2015 ARM Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# pylint: disable=E1101,W0201
import os
import re
from collections import defaultdict
from wlauto import Workload, Parameter, File
from wlauto.utils.types import caseless_string
from wlauto.exceptions import WorkloadError
class Recentfling(Workload):
name = "recentfling"
description = """
Tests UI jank on android devices.
For this workload to work, ``recentfling.sh`` and ``defs.sh`` must be placed
in ``~/.workload_automation/dependencies/recentfling/``. These can be found
in the [AOSP Git repository](https://android.googlesource.com/platform/system/extras/+/master/tests/).
To change the apps that are opened at the start of the workload you will need
to modify the ``defs.sh`` file. You will need to add your app to ``dfltAppList``
and then add a variable called ``{app_name}Activity`` with the name of the
    activity to launch (where ``{app_name}`` is the name you put into ``dfltAppList``).
    You can get a list of packages available on your device by running
``adb shell pm list packages -f``
"""
supported_platforms = ["android"]
parameters = [
Parameter(
"loops", kind=int, default=3, description="The number of test iterations."
),
]
    def initialize(self, context):  # pylint: disable=no-self-use
if context.device.get_sdk_version() < 23:
raise WorkloadError(
"This workload relies on ``dumpsys gfxinfo`` \
only present in Android M and onwards"
)
def setup(self, context):
self.defs_host = context.resolver.get(File(self, "defs.sh"))
self.recentfling_host = context.resolver.get(File(self, "recentfling.sh"))
self.device.push_file(self.recentfling_host, self.device.working_directory)
self.device.push_file(self.defs_host, self.device.working_directory)
self._kill_recentfling()
self.device.ensure_screen_is_on()
def run(self, context):
cmd = "echo $$>{dir}/pidfile; exec {dir}/recentfling.sh -i {}; rm {dir}/pidfile"
cmd = cmd.format(self.loops, dir=self.device.working_directory)
try:
self.output = self.device.execute(cmd, timeout=120)
except KeyboardInterrupt:
self._kill_recentfling()
raise
def update_result(self, context):
group_names = [
"90th Percentile",
"95th Percentile",
"99th Percentile",
"Jank",
"Jank%",
]
count = 0
for line in self.output.strip().splitlines():
p = re.compile(
"Frames: \d+ latency: (?P<pct90>\d+)/(?P<pct95>\d+)/(?P<pct99>\d+) Janks: (?P<jank>\d+)\((?P<jank_pct>\d+)%\)"
)
match = p.search(line)
if match:
count += 1
if line.startswith("AVE: "):
group_names = ["Average " + g for g in group_names]
count = 0
for metric in zip(group_names, match.groups()):
context.result.add_metric(
metric[0],
metric[1],
None,
classifiers={"loop": count or "Average"},
)
def teardown(self, context):
self.device.delete_file(
self.device.path.join(self.device.working_directory, "recentfling.sh")
)
self.device.delete_file(
self.device.path.join(self.device.working_directory, "defs.sh")
)
def _kill_recentfling(self):
pid = self.device.execute(
"cat {}/pidfile".format(self.device.working_directory)
)
if pid:
self.device.kill(pid.strip(), signal="SIGKILL")
<|endoftext|> |
<|endoftext|>#!/usr/bin/env python
"""
mbed SDK
Copyright (c) 2011-2016 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import sys
from time import time, strftime, gmtime
from math import modf
class HtrunLogger(object):
"""! Yet another logger flavour"""
def __init__(self, prn_lock, name):
self.__prn_lock = prn_lock
self.__name = name
def __prn_func(self, text, nl=True):
"""! Prints and flushes data to stdout"""
with self.__prn_lock:
if nl and not text.endswith("\n"):
text += "\n"
sys.stdout.write(text)
sys.stdout.flush()
def __prn_log_human(self, level, text, timestamp=None):
if not timestamp:
timestamp = time()
timestamp_str = strftime("%y-%m-%d %H:%M:%S", gmtime(timestamp))
frac, whole = modf(timestamp)
s = "[%s.%d][%s][%s] %s" % (timestamp_str, frac, self.__name, level, text)
self.__prn_func(s, nl=True)
def __prn_log(self, level, text, timestamp=None):
if not timestamp:
timestamp = time()
s = "[%.2f][%s][%s] %s" % (timestamp, self.__name, level, text)
self.__prn_func(s, nl=True)
def prn_dbg(self, text, timestamp=None):
self.__prn_log("DBG", text, timestamp)
def prn_wrn(self, text, timestamp=None):
self.__prn_log("WRN", text, timestamp)
def prn_err(self, text, timestamp=None):
self.__prn_log("ERR", text, timestamp)
def prn_inf(self, text, timestamp=None):
self.__prn_log("INF", text, timestamp)
def prn_txt(self, text, timestamp=None):
self.__prn_log("TXT", text, timestamp)
def prn_txd(self, text, timestamp=None):
self.__prn_log("TXD", text, timestamp)
def prn_rxd(self, text, timestamp=None):
self.__prn_log("RXD", text, timestamp)
<|endoftext|> |
<|endoftext|>#!/usr/bin/env python
"""
mbed SDK
Copyright (c) 2011-2015 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import unittest
from mbed_host_tests import is_host_test
from mbed_host_tests import get_host_test
from mbed_host_tests import get_plugin_caps
from mbed_host_tests import get_host_test_list
class BasicHostTestsTestCase(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_basic_get_host_test(self):
self.assertNotEqual(None, get_host_test("default"))
self.assertNotEqual(None, get_host_test("default_auto"))
def test_basic_is_host_test(self):
self.assertFalse(is_host_test(""))
self.assertFalse(is_host_test(None))
self.assertTrue(is_host_test("default"))
self.assertTrue(is_host_test("default_auto"))
def test_get_host_test_list(self):
d = get_host_test_list()
self.assertIs(type(d), dict)
self.assertIn("default", d)
self.assertIn("default_auto", d)
def test_get_plugin_caps(self):
d = get_plugin_caps()
self.assertIs(type(d), dict)
if __name__ == "__main__":
unittest.main()
<|endoftext|> |
<|endoftext|>#!/usr/bin/env python
# small RNA oriented bowtie wrapper
# version 1.5 17-7-2014: arg parser implementation
# Usage sRbowtie.py <1 input_fasta_file> <2 alignment method> <3 -v mismatches> <4 out_type> <5 buildIndexIfHistory> <6 fasta/bowtie index> <7 bowtie output> <8 ali_fasta> <9 unali_fasta> <10 --num-threads \${GALAXY_SLOTS:-4}>
# current rev: for bowtie --norc, move from --suppress 2,6,7,8 to --suppress 6,7,8. The parser must be updated in the future to take this standardisation into account
# Christophe Antoniewski <[email protected]>
import sys
import os
import subprocess
import tempfile
import shutil
import argparse
def Parser():
the_parser = argparse.ArgumentParser(
description="bowtie wrapper for small fasta reads"
)
the_parser.add_argument("--input", action="store", type=str, help="input file")
the_parser.add_argument(
"--input-format",
dest="input_format",
action="store",
type=str,
help="fasta or fastq",
)
the_parser.add_argument(
"--method",
action="store",
type=str,
help="RNA, unique, multiple, k_option, n_option, a_option",
)
the_parser.add_argument(
"--v-mismatches",
dest="v_mismatches",
action="store",
type=str,
help="number of mismatches allowed for the alignments",
)
the_parser.add_argument(
"--output-format",
dest="output_format",
action="store",
type=str,
help="tabular, sam, bam",
)
the_parser.add_argument(
"--output", action="store", type=str, help="output file path"
)
the_parser.add_argument(
"--index-from",
dest="index_from",
action="store",
type=str,
help="indexed or history",
)
the_parser.add_argument(
"--index-source",
dest="index_source",
action="store",
type=str,
help="file path to the index source",
)
the_parser.add_argument(
"--aligned", action="store", type=str, help="aligned read file path, maybe None"
)
the_parser.add_argument(
"--unaligned",
action="store",
type=str,
help="unaligned read file path, maybe None",
)
the_parser.add_argument(
"--num-threads",
dest="num_threads",
action="store",
type=str,
help="number of bowtie threads",
)
args = the_parser.parse_args()
return args
def stop_err(msg):
sys.stderr.write("%s\n" % msg)
sys.exit()
def bowtieCommandLiner(
alignment_method="RNA",
v_mis="1",
out_type="tabular",
aligned="None",
unaligned="None",
input_format="fasta",
input="path",
index="path",
output="path",
pslots="4",
):
if input_format == "fasta":
input_format = "-f"
elif (input_format == "fastq") or (input_format == "fastqsanger"):
input_format = "-q"
else:
raise Exception("input format must be one of fasta or fastq")
if alignment_method == "RNA":
x = "-v %s -M 1 --best --strata -p %s --norc --suppress 6,7,8" % (v_mis, pslots)
elif alignment_method == "unique":
x = "-v %s -m 1 -p %s --suppress 6,7,8" % (v_mis, pslots)
elif alignment_method == "multiple":
x = "-v %s -M 1 --best --strata -p %s --suppress 6,7,8" % (v_mis, pslots)
elif alignment_method == "k_option":
x = "-v %s -k 1 --best -p %s --suppress 6,7,8" % (v_mis, pslots)
elif alignment_method == "n_option":
x = "-n %s -M 1 --best -p %s --suppress 6,7,8" % (v_mis, pslots)
elif alignment_method == "a_option":
x = "-v %s -a --best -p %s --suppress 6,7,8" % (v_mis, pslots)
if aligned == "None" and unaligned == "None":
fasta_command = ""
elif aligned != "None" and unaligned == "None":
fasta_command = " --al %s" % aligned
elif aligned == "None" and unaligned != "None":
fasta_command = " --un %s" % unaligned
else:
fasta_command = " --al %s --un %s" % (aligned, unaligned)
x = x + fasta_command
if out_type == "tabular":
return "bowtie %s %s %s %s > %s" % (x, index, input_format, input, output)
elif out_type == "sam":
return "bowtie %s -S %s %s %s > %s" % (x, index, input_format, input, output)
elif out_type == "bam":
return "bowtie %s -S %s %s %s |samtools view -bS - > %s" % (
x,
index,
input_format,
input,
output,
)
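# Example (a sketch; the paths are hypothetical): the "unique" method with one
# mismatch and SAM output builds the following command line:
#
#     bowtieCommandLiner(alignment_method="unique", v_mis="1", out_type="sam",
#                        input_format="fasta", input="reads.fa",
#                        index="genome/idx", output="out.sam", pslots="4")
#     # -> 'bowtie -v 1 -m 1 -p 4 --suppress 6,7,8 -S genome/idx -f reads.fa > out.sam'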
def bowtie_squash(fasta):
# make temp directory for bowtie indexes
tmp_index_dir = tempfile.mkdtemp()
ref_file = tempfile.NamedTemporaryFile(dir=tmp_index_dir)
ref_file_name = ref_file.name
    # closing the handle deletes the temporary file, but its name is still
    # available in ref_file_name
ref_file.close()
# symlink between the fasta source file and the deleted ref_file name
os.symlink(fasta, ref_file_name)
# bowtie command line, which will work after changing dir
# (cwd=tmp_index_dir)
cmd1 = "bowtie-build -f %s %s" % (ref_file_name, ref_file_name)
try:
FNULL = open(os.devnull, "w")
        # a path string for a temp file in tmp_index_dir; just a string
        tmp = tempfile.NamedTemporaryFile(dir=tmp_index_dir).name
        # create and open a file handle pointing to the temp file
        tmp_stderr = open(tmp, "wb")
        # both stderr and stdout of bowtie-build are redirected to /dev/null
proc = subprocess.Popen(
args=cmd1, shell=True, cwd=tmp_index_dir, stderr=FNULL, stdout=FNULL
)
returncode = proc.wait()
tmp_stderr.close()
FNULL.close()
sys.stdout.write(cmd1 + "\n")
except Exception as e:
# clean up temp dir
if os.path.exists(tmp_index_dir):
shutil.rmtree(tmp_index_dir)
stop_err("Error indexing reference sequence\n" + str(e))
    # no cleaning if no exception was raised; tmp_index_dir has to be cleaned
    # after bowtie_alignment()
    # bowtie-style index path without extension
index_full_path = os.path.join(tmp_index_dir, ref_file_name)
return tmp_index_dir, index_full_path
def bowtie_alignment(command_line, flyPreIndexed=""):
# make temp directory just for stderr
tmp_index_dir = tempfile.mkdtemp()
tmp = tempfile.NamedTemporaryFile(dir=tmp_index_dir).name
tmp_stderr = open(tmp, "wb")
# conditional statement for sorted bam generation viewable in Trackster
if "samtools" in command_line:
# recover the final output file name
target_file = command_line.split()[-1]
path_to_unsortedBam = os.path.join(tmp_index_dir, "unsorted.bam")
path_to_sortedBam = os.path.join(tmp_index_dir, "unsorted.bam.sorted")
first_command_line = (
" ".join(command_line.split()[:-3]) + " -o " + path_to_unsortedBam + " - "
)
# example: bowtie -v 0 -M 1 --best --strata -p 12 --suppress 6,7,8 -S
# /home/galaxy/galaxy-dist/bowtie/Dmel/dmel-all-chromosome-r5.49 -f
# /home/galaxy/galaxy-dist/database/files/003/dataset_3460.dat
# |samtools view -bS -o /tmp/tmp_PgMT0/unsorted.bam -
# generates an "unsorted.bam.sorted.bam file", NOT an
# "unsorted.bam.sorted" file
second_command_line = "samtools sort %s %s" % (
path_to_unsortedBam,
path_to_sortedBam,
)
        # the fileno() method returns the file descriptor number of tmp_stderr
p = subprocess.Popen(
args=first_command_line,
cwd=tmp_index_dir,
shell=True,
stderr=tmp_stderr.fileno(),
)
returncode = p.wait()
sys.stdout.write("%s\n" % first_command_line + str(returncode))
p = subprocess.Popen(
args=second_command_line,
cwd=tmp_index_dir,
shell=True,
stderr=tmp_stderr.fileno(),
)
returncode = p.wait()
sys.stdout.write("\n%s\n" % second_command_line + str(returncode))
if os.path.isfile(path_to_sortedBam + ".bam"):
shutil.copy2(path_to_sortedBam + ".bam", target_file)
else:
p = subprocess.Popen(args=command_line, shell=True, stderr=tmp_stderr.fileno())
returncode = p.wait()
sys.stdout.write(command_line + "\n")
tmp_stderr.close()
    # cleaning if the index was created on the fly
if os.path.exists(flyPreIndexed):
shutil.rmtree(flyPreIndexed)
# cleaning tmp files and directories
if os.path.exists(tmp_index_dir):
shutil.rmtree(tmp_index_dir)
return
def __main__():
args = Parser()
F = open(args.output, "w")
if args.index_from == "history":
tmp_dir, index_path = bowtie_squash(args.index_source)
else:
tmp_dir, index_path = "dummy/dymmy", args.index_source
command_line = bowtieCommandLiner(
args.method,
args.v_mismatches,
args.output_format,
args.aligned,
args.unaligned,
args.input_format,
args.input,
index_path,
args.output,
args.num_threads,
)
bowtie_alignment(command_line, flyPreIndexed=tmp_dir)
F.close()
if __name__ == "__main__":
__main__()
<|endoftext|> |
<|endoftext|>#!/usr/bin/python
#
import sys
infile = open(sys.argv[1], "r")
outfile = open(sys.argv[2], "w")
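# Converts FASTA to FASTQ with a constant dummy quality string, e.g.
#   ">seq1" / "ACGT"  becomes  "@HTW-seq1" / "ACGT" / "+" / "HHHH"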
for line in infile:
    if line[0] == ">":
        print("@HTW-" + line[1:-1], file=outfile)
    else:
        print(line[:-1], file=outfile)
        print("+", file=outfile)
        print("H" * len(line[:-1]), file=outfile)
infile.close()
outfile.close()
<|endoftext|> |
<|endoftext|>"""
Verbose demonstration of how to set up a server and run a remote game.
For all practical needs, using the simplesetup module should be sufficient.
"""
import sys
import subprocess
from pelita.simplesetup import SimpleServer, SimplePublisher, SimpleController
import logging
from pelita.ui.tk_viewer import TkViewer
try:
import colorama
MAGENTA = colorama.Fore.MAGENTA
RESET = colorama.Fore.RESET
except ImportError:
MAGENTA = ""
RESET = ""
def get_python_process():
py_proc = sys.executable
if not py_proc:
raise RuntimeError("Cannot retrieve current Python executable.")
return py_proc
FORMAT = (
"[%(asctime)s,%(msecs)03d][%(name)s][%(levelname)s][%(funcName)s]"
+ MAGENTA
+ " %(message)s"
+ RESET
)
logging.basicConfig(format=FORMAT, datefmt="%H:%M:%S", level=logging.INFO)
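# Layout legend (pelita convention): '#' are walls, '.' are food pellets,
# and the digits 0-3 mark the initial bot positions.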
layout = """ ##################
#0#. . 2# . 3 #
# ##### ##### #
# . # . .#1#
################## """
server = SimpleServer(
layout_string=layout, rounds=200, bind_addrs=("tcp://*:50007", "tcp://*:50008")
)
publisher = SimplePublisher("tcp://*:50012")
server.game_master.register_viewer(publisher)
tk_open = "TkViewer(%r, %r).run()" % ("tcp://localhost:50012", "tcp://localhost:50013")
tkprocess = subprocess.Popen(
[get_python_process(), "-c", "from pelita.ui.tk_viewer import TkViewer\n" + tk_open]
)
try:
    print(server.bind_addresses)
server.register_teams()
controller = SimpleController(server.game_master, "tcp://*:50013")
controller.run()
server.exit_teams()
except KeyboardInterrupt:
tkprocess.kill()
<|endoftext|> |
<|endoftext|># Main entry point for the plugin.
# Author: Yuri van Geffen
import sublime, sublime_plugin
import os
import threading
import queue
import asyncore
import socket
from itertools import chain
import re
settings = sublime.load_settings("subdebug")
TCP_IP = "127.0.0.1"
TCP_PORT = 8172
BUFFER_SIZE = 1024
BASEDIR = settings.get("basedir", "")
STEP_ON_CONNECT = settings.get("step_on_connect", False)
# Handles incoming and outgoing messages for the MobDebug client
class SubDebugHandler(asyncore.dispatcher):
def __init__(self, socket, handler_id):
asyncore.dispatcher.__init__(self, socket)
self.handler_id = handler_id
msg_queue.put(b"STEP\n" if STEP_ON_CONNECT else b"RUN\n")
for view_name, row in state_handler.breakpoints():
msg_queue.put("SETB {0} {1}\n".format(view_name, row).encode("latin-1"))
    # Reads the message-code of incoming messages and passes
    # them to the right function
def handle_read(self):
data = self.recv(BUFFER_SIZE)
if data:
print((self.handler_id, "Received: ", data))
split = data.split()
if split[0] in message_parsers:
message_parsers[split[0]](split)
def handle_write(self):
if not msg_queue.empty():
msg = msg_queue.get()
print(("Sending: ", msg))
self.send(msg)
def handle_error(self):
raise
# Starts listening on TCP_PORT and accepts incoming connections
# before passing them to an instance of SubDebugHandler
class SubDebugServer(asyncore.dispatcher):
def __init__(self, host, port):
asyncore.dispatcher.__init__(self)
self.handler_id = 0
self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
self.set_reuse_addr()
self.bind((host, port))
self.listen(1)
print(("Started listening on: ", host, ":", port))
    def handle_accept(self):
        pair = self.accept()
        if pair is not None:
            (conn_sock, client_address) = pair
            print("Incoming connection:", client_address)
            # ++x is a no-op in Python (double unary plus); increment explicitly
            self.handler_id += 1
            SubDebugHandler(conn_sock, self.handler_id)
def handle_close(self):
print("Closing server.")
self.close()
def handle_error(self):
self.close()
# Lets the user run the script (until breakpoint)
class RunCommand(sublime_plugin.WindowCommand):
def run(self):
print("Running until breakpoint...")
msg_queue.put(b"RUN\n")
state_handler.remove_line_marker()
# Lets the user step to the next line
class StepCommand(sublime_plugin.WindowCommand):
def run(self):
print("Stepping to next line...")
msg_queue.put(b"STEP\n")
# Lets the user toggle a breakpoint on the current line
class ToggleBreakpointCommand(sublime_plugin.TextCommand):
def run(self, edit):
view_name = simplify_path(self.view.file_name())
row, _ = self.view.rowcol(self.view.sel()[0].begin())
print(("Toggling breakpoint:", view_name, row))
state_handler.toggle_breakpoint(view_name, row + 1)
# Lets the user pick a base directory from where the lua is executed
class SetBasedirCommand(sublime_plugin.WindowCommand):
def run(self):
        # Run when the user wants to choose their own base directory
def choose_other(path):
global BASEDIR
BASEDIR = path.replace("\\", "/")
if BASEDIR[-1] != "/":
BASEDIR += "/"
print(("BASEDIR:", BASEDIR))
        # Run when the user has chosen a base directory option
def selected_folder(index):
global BASEDIR
            if index != -1:  # The last option lets the user choose a base dir themselves
if index == len(folders) - 1:
sublime.active_window().show_input_panel(
"Give the base directory path.",
BASEDIR,
choose_other,
None,
None,
)
else:
BASEDIR = folders[index] + "/"
state_handler.clear_state()
print(("BASEDIR:", BASEDIR))
folders = list(chain.from_iterable([w.folders() for w in sublime.windows()]))
folders = [f.replace("\\", "/") for f in folders]
        folders.append("Choose other directory...")
sublime.active_window().show_quick_panel(folders, selected_folder)
# Lets the user toggle stepping on connect
class ToggleStepOnConnectCommand(sublime_plugin.WindowCommand):
def run(self):
global STEP_ON_CONNECT
STEP_ON_CONNECT = not STEP_ON_CONNECT
print(("Step on connect:", STEP_ON_CONNECT))
def is_checked(self):
return STEP_ON_CONNECT or False
# =========Incoming message parsers=========#
# Called when the "202 Paused" message is received
def paused_command(args):
state_handler.set_line_marker(args[2].decode("utf-8"), int(args[3]))
# Mapping from incoming messages to the functions that parse them
message_parsers = {
b"202": paused_command,
}
# ===========================================#
class StateHandler:
    # Initializes the object by checking which views are available and
    # clearing the state
def __init__(self):
self.clear_state()
self.update_regions()
def clear_state(self):
self.state = {}
self.update_regions()
# Gets all available views in sublime and adds the missing ones to the state
def add_missing_views(self):
views = [v for v in sum([w.views() for w in sublime.windows()], [])]
self.views = {
            simplify_path(v.file_name()): v for v in views if v.file_name() is not None
}
        print(self.views)
for view_name, view in list(self.views.items()):
if view_name not in self.state:
self.state[view_name] = []
# Updates all views with the available state-objects using the
# assigned functions
def update_regions(self):
self.add_missing_views()
# Iterate over all files in the state
for view_name, regions in list(self.state.items()):
# Remove all old regions
for reg_type_name in self.region_types:
self.views[view_name].erase_regions(reg_type_name)
region_sets = {}
# Iterate over all regions in that file
for reg_type, line in regions:
if reg_type == "line_marker" or ("line_marker", line) not in regions:
if reg_type not in region_sets:
region_sets[reg_type] = []
region_sets[reg_type].append(
sublime.Region(self.views[view_name].text_point(line - 1, 0))
)
# Register all new regions except the line-marker with sublime
for reg_name, v in list(region_sets.items()):
print(("Adding region:", view_name, reg_name, v))
self.views[view_name].add_regions(
reg_name, v, *self.region_types[reg_name]
)
def set_line_marker(self, view_name, line_number):
view_name = simplify_path(view_name)
print(("Setting line marker:", view_name, line_number))
self.add_missing_views()
if view_name in self.views:
self.state.setdefault(view_name, [])
self.state[view_name] = [
(k, v) for k, v in self.state[view_name] if k != "line_marker"
]
self.state[view_name].append(("line_marker", line_number))
self.update_regions()
def remove_line_marker(self):
for name, view in list(self.state.items()):
self.state[name] = [(t, n) for t, n in view if t != "line_marker"]
self.update_regions()
def toggle_breakpoint(self, view_name, line_number):
self.add_missing_views()
if (
view_name in self.views
and ("breakpoint", line_number) in self.state[view_name]
):
self.remove_breakpoint(view_name, line_number)
else:
self.set_breakpoint(view_name, line_number)
self.update_regions()
def set_breakpoint(self, view_name, line_number):
self.state.setdefault(view_name, [])
self.state[view_name].append(("breakpoint", line_number))
msg_queue.put("SETB {0} {1}\n".format(view_name, line_number).encode("latin-1"))
def remove_breakpoint(self, view_name, line_number):
self.state[view_name].remove(("breakpoint", line_number))
msg_queue.put("DELB {0} {1}\n".format(view_name, line_number).encode("latin-1"))
def breakpoints(self):
ret = []
for k, v in list(self.state.items()):
for t in v:
if t[0] == "breakpoint":
ret.append((k, t[1]))
return ret
views = {}
state = {}
region_types = {
"breakpoint": ("keyword", "circle"),
"line_marker": ("keyword", "bookmark"),
}
def plugin_unloaded():
settings.set("basedir", BASEDIR)
settings.set("step_on_connect", STEP_ON_CONNECT)
print("Closing down the server...")
server.close()
def simplify_path(path):
path = path.replace("\\", "/").replace(BASEDIR, "")
path = re.sub("\.lua$", "", path) # Strip ".lua" from the path
return path
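# Example (hypothetical paths): with BASEDIR = "C:/project/",
# simplify_path("C:\\project\\src\\main.lua") returns "src/main".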
# Open a threadsafe message queue
msg_queue = queue.Queue()
state_handler = StateHandler()
# Start listening and open the asyncore loop
server = SubDebugServer(TCP_IP, TCP_PORT)
if os.name == "posix":
thread = threading.Thread(target=asyncore.loop, kwargs={"use_poll": True})
else:
thread = threading.Thread(target=asyncore.loop)
thread.start()
<|endoftext|> |
<|endoftext|>from django.contrib import sitemaps
from django.core.urlresolvers import reverse
class StaticViewSitemap(sitemaps.Sitemap):
priority = 0.5
changefreq = "monthly"
def items(self):
return [
"landpage",
"robots",
"humans",
"google_plus_verify",
"terms",
"privacy",
]
def location(self, item):
return reverse(item)
# https://docs.djangoproject.com/en/1.8/ref/contrib/sitemaps/
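# Typical wiring (illustrative, names assumed), e.g. in urls.py:
#   from django.contrib.sitemaps.views import sitemap
#   url(r"^sitemap\.xml$", sitemap,
#       {"sitemaps": {"static": StaticViewSitemap}}, name="sitemap")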
<|endoftext|> |
<|endoftext|>from django.conf.urls import patterns, include, url
from publisher.views import catalog
from publisher.views import my_publication
from publisher.views import publication
urlpatterns = patterns(
"",
# Publications(s)
url(r"^publish$", catalog.catalog_page),
url(r"^publication/(\d+)$", publication.publication_page),
url(r"^publication/(\d+)/peer_review_modal$", publication.peer_review_modal),
url(r"^publication/(\d+)/save_peer_review$", publication.save_peer_review),
url(r"^publication/(\d+)/delete_peer_review$", publication.delete_peer_review),
# My Publications
url(r"^my_publications$", my_publication.my_publications_page),
url(r"^refresh_publications_table$", my_publication.refresh_publications_table),
url(r"^my_publication_modal$", my_publication.my_publication_modal),
url(r"^save_publication$", my_publication.save_publication),
url(r"^delete_publication$", my_publication.delete_publication),
)
<|endoftext|> |
<|endoftext|>from django.core.urlresolvers import resolve
from django.http import HttpRequest
from django.http import QueryDict
from django.test import TestCase
from django.test import Client
from django.contrib.auth.models import User
from django.contrib.auth import authenticate, login, logout
from django.contrib.auth.decorators import login_required
from django.conf.urls.static import static, settings
import json
from registrar.models import Course
from registrar.models import Teacher
from registrar.models import Student
from registrar.models import Assignment
from registrar.models import AssignmentSubmission
from registrar.models import Quiz
from registrar.models import QuizSubmission
from registrar.models import Exam
from registrar.models import ExamSubmission
from registrar.models import EssayQuestion
from registrar.models import EssaySubmission
from registrar.models import MultipleChoiceQuestion
from registrar.models import MultipleChoiceSubmission
from registrar.models import ResponseQuestion
from registrar.models import ResponseSubmission
from registrar.models import TrueFalseQuestion
from registrar.models import TrueFalseSubmission
from registrar.models import PeerReview
from student.views import assignment
from student.views import quiz
from student.views import exam
from student.views import credit
TEST_USER_EMAIL = "[email protected]"
TEST_USER_USERNAME = "Ledo"
TEST_USER_PASSWORD = "password"
class CreditTestCase(TestCase):
def tearDown(self):
courses = Course.objects.all()
for course in courses:
course.delete()
User.objects.get(email=TEST_USER_EMAIL).delete()
def setUp(self):
# Create our Student.
User.objects.create_user(
email=TEST_USER_EMAIL,
username=TEST_USER_USERNAME,
password=TEST_USER_PASSWORD,
)
user = User.objects.get(email=TEST_USER_EMAIL)
teacher = Teacher.objects.create(user=user)
student = Student.objects.create(user=user)
# Create a test course.
course = Course.objects.create(
id=1,
title="Comics Book Course",
sub_title="The definitive course on comics!",
category="",
teacher=teacher,
)
# Create our assignment(s)
assignment = Assignment.objects.create(
assignment_id=1,
assignment_num=1,
title="Hideauze",
description="Anime related assignment.",
worth=25,
course=course,
)
# Create questions
EssayQuestion.objects.create(
question_id=1,
assignment=assignment,
title="Evolvers",
description="Write an essay about the Evolvers.",
)
MultipleChoiceQuestion.objects.create(
question_id=2,
assignment=assignment,
title="Hideauze",
description="Who where the Hideauze?",
a="Former Humans",
a_is_correct=True,
b="Aliens",
b_is_correct=False,
c="Magical or Supernatural Creatures",
c_is_correct=False,
d="Dark Elves",
d_is_correct=False,
e="Heavenly Creatures",
e_is_correct=False,
)
TrueFalseQuestion.objects.create(
question_id=3,
assignment=assignment,
title="Hideauze",
description="Where the Hideauze human?",
true_choice="Yes, former humans",
false_choice="No, aliens",
answer=True,
)
ResponseQuestion.objects.create(
question_id=4,
assignment=assignment,
title="Hideauze",
description="Why did humanity migrate off-world?",
answer="Because of solar hibernation causing Global Cooling on Earth.",
)
# Create our quiz
Quiz.objects.create(
quiz_id=1,
quiz_num=1,
title="Hideauze",
description="Anime related assignment.",
worth=25,
course=course,
)
quiz = Quiz.objects.get(quiz_id=1)
TrueFalseQuestion.objects.create(
question_id=5,
quiz=quiz,
title="Hideauze",
description="Where the Hideauze human?",
true_choice="Yes, former humans",
false_choice="No, aliens",
answer=True,
)
# Create our Exam
Exam.objects.create(
exam_id=1,
exam_num=1,
title="Hideauze",
description="Anime related assignment.",
worth=50,
course=course,
is_final=True,
)
exam = Exam.objects.get(exam_id=1)
MultipleChoiceQuestion.objects.create(
question_id=6,
exam=exam,
title="Hideauze",
description="Who where the Hideauze?",
a="Former Humans",
a_is_correct=True,
b="Aliens",
b_is_correct=False,
c="Magical or Supernatural Creatures",
c_is_correct=False,
d="Orcs",
d_is_correct=False,
e="Heavenly Creatures",
e_is_correct=False,
)
def get_logged_in_client(self):
client = Client()
client.login(username=TEST_USER_USERNAME, password=TEST_USER_PASSWORD)
return client
def test_url_resolves_to_credit_page_view(self):
found = resolve("/course/1/credit")
self.assertEqual(found.func, credit.credit_page)
def test_credit_page_with_no_submissions(self):
client = self.get_logged_in_client()
response = client.post("/course/1/credit")
self.assertEqual(response.status_code, 200)
self.assertIn(b"Comics Book Course", response.content)
self.assertIn(b"ajax_submit_credit_application();", response.content)
def test_url_resolves_to_submit_json(self):
found = resolve("/course/1/submit_credit_application")
self.assertEqual(found.func, credit.submit_credit_application)
def test_submit_credit_application_on_no_failing_criteria(self):
kwargs = {"HTTP_X_REQUESTED_WITH": "XMLHttpRequest"}
client = self.get_logged_in_client()
response = client.post(
"/course/1/submit_credit_application",
{
"assignment_id": 1,
},
**kwargs
)
json_string = response.content.decode(encoding="UTF-8")
array = json.loads(json_string)
self.assertEqual(response.status_code, 200)
self.assertEqual(array["status"], "failure")
self.assertEqual(array["message"], "you need to pass with at minimum 50%")
def test_submit_credit_application_on_passing_criteria_without_peer_reviews(self):
kwargs = {"HTTP_X_REQUESTED_WITH": "XMLHttpRequest"}
client = self.get_logged_in_client()
        # Set up passing submissions
# Assignment
file_path = settings.MEDIA_ROOT + "/sample.pdf"
with open(file_path, "rb") as fp:
self.assertTrue(fp is not None)
client.post(
"/course/1/assignment/1/submit_e_assignment_answer",
{"question_id": 1, "file": fp},
**kwargs
)
client.post(
"/course/1/assignment/1/submit_mc_assignment_answer",
{
"question_id": 2,
"answer": "A",
},
**kwargs
)
client.post(
"/course/1/assignment/1/submit_tf_assignment_answer",
{
"question_id": 3,
"answer": "true",
},
**kwargs
)
client.post(
"/course/1/assignment/1/submit_r_assignment_answer",
{
"question_id": 4,
"answer": "Because of Global Cooling caused by abnormal solar hibernation.",
},
**kwargs
)
client.post("/course/1/assignment/1/submit_assignment", {}, **kwargs)
# Quiz
client.post(
"/course/1/quiz/1/submit_tf_quiz_answer",
{
"question_id": 5,
"answer": "true",
},
**kwargs
)
client.post("/course/1/quiz/1/submit_quiz", {}, **kwargs)
# Exam
response = client.post(
"/course/1/exam/1/submit_mc_exam_answer",
{
"question_id": 6,
"answer": "A",
},
**kwargs
)
client.post("/course/1/exam/1/submit_exam", {}, **kwargs)
# Test
response = client.post(
"/course/1/submit_credit_application",
{
"assignment_id": 1,
},
**kwargs
)
json_string = response.content.decode(encoding="UTF-8")
array = json.loads(json_string)
self.assertEqual(response.status_code, 200)
self.assertEqual(array["status"], "success")
self.assertEqual(array["message"], "credit granted")
# Cleanup
try:
EssaySubmission.objects.get(submission_id=1).delete()
except EssaySubmission.DoesNotExist:
pass
try:
EssaySubmission.objects.get(submission_id=2).delete()
except EssaySubmission.DoesNotExist:
pass
def test_submit_credit_application_on_passing_criteria_with_peer_reviews(self):
kwargs = {"HTTP_X_REQUESTED_WITH": "XMLHttpRequest"}
client = self.get_logged_in_client()
        # Set up passing submissions
# Assignment
file_path = settings.MEDIA_ROOT + "/sample.pdf"
with open(file_path, "rb") as fp:
self.assertTrue(fp is not None)
client.post(
"/course/1/assignment/1/submit_e_assignment_answer",
{"question_id": 1, "file": fp},
**kwargs
)
client.post(
"/course/1/assignment/1/submit_mc_assignment_answer",
{
"question_id": 2,
"answer": "A",
},
**kwargs
)
client.post(
"/course/1/assignment/1/submit_tf_assignment_answer",
{
"question_id": 3,
"answer": "true",
},
**kwargs
)
client.post(
"/course/1/assignment/1/submit_r_assignment_answer",
{
"question_id": 4,
"answer": "Because of Global Cooling caused by abnormal solar hibernation.",
},
**kwargs
)
client.post("/course/1/assignment/1/submit_assignment", {}, **kwargs)
# Quiz
client.post(
"/course/1/quiz/1/submit_tf_quiz_answer",
{
"question_id": 5,
"answer": "true",
},
**kwargs
)
client.post("/course/1/quiz/1/submit_quiz", {}, **kwargs)
# Exam
response = client.post(
"/course/1/exam/1/submit_mc_exam_answer",
{
"question_id": 6,
"answer": "A",
},
**kwargs
)
client.post("/course/1/exam/1/submit_exam", {}, **kwargs)
# Peer Reviews
client.post(
"/course/1/peer_review/1/save_peer_review",
{
"question_id": 1,
"question_type": settings.ESSAY_QUESTION_TYPE,
"submission_id": 1,
"marks": 5,
},
**kwargs
)
client.post(
"/course/1/peer_review/1/save_peer_review",
{
"question_id": 4,
"question_type": settings.RESPONSE_QUESTION_TYPE,
"submission_id": 1,
"marks": 5,
},
**kwargs
)
# Test
response = client.post(
"/course/1/submit_credit_application",
{
"assignment_id": 1,
},
**kwargs
)
json_string = response.content.decode(encoding="UTF-8")
array = json.loads(json_string)
self.assertEqual(response.status_code, 200)
self.assertEqual(array["status"], "success")
self.assertEqual(array["message"], "credit granted")
# Cleanup
try:
EssaySubmission.objects.get(submission_id=1).delete()
except EssaySubmission.DoesNotExist:
pass
try:
EssaySubmission.objects.get(submission_id=2).delete()
except EssaySubmission.DoesNotExist:
pass
<|endoftext|> |
<|endoftext|># Django & Python
from django.core.urlresolvers import resolve
from django.http import HttpRequest
from django.http import QueryDict
from django.test import TestCase
from django.test import Client
from django.contrib.auth.models import User
from django.contrib.auth import authenticate, login, logout
from django.contrib.auth.decorators import login_required
from django.conf.urls.static import static, settings
import json
# Models
from registrar.models import Teacher
from registrar.models import Course
from registrar.models import Announcement
from registrar.models import Syllabus
from registrar.models import Policy
from registrar.models import Lecture
from registrar.models import Assignment
from registrar.models import Quiz
from registrar.models import Exam
from registrar.models import CourseSubmission
# View
from teacher.views import overview
# Constants
TEST_USER_EMAIL = "[email protected]"
TEST_USER_USERNAME = "Ledo"
TEST_USER_PASSWORD = "ContinentalUnion"
TEST_USER_EMAIL2 = "[email protected]"
TEST_USER_USERNAME2 = "whalesquid"
TEST_USER_PASSWORD2 = "Evolvers"
class OverviewTestCase(TestCase):
def tearDown(self):
syllabuses = Syllabus.objects.all()
for syllabus in syllabuses:
syllabus.delete()
policies = Policy.objects.all()
for policy in policies:
policy.delete()
courses = Course.objects.all()
for course in courses:
course.delete()
User.objects.all().delete()
def setUp(self):
# Create our Trudy user.
User.objects.create_user(
email=TEST_USER_EMAIL2,
username=TEST_USER_USERNAME2,
password=TEST_USER_PASSWORD2,
)
user = User.objects.get(email=TEST_USER_EMAIL2)
teacher = Teacher.objects.create(user=user)
# Create our Teacher.
user = User.objects.create_user(
email=TEST_USER_EMAIL,
username=TEST_USER_USERNAME,
password=TEST_USER_PASSWORD,
)
teacher = Teacher.objects.create(user=user)
course = Course.objects.create(
id=1,
title="Comics Book Course",
sub_title="The definitive course on comics!",
category="",
teacher=teacher,
)
def populate_course_content(self, client, kwargs):
course = Course.objects.get(id=1)
Announcement.objects.create(
announcement_id=1,
course=course,
title="Hello world!",
body="This is the body of the message.",
)
course = Course.objects.get(id=1)
file_path = settings.MEDIA_ROOT + "/sample.pdf"
with open(file_path, "rb") as fp:
self.assertTrue(fp is not None)
Syllabus.objects.create(
syllabus_id=1,
file="",
course=course,
)
with open(file_path, "rb") as fp:
self.assertTrue(fp is not None)
Policy.objects.create(
policy_id=1,
file="",
course=course,
)
Lecture.objects.create(
lecture_id=1,
lecture_num=1,
week_num=1,
title="Blade vs Evil",
description="Fighting for the destiny of the Earth.",
course=course,
)
Lecture.objects.create(
lecture_id=2,
lecture_num=2,
week_num=1,
title="Blade vs Evil",
description="Fighting for the destiny of the Earth.",
course=course,
)
Assignment.objects.create(
assignment_id=1,
assignment_num=1,
title="Hideauze",
description="Anime related assignment.",
worth=25,
course=course,
)
Quiz.objects.create(
quiz_id=1,
quiz_num=1,
title="Hideauze",
description="Anime related assignment.",
worth=25,
course=course,
)
Exam.objects.create(
exam_id=1,
exam_num=1,
title="Hideauze",
description="Anime related assignment.",
worth=50,
course=course,
is_final=True,
)
def delete_course_content(self):
for id in range(1, 10):
# Syllabus
try:
Syllabus.objects.get(syllabus_id=id).delete()
except Syllabus.DoesNotExist:
pass
# Policy
try:
Policy.objects.get(policy_id=id).delete()
except Policy.DoesNotExist:
pass
# Announcement
try:
Announcement.objects.get(announcement_id=1).delete()
except Announcement.DoesNotExist:
pass
def get_logged_in_client(self):
client = Client()
client.login(username=TEST_USER_USERNAME, password=TEST_USER_PASSWORD)
return client
def test_url_resolves_to_overview_page_view(self):
found = resolve("/teacher/course/1/overview")
self.assertEqual(found.func, overview.overview_page)
def test_overview_page(self):
client = self.get_logged_in_client()
response = client.post("/teacher/course/1/overview")
self.assertEqual(response.status_code, 200)
self.assertIn(b"Comics Book Course", response.content)
self.assertIn(b"ajax_submit_course()", response.content)
def test_submit_course_for_review(self):
client = self.get_logged_in_client()
kwargs = {"HTTP_X_REQUESTED_WITH": "XMLHttpRequest"}
# Create course content.
self.populate_course_content(client, kwargs)
response = client.post(
"/teacher/course/1/submit_course_for_review", {}, **kwargs
)
self.assertEqual(response.status_code, 200)
json_string = response.content.decode(encoding="UTF-8")
array = json.loads(json_string)
self.assertEqual(array["message"], "submitted course review")
self.assertEqual(array["status"], "success")
# Delete course content.
self.delete_course_content()
<|endoftext|> |
<|endoftext|>"""added goal properties
Revision ID: 5018059c5c8f
Revises: 16b4a243d41d
Create Date: 2015-09-23 11:56:01.897992
"""
# revision identifiers, used by Alembic.
revision = "5018059c5c8f"
down_revision = "16b4a243d41d"
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.create_table(
"goalproperties",
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("name", sa.String(length=255), nullable=False),
sa.Column("is_variable", sa.Boolean(), nullable=False),
sa.PrimaryKeyConstraint("id"),
)
op.create_table(
"goals_goalproperties",
sa.Column("goal_id", sa.Integer(), nullable=False),
sa.Column("property_id", sa.Integer(), nullable=False),
sa.Column("value", sa.String(length=255), nullable=True),
sa.Column("value_translation_id", sa.Integer(), nullable=True),
sa.Column("from_level", sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(["goal_id"], ["goals.id"], ondelete="CASCADE"),
sa.ForeignKeyConstraint(
["property_id"], ["goalproperties.id"], ondelete="CASCADE"
),
sa.ForeignKeyConstraint(
["value_translation_id"], ["translationvariables.id"], ondelete="RESTRICT"
),
sa.PrimaryKeyConstraint("goal_id", "property_id", "from_level"),
)
op.add_column(
"goals",
sa.Column("name", sa.String(length=255), nullable=False, server_default=""),
)
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_column("goals", "name")
op.drop_table("goals_goalproperties")
op.drop_table("goalproperties")
### end Alembic commands ###
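# Standard Alembic usage: apply with `alembic upgrade 5018059c5c8f`
# (or `alembic upgrade head`), revert with `alembic downgrade 16b4a243d41d`.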
<|endoftext|> |
<|endoftext|># Demonstration of `applib` features
import logging
from applib.base import Cmdln, Application
from applib.misc import require_option
from applib import textui, sh, _cmdln as cmdln
LOG = logging.getLogger(__name__)
application = Application("demo-app", "CompanyNameHere", "1.2")
@cmdln.option("", "--foo", action="store_true", help="*must pass --foo")
class Commands(Cmdln):
name = "demo-app"
def initialize(self):
require_option(self.options, "foo")
@cmdln.alias("cd")
@cmdln.option(
"-t", "--show-time", action="store_true", help="Also show the current time"
)
def do_currentdate(self, subcmd, opts):
"""${cmd_name}: Show the current date
${cmd_usage}
${cmd_option_list}
"""
with self.bootstrapped():
from datetime import datetime
now = datetime.now()
LOG.debug("datetime.now = %s", now)
if opts.show_time:
print(now)
else:
                print(now.date())
def do_ls(self, subcmd, opts):
"""${cmd_name}: Show directory listing (runs 'ls')
${cmd_usage}
${cmd_option_list}
"""
with self.bootstrapped():
print((sh.run("ls")[0].decode("utf-8")))
def do_makeerror(self, subcmd, opts, what):
"""${cmd_name}: Make an error. Use -v to see full traceback
${cmd_usage}
${cmd_option_list}
"""
with self.bootstrapped():
LOG.debug("About to make an error! %s", what)
textui.askyesno("Press enter to proceed:", default=True)
1 / 0
@cmdln.option("", "--no-break", action="store_true", help="Don't break from loop")
def do_think(self, subcmd, opts, length=200):
"""${cmd_name}: Progress bar example
${cmd_usage}
${cmd_option_list}
"""
with self.bootstrapped():
import time
length = int(length)
for x in textui.ProgressBar.iterate(
list(range(length)), post="Thought {total} thoughts in time {elapsed}"
):
if x == length - 1 and not opts.no_break:
break # test that break doesn't mess up output
time.sleep(0.1)
def do_multable(self, subcmd, opts, number=10, times=25):
"""${cmd_name}: Print multiplication table
To demonstrate `colprint` feature
${cmd_usage}
${cmd_option_list}
"""
with self.bootstrapped():
textui.colprint(
[
[str(x * y) for y in range(1, 1 + int(times))]
for x in range(1, 1 + int(number))
]
)
if __name__ == "__main__":
application.run(Commands)
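# Example invocation (assuming this file is saved as demo.py):
#   python demo.py --foo currentdate --show-time
#   python demo.py --foo multable 5 10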
<|endoftext|> |
<|endoftext|># Copyright (c) 2015-2016, Activision Publishing, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from assertpy import assert_that, fail
class TestType(object):
def test_is_type_of(self):
assert_that("foo").is_type_of(str)
assert_that(123).is_type_of(int)
assert_that(0.456).is_type_of(float)
# assert_that(234L).is_type_of(long)
assert_that(["a", "b"]).is_type_of(list)
assert_that(("a", "b")).is_type_of(tuple)
assert_that({"a": 1, "b": 2}).is_type_of(dict)
assert_that(set(["a", "b"])).is_type_of(set)
assert_that(None).is_type_of(type(None))
assert_that(Foo()).is_type_of(Foo)
assert_that(Bar()).is_type_of(Bar)
def test_is_type_of_failure(self):
try:
assert_that("foo").is_type_of(int)
fail("should have raised error")
except AssertionError as ex:
assert_that(str(ex)).is_equal_to(
"Expected <foo:str> to be of type <int>, but was not."
)
def test_is_type_of_bad_arg_failure(self):
try:
assert_that("foo").is_type_of("bad")
fail("should have raised error")
except TypeError as ex:
assert_that(str(ex)).is_equal_to("given arg must be a type")
def test_is_type_of_subclass_failure(self):
try:
assert_that(Bar()).is_type_of(Foo)
fail("should have raised error")
except AssertionError as ex:
assert_that(str(ex)).starts_with("Expected <")
assert_that(str(ex)).ends_with(":Bar> to be of type <Foo>, but was not.")
def test_is_instance_of(self):
assert_that("foo").is_instance_of(str)
assert_that(123).is_instance_of(int)
assert_that(0.456).is_instance_of(float)
# assert_that(234L).is_instance_of(long)
assert_that(["a", "b"]).is_instance_of(list)
assert_that(("a", "b")).is_instance_of(tuple)
assert_that({"a": 1, "b": 2}).is_instance_of(dict)
assert_that(set(["a", "b"])).is_instance_of(set)
assert_that(None).is_instance_of(type(None))
assert_that(Foo()).is_instance_of(Foo)
assert_that(Bar()).is_instance_of(Bar)
assert_that(Bar()).is_instance_of(Foo)
def test_is_instance_of_failure(self):
try:
assert_that("foo").is_instance_of(int)
fail("should have raised error")
except AssertionError as ex:
assert_that(str(ex)).is_equal_to(
"Expected <foo:str> to be instance of class <int>, but was not."
)
def test_is_instance_of_bad_arg_failure(self):
try:
assert_that("foo").is_instance_of("bad")
fail("should have raised error")
except TypeError as ex:
assert_that(str(ex)).is_equal_to("given arg must be a class")
class Foo(object):
pass
class Bar(Foo):
pass
<|endoftext|> |