#!/usr/bin/env python
# -*- coding: utf-8 -*-
""" Provides ``mapping`` of url paths to request handlers.
"""
from bootstrap import Bootstrap
from fund import InstantPaymentNotificationHandler
from fund import ThankYouHandler
from view import *
mapping = [
(r"/", Index),
(r"/ipn", InstantPaymentNotificationHandler),
(r"/thank-you", ThankYouHandler),
(r"/about\/?", About),
(r"/guide\/?", Guide),
(r"/guide/download\/?", Download),
(r"/guide/standards\/?", Standards),
(r"/community\/?", Community),
(r"/news\/?", News),
(r"/support\/?", Support),
(r"/contact\/?", Contact),
(r"/press\/?", Press),
(r"/legal/terms", Terms),
(r"/library\/?", Library),
(r"/library/sketchup\/?", Library),
(r"/library/series/(\w+)\/?", Library),
(r"/library/users\/?", Users),
(r"/library/users/([0-9]+)\/?", User),
(r"/library/designs/([0-9]+)\/?", Design),
(r"/library/designs/([0-9]+)/(edit)\/?", Design),
(r"/library/designs\/?", Design),
(r"/library/designs/add\/?", Design),
(r"/library/designs/add/sketchup\/?", Design),
(r"/redirect/success/([0-9]+)\/?", RedirectSuccess),
(r"/redirect/error\/?", RedirectError),
(r"/redirect/after/delete\/?", RedirectAfterDelete),
(r"/admin/moderate\/?", Moderate),
(r"/admin/bootstrap\/?", Bootstrap),
(r"/activity", ActivityScreen),
(r"/txns", TxnList),
(r"/blob64/([^/]+)/([^/]+)\/?", Base64Blob),
(r"/blob64/([^/]+)\/?", Base64Blob),
(r"/i18n/message_strings.json", MessageStrings),
(r"/.*", NotFound),
]
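# Hedged usage sketch (not part of the original module): a (regex, handler)
# mapping like the one above is typically handed straight to the framework's
# WSGI application constructor. Assuming a webapp2-style app (illustrative
# only; module name is hypothetical):
#
#     import webapp2
#     from urls import mapping   # this module
#
#     app = webapp2.WSGIApplication(mapping, debug=False)
#
# Routes are matched top to bottom, so the catch-all (r"/.*", NotFound)
# entry must remain last.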
|
import msgpack
import gevent.pool
import gevent.queue
import gevent.event
import gevent.local
import gevent.lock
import logging
import sys
import gevent_zmq as zmq
from .exceptions import TimeoutExpired
from .context import Context
from .channel_base import ChannelBase
if (sys.version_info < (2, 7)):
def get_pyzmq_frame_buffer(frame):
return frame.buffer[:]
else:
def get_pyzmq_frame_buffer(frame):
return frame.buffer
logger = logging.getLogger(__name__)
class SequentialSender(object):
def __init__(self, socket):
self._socket = socket
    def _send(self, parts):
        e = None
        for i in range(len(parts) - 1):
            try:
                self._socket.send(parts[i], copy=False, flags=zmq.SNDMORE)
            except (gevent.GreenletExit, gevent.Timeout) as e:
                if i == 0:
                    raise
                self._socket.send(parts[i], copy=False, flags=zmq.SNDMORE)
        try:
            self._socket.send(parts[-1], copy=False)
        except (gevent.GreenletExit, gevent.Timeout) as e:
            self._socket.send(parts[-1], copy=False)
        if e:
            raise e

    def __call__(self, parts, timeout=None):
        if timeout:
            with gevent.Timeout(timeout):
                self._send(parts)
        else:
            self._send(parts)
class SequentialReceiver(object):
def __init__(self, socket):
self._socket = socket
    def _recv(self):
        e = None
        parts = []
        while True:
            try:
                part = self._socket.recv(copy=False)
            except (gevent.GreenletExit, gevent.Timeout) as e:
                if len(parts) == 0:
                    raise
                part = self._socket.recv(copy=False)
            parts.append(part)
            if not part.more:
                break
        if e:
            raise e
        return parts

    def __call__(self, timeout=None):
        if timeout:
            with gevent.Timeout(timeout):
                return self._recv()
        else:
            return self._recv()
class Sender(SequentialSender):
def __init__(self, socket):
self._socket = socket
        self._send_queue = gevent.queue.Channel()
        self._send_task = gevent.spawn(self._sender)

    def close(self):
        if self._send_task:
            self._send_task.kill()

    def _sender(self):
        for parts in self._send_queue:
            super(Sender, self)._send(parts)

    def __call__(self, parts, timeout=None):
        try:
            self._send_queue.put(parts, timeout=timeout)
        except gevent.queue.Full:
            raise TimeoutExpired(timeout)
class Receiver(SequentialReceiver):
def __init__(self, socket):
self._socket = socket
        self._recv_queue = gevent.queue.Channel()
        self._recv_task = gevent.spawn(self._recver)

    def close(self):
        if self._recv_task:
            self._recv_task.kill()
        self._recv_queue = None

    def _recver(self):
        while True:
            parts = super(Receiver, self)._recv()
            self._recv_queue.put(parts)

    def __call__(self, timeout=None):
        try:
            return self._recv_queue.get(timeout=timeout)
        except gevent.queue.Empty:
            raise TimeoutExpired(timeout)
class Event(object):
__slots__ = ['_name', '_args', '_header', '_identity']
def __init__(self, name, args, context, header=None):
self._name = name
self._args = args
if (header is None):
            self._header = {'message_id': context.new_msgid(), 'v': 3}
else:
self._header = header
self._identity = None
@property
def header(self):
return self._header
@property
def name(self):
return self._name
@name.setter
def name(self, v):
self._name = v
@property
def args(self):
return self._args
@property
def identity(self):
return self._identity
@identity.setter
def identity(self, v):
self._identity = v
    def pack(self):
        return msgpack.Packer(use_bin_type=True).pack(
            (self._header, self._name, self._args))

    @staticmethod
    def unpack(blob):
        unpacker = msgpack.Unpacker(encoding='utf-8')
        unpacker.feed(blob)
        unpacked_msg = unpacker.unpack()
        try:
            (header, name, args) = unpacked_msg
        except Exception as e:
            raise Exception('invalid msg format "{0}": {1}'.format(
                unpacked_msg, e))
        # backward compatibility
        if not isinstance(header, dict):
            header = {}
        return Event(name, args, None, header)
    def __str__(self, ignore_args=False):
        if ignore_args:
            args = '[...]'
        else:
            args = self._args
            try:
                args = '<<{0}>>'.format(str(self.unpack(self._args)))
            except Exception:
                pass
        if self._identity:
            identity = ', '.join(repr(x.bytes) for x in self._identity)
            return '<{0}> {1} {2} {3}'.format(
                identity, self._name, self._header, args)
        return '{0} {1} {2}'.format(self._name, self._header, args)
class Events(ChannelBase):
def __init__(self, zmq_socket_type, context=None):
self._debug = False
self._zmq_socket_type = zmq_socket_type
        self._context = context or Context.get_instance()
        self._socket = self._context.socket(zmq_socket_type)
        if zmq_socket_type in (zmq.PUSH, zmq.PUB, zmq.DEALER, zmq.ROUTER):
            self._send = Sender(self._socket)
        elif zmq_socket_type in (zmq.REQ, zmq.REP):
            self._send = SequentialSender(self._socket)
        else:
            self._send = None
        if zmq_socket_type in (zmq.PULL, zmq.SUB, zmq.DEALER, zmq.ROUTER):
            self._recv = Receiver(self._socket)
        elif zmq_socket_type in (zmq.REQ, zmq.REP):
            self._recv = SequentialReceiver(self._socket)
        else:
            self._recv = None
@property
def recv_is_supported(self):
return (self._recv is not None)
@property
def emit_is_supported(self):
return (self._send is not None)
    def __del__(self):
        try:
            if not self._socket.closed:
                self.close()
        except (AttributeError, TypeError):
            pass

    def close(self):
        try:
            self._send.close()
        except AttributeError:
            pass
        try:
            self._recv.close()
        except AttributeError:
            pass
        self._socket.close()
@property
def debug(self):
return self._debug
@debug.setter
def debug(self, v):
if (v != self._debug):
self._debug = v
            if self._debug:
                logger.debug('debug enabled')
            else:
                logger.debug('debug disabled')
    def _resolve_endpoint(self, endpoint, resolve=True):
        if resolve:
            endpoint = self._context.hook_resolve_endpoint(endpoint)
        if isinstance(endpoint, (tuple, list)):
            r = []
            for sub_endpoint in endpoint:
                r.extend(self._resolve_endpoint(sub_endpoint, resolve))
            return r
        return [endpoint]

    def connect(self, endpoint, resolve=True):
        r = []
        for endpoint_ in self._resolve_endpoint(endpoint, resolve):
            r.append(self._socket.connect(endpoint_))
            logger.debug('connected to %s (status=%s)', endpoint_, r[-1])
        return r

    def bind(self, endpoint, resolve=True):
        r = []
        for endpoint_ in self._resolve_endpoint(endpoint, resolve):
            r.append(self._socket.bind(endpoint_))
            logger.debug('bound to %s (status=%s)', endpoint_, r[-1])
        return r

    def disconnect(self, endpoint, resolve=True):
        r = []
        for endpoint_ in self._resolve_endpoint(endpoint, resolve):
            r.append(self._socket.disconnect(endpoint_))
            logger.debug('disconnected from %s (status=%s)', endpoint_, r[-1])
        return r
    def new_event(self, name, args, xheader=None):
        event = Event(name, args, context=self._context)
        if xheader:
            event.header.update(xheader)
        return event

    def emit_event(self, event, timeout=None):
        if self._debug:
            logger.debug('--> %s', event)
        if event.identity:
            parts = list(event.identity or list())
            parts.extend(['', event.pack()])
        elif self._zmq_socket_type in (zmq.DEALER, zmq.ROUTER):
            parts = ('', event.pack())
        else:
            parts = (event.pack(),)
        self._send(parts, timeout)
    def recv(self, timeout=None):
        parts = self._recv(timeout=timeout)
        if len(parts) > 2:
            identity = parts[0:-2]
            blob = parts[-1]
        elif len(parts) == 2:
            identity = parts[0:-1]
            blob = parts[-1]
        else:
            identity = None
            blob = parts[0]
        event = Event.unpack(get_pyzmq_frame_buffer(blob))
        event.identity = identity
        if self._debug:
            logger.debug('<-- %s', event)
        return event

    def setsockopt(self, *args):
        return self._socket.setsockopt(*args)

    @property
    def context(self):
        return self._context
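# A minimal usage sketch (not from the original module), assuming the
# zerorpc-style Context and a running gevent hub; the endpoint is
# illustrative:
#
#     push = Events(zmq.PUSH)
#     push.connect('tcp://127.0.0.1:4242')
#     pull = Events(zmq.PULL)
#     pull.bind('tcp://127.0.0.1:4242')
#
#     push.emit_event(push.new_event('ping', ('hello',)))
#     event = pull.recv()    # Event named 'ping' with args ('hello',)
|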
#!/usr/bin/env python
"""Django's command line utility."""
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "project.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
"""Installer for hippybot
"""
import os
cwd = os.path.dirname(__file__)
__version__ = open(os.path.join(cwd, "hippybot", "version.txt"), "r").read().strip()
try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(
name="hippybot",
description="Python Hipchat bot",
long_description=open("README.rst").read(),
version=__version__,
author="Wes Mason",
author_email="wes[at]1stvamp[dot]org",
url="http://github.com/1stvamp/hippybot",
packages=find_packages(exclude=["ez_setup"]),
install_requires=open("requirements.txt").readlines(),
package_data={"hippybot": ["version.txt"]},
include_package_data=True,
extras_require={
"plugins": open("extras_requirements.txt").readlines(),
},
entry_points={
"console_scripts": [
"hippybot = hippybot.bot:main",
],
},
license="BSD",
)
|
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "twobuntu.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
# -*- coding: utf-8 -*-
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = []
operations = [
migrations.CreateModel(
name="Category",
fields=[
(
"id",
models.AutoField(
verbose_name="ID",
serialize=False,
auto_created=True,
primary_key=True,
),
),
(
"name",
models.CharField(
help_text=b"The name of the category.", max_length=40
),
),
(
"image",
models.ImageField(
help_text=b"A representative image.",
null=True,
upload_to=b"categories",
blank=True,
),
),
],
options={
"ordering": ("name",),
"verbose_name_plural": "Categories",
},
bases=(models.Model,),
),
]
|
import twitter
from django.contrib import messages
from django.contrib.auth.decorators import user_passes_test
from django.db import transaction
from django.shortcuts import redirect, render
from twobuntu.news.forms import AddItemForm
@user_passes_test(lambda u: u.is_staff)
def add(request):
"""
Add news items to the home page.
"""
if request.method == "POST":
form = AddItemForm(data=request.POST)
if form.is_valid():
item = form.save(commit=False)
item.reporter = request.user
try:
with transaction.atomic():
item.save()
except twitter.TwitterError as e:
messages.error(
request,
'Twitter error: "%s" Please try again.' % e.message[0]["message"],
)
else:
messages.info(request, "Your news item has been published!")
return redirect("home")
else:
form = AddItemForm()
return render(
request,
"form.html",
{
"title": "Add Item",
"form": form,
"description": "Enter the details for the news item below.",
"action": "Add",
},
)
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (c) 2010-2015, 2degrees Limited.
# All Rights Reserved.
#
# This file is part of django-wsgi <https://github.com/2degrees/django-wsgi/>,
# which is subject to the provisions of the BSD at
# <http://dev.2degreesnetwork.com/p/2degrees-license.html>. A copy of the
# license should accompany this distribution. THIS SOFTWARE IS PROVIDED "AS IS"
# AND ANY AND ALL EXPRESS OR IMPLIED WARRANTIES ARE DISCLAIMED, INCLUDING, BUT
# NOT LIMITED TO, THE IMPLIED WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST
# INFRINGEMENT, AND FITNESS FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""
Exceptions raised by :mod:`django_wsgi.`
"""
__all__ = ("DjangoWSGIException", "ApplicationCallError")
class DjangoWSGIException(Exception):
"""Base class for exceptions raised by :mod:`django_wsgi`."""
pass
class ApplicationCallError(DjangoWSGIException):
"""
Exception raised when an embedded WSGI application was not called properly.
"""
pass
|
import boto
import boto.s3.connection
from django.conf import settings
import logging
log = logging.getLogger(__name__)
def get_s3_connection():
if settings.S3_ACCESS_KEY and settings.S3_SECRET_KEY and settings.S3_HOST:
log.debug(
"Connecting to {}, with secure connection is {}".format(
settings.S3_HOST, settings.S3_SECURE_CONNECTION
)
)
return boto.connect_s3(
aws_access_key_id=settings.S3_ACCESS_KEY,
aws_secret_access_key=settings.S3_SECRET_KEY,
host=settings.S3_HOST,
is_secure=settings.S3_SECURE_CONNECTION,
calling_format=boto.s3.connection.OrdinaryCallingFormat(),
)
return None
def get_or_create_bucket(s3_connection):
    # lookup() returns None for a missing bucket, whereas get_bucket()
    # raises S3ResponseError by default.
    bucket = s3_connection.lookup(settings.S3_BUCKET_NAME)
    if bucket is None:
        bucket = s3_connection.create_bucket(settings.S3_BUCKET_NAME)
    return bucket
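# Hedged usage sketch (not part of the original module); the key name and
# payload are illustrative:
#
#     conn = get_s3_connection()
#     if conn is not None:
#         bucket = get_or_create_bucket(conn)
#         key = bucket.new_key('reports/example.txt')
#         key.set_contents_from_string('hello')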
|
from django.db import models
import datetime
from common.models import Project
class Stage(models.Model):
name = models.CharField(max_length=128)
project = models.ForeignKey(Project)
text = models.TextField(default="", blank=True)
link = models.URLField(default=None, blank=True, null=True)
state = models.CharField(max_length=24, default="info", blank=True)
weight = models.IntegerField(default=0)
    # pass the callable, not its result, so the default is evaluated per save
    updated = models.DateTimeField(default=datetime.datetime.now)
def save(self, *args, **kwargs):
self.updated = datetime.datetime.now()
return super(Stage, self).save(*args, **kwargs)
def __str__(self):
return self.name
|
# -*- coding: utf-8 -*-
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
("testreport", "0026_testresult_launch_item_id"),
]
operations = [
migrations.AddField(
model_name="testplan",
name="filter",
field=models.TextField(
default=b"",
max_length=128,
verbose_name="Started by filter",
blank=True,
),
preserve_default=True,
),
migrations.AddField(
model_name="testplan",
name="main",
field=models.BooleanField(
default=False, verbose_name="Show in short statistic"
),
preserve_default=True,
),
]
|
import gevent
from gevent import monkey
monkey.patch_all()
import time
import smtplib
TEST_MAIL = '\nDate: Wed, 30 Jul 2014 03:29:50 +0800 (CST)\nFrom: =?utf-8?B?6IGU5oOz?= <[email protected]>\nTo: [email protected]\nMessage-ID: <766215193.1675381406662190229.JavaMail.root@USS-01>\nSubject: =?utf-8?B?6IGU5oOz56e75Yqo5LqS6IGU572R5pyN5Yqh5rOo5YaM56Gu6K6k6YKu5Lu2?=\nMIME-Version: 1.0\nContent-Type: multipart/mixed; \n boundary="----=_Part_335076_1490382245.1406662190222"\n\n------=_Part_335076_1490382245.1406662190222\nContent-Type: multipart/related; \n boundary="----=_Part_335077_605133107.1406662190222"\n\n------=_Part_335077_605133107.1406662190222\nContent-Type: text/html;charset=utf-8\nContent-Transfer-Encoding: quoted-printable\n\n <html><head></head><body>=E5=B0=8A=E6=95=AC=E7=9A=84=E7=94=A8=E6=88=B7=EF=\n=BC=9A<br/>=E6=82=A8=E5=A5=BD=EF=BC=81<br/>=E8=AF=B7=E7=82=B9=E5=87=BB=E8=\n=81=94=E6=83=B3=E5=B8=90=E5=8F=B7=E7=A1=AE=E8=AE=A4=E9=93=BE=E6=8E=A5=EF=BC=\n=8C=E4=BB=A5=E6=A0=A1=E9=AA=8C=E6=82=A8=E7=9A=84=E8=81=94=E6=83=B3=E5=B8=90=\n=E5=8F=B7=EF=BC=9A<br/><a href=3D"https://passport.lenovo.com/wauthen/verif=\nyuser?username=3D&vc=3DuHwf&accountid=3D1358934&lenovoid.=\ncb=3D&lenovoid.realm=3Dthinkworld.lenovo.com&lang=3Dzh_CN&display=3D&lenovo=\nid.ctx=3D&lenovoid.action=3D&lenovoid.lang=3D&lenovoid.uinfo=3D&lenovoid.vp=\n=3D&verifyFlag=3Dnull">https://passport.lenovo.com/wauthen/verifyuser?usern=\name=3o.org&vc=3DuHwf&accountid=3&lenovoid.cb=3D&lenov=\noid.realm=3Dthinkworld.lenovo.com&lang=3Dzh_CN&display=3D&lenovoid.ctx=3D&l=\nenovoid.action=3D&lenovoid.lang=3D&lenovoid.uinfo=3D&lenovoid.vp=3D&verifyF=\nlag=3Dnull</a><br/>=EF=BC=88=E5=A6=82=E6=9E=9C=E4=B8=8A=E9=9D=A2=E7=9A=84=\n=E9=93=BE=E6=8E=A5=E6=97=A0=E6=B3=95=E7=82=B9=E5=87=BB=EF=BC=8C=E6=82=A8=E4=\n=B9=9F=E5=8F=AF=E4=BB=A5=E5=A4=8D=E5=88=B6=E9=93=BE=E6=8E=A5=EF=BC=8C=E7=B2=\n=98=E8=B4=B4=E5=88=B0=E6=82=A8=E6=B5=8F=E8=A7=88=E5=99=A8=E7=9A=84=E5=9C=B0=\n=E5=9D=80=E6=A0=8F=E5=86=85=EF=BC=8C=E7=84=B6=E5=90=8E=E6=8C=89=E2=80=9C=E5=\n=9B=9E=E8=BD=A6=E2=80=9D=E9=94=AE)=E3=80=82<br/>=E6=9D=A5=E8=87=AA=E8=81=94=\n=E6=83=B3=E5=B8=90=E5=8F=B7</body></html>\n------=_Part_335077_605133107.1406662190222--\n\n------=_Part_335076_1490382245.1406662190222--\n'
def timeit(func):
    def wrap(num, port, *args, **kwargs):
        max_rqs = 0
        for _ in range(3):
            conns = [smtplib.SMTP() for _ in range(num)]
            for con in conns:
                con.connect('127.0.0.1', port)
            start_at = time.time()
            func(num, conns)
            interval = time.time() - start_at
            for con in conns:
                try:
                    con.quit()
                    con.close()
                except Exception:
                    pass
            time.sleep(3)
            rqs = num / interval
            max_rqs = max(rqs, max_rqs)
        return max_rqs
    return wrap
@timeit
def helo(num, conns):
    tasks = [gevent.spawn(x.helo) for x in conns]
    gevent.joinall(tasks)

@timeit
def send(num, conns):
    tasks = [gevent.spawn(x.sendmail, '[email protected]', ['[email protected]'], TEST_MAIL)
             for x in conns]
    gevent.joinall(tasks)

def main(port, num):
    print('%d %s %s' % (num, helo(num, port), send(num, port)))

if __name__ == '__main__':
    import sys
    try:
        main(int(sys.argv[1]), int(sys.argv[2]))
    except IndexError:
        print('python concurrency.py <port> <connection number>')
|
#!/usr/bin/env python
import sys
import json
if sys.version_info < (3,):
def b(x):
return x
def s(x):
return x
else:
def b(x):
return bytes(x, "utf-8")
def s(x):
return x.decode("utf-8")
def parse_payload(payload):
if not isinstance(payload, str):
payload = " ".join(payload)
try:
json.loads(payload)
except ValueError:
kv = payload.split(" ", 1)
if len(kv) > 1:
payload = '{"%s": "%s"}' % (kv[0], kv[1])
else:
payload = "%s" % kv[0]
return payload
def requires_elements(xs, dictionary):
missing_values = []
for x in xs:
if x not in dictionary:
missing_values.append(x)
if missing_values:
err_msg = ", ".join(missing_values)
raise KeyError("Missing values %s" % (err_msg))
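# Quick illustrative check of parse_payload (not in the original file):
#
#     parse_payload('{"a": 1}')         # already valid JSON -> unchanged
#     parse_payload("level high")       # -> '{"level": "high"}'
#     parse_payload(["level", "high"])  # list is joined, then converted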
|
from flask_resty import Api, GenericModelView
from marshmallow import fields, Schema
import pytest
from sqlalchemy import Column, Integer, String
import helpers
@pytest.yield_fixture
def models(db):
    class Widget(db.Model):
        __tablename__ = 'widgets'
        id_1 = Column(Integer, primary_key=True)
        id_2 = Column(Integer, primary_key=True)
        name = Column(String)

    db.create_all()

    yield {'widget': Widget}

    db.drop_all()
@pytest.fixture
def schemas():
    class WidgetSchema(Schema):
        id_1 = fields.Integer(as_string=True)
        id_2 = fields.Integer(as_string=True)
        name = fields.String()

    return {'widget': WidgetSchema()}
@pytest.fixture(autouse=True)
def routes(app, models, schemas):
class WidgetViewBase(GenericModelView):
model = models['widget']
schema = schemas['widget']
id_fields = ('id_1', 'id_2')
    class WidgetListView(WidgetViewBase):
        def get(self):
            return self.list()

        def post(self):
            return self.create()

    class WidgetView(WidgetViewBase):
        def get(self, id_1, id_2):
            return self.retrieve((id_1, id_2))

        def patch(self, id_1, id_2):
            return self.update((id_1, id_2), partial=True)

        def delete(self, id_1, id_2):
            return self.destroy((id_1, id_2))

    api = Api(app)
    api.add_resource('/widgets', WidgetListView, WidgetView)
@pytest.fixture(autouse=True)
def data(db, models):
    db.session.add_all((
        models['widget'](id_1=1, id_2=2, name='Foo'),
        models['widget'](id_1=1, id_2=3, name='Bar'),
        models['widget'](id_1=4, id_2=5, name='Baz'),
    ))
    db.session.commit()
def test_list(client):
    response = client.get('/widgets')
    assert response.status_code == 200
    assert helpers.get_data(response) == [
        {'id_1': '1', 'id_2': '2', 'name': 'Foo'},
        {'id_1': '1', 'id_2': '3', 'name': 'Bar'},
        {'id_1': '4', 'id_2': '5', 'name': 'Baz'},
    ]

def test_retrieve(client):
    response = client.get('/widgets/1/2')
    assert response.status_code == 200
    assert helpers.get_data(response) == {
        'id_1': '1', 'id_2': '2', 'name': 'Foo',
    }

def test_create(client):
    response = helpers.request(
        client, 'POST', '/widgets',
        {'id_1': '4', 'id_2': '6', 'name': 'Qux'},
    )
    assert response.status_code == 201
    assert response.headers['Location'] == 'http://localhost/widgets/4/6'
    assert helpers.get_data(response) == {
        'id_1': '4', 'id_2': '6', 'name': 'Qux',
    }

def test_update(client):
    update_response = helpers.request(
        client, 'PATCH', '/widgets/1/2',
        {'id_1': '1', 'id_2': '2', 'name': 'Qux'},
    )
    assert update_response.status_code == 204

    retrieve_response = client.get('/widgets/1/2')
    assert retrieve_response.status_code == 200
    assert helpers.get_data(retrieve_response) == {
        'id_1': '1', 'id_2': '2', 'name': 'Qux',
    }

def test_destroy(client):
    destroy_response = client.delete('/widgets/1/2')
    assert destroy_response.status_code == 204

    retrieve_response = client.get('/widgets/1/2')
    assert retrieve_response.status_code == 404
|
from .dogpile import Dogpile
|
"""
RPi-Tron-Radio
Raspberry Pi Web-Radio with 2.8" TFT Touchscreen and Tron-styled graphical interface
GitHub: http://github.com/5volt-junkie/RPi-Tron-Radio
Blog: http://5volt-junkie.net
MIT License: see license.txt
"""
import pygame
from pygame.locals import *
import time
import datetime
import sys
import os
import glob
import subprocess
os.environ["SDL_FBDEV"] = "/dev/fb1"
os.environ["SDL_MOUSEDEV"] = "/dev/input/touchscreen"
os.environ["SDL_MOUSEDRV"] = "TSLIB"
#colors R G B
white = (255, 255, 255)
red = (255, 0, 0)
green = ( 0, 255, 0)
blue = ( 0, 0, 255)
black = ( 0, 0, 0)
cyan = ( 50, 255, 255)
magenta = (255, 0, 255)
yellow = (255, 255, 0)
orange = (255, 127, 0)
#screen size
width = 320
height = 240
size = (width, height)
screen = pygame.display.set_mode(size)
pygame.init()
#disable mouse cursor
pygame.mouse.set_visible(False)
#define font
font = pygame.font.Font(None, 25)
#screensaver
screensaver_timer = 5 #time until screensaver will be enabled, in minutes
screensaver = False
#load default skin
menu = 1
skin_number = 1
max_skins = 8
font_color = cyan
skin1 = pygame.image.load("skins/skin_tron_m1.png")
skin2 = pygame.image.load("skins/skin_tron_m2.png")
skin = skin1
screen.blit(skin, (0, 0))
#initial volume settings
subprocess.call('mpc volume 100' , shell=True)
reboot_label = font.render("rebooting...", 1, (font_color))
poweroff_label = font.render("shutting down", 1, (font_color))
song_title = " "
playlist = " "
def reboot():
screen.fill(black)
screen.blit(reboot_label, (10, 100))
pygame.display.flip()
time.sleep(5)
subprocess.call('mpc stop' , shell=True)
subprocess.call('reboot' , shell=True)
def poweroff():
screen.fill(black)
screen.blit(poweroff_label, (10, 100))
pygame.display.flip()
time.sleep(5)
subprocess.call('mpc stop' , shell=True)
subprocess.call('poweroff' , shell=True)
#copy playing title to favorite.txt
def favorite():
print(song_title)
f = open ('/var/www/favorite.txt' , 'a')
f.write('-' + song_title + '\n')
f.close()
#function runs if touchscreen was touched (and screensaver is disabled)
def on_touch():
#x_min x_max y_min y_max
if 13 <= pos[0] <= 75 and 121 <= pos[1] <= 173:
#print "button1 was pressed"
button(1)
if 90 <= pos[0] <= 152 and 121 <= pos[1] <= 173:
#print "button2 was pressed"
button(2)
if 167 <= pos[0] <= 229 and 121 <= pos[1] <= 173:
#print "button3 was pressed"
button(3)
if 244 <= pos[0] <= 306 and 121 <= pos[1] <= 173:
#print "button4 was pressed"
button(4)
if 13 <= pos[0] <= 75 and 181 <= pos[1] <= 233:
#print "button5 was pressed"
button(5)
if 90 <= pos[0] <= 152 and 181 <= pos[1] <= 233:
#print "button6 was pressed"
button(6)
if 167 <= pos[0] <= 229 and 181 <= pos[1] <= 233:
#print "button7 was pressed"
button(7)
if 244 <= pos[0] <= 306 and 181 <= pos[1] <= 233:
#print "button8 was pressed"
button(8)
#which button (and which menu) was pressed on touch
def button(number):
global menu
if menu == 1:
if number == 1:
subprocess.call('mpc play' , shell=True)
#print "play"
if number == 2:
subprocess.call('mpc pause' , shell=True)
#print "pause"
if number == 3:
subprocess.call('mpc volume +5' , shell=True)
#print "vol +x"
if number == 4:
subprocess.call('mpc volume 0' , shell=True)
#print "vol 0"
if number == 5:
subprocess.call('mpc prev' , shell=True)
#print "prev"
if number == 6:
subprocess.call('mpc next' , shell=True)
#print "next"
if number == 7:
subprocess.call('mpc volume -5' , shell=True)
#print "vol -x"
if number == 8:
#print "go to menu 2"
menu = 2
update_screen()
return
if menu == 2:
if number == 1:
favorite()
if number == 2:
#print "switch skin"
global skin_number
skin_number = skin_number+1
#print skin_number
update_screen()
if number == 3:
#print "run in background"
pygame.quit()
sys.exit()
if number == 4:
#print "quit radio"
subprocess.call('mpc stop', shell=True)
pygame.quit()
sys.exit()
if number == 5:
print("power off")
poweroff()
if number == 6:
print("reboot")
reboot()
if number == 7:
#print "update screen"
update_screen()
if number == 8:
#print "go to menu 1"
menu = 1
update_screen()
return
#function to update screen
def update_screen():
global skin_number
if skin_number == 9:
skin_number = 1
if skin_number == 1:
skin1 = pygame.image.load("skins/skin_tron_m1.png")
skin2 = pygame.image.load("skins/skin_tron_m2.png")
font_color = cyan
if skin_number == 2:
skin1 = pygame.image.load("skins/skin_blue_m1.png")
skin2 = pygame.image.load("skins/skin_blue_m2.png")
font_color = blue
if skin_number == 3:
skin1 = pygame.image.load("skins/skin_green_m1.png")
skin2 = pygame.image.load("skins/skin_green_m2.png")
font_color = green
if skin_number == 4:
skin1 = pygame.image.load("skins/skin_magenta_m1.png")
skin2 = pygame.image.load("skins/skin_magenta_m2.png")
font_color = magenta
if skin_number == 5:
skin1 = pygame.image.load("skins/skin_orange_m1.png")
skin2 = pygame.image.load("skins/skin_orange_m2.png")
font_color = orange
if skin_number == 6:
skin1 = pygame.image.load("skins/skin_red_m1.png")
skin2 = pygame.image.load("skins/skin_red_m2.png")
font_color = red
if skin_number == 7:
skin1 = pygame.image.load("skins/skin_white_m1.png")
skin2 = pygame.image.load("skins/skin_white_m2.png")
font_color = white
if skin_number == 8:
skin1 = pygame.image.load("skins/skin_yellow_m1.png")
skin2 = pygame.image.load("skins/skin_yellow_m2.png")
font_color = yellow
global menu
if screensaver == False:
current_time = datetime.datetime.now().strftime('%H:%M %d.%m.%Y')
time_label = font.render(current_time, 1, (font_color))
if menu == 1:
skin = skin1
screen.blit(skin, (0, 0))
lines = subprocess.check_output('mpc current', shell=True).split(":")
if len(lines) == 1:
line1 = lines[0]
line1 = line1[:-1]
station_label = font.render("Station: no data", 1, (font_color))
else:
line1 = lines[0]
line2 = lines[1]
line1 = line1[:30]
station_label = font.render('Station: ' + line1 + '.', 1, (font_color))
lines = subprocess.check_output('mpc -f [%title%]', shell=True).split("\n")
line1 = lines[0]
if line1.startswith("volume"):
title_label = font.render("Title: no data! Try with PLAY!", 1, (font_color))
else:
line1 = lines[0]
line2 = lines[1]
global song_title
song_title = line1
line1 = line1[:30]
title_label = font.render(line1 + '.', 1, (font_color))
title = font.render("Now playing:", 1, (font_color))
screen.blit(skin, (0, 0))
screen.blit(station_label, (23, 15))
screen.blit(title, (23, 40))
screen.blit(title_label, (23, 60))
screen.blit(time_label, (160, 90))
lines = subprocess.check_output('mpc volume', shell=True).split("\n")
line1 = lines[0]
volume_label = font.render(line1, 1, (font_color))
screen.blit(volume_label, (23, 90))
pygame.display.flip()
if menu == 2:
skin = skin2
screen.blit(skin, (0, 0))
#get and display ip
ip = subprocess.check_output('hostname -I', shell=True).strip()
ip_label = font.render('IP: ' + ip, 1, (font_color))
screen.blit(ip_label, (23, 15))
#get and display cpu temp
cpu_temp = subprocess.check_output('/opt/vc/bin/vcgencmd measure_temp', shell=True).strip()
temp = font.render('cpu ' + cpu_temp, 1, (font_color))
screen.blit(temp, (23, 35))
#get current time
screen.blit(time_label, (90, 90))
pygame.display.flip()
if screensaver == True:
screen.fill(white)
pygame.display.flip()
minutes = 0
#userevent on every 1000ms, used for screensaver
pygame.time.set_timer(USEREVENT +1, 60000)
subprocess.call('mpc play' , shell=True)
update_screen()
running = True
while running:
for event in pygame.event.get():
if event.type == USEREVENT +1:
minutes += 1
if event.type == pygame.QUIT:
print("Quit radio")
pygame.quit()
sys.exit()
if event.type == pygame.KEYDOWN:
if event.key == K_ESCAPE:
print("Quit radio")
pygame.quit()
sys.exit()
#if screensaver is enabled and the screen was touched,
#just disable screensaver, reset timer and update screen
#no button state will be checked
if event.type == pygame.MOUSEBUTTONDOWN and screensaver == True:
minutes = 0
subprocess.call('echo 0 | sudo tee /sys/class/backlight/*/bl_power' , shell=True)
screensaver = False
update_screen()
break
#if screen was touched and screensaver is disabled,
#get position of touched button, call on_touch(), reset timer and update screen
if event.type == pygame.MOUSEBUTTONDOWN and screensaver == False:
            pos = (pygame.mouse.get_pos()[0], pygame.mouse.get_pos()[1])
minutes = 0
on_touch()
update_screen()
#enable screensaver on timer overflow
if minutes > screensaver_timer:
screensaver = True
subprocess.call('echo 1 | sudo tee /sys/class/backlight/*/bl_power' , shell=True)
update_screen()
update_screen()
time.sleep(0.1)
|
# coding:utf8
"""
Created on 2013-7-10
memcached client
@author: lan (www.9miao.com)
"""
import memcache
class MemConnError(Exception):
""" """
def __str__(self):
return "memcache connect error"
class MemClient:
"""memcached"""
def __init__(self, timeout=0):
""" """
self._hostname = ""
self._urls = []
self.connection = None
def connect(self, urls, hostname):
"""memcached connect"""
self._hostname = hostname
self._urls = urls
self.connection = memcache.Client(self._urls, debug=0)
if not self.connection.set("__testkey__", 1):
raise MemConnError()
def produceKey(self, keyname):
""" """
if isinstance(keyname, str):
return "".join([self._hostname, ":", keyname])
else:
            raise TypeError("type error")
def get(self, key):
""" """
key = self.produceKey(key)
return self.connection.get(key)
def get_multi(self, keys):
""" """
keynamelist = [self.produceKey(keyname) for keyname in keys]
olddict = self.connection.get_multi(keynamelist)
newdict = dict(
list(
zip(
[keyname.split(":")[-1] for keyname in list(olddict.keys())],
list(olddict.values()),
)
)
)
return newdict
def set(self, keyname, value):
""" """
key = self.produceKey(keyname)
        result = self.connection.set(key, value)
        if not result:  # the write failed
            self.connect(self._urls, self._hostname)  # reconnect
            return self.connection.set(key, value)
        return result
def set_multi(self, mapping):
""" """
newmapping = dict(
list(
zip(
[self.produceKey(keyname) for keyname in list(mapping.keys())],
list(mapping.values()),
)
)
)
        result = self.connection.set_multi(newmapping)
        if result:  # set_multi returns the keys that failed to be written
            self.connect(self._urls, self._hostname)  # reconnect
            return self.connection.set_multi(newmapping)
        return result
def incr(self, key, delta):
""" """
key = self.produceKey(key)
return self.connection.incr(key, delta)
def delete(self, key):
""" """
key = self.produceKey(key)
return self.connection.delete(key)
def delete_multi(self, keys):
""" """
keys = [self.produceKey(key) for key in keys]
return self.connection.delete_multi(keys)
def flush_all(self):
""" """
self.connection.flush_all()
mclient = MemClient()
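# Illustrative usage (not part of the original module); the host list and
# key names are made up:
#
#     mclient.connect(["127.0.0.1:11211"], "myapp")
#     mclient.set("counter", 1)
#     mclient.get("counter")    # -> 1 (stored under "myapp:counter")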
|
""" Really basic gatttool (BlueZ) wrapper
Based on https://github.com/stratosinc/pygatt
Part of https://github.com/ALPSquid/thebutton-monitor
"""
import pexpect
class connect:
"""Use to initiate a connection to a GATT device
Example: bt_device = gatt.connect('AB:CD:EF:01:23:45')
"""
def __init__(self, address):
self.address = "" # Connected bluetooth device address. Assigned from connect()
self.conn = None # pexpect.spawn() object for the gatttool command
self.connect(address)
def connect(self, address, adapter="hci0"):
"""Open an interactive connection to a bluetooth device
:param address: Bluetooth device address
:param adapter: Bluetooth adapter to use. Default: hci0
"""
if self.conn is None:
self.address = address
cmd = " ".join(["gatttool", "-b", address, "-i", adapter, "-I"])
self.conn = pexpect.spawn(cmd)
self.conn.expect(r"\[LE\]>", timeout=1)
self.conn.sendline("connect")
try:
self.conn.expect(r"Connection successful", timeout=10)
print(("Connected to " + address))
except pexpect.TIMEOUT:
raise Exception("Unable to connect to device")
else:
raise Exception(
"Device already connected! Call disconnect before attempting a new connection"
)
def reconnect(self):
"""Check and attempt to reconnect to device if necessary
:return: True if a reconnect was performed
"""
try:
self.conn.expect(r"Disconnected", timeout=0.1)
self.conn.sendline("connect")
try:
self.conn.expect(r"Connection successful", timeout=10)
print(("Reconnected to device: " + self.address))
except pexpect.TIMEOUT:
# Continue and try to reconnect next time
print(("Lost connection to device: " + self.address))
return True
except pexpect.TIMEOUT:
# No need to reconnect
return False
def disconnect(self):
"""Disconnect from current bluetooth device"""
if self.conn is not None:
self.conn.sendline("exit")
self.conn = None
print(("Disconnected from " + self.address))
def write(self, handle, value):
"""Write a value to the specified handle
:param handle: address to write to. e.g. 0016
:param value: value to write
"""
self.send(" ".join(["char-write-cmd", "0x" + handle, value]))
def read(self, handle):
"""Read from the specified handle
:param handle: address to read from. e.g. 0016
"""
self.send("char-read-hnd 0x" + handle, r"descriptor: .* \r", timeout=5)
val = " ".join(self.conn.after.decode("utf-8").split()[1:])
return val
def send(self, cmd, expect=None, timeout=5):
"""Send command to device. Attempt a reconnect if disconnected
:param cmd: Command to send
"""
self.conn.sendline(cmd)
if expect is not None:
try:
self.conn.expect(expect, timeout)
except pexpect.TIMEOUT:
if self.reconnect():
self.conn.sendline(cmd)
else:
if self.reconnect():
self.conn.sendline(cmd)
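# Hedged usage sketch (not from the original file); the address and handle
# are illustrative:
#
#     bt_device = connect('AB:CD:EF:01:23:45')
#     bt_device.write('0016', '0100')   # e.g. enable notifications
#     value = bt_device.read('0016')
#     bt_device.disconnect()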
|
# -*- coding: utf-8 -*-
from django.db import models, migrations
import wagtail.wagtailcore.fields
class Migration(migrations.Migration):
dependencies = [
("puput", "0001_initial"),
]
operations = [
migrations.AlterField(
model_name="blogpage",
name="description",
field=models.CharField(
max_length=255,
help_text="The blog description that will appear under the title.",
verbose_name="Description",
blank=True,
),
),
migrations.AlterField(
model_name="category",
name="description",
field=models.CharField(
max_length=500, verbose_name="Description", blank=True
),
),
migrations.AlterField(
model_name="category",
name="name",
field=models.CharField(
max_length=80, unique=True, verbose_name="Category name"
),
),
migrations.AlterField(
model_name="category",
name="parent",
field=models.ForeignKey(
to="puput.Category",
related_name="children",
null=True,
verbose_name="Parent category",
blank=True,
),
),
migrations.AlterField(
model_name="entrypage",
name="excerpt",
field=wagtail.wagtailcore.fields.RichTextField(
help_text="Entry excerpt to be displayed on entries list. If this field is not filled, a truncate version of body text will be used.",
verbose_name="excerpt",
blank=True,
),
),
]
|
"""
==================================
Map two radars to a Cartesian grid
==================================
Map the reflectivity field of two nearby ARM XSARP radars from antenna
coordinates to a Cartesian grid.
"""
print(__doc__)
# Author: Jonathan J. Helmus ([email protected])
# License: BSD 3 clause
import matplotlib.pyplot as plt
import pyart
# read in the data from both XSAPR radars
XSAPR_SW_FILE = "swx_20120520_0641.nc"
XSAPR_SE_FILE = "sex_20120520_0641.nc"
radar_sw = pyart.io.read_cfradial(XSAPR_SW_FILE)
radar_se = pyart.io.read_cfradial(XSAPR_SE_FILE)
# filter out gates with reflectivity > 100 from both radars
gatefilter_se = pyart.filters.GateFilter(radar_se)
gatefilter_se.exclude_above("corrected_reflectivity_horizontal", 100)
gatefilter_sw = pyart.filters.GateFilter(radar_sw)
gatefilter_sw.exclude_above("corrected_reflectivity_horizontal", 100)
# perform Cartesian mapping, limit to the reflectivity field.
grid = pyart.map.grid_from_radars(
(radar_se, radar_sw),
gatefilters=(gatefilter_se, gatefilter_sw),
grid_shape=(1, 201, 201),
grid_limits=((1000, 1000), (-50000, 40000), (-60000, 40000)),
grid_origin=(36.57861, -97.363611),
fields=["corrected_reflectivity_horizontal"],
)
# create the plot
fig = plt.figure()
ax = fig.add_subplot(111)
ax.imshow(
grid.fields["corrected_reflectivity_horizontal"]["data"][0],
origin="lower",
extent=(-60, 40, -50, 40),
vmin=0,
vmax=48,
)
plt.show()
|
"""
pyart.exceptions
================
Custom Py-ART exceptions.
.. autosummary::
:toctree: generated/
MissingOptionalDependency
DeprecatedAttribute
DeprecatedFunctionName
_deprecated_alias
"""
import warnings
class MissingOptionalDependency(Exception):
"""Exception raised when a optional dependency is needed by not found."""
pass
class DeprecatedAttribute(DeprecationWarning):
"""Warning category for an attribute which has been renamed/moved."""
pass
class DeprecatedFunctionName(DeprecationWarning):
"""Warning category for a function which has been renamed/moved."""
pass
def _deprecated_alias(func, old_name, new_name):
"""
A function for creating an alias to a renamed or moved function.
Parameters
----------
func : func
The function which has been renamed or moved.
old_name, new_name : str
Name of the function before and after it was moved or renamed
(with namespace if changed).
Returns
-------
    wrapper : func
        A wrapper version of func, which issues a DeprecatedFunctionName
        warning when called.
"""
def wrapper(*args, **kwargs):
warnings.warn(
(
"{0} has been deprecated and will be removed in future "
+ "versions of Py-ART, pleases use {1}. "
).format(old_name, new_name),
category=DeprecatedFunctionName,
)
return func(*args, **kwargs)
return wrapper
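# Illustrative use of _deprecated_alias (names are hypothetical):
#
#     def new_func(x):
#         return x * 2
#
#     old_func = _deprecated_alias(new_func, 'old_func', 'new_func')
#     old_func(2)  # returns 4 and issues a DeprecatedFunctionName warning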
|
"""
pyart.io.nexrad_archive
=======================

Functions for reading NEXRAD Level II Archive files.

.. autosummary::
    :toctree: generated/
    :template: dev_template.rst

    _NEXRADLevel2StagedField

.. autosummary::
    :toctree: generated/

    read_nexrad_archive
    _find_range_params
    _find_scans_to_interp
    _interpolate_scan

"""
import warnings
import numpy as np
from ..config import FileMetadata, get_fillvalue
from ..core.radar import Radar
from .common import make_time_unit_str, _test_arguments, prepare_for_read
from .nexrad_level2 import NEXRADLevel2File
from ..lazydict import LazyLoadDict
from .nexrad_common import get_nexrad_location
def read_nexrad_archive(filename, field_names=None, additional_metadata=None, file_field_names=False, exclude_fields=None, delay_field_loading=False, station=None, scans=None, linear_interp=True, **kwargs):
"\n Read a NEXRAD Level 2 Archive file.\n\n Parameters\n ----------\n filename : str\n Filename of NEXRAD Level 2 Archive file. The files hosted by\n at the NOAA National Climate Data Center [1]_ as well as on the\n UCAR THREDDS Data Server [2]_ have been tested. Other NEXRAD\n Level 2 Archive files may or may not work. Message type 1 file\n and message type 31 files are supported.\n field_names : dict, optional\n Dictionary mapping NEXRAD moments to radar field names. If a\n data type found in the file does not appear in this dictionary or has\n a value of None it will not be placed in the radar.fields dictionary.\n A value of None, the default, will use the mapping defined in the\n metadata configuration file.\n additional_metadata : dict of dicts, optional\n Dictionary of dictionaries to retrieve metadata from during this read.\n This metadata is not used during any successive file reads unless\n explicitly included. A value of None, the default, will not\n introduct any addition metadata and the file specific or default\n metadata as specified by the metadata configuration file will be used.\n file_field_names : bool, optional\n True to use the NEXRAD field names for the field names. If this\n case the field_names parameter is ignored. The field dictionary will\n likely only have a 'data' key, unless the fields are defined in\n `additional_metadata`.\n exclude_fields : list or None, optional\n List of fields to exclude from the radar object. This is applied\n after the `file_field_names` and `field_names` parameters.\n delay_field_loading : bool, optional\n True to delay loading of field data from the file until the 'data'\n key in a particular field dictionary is accessed. In this case\n the field attribute of the returned Radar object will contain\n LazyLoadDict objects not dict objects.\n station : str or None, optional\n Four letter ICAO name of the NEXRAD station used to determine the\n location in the returned radar object. This parameter is only\n used when the location is not contained in the file, which occur\n in older NEXRAD message 1 files.\n scans : list or None, optional\n Read only specified scans from the file. None (the default) will read\n all scans.\n linear_interp : bool, optional\n True (the default) to perform linear interpolation between valid pairs\n of gates in low resolution rays in files mixed resolution rays.\n False will perform a nearest neighbor interpolation. This parameter is\n not used if the resolution of all rays in the file or requested sweeps\n is constant.\n\n Returns\n -------\n radar : Radar\n Radar object containing all moments and sweeps/cuts in the volume.\n Gates not collected are masked in the field data.\n\n References\n ----------\n .. [1] http://www.ncdc.noaa.gov/\n .. [2] http://thredds.ucar.edu/thredds/catalog.html\n\n "
    # test for non empty kwargs
    _test_arguments(kwargs)

    # create metadata retrieval object
    filemetadata = FileMetadata('nexrad_archive', field_names,
                                additional_metadata, file_field_names,
                                exclude_fields)

    # open the file and retrieve scan information
    nfile = NEXRADLevel2File(prepare_for_read(filename))
    scan_info = nfile.scan_info(scans)

    # time
    time = filemetadata('time')
    time_start, _time = nfile.get_times(scans)
    time['data'] = _time
    time['units'] = make_time_unit_str(time_start)

    # range
    _range = filemetadata('range')
    first_gate, gate_spacing, last_gate = _find_range_params(
        scan_info, filemetadata)
    _range['data'] = np.arange(first_gate, last_gate, gate_spacing, 'float32')
    _range['meters_to_center_of_first_gate'] = float(first_gate)
    _range['meters_between_gates'] = float(gate_spacing)

    # metadata
    metadata = filemetadata('metadata')
    metadata['original_container'] = 'NEXRAD Level II'

    # scan_type
    scan_type = 'ppi'

    # latitude, longitude, altitude
    latitude = filemetadata('latitude')
    longitude = filemetadata('longitude')
    altitude = filemetadata('altitude')

    if nfile._msg_type == '1' and station is not None:
        lat, lon, alt = get_nexrad_location(station)
    else:
        lat, lon, alt = nfile.location()
    latitude['data'] = np.array([lat], dtype='float64')
    longitude['data'] = np.array([lon], dtype='float64')
    altitude['data'] = np.array([alt], dtype='float64')

    # sweep_number, sweep_mode, sweep_start_ray_index, sweep_end_ray_index
    sweep_number = filemetadata('sweep_number')
    sweep_mode = filemetadata('sweep_mode')
    sweep_start_ray_index = filemetadata('sweep_start_ray_index')
    sweep_end_ray_index = filemetadata('sweep_end_ray_index')

    if scans is None:
        nsweeps = int(nfile.nscans)
    else:
        nsweeps = len(scans)
    sweep_number['data'] = np.arange(nsweeps, dtype='int32')
    sweep_mode['data'] = np.array(nsweeps * ['azimuth_surveillance'])

    rays_per_scan = [s['nrays'] for s in scan_info]
    sweep_end_ray_index['data'] = np.cumsum(rays_per_scan, dtype='int32') - 1

    rays_per_scan.insert(0, 0)
    sweep_start_ray_index['data'] = np.cumsum(
        rays_per_scan[:-1], dtype='int32')

    # azimuth, elevation, fixed_angle
    azimuth = filemetadata('azimuth')
    elevation = filemetadata('elevation')
    fixed_angle = filemetadata('fixed_angle')
    azimuth['data'] = nfile.get_azimuth_angles(scans)
    elevation['data'] = nfile.get_elevation_angles(scans).astype('float32')
    fixed_angle['data'] = nfile.get_target_angles(scans)

    # fields
    max_ngates = len(_range['data'])
    available_moments = set(
        [m for scan in scan_info for m in scan['moments']])
    interpolate = _find_scans_to_interp(
        scan_info, first_gate, gate_spacing, filemetadata)

    fields = {}
    for moment in available_moments:
        field_name = filemetadata.get_field_name(moment)
        if field_name is None:
            continue
        dic = filemetadata(field_name)
        dic['_FillValue'] = get_fillvalue()
        if delay_field_loading and moment not in interpolate:
            dic = LazyLoadDict(dic)
            data_call = _NEXRADLevel2StagedField(
                nfile, moment, max_ngates, scans)
            dic.set_lazy('data', data_call)
        else:
            mdata = nfile.get_data(moment, max_ngates, scans=scans)
            if moment in interpolate:
                interp_scans = interpolate[moment]
                warnings.warn(
                    'Gate spacing is not constant, interpolating data in ' +
                    'scans %s for moment %s.' % (interp_scans, moment),
                    UserWarning)
                for scan in interp_scans:
                    idx = scan_info[scan]['moments'].index(moment)
                    moment_ngates = scan_info[scan]['ngates'][idx]
                    start = sweep_start_ray_index['data'][scan]
                    end = sweep_end_ray_index['data'][scan]
                    _interpolate_scan(
                        mdata, start, end, moment_ngates, linear_interp)
            dic['data'] = mdata
        fields[field_name] = dic

    # instrument_parameters
    nyquist_velocity = filemetadata('nyquist_velocity')
    unambiguous_range = filemetadata('unambiguous_range')
    nyquist_velocity['data'] = nfile.get_nyquist_vel(scans).astype('float32')
    unambiguous_range['data'] = (
        nfile.get_unambigous_range(scans).astype('float32'))

    instrument_parameters = {'unambiguous_range': unambiguous_range,
                             'nyquist_velocity': nyquist_velocity}

    nfile.close()
    return Radar(
        time, _range, fields, metadata, scan_type,
        latitude, longitude, altitude,
        sweep_number, sweep_mode, fixed_angle, sweep_start_ray_index,
        sweep_end_ray_index, azimuth, elevation,
        instrument_parameters=instrument_parameters)
def _find_range_params(scan_info, filemetadata):
'Return range parameters, first_gate, gate_spacing, last_gate.'
min_first_gate = 999999
min_gate_spacing = 999999
max_last_gate = 0
    for scan_params in scan_info:
        ngates = scan_params['ngates'][0]
        for i, moment in enumerate(scan_params['moments']):
            if filemetadata.get_field_name(moment) is None:
                # moment is not read, skip
                continue
            first_gate = scan_params['first_gate'][i]
            gate_spacing = scan_params['gate_spacing'][i]
            last_gate = first_gate + gate_spacing * (ngates - 0.5)
            min_first_gate = min(min_first_gate, first_gate)
            min_gate_spacing = min(min_gate_spacing, gate_spacing)
            max_last_gate = max(max_last_gate, last_gate)
    return min_first_gate, min_gate_spacing, max_last_gate
def _find_scans_to_interp(scan_info, first_gate, gate_spacing, filemetadata):
'Return a dict indicating what moments/scans need interpolation.'
    moments = set([m for scan in scan_info for m in scan['moments']])
    interpolate = dict([(moment, []) for moment in moments])
    for scan_num, scan in enumerate(scan_info):
        for moment in moments:
            if moment not in scan['moments']:
                continue
            if filemetadata.get_field_name(moment) is None:
                # moment is not read, skip
                continue
            index = scan['moments'].index(moment)
            first = scan['first_gate'][index]
            spacing = scan['gate_spacing'][index]
            if first != first_gate or spacing != gate_spacing:
                interpolate[moment].append(scan_num)
                # for proper interpolation the gate spacing of the scan to be
                # interpolated should be 1/4th the spacing of the radar
                assert spacing == gate_spacing * 4
                # and the first gate of the scan should be one and a half
                # gate spacings past the radar's first gate
                assert first_gate + 1.5 * gate_spacing == first
    # remove moments with no scans needing interpolation
    interpolate = dict(
        [(k, v) for k, v in interpolate.items() if len(v) != 0])
    return interpolate
def _interpolate_scan(mdata, start, end, moment_ngates, linear_interp=True):
'Interpolate a single NEXRAD moment scan from 1000 m to 250 m.'
    for ray_num in range(start, end + 1):
        ray = mdata[ray_num].copy()
        interp_ngates = 4 * moment_ngates
        ray[:interp_ngates] = np.repeat(ray[:moment_ngates], 4)
        if linear_interp:
            # linear interpolate between pairs of unmasked gates
            for i in range(2, interp_ngates - 4, 4):
                gate_val = ray[i]
                next_val = ray[i + 4]
                if np.ma.is_masked(gate_val) or np.ma.is_masked(next_val):
                    continue
                delta = (next_val - gate_val) / 4.0
                ray[i + 0] = gate_val + (delta * 0.5)
                ray[i + 1] = gate_val + (delta * 1.5)
                ray[i + 2] = gate_val + (delta * 2.5)
                ray[i + 3] = gate_val + (delta * 3.5)
        mdata[ray_num] = ray[:]
class _NEXRADLevel2StagedField(object):
    """
    A class to facilitate on demand loading of field data from a Level 2 file.
    """
def __init__(self, nfile, moment, max_ngates, scans):
'initialize.'
self.nfile = nfile
self.moment = moment
self.max_ngates = max_ngates
self.scans = scans
    def __call__(self):
        'Return the array containing the field data.'
        return self.nfile.get_data(
            self.moment, self.max_ngates, scans=self.scans)
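# Hedged usage sketch (not in the original module); the archive filename is
# illustrative:
#
#     radar = read_nexrad_archive('KATX20130717_195021_V06')
#     print(radar.fields.keys())
#
# With delay_field_loading=True the field dictionaries are LazyLoadDict
# objects, and the moment data is only decoded when ['data'] is accessed.
|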
"""
pyart.io.uf
===========
Reading of Universal format (UF) files
.. autosummary::
:toctree: generated/
read_uf
_get_instrument_parameters
"""
import warnings
import numpy as np
from netCDF4 import date2num
from ..config import FileMetadata, get_fillvalue
from ..core.radar import Radar
from .common import make_time_unit_str, _test_arguments, prepare_for_read
from .uffile import UFFile
_LIGHT_SPEED = 2.99792458e8 # speed of light in meters per second
_UF_SWEEP_MODES = {
0: "calibration",
1: "ppi",
2: "coplane",
3: "rhi",
4: "vpt",
5: "target",
6: "manual",
7: "idle",
}
_SWEEP_MODE_STR = {
"calibration": "calibration",
"ppi": "azimuth_surveillance",
"coplane": "coplane",
"rhi": "rhi",
"vpt": "vertical_pointing",
"target": "pointing",
"manual": "manual",
"idle": "idle",
}
def read_uf(
filename,
field_names=None,
additional_metadata=None,
file_field_names=False,
exclude_fields=None,
delay_field_loading=False,
**kwargs
):
"""
Read a UF File.
Parameters
----------
filename : str or file-like
Name of Universal format file to read data from.
field_names : dict, optional
Dictionary mapping UF data type names to radar field names. If a
data type found in the file does not appear in this dictionary or has
a value of None it will not be placed in the radar.fields dictionary.
A value of None, the default, will use the mapping defined in the
Py-ART configuration file.
additional_metadata : dict of dicts, optional
Dictionary of dictionaries to retrieve metadata from during this read.
This metadata is not used during any successive file reads unless
explicitly included. A value of None, the default, will not
introduce any addition metadata and the file specific or default
metadata as specified by the Py-ART configuration file will be used.
file_field_names : bool, optional
True to force the use of the field names from the file in which
case the `field_names` parameter is ignored. False will use to
`field_names` parameter to rename fields.
exclude_fields : list or None, optional
List of fields to exclude from the radar object. This is applied
after the `file_field_names` and `field_names` parameters.
delay_field_loading : bool
This option is not implemented in the function but included for
compatibility.
Returns
-------
radar : Radar
Radar object.
"""
# test for non empty kwargs
_test_arguments(kwargs)
# create metadata retrieval object
filemetadata = FileMetadata(
"uf", field_names, additional_metadata, file_field_names, exclude_fields
)
# Open UF file and get handle
ufile = UFFile(prepare_for_read(filename))
first_ray = ufile.rays[0]
# time
dts = ufile.get_datetimes()
units = make_time_unit_str(min(dts))
time = filemetadata("time")
time["units"] = units
time["data"] = date2num(dts, units).astype("float32")
# range
_range = filemetadata("range")
# assume that the number of gates and spacing from the first ray is
# representative of the entire volume
field_header = first_ray.field_headers[0]
ngates = field_header["nbins"]
step = field_header["range_spacing_m"]
# this gives distances to the center of each gate, remove step/2 for start
start = (
field_header["range_start_km"] * 1000.0
+ field_header["range_start_m"]
+ step / 2.0
)
_range["data"] = np.arange(ngates, dtype="float32") * step + start
_range["meters_to_center_of_first_gate"] = start
_range["meters_between_gates"] = step
# latitude, longitude and altitude
latitude = filemetadata("latitude")
longitude = filemetadata("longitude")
altitude = filemetadata("altitude")
lat, lon, height = first_ray.get_location()
latitude["data"] = np.array([lat], dtype="float64")
longitude["data"] = np.array([lon], dtype="float64")
altitude["data"] = np.array([height], dtype="float64")
# metadata
metadata = filemetadata("metadata")
metadata["original_container"] = "UF"
metadata["site_name"] = first_ray.mandatory_header["site_name"]
metadata["radar_name"] = first_ray.mandatory_header["radar_name"]
# sweep_start_ray_index, sweep_end_ray_index
sweep_start_ray_index = filemetadata("sweep_start_ray_index")
sweep_end_ray_index = filemetadata("sweep_end_ray_index")
sweep_start_ray_index["data"] = ufile.first_ray_in_sweep
sweep_end_ray_index["data"] = ufile.last_ray_in_sweep
# sweep number
sweep_number = filemetadata("sweep_number")
sweep_number["data"] = np.arange(ufile.nsweeps, dtype="int32")
# sweep_type
scan_type = _UF_SWEEP_MODES[first_ray.mandatory_header["sweep_mode"]]
# sweep_mode
sweep_mode = filemetadata("sweep_mode")
sweep_mode["data"] = np.array(
ufile.nsweeps * [_SWEEP_MODE_STR[scan_type]], dtype="S"
)
# elevation
elevation = filemetadata("elevation")
elevation["data"] = ufile.get_elevations()
# azimuth
azimuth = filemetadata("azimuth")
azimuth["data"] = ufile.get_azimuths()
# fixed_angle
fixed_angle = filemetadata("fixed_angle")
fixed_angle["data"] = ufile.get_sweep_fixed_angles()
# fields
fields = {}
for uf_field_number, uf_field_dic in enumerate(first_ray.field_positions):
uf_field_name = uf_field_dic["data_type"].decode("ascii")
field_name = filemetadata.get_field_name(uf_field_name)
if field_name is None:
continue
field_dic = filemetadata(field_name)
field_dic["data"] = ufile.get_field_data(uf_field_number)
field_dic["_FillValue"] = get_fillvalue()
fields[field_name] = field_dic
# instrument_parameters
instrument_parameters = _get_instrument_parameters(ufile, filemetadata)
# scan rate
scan_rate = filemetadata("scan_rate")
scan_rate["data"] = ufile.get_sweep_rates()
ufile.close()
return Radar(
time,
_range,
fields,
metadata,
scan_type,
latitude,
longitude,
altitude,
sweep_number,
sweep_mode,
fixed_angle,
sweep_start_ray_index,
sweep_end_ray_index,
azimuth,
elevation,
scan_rate=scan_rate,
instrument_parameters=instrument_parameters,
)
def _get_instrument_parameters(ufile, filemetadata):
"""Return a dictionary containing instrument parameters."""
# pulse width
pulse_width = filemetadata("pulse_width")
pulse_width["data"] = ufile.get_pulse_widths() / _LIGHT_SPEED # m->sec
# assume that the parameters in the first ray represent the beam widths,
# bandwidth and frequency in the entire volume
first_ray = ufile.rays[0]
field_header = first_ray.field_headers[0]
beam_width_h = field_header["beam_width_h"] / 64.0
beam_width_v = field_header["beam_width_v"] / 64.0
bandwidth = field_header["bandwidth"] / 16.0 * 1.0e6
wavelength_cm = field_header["wavelength_cm"] / 64.0
if wavelength_cm == 0:
warnings.warn("Invalid wavelength, frequency set to default value.")
wavelength_hz = 9999.0
else:
wavelength_hz = _LIGHT_SPEED / (wavelength_cm / 100.0)
# radar_beam_width_h
radar_beam_width_h = filemetadata("radar_beam_width_h")
radar_beam_width_h["data"] = np.array([beam_width_h], dtype="float32")
# radar_beam_width_v
radar_beam_width_v = filemetadata("radar_beam_width_w")
radar_beam_width_v["data"] = np.array([beam_width_v], dtype="float32")
# radar_receiver_bandwidth
radar_receiver_bandwidth = filemetadata("radar_receiver_bandwidth")
radar_receiver_bandwidth["data"] = np.array([bandwidth], dtype="float32")
# polarization_mode
polarization_mode = filemetadata("polarization_mode")
polarization_mode["data"] = ufile.get_sweep_polarizations()
# frequency
frequency = filemetadata("frequency")
frequency["data"] = np.array([wavelength_hz], dtype="float32")
# prt
prt = filemetadata("prt")
prt["data"] = ufile.get_prts() / 1e6 # us->sec
instrument_parameters = {
"pulse_width": pulse_width,
"radar_beam_width_h": radar_beam_width_h,
"radar_beam_width_v": radar_beam_width_v,
"radar_receiver_bandwidth": radar_receiver_bandwidth,
"polarization_mode": polarization_mode,
"frequency": frequency,
"prt": prt,
}
# nyquist velocity if defined
nyquist_velocity = filemetadata("nyquist_velocity")
nyquist_velocity["data"] = ufile.get_nyquists()
if nyquist_velocity["data"] is not None:
instrument_parameters["nyquist_velocity"] = nyquist_velocity
return instrument_parameters
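# Hedged usage sketch (not part of the original module); the UF filename is
# illustrative:
#
#     radar = read_uf('sample.uf')
#     print(radar.scan_type, radar.nsweeps)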
|
#! /usr/bin/env python
"""
Make a small netCDF CF/Radial file containing a single RHI scan.
Single field and scan is converted from sigmet file XSW110520113537.RAW7HHL
"""
import pyart
radar = pyart.io.read_rsl("XSW110520113537.RAW7HHL")
time_slice = slice(None, 713, 18)
range_slice = slice(None, None, 12)
sweep_slice = slice(None, 1)
# remove all but the reflectivity_horizontal fields
rf_field = radar.fields["reflectivity"]
rf_data = rf_field["data"]
rf_field["data"] = rf_data[time_slice, range_slice]
radar.fields = {"reflectivity_horizontal": rf_field}
radar.nsweeps = 1
radar.nrays = 40
radar.ngates = 45
# truncate the range based variables
radar.range["data"] = radar.range["data"][range_slice]
# truncate the time based variables
radar.time["data"] = radar.time["data"][time_slice]
radar.azimuth["data"] = radar.azimuth["data"][time_slice]
radar.elevation["data"] = radar.elevation["data"][time_slice]
radar.instrument_parameters["prt"]["data"] = radar.instrument_parameters["prt"]["data"][
time_slice
]
radar.instrument_parameters["unambiguous_range"]["data"] = radar.instrument_parameters[
"unambiguous_range"
]["data"][time_slice]
radar.instrument_parameters["nyquist_velocity"]["data"] = radar.instrument_parameters[
"nyquist_velocity"
]["data"][time_slice]
# truncate the sweep based variables
radar.sweep_number["data"] = radar.sweep_number["data"][sweep_slice]
radar.fixed_angle["data"] = radar.fixed_angle["data"][sweep_slice]
radar.sweep_start_ray_index["data"] = radar.sweep_start_ray_index["data"][sweep_slice]
radar.sweep_end_ray_index["data"] = radar.sweep_end_ray_index["data"][sweep_slice]
radar.sweep_end_ray_index["data"][0] = 39
radar.sweep_mode["data"] = radar.sweep_mode["data"][sweep_slice]
radar.sweep_number["data"] = radar.sweep_number["data"][sweep_slice]
radar.instrument_parameters["prt_mode"]["data"] = radar.instrument_parameters[
"prt_mode"
]["data"][sweep_slice]
# adjust metadata
radar.metadata = {
"Conventions": "CF/Radial instrument_parameters",
"version": "1.2",
"title": "Py-ART Example RHI CF/Radial file",
"institution": (
"United States Department of Energy - Atmospheric "
"Radiation Measurement (ARM) program"
),
"references": "none",
"source": "ARM SGP XSAPR Radar",
"history": "created by jhelmus on evs348532 at 2013-05-22T12:34:56",
"comment": "none",
"instrument_name": "xsapr-sgp",
}
pyart.io.write_cfradial("example_cfradial_rhi.nc", radar)
|
"""
pyart.util.radar_utils
======================
Functions for working with radar instances.
.. autosummary::
:toctree: generated/
is_vpt
to_vpt
join_radar
"""
import copy
import numpy as np
from netCDF4 import num2date, date2num
from . import datetime_utils
def is_vpt(radar, offset=0.5):
"""
Determine if a Radar appears to be a vertical pointing scan.
This function only verifies that the object is a vertical pointing scan,
use the :py:func:`to_vpt` function to convert the radar to a vpt scan
if this function returns True.
Parameters
----------
radar : Radar
    Radar object to check.
offset : float
Maximum offset of the elevation from 90 degrees to still consider
to be vertically pointing.
Returns
-------
flag : bool
    True if the radar appears to be vertically pointing, False if not.
"""
# check that the elevation is within offset of 90 degrees.
elev = radar.elevation["data"]
return np.all((elev < 90.0 + offset) & (elev > 90.0 - offset))
def to_vpt(radar, single_scan=True):
"""
Convert an existing Radar object to represent a vertical pointing scan.
This function does not verify that the Radar object contains a vertical
pointing scan. To perform such a check use :py:func:`is_vpt`.
Parameters
----------
radar : Radar
Mislabeled vertical pointing scan Radar object to convert to be
properly labeled. This object is converted in place, no copy of
the existing data is made.
single_scan : bool, optional
        True to convert the volume to a single scan, in which case any
        azimuth angle data is lost. False will convert the scan to contain
        the same number of scans as rays; azimuth angles are retained.
"""
if single_scan:
nsweeps = 1
radar.azimuth["data"][:] = 0.0
seri = np.array([radar.nrays - 1], dtype="int32")
radar.sweep_end_ray_index["data"] = seri
else:
nsweeps = radar.nrays
# radar.azimuth not adjusted
radar.sweep_end_ray_index["data"] = np.arange(nsweeps, dtype="int32")
radar.scan_type = "vpt"
radar.nsweeps = nsweeps
radar.target_scan_rate = None # no scanning
radar.elevation["data"][:] = 90.0
radar.sweep_number["data"] = np.arange(nsweeps, dtype="int32")
radar.sweep_mode["data"] = np.array(["vertical_pointing"] * nsweeps)
radar.fixed_angle["data"] = np.ones(nsweeps, dtype="float32") * 90.0
radar.sweep_start_ray_index["data"] = np.arange(nsweeps, dtype="int32")
if radar.instrument_parameters is not None:
for key in ["prt_mode", "follow_mode", "polarization_mode"]:
if key in radar.instrument_parameters:
ip_dic = radar.instrument_parameters[key]
ip_dic["data"] = np.array([ip_dic["data"][0]] * nsweeps)
# Attributes that do not need any changes
# radar.altitude
# radar.altitude_agl
# radar.latitude
# radar.longitude
# radar.range
# radar.ngates
# radar.nrays
# radar.metadata
# radar.radar_calibration
# radar.time
# radar.fields
# radar.antenna_transition
# radar.scan_rate
return
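# Example usage (a minimal sketch, not part of the module): given a Radar
# object ``radar`` -- e.g. one returned by ``pyart.io.read`` -- a mislabeled
# vertically pointing scan can be detected and relabeled in place:
#
#     if is_vpt(radar):
#         to_vpt(radar)   # relabel as a single vertically pointing sweep
#         assert radar.scan_type == 'vpt'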
def join_radar(radar1, radar2):
"""
Combine two radar instances into one.
Parameters
----------
radar1 : Radar
Radar object.
radar2 : Radar
Radar object.
"""
# must have same gate spacing
new_radar = copy.deepcopy(radar1)
new_radar.azimuth["data"] = np.append(
radar1.azimuth["data"], radar2.azimuth["data"]
)
new_radar.elevation["data"] = np.append(
radar1.elevation["data"], radar2.elevation["data"]
)
if len(radar1.range["data"]) >= len(radar2.range["data"]):
new_radar.range["data"] = radar1.range["data"]
else:
new_radar.range["data"] = radar2.range["data"]
# to combine times we need to reference them to a standard
# for this we'll use epoch time
r1num = datetime_utils.datetimes_from_radar(radar1, epoch=True)
r2num = datetime_utils.datetimes_from_radar(radar2, epoch=True)
new_radar.time["data"] = np.append(r1num, r2num)
new_radar.time["units"] = datetime_utils.EPOCH_UNITS
for var in list(new_radar.fields.keys()):
sh1 = radar1.fields[var]["data"].shape
sh2 = radar2.fields[var]["data"].shape
new_field = np.ma.zeros([sh1[0] + sh2[0], max([sh1[1], sh2[1]])]) - 9999.0
new_field[0 : sh1[0], 0 : sh1[1]] = radar1.fields[var]["data"]
new_field[sh1[0] :, 0 : sh2[1]] = radar2.fields[var]["data"]
new_radar.fields[var]["data"] = new_field
# radar locations
# TODO moving platforms - any more?
    if (
        len(radar1.latitude["data"]) == 1
        and len(radar2.latitude["data"]) == 1
        and len(radar1.longitude["data"]) == 1
        and len(radar2.longitude["data"]) == 1
        and len(radar1.altitude["data"]) == 1
        and len(radar2.altitude["data"]) == 1
    ):
lat1 = float(radar1.latitude["data"])
lon1 = float(radar1.longitude["data"])
alt1 = float(radar1.altitude["data"])
lat2 = float(radar2.latitude["data"])
lon2 = float(radar2.longitude["data"])
alt2 = float(radar2.altitude["data"])
if (lat1 != lat2) or (lon1 != lon2) or (alt1 != alt2):
ones1 = np.ones(len(radar1.time["data"]), dtype="float32")
ones2 = np.ones(len(radar2.time["data"]), dtype="float32")
new_radar.latitude["data"] = np.append(ones1 * lat1, ones2 * lat2)
new_radar.longitude["data"] = np.append(ones1 * lon1, ones2 * lon2)
            new_radar.altitude["data"] = np.append(ones1 * alt1, ones2 * alt2)
else:
new_radar.latitude["data"] = radar1.latitude["data"]
new_radar.longitude["data"] = radar1.longitude["data"]
new_radar.altitude["data"] = radar1.altitude["data"]
else:
new_radar.latitude["data"] = np.append(
radar1.latitude["data"], radar2.latitude["data"]
)
new_radar.longitude["data"] = np.append(
radar1.longitude["data"], radar2.longitude["data"]
)
new_radar.altitude["data"] = np.append(
radar1.altitude["data"], radar2.altitude["data"]
)
return new_radar
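# Example usage (sketch): joining two consecutive volumes. The file names are
# placeholders; as noted above, both radars must share the same gate spacing.
#
#     radar1 = pyart.io.read('volume_1.nc')
#     radar2 = pyart.io.read('volume_2.nc')
#     combined = join_radar(radar1, radar2)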
|
"""
Default config for Workload Automation. DO NOT MODIFY this file. This file
gets copied to ~/.workload_automation/config.py on initial run of run_workloads.
Add your configuration to that file instead.
"""
# *** WARNING: ***
# Configuration listed in this file is NOT COMPLETE. This file sets the default
# configuration for WA and gives EXAMPLES of other configuration available. It
# is not supposed to be an exhaustive list.
# PLEASE REFER TO WA DOCUMENTATION FOR THE COMPLETE LIST OF AVAILABLE
# EXTENSIONS AND THEIR CONFIGURATION.
# This defines when the device will be rebooted during Workload Automation execution. #
# #
# Valid policies are: #
# never: The device will never be rebooted. #
# as_needed: The device will only be rebooted if the need arises (e.g. if it #
# becomes unresponsive). #
# initial: The device will be rebooted when the execution first starts, just before executing #
# the first workload spec. #
# each_spec: The device will be rebooted before running a new workload spec. #
# each_iteration: The device will be rebooted before each new iteration. #
# #
reboot_policy = "as_needed"
# Defines the order in which the agenda spec will be executed. At the moment, #
# the following execution orders are supported: #
# #
# by_iteration: The first iteration of each workload spec is executed one after the other, #
# so all workloads are executed before proceeding on to the second iteration. #
# This is the default if no order is explicitly specified. #
# If multiple sections were specified, this will also split them up, so that specs #
# in the same section are further apart in the execution order. #
# by_section: Same as "by_iteration", but runs specs from the same section one after the other. #
# by_spec: All iterations of the first spec are executed before moving on to the next #
# spec. This may also be specified as ``"classic"``, as this was the way #
# workloads were executed in earlier versions of WA. #
# random: Randomises the order in which specs run. #
execution_order = "by_iteration"
# This indicates when a job will be re-run.
# Possible values:
# OK: This iteration has completed and no errors have been detected
# PARTIAL: One or more instruments have failed (the iteration may still be running).
# FAILED: The workload itself has failed.
# ABORTED: The user interrupted the workload
#
# If set to an empty list, a job will not be re-run ever.
retry_on_status = ["FAILED", "PARTIAL"]
# How many times a job will be re-run before giving up
max_retries = 3
####################################################################################################
######################################### Device Settings ##########################################
####################################################################################################
# Specify the device you want to run workload automation on. This must be a #
# string with the ID of the device. At the moment, only 'TC2' is supported. #
# #
device = "generic_android"
# Configuration options that will be passed onto the device. These are obviously device-specific, #
# so check the documentation for the particular device to find out which options and values are #
# valid. The settings listed below are common to all devices #
# #
device_config = dict(
# The name used by adb to identify the device. Use "adb devices" in bash to list
# the devices currently seen by adb.
# adb_name='10.109.173.2:5555',
# The directory on the device that WA will use to push files to
# working_directory='/sdcard/wa-working',
# This specifies the device's CPU cores. The order must match how they
# appear in cpufreq. The example below is for TC2.
# core_names = ['a7', 'a7', 'a7', 'a15', 'a15']
# Specifies cluster mapping for the device's cores.
# core_clusters = [0, 0, 0, 1, 1]
)
####################################################################################################
################################## Instrumentation Configuration ##################################
####################################################################################################
# This defines the additional instrumentation that will be enabled during workload execution, #
# which in turn determines what additional data (such as /proc/interrupts content or Streamline #
# traces) will be available in the results directory. #
# #
instrumentation = [
# Records the time it took to run the workload
"execution_time",
# Collects /proc/interrupts before and after execution and does a diff.
"interrupts",
    # Collects the contents of /sys/devices/system/cpu before and after execution and does a diff.
"cpufreq",
    # Gets energy usage of the workload from HWMON devices.
# NOTE: the hardware needs to have the right sensors in order for this to work
#'hwmon',
# Run perf in the background during workload execution and then collect the results. perf is a
# standard Linux performance analysis tool.
#'perf',
# Collect Streamline traces during workload execution. Streamline is part of DS-5
#'streamline',
# Collects traces by interacting with Ftrace Linux kernel internal tracer
#'trace-cmd',
# Obtains the power consumption of the target device's core measured by National Instruments
    # Data Acquisition (DAQ) device.
#'daq',
# Collects CCI counter data.
#'cci_pmu_logger',
# Collects FPS (Frames Per Second) and related metrics (such as jank) from
# the View of the workload (Note: only a single View per workload is
# supported at the moment, so this is mainly useful for games).
#'fps',
]
####################################################################################################
################################# Result Processors Configuration ##################################
####################################################################################################
# Specifies how results will be processed and presented. #
# #
result_processors = [
# Creates a status.txt that provides a summary status for the run
"status",
# Creates a results.txt file for each iteration that lists all collected metrics
# in "name = value (units)" format
"standard",
# Creates a results.csv that contains metrics for all iterations of all workloads
# in the .csv format.
"csv",
    # Creates a summary.csv that contains summary metrics for all iterations of all
    # workloads in the .csv format. Summary metrics are defined on a per-workload
    # basis and are typically things like overall scores. The contents of summary.csv are
# always a subset of the contents of results.csv (if it is generated).
#'summary_csv',
    # Creates a results.json that contains metrics for all iterations of all workloads
    # in the JSON format.
#'json',
# Write results to an sqlite3 database. By default, a new database will be
# generated for each run, however it is possible to specify a path to an
# existing DB file (see result processor configuration below), in which
# case results from multiple runs may be stored in the one file.
#'sqlite',
]
####################################################################################################
################################### Logging output Configuration ###################################
####################################################################################################
# Specify the format of logging messages. The format uses the old formatting syntax: #
# #
# http://docs.python.org/2/library/stdtypes.html#string-formatting-operations #
# #
# The attributes that can be used in formats are listed here: #
# #
# http://docs.python.org/2/library/logging.html#logrecord-attributes #
# #
logging = {
# Log file format
"file format": "%(asctime)s %(levelname)-8s %(name)s: %(message)s",
# Verbose console output format
"verbose format": "%(asctime)s %(levelname)-8s %(name)s: %(message)s",
# Regular console output format
"regular format": "%(levelname)-8s %(message)s",
# Colouring the console output
"colour_enabled": True,
}
####################################################################################################
#################################### Instruments Configuration #####################################
####################################################################################################
# Instrumentation configuration is related to specific instruments' settings. Some of the #
# instruments require specific settings in order for them to work. These settings are #
# specified here. #
# Note that these settings only take effect if the corresponding instrument is
# enabled above.
####################################################################################################
######################################## perf configuration ########################################
# The hardware events such as instructions executed, cache-misses suffered, or branches
# mispredicted to be reported by perf. Events can be obtained from the device by typing
# 'perf list'.
# perf_events = ['migrations', 'cs']
# The perf options, which can be obtained from the man page for perf-record.
# perf_options = '-a -i'
####################################################################################################
####################################### hwmon configuration ########################################
# The kinds of sensors hwmon instrument will look for
# hwmon_sensors = ['energy', 'temp']
####################################################################################################
###################################### trace-cmd configuration #####################################
# trace-cmd events to be traced. The available events can be listed by running
# 'trace-cmd list -e' on a rooted device.
# trace_events = ['power*']
####################################################################################################
######################################### DAQ configuration ########################################
# The host address of the machine that runs the DAQ server which the instrument communicates with.
# daq_server_host = '10.1.17.56'
# The port number of the DAQ server with which the daq instrument communicates.
# daq_server_port = 56788
# The values of resistors 1 and 2 (in Ohms) across which the voltages are measured
# daq_resistor_values = [0.002, 0.002]
####################################################################################################
################################### cci_pmu_logger configuration ###################################
# The events to be counted by PMU
# NOTE: The number of events must not exceed the number of counters available (which is 4 for CCI-400)
# cci_pmu_events = ['0x63', '0x83']
# The name of the events which will be used when reporting PMU counts
# cci_pmu_event_labels = ['event_0x63', 'event_0x83']
# The period (in jiffies) between counter reads
# cci_pmu_period = 15
####################################################################################################
################################### fps configuration ##############################################
# Data points below this FPS will be dropped as not constituting "real" gameplay. The assumption
# being that while actually running, the FPS in the game will not drop below X frames per second,
# except on loading screens, menus, etc, which should not contribute to FPS calculation.
# fps_drop_threshold=5
# If set to True, this will keep the raw dumpsys output in the results directory (this is mainly
# used for debugging). Note: frames.csv with collected frames data will always be generated
# regardless of this setting.
# fps_keep_raw=False
####################################################################################################
################################# Result Processor Configuration ###################################
####################################################################################################
# Specifies an alternative database to store results in. If the file does not
# exist, it will be created (the directory of the file must exist, however). If
# the file does exist, the results will be added to the existing data set (each
# run has a UUID, so results won't clash even if identical agendas were used).
# Note that in order for this to work, the version of the schema used to generate
# the DB file must match that of the schema used for the current run. Please
# see "What's new" section in WA docs to check if the schema has changed in
# recent releases of WA.
# sqlite_database = '/work/results/myresults.sqlite'
# If the file specified by sqlite_database exists, setting this to True will
# cause that file to be overwritten rather than updated -- existing results in
# the file will be lost.
# sqlite_overwrite = False
# distribution: internal
####################################################################################################
#################################### Resource Getter configuration #################################
####################################################################################################
# The location on your system where /arm/scratch is mounted. Used by
# Scratch resource getter.
# scratch_mount_point = '/arm/scratch'
# end distribution
|
# Copyright 2014-2015 ARM Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Original implementation by Rene de Jong. Updated by Sascha Bischoff.
import logging
from wlauto import LinuxDevice, Parameter
from wlauto.common.gem5.device import BaseGem5Device
from wlauto.utils import types
class Gem5LinuxDevice(BaseGem5Device, LinuxDevice):
"""
Implements gem5 Linux device.
This class allows a user to connect WA to a simulation using gem5. The
connection to the device is made using the telnet connection of the
simulator, and is used for all commands. The simulator does not have ADB
support, and therefore we need to fall back to using standard shell
commands.
Files are copied into the simulation using a VirtIO 9P device in gem5. Files
are copied out of the simulated environment using the m5 writefile command
within the simulated system.
When starting the workload run, the simulator is automatically started by
Workload Automation, and a connection to the simulator is established. WA
will then wait for Android to boot on the simulated system (which can take
hours), prior to executing any other commands on the device. It is also
possible to resume from a checkpoint when starting the simulation. To do
this, please append the relevant checkpoint commands from the gem5
simulation script to the gem5_discription argument in the agenda.
Host system requirements:
* VirtIO support. We rely on diod on the host system. This can be
installed on ubuntu using the following command:
sudo apt-get install diod
Guest requirements:
* VirtIO support. We rely on VirtIO to move files into the simulation.
Please make sure that the following are set in the kernel
configuration:
CONFIG_NET_9P=y
CONFIG_NET_9P_VIRTIO=y
CONFIG_9P_FS=y
CONFIG_9P_FS_POSIX_ACL=y
CONFIG_9P_FS_SECURITY=y
CONFIG_VIRTIO_BLK=y
* m5 binary. Please make sure that the m5 binary is on the device and
      can be found in the path.
"""
name = "gem5_linux"
platform = "linux"
parameters = [
Parameter("core_names", default=[], override=True),
Parameter("core_clusters", default=[], override=True),
Parameter(
"host",
default="localhost",
override=True,
description="Host name or IP address for the device.",
),
Parameter(
"login_prompt",
kind=types.list_of_strs,
default=["login:", "AEL login:", "username:"],
mandatory=False,
),
Parameter(
"login_password_prompt",
kind=types.list_of_strs,
default=["password:"],
mandatory=False,
),
]
    # Overridden from Device. For documentation, see the corresponding method in
    # Device.
def __init__(self, **kwargs):
self.logger = logging.getLogger("Gem5LinuxDevice")
LinuxDevice.__init__(self, **kwargs)
BaseGem5Device.__init__(self)
def login_to_device(self):
# Wait for the login prompt
prompt = self.login_prompt + [self.sckt.UNIQUE_PROMPT]
i = self.sckt.expect(prompt, timeout=10)
# Check if we are already at a prompt, or if we need to log in.
if i < len(prompt) - 1:
self.sckt.sendline("{}".format(self.username))
password_prompt = self.login_password_prompt + [
r"# ",
self.sckt.UNIQUE_PROMPT,
]
j = self.sckt.expect(password_prompt, timeout=self.delay)
if j < len(password_prompt) - 2:
self.sckt.sendline("{}".format(self.password))
self.sckt.expect([r"# ", self.sckt.UNIQUE_PROMPT], timeout=self.delay)
def capture_screen(self, filepath):
if BaseGem5Device.capture_screen(self, filepath):
return
# If we didn't manage to do the above, call the parent class.
self.logger.warning(
"capture_screen: falling back to parent class implementation"
)
LinuxDevice.capture_screen(self, filepath)
def initialize(self, context):
self.resize_shell()
self.deploy_m5(context, force=False)
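# Example (sketch): selecting this device in a WA config.py. The parameter
# names mirror those declared above; the values shown are illustrative only.
#
#     device = 'gem5_linux'
#     device_config = dict(
#         host='localhost',
#         login_prompt=['login:', 'AEL login:', 'username:'],
#     )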
|
"""Louie version information."""
NAME = "Louie"
DESCRIPTION = "Signal dispatching mechanism"
VERSION = "1.1"
|
import os
import sqlite3
import json
import uuid
from datetime import datetime, timedelta
from contextlib import contextmanager
from wlauto import ResultProcessor, settings, Parameter
from wlauto.exceptions import ResultProcessorError
from wlauto.utils.types import boolean
SCHEMA_VERSION = '0.0.2'
SCHEMA = [
    '''CREATE TABLE runs (
        uuid text,
        start_time datetime,
        end_time datetime,
        duration integer
    )''',
    '''CREATE TABLE workload_specs (
        id text,
        run_oid text,
        number_of_iterations integer,
        label text,
        workload_name text,
        boot_parameters text,
        runtime_parameters text,
        workload_parameters text
    )''',
    '''CREATE TABLE metrics (
        spec_oid int,
        iteration integer,
        metric text,
        value text,
        units text,
        lower_is_better integer
    )''',
    '''CREATE VIEW results AS
    SELECT uuid as run_uuid, spec_id, label as workload, iteration, metric, value, units, lower_is_better
    FROM metrics AS m INNER JOIN (
        SELECT ws.OID as spec_oid, ws.id as spec_id, uuid, label
        FROM workload_specs AS ws INNER JOIN runs AS r ON ws.run_oid = r.OID
    ) AS wsr ON wsr.spec_oid = m.spec_oid
    ''',
    '''CREATE TABLE __meta (
        schema_version text
    )''',
    'INSERT INTO __meta VALUES ("{}")'.format(SCHEMA_VERSION),
]
# Register adapters so datetime, timedelta and UUID values can be written
# to the database directly.
sqlite3.register_adapter(datetime, lambda x: x.isoformat())
sqlite3.register_adapter(timedelta, lambda x: x.total_seconds())
sqlite3.register_adapter(uuid.UUID, str)
class SqliteResultProcessor(ResultProcessor):
    name = 'sqlite'
    description = '''
    Stores results in an sqlite database.
    This may be used to accumulate results of multiple runs in a single file.
    '''
    parameters = [
        Parameter('database', default=None, global_alias='sqlite_database',
                  description='Full path of the sqlite database file to store results in.'),
        Parameter('overwrite', kind=boolean, default=False, global_alias='sqlite_overwrite',
                  description='If True, an existing database file will be overwritten rather than updated.'),
    ]
    def initialize(self, context):
        self._last_spec = None
        self._run_oid = None
        self._spec_oid = None
        if not os.path.exists(self.database):
            self._initdb()
        elif self.overwrite:
            os.remove(self.database)
            self._initdb()
        else:
            self._validate_schema_version()
        self._update_run(context.run_info.uuid)
    def process_iteration_result(self, result, context):
        if self._last_spec != context.spec:
            self._update_spec(context.spec)
        metrics = [(self._spec_oid, context.current_iteration, m.name,
                    str(m.value), m.units, int(m.lower_is_better))
                   for m in result.metrics]
        with self._open_connecton() as conn:
            conn.executemany('INSERT INTO metrics VALUES (?,?,?,?,?,?)', metrics)
    def process_run_result(self, result, context):
        info = context.run_info
        with self._open_connecton() as conn:
            conn.execute('''UPDATE runs SET start_time=?, end_time=?, duration=?
                            WHERE OID=?''',
                         (info.start_time, info.end_time, info.duration, self._run_oid))
    def validate(self):
        if not self.database:
            self.database = os.path.join(settings.output_directory, 'results.sqlite')
        self.database = os.path.expandvars(os.path.expanduser(self.database))
    def _initdb(self):
        with self._open_connecton() as conn:
            for command in SCHEMA:
                conn.execute(command)
    def _validate_schema_version(self):
        with self._open_connecton() as conn:
            try:
                c = conn.execute('SELECT schema_version FROM __meta')
                found_version = c.fetchone()[0]
            except sqlite3.OperationalError:
                message = '{} does not appear to be a valid WA results database.'.format(self.database)
                raise ResultProcessorError(message)
            if found_version != SCHEMA_VERSION:
                message = 'Schema version in {} ({}) does not match current version ({}).'
                raise ResultProcessorError(message.format(self.database, found_version, SCHEMA_VERSION))
    def _update_run(self, run_uuid):
        with self._open_connecton() as conn:
            conn.execute('INSERT INTO runs (uuid) VALUES (?)', (run_uuid,))
            conn.commit()
            c = conn.execute('SELECT OID FROM runs WHERE uuid=?', (run_uuid,))
            self._run_oid = c.fetchone()[0]
    def _update_spec(self, spec):
        self._last_spec = spec
        spec_tuple = (spec.id, self._run_oid, spec.number_of_iterations,
                      spec.label, spec.workload_name,
                      json.dumps(spec.boot_parameters),
                      json.dumps(spec.runtime_parameters),
                      json.dumps(spec.workload_parameters))
        with self._open_connecton() as conn:
            conn.execute('INSERT INTO workload_specs VALUES (?,?,?,?,?,?,?,?)', spec_tuple)
            conn.commit()
            c = conn.execute('SELECT OID FROM workload_specs WHERE run_oid=? AND id=?',
                             (self._run_oid, spec.id))
            self._spec_oid = c.fetchone()[0]
    @contextmanager
    def _open_connecton(self):
        conn = sqlite3.connect(self.database)
        try:
            yield conn
        finally:
            conn.commit()
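# Example (sketch): reading results back out of the database written by this
# processor. The 'results' view is created by SCHEMA above; the path shown is
# the default location (results.sqlite inside the run output directory).
#
#     import sqlite3
#     conn = sqlite3.connect('results.sqlite')
#     for row in conn.execute('SELECT workload, metric, value, units FROM results'):
#         print(row)
|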
# Copyright 2012-2015 ARM Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# pylint: disable=no-member
# pylint: disable=attribute-defined-outside-init
import os
import time
from wlauto import settings, Workload, Executable, Parameter
from wlauto.exceptions import ConfigError, WorkloadError
from wlauto.utils.types import boolean
TXT_RESULT_NAME = "cyclictest_result.txt"
RESULT_INTERPRETATION = {
"T": "Thread",
"P": "Priority",
"C": "Clock",
}
class Cyclictest(Workload):
name = "cyclictest"
description = """
Measures the amount of time that passes between when a timer expires and
when the thread which set the timer actually runs.
    Cyclictest works by taking a time snapshot just prior to waiting for a specific
    time interval (t1), then taking another time snapshot after the timer
    finishes (t2), then comparing the theoretical wakeup time with the actual
    wakeup time (t2 - (t1 + sleep_time)). This value is the latency for that
    timer's wakeup.
"""
parameters = [
Parameter(
"clock",
allowed_values=["monotonic", "realtime"],
default="realtime",
description=("specify the clock to be used during the test."),
),
Parameter(
"duration",
kind=int,
default=30,
description=("Specify the length for the test to run in seconds."),
),
Parameter(
"quiet",
kind=boolean,
default=True,
description=("Run the tests quiet and print only a summary on exit."),
),
Parameter(
"thread",
kind=int,
default=8,
description=("Set the number of test threads"),
),
Parameter(
"latency",
kind=int,
default=1000000,
description=("Write the value to /dev/cpu_dma_latency"),
),
Parameter(
"extra_parameters",
kind=str,
default="",
description=(
"Any additional command line parameters to append to the "
"existing parameters above. A list can be found at "
"https://rt.wiki.kernel.org/index.php/Cyclictest or "
"in the help page ``cyclictest -h``"
),
),
Parameter(
"clear_file_cache",
kind=boolean,
default=True,
description=("Clear file caches before starting test"),
),
Parameter(
"screen_off",
kind=boolean,
default=True,
description=(
"If true it will turn the screen off so that onscreen "
"graphics do not effect the score. This is predominantly "
"for devices without a GPU"
),
),
]
def setup(self, context):
self.cyclictest_on_device = "cyclictest"
self.cyclictest_result = os.path.join(
self.device.working_directory, TXT_RESULT_NAME
)
self.cyclictest_command = (
"{} --clock={} --duration={}s --thread={} --latency={} {} {} > {}"
)
self.device_binary = None
if not self.device.is_rooted:
raise WorkloadError(
"This workload requires a device with root premissions to run"
)
host_binary = context.resolver.get(
Executable(self, self.device.abi, "cyclictest")
)
self.device_binary = self.device.install(host_binary)
self.cyclictest_command = self.cyclictest_command.format(
self.device_binary,
0 if self.clock == "monotonic" else 1,
self.duration,
self.thread,
self.latency,
"--quiet" if self.quiet else "",
self.extra_parameters,
self.cyclictest_result,
)
if self.clear_file_cache:
self.device.execute("sync")
self.device.set_sysfile_value("/proc/sys/vm/drop_caches", 3)
if self.device.platform == "android":
if self.screen_off and self.device.is_screen_on:
self.device.execute("input keyevent 26")
def run(self, context):
self.device.execute(self.cyclictest_command, self.duration * 2, as_root=True)
def update_result(self, context):
self.device.pull_file(self.cyclictest_result, context.output_directory)
# Parsing the output
# Standard Cyclictest Output:
# T: 0 (31974) P:95 I:1000 C:4990 Min:9 Act:37 Avg:31 Max:59
with open(os.path.join(context.output_directory, TXT_RESULT_NAME)) as f:
for line in f:
if line.find("C:") is not -1:
# Key = T: 0 (31974) P:95 I:1000
# Remaing = 49990 Min:9 Act:37 Avg:31 Max:59
# sperator = C:
(key, sperator, remaing) = line.partition("C:")
index = key.find("T")
key = key.replace(key[index], RESULT_INTERPRETATION["T"])
index = key.find("P")
key = key.replace(key[index], RESULT_INTERPRETATION["P"])
index = sperator.find("C")
sperator = sperator.replace(
sperator[index], RESULT_INTERPRETATION["C"]
)
metrics = (sperator + remaing).split()
# metrics is now in the from of ['Min:', '9', 'Act:', '37', 'Avg:', '31' , 'Max', '59']
for i in range(0, len(metrics), 2):
full_key = key + " " + metrics[i][:-1]
value = int(metrics[i + 1])
context.result.add_metric(full_key, value, "microseconds")
def teardown(self, context):
if self.device.platform == "android":
if self.screen_off:
self.device.ensure_screen_is_on()
self.device.execute("rm -f {}".format(self.cyclictest_result))
|
# Copyright 2013-2015 ARM Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# pylint: disable=E1101,W0201
import os
import re
from collections import defaultdict
from wlauto import Workload, Parameter, File
from wlauto.utils.types import caseless_string
from wlauto.exceptions import WorkloadError
class Recentfling(Workload):
name = "recentfling"
description = """
Tests UI jank on android devices.
For this workload to work, ``recentfling.sh`` and ``defs.sh`` must be placed
in ``~/.workload_automation/dependencies/recentfling/``. These can be found
in the [AOSP Git repository](https://android.googlesource.com/platform/system/extras/+/master/tests/).
To change the apps that are opened at the start of the workload you will need
to modify the ``defs.sh`` file. You will need to add your app to ``dfltAppList``
and then add a variable called ``{app_name}Activity`` with the name of the
    activity to launch (where ``{app_name}`` is the name you put into ``dfltAppList``).
You can get a list of activities available on your device by running
``adb shell pm list packages -f``
"""
supported_platforms = ["android"]
parameters = [
Parameter(
"loops", kind=int, default=3, description="The number of test iterations."
),
]
    def initialize(self, context):  # pylint: disable=no-self-use
if context.device.get_sdk_version() < 23:
raise WorkloadError(
"This workload relies on ``dumpsys gfxinfo`` \
only present in Android M and onwards"
)
def setup(self, context):
self.defs_host = context.resolver.get(File(self, "defs.sh"))
self.recentfling_host = context.resolver.get(File(self, "recentfling.sh"))
self.device.push_file(self.recentfling_host, self.device.working_directory)
self.device.push_file(self.defs_host, self.device.working_directory)
self._kill_recentfling()
self.device.ensure_screen_is_on()
def run(self, context):
cmd = "echo $$>{dir}/pidfile; exec {dir}/recentfling.sh -i {}; rm {dir}/pidfile"
cmd = cmd.format(self.loops, dir=self.device.working_directory)
try:
self.output = self.device.execute(cmd, timeout=120)
except KeyboardInterrupt:
self._kill_recentfling()
raise
def update_result(self, context):
group_names = [
"90th Percentile",
"95th Percentile",
"99th Percentile",
"Jank",
"Jank%",
]
count = 0
        p = re.compile(
            r"Frames: \d+ latency: (?P<pct90>\d+)/(?P<pct95>\d+)/(?P<pct99>\d+) Janks: (?P<jank>\d+)\((?P<jank_pct>\d+)%\)"
        )
        for line in self.output.strip().splitlines():
            match = p.search(line)
if match:
count += 1
if line.startswith("AVE: "):
group_names = ["Average " + g for g in group_names]
count = 0
for metric in zip(group_names, match.groups()):
context.result.add_metric(
metric[0],
metric[1],
None,
classifiers={"loop": count or "Average"},
)
def teardown(self, context):
self.device.delete_file(
self.device.path.join(self.device.working_directory, "recentfling.sh")
)
self.device.delete_file(
self.device.path.join(self.device.working_directory, "defs.sh")
)
def _kill_recentfling(self):
pid = self.device.execute(
"cat {}/pidfile".format(self.device.working_directory)
)
if pid:
self.device.kill(pid.strip(), signal="SIGKILL")
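# Example (sketch): the per-iteration line format matched by the regex in
# update_result(). The numbers are illustrative.
#
#     Frames: 150 latency: 3/5/12 Janks: 4(2%)
#
# would yield the groups ('3', '5', '12', '4', '2').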
|
#!/usr/bin/env python
"""
mbed SDK
Copyright (c) 2011-2016 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import sys
from math import modf
from time import time, strftime, gmtime
class HtrunLogger(object):
"""! Yet another logger flavour"""
def __init__(self, prn_lock, name):
self.__prn_lock = prn_lock
self.__name = name
def __prn_func(self, text, nl=True):
"""! Prints and flushes data to stdout"""
with self.__prn_lock:
if nl and not text.endswith("\n"):
text += "\n"
sys.stdout.write(text)
sys.stdout.flush()
def __prn_log_human(self, level, text, timestamp=None):
if not timestamp:
timestamp = time()
timestamp_str = strftime("%y-%m-%d %H:%M:%S", gmtime(timestamp))
        frac, _ = modf(timestamp)
        s = "[%s.%03d][%s][%s] %s" % (timestamp_str, int(frac * 1000), self.__name, level, text)
self.__prn_func(s, nl=True)
def __prn_log(self, level, text, timestamp=None):
if not timestamp:
timestamp = time()
s = "[%.2f][%s][%s] %s" % (timestamp, self.__name, level, text)
self.__prn_func(s, nl=True)
def prn_dbg(self, text, timestamp=None):
self.__prn_log("DBG", text, timestamp)
def prn_wrn(self, text, timestamp=None):
self.__prn_log("WRN", text, timestamp)
def prn_err(self, text, timestamp=None):
self.__prn_log("ERR", text, timestamp)
def prn_inf(self, text, timestamp=None):
self.__prn_log("INF", text, timestamp)
def prn_txt(self, text, timestamp=None):
self.__prn_log("TXT", text, timestamp)
def prn_txd(self, text, timestamp=None):
self.__prn_log("TXD", text, timestamp)
def prn_rxd(self, text, timestamp=None):
self.__prn_log("RXD", text, timestamp)
|
#!/usr/bin/env python
"""
mbed SDK
Copyright (c) 2011-2015 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import unittest
from mbed_host_tests import is_host_test
from mbed_host_tests import get_host_test
from mbed_host_tests import get_plugin_caps
from mbed_host_tests import get_host_test_list
class BasicHostTestsTestCase(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_basic_get_host_test(self):
self.assertNotEqual(None, get_host_test("default"))
self.assertNotEqual(None, get_host_test("default_auto"))
def test_basic_is_host_test(self):
self.assertFalse(is_host_test(""))
self.assertFalse(is_host_test(None))
self.assertTrue(is_host_test("default"))
self.assertTrue(is_host_test("default_auto"))
def test_get_host_test_list(self):
d = get_host_test_list()
self.assertIs(type(d), dict)
self.assertIn("default", d)
self.assertIn("default_auto", d)
def test_get_plugin_caps(self):
d = get_plugin_caps()
self.assertIs(type(d), dict)
if __name__ == "__main__":
unittest.main()
|
#!/usr/bin/python
#
import sys
# Convert a FASTA file (argv[1]) to a FASTQ file (argv[2]), assigning a fixed
# quality of "H" to every base.
input_file = open(sys.argv[1], "r")
output = open(sys.argv[2], "w")
for line in input_file:
    seq = line.rstrip("\n")
    if seq.startswith(">"):
        # A FASTA header ">name" becomes the FASTQ header "@HTW-name".
        print("@HTW-" + seq[1:], file=output)
    else:
        print(seq, file=output)
        print("+", file=output)
        print("H" * len(seq), file=output)
input_file.close()
output.close()
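# Example (sketch): a FASTA record such as
#
#     >read1
#     ACGT
#
# is rewritten as the FASTQ record
#
#     @HTW-read1
#     ACGT
#     +
#     HHHH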
|
# Main entry point for the plugin.
# Author: Yuri van Geffen
import sublime, sublime_plugin
import os
import threading
import queue
import asyncore
import socket
from itertools import chain
import re
settings = sublime.load_settings("subdebug")
TCP_IP = "127.0.0.1"
TCP_PORT = 8172
BUFFER_SIZE = 1024
BASEDIR = settings.get("basedir", "")
STEP_ON_CONNECT = settings.get("step_on_connect", False)
# Handles incoming and outgoing messages for the MobDebug client
class SubDebugHandler(asyncore.dispatcher):
def __init__(self, socket, handler_id):
asyncore.dispatcher.__init__(self, socket)
self.handler_id = handler_id
msg_queue.put(b"STEP\n" if STEP_ON_CONNECT else b"RUN\n")
for view_name, row in state_handler.breakpoints():
msg_queue.put("SETB {0} {1}\n".format(view_name, row).encode("latin-1"))
    # Reads the message-code of incoming messages and passes
# them to the right function
def handle_read(self):
data = self.recv(BUFFER_SIZE)
if data:
print((self.handler_id, "Received: ", data))
split = data.split()
if split[0] in message_parsers:
message_parsers[split[0]](split)
def handle_write(self):
if not msg_queue.empty():
msg = msg_queue.get()
print(("Sending: ", msg))
self.send(msg)
def handle_error(self):
raise
# Starts listening on TCP_PORT and accepts incoming connections
# before passing them to an instance of SubDebugHandler
class SubDebugServer(asyncore.dispatcher):
def __init__(self, host, port):
asyncore.dispatcher.__init__(self)
self.handler_id = 0
self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
self.set_reuse_addr()
self.bind((host, port))
self.listen(1)
print(("Started listening on: ", host, ":", port))
def handle_accept(self):
pair = self.accept()
if pair is not None:
(conn_sock, client_address) = pair
print(("Incoming connection: ", client_address))
            self.handler_id += 1
            SubDebugHandler(conn_sock, self.handler_id)
def handle_close(self):
print("Closing server.")
self.close()
def handle_error(self):
self.close()
# Lets the user run the script (until breakpoint)
class RunCommand(sublime_plugin.WindowCommand):
def run(self):
print("Running until breakpoint...")
msg_queue.put(b"RUN\n")
state_handler.remove_line_marker()
# Lets the user step to the next line
class StepCommand(sublime_plugin.WindowCommand):
def run(self):
print("Stepping to next line...")
msg_queue.put(b"STEP\n")
# Lets the user step to the next line
class ToggleBreakpointCommand(sublime_plugin.TextCommand):
def run(self, edit):
view_name = simplify_path(self.view.file_name())
row, _ = self.view.rowcol(self.view.sel()[0].begin())
print(("Toggling breakpoint:", view_name, row))
state_handler.toggle_breakpoint(view_name, row + 1)
# Lets the user pick a base directory from where the lua is executed
class SetBasedirCommand(sublime_plugin.WindowCommand):
def run(self):
        # Run if the user wants to choose their own base directory
def choose_other(path):
global BASEDIR
BASEDIR = path.replace("\\", "/")
if BASEDIR[-1] != "/":
BASEDIR += "/"
print(("BASEDIR:", BASEDIR))
        # Run when the user has chosen a base directory option
def selected_folder(index):
global BASEDIR
            if index != -1:  # The last option lets the user choose a base dir themselves
if index == len(folders) - 1:
sublime.active_window().show_input_panel(
"Give the base directory path.",
BASEDIR,
choose_other,
None,
None,
)
else:
BASEDIR = folders[index] + "/"
state_handler.clear_state()
print(("BASEDIR:", BASEDIR))
folders = list(chain.from_iterable([w.folders() for w in sublime.windows()]))
folders = [f.replace("\\", "/") for f in folders]
folders.insert(len(folders), "Choose other directory...")
sublime.active_window().show_quick_panel(folders, selected_folder)
# Lets the user step to the next line
class ToggleStepOnConnectCommand(sublime_plugin.WindowCommand):
def run(self):
global STEP_ON_CONNECT
STEP_ON_CONNECT = not STEP_ON_CONNECT
print(("Step on connect:", STEP_ON_CONNECT))
def is_checked(self):
return STEP_ON_CONNECT or False
# =========Incoming message parsers=========#
# Called when the "202 Paused" message is received
def paused_command(args):
state_handler.set_line_marker(args[2].decode("utf-8"), int(args[3]))
# Mapping from incoming messages to the functions that parse them
message_parsers = {
b"202": paused_command,
}
# ===========================================#
class StateHandler:
# Initiates object by checking which views are available and
# clearing the state
def __init__(self):
self.clear_state()
self.update_regions()
def clear_state(self):
self.state = {}
self.update_regions()
# Gets all available views in sublime and adds the missing ones to the state
def add_missing_views(self):
views = [v for v in sum([w.views() for w in sublime.windows()], [])]
self.views = {
            simplify_path(v.file_name()): v for v in views if v.file_name() is not None
}
print((self.views))
for view_name, view in list(self.views.items()):
if view_name not in self.state:
self.state[view_name] = []
# Updates all views with the available state-objects using the
# assigned functions
def update_regions(self):
self.add_missing_views()
# Iterate over all files in the state
for view_name, regions in list(self.state.items()):
# Remove all old regions
for reg_type_name in self.region_types:
self.views[view_name].erase_regions(reg_type_name)
region_sets = {}
# Iterate over all regions in that file
for reg_type, line in regions:
if reg_type == "line_marker" or ("line_marker", line) not in regions:
if reg_type not in region_sets:
region_sets[reg_type] = []
region_sets[reg_type].append(
sublime.Region(self.views[view_name].text_point(line - 1, 0))
)
# Register all new regions except the line-marker with sublime
for reg_name, v in list(region_sets.items()):
print(("Adding region:", view_name, reg_name, v))
self.views[view_name].add_regions(
reg_name, v, *self.region_types[reg_name]
)
def set_line_marker(self, view_name, line_number):
view_name = simplify_path(view_name)
print(("Setting line marker:", view_name, line_number))
self.add_missing_views()
if view_name in self.views:
self.state.setdefault(view_name, [])
self.state[view_name] = [
(k, v) for k, v in self.state[view_name] if k != "line_marker"
]
self.state[view_name].append(("line_marker", line_number))
self.update_regions()
def remove_line_marker(self):
for name, view in list(self.state.items()):
self.state[name] = [(t, n) for t, n in view if t != "line_marker"]
self.update_regions()
def toggle_breakpoint(self, view_name, line_number):
self.add_missing_views()
if (
view_name in self.views
and ("breakpoint", line_number) in self.state[view_name]
):
self.remove_breakpoint(view_name, line_number)
else:
self.set_breakpoint(view_name, line_number)
self.update_regions()
def set_breakpoint(self, view_name, line_number):
self.state.setdefault(view_name, [])
self.state[view_name].append(("breakpoint", line_number))
msg_queue.put("SETB {0} {1}\n".format(view_name, line_number).encode("latin-1"))
def remove_breakpoint(self, view_name, line_number):
self.state[view_name].remove(("breakpoint", line_number))
msg_queue.put("DELB {0} {1}\n".format(view_name, line_number).encode("latin-1"))
def breakpoints(self):
ret = []
for k, v in list(self.state.items()):
for t in v:
if t[0] == "breakpoint":
ret.append((k, t[1]))
return ret
views = {}
state = {}
region_types = {
"breakpoint": ("keyword", "circle"),
"line_marker": ("keyword", "bookmark"),
}
def plugin_unloaded():
settings.set("basedir", BASEDIR)
settings.set("step_on_connect", STEP_ON_CONNECT)
print("Closing down the server...")
server.close()
def simplify_path(path):
path = path.replace("\\", "/").replace(BASEDIR, "")
    path = re.sub(r"\.lua$", "", path)  # Strip ".lua" from the path
return path
# Open a threadsafe message queue
msg_queue = queue.Queue()
state_handler = StateHandler()
# Start listening and open the asyncore loop
server = SubDebugServer(TCP_IP, TCP_PORT)
if os.name == "posix":
thread = threading.Thread(target=asyncore.loop, kwargs={"use_poll": True})
else:
thread = threading.Thread(target=asyncore.loop)
thread.start()
|
from django.contrib import sitemaps
from django.core.urlresolvers import reverse
class StaticViewSitemap(sitemaps.Sitemap):
priority = 0.5
changefreq = "monthly"
def items(self):
return [
"landpage",
"robots",
"humans",
"google_plus_verify",
"terms",
"privacy",
]
def location(self, item):
return reverse(item)
# https://docs.djangoproject.com/en/1.8/ref/contrib/sitemaps/
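# Example (sketch): wiring this sitemap into urls.py, in the same Django 1.x
# style used elsewhere in this project:
#
#     from django.contrib.sitemaps.views import sitemap
#     sitemaps = {'static': StaticViewSitemap}
#     url(r'^sitemap\.xml$', sitemap, {'sitemaps': sitemaps}),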
|
from django.conf.urls import patterns, include, url
from publisher.views import catalog
from publisher.views import my_publication
from publisher.views import publication
urlpatterns = patterns(
"",
# Publications(s)
url(r"^publish$", catalog.catalog_page),
url(r"^publication/(\d+)$", publication.publication_page),
url(r"^publication/(\d+)/peer_review_modal$", publication.peer_review_modal),
url(r"^publication/(\d+)/save_peer_review$", publication.save_peer_review),
url(r"^publication/(\d+)/delete_peer_review$", publication.delete_peer_review),
# My Publications
url(r"^my_publications$", my_publication.my_publications_page),
url(r"^refresh_publications_table$", my_publication.refresh_publications_table),
url(r"^my_publication_modal$", my_publication.my_publication_modal),
url(r"^save_publication$", my_publication.save_publication),
url(r"^delete_publication$", my_publication.delete_publication),
)
|
"""added goal properties
Revision ID: 5018059c5c8f
Revises: 16b4a243d41d
Create Date: 2015-09-23 11:56:01.897992
"""
# revision identifiers, used by Alembic.
revision = "5018059c5c8f"
down_revision = "16b4a243d41d"
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.create_table(
"goalproperties",
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("name", sa.String(length=255), nullable=False),
sa.Column("is_variable", sa.Boolean(), nullable=False),
sa.PrimaryKeyConstraint("id"),
)
op.create_table(
"goals_goalproperties",
sa.Column("goal_id", sa.Integer(), nullable=False),
sa.Column("property_id", sa.Integer(), nullable=False),
sa.Column("value", sa.String(length=255), nullable=True),
sa.Column("value_translation_id", sa.Integer(), nullable=True),
sa.Column("from_level", sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(["goal_id"], ["goals.id"], ondelete="CASCADE"),
sa.ForeignKeyConstraint(
["property_id"], ["goalproperties.id"], ondelete="CASCADE"
),
sa.ForeignKeyConstraint(
["value_translation_id"], ["translationvariables.id"], ondelete="RESTRICT"
),
sa.PrimaryKeyConstraint("goal_id", "property_id", "from_level"),
)
op.add_column(
"goals",
sa.Column("name", sa.String(length=255), nullable=False, server_default=""),
)
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_column("goals", "name")
op.drop_table("goals_goalproperties")
op.drop_table("goalproperties")
### end Alembic commands ###
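# Example (sketch): applying or reverting this revision with the Alembic CLI,
# using the identifiers declared above:
#
#     alembic upgrade 5018059c5c8f
#     alembic downgrade 16b4a243d41d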
|
# Demonstration of `applib` features
import logging
from applib.base import Cmdln, Application
from applib.misc import require_option
from applib import textui, sh, _cmdln as cmdln
LOG = logging.getLogger(__name__)
application = Application("demo-app", "CompanyNameHere", "1.2")
@cmdln.option("", "--foo", action="store_true", help="*must pass --foo")
class Commands(Cmdln):
name = "demo-app"
def initialize(self):
require_option(self.options, "foo")
@cmdln.alias("cd")
@cmdln.option(
"-t", "--show-time", action="store_true", help="Also show the current time"
)
def do_currentdate(self, subcmd, opts):
"""${cmd_name}: Show the current date
${cmd_usage}
${cmd_option_list}
"""
with self.bootstrapped():
from datetime import datetime
now = datetime.now()
LOG.debug("datetime.now = %s", now)
if opts.show_time:
print(now)
else:
print((now.date()))
def do_ls(self, subcmd, opts):
"""${cmd_name}: Show directory listing (runs 'ls')
${cmd_usage}
${cmd_option_list}
"""
with self.bootstrapped():
print((sh.run("ls")[0].decode("utf-8")))
def do_makeerror(self, subcmd, opts, what):
"""${cmd_name}: Make an error. Use -v to see full traceback
${cmd_usage}
${cmd_option_list}
"""
with self.bootstrapped():
LOG.debug("About to make an error! %s", what)
textui.askyesno("Press enter to proceed:", default=True)
1 / 0
@cmdln.option("", "--no-break", action="store_true", help="Don't break from loop")
def do_think(self, subcmd, opts, length=200):
"""${cmd_name}: Progress bar example
${cmd_usage}
${cmd_option_list}
"""
with self.bootstrapped():
import time
length = int(length)
for x in textui.ProgressBar.iterate(
list(range(length)), post="Thought {total} thoughts in time {elapsed}"
):
if x == length - 1 and not opts.no_break:
break # test that break doesn't mess up output
time.sleep(0.1)
def do_multable(self, subcmd, opts, number=10, times=25):
"""${cmd_name}: Print multiplication table
To demonstrate `colprint` feature
${cmd_usage}
${cmd_option_list}
"""
with self.bootstrapped():
textui.colprint(
[
[str(x * y) for y in range(1, 1 + int(times))]
for x in range(1, 1 + int(number))
]
)
if __name__ == "__main__":
application.run(Commands)
|
# Copyright (c) 2015-2016, Activision Publishing, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from assertpy import assert_that, fail
class TestType(object):
def test_is_type_of(self):
assert_that("foo").is_type_of(str)
assert_that(123).is_type_of(int)
assert_that(0.456).is_type_of(float)
# assert_that(234L).is_type_of(long)
assert_that(["a", "b"]).is_type_of(list)
assert_that(("a", "b")).is_type_of(tuple)
assert_that({"a": 1, "b": 2}).is_type_of(dict)
assert_that(set(["a", "b"])).is_type_of(set)
assert_that(None).is_type_of(type(None))
assert_that(Foo()).is_type_of(Foo)
assert_that(Bar()).is_type_of(Bar)
def test_is_type_of_failure(self):
try:
assert_that("foo").is_type_of(int)
fail("should have raised error")
except AssertionError as ex:
assert_that(str(ex)).is_equal_to(
"Expected <foo:str> to be of type <int>, but was not."
)
def test_is_type_of_bad_arg_failure(self):
try:
assert_that("foo").is_type_of("bad")
fail("should have raised error")
except TypeError as ex:
assert_that(str(ex)).is_equal_to("given arg must be a type")
def test_is_type_of_subclass_failure(self):
try:
assert_that(Bar()).is_type_of(Foo)
fail("should have raised error")
except AssertionError as ex:
assert_that(str(ex)).starts_with("Expected <")
assert_that(str(ex)).ends_with(":Bar> to be of type <Foo>, but was not.")
def test_is_instance_of(self):
assert_that("foo").is_instance_of(str)
assert_that(123).is_instance_of(int)
assert_that(0.456).is_instance_of(float)
# assert_that(234L).is_instance_of(long)
assert_that(["a", "b"]).is_instance_of(list)
assert_that(("a", "b")).is_instance_of(tuple)
assert_that({"a": 1, "b": 2}).is_instance_of(dict)
assert_that(set(["a", "b"])).is_instance_of(set)
assert_that(None).is_instance_of(type(None))
assert_that(Foo()).is_instance_of(Foo)
assert_that(Bar()).is_instance_of(Bar)
assert_that(Bar()).is_instance_of(Foo)
def test_is_instance_of_failure(self):
try:
assert_that("foo").is_instance_of(int)
fail("should have raised error")
except AssertionError as ex:
assert_that(str(ex)).is_equal_to(
"Expected <foo:str> to be instance of class <int>, but was not."
)
def test_is_instance_of_bad_arg_failure(self):
try:
assert_that("foo").is_instance_of("bad")
fail("should have raised error")
except TypeError as ex:
assert_that(str(ex)).is_equal_to("given arg must be a class")
class Foo(object):
pass
class Bar(Foo):
pass
|
import sys
import math
import scipy
import pylab
import scipy.io.wavfile as wav
import wave
from scipy import signal
from itertools import product
import numpy
def readWav():
"""
Reads a sound wave from a standard input and finds its parameters.
"""
# Read the sound wave from the input.
sound_wave = wave.open(sys.argv[1], "r")
# Get parameters of the sound wave.
nframes = sound_wave.getnframes()
framerate = sound_wave.getframerate()
params = sound_wave.getparams()
duration = nframes / float(framerate)
print("frame rate: %d " % (framerate,))
print("nframes: %d" % (nframes,))
print("duration: %f seconds" % (duration,))
print(scipy.array(sound_wave))
return (sound_wave, nframes, framerate, duration, params)
def getDuration(sound_file):
"""
Returns the duration of a given sound file.
"""
wr = wave.open(sound_file, "r")
nchannels, sampwidth, framerate, nframes, comptype, compname = wr.getparams()
return nframes / float(framerate)
def getFrameRate(sound_file):
"""
Returns the frame rate of a given sound file.
"""
wr = wave.open(sound_file, "r")
nchannels, sampwidth, framerate, nframes, comptype, compname = wr.getparams()
return framerate
def get_channels_no(sound_file):
"""
Returns number of channels of a given sound file.
"""
wr = wave.open(sound_file, "r")
nchannels, sampwidth, framerate, nframes, comptype, compname = wr.getparams()
return nchannels
def plotSoundWave(rate, sample):
"""
Plots a given sound wave.
"""
t = scipy.linspace(0, 2, 2 * rate, endpoint=False)
pylab.figure("Sound wave")
T = int(0.0001 * rate)
pylab.plot(
t[:T],
sample[:T],
)
pylab.show()
def plotPartials(binFrequencies, maxFreq, magnitudes):
"""
Calculates and plots the power spectrum of a given sound wave.
"""
T = int(maxFreq)
pylab.figure("Power spectrum")
pylab.plot(
binFrequencies[:T],
magnitudes[:T],
)
pylab.xlabel("Frequency (Hz)")
pylab.ylabel("Power spectrum (|X[k]|^2)")
pylab.show()
def plotPowerSpectrum(FFT, binFrequencies, maxFreq):
"""
Calculates and plots the power spectrum of a given sound wave.
"""
T = int(maxFreq)
pylab.figure("Power spectrum")
pylab.plot(
binFrequencies[:T],
scipy.absolute(FFT[:T]) * scipy.absolute(FFT[:T]),
)
pylab.xlabel("Frequency (Hz)")
pylab.ylabel("Power spectrum (|X[k]|^2)")
pylab.show()
def get_frequencies_axis(framerate, fft_length):
binResolution = float(framerate) / float(fft_length)
binFreqs = []
for k in range(fft_length):
binFreq = k * binResolution
binFreqs.append(binFreq)
return binFreqs
def get_next_power_2(n):
"""
    Returns the largest power of 2 that is smaller than n (or 1 if n <= 1).
"""
power = 1
while power < n:
power *= 2
if power > 1:
        return power // 2
else:
return 1
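# Examples: get_next_power_2(1000) -> 512, get_next_power_2(1024) -> 512,
# get_next_power_2(1) -> 1.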
class MIDI_Detector(object):
"""
Class for MIDI notes detection given a .wav file.
"""
def __init__(self, wav_file):
self.wav_file = wav_file
self.minFreqConsidered = 20
self.maxFreqConsidered = 5000
self.low_f0s = [
27.5,
29.135,
30.868,
32.703,
34.648,
37.708,
38.891,
41.203,
43.654,
46.249,
48.999,
51.913,
55.0,
58.27,
61.735,
65.406,
69.296,
73.416,
77.782,
82.407,
]
def detect_MIDI_notes(self):
"""
The algorithm for calculating midi notes from a given wav file.
"""
(framerate, sample) = wav.read(self.wav_file)
if get_channels_no(self.wav_file) > 1:
sample = sample.mean(axis=1)
duration = getDuration(self.wav_file)
midi_notes = []
# Consider only files with a duration longer than 0.18 seconds.
if duration > 0.18:
(
FFT,
filteredFreqs,
maxFreq,
magnitudes,
significant_freq,
) = self.calculateFFT(duration, framerate, sample)
# plotPowerSpectrum(FFT, filteredFreqs, 1000)
clusters = self.clusterFrequencies(filteredFreqs)
averagedClusters = self.getClustersMeans(clusters)
f0_candidates = self.getF0Candidates(averagedClusters)
midi_notes = self.matchWithMIDINotes(f0_candidates)
"""
OCTAVE CORRECTION METHOD
"""
"""
# Include a note with a significant magnitude:
# if its magnitude is higher than the sum of magnitudes
# of all other spectral peaks
# include it in the list of detected notes and
# remove the note that's octave lower than this one
# if it was also detected.
if significant_freq > 0:
significant_midi_notes = self.matchWithMIDINotes([
significant_freq])
significant_midi_note = significant_midi_notes[0]
if significant_midi_note not in midi_notes:
midi_notes.append(significant_midi_note)
midi_notes = self.remove_lower_octave(
significant_midi_note, midi_notes)
"""
return midi_notes
def remove_lower_octave(self, upper_octave, midi_notes):
lower_octave = upper_octave - 12
if lower_octave in midi_notes:
midi_notes.remove(lower_octave)
return midi_notes
def get_candidates_with_partials(self, frequencies, magnitudes):
print(frequencies)
partial_margin = 11.0 # Hz
# A list of frequencies of each candidate.
candidates_freq = []
# A list of magnitudes of frequencies of each candidate.
candidates_magnitude = []
for i in range(len(frequencies)):
partials, partial_magnitudes = self.find_partials(
frequencies[i:], frequencies[i], magnitudes[i:]
)
candidates_freq.append(partials)
candidates_magnitude.append(partial_magnitudes)
return (candidates_freq, candidates_magnitude)
def calculateFFT(self, duration, framerate, sample):
"""
Calculates FFT for a given sound wave.
Considers only frequencies with the magnitudes higher than
a given threshold.
"""
fft_length = int(duration * framerate)
        # To make the FFT much faster, use a length that is a power of 2.
fft_length = get_next_power_2(fft_length)
FFT = numpy.fft.fft(sample, n=fft_length)
""" ADJUSTING THRESHOLD - HIGHEST SPECTRAL PEAK METHOD"""
threshold = 0
power_spectra = []
frequency_bin_with_max_spectrum = 0
        for i in range(len(FFT) // 2):
            power_spectrum = numpy.absolute(FFT[i]) * numpy.absolute(FFT[i])
if power_spectrum > threshold:
threshold = power_spectrum
frequency_bin_with_max_spectrum = i
power_spectra.append(power_spectrum)
max_power_spectrum = threshold
threshold *= 0.1
binFrequencies = []
magnitudes = []
binResolution = float(framerate) / float(fft_length)
sum_of_significant_spectra = 0
# For each bin calculate the corresponding frequency.
for k in range(len(FFT)):
binFreq = k * binResolution
# Truncating the FFT so we consider only hearable frequencies.
if binFreq > self.maxFreqConsidered:
FFT = FFT[:k]
break
elif binFreq > self.minFreqConsidered:
# Consider only the frequencies
# with magnitudes higher than the threshold.
power_spectrum = power_spectra[k]
if power_spectrum > threshold:
magnitudes.append(power_spectrum)
binFrequencies.append(binFreq)
# Sum all significant power spectra
# except the max power spectrum.
if power_spectrum != max_power_spectrum:
sum_of_significant_spectra += power_spectrum
significant_freq = 0.0
if max_power_spectrum > sum_of_significant_spectra:
significant_freq = frequency_bin_with_max_spectrum * binResolution
        # Maximum frequency considered after truncating; if nothing was
        # truncated this is the full rate.
maxFreq = len(FFT) / duration
return (FFT, binFrequencies, maxFreq, magnitudes, significant_freq)
# Code for STFT taken from:
# http://stackoverflow.com/questions/2459295/stft-and-istft-in-python
def STFT(self, x, samplingFreq, framesz, hop):
"""
Computes STFT for a given sound wave using Hanning window.
"""
framesamp = int(framesz * samplingFreq)
print("FRAMESAMP: " + str(framesamp))
hopsamp = int(hop * samplingFreq)
print("HOP SAMP: " + str(hopsamp))
# Modification: using Hanning window instead of Hamming - by Pertusa
        w = signal.windows.hann(framesamp)
X = numpy.array(
[
numpy.fft.fft(w * x[i : i + framesamp])
for i in range(0, len(x) - framesamp, hopsamp)
]
)
return X
def plotMagnitudeSpectrogram(self, rate, sample, framesz, hop):
"""
Calculates and plots the magnitude spectrum of a given sound wave.
"""
X = self.STFT(sample, rate, framesz, hop)
# Plot the magnitude spectrogram.
pylab.figure("Magnitude spectrogram")
pylab.imshow(
            numpy.absolute(X.T), origin="lower", aspect="auto", interpolation="nearest"
)
pylab.xlabel("Time")
pylab.ylabel("Frequency")
pylab.show()
def getFilteredFFT(self, FFT, duration, threshold):
"""
Returns a list of frequencies with the magnitudes higher
than a given threshold.
"""
significantFreqs = []
for i in range(len(FFT)):
            power_spectrum = numpy.absolute(FFT[i]) * numpy.absolute(FFT[i])
if power_spectrum > threshold:
significantFreqs.append(i / duration)
return significantFreqs
def clusterFrequencies(self, freqs):
"""
Clusters frequencies.
"""
if len(freqs) == 0:
return {}
clusteredFreqs = {}
bin = 0
clusteredFreqs[0] = [freqs[0]]
for i in range(len(freqs) - 1):
dist = self.calcDistance(freqs[i], freqs[i + 1])
if dist < 2.0:
clusteredFreqs[bin].append(freqs[i + 1])
else:
bin += 1
clusteredFreqs[bin] = [freqs[i + 1]]
return clusteredFreqs
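    # Worked example (thresholds use calcDistance below): [100.0, 101.0, 150.0]
    # clusters to {0: [100.0, 101.0], 1: [150.0]}, since the scaled distance
    # between 100 and 101 is ~0.22 (< 2.0) while 101 to 150 is ~10.1.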
def getClustersMeans(self, clusters):
"""
Given clustered frequencies finds a mean of each cluster.
"""
means = []
for bin, freqs in clusters.items():
means.append(sum(freqs) / len(freqs))
return means
def getDistances(self, freqs):
"""
Returns a list of distances between each frequency.
"""
distances = {
(freqs[i], freqs[j]): self.calcDistance(freqs[i], freqs[j])
for (i, j) in product(list(range(len(freqs))), repeat=2)
}
distances = {
freq_pair: dist for freq_pair, dist in distances.items() if dist < 2.0
}
return distances
def calcDistance(self, freq1, freq2):
"""
Calculates distance between frequencies taking into account that
the frequencies of pitches increase logarithmically.
"""
difference = abs(freq1 - freq2)
log = math.log((freq1 + freq2) / 2)
return difference / log
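    # e.g. a 2 Hz gap scores ~2/ln(101) ≈ 0.43 around 100 Hz but only
    # ~2/ln(1001) ≈ 0.29 around 1000 Hz, so the same absolute gap counts
    # as closer at higher frequencies.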
def getF0Candidates(self, frequencies):
"""
Given frequencies finds possible F0 candidates
by discarding potential harmonic frequencies.
"""
f0_candidates = []
"""
MODIFICATION: CONSIDER ONLY MIDDLE RANGE FREQUENCIES
"""
"""
if len(frequencies) > 0 and frequencies[0] < 83.0:
low_freq_candidate = self.find_low_freq_candidate(frequencies)
if low_freq_candidate > 0.0:
f0_candidates.append(low_freq_candidate)
#frequencies = self.filterOutHarmonics(
frequencies, low_freq_candidate)
"""
while len(frequencies) > 0:
f0_candidate = frequencies[0]
f0_candidates.append(f0_candidate)
frequencies.remove(f0_candidate)
frequencies = self.filterOutHarmonics(frequencies, f0_candidate)
return f0_candidates
def filterOutHarmonics(self, frequencies, f0_candidate):
"""
Given frequencies and an f0 candidate remove
all possible harmonics of this f0 candidate.
"""
# If an integer frequency is a multiple of another frequency
# then it is its harmonic. This constant was found empirically.
REMAINDER_THRESHOLD = 0.2
def is_multiple(f, f0):
return abs(round(f / f0) - f / f0) < REMAINDER_THRESHOLD
return [f for f in frequencies if not is_multiple(f, f0_candidate)]
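    # e.g. with f0_candidate=100.0, [150.0, 200.1, 301.0] keeps only 150.0:
    # 200.1/100 and 301/100 are within 0.2 of an integer while 1.5 is not.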
def find_low_freq_candidate(self, frequencies):
REMAINDER_THRESHOLD = 0.05
f0_candidates = []
def is_multiple(f, f0):
return abs(round(f / f0) - f / f0) < REMAINDER_THRESHOLD
best_candidate = -1
max_no_partials = 0
for low_f0 in self.low_f0s:
num_of_partials = 0
for f in frequencies:
if is_multiple(f, low_f0):
num_of_partials += 1
if num_of_partials > max_no_partials:
max_no_partials = num_of_partials
best_candidate = low_f0
return best_candidate
def find_partials(self, frequencies, f0_candidate, magnitudes):
"""
Given frequencies, frequency magnitudes and an f0 candidate
return the partials and magnitudes of this f0 candidate.
"""
REMAINDER_THRESHOLD = 0.05
def is_multiple(f, f0):
return abs(round(f / f0) - f / f0) < REMAINDER_THRESHOLD
partials = []
partial_magnitudes = []
for i in range(len(frequencies)):
if is_multiple(frequencies[i], f0_candidate):
partials.append(frequencies[i])
partial_magnitudes.append(magnitudes[i])
return (partials, partial_magnitudes)
def matchWithMIDINotes(self, f0_candidates):
midi_notes = []
for freq in f0_candidates:
# Formula for calculating MIDI note number.
midi_notes.append(int(round(69 + 12 * math.log(freq / 440) / math.log(2))))
return midi_notes
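    # Sanity check for the formula above: 440 Hz maps to MIDI note 69 (A4)
    # and 261.63 Hz to 60 (middle C).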
if __name__ == "__main__":
MIDI_detector = MIDI_Detector(sys.argv[1])
midi_notes = MIDI_detector.detect_MIDI_notes()
print(midi_notes)
|
__author__ = "Ahmed Hani Ibrahim"
class Action(object):
def GetActionName(self):
return self.__name
def SetActionName(self, name):
self.__name = name
def __init__(self, name):
self.__name = name
|
# auto-generated file
import _cffi_backend
ffi = _cffi_backend.FFI(
"_simple_example",
_version=0x2601,
_types=b"\x00\x00\x04\x0D\x00\x00\x03\x03\x00\x00\x01\x0F\x00\x00\x02\x01\x00\x00\x07\x01",
_globals=(
b"\x00\x00\x00\x23printf",
0,
),
)
|
#!/usr/bin/env python
import binascii
from dnslib import *
packet = binascii.unhexlify(
b"d5ad818000010005000000000377777706676f6f676c6503636f6d0000010001c00c0005000100000005000803777777016cc010c02c0001000100000005000442f95b68c02c0001000100000005000442f95b63c02c0001000100000005000442f95b67c02c0001000100000005000442f95b93"
)
d = DNSRecord.parse(packet)
# The default text representation of the DNSRecord is in zone file format
print(d)
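# The parsed sections are also available programmatically,
# e.g. d.header, d.questions and d.rr (the answer records).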
|
from app import app
if __name__ == "__main__":
app.run()
|
# coding: utf-8
from flask import render_template, Blueprint, redirect, request, url_for
from ..forms import SigninForm, SignupForm
from ..utils.account import signin_user, signout_user
from ..utils.permissions import VisitorPermission, UserPermission
from ..models import db, User
bp = Blueprint("account", __name__)
@bp.route("/signin", methods=["GET", "POST"])
@VisitorPermission()
def signin():
"""Signin"""
form = SigninForm()
if form.validate_on_submit():
signin_user(form.user)
return redirect(url_for("site.index"))
return render_template("account/signin/signin.html", form=form)
@bp.route("/signup", methods=["GET", "POST"])
@VisitorPermission()
def signup():
"""Signup"""
form = SignupForm()
if form.validate_on_submit():
params = form.data.copy()
params.pop("repassword")
user = User(**params)
db.session.add(user)
db.session.commit()
signin_user(user)
return redirect(url_for("site.index"))
return render_template("account/signup/signup.html", form=form)
@bp.route("/signout")
def signout():
"""Signout"""
signout_user()
return redirect(request.referrer or url_for("site.index"))
|
from app import app, db
import unittest
import os
import tempfile
from flask import json
TEST_DB = "test.db"
class BasicTestCase(unittest.TestCase):
def test_index(self):
"""inital test. ensure flask was set up correctly"""
tester = app.test_client(self)
response = tester.get("/", content_type="html/text")
self.assertEqual(response.status_code, 200)
def test_database(self):
"""inital test. ensure that the database exists"""
tester = os.path.exists("flaskr.db")
self.assertTrue(tester)
class FlaskrTestCase(unittest.TestCase):
def setUp(self):
"""Set up a blank temp database before each test"""
basedir = os.path.abspath(os.path.dirname(__file__))
app.config["TESTING"] = True
app.config["SQLALCHEMY_DATABASE_URI"] = "sqlite:///" + os.path.join(
basedir, TEST_DB
)
self.app = app.test_client()
db.create_all()
def tearDown(self):
"""Destroy blank temp database after each test"""
db.drop_all()
def login(self, username, password):
"""Login helper function"""
return self.app.post(
"/login",
data=dict(username=username, password=password),
follow_redirects=True,
)
def logout(self):
"""Logout helper function"""
return self.app.get("/logout", follow_redirects=True)
# assert functions
def test_empty_db(self):
"""Ensure database is blank"""
rv = self.app.get("/")
self.assertIn(b"No entries yet. Add some!", rv.data)
def test_login_logout(self):
"""Test login and logout using helper functions"""
rv = self.login(app.config["USERNAME"], app.config["PASSWORD"])
self.assertIn(b"You were logged in", rv.data)
rv = self.logout()
self.assertIn(b"You were logged out", rv.data)
rv = self.login(app.config["USERNAME"] + "x", app.config["PASSWORD"])
self.assertIn(b"Invalid username", rv.data)
rv = self.login(app.config["USERNAME"], app.config["PASSWORD"] + "x")
self.assertIn(b"Invalid password", rv.data)
def test_messages(self):
"""Ensure that user can post messages"""
self.login(app.config["USERNAME"], app.config["PASSWORD"])
rv = self.app.post(
"/add",
data=dict(title="<Hello>", text="<strong>HTML</strong> allowed here"),
follow_redirects=True,
)
self.assertNotIn(b"No entries here so far", rv.data)
self.assertIn(b"<Hello>", rv.data)
self.assertIn(b"<strong>HTML</strong> allowed here", rv.data)
def test_delete_message(self):
"""Ensure the messages are being deleted"""
rv = self.app.get("/delete/1")
data = json.loads(rv.data)
self.assertEqual(data["status"], 1)
if __name__ == "__main__":
unittest.main()
|
#!/usr/bin/env python
# encoding: utf-8
import json
data = [{"a": "A", "b": (2, 4), "c": 3.0}]
print("DATA:", repr(data))
unsorted = json.dumps(data)
print("JSON:", json.dumps(data))
print("SORT:", json.dumps(data, sort_keys=True))
first = json.dumps(data, sort_keys=True)
second = json.dumps(data, sort_keys=True)
print("UNSORTED MATCH:", unsorted == first)
print("SORTED MATCH :", first == second)
|
#!/usr/bin/env python
# Exercise 30: Else and If
people = 30
cars = 40
trucks = 15
if cars > people:
print("We should take the cars.")
elif cars < people:
print("We should not take the cars.")
else:
print("We can't decide.")
if trucks > cars:
print("That's too many trucks.")
elif trucks < cars:
print("Maybe we could take the trucks.")
else:
print("We still can't decide.")
if people > trucks:
print("Alright, let's just take the trucks.")
else:
print("Fine, let's stay home then.")
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import _thread
import time
mylock = _thread.allocate_lock() # Allocate a lock
num = 0 # Shared resource
def add_num(name):
global num
while True:
mylock.acquire() # Get the lock
# Do something to the shared resource
print(("Thread %s locked! num=%s" % (name, str(num))))
if num >= 5:
print(("Thread %s released! num=%s" % (name, str(num))))
mylock.release()
_thread.exit()
num += 1
print(("Thread %s released! num=%s" % (name, str(num))))
mylock.release() # Release the lock.
def test():
_thread.start_new_thread(add_num, ("A",))
_thread.start_new_thread(add_num, ("B",))
time.sleep(30)
if __name__ == "__main__":
test()
|
#!/usr/bin/env python
# encoding: utf-8
"""Expand shell variables in filenames.
"""
import os.path
import os
os.environ["MYVAR"] = "VALUE"
print(os.path.expandvars("/path/to/$MYVAR"))
|
#!/usr/bin/env python
import pyglet
from pyglet.window import key
from pyglet.window import mouse
window = pyglet.window.Window()
@window.event
def on_key_press(symbol, modifiers):
print("key %s was pressed" % symbol)
if symbol == key.A:
print('The "A" key was pressed.')
elif symbol == key.LEFT:
print("The left arrow key was pressed.")
elif symbol == key.ENTER:
print("The enter key was pressed.")
@window.event
def on_mouse_press(x, y, button, modifiers):
print("location: (%s, %s), button: %s" % (x, y, button))
if button == mouse.LEFT:
print("The left mouse button was pressed.")
@window.event
def on_draw():
window.clear()
pyglet.app.run()
|
number = 53
go = True
while go:
    guess = int(input("input a number please"))
    if guess == number:
        print("correct")
        go = False
    elif guess < number:
        print("try a bigger one")
    else:
        print("try a smaller one")
else:
    print("it's over")
|
import os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, "README.txt")) as f:
README = f.read()
with open(os.path.join(here, "CHANGES.txt")) as f:
CHANGES = f.read()
requires = [
"pyramid",
"pyramid_chameleon",
"pyramid_debugtoolbar",
"pyramid_tm",
"SQLAlchemy",
"transaction",
"zope.sqlalchemy",
"waitress",
]
setup(
name="pyramid_pycharm",
version="0.0",
description="pyramid_pycharm",
long_description=README + "\n\n" + CHANGES,
classifiers=[
"Programming Language :: Python",
"Framework :: Pyramid",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
],
author="",
author_email="",
url="",
keywords="web wsgi bfg pylons pyramid",
packages=find_packages(),
include_package_data=True,
zip_safe=False,
test_suite="pyramid_pycharm",
install_requires=requires,
entry_points="""\
[paste.app_factory]
main = pyramid_pycharm:main
[console_scripts]
initialize_pyramid_pycharm_db = pyramid_pycharm.scripts.initializedb:main
""",
)
|
from mako.template import Template
from mako.runtime import Context
from io import StringIO
mytemplate = Template("hello, ${name}!")
buf = StringIO()
# Context carries the output buffer plus a value for each template variable;
# "jack" is the sample value from the Mako documentation.
ctx = Context(buf, name="jack")
mytemplate.render_context(ctx)
print(buf.getvalue())
|
#!/usr/bin/env python
"""Test for inequality
"""
import unittest
class InequalityTest(unittest.TestCase):
def testEqual(self):
self.assertNotEqual(1, 3 - 2)
def testNotEqual(self):
self.assertEqual(2, 3 - 2)
if __name__ == "__main__":
unittest.main()
|
from tests.common import parent_id, parent_name, child_id, child_parent_id, relation, child, parent
from eralchemy.main import _intermediary_to_markdown
import re
import pytest
column_re = re.compile('(?P<key>\\*?)(?P<name>[^*].+) \\{label:"(?P<type>.+)"\\}')
# NOTE: assumes the intermediary objects (tables, columns, relations)
# expose a to_markdown() method, as in eralchemy's intermediary module.
def test_all_to_er():
    tables = [child, parent]
    relations = [relation]
    output = _intermediary_to_markdown(tables, relations)
    for element in relations + tables:
        assert element.to_markdown() in output
def assert_column_well_rendered_to_er(col):
    col_er = col.to_markdown()
    col_parsed = column_re.match(col_er)
    assert col_parsed.group("key") == ("*" if col.is_key else "")
    assert col_parsed.group("name") == col.name
    assert col_parsed.group("type") == col.type
def test_column_to_er():
    assert_column_well_rendered_to_er(parent_id)
    assert_column_well_rendered_to_er(parent_name)
    assert_column_well_rendered_to_er(child_id)
    assert_column_well_rendered_to_er(child_parent_id)
def test_relation():
    assert relation.to_markdown() in ["parent *--? child", "child ?--* parent"]
def assert_table_well_rendered_to_er(table):
    assert table.header_markdown == "[" + table.name + "]"
    table_er = table.to_markdown()
    for col in table.columns:
        assert col.to_markdown() in table_er
def test_table():
    assert_table_well_rendered_to_er(child)
    assert_table_well_rendered_to_er(parent)
|
from django.http import Http404
from django.shortcuts import render_to_response
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
###########
# CHOICES #
###########
def choice_list(request, app_label, module_name, field_name, models):
m, f = lookup_field(app_label, module_name, field_name, models)
return render_to_response("databrowse/choice_list.html", {"model": m, "field": f})
def choice_detail(request, app_label, module_name, field_name, field_val, models):
m, f = lookup_field(app_label, module_name, field_name, models)
try:
label = dict(f.field.choices)[field_val]
except KeyError:
raise Http404("Invalid choice value given")
obj_list = m.objects(**{f.field.name: field_val})
numitems = request.GET.get("items")
items_per_page = [25, 50, 100]
if numitems and numitems.isdigit() and int(numitems) > 0:
paginator = Paginator(obj_list, numitems)
else:
# fall back to default
paginator = Paginator(obj_list, items_per_page[0])
page = request.GET.get("page")
try:
obj_list_page = paginator.page(page)
except PageNotAnInteger:
# If page is not an integer, deliver first page.
obj_list_page = paginator.page(1)
except EmptyPage:
# If page is out of range (e.g. 9999), deliver last page.
obj_list_page = paginator.page(paginator.num_pages)
return render_to_response(
"databrowse/choice_detail.html",
{
"model": m,
"field": f,
"value": label,
"object_list": obj_list_page,
"items_per_page": items_per_page,
},
)
|
"""
This is testing project for KeyKeeper application.
"""
|
"""Dynamic REST (or DREST) is an extension of Django REST Framework.
DREST offers the following features on top of the standard DRF kit:
- Linked/embedded/sideloaded relationships
- Field inclusions/exlusions
- Field-based filtering/sorting
- Directory panel for the browsable API
- Optimizations
"""
|
# -*- coding: utf-8 -*-
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("contenttypes", "0001_initial"),
("tests", "0002_auto_20160310_1052"),
]
operations = [
migrations.AddField(
model_name="user",
name="favorite_pet_id",
field=models.TextField(null=True, blank=True),
preserve_default=True,
),
migrations.AddField(
model_name="user",
name="favorite_pet_type",
field=models.ForeignKey(
blank=True, to="contenttypes.ContentType", null=True
), # noqa
preserve_default=True,
),
]
|
"""FamilySearch User submodule"""
# Python imports
# Magic
class User(object):
"""https://familysearch.org/developers/docs/api/resources#user"""
def __init__(self):
"""https://familysearch.org/developers/docs/api/examples#user"""
pass
def current_user(self):
"""https://familysearch.org/developers/docs/api/users/Current_User_resource"""
url = self.root_collection["response"]["collections"][0]["links"][
"current-user"
]["href"]
return url
def current_user_person(self):
"""https://familysearch.org/developers/docs/api/tree/Current_Tree_Person_resource"""
try:
url = self.collections["FSFT"]["response"]["collections"][0]["links"][
"current-user-person"
]["href"]
except KeyError:
self.update_collection("FSFT")
url = self.collections["FSFT"]["response"]["collections"][0]["links"][
"current-user-person"
]["href"]
return url
def agent(self, uid):
"""https://familysearch.org/developers/docs/api/users/Agent_resource"""
return self.user_base + "agents/" + uid
def current_user_history(self):
"""https://familysearch.org/developers/docs/api/users/Current_User_History_resource"""
try:
url = self.collections["FSFT"]["response"]["collections"][0]["links"][
"current-user-history"
]["href"]
except KeyError:
self.update_collection("FSFT")
url = self.collections["FSFT"]["response"]["collections"][0]["links"][
"current-user-history"
]["href"]
return url
|
"""
[Advanced] [In-development]
Export a program list to a single yaml file.
The export may contain machine-specific paths
and may need to be edited for portability.
"""
from argparse import FileType
import logging
import sys
import yaml
from chalmers.utils.cli import add_selection_group, select_programs
log = logging.getLogger("chalmers.export")
def main(args):
export_data = []
programs = select_programs(args, filter_paused=False)
for prog in programs:
export_data.append({"program": dict(prog.raw_data)})
yaml.safe_dump(export_data, args.output, default_flow_style=False)
def add_parser(subparsers):
parser = subparsers.add_parser(
"export",
help='[IN DEVELOPMENT] Export current configuration to be installed with the "import" command',
description=__doc__,
)
add_selection_group(parser)
parser.add_argument("-o", "--output", type=FileType("w"), default=sys.stdout)
parser.set_defaults(main=main)
|
"""
Linux services, this module checks the existence of linux command line
programs on import
* systemd_service
* upstart_service
* sysv_service
* cron_service
In that order
"""
import logging
import platform
import sys
from . import cron_service, sysv_service, upstart_service, systemd_service
from chalmers import errors
# Fix for AWS Linux
if sys.version_info.major == 3:
system_dist = ("system",)
else:
system_dist = (b"system",)
platform._supported_dists += system_dist
log = logging.getLogger("chalmers.service")
class NoPosixSystemService(object):
def __init__(self, target_user=None):
supported_dists = platform._supported_dists + system_dist
linux = platform.linux_distribution(supported_dists=supported_dists)
raise errors.ChalmersError(
"Could not detect system service for platform %s (tried systemd, sysv init and upstart)"
% linux[0]
)
if systemd_service.check():
PosixSystemService = systemd_service.SystemdService
elif sysv_service.check():
PosixSystemService = sysv_service.SysVService
elif upstart_service.check():
PosixSystemService = upstart_service.UpstartService
else:
PosixSystemService = NoPosixSystemService
PosixLocalService = cron_service.CronService
|
import abc
import logging
import traceback
import servicemanager
import win32event, win32service, win32api
from win32serviceutil import ServiceFramework
log = logging.getLogger(__name__)
class WindowsService(ServiceFramework, metaclass=abc.ABCMeta):
"""
Base windows service class that provides all the nice things that a python
service needs
"""
def __init__(self, args):
try:
self._svc_name_ = args[0]
self._svc_display_name_ = args[0]
ServiceFramework.__init__(self, args)
self.stop_event = win32event.CreateEvent(None, 0, 0, None)
except Exception:
self.log("Error in WindowsService.__init__")
self.log(traceback.format_exc())
raise
def log(self, msg):
"Log to the NTEventlog"
servicemanager.LogInfoMsg(str(msg))
def sleep(self, sec):
win32api.Sleep(sec * 1000, True)
def SvcDoRun(self):
self.log("start")
self.ReportServiceStatus(win32service.SERVICE_START_PENDING)
try:
self.ReportServiceStatus(win32service.SERVICE_RUNNING)
self.log("start")
self.start()
self.ReportServiceStatus(win32service.SERVICE_STOPPED)
# self.log('wait')
# win32event.WaitForSingleObject(self.stop_event, win32event.INFINITE)
self.log("done")
except Exception:
self.log("Error in WindowsService.SvcDoRun")
self.log(traceback.format_exc())
self.SvcStop()
    def SvcStop(self):
self.ReportServiceStatus(win32service.SERVICE_STOP_PENDING)
self.log("stopping")
self.stop()
self.log("stopped")
win32event.SetEvent(self.stop_event)
self.ReportServiceStatus(win32service.SERVICE_STOPPED)
|
# Copyright (c) 2014 Johan Burke
# Distributed under the MIT software license. See http://www.opensource.org/licenses/mit-license.php.
from ..pyelliptic.ecc import *
from ..threads.threadutils import *
from ..constants import *
from .key import *
import hashlib
from struct import *
import sys
def encodeInt(val, alphabet=ALPHABET):
base = len(alphabet)
result = ""
while val > 0:
rem = val % base
result = str(alphabet[rem]) + result
val = val // base
return result
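# encodeInt is plain base conversion over the given alphabet, e.g.
# encodeInt(255, "0123456789abcdef") == "ff" (and encodeInt(0) == "").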
class Address:
def __init__(self, hashValue, version=VERSION):
self.version = version
self.hashValue = hashValue
self.encodedValue = ""
def encodeVersion(self):
# return the version as a big-endian unsigned byte.
return pack(">B", self.version)
def encode(self):
a = self.encodeVersion() + self.hashValue
sha = hashlib.new("sha512")
sha.update(a)
sha.update(sha.digest())
checksum = sha.digest()[0:2]
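        # i.e. checksum = sha512(a + sha512(a))[:2], since digest() does not
        # finalize the hash and update() keeps appending to the same state.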
intValue = int.from_bytes(a + checksum, "big")
# this value is in base 64
self.encodedValue = encodeInt(intValue)
def genKey():
curve = ECC()
pubKey = curve.get_pubkey()
sha = hashlib.new("sha512")
sha.update(pubKey)
ripemd = hashlib.new("ripemd160")
ripemd.update(sha.digest())
sha.update(ripemd.digest())
ripemd.update(sha.digest())
# safePrint(ripemd.digest())
a = Address(ripemd.digest())
a.encode()
key = Key(pubKey, curve.get_privkey(), a.encodedValue)
return key
|
from anymesh import AnyMesh, AnyMeshDelegateProtocol
class LeftDelegate(AnyMeshDelegateProtocol):
def connected_to(self, device_info):
print(("left connected to " + device_info.name))
def disconnected_from(self, name):
pass
def received_msg(self, message):
print(("left received message from " + message.sender))
print(("message: " + message.data["msg"]))
leftMesh.request("right", {"msg": "back at ya righty!"})
class RightDelegate(AnyMeshDelegateProtocol):
def connected_to(self, device_info):
print(("right connected to " + device_info.name))
rightMesh.request("left", {"msg": "hey lefty!"})
def disconnected_from(self, name):
pass
def received_msg(self, message):
print(("right received message from " + message.sender))
print(("message: " + message.data["msg"]))
leftMesh = AnyMesh("left", "global", LeftDelegate())
rightMesh = AnyMesh("right", "global", RightDelegate())
AnyMesh.run()
|
import unittest
import doctest
import urwid
def load_tests(loader, tests, ignore):
module_doctests = [
urwid.widget,
urwid.wimp,
urwid.decoration,
urwid.display_common,
urwid.main_loop,
urwid.monitored_list,
urwid.raw_display,
"urwid.split_repr", # override function with same name
urwid.util,
urwid.signals,
]
for m in module_doctests:
tests.addTests(
doctest.DocTestSuite(
m, optionflags=doctest.ELLIPSIS | doctest.IGNORE_EXCEPTION_DETAIL
)
)
return tests
|
import logging
log = logging.getLogger(__name__)
EXCLUDED_LOG_VARS = [
"threadName",
"name",
"thread",
"created",
"process",
"processName",
"args",
"module",
"filename",
"levelno",
"exc_text",
"pathname",
"lineno",
"msg",
"exc_info",
"message",
"funcName",
"relativeCreated",
"levelname",
"msecs",
"asctime",
]
def register_logging(logger, client_config, cls):
found = False
for handler in logger.handlers:
if isinstance(handler, cls):
found = True
reg_handler = handler
if not found:
reg_handler = cls(client_config=client_config)
logger.addHandler(reg_handler)
return reg_handler
def unregister_logger(logger, handler):
logger.removeHandler(handler)
|
import uuid
import datetime
from appenlight_client.timing import get_local_storage
from appenlight_client.timing import default_timer
from appenlight_client.client import PY3
import logging
log = logging.getLogger(__name__)
class AppenlightWSGIWrapper(object):
__version__ = "0.3"
def __init__(self, app, appenlight_client):
self.app = app
self.appenlight_client = appenlight_client
def __call__(self, environ, start_response):
"""Run the application and conserve the traceback frames.
        Also determine whether the response was a 404.
"""
environ["appenlight.request_id"] = str(uuid.uuid4())
appenlight_storage = get_local_storage()
# clear out thread stats on request start
appenlight_storage.clear()
app_iter = None
detected_data = []
create_report = False
traceback = None
http_status = 200
start_time = default_timer()
def detect_headers(status, headers, *k, **kw):
detected_data[:] = status[:3], headers
return start_response(status, headers, *k, **kw)
# inject client instance reference to environ
if "appenlight.client" not in environ:
environ["appenlight.client"] = self.appenlight_client
# some bw. compat stubs
def local_report(message, include_traceback=True, http_status=200):
environ["appenlight.force_send"] = True
def local_log(level, message):
environ["appenlight.force_send"] = True
environ["appenlight.report"] = local_report
environ["appenlight.log"] = local_log
if "appenlight.tags" not in environ:
environ["appenlight.tags"] = {}
if "appenlight.extra" not in environ:
environ["appenlight.extra"] = {}
try:
app_iter = self.app(environ, detect_headers)
return app_iter
except Exception:
if hasattr(app_iter, "close"):
app_iter.close()
# we need that here
traceback = self.appenlight_client.get_current_traceback()
# by default reraise exceptions for app/FW to handle
if self.appenlight_client.config["reraise_exceptions"]:
raise
try:
start_response(
"500 INTERNAL SERVER ERROR",
[("Content-Type", "text/html; charset=utf-8")],
)
except Exception:
environ["wsgi.errors"].write(
"AppenlightWSGIWrapper middleware catched exception "
"in streamed response at a point where response headers "
"were already sent.\n"
)
else:
return "Server Error"
finally:
# report 500's and 404's
# report slowness
end_time = default_timer()
appenlight_storage.thread_stats["main"] = end_time - start_time
delta = datetime.timedelta(seconds=(end_time - start_time))
stats, slow_calls = appenlight_storage.get_thread_stats()
if "appenlight.view_name" not in environ:
environ["appenlight.view_name"] = getattr(
appenlight_storage, "view_name", ""
)
if detected_data and detected_data[0]:
http_status = int(detected_data[0])
if self.appenlight_client.config["slow_requests"] and not environ.get(
"appenlight.ignore_slow"
):
# do we have slow calls/request ?
if (
delta >= self.appenlight_client.config["slow_request_time"]
or slow_calls
):
create_report = True
if "appenlight.__traceback" in environ and not environ.get(
"appenlight.ignore_error"
):
# get traceback gathered by pyramid tween
traceback = environ["appenlight.__traceback"]
del environ["appenlight.__traceback"]
http_status = 500
create_report = True
if (
traceback
and self.appenlight_client.config["report_errors"]
and not environ.get("appenlight.ignore_error")
):
http_status = 500
create_report = True
elif self.appenlight_client.config["report_404"] and http_status == 404:
create_report = True
if create_report:
self.appenlight_client.py_report(
environ,
traceback,
message=None,
http_status=http_status,
start_time=datetime.datetime.utcfromtimestamp(start_time),
end_time=datetime.datetime.utcfromtimestamp(end_time),
request_stats=stats,
slow_calls=slow_calls,
)
# dereference
del traceback
self.appenlight_client.save_request_stats(
stats, view_name=environ.get("appenlight.view_name", "")
)
if self.appenlight_client.config["logging"]:
records = self.appenlight_client.log_handlers_get_records()
self.appenlight_client.log_handlers_clear_records()
self.appenlight_client.py_log(
environ,
records=records,
r_uuid=environ["appenlight.request_id"],
created_report=create_report,
)
# send all data we gathered immediately at the end of request
self.appenlight_client.check_if_deliver(
self.appenlight_client.config["force_send"]
or environ.get("appenlight.force_send")
)
|