Dataset Viewer
Auto-converted to Parquet
Columns:
- id: int64 (values 0 – 10k)
- text: string (lengths 186 – 4k)
- length: int64 (values 128 – 1.02k)
Row 0 (length: 397)

```python
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import find_packages, setup

with open('README.md', 'r') as fp:
    README = fp.read()

setup(
    name='zerobounce',
    version='0.1.5',
    description='ZeroBounce Python API - https://www.zerobounce.net.',
    author='Tudor Aursulesei',
    author_email='[email protected]',
    url='http://github.com/zerobounce/zerobounce-python-api',
    long_description=README,
    long_description_content_type="text/markdown",
    keywords=['email', 'validation'],  # arbitrary keywords
    download_url='https://github.com/freshsecurity/zerobounce-python-api/dist/0.1.5.tar.gz',  # I'll explain this in a second
    packages=find_packages(),
    install_requires=[
        'requests==2.21.0',
    ],
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
    ],
)
```
1
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. """ Web Traversal Library (WTL) Provides a bottom abstraction layer for automatic web workflows. """ from .actions import Action, Actions, ElementAction from .classifiers import ActiveElementFilter, ElementClassifier, ScalingMode, ViewClassifier from .color import Color from .config import Config from .error import ElementNotFoundError, Error, ScrapingError, WebDriverSendError, WindowClosedError from .geometry import Point, Rectangle from .javascript import JavascriptWrapper from .policies import multi_tab_coroutine, single_tab, single_tab_coroutine from .scraper import Scraper from .selector import Selector from .snapshot import Elements, PageElement, PageSnapshot from .version import __version__ from .view import View from .window import Window from .workflow import Workflow
389
2
config = { "interfaces": { "google.ads.googleads.v6.services.DynamicSearchAdsSearchTermViewService": { "retry_codes": { "retry_policy_1_codes": [ "UNAVAILABLE", "DEADLINE_EXCEEDED" ], "no_retry_codes": [] }, "retry_params": { "retry_policy_1_params": { "initial_retry_delay_millis": 5000, "retry_delay_multiplier": 1.3, "max_retry_delay_millis": 60000, "initial_rpc_timeout_millis": 3600000, "rpc_timeout_multiplier": 1.0, "max_rpc_timeout_millis": 3600000, "total_timeout_millis": 3600000 }, "no_retry_params": { "initial_retry_delay_millis": 0, "retry_delay_multiplier": 0.0, "max_retry_delay_millis": 0, "initial_rpc_timeout_millis": 0, "rpc_timeout_multiplier": 1.0, "max_rpc_timeout_millis": 0, "total_timeout_millis": 0 } }, "methods": { "GetDynamicSearchAdsSearchTermView": { "timeout_millis": 60000, "retry_codes_name": "retry_policy_1_codes", "retry_params_name": "retry_policy_1_params" } } } } }
663
3
import hashlib import os import re import shutil import subprocess import sys # copy the required files into repo root shutil.copy('docs/favicon.ico', '.') shutil.copy('deploy/windows/instaloader.spec', '.') code = """ import contextlib import psutil import subprocess def __main(): with contextlib.suppress(AttributeError, psutil.Error): if psutil.Process().parent().parent().name() == "explorer.exe": subprocess.Popen("powershell -NoExit -Command \\\"& '{0}'\\\"".format(sys.argv[0])) return main() if __name__ == "__main__": __main() """ with open('instaloader/__main__.py', 'r') as f: # adjust imports for changed file structure regex = re.compile(r'from (?:(\.[^ ]+ )|\.( ))import') lines = [regex.sub(r'from instaloader\1\2import', line) for line in f.readlines()] # insert code for magic exe behavior index = lines.index('if __name__ == "__main__":\n') code_lines = [cl + '\n' for cl in code.splitlines()] for i, code_line in enumerate(code_lines): if i + index < len(lines): lines[i + index] = code_line else: lines.extend(code_lines[i:]) break with open('__main__.py', 'w+') as f: f.writelines(lines) # install dependencies and invoke PyInstaller commands = ["pip install pipenv==2018.11.26", "pipenv sync --dev", "pipenv run pyinstaller --log-level=DEBUG instaloader.spec"] for command in commands: print() print('#' * (len(command) + 6)) print('## {} ##'.format(command)) print('#' * (len(command) + 6)) print(flush=True) err = subprocess.Popen(command).wait() if err != 0: sys.exit(err) # calculate and store MD5 hash for created executable hash_md5 = hashlib.md5() with open('dist/instaloader.exe', 'rb') as f: for chunk in iter(lambda: f.read(4096), b''): hash_md5.update(chunk) with open('dist/instaloader.exe.md5', 'w+') as f: f.write('{} *instaloader.exe\n'.format(hash_md5.hexdigest())) # Create ZIP file shutil.make_archive('instaloader-{}-windows-standalone'.format(os.getenv('VERSION_TAG')), 'zip', 'dist')
887
4
from flask import jsonify from .settings.default import DefaultConfig from . import create_app # 创建flask应用 app = create_app(DefaultConfig, enable_config_file=True) @app.route('/') def route_map(): """ 主视图 :return: """ rules_iterator = app.url_map.iter_rules() return jsonify({rule.endpoint: rule.rule for rule in rules_iterator if rule.endpoint not in ('route_map', 'static')})
156
5
# Generated by Django 2.0 on 2019-05-06 13:31 import datetime from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('project', '0014_auto_20190505_1708'), ] operations = [ migrations.AlterField( model_name='menuitem', name='time', field=models.TimeField(default=datetime.datetime(2019, 5, 6, 13, 31, 40, 37749)), ), ]
196
6
# coding: utf-8 from leancloud import Engine from app import app from leancloud import Query import leancloud #from leancloud import HttpsRedirectMiddleware #app = HttpsRedirectMiddleware(app) engine = Engine(app) @engine.define def hello(**params): if 'name' in params: return 'Hello, {}!'.format(params['name']) else: return 'Hello, 720testCloud!' @engine.define def averageStars(movie): sum = 0 query = Query('Review') try: reviews = query.find() except leancloud.LeanCloudError, e: print e raise e for review in reviews: sum += review.get('starts') return sum / len(reviews)
256
7
"""Model config in json format""" CONFIG = { 'data': { 'train_path': 'data/task1_headline_ABSA_train.json', 'test_path': 'data/task1_headline_ABSA_test.json', 'n_level_1_classes': 4, 'n_level_2_corporate': 12, 'n_level_2_economy': 2, 'n_level_2_market': 4, 'n_level_2_stock': 9, 'vocab_size': 1000, 'seed': 21, 'buffer_size': 1000, 'batch_size': 32, 'train_split': 0.80, 'validation_split': 0.20, 'aspect_labels': [['Corporate, Appointment'], ['Corporate, Company Communication'], ['Corporate, Dividend Policy'], ['Corporate, Financial'], ['Corporate, Legal'], ['Corporate, M&A'], ['Corporate, Regulatory'], ['Corporate, Reputation'], ['Corporate, Risks'], ['Corporate, Rumors'], ['Corporate, Sales'], ['Corporate, Strategy'], ['Economy, Central Banks'], ['Economy, Trade'], ['Market, Conditions'], ['Market, Currency'], ['Market, Market'], ['Market, Volatility'], ['Stock, Buyside'], ['Stock, Coverage'], ['Stock, Fundamentals'], ['Stock, Insider Activity'], ['Stock, IPO'], ['Stock, Options'], ['Stock, Price Action'], ['Stock, Signal'], ['Stock, Technical Analysis']] }, 'train': { 'batch_size': 32, 'epochs': 10, 'learning_rate': 1e-4 }, 'model': { 'vocab_size': 1000 }, }
759
8
# Generated by Django 3.1.4 on 2020-12-20 13:57 import core.models from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('core', '0004_recipe'), ] operations = [ migrations.AddField( model_name='recipe', name='image', field=models.ImageField(null=True, upload_to=core.models.recipe_image_file_path), ), ]
189
Row 9 (length: 492)

```python
from common import log
from common.constants import CLIENTS
from common.vsphere_api import VsphereApi

logger = log.get_logger(__name__)


def _get_vsphere_api(hostname):
    return VsphereApi(hostname)


def handler(event, context):
    logger.debug("Beginning network copy!")
    from_device_number = event.get("from_device", None)
    to_device_number = event.get("to_device", None)
    logger.bind(from_device=from_device_number, to_device=to_device_number)

    # Get the vms in the vCenter from Zamboni
    from_hyp = CLIENTS.zamboni_client.get_hyps_by_device_id(from_device_number)
    to_hyp = CLIENTS.zamboni_client.get_hyps_by_device_id(to_device_number)
    hostname = from_hyp.get("location", None)

    # Make the call to copy networks
    try:
        vsphere_api = _get_vsphere_api(hostname)
    except Exception as e:
        logger.error(f"There was an error connecting to {hostname}.", e)
        raise Exception(f"There was an error connecting to {hostname}. {str(e)}")

    try:
        vsphere_api.copy_networks(from_hyp.get("name", None), to_hyp.get("name", None))
    except Exception as e:
        logger.error("There was an error during the network copy process.", e)
        raise Exception(f"There was an error during the network copy process. {str(e)}")

    logger.debug("Network copy complete.")
```
Row 10 (length: 777)

```python
# -*- coding: utf-8 -*-
from unittest import mock

import pytest

from pytube import YouTube
from pytube.exceptions import VideoUnavailable


@mock.patch("pytube.__main__.YouTube")
def test_prefetch_deferred(youtube):
    instance = youtube.return_value
    instance.prefetch_descramble.return_value = None
    YouTube("https://www.youtube.com/watch?v=9bZkp7q19f0", True)
    assert not instance.prefetch_descramble.called


@mock.patch("urllib.request.install_opener")
def test_install_proxy(opener):
    proxies = {"http": "http://www.example.com:3128/"}
    YouTube(
        "https://www.youtube.com/watch?v=9bZkp7q19f0",
        defer_prefetch_init=True,
        proxies=proxies,
    )
    opener.assert_called()


@mock.patch("pytube.request.get")
def test_video_unavailable(get):
    get.return_value = None
    youtube = YouTube(
        "https://www.youtube.com/watch?v=9bZkp7q19f0", defer_prefetch_init=True
    )
    with pytest.raises(VideoUnavailable):
        youtube.prefetch()


def test_video_keywords(cipher_signature):
    expected = [
        'Rewind', 'Rewind 2019', 'youtube rewind 2019', '#YouTubeRewind',
        'MrBeast', 'PewDiePie', 'James Charles', 'Shane Dawson', 'CaseyNeistat',
        'RiceGum', 'Simone Giertz', 'JennaMarbles', 'Lilly Singh',
        'emma chamberlain', 'The Try Guys', 'Fortnite', 'Minecraft', 'Roblox',
        'Marshmello', 'Garena Free Fire', 'GTA V', 'Lachlan', 'Anaysa',
        'jeffreestar', 'Noah Schnapp', 'Jennelle Eliana', 'T-Series',
        'Azzyland', 'LazarBeam', 'Dude Perfect', 'David Dobrik', 'KSI',
        'NikkieTutorials', 'Kurzgesagt', 'Jelly', 'Ariana Grande',
        'Billie Eilish', 'BLACKPINK', 'Year in Review'
    ]
    assert cipher_signature.keywords == expected
```
Row 11 (length: 921)

```python
from django.contrib.auth.middleware import AuthenticationMiddleware
from django.contrib.auth.models import User
from django.http import HttpRequest
from django.test import TestCase


class TestSessionAuthenticationMiddleware(TestCase):
    def setUp(self):
        self.user_password = 'test_password'
        self.user = User.objects.create_user('test_user',
                                             '[email protected]',
                                             self.user_password)
        self.middleware = AuthenticationMiddleware()
        self.assertTrue(self.client.login(
            username=self.user.username,
            password=self.user_password,
        ))
        self.request = HttpRequest()
        self.request.session = self.client.session

    def test_changed_password_doesnt_invalidate_session(self):
        """
        Changing a user's password shouldn't invalidate the session if session
        verification isn't activated.
        """
        session_key = self.request.session.session_key
        self.middleware.process_request(self.request)
        self.assertIsNotNone(self.request.user)
        self.assertFalse(self.request.user.is_anonymous())

        # After password change, user should remain logged in.
        self.user.set_password('new_password')
        self.user.save()
        self.middleware.process_request(self.request)
        self.assertIsNotNone(self.request.user)
        self.assertFalse(self.request.user.is_anonymous())
        self.assertEqual(session_key, self.request.session.session_key)

    def test_changed_password_invalidates_session_with_middleware(self):
        session_key = self.request.session.session_key
        with self.modify_settings(MIDDLEWARE_CLASSES={'append': ['django.contrib.auth.middleware.SessionAuthenticationMiddleware']}):
            # After password change, user should be anonymous
            self.user.set_password('new_password')
            self.user.save()
            self.middleware.process_request(self.request)
            self.assertIsNotNone(self.request.user)
            self.assertTrue(self.request.user.is_anonymous())
        # session should be flushed
        self.assertNotEqual(session_key, self.request.session.session_key)
```
Row 12 (length: 977)

```python
from typing import List, Optional


class Error(Exception):
    """Base W&B Error"""

    def __init__(self, message):
        super(Error, self).__init__(message)
        self.message = message

    # For python 2 support
    def encode(self, encoding):
        return self.message


class CommError(Error):
    """Error communicating with W&B"""

    def __init__(self, msg, exc=None):
        super(CommError, self).__init__(msg)
        self.message = msg
        self.exc = exc


class UsageError(Error):
    """API Usage Error"""
    pass


class LogError(Error):
    """Raised when wandb.log() fails"""
    pass


class LogMultiprocessError(LogError):
    """Raised when wandb.log() fails because of multiprocessing"""
    pass


class RequireError(Error):
    """Raised when wandb.require() fails"""
    pass


class ExecutionError(Error):
    """Generic execution exception"""
    pass


class DockerError(Error):
    """Raised when attempting to execute a docker command"""

    def __init__(
        self,
        command_launched: List[str],
        return_code: int,
        stdout: Optional[bytes] = None,
        stderr: Optional[bytes] = None,
    ):
        command_launched_str = " ".join(command_launched)
        error_msg = (
            f"The docker command executed was `{command_launched_str}`.\n"
            f"It returned with code {return_code}\n"
        )
        if stdout is not None:
            error_msg += f"The content of stdout is '{stdout.decode()}'\n"
        else:
            error_msg += (
                "The content of stdout can be found above the "
                "stacktrace (it wasn't captured).\n"
            )
        if stderr is not None:
            error_msg += f"The content of stderr is '{stderr.decode()}'\n"
        else:
            error_msg += (
                "The content of stderr can be found above the "
                "stacktrace (it wasn't captured)."
            )
        super().__init__(error_msg)


class LaunchError(Error):
    """Raised when a known error occurs in wandb launch"""
    pass


__all__ = [
    "Error",
    "UsageError",
    "CommError",
    "LogError",
    "DockerError",
    "LogMultiprocessError",
    "RequireError",
    "ExecutionError",
    "LaunchError",
]
```
Row 13 (length: 157)

```python
# model settings
model = dict(
    type='ImageClassifier',
    backbone=dict(
        type='OTEEfficientNet',
        version='b0'),
    neck=dict(type='GlobalAveragePooling'),
    head=dict(
        type='LinearClsHead',
        num_classes=1000,
        in_channels=1280,
        loss=dict(type='CrossEntropyLoss', loss_weight=1.0),
    ))
```
Row 14 (length: 192)

```python
import time


def echo(i):
    time.sleep(0.001)
    return i


from multiprocessing.pool import Pool

p = Pool(10)
run1 = [a for a in p.imap_unordered(echo, range(10))]
run2 = [a for a in p.imap_unordered(echo, range(10))]
run3 = [a for a in p.imap_unordered(echo, range(10))]
run4 = [a for a in p.imap_unordered(echo, range(10))]

print(run1)
print(run2)
print(run3)
print(run4)
print(run1 == run2 == run3 == run4)

# Create a deterministic process pool
```
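The four runs above usually differ because `imap_unordered` yields results in completion order. As a minimal sketch of the deterministic variant the closing comment asks for (my suggestion, not part of the dataset row), the order-preserving `Pool.imap` makes every run identical:

```python
import time
from multiprocessing.pool import Pool


def echo(i):
    time.sleep(0.001)
    return i


if __name__ == '__main__':
    with Pool(10) as p:
        # imap preserves submission order, so every run yields [0, 1, ..., 9]
        runs = [list(p.imap(echo, range(10))) for _ in range(4)]
    print(all(run == runs[0] for run in runs))  # True
```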
Row 15 (length: 381)

```python
# coding: utf-8

"""
    Isilon SDK

    Isilon SDK - Language bindings for the OneFS API  # noqa: E501

    OpenAPI spec version: 10
    Contact: [email protected]
    Generated by: https://github.com/swagger-api/swagger-codegen.git
"""

from __future__ import absolute_import

import unittest

import isi_sdk_9_0_0
from isi_sdk_9_0_0.models.network_groupnet_create_params import NetworkGroupnetCreateParams  # noqa: E501
from isi_sdk_9_0_0.rest import ApiException


class TestNetworkGroupnetCreateParams(unittest.TestCase):
    """NetworkGroupnetCreateParams unit test stubs"""

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def testNetworkGroupnetCreateParams(self):
        """Test NetworkGroupnetCreateParams"""
        # FIXME: construct object with mandatory attributes with example values
        # model = isi_sdk_9_0_0.models.network_groupnet_create_params.NetworkGroupnetCreateParams()  # noqa: E501
        pass


if __name__ == '__main__':
    unittest.main()
```
Row 16 (length: 301)

```python
twoStrings = input()
stringList = twoStrings.split(" ")
str1 = stringList[0]
str2 = stringList[1]

sum = 0
k = 0
j = 0
while True:
    if k < len(str1):
        if j < len(str2):
            multipliedCodes = ord(str1[k]) * ord(str2[j])
            sum += multipliedCodes
            k += 1
            j += 1
        else:
            sum += ord(str1[k])
            k += 1
    else:
        if j < len(str2):
            sum += ord(str2[j])
            j += 1
        else:
            break
print(sum)
```
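The loop multiplies character codes pairwise while both strings still have characters, then adds the codes of the leftover tail. An equivalent, more compact formulation (an illustrative rewrite, not part of the row) uses `itertools.zip_longest`:

```python
from itertools import zip_longest


def code_sum(str1, str2):
    total = 0
    for a, b in zip_longest(str1, str2):
        if a is not None and b is not None:
            total += ord(a) * ord(b)  # both strings still have characters
        else:
            total += ord(a or b)      # leftover tail of the longer string
    return total


assert code_sum("ab", "a") == ord("a") * ord("a") + ord("b")
```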
Row 17 (length: 491)

```python
# Copyright 2020 MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#     http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import sys
from collections import OrderedDict

import numpy as np
import torch

import monai

try:
    import ignite
    ignite_version = ignite.__version__
except ImportError:
    ignite_version = 'NOT INSTALLED'


def get_config_values():
    """
    Read the package versions into a dictionary.
    """
    output = OrderedDict()
    output["MONAI version"] = monai.__version__
    output["Python version"] = sys.version.replace("\n", " ")
    output["Numpy version"] = np.version.full_version
    output["Pytorch version"] = torch.__version__
    output["Ignite version"] = ignite_version
    return output


def print_config(file=sys.stdout):
    """
    Print the package versions to `file`. Defaults to `sys.stdout`.
    """
    for kv in get_config_values().items():
        print("%s: %s" % kv, file=file, flush=True)


def set_visible_devices(*dev_inds):
    os.environ["CUDA_VISIBLE_DEVICES"] = ",".join(map(str, dev_inds))
```
Row 18 (length: 554)

```python
# Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html

# -- Path setup --------------------------------------------------------------

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
sys.path.insert(0, os.path.abspath('.'))
sys.path.insert(0, os.path.abspath('../'))


# -- Project information -----------------------------------------------------

project = 'Algebraic differentiators'
copyright = '2021, Amine Othmane'
author = 'Amine Othmane'
master_doc = 'index'

# The full version, including alpha/beta/rc tags
release = '1.0'


# -- General configuration ---------------------------------------------------

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.coverage',
    'sphinx.ext.napoleon',
]

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']


# -- Options for HTML output -------------------------------------------------

# The theme to use for HTML and HTML Help pages.  See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
```
Row 19 (length: 188)

```python
class Player:
    def __init__(self, x, y):
        self.name = x
        self.health = y
        self.level = 1

    def take_hit(self, damage):
        self.health -= damage

    def heal(self, amount):
        self.health += amount

    def level_up(self):
        self.level += 1
        self.health = 100

    def describe(self):
        print("<{}:{}, {} HP>".format(self.name, self.level, self.health))
```
Row 20 (length: 401)

```python
import pytest

from src.junit_report import JunitTestSuite, JunitTestCase, JunitFixtureTestCase
from tests import REPORT_DIR


class TestJunitSuiteNoCases:
    @JunitTestCase()
    def dummy_test_case(self):
        pass

    @JunitTestCase()
    def other_test_case(self):
        pass

    @JunitTestCase()
    def exception_test_case(self):
        raise BrokenPipeError("PIPE")

    @pytest.fixture
    @JunitFixtureTestCase()
    def my_test_fixture(self):
        self.other_test_case()
        yield

    @pytest.fixture
    @JunitFixtureTestCase()
    def exception_fixture(self):
        self.exception_test_case()
        yield

    @JunitTestSuite(REPORT_DIR)
    def test_suite_test_case_inside_fixture(self, my_test_fixture):
        self.dummy_test_case()

    @JunitTestSuite(REPORT_DIR)
    def test_suite_test_case_inside_fixture_with_exception(self, exception_fixture):
        self.dummy_test_case()
```
Row 21 (length: 884)

```python
# Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
#     http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from __future__ import absolute_import

import pytest
from sagemaker import utils
from sagemaker.pytorch import PyTorch

from test.integration import training_dir, mnist_script, DEFAULT_TIMEOUT
from test.integration.sagemaker.timeout import timeout


@pytest.mark.skip_gpu
def test_mnist_distributed_cpu(sagemaker_session, ecr_image, instance_type, dist_cpu_backend):
    instance_type = instance_type or 'ml.c4.xlarge'
    _test_mnist_distributed(sagemaker_session, ecr_image, instance_type, dist_cpu_backend)


@pytest.mark.skip_cpu
def test_mnist_distributed_gpu(sagemaker_session, ecr_image, instance_type, dist_gpu_backend):
    instance_type = instance_type or 'ml.p2.xlarge'
    _test_mnist_distributed(sagemaker_session, ecr_image, instance_type, dist_gpu_backend)


def _test_mnist_distributed(sagemaker_session, ecr_image, instance_type, dist_backend):
    with timeout(minutes=DEFAULT_TIMEOUT):
        pytorch = PyTorch(entry_point=mnist_script,
                          role='SageMakerRole',
                          train_instance_count=2,
                          train_instance_type=instance_type,
                          sagemaker_session=sagemaker_session,
                          image_name=ecr_image,
                          hyperparameters={'backend': dist_backend, 'epochs': 2})

        training_input = pytorch.sagemaker_session.upload_data(path=training_dir,
                                                               key_prefix='pytorch/mnist')
        job_name = utils.unique_name_from_base('test-pytorch-mnist')

        pytorch.fit({'training': training_input}, job_name=job_name)
```
Row 22 (length: 163)

```python
a = b = c = d = e = f = 69
print(c)

x, y, z = 1, 2, 3
print(x)
print(y)
print(z)

data = 1, 2, 3  # Tuple
x, y, z = data
print(x)
print(y)
print(z)

# Practical applications
for t in enumerate("abcdef"):
    print(t)

ninja = ("Kakashi", "Hatake", "Jonin", "Raiton", "Sharingan")
name, clan, rank, chakra, special = ninja
print(name)
print(special)
```
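A natural extension of the unpacking examples above (my addition for illustration, not in the original snippet) is starred unpacking, which absorbs whatever the named targets don't cover:

```python
first, *middle, last = [1, 2, 3, 4, 5]
print(first)   # 1
print(middle)  # [2, 3, 4]
print(last)    # 5
```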
Row 23 (length: 442)

```python
# coding=utf-8
# Author: Jianghan LI
# Question: 093.Restore_IP_Addresses
# Date: 2017-05-13  2:22 - 2:34
# Complexity: O(C12^3)
import itertools  # needed for itertools.combinations below


class Solution(object):

    def restoreIpAddresses(self, s):
        """
        :type s: str
        :rtype: List[str]
        """
        if len(s) > 12:
            return []

        def check(s):
            return 0 <= int(s) <= 255 and str(int(s)) == s

        res = []
        for i, j, k in itertools.combinations(range(1, len(s)), 3):
            if check(s[:i]) and check(s[i:j]) and check(s[j:k]) and check(s[k:]):
                res.append(s[:i] + "." + s[i:j] + "." + s[j:k] + "." + s[k:])
        return res

# e.g. for "1234567", enumerate split points 1 <= i < j < k < 7,
# then check whether s[:i].s[i:j].s[j:k].s[k:] is a valid IP address.
```
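A quick illustrative check of the solution (the expected output follows from the 0–255-per-octet rule):

```python
print(Solution().restoreIpAddresses("25525511135"))
# ['255.255.11.135', '255.255.111.35']
```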
Row 24 (length: 676)

```python
from setuptools import find_packages, setup

import iitgauth

try:
    readme = open('README.rst').read()
except IOError:
    readme = ''

setup(
    name='django-iitg-auth',
    version='.'.join(str(i) for i in iitgauth.VERSION),
    description='``django-iitg-auth`` is a reusable Django application '
                'which provides a custom authentication backend for '
                'authenticating with IIT Guwahati webmail servers, a login '
                'form and a utility view.',
    long_description=readme,
    packages=find_packages(exclude=('tests', 'docs', 'example', )),
    author='Narendra Choudhary',
    author_email='[email protected]',
    url='https://github.com/narenchoudhary/django-iitg-auth',
    install_requires=['Django>=1.7'],
    license='BSD 3-Clause',
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Environment :: Web Environment',
        'Framework :: Django',
        'Framework :: Django :: 1.8',
        'Framework :: Django :: 1.9',
        'Framework :: Django :: 1.10',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Natural Language :: English',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Topic :: Internet :: WWW/HTTP',
        'Topic :: Software Development :: Libraries :: Python Modules',
    ],
    keywords='django library development authentication',
    zip_safe=False,
)
```
Row 25 (length: 138)

```python
"""
I18n utilities.
"""

from gettext import translation

_t = translation('udiskie', localedir=None, languages=None, fallback=True)


def _(text, *args, **kwargs):
    """Translate and then format the text with ``str.format``."""
    msg = _t.gettext(text)
    if args or kwargs:
        return msg.format(*args, **kwargs)
    else:
        return msg
```
Row 26 (length: 405)

```python
# Generated by Django 2.2.1 on 2019-05-28 07:19

from django.db import migrations, models
import django.db.models.deletion
import uuid


class Migration(migrations.Migration):

    initial = True

    dependencies = [
        ('contests', '0003_auto_20190522_1558'),
        ('countries', '0002_auto_20190522_1513'),
    ]

    operations = [
        migrations.CreateModel(
            name='Voter',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('vote_key', models.UUIDField(default=uuid.uuid4, editable=False, unique=True)),
                ('contest', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='contests.Contest')),
                ('country', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='countries.Country')),
            ],
        ),
    ]
```
Row 27 (length: 597)

```python
from typing import Dict, Optional

from common.constant import HATENA_BLOG_TO_DOC_ENTRY_DICTIONARY_PATH
from files.file_accessor import load_json, dump_json


class BlogDocEntryMapping:
    def __init__(self):
        blog_to_doc: Dict[str, str] = load_json(HATENA_BLOG_TO_DOC_ENTRY_DICTIONARY_PATH)
        self.__blog_id_to_doc_id: Dict[str, str] = blog_to_doc
        self.__doc_id_to_blog_id: Dict[str, str] = {}
        for blog_entry_id, doc_entry_id in blog_to_doc.items():
            self.__doc_id_to_blog_id[doc_entry_id] = blog_entry_id

    def get_blog_entry_id(self, doc_entry_id: str) -> Optional[str]:
        if doc_entry_id in self.__doc_id_to_blog_id:
            return self.__doc_id_to_blog_id[doc_entry_id]
        return None

    def get_doc_entry_id(self, blog_entry_id: str) -> Optional[str]:
        if blog_entry_id in self.__blog_id_to_doc_id:
            return self.__blog_id_to_doc_id[blog_entry_id]
        return None

    def push_entry_pair(self, blog_entry_id: str, doc_entry_id: str):
        self.__blog_id_to_doc_id[blog_entry_id] = doc_entry_id
        self.__doc_id_to_blog_id[doc_entry_id] = blog_entry_id

    def dump_file(self):
        dump_json(HATENA_BLOG_TO_DOC_ENTRY_DICTIONARY_PATH, self.__blog_id_to_doc_id)
```
Row 28 (length: 237)

```python
# Let's draw a square on the canvas
import turtle

##### INFO #####
# Your goal is to make the turtle walk a square on the
# screen. Let's go through the turtle commands again.

# this line creates a turtle on the screen
t = turtle.Turtle()

# this line tells that we want to see a turtle shape
t.shape("turtle")

# this line moves the turtle 100 steps forward
t.forward(100)

# here the turtle turns 90 degrees to the left
t.left(90)

# And again the turtle moves forward
t.forward(50)

##### EXERCISE #####
# What do you need to add to have the turtle walk a square
# and return to where it started?
# TIP: you can always hit 'Run' to check what you're about to draw.
# add code here:


##### Additional exercise #####
# how would you need to change the commands to have the
# turtle draw a diamond (square with tip towards the top) instead?
```
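One possible answer to the exercise, sketched here as a hint rather than the official solution (the original deliberately leaves the body blank):

```python
import turtle

t = turtle.Turtle()
t.shape("turtle")
for _ in range(4):      # four equal sides with 90-degree left turns
    t.forward(100)
    t.left(90)
```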
Row 29 (length: 361)

```python
import setuptools

with open("README.md", "r") as fh:
    long_description = fh.read()

setuptools.setup(
    name="autopacmen-Paulocracy",
    version="0.6.0",
    author="Paulocracy",
    author_email="[email protected]",
    description="The AutoPACMEN package",
    long_description=long_description,
    long_description_content_type="text/markdown",
    url="https://github.com/Paulocracy/autopacmen",
    packages=setuptools.find_packages(),
    classifiers=[
        "Programming Language :: Python :: 3",
        "License :: OSI Approved :: Apache Software License",
        "Operating System :: OS Independent",
    ],
    python_requires='>=3.7',
    install_requires=[
        "biopython",
        "cobra",
        "click",
        "openpyxl",
        "pebble",
        "requests",
        "xlsxwriter",
    ],
)
```
Row 30 (length: 218)

```python
from .abi import (  # noqa: F401
    Decodable,
    TypeStr,
)
from .bls import (  # noqa: F401
    BLSPubkey,
    BLSSignature,
)
from .encoding import (  # noqa: F401
    HexStr,
    Primitives,
)
from .enums import (  # noqa: F401
    ForkName,
)
from .ethpm import (  # noqa: F401
    URI,
    ContractName,
    Manifest,
)
from .evm import (  # noqa: F401
    Address,
    AnyAddress,
    BlockIdentifier,
    BlockNumber,
    ChecksumAddress,
    Hash32,
    HexAddress,
)
```
Row 31 (length: 181)

```python
while True:
    n = int(input('Which number do you want the times table for? [Enter a negative value to finish] '))
    print(75 * '=')
    if n < 0:
        print('Thank you for using my times-table service!!!\n\033[1;32mCome back soon ;-)')
        break
    c = 1
    while c <= 10:
        print(f'{n} x {c} = {n * c}')
        c += 1
    print(75 * '=')
```
Row 32 (length: 297)

```python
#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""
            DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
                    Version 2, December 2004

 Copyright (C) 2015 Thoms Maurice <[email protected]>

 Everyone is permitted to copy and distribute verbatim or modified
 copies of this license document, and changing it is allowed as long
 as the name is changed.

            DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
   TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION

  0. You just DO WHAT THE FUCK YOU WANT TO.
"""

from clifactory import CommandLineInterface, Argument

cli = CommandLineInterface()

users = ["John Doe", "Dick Head"]


@cli.endpoint(Argument('user', help='username to add'))
def do_user_add(args):
    users.append(args.user)
    print(users)  # Python 3 print function (was a Python 2 print statement)


@cli.endpoint()
def do_user_list(args):
    print(users)


cli.parse()
```
Row 33 (length: 859)

```python
#!/usr/bin/python
# encoding: utf-8
# pylint: disable=R0904
""" Handle imported files """

# upconvert - A universal hardware design file format converter using
# Format:       upverter.com/resources/open-json-format/
# Development:  github.com/upverter/schematic-file-converter
#
# Copyright 2011 Upverter, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import argparse
import os
import sys
import re
import logging
import tempfile
import traceback

from upconvert.upconverter import Upconverter


def filter_all(arg, top, names):
    for name in names:
        arg.append(os.path.join(top, name))


def main():
    imported_files = []
    # os.path.walk was removed in Python 3; os.walk provides the same traversal
    for top, _dirs, names in os.walk('./test/imported'):
        filter_all(imported_files, top, names)

    failed_to_autodetect = []
    failed_to_parse = []

    for file_path in imported_files:
        try:
            # test autodetection
            format = Upconverter.autodetect(file_path)
            try:
                # test conversion
                data = Upconverter.parse(file_path, format)
            except Exception:
                failed_to_parse.append(file_path)
                traceback.print_exc()
        except Exception:
            failed_to_autodetect.append(file_path)
            traceback.print_exc()

    print('\n\n')
    print('failed to autodetect: %s' % (len(failed_to_autodetect)))
    print('--')
    for f in failed_to_autodetect:
        print('%s' % (f))
    print('\n')
    print('failed to parse: %s' % (len(failed_to_parse)))
    print('--')
    for f in failed_to_parse:
        print('%s' % (f))


if __name__ == "__main__":
    main()
```
Row 34 (length: 794)

```python
"""
Django settings for school project.

For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/

For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""

# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))


# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.6/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '*i4p1bb#tq%bqspzd4s^1^^^-xywkltwscmuhh9e7ny&lo^dus'

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

TEMPLATE_DEBUG = True

ALLOWED_HOSTS = []


# Application definition

INSTALLED_APPS = (
    'boardinghouse',
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'school',
)

MIDDLEWARE_CLASSES = (
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
)

ROOT_URLCONF = 'project.urls'

WSGI_APPLICATION = 'project.wsgi.application'


# Database
# https://docs.djangoproject.com/en/1.6/ref/settings/#databases

DATABASES = {
    'default': {
        'ENGINE': 'boardinghouse.backends.postgres',
        'NAME': 'school-demo',
    }
}

# Internationalization
# https://docs.djangoproject.com/en/1.6/topics/i18n/

LANGUAGE_CODE = 'en-us'

TIME_ZONE = 'UTC'

USE_I18N = True

USE_L10N = True

USE_TZ = True


# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.6/howto/static-files/

STATIC_URL = '/static/'
```
Row 35 (length: 952)

```python
from tests.util import *
import pytest


def test_failed_always_hungry_fail_escape():
    game = get_game_turn()
    team = game.get_agent_team(game.actor)
    game.clear_board()
    passer = team.players[0]
    passer.role.skills = []
    passer.role.ag = 2
    passer.extra_skills = [Skill.THROW_TEAM_MATE, Skill.ALWAYS_HUNGRY]
    game.put(passer, Square(1, 1))
    right_stuff = team.players[1]
    right_stuff.role.skills = []
    right_stuff.extra_skills = [Skill.RIGHT_STUFF]
    right_stuff_position = Square(2, 1)
    game.put(right_stuff, right_stuff_position)

    game.step(Action(ActionType.START_PASS, player=passer))
    D6.fix(1)  # Hungry
    D6.fix(1)  # Escape
    game.step(Action(ActionType.PICKUP_TEAM_MATE, player=passer, position=right_stuff.position))
    game.step(Action(ActionType.DONT_USE_REROLL))
    game.step(Action(ActionType.DONT_USE_REROLL))

    assert game.has_report_of_type(OutcomeType.FAILED_ESCAPE_BEING_EATEN)
    assert game.has_report_of_type(OutcomeType.EATEN_DURING_ALWAYS_HUNGRY)
    assert CasualtyEffect.DEAD in right_stuff.state.injuries_gained
    assert not game.has_report_of_type(OutcomeType.TURNOVER)
    assert passer.state.used


def test_failed_always_hungry_escaped():
    game = get_game_turn()
    team = game.get_agent_team(game.actor)
    game.clear_board()
    passer = team.players[0]
    passer.role.skills = []
    passer.role.ag = 2
    passer.extra_skills = [Skill.THROW_TEAM_MATE, Skill.ALWAYS_HUNGRY]
    game.put(passer, Square(1, 1))
    right_stuff = team.players[1]
    right_stuff.role.skills = []
    right_stuff.extra_skills = [Skill.RIGHT_STUFF]
    right_stuff_position = Square(2, 1)
    game.put(right_stuff, right_stuff_position)

    game.step(Action(ActionType.START_PASS, player=passer))
    D6.fix(1)  # Hungry
    D6.fix(2)  # Escape
    D6.fix(6)  # Land
    game.step(Action(ActionType.PICKUP_TEAM_MATE, player=passer, position=right_stuff.position))
    game.step(Action(ActionType.DONT_USE_REROLL))

    assert game.has_report_of_type(OutcomeType.SUCCESSFUL_ESCAPE_BEING_EATEN)
    assert game.has_report_of_type(OutcomeType.SUCCESSFUL_LAND)
    assert not game.has_report_of_type(OutcomeType.TURNOVER)
    assert passer.state.used
    assert not right_stuff.state.used
```
Row 36 (length: 993)

```python
"""Utilities for Maps"""

from math import sqrt
from random import sample

# Rename the built-in zip (http://docs.python.org/3/library/functions.html#zip)
_zip = zip


def map_and_filter(s, map_fn, filter_fn):
    """Return a new list containing the result of calling map_fn on each
    element of sequence s for which filter_fn returns a true value.

    >>> square = lambda x: x * x
    >>> is_odd = lambda x: x % 2 == 1
    >>> map_and_filter([1, 2, 3, 4, 5], square, is_odd)
    [1, 9, 25]
    """
    # BEGIN Question 0
    return [map_fn(x) for x in s if filter_fn(x)]
    # END Question 0


def key_of_min_value(d):
    """Returns the key in dict d that corresponds to the minimum value of d.

    >>> letters = {'a': 6, 'b': 5, 'c': 4, 'd': 5}
    >>> min(letters)
    'a'
    >>> key_of_min_value(letters)
    'c'
    """
    # BEGIN Question 0
    return min(d, key=lambda x: d[x])
    # END Question 0


def zip(*sequences):
    """Returns a list of lists, where the i-th list contains the i-th
    element from each of the argument sequences.

    >>> zip(range(0, 3), range(3, 6))
    [[0, 3], [1, 4], [2, 5]]
    >>> for a, b in zip([1, 2, 3], [4, 5, 6]):
    ...     print(a, b)
    1 4
    2 5
    3 6
    >>> for triple in zip(['a', 'b', 'c'], [1, 2, 3], ['do', 're', 'mi']):
    ...     print(triple)
    ['a', 1, 'do']
    ['b', 2, 're']
    ['c', 3, 'mi']
    """
    return list(map(list, _zip(*sequences)))


def enumerate(s, start=0):
    """Returns a list of lists, where the i-th list contains i+start and
    the i-th element of s.

    >>> enumerate([6, 1, 'a'])
    [[0, 6], [1, 1], [2, 'a']]
    >>> enumerate('five', 5)
    [[5, 'f'], [6, 'i'], [7, 'v'], [8, 'e']]
    """
    # BEGIN Question 0
    return zip(range(start, start + len(s)), s)
    # END Question 0


def distance(pos1, pos2):
    """Return the Euclidean distance between pos1 and pos2, which are pairs.

    >>> distance([1, 2], [4, 6])
    5.0
    """
    return sqrt((pos1[0] - pos2[0]) ** 2 + (pos1[1] - pos2[1]) ** 2)


def mean(s):
    """Return the arithmetic mean of a sequence of numbers s.

    >>> mean([-1, 3])
    1.0
    >>> mean([0, -3, 2, -1])
    -0.5
    """
    assert len(s) > 0, 'cannot find mean of empty sequence'
    return sum(s) / len(s)
```
Row 37 (length: 682)

```python
#! /usr/bin/python
# -*- coding: utf-8 -*-

import tensorflow as tf

from tensorlayer import logging
from tensorlayer.initializers import constant
from tensorlayer.layers.core import Layer

__all__ = [
    'Scale',
]


class Scale(Layer):
    """The :class:`Scale` class multiplies the layer outputs by a trainable scale value.
    Usually used on the output of a binary net.

    Parameters
    ----------
    init_scale : float
        The initial value for the scale factor.
    name : a str
        A unique layer name.

    Examples
    ----------
    >>> inputs = tl.layers.Input([8, 3])
    >>> dense = tl.layers.Dense(n_units=10)(inputs)
    >>> outputs = tl.layers.Scale(init_scale=0.5)(dense)
    >>> model = tl.models.Model(inputs=inputs, outputs=[dense, outputs])
    >>> dense_out, scale_out = model(data, is_train=True)

    """

    def __init__(
            self,
            init_scale=0.05,
            name='scale',
    ):
        super(Scale, self).__init__(name)
        self.init_scale = init_scale

        self.build((None, ))
        self._built = True

        logging.info("Scale %s: init_scale: %f" % (self.name, self.init_scale))

    def __repr__(self):
        s = '{classname}('
        s += 'init_scale={init_scale},'
        s += 'name={name}'
        s += ")"
        return s.format(classname=self.__class__.__name__, **self.__dict__)

    def build(self, inputs_shape):
        self.scale = self._get_weights("scale", shape=[1], init=constant(value=self.init_scale))

    @tf.function
    def forward(self, inputs):
        outputs = inputs * self.scale
        return outputs
```
Row 38 (length: 329)

```python
# Copyright 2021 NVIDIA Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

import os

import pandas as pd

from legate import pandas as lp

path = os.path.join(
    os.path.dirname(__file__), "files", "read_csv_datetime.csv"
)

df = pd.read_csv(path, parse_dates=[1])
ldf = lp.read_csv(path, parse_dates=[1])
assert ldf.equals(df)

df = pd.read_csv(path, names=["a", "b"], parse_dates=["b"])
ldf = lp.read_csv(path, names=["a", "b"], parse_dates=["b"])
assert ldf.equals(df)
```
Row 39 (length: 295)

```python
# Generated by Django 3.0.6 on 2020-05-18 03:00

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('profiles', '0002_auto_20200517_1855'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='profile',
            name='created_at',
        ),
        migrations.RemoveField(
            model_name='profile',
            name='username',
        ),
        migrations.AlterField(
            model_name='profile',
            name='description',
            field=models.TextField(max_length=2000, null=True),
        ),
    ]
```
Row 40 (length: 143)

```python
from flask import Flask

from config.custom_exception import handle_not_acceptable
from controller.restaurants_controller import restaurants_blueprint
from documented_endpoints import blueprint as documented_endpoint

app = Flask(__name__)
app.config['RESTPLUS_MASK_SWAGGER'] = False
app.register_blueprint(restaurants_blueprint)
app.register_blueprint(documented_endpoint)
app.register_error_handler(406, handle_not_acceptable)

if __name__ == '__main__':
    app.run()
```
Row 41 (length: 942)

```python
import bitstring

zbase32_chars = b'ybndrfg8ejkmcpqxot1uwisza345h769'

# 256-entry reverse lookup table (255 marks an invalid character)
zbase32_revchars = [
    255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
    255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
    255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
    255, 18, 255, 25, 26, 27, 30, 29, 7, 31, 255, 255, 255, 255, 255, 255,
    255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
    255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
    255, 24, 1, 12, 3, 8, 5, 6, 28, 21, 9, 10, 255, 11, 2, 16,
    13, 14, 4, 22, 17, 19, 255, 20, 15, 0, 23, 255, 255, 255, 255, 255,
    255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
    255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
    255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
    255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
    255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
    255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
    255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,
    255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255
]


def bitarray_to_u5(barr):
    assert len(barr) % 5 == 0
    ret = []
    s = bitstring.ConstBitStream(barr)
    while s.pos != s.len:
        ret.append(s.read(5).uint)
    return ret


def u5_to_bitarray(arr):
    ret = bitstring.BitArray()
    for a in arr:
        ret += bitstring.pack("uint:5", a)
    return ret


def encode(b):
    uint5s = bitarray_to_u5(b)
    res = [zbase32_chars[c] for c in uint5s]
    return bytes(res)


def decode(b):
    if isinstance(b, str):
        b = b.encode('ASCII')
    uint5s = []
    for c in b:
        uint5s.append(zbase32_revchars[c])
    dec = u5_to_bitarray(uint5s)
    return dec.bytes
```
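A round-trip sanity check for the functions above (illustrative usage; note the input must be a whole number of 5-bit groups, so a 5-byte message encodes cleanly):

```python
msg = b'hello'             # 40 bits = eight 5-bit groups
token = encode(msg)        # zbase32 text form, as bytes
assert decode(token) == msg
```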
Row 42 (length: 503)

```python
from bitmovin.resources.models import AbstractModel
from bitmovin.utils import Serializable


class AbstractMP4Representation(AbstractModel, Serializable):

    def __init__(self, encoding_id, muxing_id, media_file,
                 language=None, track_name=None, id_=None, custom_data=None):
        super().__init__(id_=id_, custom_data=custom_data)
        self.encodingId = encoding_id
        self.muxingId = muxing_id
        self.mediaFile = media_file
        self.language = language
        self.trackName = track_name

    @classmethod
    def parse_from_json_object(cls, json_object):
        id_ = json_object.get('id')
        custom_data = json_object.get('customData')
        encoding_id = json_object.get('encodingId')
        muxing_id = json_object.get('muxingId')
        media_file = json_object.get('mediaFile')
        language = json_object.get('language')
        track_name = json_object.get('trackName')

        abstract_mp4_representation = AbstractMP4Representation(
            id_=id_, custom_data=custom_data,
            encoding_id=encoding_id, muxing_id=muxing_id, media_file=media_file,
            language=language, track_name=track_name)

        return abstract_mp4_representation
```
Row 43 (length: 858)

```python
# -*- coding: utf-8 -*-

from setuptools import setup, find_packages

package_name = "eutils"

short_description = open("doc/short-description.txt").read()
long_description = open("README.rst").read()

setup(
    author=package_name + " Committers",
    description=short_description.replace("\n", " "),
    license="Apache License 2.0 (http://www.apache.org/licenses/LICENSE-2.0)",
    long_description=long_description,
    name=package_name,
    packages=find_packages(),
    use_scm_version=True,
    zip_safe=True,
    author_email="[email protected]",
    url="https://github.com/biocommons/" + package_name,
    classifiers=[
        "Development Status :: 3 - Alpha",
        "Intended Audience :: Developers",
        "Intended Audience :: Healthcare Industry",
        "Intended Audience :: Science/Research",
        "License :: OSI Approved :: Apache Software License",
        "Operating System :: OS Independent",
        "Programming Language :: Python",
        "Programming Language :: Python :: 2",
        "Programming Language :: Python :: 3",
        "Topic :: Database :: Front-Ends",
        "Topic :: Scientific/Engineering :: Bio-Informatics",
        "Topic :: Scientific/Engineering :: Medical Science Apps.",
    ],
    keywords=[
        'bioinformatics',
    ],
    install_requires=[
        'lxml',
        'pytz',
        'requests',
        'six',
    ],
    setup_requires=[
        "pytest-runner",
        "setuptools_scm",
        "wheel",
    ],
    tests_require=[
        "mock",
        "pytest",
        "pytest-cov",
        "tox",
        "vcrpy",
    ],
)

# <LICENSE>
# Copyright 2016 Source Code Committers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# </LICENSE>
```
Row 44 (length: 820)

```python
"""Logging to both file and terminal"""
import logging
import logging.handlers  # needed for RotatingFileHandler below
import os
from pathlib import Path
import sys


# Instantiate LOGGER
LOGGER = logging.getLogger("optimade")
LOGGER.setLevel(logging.DEBUG)

# Handler
CONSOLE_HANDLER = logging.StreamHandler(sys.stdout)
try:
    from optimade.server.config import CONFIG

    CONSOLE_HANDLER.setLevel(CONFIG.log_level.value.upper())
    if CONFIG.debug:
        CONSOLE_HANDLER.setLevel(logging.DEBUG)
except ImportError:
    CONSOLE_HANDLER.setLevel(os.getenv("OPTIMADE_LOG_LEVEL", "INFO").upper())

# Formatter; try to use uvicorn default, otherwise just use built-in default
try:
    from uvicorn.logging import DefaultFormatter

    CONSOLE_FORMATTER = DefaultFormatter("%(levelprefix)s [%(name)s] %(message)s")
    CONSOLE_HANDLER.setFormatter(CONSOLE_FORMATTER)
except ImportError:
    pass

# Add handler to LOGGER
LOGGER.addHandler(CONSOLE_HANDLER)

# Save a file with all messages (DEBUG level)
try:
    from optimade.server.config import CONFIG

    LOGS_DIR = CONFIG.log_dir
except ImportError:
    LOGS_DIR = Path(os.getenv("OPTIMADE_LOG_DIR", "/var/log/optimade/")).resolve()

try:
    LOGS_DIR.mkdir(exist_ok=True)

    # Handlers
    FILE_HANDLER = logging.handlers.RotatingFileHandler(
        LOGS_DIR.joinpath("optimade.log"), maxBytes=1000000, backupCount=5
    )
except OSError:
    LOGGER.warning(
        f"""Log files are not saved.

This is usually due to not being able to access a specified log folder or
write to files in the specified log location, i.e., a `PermissionError` has
been raised.

To solve this, either set the OPTIMADE_LOG_DIR environment variable to a
location you have permission to write to or create the {LOGS_DIR} folder,
which is the default logging folder, with write permissions for the Unix
user running the server.
"""
    )
else:
    FILE_HANDLER.setLevel(logging.DEBUG)

    # Formatter
    FILE_FORMATTER = logging.Formatter(
        "[%(levelname)-8s %(asctime)s %(filename)s:%(lineno)d][%(name)s] %(message)s",
        "%d-%m-%Y %H:%M:%S",
    )
    FILE_HANDLER.setFormatter(FILE_FORMATTER)

    # Add handler to LOGGER
    LOGGER.addHandler(FILE_HANDLER)
```
Row 45 (length: 942)

```python
""" Unit tests for decay.py"""

# Author: Genevieve Hayes
# License: BSD 3 clause

import unittest
from mlrose import GeomDecay, ArithDecay, ExpDecay, CustomSchedule


class TestDecay(unittest.TestCase):
    """Tests for decay.py."""

    @staticmethod
    def test_geom_above_min():
        """Test geometric decay evaluation function for case where result is
        above the minimum"""
        schedule = GeomDecay(init_temp=10, decay=0.95, min_temp=1)
        x = schedule.evaluate(5)
        assert round(x, 5) == 7.73781

    @staticmethod
    def test_geom_below_min():
        """Test geometric decay evaluation function for case where result is
        below the minimum"""
        schedule = GeomDecay(init_temp=10, decay=0.95, min_temp=1)
        x = schedule.evaluate(50)
        assert x == 1

    @staticmethod
    def test_arith_above_min():
        """Test arithmetic decay evaluation function for case where result is
        above the minimum"""
        schedule = ArithDecay(init_temp=10, decay=0.95, min_temp=1)
        x = schedule.evaluate(5)
        assert x == 5.25

    @staticmethod
    def test_arith_below_min():
        """Test arithmetic decay evaluation function for case where result is
        below the minimum"""
        schedule = ArithDecay(init_temp=10, decay=0.95, min_temp=1)
        x = schedule.evaluate(50)
        assert x == 1

    @staticmethod
    def test_exp_above_min():
        """Test exponential decay evaluation function for case where result is
        above the minimum"""
        schedule = ExpDecay(init_temp=10, exp_const=0.05, min_temp=1)
        x = schedule.evaluate(5)
        assert round(x, 5) == 7.78801

    @staticmethod
    def test_exp_below_min():
        """Test exponential decay evaluation function for case where result is
        below the minimum"""
        schedule = ExpDecay(init_temp=10, exp_const=0.05, min_temp=1)
        x = schedule.evaluate(50)
        assert x == 1

    @staticmethod
    def test_custom():
        """Test custom evaluation function"""
        # Define custom schedule function
        def custom(t, c):
            return t + c

        kwargs = {'c': 10}

        schedule = CustomSchedule(custom, **kwargs)
        x = schedule.evaluate(5)
        assert x == 15


if __name__ == '__main__':
    unittest.main()
```
Row 46 (length: 364)

```python
class Student(object):

    def __init__(self, name, gender):
        # store as name-mangled "private" attributes so the accessors
        # below (and the _Student__name demo) work
        self.__name = name
        self.__gender = gender

    def get_name(self):
        return self.__name

    def get_score(self):
        return self.__score

    def set_score(self, score):
        if 0 <= score <= 100:
            self.__score = score
        else:
            raise ValueError('bad score')

    def get_grade(self):
        if self.__score >= 90:
            return 'A'
        elif self.__score >= 60:
            return 'B'
        else:
            return 'C'


bart = Student('Bart Simpson', 59)
print('bart.get_name() =', bart.get_name())
bart.set_score(60)
print('bart.get_score() =', bart.get_score())
print('DO NOT use bart._Student__name:', bart._Student__name)
```
Row 47 (length: 261)

```python
import re

from sublime import Region
import sublime_plugin

REPLACEMENTS = {
    '\u00a0': ' ',  # no-break space
    '\u200b': '',   # zero-width space
}


class UnicodeTrapsListener(sublime_plugin.EventListener):
    @staticmethod
    def on_pre_save(view):
        view.run_command('unicode_traps')


class UnicodeTraps(sublime_plugin.TextCommand):
    def run(self, edit):
        all_file = self.view.substr(Region(0, self.view.size()))
        matches = list(re.finditer('[%s]' % ''.join(REPLACEMENTS), all_file))
        for match in reversed(matches):
            self.view.replace(edit, Region(*match.span()), REPLACEMENTS[match.group()])
```
Row 48 (length: 177)

```python
"""Test features process."""
import logging
import unittest

from titanic.features.feature_config import CONFIG

logging.basicConfig()
LOGGER = logging.getLogger(__name__)
LOGGER.setLevel(logging.INFO)


class TestFeatureConfig(unittest.TestCase):
    """Simple test example."""

    def setUp(self):
        """Setup with basic ETL."""
        self.config = CONFIG

    def test_config_type(self):
        """Test that the feature config loads as a dict."""
        self.assertIsInstance(self.config, dict)
```
Row 49 (length: 546)

```python
import unittest

import networkx as nx

import dwave_networkx as dnx
from dimod import ExactSolver, SimulatedAnnealingSampler, qubo_energy


class TestMaxCut(unittest.TestCase):
    # def test_edge_cases(self):
    #     # get the empty graph
    #     G = nx.Graph()

    #     S = dnx.maximum_cut(G, ExactSolver())
    #     self.assertTrue(len(S) == 0)

    #     S = dnx.weighted_maximum_cut(G, ExactSolver())
    #     self.assertTrue(len(S) == 0)

    def test_typical_cases(self):
        G = nx.complete_graph(10)

        S = dnx.maximum_cut(G, ExactSolver())
        self.assertTrue(len(S) == 5)  # half of the nodes

        with self.assertRaises(dnx.DWaveNetworkXException):
            S = dnx.weighted_maximum_cut(G, ExactSolver())

        nx.set_edge_attributes(G, 1, 'weight')
        S = dnx.weighted_maximum_cut(G, ExactSolver())
        self.assertTrue(len(S) == 5)  # half of the nodes

        G = nx.Graph()
        G.add_edges_from([(0, 1), (0, 2), (1, 2), (1, 3), (3, 4), (2, 4)])

        S = dnx.maximum_cut(G, ExactSolver())
        self.assertTrue(len(S) in (2, 3))  # this needs another one for weight
```
Row 50 (length: 228)

```python
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

from ansible.errors import AnsibleError, AnsibleFilterError

import re

'''
Strip leading and trailing whitespace from each line
while keeping newlines intact
'''


def strip_lines(text, collapse=True):
    text = re.sub('^[ \t]+|[ \t]+$', '', text, flags=re.MULTILINE)
    if not collapse:
        return text
    return re.sub('[\r\n]{2,}', '\n\n', text, flags=re.DOTALL)


class FilterModule(object):
    def filters(self):
        return {
            'strip_lines': strip_lines,
        }
```
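A small behavioral example of the filter (illustrative, run outside Ansible): per-line whitespace is stripped, and with `collapse=True` runs of blank lines shrink to a single blank line.

```python
text = "  a  \n\n\n  b  "
print(strip_lines(text))                  # "a\n\nb"
print(strip_lines(text, collapse=False))  # "a\n\n\nb"
```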
Row 51 (length: 652)

```python
import sys
import os

# If the label is 1 or 2 the tweet is positive; if it is -1 or -2 it is negative
# ===================
# Important
# The structure of each input file is assumed to be: tweet, space, label
# ===================

fname = '../label/allLabeled.txt'
path = '../label/'
path2 = 'train/TwoClasses/'
pos = path2 + 'pos/all_pos.txt'
neg = path2 + 'neg/all_neg.txt'

with open(fname, 'r') as f, open(pos, 'w') as p, open(neg, 'w') as n:
    for file_label in f:
        file_label = path + file_label.rstrip()
        mainName = file_label.split('/')[-1]
        mainName = ('_').join(mainName.split("_")[:-1])
        print(mainName)
        file_pos = path2 + 'pos/' + mainName + '_pos.txt'
        file_neg = path2 + 'neg/' + mainName + '_neg.txt'
        with open(file_label, 'r') as fl, open(file_pos, 'w') as fp, open(file_neg, 'w') as fn:
            for line in fl:
                line = line.rstrip()
                label = int(line.split(" ")[-1])
                content = ' '.join(line.split(' ')[:-1]) + '\n'
                if (label == 1 or label == 2):
                    fp.write(content)
                    p.write(content)
                elif (label <= -1):
                    fn.write(content)
                    n.write(content)
```
Row 52 (length: 336)

```python
from django.conf import settings
from django.test import TestCase

from scan_models.settings import DEFAULT_SETTINGS
from scan_models.tests.constances import create_test


class TestVerbosity(TestCase):
    def test_lowest_verbosity(self):
        self.assertEqual(create_test("tests.TestVerbosity"), {})

    def test_first_verbosity(self):
        settings.SCAN_MODELS["verbosity"] = 1

        self.assertEqual(create_test("tests.TestVerbosity"), {"text": {"attributes": {}, "validator": {}}})

        settings.SCAN_MODELS["verbosity"] = DEFAULT_SETTINGS["verbosity"]

    def test_second_verbosity(self):
        settings.SCAN_MODELS["verbosity"] = 2

        self.assertEqual(
            create_test("tests.TestVerbosity"), {"text": {"attributes": {"element": "textarea"}, "validator": {}}}
        )

        settings.SCAN_MODELS["verbosity"] = DEFAULT_SETTINGS["verbosity"]
```
Row 53 (length: 300)

```python
from numify import numify
import unittest


# ** Tests **
class TestNumify(unittest.TestCase):

    # Test that spaces in the middle are ignored
    def test_middle_space(self):
        testcase = "2 k"
        expected = 2000
        self.assertEqual(numify(testcase), expected)

    # Test that the trailing letter is case insensitive
    def test_capitals(self):
        testcase = "30K"
        expected = 30000
        self.assertEqual(numify(testcase), expected)

    # Test that alphanumeric strings containing floats are handled correctly
    def test_float(self):
        self.assertEqual(numify("23.4k"), 23400)

    # Test that input that is not alphanumeric raises an error
    def test_not_alphanum(self):
        self.assertRaises(ValueError, numify, "32")


unittest.main()
```
Row 54 (length: 212)

```python
# Monkey-patch jinja to allow variables to not exist, which happens with sub-options
import jinja2

jinja2.StrictUndefined = jinja2.Undefined

# Monkey-patch cookiecutter to allow sub-items
from cookiecutter import prompt

from ccds.monkey_patch import generate_context_wrapper, prompt_for_config

prompt.prompt_for_config = prompt_for_config

# monkey-patch context to point to ccds.json
from cookiecutter import generate

from ccds.monkey_patch import generate_context_wrapper

generate.generate_context = generate_context_wrapper

# for use in tests need monkey-patched api main
from cookiecutter import cli
from cookiecutter import main as api_main

main = cli.main

if __name__ == "__main__":
    main()
```
55
# -*- Python -*-
# Jiao Lin <[email protected]>
# Refs. Vogel thesis

import numpy as np


def phi1(theta1):
    def sum_series(theta1):
        n = np.arange(1., 35.)
        series = 1./np.exp(n/theta1)/n/n
        return np.sum(series)
    return 1./2 + 2*(theta1*np.log(1-np.exp(-1/theta1)) + theta1**2*(np.pi**2/6 - sum_series(theta1)))


def phi3(theta1):
    def sum_series(theta1):
        n = np.arange(1., 35.)
        tmp = theta1/n
        series = (1./2 + tmp + tmp*tmp)/np.exp(n/theta1)/n/n
        return np.sum(series)
    return 1./4 + 2*(theta1*np.log(1-np.exp(-1/theta1)) + 6*theta1**2*(np.pi**4/90*theta1**2 - sum_series(theta1)))


# End of file
356
56
"""empty message Revision ID: 7a4a335c28d0 Revises: 38bf7545187a Create Date: 2021-06-19 18:50:31.534280 """ from alembic import op import sqlalchemy as sa # revision identifiers, used by Alembic. revision = '7a4a335c28d0' down_revision = '38bf7545187a' branch_labels = None depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### op.create_table('comments', sa.Column('id', sa.Integer(), nullable=False), sa.Column('body', sa.Text(), nullable=True), sa.Column('body_html', sa.Text(), nullable=True), sa.Column('timestamp', sa.DateTime(), nullable=True), sa.Column('disabled', sa.Boolean(), nullable=True), sa.Column('author_id', sa.Integer(), nullable=True), sa.Column('post_id', sa.Integer(), nullable=True), sa.ForeignKeyConstraint(['author_id'], ['users.id'], ), sa.ForeignKeyConstraint(['post_id'], ['posts.id'], ), sa.PrimaryKeyConstraint('id') ) op.create_index(op.f('ix_comments_timestamp'), 'comments', ['timestamp'], unique=False) # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! ### op.drop_index(op.f('ix_comments_timestamp'), table_name='comments') op.drop_table('comments') # ### end Alembic commands ###
661
57
# -*- coding: utf-8 -*-
"""
Created on Mon Feb 11 07:57:02 2019

This program is the second challenge in the CodeForCanada 2019 Fullstack
Fellowship challenge. It obtains the earliest date and the latest date of
violations in each given category.

@author: sid ramesh
"""

## importing the libraries
import pandas as pd
from datetime import date

## importing the csv
df1 = pd.read_csv("C:/Users/sidra/C4C-dev-challenge-2018.csv")

## creating the dataframe with violation_id as the index
df2 = df1.set_index("violation_id", drop=True)

## creating subsets of the master data with the violation category column
df3 = df2.loc[:, "violation_category"]

## creating subset of the violation date
df4 = df2.loc[:, "violation_date"]

## as the values are a combination of date and time, separating only the date values
df4 = pd.to_datetime(df4)

## finding the earliest date
earliest_date = df4.min()

## making sure only the date value is stored
earliest_date = earliest_date.date()

## creating the subset for violation_date_closed
df5 = df2.loc[:, "violation_date_closed"]

## checking to see if there are any null values
df5.isna()

## filling the null values with the earliest date to avoid complexity
df5 = df5.fillna(earliest_date)

## separating the date values from datetime
df5 = pd.to_datetime(df5)

## creating a dataframe with just the violation category, violation date and violation date closed
df6 = pd.concat([df3, df4, df5], axis=1, keys=['violation_category', 'violation_date', 'violation_date_closed'])

## grouping the dataframe with respect to violation categories and finding the earliest and latest dates
gbmax = df6.groupby('violation_category').agg({'violation_date': 'min', 'violation_date_closed': 'max'})

## printing the earliest and the latest dates for each violation category
print('Here are the earliest and latest date of occurrence for each category \n', gbmax)
634
58
import signal

import pianohat

print("""
This simple example shows you how to make Piano HAT keypresses do something useful.

You should see details of each press appear below as you touch Piano HAT's keys

Press CTRL+C to exit.
""")

pianohat.auto_leds(True)


def handle_touch(ch, evt):
    print(ch, evt)


pianohat.on_note(handle_touch)
pianohat.on_octave_up(handle_touch)
pianohat.on_octave_down(handle_touch)
pianohat.on_instrument(handle_touch)

signal.pause()
191
59
import os
import re
from pathlib import Path

from setuptools import setup

# The directory containing this file
current_path = Path(__file__).parent

install_requires = ['dtaidistance', 'matplotlib', 'pygmt', 'pandas', 'numpy', 'xarray']
setup_requires = ['setuptools>=18.0', 'cython>=0.29.6']

# Check version number
init_fn = current_path / 'dtwhaclustering' / '__init__.py'
with init_fn.open('r', encoding='utf-8') as fd:
    version = re.search(r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]',
                        fd.read(), re.MULTILINE).group(1)
if not version:
    raise RuntimeError('Cannot find version information')

# The text of the README file
readme_path = current_path / 'README.md'
if os.path.exists(readme_path):
    with readme_path.open('r', encoding='utf-8') as f:
        long_description = f.read()
else:
    long_description = ""

# This call to setup() does all the work
setup(
    name="dtwhaclustering",
    version=version,
    description="Codes to perform Dynamic Time Warping Based Hierarchical Agglomerative Clustering of GPS data",
    long_description=long_description,
    long_description_content_type="text/markdown",
    url="https://github.com/earthinversion/DTW-based-Hierarchical-Clustering",
    author="Utpal Kumar",
    author_email="[email protected]",
    python_requires='>=3.5',
    license='Apache 2.0',
    classifiers=[
        'License :: OSI Approved :: Apache Software License',
        'Programming Language :: Python :: 3'
    ],
    packages=["dtwhaclustering"],
    include_package_data=True,
    install_requires=install_requires,
    setup_requires=setup_requires,
    keywords='dynamic time warping clustering',
    extras_require={
        'vis': ['matplotlib', 'pygmt'],
        'numpy': ['numpy', 'scipy'],
        'all': ['matplotlib', 'numpy', 'scipy', 'pandas', 'scikit-learn', 'xarray']
    },
)
787
60
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models


class Migration(SchemaMigration):

    def forwards(self, orm):
        # Adding field 'Room.shortname_display'
        db.add_column('core_room', 'shortname_display',
                      self.gf('django.db.models.fields.CharField')(default='', max_length=256),
                      keep_default=False)

    def backwards(self, orm):
        # Deleting field 'Room.shortname_display'
        db.delete_column('core_room', 'shortname_display')

    models = {
        'core.room': {
            'Meta': {'object_name': 'Room'},
            'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'id': ('common.fields.UUIDField', [], {'auto': 'True', 'unique': 'True', 'max_length': '32', 'primary_key': 'True'}),
            'shortname': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '256'}),
            'shortname_display': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
            'title': ('django.db.models.fields.TextField', [], {}),
            'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
        }
    }

    complete_apps = ['core']
565
61
import contextlib
import time


class Timer:
    def __init__(self, description: str) -> None:
        self.description = description

    def __enter__(self):
        self.start = time.time()

    def __exit__(self, type, value, traceback):
        self.end = time.time()
        elapsed_time = self.end - self.start
        print(f"{self.description}: {elapsed_time}")


@contextlib.contextmanager
def measure_exec_time(description: str):
    start = time.time()
    yield
    end = time.time()
    elapsed_time = end - start
    print(f"{description}: {elapsed_time}")


with Timer("Class-based"):
    a = 5 + 5

with measure_exec_time("Generator-based"):
    a = 5 + 5
277
62
import sys
from collections import deque
from numpypy import *

MAXN = 1000005
sys.stdin = open('input.txt')

while True:
    T = int(raw_input())
    if not T:
        break
    teamOf = zeros((MAXN), dtype=int16)
    for t in range(T):
        line = map(int, raw_input().split())
        for m in line[1:]:
            teamOf[m] = t
    totalQ = deque()
    teamQ = []
    for i in range(T):
        teamQ.append(deque())
    while True:
        command = raw_input().strip().split()
        if command[0] == 'STOP':
            break
        elif command[0] == 'ENQUEUE':
            m = int(command[1])
            t = teamOf[m]
            if not teamQ[t]:
                # first member of this team in line: the whole team queue
                # joins the back of the main queue
                totalQ.append(teamQ[t])
            teamQ[t].append(m)
        elif command[0] == 'DEQUEUE':
            t = totalQ.popleft()
            print t.popleft()
            if t:
                totalQ.appendleft(t)
522
63
import torch
import torch.fx.experimental.fx_acc.acc_ops as acc_ops
from caffe2.torch.fb.fx2trt.tests.test_utils import AccTestCase, InputTensorSpec
from parameterized import parameterized


class TestLinearConverter(AccTestCase):
    @parameterized.expand(
        [
            ("default"),
            ("no_bias", False),
        ]
    )
    def test_linear(
        self,
        test_name,
        bias=True,
    ):
        class TestModule(torch.nn.Module):
            def __init__(self):
                super().__init__()
                self.linear = torch.nn.Linear(512, 256, bias)

            def forward(self, x):
                return self.linear(x)

        inputs = [torch.randn(1, 512)]
        self.run_test(TestModule(), inputs, expected_ops={acc_ops.linear})

    def test_linear_with_dynamic_shape(self):
        class TestModule(torch.nn.Module):
            def __init__(self):
                super().__init__()
                self.linear = torch.nn.Linear(512, 256)

            def forward(self, x):
                return self.linear(x)

        input_specs = [
            InputTensorSpec(
                shape=(-1, 3, 512),
                dtype=torch.float32,
                shape_ranges=[((1, 3, 512), (3, 3, 512), (4, 3, 512))],
            ),
        ]
        self.run_test_with_dynamic_shape(
            TestModule(),
            input_specs,
            expected_ops={acc_ops.linear},
        )
757
64
import os
import sys

from mail import mail_time
from screenshoot import screenshootfunc
from sound import sound_go
from keystroke import main


class keylogger():
    def screenshootgo(self):
        screenshootfunc()

    def system_name_go(self):
        return os.environ["USERNAME"]

    def sys_info(self):
        return sys.getwindowsversion()

    def take_sound(self):
        sound_go()

    def log_time(self):
        main()

    def mailsend(self):
        mail_time()
207
65
from django.test import TestCase

from .sentiment_analyzer import get_article_sentiment


class SentimentTest(TestCase):
    pos_text = """
    This is a positive text. What a great story. Excellent!
    I feel so much joy. This is fantastic! Awesome! I love Hungary!
    """
    neg_text = """
    This is a negative text. What a terrible story. Horrible!
    I feel so sad. This is depressing! Awful! I don't like China!
    """

    def test_score(self):
        pos_score = int(get_article_sentiment(self.pos_text))
        neg_score = int(get_article_sentiment(self.neg_text))
        self.assertEqual(pos_score > 0, True)
        self.assertEqual(neg_score < 0, True)
257
66
# Generated by Django 2.0.8 on 2018-08-05 10:30

from django.db import migrations, models


class Migration(migrations.Migration):

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Comment',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
                ('message', models.TextField()),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='Image',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
                ('file', models.ImageField(upload_to='')),
                ('location', models.CharField(max_length=140)),
            ],
            options={
                'abstract': False,
            },
        ),
    ]
624
67
players = []
numbers = list(map(int, input().split(",")))
input()

while True:
    try:
        board = []
        for _ in range(5):
            line = list(map(int, input().split()))
            board.append(line)
        players.append(board)
        input()
    except EOFError:
        break


def check(b, i, j):
    # row
    if sum(b[j]) == -5:
        return True
    # column
    if sum([b[k][i] for k in range(5)]) == -5:
        return True
    # nothing found
    return False


def value(b):
    return sum([sum([b[j][i] for i in range(5) if b[j][i] > 0]) for j in range(5)])


winners = set()


def play(loc, x):
    b = players[loc]
    for j in range(5):
        for i in range(5):
            if b[j][i] == x:
                b[j][i] = -1
                if check(b, i, j):
                    if loc not in winners:
                        print("bingo")
                        print(value(b) * x)
                        winners.add(loc)
                    return 1
                else:
                    return -1


end = False
for n in numbers:
    for i in range(len(players)):
        x = play(i, n)
        # in the 1st part, end after the first BINGO
        # if x == 1:
        #     end = True
        #     break
723
68
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.schema import FetchedValue
from sqlalchemy.ext.associationproxy import association_proxy

from app.api.utils.models_mixins import Base, AuditMixin
from app.extensions import db


class ConsequenceClassificationStatusCode(Base, AuditMixin):
    __tablename__ = "consequence_classification_status"

    consequence_classification_status_code = db.Column(db.String, primary_key=True)
    description = db.Column(db.String, nullable=False)
    active_ind = db.Column(db.Boolean, nullable=False, server_default=FetchedValue())
    display_order = db.Column(db.Integer, nullable=False)

    def __repr__(self):
        return '<ConsequenceClassificationStatusCode %r>' % self.consequence_classification_status_code

    @classmethod
    def get_all(cls):
        return cls.query.order_by(cls.display_order).all()
299
69
import cv2
import PySimpleGUI as sg

USE_CAMERA = 0  # change to 1 for a front-facing camera

window, cap = sg.Window('Demo Application - OpenCV Integration',
                        [[sg.Image(filename='', key='image')], ],
                        location=(0, 0), grab_anywhere=True), \
              cv2.VideoCapture(USE_CAMERA)

while window(timeout=20)[0] != sg.WIN_CLOSED:
    window['image'](data=cv2.imencode('.png', cap.read()[1])[1].tobytes())
212
70
from jd.api.base import RestApi


class SellerProductApiWriteAddProductRequest(RestApi):
    def __init__(self, domain, port=80):
        RestApi.__init__(self, domain, port)
        self.spuInfo = None
        self.skuList = None

    def getapiname(self):
        return 'jingdong.seller.product.api.write.addProduct'


class SpuInfo(object):
    def __init__(self):
        self.packLong = None
        self.spuName = None
        self.commonAttributeIds = None
        self.keywords = None
        self.description = None
        self.countryId = None
        self.warrantyPeriod = None
        self.productArea = None
        self.minQuantity = None
        self.crossProductType = None
        self.packHeight = None
        self.taxesType = None
        self.appDescription = None
        self.weight = None
        self.subtitleHrefM = None
        self.qualityDays = None
        self.packWide = None
        self.catId = None
        self.whetherCod = None
        self.piece = None
        self.brandId = None
        self.subtitle = None
        self.isQuality = None
        self.packageInfo = None
        self.afterSale = None
        self.clearanceType = None
        self.subtitleHref = None
        self.maxQuantity = None
        self.shopCategorys = None


class SkuApiVo(object):
    def __init__(self):
        self.saleAttributeIds = None
        self.costPrice = None
        self.upc = None
        self.sellerSkuId = None
        self.saleAttrValueAlias = None
        self.skuName = None
        self.jdPrice = None
        self.stock = None
555
71
# Copyright (c) Mathias Kaerlev
# See LICENSE for details.

import os
import glob
import imp

from chowdren.data import ObjectType

OBJECTS_DIRECTORY = os.path.join(os.getcwd(), 'objects')


class state:
    objects = None


def get_objects():
    if state.objects is None:
        state.objects = {}
        for name in os.listdir(OBJECTS_DIRECTORY):
            object_dir = os.path.join(OBJECTS_DIRECTORY, name)
            if not os.path.isdir(object_dir):
                continue
            try:
                module = imp.load_source(name,
                    os.path.join(object_dir, 'edittime.py'))
                object_type = module.get_object()
                state.objects[object_type.__name__] = object_type
            except AttributeError:
                continue
    return state.objects


class ObjectBase(object):
    def __init__(self, project, data=None):
        self.project = project
        self.get_image = project.get_image
        self.save_image = project.save_image
        if data is None:
            self.initialize()
        else:
            self.read(data)

    def initialize(self):
        pass

    def read(self, data):
        pass

    def write(self, data):
        pass

    @classmethod
    def get_class_name(cls):
        return cls.__name__

    def get_data(self):
        object_type = ObjectType()
        object_type.name = self.get_class_name()
        object_type.type_id = self.id
        object_type.data = {}
        self.write(object_type.data)
        return object_type

    # for runtime
    def get_parameters(self):
        return []

    @classmethod
    def get_runtime(cls):
        name = cls.__module__
        return (name, os.path.join(OBJECTS_DIRECTORY, name, 'runtime.cpp'))
826
72
#!/usr/bin/env python
# Copyright 2016 Criteo
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module to create caches."""

from biggraphite import metadata_cache

CACHES = frozenset(
    [
        ("disk", metadata_cache.DiskCache),
        ("memory", metadata_cache.MemoryCache),
        ("none", metadata_cache.NoneCache),
    ]
)

DEFAULT_CACHE = "memory"


class Error(Exception):
    """Base class for all exceptions from this module."""


class ConfigError(Error):
    """Configuration problems."""


def add_argparse_arguments(parser):
    """Add cache related BigGraphite arguments to an argparse parser.

    Args:
      parser: argparse.ArgumentParser()
    """
    parser.add_argument(
        "--cache",
        help="BigGraphite cache (%s)" % ", ".join([v[0] for v in CACHES]),
        default=DEFAULT_CACHE,
    )
    parser.add_argument("--cache-size", help="Metadata cache size.")
    parser.add_argument("--cache-sync", help="Metadata cache sync.")


def cache_from_settings(accessor, settings, cname=None):
    """Get Cache from configuration.

    Args:
      settings: dict(str -> value).

    Returns:
      Cache (not opened).
    """
    cache_name = settings.get("cache", DEFAULT_CACHE)
    cache_settings = {"path": settings.get("storage_dir")}
    for opt in ["size", "ttl", "sync"]:
        value = settings.get("cache_%s" % opt)
        if value is not None:
            cache_settings[opt] = value
    for name, cache in CACHES:
        if name == cache_name:
            return cache(accessor, cache_settings, cname)
    raise ConfigError("Invalid value '%s' for BG_CACHE." % cache_name)
748
73
# -*- coding: utf-8 -*-
import os
import subprocess

import click
from flask import cli


@click.command()
@cli.pass_script_info
def init(info):
    """Initialize current app with Relask.

    :type info: cli.ScriptInfo
    """
    app = info.load_app()
    relask_dir = os.path.dirname(__file__)

    package_json = os.path.join(app.root_path, 'package.json')
    if not os.path.isfile(package_json):
        with open(package_json, 'w') as f:
            f.write('{}')
    subprocess.check_call(
        ['npm', 'install', '--save', relask_dir], cwd=app.root_path)

    babel_rc = os.path.join(app.root_path, '.babelrc')
    if not os.path.isfile(babel_rc):
        with open(babel_rc, 'w') as f:
            f.write('''\
{
  "presets": [
    "es2015",
    "react"
  ],
  "plugins": [
    "relask/babelRelayPlugin",
    "transform-decorators-legacy",
    "transform-class-properties"
  ]
}
''')

    webpack_config = os.path.join(app.root_path, 'webpack.config.js')
    if not os.path.isfile(webpack_config):
        with open(webpack_config, 'w') as f:
            f.write('''\
module.exports = {
  entry: './scripts/app.js',
  output: {
    path: './static',
    filename: 'app.bundle.js',
  },
  module: {
    loaders: [{
      test: /\.js$/,
      exclude: /node_modules/,
      loader: 'babel-loader',
    }]
  }
}
''')
667
74
""" WSGI config for prhood project. It exposes the WSGI callable as a module-level variable named ``application``. For more information on this file, see https://docs.djangoproject.com/en/3.2/howto/deployment/wsgi/ """ import os from django.core.wsgi import get_wsgi_application os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'prhood.settings') application = get_wsgi_application()
131
75
from django import template

register = template.Library()


@register.filter
def truncate_char(value, arg):
    """
    Truncates a string after a given number of chars

    Argument: Number of chars to truncate after
    """
    try:
        length = int(arg)
    except ValueError:  # invalid literal for int()
        return value  # Fail silently.
    if not isinstance(value, str):
        value = str(value)
    if len(value) > length:
        return value[:length] + "..."
    return value
212
76
# Copyright (c) 2012-2018 SoftBank Robotics. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the COPYING file.
""" Change the branch of the manifest

Also, checkout the correct branch for every git project in the worktree
"""

import sys

import qisrc.parsers


def configure_parser(parser):
    qisrc.parsers.worktree_parser(parser)
    group = parser.add_argument_group("checkout options")
    group.add_argument("branch")
    group.add_argument("-f", "--force", action="store_true", dest="force",
                       help="Discard local changes. Use with caution")
    parser.set_defaults(force=False)


def do(args):
    branch = args.branch
    git_worktree = qisrc.parsers.get_git_worktree(args)
    manifest = git_worktree.manifest
    groups = manifest.groups
    git_worktree.configure_manifest(manifest.url, groups=groups, branch=branch)
    ok = git_worktree.checkout(branch, force=args.force)
    if not ok:
        sys.exit(1)
370
77
'''
@Author: Mr.Sen
@LastEditTime: 2020-05-27 13:12:02
@Website: https://grimoire.cn
@Mr.Sen All rights reserved
'''

import time
import os

from webdav3.client import Client

options = {
    'webdav_hostname': "https://pan.grimoire.cn/dav",
    'webdav_login': "[email protected]",
    'webdav_password': "g6DReYbJFZMtrGXUCROIdQOEc7R1Gt3b",
    'disable_check': True
}


def parse_time(i):
    t = int(round(int(i[0:-1:]) * 1000))
    st = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(t / 1000))
    # print("Time:" + st)
    return st


def upload(options=options):
    client = Client(options)
    if not client.check("/server"):
        client.mkdir("/server")
    path = "/server/" + str(int(time.time()))
    client.mkdir(path)
    client.upload(path, "./dat/")
    print("Data backup succeeded!")
    # print(path, os.path.dirname(os.getcwd()) + "/dat/")
    # client.upload("/server2/file.txt", "D:\\autotg\\rg\\requirement.txt")


def fetch(options=options):
    client = Client(options)
    if not client.check("/server"):
        client.mkdir("/server")
    lst = client.list("/server")[1::]
    cnt = 0
    for i in lst:
        cnt += 1
        print("Index:", cnt, " Time:", parse_time(i))
    index = int(input("Please select the file you want to restore:")) % len(lst)
    client.download("/server/" + lst[index - 1], "./dat/")
    print(parse_time(lst[index - 1]), "has been restored")


if __name__ == "__main__":
    # praser = argparse.ArgumentParser()
    # praser.add_argument("type")
    # args = praser.parse_args()
    # if args.type == "fetch":
    #     fetch()
    # if args.type == "upload":
    #     upload()
    # upload()
    cmd = input("Backup Or Restore?")
    if cmd == "backup" or cmd == "Backup":
        upload()
    elif cmd == "restore" or cmd == "Restore":
        fetch()
    else:
        print("cmd not defined! exiting...")
812
78
import unittest
from os import path

from agora.jobs import VideoStreamCondense

HERE = path.abspath(path.dirname(__file__))


class VideoStreamCondenseTestcase(unittest.TestCase):
    """
    Test agora.jobs.VideoStreamCondense job.
    """

    @classmethod
    def setup_class(cls):
        cls.sample_data_file = path.join(
            HERE, './fixtures/goonhilly-log-sample')

    def test_mr(self):
        """
        Let's make sure we can run a test runner
        """
        mr_job = VideoStreamCondense(['--no-conf', '-'])
        with open(self.sample_data_file, 'r') as data:
            mr_job.sandbox(stdin=data)
            results = []
            with mr_job.make_runner() as runner:
                runner.run()
                for line in runner.stream_output():
                    results.append(line)
        self.assertTrue(len(results) > 0)
408
79
#!/usr/bin/env python3
# -*- coding:utf-8 -*-
import hashlib
import hmac
import base64
import json
from urllib import parse

import requests
import time
import uuid

ecs_url = 'https://ecs.aliyuncs.com/'
ram_url = 'https://ram.aliyuncs.com'
access_key = "access_key"
access_key_secret = "access_key_secret"
FORMAT_ISO_8601 = "%Y-%m-%dT%H:%M:%SZ"


def get_sign_string(source, secret):
    secret = bytes(secret.encode('utf8'))
    h = hmac.new(secret, source.encode('utf8'), hashlib.sha1)
    signature = base64.encodebytes(h.digest()).strip()
    return signature


def __pop_standard_urlencode(query):
    ret = parse.urlencode(query)
    ret = ret.replace('+', '%20')
    ret = ret.replace('*', '%2A')
    ret = ret.replace('%7E', '~')
    return ret


def __compose_string_to_sign(method, queries):
    sorted_parameters = sorted(queries.items(), key=lambda queries: queries[0])
    string_to_sign = method + "&%2F&" + parse.quote(__pop_standard_urlencode(sorted_parameters))
    return string_to_sign


def get_sign(paras):
    str_sign = __compose_string_to_sign("GET", paras)
    return get_sign_string(str_sign, access_key_secret + "&")


def my_ecs_action(action_name, **kwargs):
    paras = {
        "SignatureVersion": "1.0",
        "Format": "JSON",
        "Timestamp": time.strftime(FORMAT_ISO_8601, time.gmtime()),
        "AccessKeyId": access_key,
        "SignatureMethod": "HMAC-SHA1",
        "Version": "2014-05-26",
        "Action": action_name,
        "SignatureNonce": str(uuid.uuid4()),
    }
    if kwargs:
        paras.update(kwargs)
    paras['Signature'] = get_sign(paras)
    res = requests.get(
        url=ecs_url,
        params=paras,
    )
    ret = json.loads(res.text)
    # pretty-print the response, keeping non-ASCII characters readable
    print(json.dumps(ret, indent=4, ensure_ascii=False))


if __name__ == "__main__":
    my_ecs_action("DescribeInstanceTypeFamilies", RegionId='cn-beijing', Generation="ecs-1")
819
80
import datetime
import collections

import kungfu.yijinjing.time as kft
from kungfu.data.sqlite.data_proxy import CalendarDB, make_url
from kungfu.wingchun.constants import *


class Calendar:
    def __init__(self, ctx):
        self.holidays = CalendarDB(ctx.system_config_location, "holidays").get_holidays()
        self.update_trading_day(datetime.datetime.now())

    def update_trading_day(self, now):
        self.date = now.date()
        if now.hour >= 18:
            self.date = self.date + datetime.timedelta(days=1)
        while not self.is_trading_day(self.date):
            self.date = self.date + datetime.timedelta(days=1)

    @property
    def trading_day(self):
        now = datetime.datetime.now()
        if now.hour >= 18:
            self.update_trading_day(now)
        return self.date

    @property
    def trading_day_ns(self):
        d = self.trading_day
        day = datetime.datetime(year=d.year, month=d.month, day=d.day)
        return int((day - kft.EPOCH).total_seconds() * kft.NANO_PER_SECOND)

    def is_trading_day(self, dt):
        return dt.isoweekday() <= 5 and not self.is_holiday(dt)

    def is_holiday(self, dt):
        return dt in self.holidays
533
81
import os

import cv2
import numpy as np

from utils import filename_templates as TEMPLATES


def prop_flow(x_flow, y_flow, x_indices, y_indices, x_mask, y_mask, scale_factor=1.0):
    flow_x_interp = cv2.remap(x_flow, x_indices, y_indices, cv2.INTER_NEAREST)
    flow_y_interp = cv2.remap(y_flow, x_indices, y_indices, cv2.INTER_NEAREST)

    x_mask[flow_x_interp == 0] = False
    y_mask[flow_y_interp == 0] = False

    x_indices += flow_x_interp * scale_factor
    y_indices += flow_y_interp * scale_factor
    return


def estimate_corresponding_gt_flow(path_flow, gt_timestamps, start_time, end_time):
    # Each gt flow at timestamp gt_timestamps[gt_iter] represents the displacement between
    # gt_iter and gt_iter+1.
    # gt_timestamps[gt_iter] -> Timestamp just before start_time
    gt_iter = np.searchsorted(gt_timestamps, start_time, side='right') - 1
    gt_dt = gt_timestamps[gt_iter + 1] - gt_timestamps[gt_iter]

    # Load Flow just before start_time
    flow_file = os.path.join(path_flow, TEMPLATES.MVSEC_FLOW_GT_FILE.format(gt_iter))
    flow = np.load(flow_file)
    x_flow = flow[0]
    y_flow = flow[1]
    # x_flow = np.squeeze(x_flow_in[gt_iter, ...])
    # y_flow = np.squeeze(y_flow_in[gt_iter, ...])

    dt = end_time - start_time

    # No need to propagate if the desired dt is shorter than the time between gt timestamps.
    if gt_dt > dt:
        return x_flow * dt / gt_dt, y_flow * dt / gt_dt
    else:
        raise Exception("requested interval spans more than one ground-truth flow frame")
1004
82
from fabric.api import local, settings, abort
from fabric.contrib.console import confirm


# prepare for deployment
def test():
    with settings(warn_only=True):
        result = local(
            "python test_tasks.py -v && python test_users.py -v",
            capture=True
        )
    if result.failed and not confirm("Tests failed. Continue?"):
        abort("Aborted at user request.")


def commit():
    message = raw_input("Enter a git commit message: ")
    local("git add . && git commit -am '{}'".format(message))


def push():
    local("git push origin master")


def prepare():
    test()
    commit()
    push()


# deploy to heroku
def pull():
    local("git pull origin master")


def heroku():
    local("git push heroku master")


def heroku_test():
    local(
        "heroku run python test_tasks.py -v && heroku run python test_users.py -v"
    )


def deploy():
    pull()
    test()
    commit()
    heroku()
    heroku_test()


# rollback
def rollback():
    local("heroku rollback")
376
83
# -*- coding: utf-8 -*-
"""
The MIT License (MIT)

Copyright © 2015-2016 Franklin "Snaipe" Mathieu <http://snai.pe/>

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
"""


def npartial(func, *args, **kwargs):
    """ Returns a partial node visitor function """
    def wrapped(self, node):
        func(self, *args, **kwargs)
    return wrapped
364
84
# Generated by Django 3.1.6 on 2021-03-15 19:55

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('fytnet', '0006_weightclass'),
    ]

    operations = [
        migrations.AddField(
            model_name='fighter',
            name='weight',
            field=models.IntegerField(blank=True, null=True),
        ),
    ]
172
85
import re


def is_number(token):
    try:
        int(token)
        return True
    except ValueError:
        return False


def peek(stack):
    return stack[-1] if stack else None


def apply_operator(operators, values):
    operator = operators.pop()
    right = values.pop()
    left = values.pop()
    values.append(compute_op(operator, left, right))


def compute_op(op, lhs, rhs):
    if op == '+':
        return lhs + rhs
    elif op == '-':
        return lhs - rhs
    elif op == '*':
        return lhs * rhs
    elif op == '/':
        return lhs / rhs
    else:
        raise SyntaxError('This operator is not recognized "{}"'.format(op))


def compute(expression):
    tokens = re.findall(r"[+/*()-]|\d+", expression)
    precedences = {'+': 0, '-': 0, '*': 1, '/': 1}
    values = []
    operators = []
    for token in tokens:
        if is_number(token):
            values.append(int(token))
        elif token in precedences.keys():  # Operator
            top = peek(operators)
            while (top is not None and top not in "()"
                   and precedences[top] >= precedences[token]):
                apply_operator(operators, values)
                top = peek(operators)
            operators.append(token)
        else:
            raise SyntaxError('Element not recognized "{}"'.format(token))
    while peek(operators) is not None:
        apply_operator(operators, values)
    return values[0]
672
86
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.forms import UserCreationForm, UserChangeForm

from .models import CustomUser


# Register your models here.
class CustomUserAdmin(UserAdmin):
    add_form = UserCreationForm
    form = UserChangeForm
    model = CustomUser
    list_display = ['pk', 'email', 'display_name', 'first_name', 'last_name']
    add_fieldsets = UserAdmin.add_fieldsets + (
        (None, {'fields': ('email', 'first_name', 'last_name', 'display_name',
                           'date_of_birth', 'address1', 'address2', 'zip_code',
                           'city', 'country', 'mobile_phone',
                           'additional_information', 'photo',)}),
    )
    fieldsets = UserAdmin.fieldsets + (
        (None, {'fields': ('display_name', 'date_of_birth', 'address1',
                           'address2', 'zip_code', 'city', 'country',
                           'mobile_phone', 'additional_information',
                           'photo',)}),
    )


admin.site.register(CustomUser, CustomUserAdmin)
320
87
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_migrate import Migrate
from flask_marshmallow import Marshmallow

from app.config import Config

app = Flask(__name__)
app.config.from_object(Config)  # load settings before initialising extensions
db = SQLAlchemy(app)
migrate = Migrate(app, db)
ma = Marshmallow(app)

from app import routes, models  # noqa: E402 (imported last to avoid circular imports)

# def create_app():
#     app = Flask(__name__)
#     return app
130
88
from distutils.core import setup
import py2exe, sys, os

sys.argv.append('py2exe')

setup(
    data_files=[("dep", ["dep/creepon.ppm",
                         "dep/download.ppm",
                         "dep/login.ppm",
                         "dep/quit.ppm",
                         "dep/pg.ico",
                         "dep/viewer.html"]),
                "INSTALL.txt",
                "LICENSE.txt"],
    options={'py2exe': {'bundle_files': 3}},
    windows=[{'script': 'pg.py', "icon_resources": [(1, "dep/pg.ico")]}],
    zipfile=None
)
387
89
# 24. Swap Nodes in Pairs
# Given a linked list, swap every two adjacent nodes and return the resulting list.
# You may not simply change the values inside the nodes; the nodes themselves
# must actually be swapped.

# Example:
# Given 1->2->3->4, you should return 2->1->4->3.


# Definition for singly-linked list.
class ListNode:
    def __init__(self, x):
        self.val = x
        self.next = None


class Solution:
    def swapPairs(self, head: ListNode) -> ListNode:
        def reverse2node(node):
            pre = None
            while node:
                nex = node.next
                node.next = pre
                pre = node
                node = nex
            return pre

        if not head:
            return None
        trev = rev = ListNode(0)
        tail = head
        while True:
            n = 2
            while tail and n:
                tail = tail.next
                n -= 2
            if n > 0 or not tail:
                rev.next = head
                break
            temp = tail.next
            tail.next = None
            rev.next = reverse2node(head)
            rev = head
            head = temp
            tail = temp
        return trev.next
740
90
__author__ = 'wanghao'

# import threading
import sys
import socket
from struct import *
import time
import threading


def run_flow(dst_ip, port, size):
    def run(dst_ip, port, size):
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        # data = os.urandom(size)
        data = pack('c', 'a')
        try:
            sock.connect((dst_ip, port))
            size_left = size
            while size_left:
                if size_left > 200000000:
                    sock.sendall(data * 200000000)
                    size_left -= 200000000
                else:
                    sock.sendall(data * size_left)
                    size_left = 0
        except socket.timeout:
            print "Connection Timeout!"
        except socket.error, e:
            print e
        finally:
            sock.close()

    # run the sender in a worker thread; Thread takes the callable and its
    # arguments separately, not the result of calling it
    t = threading.Thread(target=run, args=(dst_ip, port, size))
    t.start()
    t.join()
    print "Done"
    # run(dst_ip, port, size)


if __name__ == '__main__':
    dst_ip = sys.argv[1]
    port = int(sys.argv[2])
    size = int(float(sys.argv[3]))
    fd = open("fct.txt", 'w')
    # print "Flow Size:", size
    fd.write("Flow Size %d " % size)
    start_t = time.clock()
    # print "Start:", time.strftime("%M:%S")
    fd.write("Start: %s " % time.strftime("%M:%S"))
    run_flow(dst_ip, port, size)
    end_t = time.clock()
    # print "End:", time.strftime("%M:%S")
    fd.write("End: %s " % time.strftime("%M:%S"))
    print "Duration:", end_t - start_t
    fd.write("Duration: %f \r\n" % (end_t - start_t))
    fd.close()
795
91
import numpy as np
from three.mathutils import Matrix
import math
from three.components import *


# Plane component, can be used for several things, including collisions.
# Represented by the plane's normal and its offset from the center.
class Plane(Shape):
    def __init__(self, normal=(0, 1, 0), offset=0):
        super().__init__()
        self.normal = np.asarray(normal)
        self.offset = offset

    def setOffset(self, newOffset):
        self.offset = newOffset

    def setNormal(self, newNormal):
        self.normal = np.asarray(newNormal)

    # NOTE: the point taken in must be a numpy array
    def distanceToPoint(self, point):
        return np.dot(self.normal, point) + self.offset

    def intersectsSphere(self, sphere):
        return sphere.intersectsPlane(self)
275
92
import random

from PyQt5.QtWidgets import *
from PyQt5.QtCore import *

from easygraphics.widget import ImageWidget
from easygraphics import Image


class MyWindow(QWidget):
    def __init__(self):
        super().__init__()
        self._image = Image.create(800, 600)
        imageWidget = ImageWidget()
        imageWidget.setImage(self._image)
        area = QScrollArea()
        area.setWidget(imageWidget)
        area.setAlignment(Qt.AlignCenter)
        layout = QHBoxLayout(self)
        layout.addWidget(area)
        button = QPushButton("Add Circle")
        button.clicked.connect(self.button_clicked)
        layout.addWidget(button)
        self.setLayout(layout)

    def button_clicked(self):
        self._image.ellipse(random.randrange(0, 800), random.randrange(0, 600), 20, 20)


if __name__ == "__main__":
    app = QApplication([])
    random.seed()
    window = MyWindow()
    window.show()
    app.exec()
389
93
from django.db import models


class ApiKey(models.Model):
    key = models.CharField(max_length=300, null=False, blank=False)

    def __str__(self):
        return self.key


class YoutubeAPIResult(models.Model):
    video_title = models.TextField(null=False, blank=False)
    description = models.TextField(null=False, blank=False)
    publish_datetime = models.DateTimeField(null=False, blank=False)
    thumbnail_url = models.TextField()

    def __str__(self):
        return self.video_title
181
94
#!/usr/bin/env python3
from pynamodb.models import Model
from pynamodb.attributes import (
    NumberAttribute,
    UnicodeAttribute,
    MapAttribute,
    UTCDateTimeAttribute,
    BooleanAttribute
)


class ContextTable(Model):
    class Meta:
        read_capacity_units = 1
        write_capacity_units = 1
        table_name = "Context"
        region = "eu-west-1"

    datetime = UTCDateTimeAttribute(hash_key=True)
    holidays = MapAttribute()
    day_type = UnicodeAttribute()
    weather = NumberAttribute()
    daytime = BooleanAttribute()
    temperature = NumberAttribute()
196
95
import importlib

import web

try:
    settings = importlib.import_module('settings')
    # Assuming that only MySQL is used
    db = web.database(
        dbn='mysql',
        user=getattr(settings, 'dbuser'),
        pw=getattr(settings, 'dbpw'),
        db=getattr(settings, 'dbname', 'sprks'),
        host=getattr(settings, 'host', '127.0.0.1'),
        port=getattr(settings, 'port', 3306)
    )
    path = getattr(settings, 'path', '')
except (ImportError, AttributeError):
    # Default DB credentials
    db = web.database(
        dbn='mysql',
        user='root',
        pw='1234',
        db='sprks',
        host='127.0.0.1',
        port=3306
    )
    path = ''
314
96
import typing as t

import typing_extensions as te
import pytest

T = t.TypeVar('T')


def typing_modules_for_member(member_name: str) -> t.Sequence[t.Any]:
    assert hasattr(te, member_name), member_name
    if hasattr(t, member_name):
        return (t, te)
    return (te,)


def parametrize_typing_module(member_name: str, argname: str = 'm') -> t.Callable[[T], T]:
    def _decorator(func: T) -> T:
        return pytest.mark.parametrize(argname, typing_modules_for_member(member_name))(func)  # type: ignore
    return _decorator
197
97
from __future__ import absolute_import, division, print_function

import torch


def _patch(target):
    parts = target.split('.')
    assert parts[0] == 'torch'
    module = torch
    for part in parts[1:-1]:
        module = getattr(module, part)
    name = parts[-1]
    old_fn = getattr(module, name)
    old_fn = getattr(old_fn, '_pyro_unpatched', old_fn)  # ensure patching is idempotent

    def decorator(new_fn):
        new_fn.__name__ = name
        new_fn._pyro_unpatched = old_fn
        setattr(module, name, new_fn)
        return new_fn

    return decorator


@_patch('torch._standard_gamma')
def _torch_standard_gamma(concentration):
    unpatched_fn = _torch_standard_gamma._pyro_unpatched
    if concentration.is_cuda:
        return unpatched_fn(concentration.cpu()).cuda(concentration.get_device())
    return unpatched_fn(concentration)


@_patch('torch.distributions.gamma._standard_gamma')
def _standard_gamma(concentration):
    if concentration.is_cuda:
        return concentration.cpu()._standard_gamma().cuda(concentration.get_device())
    return concentration._standard_gamma()


@_patch('torch._dirichlet_grad')
def _torch_dirichlet_grad(x, concentration, total):
    unpatched_fn = _torch_dirichlet_grad._pyro_unpatched
    if x.is_cuda:
        return unpatched_fn(x.cpu(), concentration.cpu(), total.cpu()).cuda(x.get_device())
    return unpatched_fn(x, concentration, total)


__all__ = []
576
98
import os

from conans import ConanFile, CMake


class TestPackageConan(ConanFile):
    settings = "os", "compiler", "build_type", "arch"
    generators = "cmake"

    def build(self):
        cmake = CMake(self)
        cmake.configure()
        cmake.build()

    def test(self):
        assert os.path.isfile(os.path.join(self.deps_cpp_info["cppzmq"].rootpath, "licenses", "LICENSE"))
        bin_path = os.path.join("bin", "test_package")
        self.run(bin_path, run_environment=True)
212
99
# Copyright (c) 2015 Uber Technologies, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

from uber_rides.errors import ClientError
from uber_rides.errors import ServerError


def error_handler(response, **kwargs):
    """Error Handler to surface 4XX and 5XX errors.

    Attached as a callback hook on the Request object.

    Parameters
        response (requests.Response)
            The HTTP response from an API request.
        **kwargs
            Arbitrary keyword arguments.

    Raises
        ClientError (ApiError)
            Raised if response contains a 4XX status code.
        ServerError (ApiError)
            Raised if response contains a 5XX status code.

    Returns
        response (requests.Response)
            The original HTTP response from the API request.
    """
    if 400 <= response.status_code <= 499:
        raise ClientError(response)
    elif 500 <= response.status_code <= 599:
        raise ServerError(response)
    return response
650