repo_name
stringlengths 4
116
| path
stringlengths 4
379
| size
stringlengths 1
7
| content
stringlengths 3
1.05M
| license
stringclasses 15
values |
---|---|---|---|---|
leegoway/yii2-rest
|
src/RestException.php
|
97
|
<?php
/**
* @author
*/
namespace leegoway\rest;
/**
 * Marker exception type for the yii2-rest package.
 *
 * Carries no extra state or behavior; it exists so callers can catch
 * REST-specific failures separately from other \Exception instances.
 */
class RestException extends \Exception
{
}
|
bsd-3-clause
|
uonafya/jphes-core
|
dhis-2/dhis-services/dhis-service-core/src/main/java/org/hisp/dhis/schema/descriptors/DataElementSchemaDescriptor.java
|
2804
|
package org.hisp.dhis.schema.descriptors;
/*
* Copyright (c) 2004-2016, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import com.google.common.collect.Lists;
import org.hisp.dhis.dataelement.DataElement;
import org.hisp.dhis.security.Authority;
import org.hisp.dhis.security.AuthorityType;
import org.hisp.dhis.schema.Schema;
import org.hisp.dhis.schema.SchemaDescriptor;
/**
 * Schema descriptor exposing the {@link DataElement} type through the DHIS2
 * metadata API under {@code /dataElements}.
 *
 * @author Morten Olav Hansen <mortenoh@gmail.com>
 */
public class DataElementSchemaDescriptor implements SchemaDescriptor
{
    public static final String SINGULAR = "dataElement";

    public static final String PLURAL = "dataElements";

    public static final String API_ENDPOINT = "/" + PLURAL;

    @Override
    public Schema getSchema()
    {
        Schema dataElementSchema = new Schema( DataElement.class, SINGULAR, PLURAL );
        dataElementSchema.setRelativeApiEndpoint( API_ENDPOINT );
        dataElementSchema.setOrder( 1200 );

        // Register the authorities gating create (public/private) and delete.
        addAuthority( dataElementSchema, AuthorityType.CREATE_PUBLIC, "F_DATAELEMENT_PUBLIC_ADD" );
        addAuthority( dataElementSchema, AuthorityType.CREATE_PRIVATE, "F_DATAELEMENT_PRIVATE_ADD" );
        addAuthority( dataElementSchema, AuthorityType.DELETE, "F_DATAELEMENT_DELETE" );

        return dataElementSchema;
    }

    /** Attaches a single-authority entry of the given type to the schema. */
    private static void addAuthority( Schema schema, AuthorityType type, String authority )
    {
        schema.getAuthorities().add( new Authority( type, Lists.newArrayList( authority ) ) );
    }
}
|
bsd-3-clause
|
timmolderez/adbc
|
src/be/ac/ua/ansymo/example_bank/SavingsAccount.java
|
1176
|
/*******************************************************************************
* Copyright (c) 2012-2014 Tim Molderez.
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the 3-Clause BSD License
* which accompanies this distribution, and is available at
* http://www.opensource.org/licenses/BSD-3-Clause
******************************************************************************/
package be.ac.ua.ansymo.example_bank;
import be.ac.ua.ansymo.adbc.annotations.ensures;
import be.ac.ua.ansymo.adbc.annotations.requires;
/**
 * A savings account; the money on a savings account can only be transferred to accounts with the same owner
 * (This means a savings account is *not* a behavioural subtype, as the precondition of the transfer() method is too strong.)
 * @author Tim Molderez
 */
public class SavingsAccount extends Account {

    /**
     * Creates a savings account with an initial balance.
     * @param amount initial balance
     * @param owner  the user who owns this account
     */
    public SavingsAccount(double amount, User owner) {
        super(amount, owner);
    }

    /**
     * Transfers money from this account to another account.
     * The adbc contract strengthens the inherited precondition: the
     * destination must share this account's owner — which is what breaks
     * behavioural subtyping, as noted on the class.
     * @param amount the amount of money to move
     * @param to     destination account (must have the same owner)
     */
    @requires({
        "$super && $this.getOwner()==to.getOwner()"})
    @ensures({
        "$super"
    })
    public void transfer(double amount, Account to) {
        withdraw(amount);
        to.deposit(amount);
    }
}
|
bsd-3-clause
|
ric2b/Vivaldi-browser
|
chromium/mojo/core/message_pipe_unittest.cc
|
18579
|
// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include <stdint.h>
#include <string.h>
#include <algorithm>
#include <memory>
#include <string>
#include <vector>
#include "base/memory/ptr_util.h"
#include "base/memory/ref_counted.h"
#include "build/build_config.h"
#include "mojo/core/test/mojo_test_base.h"
#include "mojo/public/c/system/core.h"
#include "mojo/public/c/system/types.h"
#include "mojo/public/cpp/system/message_pipe.h"
namespace mojo {
namespace core {
namespace {
// Union of every signal a message pipe handle can raise; used when a test
// wants to assert the full "satisfiable" mask of a healthy pipe endpoint.
const MojoHandleSignals kAllSignals =
    MOJO_HANDLE_SIGNAL_READABLE | MOJO_HANDLE_SIGNAL_WRITABLE |
    MOJO_HANDLE_SIGNAL_PEER_CLOSED | MOJO_HANDLE_SIGNAL_PEER_REMOTE |
    MOJO_HANDLE_SIGNAL_QUOTA_EXCEEDED;

// Canonical payload for tests that just need some bytes to send.
static const char kHelloWorld[] = "hello world";
// Fixture that owns a freshly created message pipe (pipe0_, pipe1_) and
// offers raw byte-level read/write helpers over the Mojo C API.
class MessagePipeTest : public test::MojoTestBase {
 public:
  MessagePipeTest() {
    CHECK_EQ(MOJO_RESULT_OK, MojoCreateMessagePipe(nullptr, &pipe0_, &pipe1_));
  }

  // Closes whichever endpoints a test did not close itself; tests signal an
  // already-closed endpoint by resetting the member to MOJO_HANDLE_INVALID.
  ~MessagePipeTest() override {
    if (pipe0_ != MOJO_HANDLE_INVALID)
      CHECK_EQ(MOJO_RESULT_OK, MojoClose(pipe0_));
    if (pipe1_ != MOJO_HANDLE_INVALID)
      CHECK_EQ(MOJO_RESULT_OK, MojoClose(pipe1_));
  }

  // Writes a single data-only message (no attached handles) to the pipe.
  MojoResult WriteMessage(MojoHandle message_pipe_handle,
                          const void* bytes,
                          uint32_t num_bytes) {
    return mojo::WriteMessageRaw(MessagePipeHandle(message_pipe_handle), bytes,
                                 num_bytes, nullptr, 0,
                                 MOJO_WRITE_MESSAGE_FLAG_NONE);
  }

  // Reads the next queued message into |bytes|. On entry |*num_bytes| is the
  // buffer capacity; on success it is updated to the actual message size,
  // which is CHECKed to fit the capacity. When |may_discard| is true, a
  // MOJO_RESULT_RESOURCE_EXHAUSTED result from MojoGetMessageData is
  // tolerated and the message is destroyed unread.
  MojoResult ReadMessage(MojoHandle message_pipe_handle,
                         void* bytes,
                         uint32_t* num_bytes,
                         bool may_discard = false) {
    MojoMessageHandle message_handle;
    MojoResult rv =
        MojoReadMessage(message_pipe_handle, nullptr, &message_handle);
    if (rv != MOJO_RESULT_OK)
      return rv;
    const uint32_t expected_num_bytes = *num_bytes;
    void* buffer;
    rv = MojoGetMessageData(message_handle, nullptr, &buffer, num_bytes,
                            nullptr, nullptr);
    if (rv == MOJO_RESULT_RESOURCE_EXHAUSTED) {
      CHECK(may_discard);
    } else if (*num_bytes) {
      CHECK_EQ(MOJO_RESULT_OK, rv);
      CHECK_GE(expected_num_bytes, *num_bytes);
      CHECK(bytes);
      memcpy(bytes, buffer, *num_bytes);
    }
    // The message object must always be destroyed, read or discarded.
    CHECK_EQ(MOJO_RESULT_OK, MojoDestroyMessage(message_handle));
    return rv;
  }

  MojoHandle pipe0_, pipe1_;

 private:
  DISALLOW_COPY_AND_ASSIGN(MessagePipeTest);
};
// The fuse tests create their own pipes per test, so they use the base
// fixture directly instead of MessagePipeTest's pre-made pipe.
using FuseMessagePipeTest = test::MojoTestBase;
// Sanity check: writing a message to a freshly created pipe succeeds.
TEST_F(MessagePipeTest, WriteData) {
  ASSERT_EQ(MOJO_RESULT_OK,
            WriteMessage(pipe0_, kHelloWorld, sizeof(kHelloWorld)));
}
// Tests:
//  - only default flags
//  - reading messages from a port
//    - when there are no/one/two messages available for that port
//    - with buffer size 0 (and null buffer) -- should get size
//    - with too-small buffer -- should get size
//    - also verify that buffers aren't modified when/where they shouldn't be
//  - writing messages to a port
//    - in the obvious scenarios (as above)
//    - to a port that's been closed
//  - writing a message to a port, closing the other (would be the source) port,
//    and reading it
TEST_F(MessagePipeTest, Basic) {
  int32_t buffer[2];
  const uint32_t kBufferSize = static_cast<uint32_t>(sizeof(buffer));
  uint32_t buffer_size;

  // Nothing to read yet on port 0; the buffer must be left untouched.
  buffer[0] = 123;
  buffer[1] = 456;
  buffer_size = kBufferSize;
  ASSERT_EQ(MOJO_RESULT_SHOULD_WAIT, ReadMessage(pipe0_, buffer, &buffer_size));
  ASSERT_EQ(kBufferSize, buffer_size);
  ASSERT_EQ(123, buffer[0]);
  ASSERT_EQ(456, buffer[1]);

  // Ditto for port 1.
  buffer[0] = 123;
  buffer[1] = 456;
  buffer_size = kBufferSize;
  ASSERT_EQ(MOJO_RESULT_SHOULD_WAIT, ReadMessage(pipe1_, buffer, &buffer_size));

  // Write from port 1 (to port 0).
  buffer[0] = 789012345;
  buffer[1] = 0;
  ASSERT_EQ(MOJO_RESULT_OK, WriteMessage(pipe1_, buffer, sizeof(buffer[0])));

  MojoHandleSignalsState state;
  ASSERT_EQ(MOJO_RESULT_OK,
            WaitForSignals(pipe0_, MOJO_HANDLE_SIGNAL_READABLE, &state));

  // Read from port 0. Only the first int32 was sent, so only buffer[0]
  // should be overwritten.
  buffer[0] = 123;
  buffer[1] = 456;
  buffer_size = kBufferSize;
  ASSERT_EQ(MOJO_RESULT_OK, ReadMessage(pipe0_, buffer, &buffer_size));
  ASSERT_EQ(static_cast<uint32_t>(sizeof(buffer[0])), buffer_size);
  ASSERT_EQ(789012345, buffer[0]);
  ASSERT_EQ(456, buffer[1]);

  // Read again from port 0 -- it should be empty.
  buffer_size = kBufferSize;
  ASSERT_EQ(MOJO_RESULT_SHOULD_WAIT, ReadMessage(pipe0_, buffer, &buffer_size));

  // Write two messages from port 0 (to port 1); FIFO order must hold.
  buffer[0] = 123456789;
  buffer[1] = 0;
  ASSERT_EQ(MOJO_RESULT_OK, WriteMessage(pipe0_, buffer, sizeof(buffer[0])));
  buffer[0] = 234567890;
  buffer[1] = 0;
  ASSERT_EQ(MOJO_RESULT_OK, WriteMessage(pipe0_, buffer, sizeof(buffer[0])));

  ASSERT_EQ(MOJO_RESULT_OK,
            WaitForSignals(pipe1_, MOJO_HANDLE_SIGNAL_READABLE, &state));

  // Read from port 1.
  buffer[0] = 123;
  buffer[1] = 456;
  buffer_size = kBufferSize;
  ASSERT_EQ(MOJO_RESULT_OK, ReadMessage(pipe1_, buffer, &buffer_size));
  ASSERT_EQ(static_cast<uint32_t>(sizeof(buffer[0])), buffer_size);
  ASSERT_EQ(123456789, buffer[0]);
  ASSERT_EQ(456, buffer[1]);

  ASSERT_EQ(MOJO_RESULT_OK,
            WaitForSignals(pipe1_, MOJO_HANDLE_SIGNAL_READABLE, &state));

  // Read again from port 1.
  buffer[0] = 123;
  buffer[1] = 456;
  buffer_size = kBufferSize;
  ASSERT_EQ(MOJO_RESULT_OK, ReadMessage(pipe1_, buffer, &buffer_size));
  ASSERT_EQ(static_cast<uint32_t>(sizeof(buffer[0])), buffer_size);
  ASSERT_EQ(234567890, buffer[0]);
  ASSERT_EQ(456, buffer[1]);

  // Read again from port 1 -- it should be empty.
  buffer_size = kBufferSize;
  ASSERT_EQ(MOJO_RESULT_SHOULD_WAIT, ReadMessage(pipe1_, buffer, &buffer_size));

  // Write from port 0 (to port 1).
  buffer[0] = 345678901;
  buffer[1] = 0;
  ASSERT_EQ(MOJO_RESULT_OK, WriteMessage(pipe0_, buffer, sizeof(buffer[0])));

  // Close port 0.
  MojoClose(pipe0_);
  pipe0_ = MOJO_HANDLE_INVALID;

  ASSERT_EQ(MOJO_RESULT_OK,
            WaitForSignals(pipe1_, MOJO_HANDLE_SIGNAL_PEER_CLOSED, &state));

  // Try to write from port 1 (to port 0): fails because the peer is gone.
  buffer[0] = 456789012;
  buffer[1] = 0;
  ASSERT_EQ(MOJO_RESULT_FAILED_PRECONDITION,
            WriteMessage(pipe1_, buffer, sizeof(buffer[0])));

  // Read from port 1; should still get message (even though port 0 was closed).
  buffer[0] = 123;
  buffer[1] = 456;
  buffer_size = kBufferSize;
  ASSERT_EQ(MOJO_RESULT_OK, ReadMessage(pipe1_, buffer, &buffer_size));
  ASSERT_EQ(static_cast<uint32_t>(sizeof(buffer[0])), buffer_size);
  ASSERT_EQ(345678901, buffer[0]);
  ASSERT_EQ(456, buffer[1]);

  // Read again from port 1 -- it should be empty (and port 0 is closed).
  buffer_size = kBufferSize;
  ASSERT_EQ(MOJO_RESULT_FAILED_PRECONDITION,
            ReadMessage(pipe1_, buffer, &buffer_size));
}
// Closing an endpoint that still has queued incoming messages must not
// crash or leak (regression-style coverage for teardown with a full queue).
TEST_F(MessagePipeTest, CloseWithQueuedIncomingMessages) {
  int32_t buffer[1];
  const uint32_t kBufferSize = static_cast<uint32_t>(sizeof(buffer));
  uint32_t buffer_size;

  // Write some messages from port 1 (to port 0).
  for (int32_t i = 0; i < 5; i++) {
    buffer[0] = i;
    ASSERT_EQ(MOJO_RESULT_OK, WriteMessage(pipe1_, buffer, kBufferSize));
  }

  MojoHandleSignalsState state;
  ASSERT_EQ(MOJO_RESULT_OK,
            WaitForSignals(pipe0_, MOJO_HANDLE_SIGNAL_READABLE, &state));

  // Port 0 shouldn't be empty; drain just one message so four remain queued.
  buffer_size = kBufferSize;
  ASSERT_EQ(MOJO_RESULT_OK, ReadMessage(pipe0_, buffer, &buffer_size));
  ASSERT_EQ(kBufferSize, buffer_size);

  // Close port 0 first, which should have outstanding (incoming) messages.
  MojoClose(pipe0_);
  MojoClose(pipe1_);
  pipe0_ = pipe1_ = MOJO_HANDLE_INVALID;
}
// Exercises the signal state machine: writable-only when fresh, readable
// after a peer write, peer-closed/non-writable after the peer closes, and
// unreadable once the last queued message is consumed.
TEST_F(MessagePipeTest, BasicWaiting) {
  MojoHandleSignalsState hss;

  int32_t buffer[1];
  const uint32_t kBufferSize = static_cast<uint32_t>(sizeof(buffer));
  uint32_t buffer_size;

  // Always writable (until the other port is closed). Not yet readable. Peer
  // not closed.
  hss = GetSignalsState(pipe0_);
  ASSERT_EQ(MOJO_HANDLE_SIGNAL_WRITABLE, hss.satisfied_signals);
  ASSERT_EQ(kAllSignals, hss.satisfiable_signals);
  hss = MojoHandleSignalsState();

  // Write from port 0 (to port 1), to make port 1 readable.
  buffer[0] = 123456789;
  ASSERT_EQ(MOJO_RESULT_OK, WriteMessage(pipe0_, buffer, kBufferSize));

  // Port 1 should already be readable now.
  ASSERT_EQ(MOJO_RESULT_OK,
            WaitForSignals(pipe1_, MOJO_HANDLE_SIGNAL_READABLE, &hss));
  ASSERT_EQ(MOJO_HANDLE_SIGNAL_READABLE | MOJO_HANDLE_SIGNAL_WRITABLE,
            hss.satisfied_signals);
  ASSERT_EQ(kAllSignals, hss.satisfiable_signals);

  // ... and still writable.
  hss = MojoHandleSignalsState();
  ASSERT_EQ(MOJO_RESULT_OK,
            WaitForSignals(pipe1_, MOJO_HANDLE_SIGNAL_WRITABLE, &hss));
  ASSERT_EQ(MOJO_HANDLE_SIGNAL_READABLE | MOJO_HANDLE_SIGNAL_WRITABLE,
            hss.satisfied_signals);
  ASSERT_EQ(kAllSignals, hss.satisfiable_signals);

  // Close port 0.
  MojoClose(pipe0_);
  pipe0_ = MOJO_HANDLE_INVALID;

  // Port 1 should be signaled with peer closed.
  hss = MojoHandleSignalsState();
  ASSERT_EQ(MOJO_RESULT_OK,
            WaitForSignals(pipe1_, MOJO_HANDLE_SIGNAL_PEER_CLOSED, &hss));
  ASSERT_TRUE(hss.satisfied_signals & MOJO_HANDLE_SIGNAL_PEER_CLOSED);
  ASSERT_TRUE(hss.satisfiable_signals & MOJO_HANDLE_SIGNAL_PEER_CLOSED);

  // Port 1 should not be writable now or ever again.
  hss = MojoHandleSignalsState();
  ASSERT_EQ(MOJO_RESULT_FAILED_PRECONDITION,
            WaitForSignals(pipe1_, MOJO_HANDLE_SIGNAL_WRITABLE, &hss));
  ASSERT_FALSE(hss.satisfied_signals & MOJO_HANDLE_SIGNAL_WRITABLE);
  ASSERT_FALSE(hss.satisfiable_signals & MOJO_HANDLE_SIGNAL_WRITABLE);

  // But it should still be readable: the queued message survives peer close.
  hss = MojoHandleSignalsState();
  ASSERT_EQ(MOJO_RESULT_OK,
            WaitForSignals(pipe1_, MOJO_HANDLE_SIGNAL_READABLE, &hss));
  ASSERT_TRUE(hss.satisfied_signals & MOJO_HANDLE_SIGNAL_READABLE);
  ASSERT_TRUE(hss.satisfiable_signals & MOJO_HANDLE_SIGNAL_READABLE);

  // Read from port 1.
  buffer[0] = 0;
  buffer_size = kBufferSize;
  ASSERT_EQ(MOJO_RESULT_OK, ReadMessage(pipe1_, buffer, &buffer_size));
  ASSERT_EQ(123456789, buffer[0]);

  // Now port 1 should no longer be readable.
  hss = MojoHandleSignalsState();
  ASSERT_EQ(MOJO_RESULT_FAILED_PRECONDITION,
            WaitForSignals(pipe1_, MOJO_HANDLE_SIGNAL_READABLE, &hss));
  ASSERT_EQ(MOJO_HANDLE_SIGNAL_PEER_CLOSED, hss.satisfied_signals);
  ASSERT_FALSE(hss.satisfiable_signals & MOJO_HANDLE_SIGNAL_READABLE);
  ASSERT_FALSE(hss.satisfiable_signals & MOJO_HANDLE_SIGNAL_WRITABLE);
}
#if !defined(OS_IOS)

// Batch size and round-trip count for the handle ping-pong tests below.
const size_t kPingPongHandlesPerIteration = 30;
const size_t kPingPongIterations = 500;

// Child-process side of the ping-pong tests: echoes each batch of handles
// back to the sender, then expects a final data-only ("quit") message.
DEFINE_TEST_CLIENT_TEST_WITH_PIPE(HandlePingPong, MessagePipeTest, h) {
  // Waits for a handle to become readable and writes it back to the sender.
  for (size_t i = 0; i < kPingPongIterations; i++) {
    MojoHandle handles[kPingPongHandlesPerIteration];
    ReadMessageWithHandles(h, handles, kPingPongHandlesPerIteration);
    WriteMessageWithHandles(h, "", handles, kPingPongHandlesPerIteration);
  }
  EXPECT_EQ(MOJO_RESULT_OK, WaitForSignals(h, MOJO_HANDLE_SIGNAL_READABLE));
  char msg[4];
  uint32_t num_bytes = 4;
  EXPECT_EQ(MOJO_RESULT_OK, ReadMessage(h, msg, &num_bytes));
}
// Ping-pongs data pipe *consumer* handles with a client process; the
// producer side of each pipe is closed immediately after creation.
// This test is flaky: http://crbug.com/585784
TEST_F(MessagePipeTest, DISABLED_DataPipeConsumerHandlePingPong) {
  MojoHandle p, c[kPingPongHandlesPerIteration];
  for (size_t i = 0; i < kPingPongHandlesPerIteration; ++i) {
    EXPECT_EQ(MOJO_RESULT_OK, MojoCreateDataPipe(nullptr, &p, &c[i]));
    MojoClose(p);
  }

  RunTestClient("HandlePingPong", [&](MojoHandle h) {
    for (size_t i = 0; i < kPingPongIterations; i++) {
      WriteMessageWithHandles(h, "", c, kPingPongHandlesPerIteration);
      ReadMessageWithHandles(h, c, kPingPongHandlesPerIteration);
    }
    WriteMessage(h, "quit", 4);
  });
  for (size_t i = 0; i < kPingPongHandlesPerIteration; ++i)
    MojoClose(c[i]);
}
// Same as the consumer variant above, but ping-pongs data pipe *producer*
// handles; the consumer side of each pipe is closed immediately.
// This test is flaky: http://crbug.com/585784
TEST_F(MessagePipeTest, DISABLED_DataPipeProducerHandlePingPong) {
  MojoHandle p[kPingPongHandlesPerIteration], c;
  for (size_t i = 0; i < kPingPongHandlesPerIteration; ++i) {
    EXPECT_EQ(MOJO_RESULT_OK, MojoCreateDataPipe(nullptr, &p[i], &c));
    MojoClose(c);
  }

  RunTestClient("HandlePingPong", [&](MojoHandle h) {
    for (size_t i = 0; i < kPingPongIterations; i++) {
      WriteMessageWithHandles(h, "", p, kPingPongHandlesPerIteration);
      ReadMessageWithHandles(h, p, kPingPongHandlesPerIteration);
    }
    WriteMessage(h, "quit", 4);
  });
  for (size_t i = 0; i < kPingPongHandlesPerIteration; ++i)
    MojoClose(p[i]);
}
// Ping-pongs shared buffer handles with a client process to stress handle
// transfer for the shared-buffer handle type.
TEST_F(MessagePipeTest, SharedBufferHandlePingPong) {
  MojoHandle buffers[kPingPongHandlesPerIteration];
  for (size_t i = 0; i < kPingPongHandlesPerIteration; ++i)
    EXPECT_EQ(MOJO_RESULT_OK, MojoCreateSharedBuffer(1, nullptr, &buffers[i]));

  RunTestClient("HandlePingPong", [&](MojoHandle h) {
    for (size_t i = 0; i < kPingPongIterations; i++) {
      WriteMessageWithHandles(h, "", buffers, kPingPongHandlesPerIteration);
      ReadMessageWithHandles(h, buffers, kPingPongHandlesPerIteration);
    }
    WriteMessage(h, "quit", 4);
  });
  for (size_t i = 0; i < kPingPongHandlesPerIteration; ++i)
    MojoClose(buffers[i]);
}
#endif // !defined(OS_IOS)
TEST_F(FuseMessagePipeTest, Basic) {
  // Test that we can fuse pipes and they still work.

  MojoHandle a, b, c, d;
  CreateMessagePipe(&a, &b);
  CreateMessagePipe(&c, &d);

  // Fusing b and c turns a<->b + c<->d into a single a<->d pipe.
  EXPECT_EQ(MOJO_RESULT_OK, MojoFuseMessagePipes(b, c, nullptr));

  // Handles b and c should be closed.
  EXPECT_EQ(MOJO_RESULT_INVALID_ARGUMENT, MojoClose(b));
  EXPECT_EQ(MOJO_RESULT_INVALID_ARGUMENT, MojoClose(c));

  // Messages must flow both directions across the fused pipe.
  const std::string kTestMessage1 = "Hello, world!";
  const std::string kTestMessage2 = "Goodbye, world!";

  WriteMessage(a, kTestMessage1);
  EXPECT_EQ(kTestMessage1, ReadMessage(d));

  WriteMessage(d, kTestMessage2);
  EXPECT_EQ(kTestMessage2, ReadMessage(a));

  EXPECT_EQ(MOJO_RESULT_OK, MojoClose(a));
  EXPECT_EQ(MOJO_RESULT_OK, MojoClose(d));
}
TEST_F(FuseMessagePipeTest, FuseAfterPeerWrite) {
  // Test that messages written before fusion are eventually delivered.

  MojoHandle a, b, c, d;
  CreateMessagePipe(&a, &b);
  CreateMessagePipe(&c, &d);

  // Queue one message on each side *before* the fuse.
  const std::string kTestMessage1 = "Hello, world!";
  const std::string kTestMessage2 = "Goodbye, world!";
  WriteMessage(a, kTestMessage1);
  WriteMessage(d, kTestMessage2);

  EXPECT_EQ(MOJO_RESULT_OK, MojoFuseMessagePipes(b, c, nullptr));

  // Handles b and c should be closed.
  EXPECT_EQ(MOJO_RESULT_INVALID_ARGUMENT, MojoClose(b));
  EXPECT_EQ(MOJO_RESULT_INVALID_ARGUMENT, MojoClose(c));

  EXPECT_EQ(kTestMessage1, ReadMessage(d));
  EXPECT_EQ(kTestMessage2, ReadMessage(a));

  EXPECT_EQ(MOJO_RESULT_OK, MojoClose(a));
  EXPECT_EQ(MOJO_RESULT_OK, MojoClose(d));
}
TEST_F(FuseMessagePipeTest, NoFuseAfterWrite) {
  // Test that a pipe endpoint which has been written to cannot be fused.

  MojoHandle a, b, c, d;
  CreateMessagePipe(&a, &b);
  CreateMessagePipe(&c, &d);

  // Writing on b (an endpoint being fused) poisons the fuse operation.
  WriteMessage(b, "shouldn't have done that!");
  EXPECT_EQ(MOJO_RESULT_FAILED_PRECONDITION,
            MojoFuseMessagePipes(b, c, nullptr));

  // Handles b and c should be closed (fusion consumes them even on failure).
  EXPECT_EQ(MOJO_RESULT_INVALID_ARGUMENT, MojoClose(b));
  EXPECT_EQ(MOJO_RESULT_INVALID_ARGUMENT, MojoClose(c));

  EXPECT_EQ(MOJO_RESULT_OK, MojoClose(a));
  EXPECT_EQ(MOJO_RESULT_OK, MojoClose(d));
}
TEST_F(FuseMessagePipeTest, NoFuseSelf) {
  // Test that a pipe's own endpoints can't be fused together.

  MojoHandle a, b;
  CreateMessagePipe(&a, &b);

  EXPECT_EQ(MOJO_RESULT_FAILED_PRECONDITION,
            MojoFuseMessagePipes(a, b, nullptr));

  // Handles a and b should be closed (fusion consumes them even on failure).
  EXPECT_EQ(MOJO_RESULT_INVALID_ARGUMENT, MojoClose(a));
  EXPECT_EQ(MOJO_RESULT_INVALID_ARGUMENT, MojoClose(b));
}
// Fusing with an already-closed handle or a non-message-pipe handle must
// fail with INVALID_ARGUMENT, and the surviving valid handle is consumed.
TEST_F(FuseMessagePipeTest, FuseInvalidArguments) {
  MojoHandle a, b, c, d;
  CreateMessagePipe(&a, &b);
  CreateMessagePipe(&c, &d);
  EXPECT_EQ(MOJO_RESULT_OK, MojoClose(b));

  // Can't fuse an invalid handle.
  EXPECT_EQ(MOJO_RESULT_INVALID_ARGUMENT, MojoFuseMessagePipes(b, c, nullptr));

  // Handle c should be closed.
  EXPECT_EQ(MOJO_RESULT_INVALID_ARGUMENT, MojoClose(c));

  // Can't fuse a non-message pipe handle.
  MojoHandle e, f;
  CreateDataPipe(&e, &f, 16);

  EXPECT_EQ(MOJO_RESULT_INVALID_ARGUMENT, MojoFuseMessagePipes(e, d, nullptr));

  // Handles d and e should be closed.
  EXPECT_EQ(MOJO_RESULT_INVALID_ARGUMENT, MojoClose(d));
  EXPECT_EQ(MOJO_RESULT_INVALID_ARGUMENT, MojoClose(e));

  EXPECT_EQ(MOJO_RESULT_OK, MojoClose(a));
  EXPECT_EQ(MOJO_RESULT_OK, MojoClose(f));
}
TEST_F(FuseMessagePipeTest, FuseAfterPeerClosure) {
  // Test that peer closure prior to fusion can still be detected after fusion.

  MojoHandle a, b, c, d;
  CreateMessagePipe(&a, &b);
  CreateMessagePipe(&c, &d);

  // Close a before fusing b and c; d must still learn its peer is gone.
  EXPECT_EQ(MOJO_RESULT_OK, MojoClose(a));
  EXPECT_EQ(MOJO_RESULT_OK, MojoFuseMessagePipes(b, c, nullptr));

  // Handles b and c should be closed.
  EXPECT_EQ(MOJO_RESULT_INVALID_ARGUMENT, MojoClose(b));
  EXPECT_EQ(MOJO_RESULT_INVALID_ARGUMENT, MojoClose(c));

  EXPECT_EQ(MOJO_RESULT_OK, WaitForSignals(d, MOJO_HANDLE_SIGNAL_PEER_CLOSED));
  EXPECT_EQ(MOJO_RESULT_OK, MojoClose(d));
}
TEST_F(FuseMessagePipeTest, FuseAfterPeerWriteAndClosure) {
  // Test that peer write and closure prior to fusion still results in the
  // both message arrival and awareness of peer closure.

  MojoHandle a, b, c, d;
  CreateMessagePipe(&a, &b);
  CreateMessagePipe(&c, &d);

  // Write on a, then close it, all before the fuse.
  const std::string kTestMessage = "ayyy lmao";
  WriteMessage(a, kTestMessage);
  EXPECT_EQ(MOJO_RESULT_OK, MojoClose(a));

  EXPECT_EQ(MOJO_RESULT_OK, MojoFuseMessagePipes(b, c, nullptr));

  // Handles b and c should be closed.
  EXPECT_EQ(MOJO_RESULT_INVALID_ARGUMENT, MojoClose(b));
  EXPECT_EQ(MOJO_RESULT_INVALID_ARGUMENT, MojoClose(c));

  // The queued message arrives first, then the peer-closed signal.
  EXPECT_EQ(kTestMessage, ReadMessage(d));
  EXPECT_EQ(MOJO_RESULT_OK, WaitForSignals(d, MOJO_HANDLE_SIGNAL_PEER_CLOSED));
  EXPECT_EQ(MOJO_RESULT_OK, MojoClose(d));
}
TEST_F(MessagePipeTest, ClosePipesStressTest) {
  // Stress test to exercise https://crbug.com/665869: rapid create/close
  // cycles of many pipes must not crash or race.
  const size_t kNumPipes = 100000;
  for (size_t i = 0; i < kNumPipes; ++i) {
    MojoHandle a, b;
    CreateMessagePipe(&a, &b);
    MojoClose(a);
    MojoClose(b);
  }
}
} // namespace
} // namespace core
} // namespace mojo
|
bsd-3-clause
|
JesseLivezey/plankton
|
pylearn2/scripts/plankton/gen_train.py
|
598
|
import os, sys
import subprocess

# Resize every training image to 48x48 (centered on a white background) with
# ImageMagick's `convert`, mirroring the class-subfolder layout of the input.
#
# Usage: python gen_train.py input_folder output_folder
#
# Fixes over the original:
#  - `except: pass` narrowed to OSError (directory already exists);
#  - the command is run via subprocess with an argument list, so file names
#    containing spaces or shell metacharacters no longer break (or inject);
#  - the os.chdir(output) call is gone: it silently broke the script whenever
#    the input/output folders were given as relative paths;
#  - print is a function call, so the script runs on both Python 2 and 3.

if len(sys.argv) < 3:
    print("Usage: python gen_train.py input_folder output_folder")
    exit(1)

fi = sys.argv[1]
fo = sys.argv[2]

if not os.path.exists(fo):
    os.makedirs(fo)

# ImageMagick options applied to every image.
convert_args = ["-resize", "48x48", "-gravity", "center",
                "-background", "white", "-extent", "48x48"]

for cls in os.listdir(fi):
    try:
        os.mkdir(os.path.join(fo, cls))
    except OSError:
        # Class directory already exists; any real I/O failure will surface
        # when convert tries to write into it.
        pass
    for img in os.listdir(os.path.join(fi, cls)):
        src = os.path.join(fi, cls, img)
        dst = os.path.join(fo, cls, img)
        subprocess.call(["convert"] + convert_args + [src, dst])
|
bsd-3-clause
|
vdt/SimpleCV
|
SimpleCV/Features/Features.py
|
68872
|
# SimpleCV Feature library
#
# Tools return basic features in feature sets
# # x = 0.00
# y = 0.00
# _mMaxX = None
# _mMaxY = None
# _mMinX = None
# _mMinY = None
# _mWidth = None
# _mHeight = None
# _mSrcImgW = None
# mSrcImgH = None
#load system libraries
from SimpleCV.base import *
from SimpleCV.Color import *
import copy
class FeatureSet(list):
"""
**SUMMARY**
FeatureSet is a class extended from Python's list which has special functions so that it is useful for handling feature metadata on an image.
In general, functions dealing with attributes will return numpy arrays, and functions dealing with sorting or filtering will return new FeatureSets.
**EXAMPLE**
>>> image = Image("/path/to/image.png")
>>> lines = image.findLines() #lines are the feature set
>>> lines.draw()
>>> lines.x()
>>> lines.crop()
"""
def __getitem__(self, key):
    """
    **SUMMARY**

    Returns a FeatureSet (rather than a plain list) when sliced, so that
    FeatureSet member functions remain available on sub-lists. Integer
    keys behave exactly like list indexing.
    """
    # isinstance(key, slice) is the idiomatic check and works on both
    # Python 2 and 3; the old `type(key) is types.SliceType` was a
    # Python-2-only spelling of the same test.
    if isinstance(key, slice):
        return FeatureSet(list.__getitem__(self, key))
    return list.__getitem__(self, key)
def __getslice__(self, i, j):
    """
    Deprecated since python 2.0, now using __getitem__
    """
    # Old-style slice hook; forwards to __getitem__ so that fs[i:j]
    # returns a FeatureSet instead of a plain list on Python 2.
    return self.__getitem__(slice(i,j))
def count(self):
    """
    **SUMMARY**

    Return the number of features held in this FeatureSet.
    """
    return len(self)
def draw(self, color = Color.GREEN, width=1, autocolor = False):
    """
    **SUMMARY**

    Render every feature in the set onto its source image by delegating
    to each feature's draw() method.

    **PARAMETERS**

    * *color* - BGR tuple or a :py:class:`Color` constant used for all features.
    * *width* - stroke width in pixels; -1 usually means a filled region.
    * *autocolor* - when True, a random color is chosen per feature instead.

    **RETURNS**

    Nothing; drawing is a side effect on the image layer.

    **EXAMPLE**

    >>> img = Image("lenna")
    >>> feats = img.findBlobs()
    >>> feats.draw(color=Color.PUCE, width=3)
    >>> img.show()
    """
    for feature in self:
        if autocolor:
            color = Color().getRandom()
        feature.draw(color=color, width=width)
def show(self, color = Color.GREEN, autocolor = False, width=1):
    """
    **SUMMARY**

    Convenience shortcut: draw all features onto the source image and then
    display that image. Equivalent to calling draw() followed by show()
    on the image of the last feature.

    **PARAMETERS**

    * *color* - BGR tuple or a :py:class:`Color` constant used for all features.
    * *autocolor* - when True, a random color is chosen per feature instead.
    * *width* - stroke width in pixels; -1 usually means a filled region.

    **RETURNS**

    Nothing; the image window is a side effect.

    **EXAMPLE**

    >>> img = Image("logo")
    >>> feat = img.findBlobs()
    >>> if feat: feat.show()
    """
    self.draw(color, width, autocolor)
    self[-1].image.show()
def reassignImage(self, newImg):
    """
    **SUMMARY**

    Build a new FeatureSet whose features are re-bound to a different image.

    **PARAMETERS**

    * *newImg* - the image the features should be attached to.

    .. Warning::
      No size check is performed; if the new image differs in size from the
      original, downstream operations will misbehave.

    **EXAMPLE**

    >>> img = Image("lenna")
    >>> img2 = img.invert()
    >>> l = img.findLines()
    >>> l2 = l.reassignImage(img2)
    >>> l2.show()
    """
    return FeatureSet([feature.reassign(newImg) for feature in self])
def x(self):
    """
    **SUMMARY**

    Collect the horizontal (x) coordinate of every feature.

    **RETURNS**

    A numpy array of x values, one per feature.

    **EXAMPLE**

    >>> img = Image("lenna")
    >>> feats = img.findBlobs()
    >>> print feats.x()
    """
    return np.array([feature.x for feature in self])
def y(self):
    """
    **SUMMARY**

    Collect the vertical (y) coordinate of every feature.

    **RETURNS**

    A numpy array of y values, one per feature.

    **EXAMPLE**

    >>> img = Image("lenna")
    >>> feats = img.findBlobs()
    >>> print feats.y()
    """
    return np.array([feature.y for feature in self])
def coordinates(self):
    """
    **SUMMARY**

    Collect the (x, y) position of every feature as an N x 2 numpy array —
    a convenient layout for scipy.spatial.distance routines.

    **RETURNS**

    An N x 2 numpy array of feature positions.

    **EXAMPLE**

    >>> img = Image("lenna")
    >>> feats = img.findBlobs()
    >>> print feats.coordinates()
    """
    return np.array([[feature.x, feature.y] for feature in self])
def center(self):
    # Alias for coordinates(): returns the N x 2 array of feature positions.
    return self.coordinates()
def area(self):
    """
    **SUMMARY**

    Collect the area, in pixels, of every feature.

    **RETURNS**

    A numpy array of per-feature areas.

    **EXAMPLE**

    >>> img = Image("lenna")
    >>> feats = img.findBlobs()
    >>> print feats.area()
    """
    return np.array([feature.area() for feature in self])
def sortArea(self):
    """
    **SUMMARY**

    Return a new FeatureSet ordered by ascending area, so the largest
    feature sits at index -1 and the smallest at index 0.

    **RETURNS**

    A FeatureSet sorted by area.

    **EXAMPLE**

    >>> img = Image("lenna")
    >>> feats = img.findBlobs().sortArea()
    >>> print feats[-1] # biggest blob
    >>> print feats[0]  # smallest blob
    """
    return FeatureSet(sorted(self, key=lambda feature: feature.area()))
def sortX(self):
    """
    **SUMMARY**

    Return a new FeatureSet ordered by ascending x coordinate (leftmost
    features first).

    **RETURNS**

    A FeatureSet sorted on x.

    **EXAMPLE**

    >>> img = Image("lenna")
    >>> feats = img.findBlobs().sortX()
    >>> print feats[0]  # leftmost blob
    """
    return FeatureSet(sorted(self, key=lambda feature: feature.x))
def sortY(self):
    """
    **SUMMARY**

    Return a new FeatureSet ordered by ascending y coordinate (topmost
    features first).

    **RETURNS**

    A FeatureSet sorted on y.

    **EXAMPLE**

    >>> img = Image("lenna")
    >>> feats = img.findBlobs().sortY()
    >>> print feats[0]  # topmost blob
    """
    return FeatureSet(sorted(self, key=lambda feature: feature.y))
def distanceFrom(self, point = (-1, -1)):
    """
    **SUMMARY**

    Return a numpy array of each feature's Euclidean distance from a given
    coordinate.

    **PARAMETERS**

    * *point* - (x, y) coordinate to measure from. Leaving it at the
      (-1, -1) sentinel takes the reference point from the first feature's
      source image (via image.size(), as in the original implementation —
      NOTE(review): despite the old docstring saying "center", this is the
      image size tuple; confirm before relying on it).

    **RETURNS**

    A numpy array of distance values.

    **EXAMPLE**

    >>> img = Image("lenna")
    >>> feats = img.findBlobs()
    >>> d = feats.distanceFrom()
    >>> d[0] #show the 0th blobs distance to the center.

    **TO DO**

    Make this accept other features to measure from.
    """
    # Bug fix: the original condition
    #   point[0] == -1 or point[1] == -1 and len(self)
    # parsed as `point[0] == -1 or (point[1] == -1 and len(self))` because
    # `and` binds tighter than `or`, so an empty set with the default point
    # raised IndexError on self[0]. Parentheses now guard the access.
    if (point[0] == -1 or point[1] == -1) and len(self):
        point = self[0].image.size()
    return spsd.cdist(self.coordinates(), [point])[:, 0]
def sortDistance(self, point = (-1, -1)):
    """
    **SUMMARY**

    Return a FeatureSet ordered by each feature's distance from the given
    coordinate, nearest first. Defaults mirror distanceFrom().

    **PARAMETERS**

    * *point* - (x, y) coordinate to measure from.

    **RETURNS**

    A sorted FeatureSet.

    **EXAMPLE**

    >>> img = Image("lenna")
    >>> d = img.findBlobs().sortDistance()
    >>> d[-1].show()  # feature farthest from the reference point
    """
    return FeatureSet(sorted(self, key=lambda feature: feature.distanceFrom(point)))
def distancePairs(self):
    """
    **SUMMARY**

    Compute the square-form matrix of pairwise distances between all
    features, handy for fast distance lookups between any two features.

    **RETURNS**

    An N x N numpy matrix of distances.

    **EXAMPLE**

    >>> img = Image("lenna")
    >>> print img.findBlobs().distancePairs()
    """
    pairwise = spsd.pdist(self.coordinates())
    return spsd.squareform(pairwise)
def angle(self):
    """
    **SUMMARY**

    Collect the angle (theta) of every feature, in degrees with 0 being
    horizontal.

    **RETURNS**

    A numpy array of per-feature angles.

    **EXAMPLE**

    >>> img = Image("lenna")
    >>> l = img.findLines()
    >>> print l.angle()
    """
    return np.array([feature.angle() for feature in self])
def sortAngle(self, theta = 0):
    """
    **SUMMARY**

    Return a FeatureSet ordered by how close each feature's angle is to
    theta, closest first.
    (NOTE(review): the original doc said radians here while angle() is
    documented in degrees — the comparison just uses whatever unit
    feature.angle() returns; confirm against the Feature class.)

    **RETURNS**

    A sorted FeatureSet.

    **EXAMPLE**

    >>> img = Image("lenna")
    >>> l = img.findLines().sortAngle()
    """
    return FeatureSet(sorted(self, key=lambda feature: abs(feature.angle() - theta)))
def length(self):
    """
    **SUMMARY**

    Collect the length (longest dimension) of every feature.

    **RETURNS**

    A numpy array of per-feature lengths, in pixels.

    **EXAMPLE**

    >>> img = Image("Lenna")
    >>> l = img.findLines()
    >>> l.length()[0]  # length of the 0th element
    """
    return np.array([feature.length() for feature in self])
def sortLength(self):
    """
    **SUMMARY**

    Return a FeatureSet ordered by ascending length, so the longest
    feature sits at index -1.

    **RETURNS**

    A sorted FeatureSet.

    **EXAMPLE**

    >>> img = Image("Lenna")
    >>> l = img.findLines().sortLength()
    >>> l[-1]  # longest line
    """
    return FeatureSet(sorted(self, key=lambda feature: feature.length()))
def meanColor(self):
    """
    **SUMMARY**

    Collect the average color of the area covered by each feature.

    **RETURNS**

    A numpy array of color triplets, one per feature.

    **EXAMPLE**

    >>> img = Image("lenna")
    >>> kp = img.findKeypoints()
    >>> c = kp.meanColor()
    """
    return np.array([feature.meanColor() for feature in self])
def colorDistance(self, color = (0, 0, 0)):
    """
    **SUMMARY**

    Measure how far each feature's average color lies from a reference
    color tuple. With the default of black, this is effectively an
    intensity measure.

    **PARAMETERS**

    * *color* - the reference color to measure against.

    **RETURNS**

    A numpy array of color distances, one per feature.

    **EXAMPLE**

    >>> img = Image("lenna")
    >>> circs = img.findCircle()
    >>> print circs.colorDistance(color=Color.BLUE)
    """
    return spsd.cdist(self.meanColor(), [color])[:, 0]
def sortColorDistance(self, color = (0, 0, 0)):
"""
Return a sorted FeatureSet with features closest to a given color first.
Default is black, so sortColorDistance() will return darkest to brightest
"""
return FeatureSet(sorted(self, key = lambda f: f.colorDistance(color)))
def filter(self, filterarray):
"""
**SUMMARY**
Return a FeatureSet which is filtered on a numpy boolean array. This
will let you use the attribute functions to easily screen Features out
of return FeatureSets.
**PARAMETERS**
* *filterarray* - A numpy array, matching the size of the feature set,
made of Boolean values, we return the true values and reject the False value.
**RETURNS**
The revised feature set.
**EXAMPLE**
Return all lines < 200px
>>> my_lines.filter(my_lines.length() < 200) # returns all lines < 200px
>>> my_blobs.filter(my_blobs.area() > 0.9 * my_blobs.length**2) # returns blobs that are nearly square
>>> my_lines.filter(abs(my_lines.angle()) < numpy.pi / 4) #any lines within 45 degrees of horizontal
>>> my_corners.filter(my_corners.x() - my_corners.y() > 0) #only return corners in the upper diagonal of the image
"""
return FeatureSet(list(np.array(self)[np.array(filterarray)]))
def width(self):
"""
**SUMMARY**
Returns a nparray which is the width of all the objects in the FeatureSet.
**RETURNS**
A numpy array of width values.
**EXAMPLE**
>>> img = Image("NotLenna")
>>> l = img.findLines()
>>> l.width()
"""
return np.array([f.width() for f in self])
def height(self):
"""
Returns a nparray which is the height of all the objects in the FeatureSet
**RETURNS**
A numpy array of width values.
**EXAMPLE**
>>> img = Image("NotLenna")
>>> l = img.findLines()
>>> l.height()
"""
return np.array([f.height() for f in self])
def crop(self):
"""
**SUMMARY**
Returns a nparray with the cropped features as SimpleCV image.
**RETURNS**
A SimpleCV image cropped to each image.
**EXAMPLE**
>>> img = Image("Lenna")
>>> blobs = img.findBlobs(128)
>>> for b in blobs:
>>> newImg = b.crop()
>>> newImg.show()
>>> time.sleep(1)
"""
return np.array([f.crop() for f in self])
def inside(self,region):
"""
**SUMMARY**
Return only the features inside the region. where region can be a bounding box,
bounding circle, a list of tuples in a closed polygon, or any other featutres.
**PARAMETERS**
* *region*
* A bounding box - of the form (x,y,w,h) where x,y is the upper left corner
* A bounding circle of the form (x,y,r)
* A list of x,y tuples defining a closed polygon e.g. ((x,y),(x,y),....)
* Any two dimensional feature (e.g. blobs, circle ...)
**RETURNS**
Returns a featureset of features that are inside the region.
**EXAMPLE**
>>> img = Image("Lenna")
>>> blobs = img.findBlobs()
>>> b = blobs[-1]
>>> lines = img.findLines()
>>> inside = lines.inside(b)
**NOTE**
This currently performs a bounding box test, not a full polygon test for speed.
"""
fs = FeatureSet()
for f in self:
if(f.isContainedWithin(region)):
fs.append(f)
return fs
def outside(self,region):
"""
**SUMMARY**
Return only the features outside the region. where region can be a bounding box,
bounding circle, a list of tuples in a closed polygon, or any other featutres.
**PARAMETERS**
* *region*
* A bounding box - of the form (x,y,w,h) where x,y is the upper left corner
* A bounding circle of the form (x,y,r)
* A list of x,y tuples defining a closed polygon e.g. ((x,y),(x,y),....)
* Any two dimensional feature (e.g. blobs, circle ...)
**RETURNS**
Returns a featureset of features that are outside the region.
**EXAMPLE**
>>> img = Image("Lenna")
>>> blobs = img.findBlobs()
>>> b = blobs[-1]
>>> lines = img.findLines()
>>> outside = lines.outside(b)
**NOTE**
This currently performs a bounding box test, not a full polygon test for speed.
"""
fs = FeatureSet()
for f in self:
if(f.isNotContainedWithin(region)):
fs.append(f)
return fs
def overlaps(self,region):
"""
**SUMMARY**
Return only the features that overlap or the region. Where region can be a bounding box,
bounding circle, a list of tuples in a closed polygon, or any other featutres.
**PARAMETERS**
* *region*
* A bounding box - of the form (x,y,w,h) where x,y is the upper left corner
* A bounding circle of the form (x,y,r)
* A list of x,y tuples defining a closed polygon e.g. ((x,y),(x,y),....)
* Any two dimensional feature (e.g. blobs, circle ...)
**RETURNS**
Returns a featureset of features that overlap the region.
**EXAMPLE**
>>> img = Image("Lenna")
>>> blobs = img.findBlobs()
>>> b = blobs[-1]
>>> lines = img.findLines()
>>> outside = lines.overlaps(b)
**NOTE**
This currently performs a bounding box test, not a full polygon test for speed.
"""
fs = FeatureSet()
for f in self:
if( f.overlaps(region) ):
fs.append(f)
return fs
def above(self,region):
"""
**SUMMARY**
Return only the features that are above a region. Where region can be a bounding box,
bounding circle, a list of tuples in a closed polygon, or any other featutres.
**PARAMETERS**
* *region*
* A bounding box - of the form (x,y,w,h) where x,y is the upper left corner
* A bounding circle of the form (x,y,r)
* A list of x,y tuples defining a closed polygon e.g. ((x,y),(x,y),....)
* Any two dimensional feature (e.g. blobs, circle ...)
**RETURNS**
Returns a featureset of features that are above the region.
**EXAMPLE**
>>> img = Image("Lenna")
>>> blobs = img.findBlobs()
>>> b = blobs[-1]
>>> lines = img.findLines()
>>> outside = lines.above(b)
**NOTE**
This currently performs a bounding box test, not a full polygon test for speed.
"""
fs = FeatureSet()
for f in self:
if(f.above(region)):
fs.append(f)
return fs
def below(self,region):
"""
**SUMMARY**
Return only the features below the region. where region can be a bounding box,
bounding circle, a list of tuples in a closed polygon, or any other featutres.
**PARAMETERS**
* *region*
* A bounding box - of the form (x,y,w,h) where x,y is the upper left corner
* A bounding circle of the form (x,y,r)
* A list of x,y tuples defining a closed polygon e.g. ((x,y),(x,y),....)
* Any two dimensional feature (e.g. blobs, circle ...)
**RETURNS**
Returns a featureset of features that are below the region.
**EXAMPLE**
>>> img = Image("Lenna")
>>> blobs = img.findBlobs()
>>> b = blobs[-1]
>>> lines = img.findLines()
>>> inside = lines.below(b)
**NOTE**
This currently performs a bounding box test, not a full polygon test for speed.
"""
fs = FeatureSet()
for f in self:
if(f.below(region)):
fs.append(f)
return fs
def left(self,region):
"""
**SUMMARY**
Return only the features left of the region. where region can be a bounding box,
bounding circle, a list of tuples in a closed polygon, or any other featutres.
**PARAMETERS**
* *region*
* A bounding box - of the form (x,y,w,h) where x,y is the upper left corner
* A bounding circle of the form (x,y,r)
* A list of x,y tuples defining a closed polygon e.g. ((x,y),(x,y),....)
* Any two dimensional feature (e.g. blobs, circle ...)
**RETURNS**
Returns a featureset of features that are left of the region.
**EXAMPLE**
>>> img = Image("Lenna")
>>> blobs = img.findBlobs()
>>> b = blobs[-1]
>>> lines = img.findLines()
>>> left = lines.left(b)
**NOTE**
This currently performs a bounding box test, not a full polygon test for speed.
"""
fs = FeatureSet()
for f in self:
if(f.left(region)):
fs.append(f)
return fs
def right(self,region):
"""
**SUMMARY**
Return only the features right of the region. where region can be a bounding box,
bounding circle, a list of tuples in a closed polygon, or any other featutres.
**PARAMETERS**
* *region*
* A bounding box - of the form (x,y,w,h) where x,y is the upper left corner
* A bounding circle of the form (x,y,r)
* A list of x,y tuples defining a closed polygon e.g. ((x,y),(x,y),....)
* Any two dimensional feature (e.g. blobs, circle ...)
**RETURNS**
Returns a featureset of features that are right of the region.
**EXAMPLE**
>>> img = Image("Lenna")
>>> blobs = img.findBlobs()
>>> b = blobs[-1]
>>> lines = img.findLines()
>>> right = lines.right(b)
**NOTE**
This currently performs a bounding box test, not a full polygon test for speed.
"""
fs = FeatureSet()
for f in self:
if(f.right(region)):
fs.append(f)
return fs
def onImageEdge(self, tolerance=1):
"""
**SUMMARY**
The method returns a feature set of features that are on or "near" the edge of
the image. This is really helpful for removing features that are edge effects.
**PARAMETERS**
* *tolerance* - the distance in pixels from the edge at which a feature
qualifies as being "on" the edge of the image.
**RETURNS**
Returns a featureset of features that are on the edge of the image.
**EXAMPLE**
>>> img = Image("./sampleimages/EdgeTest1.png")
>>> blobs = img.findBlobs()
>>> es = blobs.onImageEdge()
>>> es.draw(color=Color.RED)
>>> img.show()
"""
fs = FeatureSet()
for f in self:
if(f.onImageEdge(tolerance)):
fs.append(f)
return fs
def notOnImageEdge(self, tolerance=1):
"""
**SUMMARY**
The method returns a feature set of features that are not on or "near" the edge of
the image. This is really helpful for removing features that are edge effects.
**PARAMETERS**
* *tolerance* - the distance in pixels from the edge at which a feature
qualifies as being "on" the edge of the image.
**RETURNS**
Returns a featureset of features that are not on the edge of the image.
**EXAMPLE**
>>> img = Image("./sampleimages/EdgeTest1.png")
>>> blobs = img.findBlobs()
>>> es = blobs.notOnImageEdge()
>>> es.draw(color=Color.RED)
>>> img.show()
"""
fs = FeatureSet()
for f in self:
if(f.notOnImageEdge(tolerance)):
fs.append(f)
return fs
    def topLeftCorners(self):
        """
        **SUMMARY**
        This method returns the top left corner of each feature's bounding box.
        **RETURNS**
        A numpy array of x,y position values.
        **EXAMPLE**
        >>> img = Image("./sampleimages/EdgeTest1.png")
        >>> blobs = img.findBlobs()
        >>> tl = blobs.topLeftCorners()
        >>> print tl[0]
        """
        return np.array([f.topLeftCorner() for f in self])
    def bottomLeftCorners(self):
        """
        **SUMMARY**
        This method returns the bottom left corner of each feature's bounding box.
        **RETURNS**
        A numpy array of x,y position values.
        **EXAMPLE**
        >>> img = Image("./sampleimages/EdgeTest1.png")
        >>> blobs = img.findBlobs()
        >>> bl = blobs.bottomLeftCorners()
        >>> print bl[0]
        """
        return np.array([f.bottomLeftCorner() for f in self])
    # NOTE(review): duplicate definition -- this second topLeftCorners is
    # identical to the one above and shadows it; consider removing one copy.
    def topLeftCorners(self):
        """
        **SUMMARY**
        This method returns the top left corner of each feature's bounding box.
        **RETURNS**
        A numpy array of x,y position values.
        **EXAMPLE**
        >>> img = Image("./sampleimages/EdgeTest1.png")
        >>> blobs = img.findBlobs()
        >>> tl = blobs.topLeftCorners()
        >>> print tl[0]
        """
        return np.array([f.topLeftCorner() for f in self])
    def topRightCorners(self):
        """
        **SUMMARY**
        This method returns the top right corner of each feature's bounding box.
        **RETURNS**
        A numpy array of x,y position values.
        **EXAMPLE**
        >>> img = Image("./sampleimages/EdgeTest1.png")
        >>> blobs = img.findBlobs()
        >>> tr = blobs.topRightCorners()
        >>> print tr[0]
        """
        return np.array([f.topRightCorner() for f in self])
    def bottomRightCorners(self):
        """
        **SUMMARY**
        This method returns the bottom right corner of each feature's bounding box.
        **RETURNS**
        A numpy array of x,y position values.
        **EXAMPLE**
        >>> img = Image("./sampleimages/EdgeTest1.png")
        >>> blobs = img.findBlobs()
        >>> br = blobs.bottomRightCorners()
        >>> print br[0]
        """
        return np.array([f.bottomRightCorner() for f in self])
def aspectRatios(self):
"""
**SUMMARY**
Return the aspect ratio of all the features in the feature set, For our purposes
aspect ration is max(width,height)/min(width,height).
**RETURNS**
A numpy array of the aspect ratio of the features in the featureset.
**EXAMPLE**
>>> img = Image("OWS.jpg")
>>> blobs = img.findBlobs(128)
>>> print blobs.aspectRatio()
"""
return np.array([f.aspectRatio() for f in self])
    def cluster(self,method="kmeans",properties=None,k=3):
        """
        **SUMMARY**
        This function clusters the blobs in the featureSet based on the properties. Properties can be "color", "shape" or "position" of blobs.
        Clustering is done using K-Means or Hierarchical clustering(Ward) algorithm.
        **PARAMETERS**
        * *properties* - It should be a list with any combination of "color", "shape", "position". properties = ["color","position"]. properties = ["position","shape"]. properties = ["shape"]
        * *method* - if method is "kmeans", it will cluster using K-Means algorithm, if the method is "hierarchical", no need to specify the number of clusters
        * *k* - The number of clusters(kmeans).
        **RETURNS**
        A list of featureset, each being a cluster itself.
        **EXAMPLE**
        >>> img = Image("lenna")
        >>> blobs = img.findBlobs()
        >>> clusters = blobs.cluster(method="kmeans",properties=["color"],k=5)
        >>> for i in clusters:
        >>>     i.draw(color=Color.getRandom(),width=5)
        >>> img.show()
        """
        try :
            # NOTE(review): sklearn removed the Ward class in 0.19+ (replaced
            # by AgglomerativeClustering), so this import fails on modern
            # versions -- TODO confirm the supported sklearn range.
            from sklearn.cluster import KMeans, Ward
            from sklearn import __version__
        except :
            logger.warning("install scikits-learning package")
            return
        X = [] #List of feature vector of each blob
        if not properties:
            # default: use every supported property
            properties = ['color','shape','position']
        if k > len(self):
            # cannot produce more clusters than there are samples
            logger.warning("Number of clusters cannot be greater then the number of blobs in the featureset")
            return
        for i in self:
            featureVector = []
            if 'color' in properties:
                featureVector.extend(i.mAvgColor)
            if 'shape' in properties:
                # Hu moments characterize the blob shape
                featureVector.extend(i.mHu)
            if 'position' in properties:
                featureVector.extend(i.extents())
            if not featureVector :
                logger.warning("properties parameter is not specified properly")
                return
            X.append(featureVector)
        if method == "kmeans":
            # Ignore minor version numbers.
            sklearn_version = re.search(r'\d+\.\d+', __version__).group()
            # the 'k' keyword was renamed to 'n_clusters' in sklearn 0.12
            if (float(sklearn_version) > 0.11):
                k_means = KMeans(init='random', n_clusters=k, n_init=10).fit(X)
            else:
                k_means = KMeans(init='random', k=k, n_init=10).fit(X)
            KClusters = [ FeatureSet([]) for i in range(k)]
            for i in range(len(self)):
                KClusters[k_means.labels_[i]].append(self[i])
            return KClusters
        if method == "hierarchical":
            ward = Ward(n_clusters=int(sqrt(len(self)))).fit(X) #n_clusters = sqrt(n)
            WClusters = [ FeatureSet([]) for i in range(int(sqrt(len(self))))]
            for i in range(len(self)):
                WClusters[ward.labels_[i]].append(self[i])
            return WClusters
    @property
    def image(self):
        """
        The source image this FeatureSet was derived from, taken from the
        first feature in the set; None when the set is empty.
        """
        if not len(self):
            return None
        return self[0].image
    @image.setter
    def image(self, i):
        # reassign the parent image of every feature in the set
        for f in self:
            f.image = i
### ----------------------------------------------------------------------------
### ----------------------------------------------------------------------------
### ----------------------------FEATURE CLASS-----------------------------------
### ----------------------------------------------------------------------------
### ----------------------------------------------------------------------------
class Feature(object):
    """
    **SUMMARY**
    The Feature object is an abstract class which real features descend from.
    Each feature object has:
    * a draw() method,
    * an image property, referencing the originating Image object
    * x and y coordinates
    * default functions for determining angle, area, meanColor, etc for FeatureSets
    * in the Feature class, these functions assume the feature is 1px
    """
    x = 0.00  # center x coordinate of the feature
    y = 0.00  # center y coordinate of the feature
    # cached bounding-box extents, populated by _updateExtents()
    _mMaxX = None
    _mMaxY = None
    _mMinX = None
    _mMinY = None
    _mWidth = None   # cached bounding-box width
    _mHeight = None  # cached bounding-box height
    # NOTE(review): never assigned in this chunk -- presumably source image
    # dimensions; confirm their use elsewhere in the file.
    _mSrcImgW = None
    _mSrcImgH = None
    # This is 2.0 refactoring
    mBoundingBox = None # THIS SHALT BE TOP LEFT (X,Y) THEN W H i.e. [X,Y,W,H]
    mExtents = None # THIS SHALT BE [MAXX,MINX,MAXY,MINY]
    points = None # THIS SHALT BE (x,y) tuples in the ORDER [(TopLeft),(TopRight),(BottomLeft),(BottomRight)]
    image = "" #parent image
    #points = []
    #boundingBox = []
    def __init__(self, i, at_x, at_y, points):
        """
        Create a feature on image ``i`` centered at (at_x, at_y).
        ``points`` are the bounding corner (x,y) tuples, in the order
        [(TopLeft), (TopRight), (BottomLeft), (BottomRight)]; all cached
        extents are derived from them.
        """
        #THE COVENANT IS THAT YOU PROVIDE THE POINTS IN THE SPECIFIED FORMAT AND ALL OTHER VALUES SHALT FLOW
        self.x = at_x
        self.y = at_y
        self.image = i
        self.points = points
        self._updateExtents(new_feature=True)
    def reassign(self, img):
        """
        **SUMMARY**
        Reassign the image of this feature and return an updated copy of the feature.
        **PARAMETERS**
        * *img* - the new image to which to assign the feature.
        .. Warning::
          THIS DOES NOT PERFORM A SIZE CHECK. IF YOUR NEW IMAGE IS NOT THE EXACT SAME SIZE YOU WILL CERTAINLY CAUSE ERRORS.
        **EXAMPLE**
        >>> img = Image("lenna")
        >>> img2 = img.invert()
        >>> l = img.findLines()
        >>> l2 = l[-1].reassign(img2)
        """
        retVal = copy.deepcopy(self)
        # warn (but still proceed) when the new image size does not match
        if( self.image.width != img.width or
            self.image.height != img.height ):
            warnings.warn("DON'T REASSIGN IMAGES OF DIFFERENT SIZES")
        retVal.image = img
        return retVal
    def corners(self):
        """
        Return the bounding corner (x,y) tuples of the feature, refreshing
        the cached extents first.
        """
        self._updateExtents()
        return self.points
def coordinates(self):
"""
**SUMMARY**
Returns the x,y position of the feature. This is usually the center coordinate.
**RETURNS**
Returns an (x,y) tuple of the position of the feature.
**EXAMPLE**
>>> img = Image("aerospace.png")
>>> blobs = img.findBlobs()
>>> for b in blobs:
>>> print b.coordinates()
"""
return np.array([self.x, self.y])
    def draw(self, color = Color.GREEN):
        """
        **SUMMARY**
        This method will draw the feature on the source image. The base
        implementation paints the single pixel at (x, y); subclasses are
        expected to override it.
        **PARAMETERS**
        * *color* - The color as an RGB tuple to render the image.
        **RETURNS**
        Nothing.
        **EXAMPLE**
        >>> img = Image("RedDog2.jpg")
        >>> blobs = img.findBlobs()
        >>> blobs[-1].draw()
        >>> img.show()
        """
        self.image[self.x, self.y] = color
    def show(self, color = Color.GREEN):
        """
        **SUMMARY**
        This function will automatically draw the feature on the image and show it.
        **PARAMETERS**
        * *color* - the RGB color tuple used to render the feature.
        **RETURNS**
        Nothing.
        **EXAMPLE**
        >>> img = Image("logo")
        >>> feat = img.findBlobs()
        >>> feat[-1].show() #window pops up.
        """
        self.draw(color)
        self.image.show()
def distanceFrom(self, point = (-1, -1)):
"""
**SUMMARY**
Given a point (default to center of the image), return the euclidean distance of x,y from this point.
**PARAMETERS**
* *point* - The point, as an (x,y) tuple on the image to measure distance from.
**RETURNS**
The distance as a floating point value in pixels.
**EXAMPLE**
>>> img = Image("OWS.jpg")
>>> blobs = img.findBlobs(128)
>>> blobs[-1].distanceFrom(blobs[-2].coordinates())
"""
if (point[0] == -1 or point[1] == -1):
point = np.array(self.image.size()) / 2
return spsd.euclidean(point, [self.x, self.y])
    def meanColor(self):
        """
        **SUMMARY**
        Return the average color within the feature as a tuple. The base
        implementation samples the single pixel at (x, y); subclasses
        covering an area are expected to override it.
        **RETURNS**
        An RGB color tuple.
        **EXAMPLE**
        >>> img = Image("OWS.jpg")
        >>> blobs = img.findBlobs(128)
        >>> for b in blobs:
        >>>     if (b.meanColor() == color.WHITE):
        >>>         print "Found a white thing"
        """
        return self.image[self.x, self.y]
def colorDistance(self, color = (0, 0, 0)):
"""
**SUMMARY**
Return the euclidean color distance of the color tuple at x,y from a given color (default black).
**PARAMETERS**
* *color* - An RGB triplet to calculate from which to calculate the color distance.
**RETURNS**
A floating point color distance value.
**EXAMPLE**
>>> img = Image("OWS.jpg")
>>> blobs = img.findBlobs(128)
>>> for b in blobs:
>>> print b.colorDistance(color.WHITE):
"""
return spsd.euclidean(np.array(color), np.array(self.meanColor()))
    def angle(self):
        """
        **SUMMARY**
        Return the angle (theta) in degrees of the feature. The default is 0 (horizontal);
        the base class always returns 0 and subclasses with orientation
        override it.
        .. Warning::
          This is not a valid operation for all features.
        **RETURNS**
        An angle value in degrees.
        **EXAMPLE**
        >>> img = Image("OWS.jpg")
        >>> blobs = img.findBlobs(128)
        >>> for b in blobs:
        >>>     if b.angle() == 0:
        >>>         print "I AM HORIZONTAL."
        **TODO**
        Double check that values are being returned consistently.
        """
        return 0
def length(self):
"""
**SUMMARY**
This method returns the longest dimension of the feature (i.e max(width,height)).
**RETURNS**
A floating point length value.
**EXAMPLE**
>>> img = Image("OWS.jpg")
>>> blobs = img.findBlobs(128)
>>> for b in blobs:
>>> if b.length() > 200:
>>> print "OH MY! - WHAT A BIG FEATURE YOU HAVE!"
>>> print "---I bet you say that to all the features."
**TODO**
Should this be sqrt(x*x+y*y)?
"""
return float(np.max([self.width(),self.height()]))
def distanceToNearestEdge(self):
"""
**SUMMARY**
This method returns the distance, in pixels, from the nearest image edge.
**RETURNS**
The integer distance to the nearest edge.
**EXAMPLE**
>>> img = Image("../sampleimages/EdgeTest1.png")
>>> b = img.findBlobs()
>>> b[0].distanceToNearestEdge()
"""
w = self.image.width
h = self.image.height
return np.min([self._mMinX,self._mMinY, w-self._mMaxX,h-self._mMaxY])
def onImageEdge(self,tolerance=1):
"""
**SUMMARY**
This method returns True if the feature is less than `tolerance`
pixels away from the nearest edge.
**PARAMETERS**
* *tolerance* - the distance in pixels at which a feature qualifies
as being on the image edge.
**RETURNS**
True if the feature is on the edge, False otherwise.
**EXAMPLE**
>>> img = Image("../sampleimages/EdgeTest1.png")
>>> b = img.findBlobs()
>>> if(b[0].onImageEdge()):
>>> print "HELP! I AM ABOUT TO FALL OFF THE IMAGE"
"""
# this has to be one to deal with blob library weirdness that goes deep down to opencv
return ( self.distanceToNearestEdge() <= tolerance )
def notOnImageEdge(self,tolerance=1):
"""
**SUMMARY**
This method returns True if the feature is greate than `tolerance`
pixels away from the nearest edge.
**PARAMETERS**
* *tolerance* - the distance in pixels at which a feature qualifies
as not being on the image edge.
**RETURNS**
True if the feature is not on the edge of the image, False otherwise.
**EXAMPLE**
>>> img = Image("../sampleimages/EdgeTest1.png")
>>> b = img.findBlobs()
>>> if(b[0].notOnImageEdge()):
>>> print "I am safe and sound."
"""
# this has to be one to deal with blob library weirdness that goes deep down to opencv
return ( self.distanceToNearestEdge() > tolerance )
    def aspectRatio(self):
        """
        **SUMMARY**
        Return the aspect ratio of the feature, which for our purposes
        is max(width,height)/min(width,height).
        **RETURNS**
        A single floating point value of the aspect ratio.
        **EXAMPLE**
        >>> img = Image("OWS.jpg")
        >>> blobs = img.findBlobs(128)
        >>> b[0].aspectRatio()
        """
        self._updateExtents()
        return self.mAspectRatio
def area(self):
"""
**SUMMARY**
Returns the area (number of pixels) covered by the feature.
**RETURNS**
An integer area of the feature.
**EXAMPLE**
>>> img = Image("OWS.jpg")
>>> blobs = img.findBlobs(128)
>>> for b in blobs:
>>> if b.area() > 200:
>>> print b.area()
"""
return self.width() * self.height()
    def width(self):
        """
        **SUMMARY**
        Returns the width of the feature's bounding box.
        **RETURNS**
        An integer value for the feature's width.
        **EXAMPLE**
        >>> img = Image("OWS.jpg")
        >>> blobs = img.findBlobs(128)
        >>> for b in blobs:
        >>>     if b.width() > b.height():
        >>>         print "wider than tall"
        >>>         b.draw()
        >>> img.show()
        """
        self._updateExtents()
        return self._mWidth
    def height(self):
        """
        **SUMMARY**
        Returns the height of the feature's bounding box.
        **RETURNS**
        An integer value of the feature's height.
        **EXAMPLE**
        >>> img = Image("OWS.jpg")
        >>> blobs = img.findBlobs(128)
        >>> for b in blobs:
        >>>     if b.width() > b.height():
        >>>         print "wider than tall"
        >>>         b.draw()
        >>> img.show()
        """
        self._updateExtents()
        return self._mHeight
    def crop(self):
        """
        **SUMMARY**
        This function crops the source image to the location of the feature and returns
        a new SimpleCV image. The crop is centered on the feature's (x, y).
        **RETURNS**
        A SimpleCV image that is cropped to the feature position and size.
        **EXAMPLE**
        >>> img = Image("OWS.jpg")
        >>> blobs = img.findBlobs(128)
        >>> big = blobs[-1].crop()
        >>> big.show()
        """
        return self.image.crop(self.x, self.y, self.width(), self.height(), centered = True)
def __repr__(self):
return "%s.%s at (%d,%d)" % (self.__class__.__module__, self.__class__.__name__, self.x, self.y)
def _updateExtents(self, new_feature=False):
# mBoundingBox = None # THIS SHALT BE TOP LEFT (X,Y) THEN W H i.e. [X,Y,W,H]
# mExtents = None # THIS SHALT BE [MAXX,MINX,MAXY,MINY]
# points = None # THIS SHALT BE (x,y) tuples in the ORDER [(TopLeft),(TopRight),(BottomLeft),(BottomRight)]
max_x = self._mMaxX
min_x = self._mMinX
max_y = self._mMaxY
min_y = self._mMinY
width = self._mWidth
height = self._mHeight
extents = self.mExtents
bounding_box = self.mBoundingBox
#if new_feature or None in [self._mMaxX, self._mMinX, self._mMaxY, self._mMinY,
# self._mWidth, self._mHeight, self.mExtents, self.mBoundingBox]:
if new_feature or None in [max_x, min_x, max_y, min_y, width, height, extents, bounding_box]:
max_x = max_y = float("-infinity")
min_x = min_y = float("infinity")
for p in self.points:
if (p[0] > max_x):
max_x = p[0]
if (p[0] < min_x):
min_x = p[0]
if (p[1] > max_y):
max_y = p[1]
if (p[1] < min_y):
min_y = p[1]
width = max_x - min_x
height = max_y - min_y
if (width <= 0):
width = 1
if (height <= 0):
height = 1
self.mBoundingBox = [min_x, min_y, width, height]
self.mExtents = [max_x, min_x, max_y, min_y]
if width > height:
self.mAspectRatio = float(width/height)
else:
self.mAspectRatio = float(height/width)
self._mMaxX = max_x
self._mMinX = min_x
self._mMaxY = max_y
self._mMinY = min_y
self._mWidth = width
self._mHeight = height
    def boundingBox(self):
        """
        **SUMMARY**
        This function returns a rectangle which bounds the blob.
        **RETURNS**
        A list of [x, y, w, h] where (x, y) are the top left point of the rectangle
        and w, h are its width and height respectively.
        **EXAMPLE**
        >>> img = Image("OWS.jpg")
        >>> blobs = img.findBlobs(128)
        >>> print blobs[-1].boundingBox()
        """
        self._updateExtents()
        return self.mBoundingBox
    def extents(self):
        """
        **SUMMARY**
        This function returns the maximum and minimum x and y values for the feature and
        returns them as a tuple.
        **RETURNS**
        The extents of the feature in the order [maxX, minX, maxY, minY],
        matching the layout of mExtents set in _updateExtents().
        **EXAMPLE**
        >>> img = Image("OWS.jpg")
        >>> blobs = img.findBlobs(128)
        >>> print blobs[-1].extents()
        """
        self._updateExtents()
        return self.mExtents
    def minY(self):
        """
        **SUMMARY**
        This method returns the minimum y value of the bounding box of
        the feature.
        **RETURNS**
        An integer value of the minimum y value of the feature.
        **EXAMPLE**
        >>> img = Image("OWS.jpg")
        >>> blobs = img.findBlobs(128)
        >>> print blobs[-1].minY()
        """
        self._updateExtents()
        return self._mMinY
    def maxY(self):
        """
        **SUMMARY**
        This method returns the maximum y value of the bounding box of
        the feature.
        **RETURNS**
        An integer value of the maximum y value of the feature.
        **EXAMPLE**
        >>> img = Image("OWS.jpg")
        >>> blobs = img.findBlobs(128)
        >>> print blobs[-1].maxY()
        """
        self._updateExtents()
        return self._mMaxY
    def minX(self):
        """
        **SUMMARY**
        This method returns the minimum x value of the bounding box of
        the feature.
        **RETURNS**
        An integer value of the minimum x value of the feature.
        **EXAMPLE**
        >>> img = Image("OWS.jpg")
        >>> blobs = img.findBlobs(128)
        >>> print blobs[-1].minX()
        """
        self._updateExtents()
        return self._mMinX
    def maxX(self):
        """
        **SUMMARY**
        This method returns the maximum x value of the bounding box of
        the feature.
        **RETURNS**
        An integer value of the maximum x value of the feature.
        **EXAMPLE**
        >>> img = Image("OWS.jpg")
        >>> blobs = img.findBlobs(128)
        >>> print blobs[-1].maxX()
        """
        self._updateExtents()
        return self._mMaxX
    def topLeftCorner(self):
        """
        **SUMMARY**
        This method returns the top left corner of the bounding box of
        the blob as an (x,y) tuple.
        **RESULT**
        Returns a tuple of the top left corner.
        **EXAMPLE**
        >>> img = Image("OWS.jpg")
        >>> blobs = img.findBlobs(128)
        >>> print blobs[-1].topLeftCorner()
        """
        self._updateExtents()
        return (self._mMinX,self._mMinY)
    def bottomRightCorner(self):
        """
        **SUMMARY**
        This method returns the bottom right corner of the bounding box of
        the blob as an (x,y) tuple.
        **RESULT**
        Returns a tuple of the bottom right corner.
        **EXAMPLE**
        >>> img = Image("OWS.jpg")
        >>> blobs = img.findBlobs(128)
        >>> print blobs[-1].bottomRightCorner()
        """
        self._updateExtents()
        return (self._mMaxX,self._mMaxY)
    def bottomLeftCorner(self):
        """
        **SUMMARY**
        This method returns the bottom left corner of the bounding box of
        the blob as an (x,y) tuple.
        **RESULT**
        Returns a tuple of the bottom left corner.
        **EXAMPLE**
        >>> img = Image("OWS.jpg")
        >>> blobs = img.findBlobs(128)
        >>> print blobs[-1].bottomLeftCorner()
        """
        self._updateExtents()
        return (self._mMinX,self._mMaxY)
    def topRightCorner(self):
        """
        **SUMMARY**
        This method returns the top right corner of the bounding box of
        the blob as an (x,y) tuple.
        **RESULT**
        Returns a tuple of the top right corner.
        **EXAMPLE**
        >>> img = Image("OWS.jpg")
        >>> blobs = img.findBlobs(128)
        >>> print blobs[-1].topRightCorner()
        """
        self._updateExtents()
        return (self._mMaxX,self._mMinY)
def above(self,object):
"""
**SUMMARY**
Return true if the feature is above the object, where object can be a bounding box,
bounding circle, a list of tuples in a closed polygon, or any other featutres.
**PARAMETERS**
* *object*
* A bounding box - of the form (x,y,w,h) where x,y is the upper left corner
* A bounding circle of the form (x,y,r)
* A list of x,y tuples defining a closed polygon e.g. ((x,y),(x,y),....)
* Any two dimensional feature (e.g. blobs, circle ...)
**RETURNS**
Returns a Boolean, True if the feature is above the object, False otherwise.
**EXAMPLE**
>>> img = Image("Lenna")
>>> blobs = img.findBlobs()
>>> b = blobs[0]
>>> if( blobs[-1].above(b) ):
>>> print "above the biggest blob"
"""
if( isinstance(object,Feature) ):
return( self.maxY() < object.minY() )
elif( isinstance(object,tuple) or isinstance(object,np.ndarray) ):
return( self.maxY() < object[1] )
elif( isinstance(object,float) or isinstance(object,int) ):
return( self.maxY() < object )
else:
logger.warning("SimpleCV did not recognize the input type to feature.above(). This method only takes another feature, an (x,y) tuple, or a ndarray type.")
return None
def below(self,object):
"""
**SUMMARY**
Return true if the feature is below the object, where object can be a bounding box,
bounding circle, a list of tuples in a closed polygon, or any other featutres.
**PARAMETERS**
* *object*
* A bounding box - of the form (x,y,w,h) where x,y is the upper left corner
* A bounding circle of the form (x,y,r)
* A list of x,y tuples defining a closed polygon e.g. ((x,y),(x,y),....)
* Any two dimensional feature (e.g. blobs, circle ...)
**RETURNS**
Returns a Boolean, True if the feature is below the object, False otherwise.
**EXAMPLE**
>>> img = Image("Lenna")
>>> blobs = img.findBlobs()
>>> b = blobs[0]
>>> if( blobs[-1].below(b) ):
>>> print "above the biggest blob"
"""
if( isinstance(object,Feature) ):
return( self.minY() > object.maxY() )
elif( isinstance(object,tuple) or isinstance(object,np.ndarray) ):
return( self.minY() > object[1] )
elif( isinstance(object,float) or isinstance(object,int) ):
return( self.minY() > object )
else:
logger.warning("SimpleCV did not recognize the input type to feature.below(). This method only takes another feature, an (x,y) tuple, or a ndarray type.")
return None
def right(self,object):
"""
**SUMMARY**
Return true if the feature is to the right object, where object can be a bounding box,
bounding circle, a list of tuples in a closed polygon, or any other featutres.
**PARAMETERS**
* *object*
* A bounding box - of the form (x,y,w,h) where x,y is the upper left corner
* A bounding circle of the form (x,y,r)
* A list of x,y tuples defining a closed polygon e.g. ((x,y),(x,y),....)
* Any two dimensional feature (e.g. blobs, circle ...)
**RETURNS**
Returns a Boolean, True if the feature is to the right object, False otherwise.
**EXAMPLE**
>>> img = Image("Lenna")
>>> blobs = img.findBlobs()
>>> b = blobs[0]
>>> if( blobs[-1].right(b) ):
>>> print "right of the the blob"
"""
if( isinstance(object,Feature) ):
return( self.minX() > object.maxX() )
elif( isinstance(object,tuple) or isinstance(object,np.ndarray) ):
return( self.minX() > object[0] )
elif( isinstance(object,float) or isinstance(object,int) ):
return( self.minX() > object )
else:
logger.warning("SimpleCV did not recognize the input type to feature.right(). This method only takes another feature, an (x,y) tuple, or a ndarray type.")
return None
def left(self,object):
    """
    **SUMMARY**

    Return True if this feature lies entirely to the left of the given
    object, where object can be a bounding box, bounding circle, a list of
    tuples in a closed polygon, or any other feature.

    **PARAMETERS**

    * *object*

      * A bounding box - of the form (x,y,w,h) where x,y is the upper left corner
      * A bounding circle of the form (x,y,r)
      * A list of x,y tuples defining a closed polygon e.g. ((x,y),(x,y),....)
      * Any two dimensional feature (e.g. blobs, circle ...)

    **RETURNS**

    A Boolean: True if the feature is to the left of the object, False
    otherwise; None when the input type is not recognized.

    **EXAMPLE**

    >>> img = Image("Lenna")
    >>> blobs = img.findBlobs()
    >>> b = blobs[0]
    >>> if( blobs[-1].left(b) ):
    >>>    print "left of the biggest blob"
    """
    # "To the left of" means this feature's rightmost extent is before the
    # target's relevant x value, for every supported target type.
    if isinstance(object, Feature):
        return self.maxX() < object.minX()
    if isinstance(object, (tuple, np.ndarray)):
        return self.maxX() < object[0]
    if isinstance(object, (float, int)):
        return self.maxX() < object
    logger.warning("SimpleCV did not recognize the input type to feature.left(). This method only takes another feature, an (x,y) tuple, or a ndarray type.")
    return None
def contains(self,other):
    """
    **SUMMARY**

    Return true if the feature contains the object, where object can be a bounding box,
    bounding circle, a list of tuples in a closed polygon, or any other feature.

    **PARAMETERS**

    * *other*

      * A bounding box - of the form (x,y,w,h) where x,y is the upper left corner
      * A bounding circle of the form (x,y,r)
      * A list of x,y tuples defining a closed polygon e.g. ((x,y),(x,y),....)
      * Any two dimensional feature (e.g. blobs, circle ...)

    **RETURNS**

    Returns a Boolean, True if the feature contains the object, False otherwise
    (and False, with a warning, for unrecognized input types).

    **EXAMPLE**

    >>> img = Image("Lenna")
    >>> blobs = img.findBlobs()
    >>> b = blobs[0]
    >>> if( blobs[-1].contains(b) ):
    >>>    print "this blob is contained in the biggest blob"

    **NOTE**

    This currently performs a vertex-containment test, not a full polygon
    intersection test, for speed.
    """
    retVal = False
    bounds = self.points  # vertices of this feature's polygon
    if( isinstance(other,Feature) ):# A feature: require every vertex of other inside us
        retVal = True
        for p in other.points: # vertex test only: an edge crossing with no vertex inside is missed
            p2 = (int(p[0]),int(p[1]))
            retVal = self._pointInsidePolygon(p2,bounds)
            if( not retVal ):
                break
    # a single point
    elif( (isinstance(other,tuple) and len(other)==2) or ( isinstance(other,np.ndarray) and other.shape[0]==2) ):
        retVal = self._pointInsidePolygon(other,bounds)
    elif( isinstance(other,tuple) and len(other)==3 ): # A circle
        #assume we are in x,y, r format
        # contained only if every one of our vertices is at least r away from
        # the circle center (i.e. none of our vertices fall inside the circle)
        retVal = True
        rr = other[2]*other[2]
        x = other[0]
        y = other[1]
        for p in bounds:
            test = ((x-p[0])*(x-p[0]))+((y-p[1])*(y-p[1]))
            if( test < rr ):
                retVal = False
                break
    elif( isinstance(other,tuple) and len(other)==4 and ( isinstance(other[0],float) or isinstance(other[0],int))):
        # a bounding box (x,y,w,h): our axis-aligned extent must fit inside it
        retVal = ( self.maxX() <= other[0]+other[2] and
                   self.minX() >= other[0] and
                   self.maxY() <= other[1]+other[3] and
                   self.minY() >= other[1] )
    elif(isinstance(other,list) and len(other) >= 4): # an arbitrary polygon
        #everything else ....
        # every vertex of the polygon must be inside this feature
        retVal = True
        for p in other:
            test = self._pointInsidePolygon(p,bounds)
            if(not test):
                retVal = False
                break
    else:
        logger.warning("SimpleCV did not recognize the input type to features.contains. This method only takes another blob, an (x,y) tuple, or a ndarray type.")
        return False

    return retVal
def overlaps(self, other):
    """
    **SUMMARY**

    Return true if the feature overlaps the object, where object can be a bounding box,
    bounding circle, a list of tuples in a closed polygon, or any other feature.

    **PARAMETERS**

    * *object*

      * A bounding box - of the form (x,y,w,h) where x,y is the upper left corner
      * A bounding circle of the form (x,y,r)
      * A list of x,y tuples defining a closed polygon e.g. ((x,y),(x,y),....)
      * Any two dimensional feature (e.g. blobs, circle ...)

    **RETURNS**

    Returns a Boolean, True if the feature overlaps the object, False otherwise
    (and False, with a warning, for unrecognized input types).

    **EXAMPLE**

    >>> img = Image("Lenna")
    >>> blobs = img.findBlobs()
    >>> b = blobs[0]
    >>> if( blobs[-1].overlaps(b) ):
    >>>    print "This blob overlaps the biggest blob"

    Returns true if this blob contains at least one point, part of a collection
    of points, or any part of a blob.

    **NOTE**

    This currently performs a vertex-in-polygon test, not a full polygon
    intersection test, for speed.
    """
    retVal = False
    bounds = self.points  # vertices of this feature's polygon
    if( isinstance(other,Feature) ):# A feature: overlap if any of its vertices is inside us
        # NOTE(review): retVal starts True, so an other.points that is empty
        # reports an overlap -- confirm this is intended.
        retVal = True
        for p in other.points: # vertex test only: overlaps whose edges cross with no vertex inside are missed
            retVal = self._pointInsidePolygon(p,bounds)
            if( retVal ):
                break
    # a single point
    elif( (isinstance(other,tuple) and len(other)==2) or ( isinstance(other,np.ndarray) and other.shape[0]==2) ):
        retVal = self._pointInsidePolygon(other,bounds)
    elif( isinstance(other,tuple) and len(other)==3 and not isinstance(other[0],tuple)): # A circle
        #assume we are in x,y, r format
        # overlap if any of our vertices falls strictly inside the circle
        retVal = False
        rr = other[2]*other[2]
        x = other[0]
        y = other[1]
        for p in bounds:
            test = ((x-p[0])*(x-p[0]))+((y-p[1])*(y-p[1]))
            if( test < rr ):
                retVal = True
                break
    elif( isinstance(other,tuple) and len(other)==4 and ( isinstance(other[0],float) or isinstance(other[0],int))):
        # a bounding box (x,y,w,h): overlap if we contain any of its corners
        retVal = ( self.contains( (other[0],other[1] ) ) or # see if we contain any corner
                   self.contains( (other[0]+other[2],other[1] ) ) or
                   self.contains( (other[0],other[1]+other[3] ) ) or
                   self.contains( (other[0]+other[2],other[1]+other[3] ) ) )
    elif(isinstance(other,list) and len(other) >= 3): # an arbitrary polygon
        #everything else ....
        # overlap if any vertex of the polygon lies inside this feature
        retVal = False
        for p in other:
            test = self._pointInsidePolygon(p,bounds)
            if(test):
                retVal = True
                break
    else:
        logger.warning("SimpleCV did not recognize the input type to features.overlaps. This method only takes another blob, an (x,y) tuple, or a ndarray type.")
        return False

    return retVal
def doesNotContain(self, other):
    """
    **SUMMARY**

    Return True if this feature does not contain the other object, where
    other can be a bounding box, bounding circle, a list of tuples in a
    closed polygon, or any other feature.

    **PARAMETERS**

    * *other*

      * A bounding box - of the form (x,y,w,h) where x,y is the upper left corner
      * A bounding circle of the form (x,y,r)
      * A list of x,y tuples defining a closed polygon e.g. ((x,y),(x,y),....)
      * Any two dimensional feature (e.g. blobs, circle ...)

    **RETURNS**

    Returns a Boolean, True if the feature does not contain the object,
    False otherwise.

    **EXAMPLE**

    >>> img = Image("Lenna")
    >>> blobs = img.findBlobs()
    >>> b = blobs[0]
    >>> if( blobs[-1].doesNotContain(b) ):
    >>>    print "above the biggest blob"
    """
    # Simply the logical negation of contains().
    is_contained = self.contains(other)
    return not is_contained
def doesNotOverlap( self, other):
    """
    **SUMMARY**

    Return True if this feature does not overlap the object other, where
    other can be a bounding box, bounding circle, a list of tuples in a
    closed polygon, or any other feature.

    **PARAMETERS**

    * *other*

      * A bounding box - of the form (x,y,w,h) where x,y is the upper left corner
      * A bounding circle of the form (x,y,r)
      * A list of x,y tuples defining a closed polygon e.g. ((x,y),(x,y),....)
      * Any two dimensional feature (e.g. blobs, circle ...)

    **RETURNS**

    Returns a Boolean, True if the feature does not overlap the object,
    False otherwise.

    **EXAMPLE**

    >>> img = Image("Lenna")
    >>> blobs = img.findBlobs()
    >>> b = blobs[0]
    >>> if( blobs[-1].doesNotOverlap(b) ):
    >>>    print "does not over overlap biggest blob"
    """
    # Simply the logical negation of overlaps().
    does_overlap = self.overlaps(other)
    return not does_overlap
def isContainedWithin(self,other):
    """
    **SUMMARY**

    Return true if the feature is contained within the object other, where other
    can be a bounding box, bounding circle, a list of tuples in a closed polygon,
    or any other feature.

    **PARAMETERS**

    * *other*

      * A bounding box - of the form (x,y,w,h) where x,y is the upper left corner
      * A bounding circle of the form (x,y,r)
      * A list of x,y tuples defining a closed polygon e.g. ((x,y),(x,y),....)
      * Any two dimensional feature (e.g. blobs, circle ...)

    **RETURNS**

    Returns a Boolean, True if the feature is contained within the object,
    False otherwise (and False, with a warning, for unrecognized input types).

    **EXAMPLE**

    >>> img = Image("Lenna")
    >>> blobs = img.findBlobs()
    >>> b = blobs[0]
    >>> if( blobs[-1].isContainedWithin(b) ):
    >>>    print "inside the blob"
    """
    retVal = True
    bounds = self.points  # vertices of this feature's polygon
    if( isinstance(other,Feature) ): # another feature do the containment test
        retVal = other.contains(self)
    elif( isinstance(other,tuple) and len(other)==3 ): # a circle
        #assume we are in x,y, r format
        # contained only if every vertex is within r of the circle center
        rr = other[2]*other[2] # radius squared
        x = other[0]
        y = other[1]
        for p in bounds:
            test = ((x-p[0])*(x-p[0]))+((y-p[1])*(y-p[1]))
            if( test > rr ):
                retVal = False
                break
    elif( isinstance(other,tuple) and len(other)==4 and # a bounding box
        ( isinstance(other[0],float) or isinstance(other[0],int))): # we assume a tuple of four is (x,y,w,h)
        # our axis-aligned extent must fit inside the box
        retVal = ( self.maxX() <= other[0]+other[2] and
                   self.minX() >= other[0] and
                   self.maxY() <= other[1]+other[3] and
                   self.minY() >= other[1] )
    elif(isinstance(other,list) and len(other) > 2 ): # an arbitrary polygon
        #everything else ....
        # every one of our vertices must lie inside the polygon
        retVal = True
        for p in bounds:
            test = self._pointInsidePolygon(p,other)
            if(not test):
                retVal = False
                break
    else:
        logger.warning("SimpleCV did not recognize the input type to features.contains. This method only takes another blob, an (x,y) tuple, or a ndarray type.")
        retVal = False
    return retVal
def isNotContainedWithin(self,shape):
    """
    **SUMMARY**

    Return True if this feature is not contained within the shape, where
    shape can be a bounding box, bounding circle, a list of tuples in a
    closed polygon, or any other feature.

    **PARAMETERS**

    * *shape*

      * A bounding box - of the form (x,y,w,h) where x,y is the upper left corner
      * A bounding circle of the form (x,y,r)
      * A list of x,y tuples defining a closed polygon e.g. ((x,y),(x,y),....)
      * Any two dimensional feature (e.g. blobs, circle ...)

    **RETURNS**

    Returns a Boolean, True if the feature is not contained within the shape,
    False otherwise.

    **EXAMPLE**

    >>> img = Image("Lenna")
    >>> blobs = img.findBlobs()
    >>> b = blobs[0]
    >>> if( blobs[-1].isNotContainedWithin(b) ):
    >>>    print "Not inside the biggest blob"
    """
    # Simply the logical negation of isContainedWithin().
    contained = self.isContainedWithin(shape)
    return not contained
def _pointInsidePolygon(self,point,polygon):
"""
returns true if tuple point (x,y) is inside polygon of the form ((a,b),(c,d),...,(a,b)) the polygon should be closed
"""
# try:
# import cv2
# except:
# logger.warning("Unable to import cv2")
# return False
if( len(polygon) < 3 ):
logger.warning("feature._pointInsidePolygon - this is not a valid polygon")
return False
if( not isinstance(polygon,list)):
logger.warning("feature._pointInsidePolygon - this is not a valid polygon")
return False
#if( not isinstance(point,tuple) ):
#if( len(point) == 2 ):
# point = tuple(point)
#else:
# logger.warning("feature._pointInsidePolygon - this is not a valid point")
# return False
#if( cv2.__version__ == '$Rev:4557'):
counter = 0
retVal = True
p1 = None
#print "point: " + str(point)
poly = copy.deepcopy(polygon)
poly.append(polygon[0])
#for p2 in poly:
N = len(poly)
p1 = poly[0]
for i in range(1,N+1):
p2 = poly[i%N]
if( point[1] > np.min((p1[1],p2[1])) ):
if( point[1] <= np.max((p1[1],p2[1])) ):
if( point[0] <= np.max((p1[0],p2[0])) ):
if( p1[1] != p2[1] ):
test = float((point[1]-p1[1])*(p2[0]-p1[0]))/float(((p2[1]-p1[1])+p1[0]))
if( p1[0] == p2[0] or point[0] <= test ):
counter = counter + 1
p1 = p2
if( counter % 2 == 0 ):
retVal = False
return retVal
return retVal
#else:
# result = cv2.pointPolygonTest(np.array(polygon,dtype='float32'),point,0)
# return result > 0
def boundingCircle(self):
    """
    **SUMMARY**

    This function calculates the minimum bounding circle of the feature's
    contour as an (x,y,r) tuple, using OpenCV's minEnclosingCircle.

    **RETURNS**

    An (x,y,r) tuple where (x,y) is the center of the circle and r is the
    radius, or None if the cv2 module cannot be imported.

    **EXAMPLE**

    >>> img = Image("RatMask.png")
    >>> blobs = img.findBlobs()
    >>> print blobs[-1].boundingCircle()
    """
    try:
        import cv2
    except:
        logger.warning("Unable to import cv2")
        return None
    # contour of the blob in image
    contour = self.contour()
    points = []
    # contour points converted to the nested [[x, y]] layout that
    # cv2.minEnclosingCircle() expects before wrapping in an ndarray
    for pair in contour:
        points.append([[pair[0], pair[1]]])
    points = np.array(points)
    (cen, rad) = cv2.minEnclosingCircle(points);
    return (cen[0], cen[1], rad)
#---------------------------------------------
|
bsd-3-clause
|
Elastica/kombu
|
examples/hello_publisher.py
|
389
|
from __future__ import absolute_import, unicode_literals

import datetime

from kombu import Connection

# Connect to the local RabbitMQ broker (default guest credentials) and
# publish one timestamped message via kombu's SimpleQueue convenience API.
# The Connection context manager closes the broker connection on exit.
with Connection('amqp://guest:guest@localhost:5672//') as conn:
    simple_queue = conn.SimpleQueue('simple_queue')
    message = 'helloword, sent at {0}'.format(datetime.datetime.today())
    simple_queue.put(message)
    print('Sent: {0}'.format(message))
    simple_queue.close()
|
bsd-3-clause
|
s-nice/s-run
|
backend/themes/quirk/views/page/_form.php
|
1668
|
<?php

use yii\helpers\Html;
use yii\widgets\ActiveForm;

/* @var $this yii\web\View */
/* @var $model common\models\Page */
/* @var $form yii\widgets\ActiveForm */
?>

<div class="page-form panel-body">

    <?php $form = ActiveForm::begin(); ?>

    <?= $form->field($model, 'name')->textInput(['maxlength' => true]) ?>
    <!--
    <?= $form->field($model, 'pid')->textInput() ?>
    -->
    <?php /* type selects between custom content (1) and a template file (2);
           change() below toggles which inputs are visible */ ?>
    <?= $form->field($model, 'type')->dropDownList([1=>'自定义',2=>'模板'], ['onchange'=>'change()']) ?>

    <?= $form->field($model, 'tem')->textInput(['maxlength' => true]) ?>

    <?php /* rich-text editor for custom page content */ ?>
    <?= $form->field($model, 'content')->widget('common\widgets\ueditor\Ueditor',[
        'options'=>[
            'initialFrameWidth' => '50%',
            'initialFrameHeight' => 300,
            'toolbars' => [
                [
                    'fullscreen', 'undo', 'redo', '|',
                    'bold', 'italic','formatmatch', '|',
                    'forecolor', 'insertorderedlist','insertunorderedlist','fontsize', '|',
                    'link', 'unlink', 'anchor', '|',
                    'horizontal','insertcode', '|',
                    'simpleupload', 'insertimage',
                ]
            ],
        ]
    ]) ?>

    <?= $form->field($model, 'orderid')->textInput() ?>

    <?= $form->field($model, 'is_show')->dropDownList([1=>'显示',0=>'不显示']) ?>

    <div class="form-group">
        <?= Html::submitButton($model->isNewRecord ? '创建' : '更新', ['class' => $model->isNewRecord ? 'btn btn-success' : 'btn btn-primary']) ?>
    </div>

    <?php ActiveForm::end(); ?>

</div>
<script>
    // Toggle the visible inputs based on the selected page type:
    // type 1 (custom) shows the content editor, type 2 (template) shows tem.
    function change(){
        var type=$('#page-type').val();
        if(type==1){
            $('.field-page-tem').hide();
            $('.field-page-content').show();
        }else{
            $('.field-page-tem').show();
            $('.field-page-content').hide();
        }
    }
</script>
|
bsd-3-clause
|
hung101/kbs
|
frontend/views/ref-negara/_search.php
|
1034
|
<?php

use yii\helpers\Html;
use yii\widgets\ActiveForm;

/* @var $this yii\web\View */
/* @var $model frontend\models\RefNegaraSearch */
/* @var $form yii\widgets\ActiveForm */
?>

<div class="ref-negara-search">

    <?php /* GET form targeting the index action so search parameters
           appear in the URL and results are bookmarkable */ ?>
    <?php $form = ActiveForm::begin([
        'action' => ['index'],
        'method' => 'get',
    ]); ?>

    <?= $form->field($model, 'id') ?>

    <?= $form->field($model, 'desc') ?>

    <?= $form->field($model, 'kod_1') ?>

    <?= $form->field($model, 'kod_2') ?>

    <?= $form->field($model, 'kod_3') ?>

    <?php // echo $form->field($model, 'aktif') ?>

    <?php // echo $form->field($model, 'created_by') ?>

    <?php // echo $form->field($model, 'updated_by') ?>

    <?php // echo $form->field($model, 'created') ?>

    <?php // echo $form->field($model, 'updated') ?>

    <div class="form-group">
        <?= Html::submitButton('Search', ['class' => 'btn btn-primary']) ?>
        <?= Html::resetButton('Reset', ['class' => 'btn btn-default']) ?>
    </div>

    <?php ActiveForm::end(); ?>

</div>
|
bsd-3-clause
|
Egor86/yii-app
|
backend/views/subscriber/update.php
|
518
|
<?php

use yii\helpers\Html;

/* @var $this yii\web\View */
/* @var $model common\models\Subscriber */

// Page title and breadcrumb trail for the subscriber update screen.
$this->title = 'Обновление подписчика: ' . $model->name;
$this->params['breadcrumbs'][] = ['label' => 'Subscribers', 'url' => ['index']];
$this->params['breadcrumbs'][] = ['label' => $model->name, 'url' => ['view', 'id' => $model->id]];
$this->params['breadcrumbs'][] = 'Update';
?>
<div class="subscriber-update">

    <?php /* delegate the actual inputs to the shared _form partial */ ?>
    <?= $this->render('_form', [
        'model' => $model,
    ]) ?>

</div>
|
bsd-3-clause
|
weierophinney/zf-api-problem
|
src/ZF/ApiProblem/ApiProblemResponse.php
|
2299
|
<?php
/**
 * @license http://opensource.org/licenses/BSD-3-Clause BSD-3-Clause
 * @copyright Copyright (c) 2014 Zend Technologies USA Inc. (http://www.zend.com)
 */

namespace ZF\ApiProblem;

use Zend\Http\Response;

/**
 * Represents an ApiProblem response payload.
 *
 * Wraps an ApiProblem as an HTTP response: the status code and reason
 * phrase come from the problem, the body is its JSON serialization, and
 * the content-type defaults to "application/problem+json".
 */
class ApiProblemResponse extends Response
{
    /**
     * @var ApiProblem
     */
    protected $apiProblem;

    /**
     * Flags to use with json_encode
     *
     * @var int
     */
    protected $jsonFlags = 0;

    /**
     * Seed the response status/reason from the problem and, where the PHP
     * runtime supports it, avoid escaping slashes in the JSON body.
     *
     * @param ApiProblem $apiProblem
     */
    public function __construct(ApiProblem $apiProblem)
    {
        $this->apiProblem = $apiProblem;
        $this->setStatusCode($apiProblem->status);
        $this->setReasonPhrase($apiProblem->title);

        // JSON_UNESCAPED_SLASHES only exists on PHP >= 5.4; look it up by
        // name so the class still loads on older runtimes.
        if (defined('JSON_UNESCAPED_SLASHES')) {
            $this->jsonFlags = constant('JSON_UNESCAPED_SLASHES');
        }
    }

    /**
     * @return ApiProblem
     */
    public function getApiProblem()
    {
        return $this->apiProblem;
    }

    /**
     * Retrieve the content
     *
     * Serializes the composed ApiProblem instance to JSON.
     *
     * @return string
     */
    public function getContent()
    {
        return json_encode($this->apiProblem->toArray(), $this->jsonFlags);
    }

    /**
     * Retrieve headers
     *
     * Proxies to parent class, but then checks if we have an content-type
     * header; if not, sets it, with a value of "application/problem+json".
     *
     * @return \Zend\Http\Headers
     */
    public function getHeaders()
    {
        $headers = parent::getHeaders();
        if (!$headers->has('content-type')) {
            $headers->addHeaderLine('content-type', 'application/problem+json');
        }

        return $headers;
    }

    /**
     * Override reason phrase handling
     *
     * If no corresponding reason phrase is available for the current status
     * code, return "Unknown Error".
     *
     * @return string
     */
    public function getReasonPhrase()
    {
        if (! empty($this->reasonPhrase)) {
            return $this->reasonPhrase;
        }

        if (isset($this->recommendedReasonPhrases[$this->statusCode])) {
            return $this->recommendedReasonPhrases[$this->statusCode];
        }

        return 'Unknown Error';
    }
}
|
bsd-3-clause
|
chhantyal/scrapd
|
tests/test_zalando.py
|
382
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""
test_zalando
----------------------------------

Tests for `zalando` module.
"""

import unittest

from zalando import zalando


class TestZalando(unittest.TestCase):

    # Placeholder fixture setup -- nothing to prepare yet.
    def setUp(self):
        pass

    # Placeholder test case; TODO: add real assertions against the zalando module.
    def test_something(self):
        pass

    # Placeholder teardown -- nothing to clean up.
    def tearDown(self):
        pass

if __name__ == '__main__':
    unittest.main()
|
bsd-3-clause
|
adregan/sw-redux
|
js/service/middleware.js
|
1310
|
import localforage from 'localforage';
/*
 * @description
 * Middleware that handles ACTIVATE actions: persists any state carried on
 * the action, or rehydrates the persisted state/id from IndexedDB, and
 * dispatches a RESET with the result. All other actions pass straight through.
 */
export const reset = store => next => action => {
  if (action.type !== 'ACTIVATE') return next(action);

  // Action already carries a count: persist it and reset immediately.
  if (typeof action.count !== 'undefined') {
    // BUG FIX: .then() must receive a callback; the original passed the
    // promise returned by setItem directly, so the id write was not chained.
    localforage.setItem('state', action.count)
      .then(() => localforage.setItem('id', action.id));
    return store.dispatch({type: 'RESET', state: action.count, id: action.id});
  }

  // Otherwise load the persisted state and id, then dispatch the reset.
  let state;
  localforage.getItem('state')
    .then(savedState => {
      state = savedState;
      return localforage.getItem('id');
    })
    .then(id => store.dispatch({type: 'RESET', state, id}))
    .catch(err => {
      // BUG FIX: the original used `console.error(err) && next(action)`;
      // console.error returns undefined, so next(action) never ran and the
      // action was silently swallowed on failure.
      console.error(err);
      return next(action);
    });
};
/*
 * @description
 * Middleware that stashes the store's state into IndexedDB on every update
 * (except RESET, which originates from persisted state) and mirrors the
 * count to the counters REST API.
 */
export const stash = store => next => action => {
  let result = next(action);  // let the reducers run before persisting

  if (action.type !== 'RESET') {
    const { count, id } = store.getState();
    localforage.setItem('state', count)
      .then(() => {
        const data = {count};
        // NOTE(review): fire-and-forget PUT -- the response is neither
        // awaited nor checked; failures only surface via the catch below.
        fetch(`http://localhost:8080/counters/${id}`,
          { method: 'PUT',
            headers: { 'Content-Type' : 'application/json'},
            body: JSON.stringify(data)});
      })
      .catch(err => console.log(err));
  }
  return result;
};
|
bsd-3-clause
|
Aaron1992/highlight.js
|
test/special/noHighlight.js
|
546
|
'use strict';

// Verifies that highlight.js leaves certain markup untouched.
describe('no highlighting', function() {
  // Blocks carrying the `nohighlight` class must keep their raw innerHTML.
  it('should keep block unchanged', function() {
    var expected = '<div id="contents">\n ' +
        '<p>Hello, World!c\n</div>',
        actual = document.querySelector('.nohighlight').innerHTML;

    actual.should.equal(expected);
  });

  // A <pre> whose child is not <code> (here <samp>) must not be processed.
  it('should skip pre tags without a child code tag', function() {
    var expected = 'Computer output',
        actual = document.querySelector('pre samp').innerHTML;

    actual.should.equal(expected);
  });
});
|
bsd-3-clause
|
xkproject/Orchard2
|
src/OrchardCore.Modules/OrchardCore.Sitemaps/Builders/SitemapTypeBuilder.cs
|
1147
|
using System.Collections.Generic;
using System.Threading.Tasks;
using System.Xml.Linq;
using OrchardCore.Sitemaps.Models;

namespace OrchardCore.Sitemaps.Builders
{
    /// <summary>
    /// Builds a standard sitemap document (a sitemaps.org 0.9 urlset) by
    /// delegating each configured source to every registered source builder.
    /// </summary>
    public class SitemapTypeBuilder : SitemapTypeBuilderBase<Sitemap>
    {
        // XML namespace mandated by the sitemaps.org 0.9 protocol.
        private static readonly XNamespace Namespace = "http://www.sitemaps.org/schemas/sitemap/0.9";

        private readonly IEnumerable<ISitemapSourceBuilder> _sitemapSourceBuilders;

        public SitemapTypeBuilder(IEnumerable<ISitemapSourceBuilder> sitemapSourceBuilders)
        {
            _sitemapSourceBuilders = sitemapSourceBuilders;
        }

        /// <summary>
        /// Initializes the response with an empty urlset element and lets
        /// every source builder append entries for each sitemap source.
        /// </summary>
        public override async Task BuildSitemapTypeAsync(Sitemap sitemap, SitemapBuilderContext context)
        {
            context.Response = new SitemapResponse
            {
                ResponseElement = new XElement(Namespace + "urlset")
            };

            foreach (var source in sitemap.SitemapSources)
            {
                foreach (var sourceBuilder in _sitemapSourceBuilders)
                {
                    await sourceBuilder.BuildAsync(source, context);
                }
            }
        }
    }
}
|
bsd-3-clause
|
trucnguyenlam/mucke
|
src/simplebman.cc
|
17747
|
// Author: (C) 1996-1997 Armin Biere
// LastChange: Sat Jul 12 17:23:39 MET DST 1997
/*---------------------------------------------------------------------------.
| SimpleBMan |
| SimpleBManPrintManager |
| SimpleBooleQuantData |
| SimpleBooleRepr |
| SimpleBooleSubsData |
`---------------------------------------------------------------------------*/
#include "simplebman.h"
#include "BDDsimple.h"
#include "init.h"
#include "idx.h"
#include "io.h"
#include "InitFileReader.h"
extern "C" {
#include <stdio.h>
};
#include "except.h"
/*---------------------------------------------------------------------------*/
BDDsimpleManager * SimpleBMan::bdd_manager = 0;
SimpleBMan * SimpleBMan::_instance = 0;
/*---------------------------------------------------------------------------*/
static int utab_size = 32769;
static int cache_size = 32769;
static int gc_ratio = 3;
static int rsz_ratio = 4;
#ifdef CHECKMARKS
static int count_casts = 0;
static int max_count_casts = -1;
#endif
/*---------------------------------------------------------------------------*/
// Install the BDDsimple-backed BooleManager singleton at *where, creating it
// on first use and remembering it in SimpleBMan::_instance.
void
SimpleBMan_installAt(BooleManager ** where)
{
  verbose << "initializing BDDsimple library wrapper ...\n";
  *where = SimpleBMan::_instance = new SimpleBMan();
}
/*---------------------------------------------------------------------------*/
extern "C" {
/*---------------------------------------------------------------------------*/
void
simplebman_installAt(BooleManager ** where) { SimpleBMan_installAt(where); }
};
/*---------------------------------------------------------------------------*/
// Construct the wrapper manager: read tuning parameters from the optional
// `.simplebmanrc` init file (falling back to the compiled-in defaults) and
// create the underlying BDDsimpleManager with them.
SimpleBMan::SimpleBMan()
:
  current_var(0)   // next free BDD variable index handed out by new_var()
{
  InitFileReader initFileReader(".simplebmanrc");
  initFileReader.getValue("utab_size", utab_size);
  initFileReader.getValue("cache_size", cache_size);
  initFileReader.getValue("gc_ratio", gc_ratio);
  initFileReader.getValue("rsz_ratio", rsz_ratio);

  bdd_manager = new BDDsimpleManager(
    utab_size, cache_size, gc_ratio, rsz_ratio);

  // Report the effective configuration when verbose output is enabled.
  verbose << inc()
          << "unique table size: " << utab_size << '\n'
          << "cache table size: " << cache_size << '\n'
          << "garbage collection ratio: " << gc_ratio << '\n'
          << "resize ratio: " << rsz_ratio << '\n'
          << dec();
}
/*---------------------------------------------------------------------------*/
/* interface for `mark' traversal */
/* Mixin interface for objects managed by SimpleBMan's `mark' traversal:
 * every managed object must be able to flag its live BDD nodes so they
 * survive garbage collection. */
class SimpleBManManaged
{
protected:

  SimpleBManManaged() { }
  virtual ~SimpleBManManaged() { }

public:

  virtual void mark() = 0;   // mark all BDD nodes reachable from this object
};
/*---------------------------------------------------------------------------*/
/* Concrete Boole representation: wraps a single (possibly null) BDD node
 * and participates in the manager's mark traversal for GC support. */
class SimpleBooleRepr
:
  public BooleRepr,
  public SimpleBManManaged
{
  friend class SimpleBMan;

  BDDsimple * bdd;   // owned reference; 0 when the representation is invalid/reset

public:

  SimpleBooleRepr() : BooleRepr(SimpleBMan::instance()), bdd(0) { }
  ~SimpleBooleRepr() { reset(); }

  // GC support: flag the wrapped node as reachable
  void mark() { if(bdd) bdd -> mark(); }

  // release the wrapped node, making this representation invalid
  void reset()
  {
    if(bdd)
    {
      BDDsimple::free(bdd);
      bdd = 0;
    }
  }
};
/*---------------------------------------------------------------------------*/
#ifdef CHECKMARKS
/*---------------------------------------------------------------------------*/
void
inc_count_casts()
{
if(count_casts++>max_count_casts)
{
count_casts = 0;
checkMarks();
debug << "checkMarks succeeded\n";
}
}
/*---------------------------------------------------------------------------*/
#else
/*---------------------------------------------------------------------------*/
#define inc_count_casts() { }
/*---------------------------------------------------------------------------*/
#endif
/*---------------------------------------------------------------------------*/
// "Downcast" a BooleRepr to SimpleBooleRepr. There is no real dynamic cast:
// we only verify the representation belongs to this manager (raising a
// TypeViolation otherwise) and then use a C-style cast.
SimpleBooleRepr *
SimpleBMan::dcast(BooleRepr * br)
{
  inc_count_casts();   // periodic mark/ref consistency check (CHECKMARKS builds only)
  if(br->manager()!=this)
    error << "dynamic cast to simple boole repr not implemented"
          << TypeViolation();
  return (SimpleBooleRepr*) br;
}
/*---------------------------------------------------------------------------*/
SimpleBooleQuantData *
SimpleBMan::dcast_quant_data(BooleQuantData * bqd)
{
inc_count_casts();
if(bqd->manager() != this)
error << "dynamic cast to simple boole quant data not implemented"
<< TypeViolation();
return (SimpleBooleQuantData *) bqd;
}
/*---------------------------------------------------------------------------*/
SimpleBooleSubsData *
SimpleBMan::dcast_subs_data(BooleSubsData * bsd)
{
inc_count_casts();
if(bsd->manager() != this)
error << "dynamic cast to simple boole subs data not implemented"
<< TypeViolation();
return (SimpleBooleSubsData*) bsd;
}
/*---------------------------------------------------------------------------*/
int
SimpleBMan::new_var() { return current_var++; }
/*---------------------------------------------------------------------------*/
/* Quantification data: the set of variables to quantify over, stored as a
 * single BDD cube (the conjunction of all the variables). */
class SimpleBooleQuantData
:
  public BooleQuantData,
  public SimpleBManManaged
{
  friend class SimpleBMan;

  BDDsimple * vars;   // cube of quantified variables; 0 after reset()

  // Build the cube by AND-ing the variable BDD of every index in s.
  SimpleBooleQuantData(IdxSet & s) : BooleQuantData(SimpleBMan::instance())
  {
    Iterator<int> it(s);
    vars = BDDsimple::True;
    for(it.first(); !it.isDone(); it.next())
    {
      BDDsimple * var = BDDsimple::find_var( it.get() );
      BDDsimple * tmp = BDDsimple::andop(var, vars);
      BDDsimple::free(vars);   // release intermediates to keep ref counts balanced
      BDDsimple::free(var);
      vars = tmp;
    }
  }

  ~SimpleBooleQuantData() { reset(); }

public:

  // GC support: flag the cube as reachable
  void mark() { if(vars) vars -> mark(); }

  // release the cube, making this quant data invalid
  void reset()
  {
    if(vars)
    {
      BDDsimple::free(vars);
      vars = 0;
    }
  }
};
/*---------------------------------------------------------------------------*/
BooleQuantData *
SimpleBMan::new_var_set(IdxSet& is) { return new SimpleBooleQuantData(is); }
/*---------------------------------------------------------------------------*/
/* Substitution data: wraps a variable-to-BDD association owned by the
 * underlying BDDsimpleManager, used by compose-style operations. */
class SimpleBooleSubsData
:
  public BooleSubsData
{
  friend class SimpleBMan;

  BDDsimpleIntToBDDAssoc * assoc;   // manager-owned association; 0 after reset()

  // substitution mapping variable indices to variable indices
  SimpleBooleSubsData(const Idx<int> & m) :
    BooleSubsData(SimpleBMan::instance())
  {
    assoc = SimpleBMan::bdd_manager -> newAssoc(m);
  }

  // substitution mapping variable indices to arbitrary BDDs
  SimpleBooleSubsData(const Idx<BDDsimple*> & m) :
    BooleSubsData(SimpleBMan::instance())
  {
    assoc = SimpleBMan::bdd_manager -> newAssoc(m);
  }

  ~SimpleBooleSubsData() { reset(); }

public:

  // GC support: flag the association's BDDs as reachable
  void mark() { if(assoc) SimpleBMan::bdd_manager -> mark(assoc); }

  // unregister the association from the manager, invalidating this object
  void reset()
  {
    if(assoc)
    {
      SimpleBMan::bdd_manager -> removeAssoc(assoc);
      assoc = 0;
    }
  }
};
/*---------------------------------------------------------------------------*/
BooleSubsData *
SimpleBMan::new_sub(const Idx<int> & im)
{
return new SimpleBooleSubsData(im);
}
/*---------------------------------------------------------------------------*/
// Create substitution data from an index-to-BooleRepr map by extracting the
// underlying BDD of every target representation first.
BooleSubsData *
SimpleBMan::new_sub(const Idx<BooleRepr*> & im)
{
  IdxIterator<BooleRepr*> it(im);
  BooleSubsData * res;
  Idx<BDDsimple*> m;

  for(it.first(); !it.isDone(); it.next())
  {
    SimpleBooleRepr * sbr = dcast(it.to());
    m.map(it.from(), sbr -> bdd);   // borrow the BDD; ownership stays with sbr
  }

  res = new SimpleBooleSubsData(m);

  return res;
}
/*---------------------------------------------------------------------------*/
BooleRepr *
SimpleBMan::var_to_Boole(int v)
{
SimpleBooleRepr * res = new SimpleBooleRepr;
ASSERT(0<=v && v<current_var);
res->bdd = BDDsimple::find_var(v);
return res;
}
/*---------------------------------------------------------------------------*/
BooleRepr *
SimpleBMan::_exists(BooleRepr* operand_br, BooleQuantData * bqd)
{
SimpleBooleRepr * operand = dcast(operand_br);
SimpleBooleQuantData * qd = dcast_quant_data(bqd);
SimpleBooleRepr * res = new SimpleBooleRepr;
res->bdd = BDDsimple::exists(operand->bdd, qd->vars);
return res;
}
/*---------------------------------------------------------------------------*/
BooleRepr *
SimpleBMan::special_quantor(
BooleRepr* abr, BooleQuantData * bqd, BooleRepr* bbr,
BDDsimple * (*f)(BDDsimple *, BDDsimple *, BDDsimple *))
{
SimpleBooleRepr * a = dcast(abr), * b = dcast(bbr);
SimpleBooleQuantData * qd = dcast_quant_data(bqd);
SimpleBooleRepr * res = new SimpleBooleRepr;
res->bdd = (*f)(a->bdd, b->bdd, qd->vars);
return res;
}
/*---------------------------------------------------------------------------*/
BooleRepr* SimpleBMan::special_quantor(
BooleRepr* abr, BooleQuantData * bqd, BooleRepr* bbr, BooleSubsData * bsd,
BDDsimple * (*f)(BDDsimple *, BDDsimple *,
BDDsimple *, BDDsimpleIntToBDDAssoc*))
{
SimpleBooleRepr * a = dcast(abr), * b = dcast(bbr);
SimpleBooleQuantData * qd = dcast_quant_data(bqd);
SimpleBooleSubsData * sd = dcast_subs_data(bsd);
SimpleBooleRepr * res = new SimpleBooleRepr;
res->bdd = (*f)(a->bdd, b->bdd, qd->vars, sd -> assoc);
return res;
}
/*---------------------------------------------------------------------------*/
BooleRepr *
SimpleBMan::_relprod(BooleRepr* a, BooleQuantData * q, BooleRepr* b)
{
return special_quantor(a,q,b,BDDsimple::relProd);
}
/*---------------------------------------------------------------------------*/
BooleRepr *
SimpleBMan::_relprod(
BooleRepr* a, BooleQuantData * q, BooleRepr* b, BooleSubsData * s)
{
return special_quantor(a,q,b,s,BDDsimple::composeRelProd);
}
/*---------------------------------------------------------------------------*/
BooleRepr *
SimpleBMan::_forall(BooleRepr* operand_br, BooleQuantData * bqd)
{
SimpleBooleRepr * operand = dcast(operand_br);
SimpleBooleQuantData * qd = dcast_quant_data(bqd);
SimpleBooleRepr * res = new SimpleBooleRepr;
res->bdd = BDDsimple::forall(operand->bdd, qd->vars);
return res;
}
/*---------------------------------------------------------------------------*/
BooleRepr *
SimpleBMan::_forallImplies(BooleRepr* a, BooleQuantData * bqd, BooleRepr* b)
{
return special_quantor(a,bqd,b,BDDsimple::forallImplies);
}
/*---------------------------------------------------------------------------*/
BooleRepr *
SimpleBMan::_forallImplies(
BooleRepr* a, BooleQuantData * q, BooleRepr* b, BooleSubsData * s)
{
return special_quantor(a,q,b,s,BDDsimple::composeForallImplies);
}
/*---------------------------------------------------------------------------*/
BooleRepr *
SimpleBMan::_forallOr(BooleRepr* a, BooleQuantData * bqd, BooleRepr* b)
{
return special_quantor(a,bqd,b,BDDsimple::forallOr);
}
/*---------------------------------------------------------------------------*/
BooleRepr *
SimpleBMan::_substitute(BooleRepr* operand_br, BooleSubsData * bsd)
{
SimpleBooleRepr * operand = dcast(operand_br);
SimpleBooleSubsData * sbsd = dcast_subs_data(bsd);
SimpleBooleRepr * res = new SimpleBooleRepr;
res -> bdd = BDDsimple::compose(operand -> bdd, sbsd -> assoc);
return res;
}
/*---------------------------------------------------------------------------*/
BooleRepr *
SimpleBMan::copy(BooleRepr* br)
{
SimpleBooleRepr * a = dcast(br);
SimpleBooleRepr * res = new SimpleBooleRepr;
res->bdd = BDDsimple::copy(a->bdd);
return res;
}
/*---------------------------------------------------------------------------*/
BooleRepr *
SimpleBMan::bool_to_Boole(bool b)
{
SimpleBooleRepr * res = new SimpleBooleRepr;
res->bdd = b ? BDDsimple::True : BDDsimple::False;
return res;
}
/*---------------------------------------------------------------------------*/
BooleRepr * SimpleBMan::ite(
BooleRepr * cbr, BooleRepr * tbr, BooleRepr * ebr)
{
SimpleBooleRepr * c = dcast(cbr), * t = dcast(tbr), * e = dcast(ebr);
SimpleBooleRepr * res = new SimpleBooleRepr;
res->bdd = BDDsimple::ite(c->bdd, t->bdd, e->bdd);
return res;
}
/*---------------------------------------------------------------------------*/
BooleRepr *
SimpleBMan::binary(BooleRepr* abr, BooleRepr* bbr,
BDDsimple * (*f)(BDDsimple*,BDDsimple*))
{
SimpleBooleRepr * a = dcast(abr), * b = dcast(bbr);
SimpleBooleRepr * res = new SimpleBooleRepr;
res->bdd = (*f)(a->bdd, b->bdd);
return res;
}
/*---------------------------------------------------------------------------*/
BooleRepr *
SimpleBMan::andop(BooleRepr * a, BooleRepr * b)
{
return binary(a,b,&BDDsimple::andop);
}
/*---------------------------------------------------------------------------*/
BooleRepr *
SimpleBMan::implies(BooleRepr * a, BooleRepr * b)
{
return binary(a,b,&BDDsimple::implies);
}
/*---------------------------------------------------------------------------*/
BooleRepr *
SimpleBMan::seilpmi(BooleRepr * a, BooleRepr * b)
{
return binary(a,b,&BDDsimple::seilpmi);
}
/*---------------------------------------------------------------------------*/
BooleRepr *
SimpleBMan::equiv(BooleRepr * a, BooleRepr * b)
{
return binary(a,b,&BDDsimple::equiv);
}
/*---------------------------------------------------------------------------*/
BooleRepr *
SimpleBMan::notequiv(BooleRepr * a, BooleRepr * b)
{
return binary(a,b,&BDDsimple::notequiv);
}
/*---------------------------------------------------------------------------*/
BooleRepr *
SimpleBMan::simplify_assuming(BooleRepr * a, BooleRepr * b)
{
return binary(a,b,&BDDsimple::simplify_assuming);
}
/*---------------------------------------------------------------------------*/
BooleRepr *
SimpleBMan::cofactor(BooleRepr * a, BooleRepr * b
)
{
return binary(a,b,&BDDsimple::cofactor);
}
/*---------------------------------------------------------------------------*/
BooleRepr *
SimpleBMan::orop(BooleRepr * a, BooleRepr * b)
{
return binary(a,b,&BDDsimple::orop);
}
/*---------------------------------------------------------------------------*/
// Build the conjunction (cube) of all BDD variables whose indices are in
// `set`. The returned BDD is a fresh reference owned by the caller, which
// must release it with BDDsimple::free.
BDDsimple *
SimpleBMan::set_to_domain(IdxSet & set)
{
  IdxSetIterator it(set);
  BDDsimple * domain = BDDsimple::copy(BDDsimple::True);
  for(it.first(); !it.isDone(); it.next())
  {
    BDDsimple * var = BDDsimple::find_var(it.get());
    BDDsimple * tmp = BDDsimple::andop(var, domain);
    BDDsimple::free(var);      // drop temporaries to keep ref counts balanced
    BDDsimple::free(domain);
    domain = tmp;
  }
  return domain;
}
/*---------------------------------------------------------------------------*/
BooleRepr *
SimpleBMan::onecube(BooleRepr * abr, IdxSet & set)
{
BDDsimple * domain = set_to_domain(set);
SimpleBooleRepr * a = dcast(abr);
SimpleBooleRepr * res = new SimpleBooleRepr;
res -> bdd = BDDsimple::onecube(a -> bdd, domain);
BDDsimple::free(domain);
return res;
}
/*---------------------------------------------------------------------------*/
BooleRepr *
SimpleBMan::notop(BooleRepr * abr)
{
SimpleBooleRepr * a = dcast(abr);
SimpleBooleRepr * res = new SimpleBooleRepr;
res->bdd = BDDsimple::notop(a->bdd);
return res;
}
/*---------------------------------------------------------------------------*/
bool
SimpleBMan::isTrue(BooleRepr* abr)
{
SimpleBooleRepr * a = dcast(abr);
return a->bdd == BDDsimple::True;
}
/*---------------------------------------------------------------------------*/
bool
SimpleBMan::isFalse(BooleRepr* abr)
{
SimpleBooleRepr * a = dcast(abr);
return a->bdd == BDDsimple::False;
}
/*---------------------------------------------------------------------------*/
bool
SimpleBMan::areEqual(BooleRepr * abr, BooleRepr * bbr)
{
SimpleBooleRepr * a = dcast(abr), * b = dcast(bbr);
return a->bdd == b->bdd;
}
/*---------------------------------------------------------------------------*/
bool
SimpleBMan::doesImply(BooleRepr * abr, BooleRepr * bbr)
{
SimpleBooleRepr * a = dcast(abr), * b = dcast(bbr);
return BDDsimple::doesImply(a->bdd, b->bdd);
}
/*---------------------------------------------------------------------------*/
bool
SimpleBMan::isValid(BooleRepr *br) { return dcast(br)->bdd!=0; }
/*---------------------------------------------------------------------------*/
// Render human-readable statistics (number of live SimpleBooleRepr's plus
// the underlying BDD manager's stats) into the shared stats_buffer.
// NOTE(review): sprintf performs no bounds checking -- presumably
// stats_buffer is sized generously for both strings; confirm, and
// consider snprintf if its capacity is known here.
const char *
SimpleBMan::stats()
{
    sprintf(stats_buffer,
            "SimpleBMan: %d (SimpleBooleRepr's)\n%s",
            num_reprs,
            bdd_manager->stats());
    return stats_buffer;
}
/*---------------------------------------------------------------------------*/
/* Node count of the BDD underlying this representation. */
int
SimpleBMan::size(BooleRepr* abr)
{
    return dcast(abr)->bdd->size();
}
/*---------------------------------------------------------------------------*/
// Size of the onset of `abr` over the variable domain described by `set`.
// The temporary domain BDD is freed before returning.
float
SimpleBMan::onsetsize(BooleRepr * abr, IdxSet & set)
{
    BDDsimple * domain = set_to_domain(set);
    SimpleBooleRepr * a = dcast(abr);
    float res = a -> bdd -> onsetsize(domain);
    BDDsimple::free(domain);
    return res;
}
/*---------------------------------------------------------------------------*/
// Mark phase of the reference-count audit: ask every managed object to
// mark the BDD nodes it keeps alive (see checkMarks below).
void
SimpleBMan::mark()
{
    Iterator<BooleManagerManaged*> it(*this);
    for(it.first(); !it.isDone(); it.next())
    {
        // Objects registered with this manager are SimpleBManManaged.
        SimpleBManManaged * sbman_managed = (SimpleBManManaged*) it.get();
        sbman_managed -> mark();
    }
}
/*---------------------------------------------------------------------------*/
// Adapter that routes BDDsimple's printing callbacks onto an IOStream.
class SimpleBManPrintManager
    :
    public BDDsimplePrintManager
{
    IOStream stream;
    // BDDsimplePrintManager hook: emit raw text to the wrapped stream.
    void _print(const char * s) { stream << s; }
public:
    SimpleBManPrintManager(const IOStream& io) : stream(io) { }
};
/*---------------------------------------------------------------------------*/
/* Render the BDD behind `brp` with the BDD layer's visualizer; invalid
 * representations (null BDD) are silently skipped. */
void
SimpleBMan::visualize(BooleRepr * brp)
{
    BDDsimple * bdd = dcast(brp) -> bdd;
    if(bdd != 0) bdd -> visualize();
}
/*---------------------------------------------------------------------------*/
// Consistency check: reset all BDD bookkeeping, re-mark every node
// reachable from live representations, and compare the resulting marks
// with the stored reference counts.
void
checkMarks()
{
    BDDsimple::reset_all();
    SimpleBMan::instance() -> mark();
    BDDsimple::compare_ref_with_mark();
    BDDsimple::reset_all();
}
/*---------------------------------------------------------------------------*/
|
bsd-3-clause
|
tylerlong/quick_orm
|
quick_orm/testsuite/fixtures.py
|
2942
|
# coding=utf-8
"""
quick_orm.testsuite.fixtures
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
sample data for testing purpose
"""
from toolkit_library.inspector import ModuleInspector
import quick_orm.testsuite.models
# Pull every model class (User, Group, BlogEntry, Topic, Question, Answer)
# from the models module into this namespace via generated import statements.
exec(ModuleInspector(quick_orm.testsuite.models).import_all_classes_statement())

# Five sample users.
users = []
users.append(User(name = 'peter'))
users.append(User(name = 'tyler'))
users.append(User(name = 'simon'))
users.append(User(name = 'jason'))
users.append(User(name = 'justin'))

# Three groups; each user belongs to exactly one group.
groups = []
groups.append(Group(name = 'admin', users = [users[0],]))
groups.append(Group(name = 'editor', users = [users[1], users[3], users[4], ]))
groups.append(Group(name = 'user', users = [users[2],]))

# Eight blog entries spread over the users (users[0]/[1] get one each,
# users[2]/[3]/[4] get two each).
blog_entries = []
blog_entries.append(BlogEntry(title = 'blog_entry_title_1', content = 'blog_entry_content_1', user = users[0]))
blog_entries.append(BlogEntry(title = 'blog_entry_title_2', content = 'blog_entry_content_2', user = users[1]))
blog_entries.append(BlogEntry(title = 'blog_entry_title_3', content = 'blog_entry_content_3', user = users[2]))
blog_entries.append(BlogEntry(title = 'blog_entry_title_4', content = 'blog_entry_content_4', user = users[2]))
blog_entries.append(BlogEntry(title = 'blog_entry_title_5', content = 'blog_entry_content_5', user = users[3]))
blog_entries.append(BlogEntry(title = 'blog_entry_title_6', content = 'blog_entry_content_6', user = users[3]))
blog_entries.append(BlogEntry(title = 'blog_entry_title_7', content = 'blog_entry_content_7', user = users[4]))
blog_entries.append(BlogEntry(title = 'blog_entry_title_8', content = 'blog_entry_content_8', user = users[4]))

# Five topics used to tag the questions below.
topics = []
topics.append(Topic(name = 'topic_name_1'))
topics.append(Topic(name = 'topic_name_2'))
topics.append(Topic(name = 'topic_name_3'))
topics.append(Topic(name = 'topic_name_4'))
topics.append(Topic(name = 'topic_name_5'))

# Five questions, all asked by users[4]; the first one carries two topics.
questions = []
questions.append(Question(title = 'question_title_1', content = 'question_content_1', topics = [topics[0], topics[1], ], user = users[4]))
questions.append(Question(title = 'question_title_2', content = 'question_content_2', topics = [topics[1], ], user = users[4]))
questions.append(Question(title = 'question_title_3', content = 'question_content_3', topics = [topics[2], ], user = users[4]))
questions.append(Question(title = 'question_title_4', content = 'question_content_4', topics = [topics[3], ], user = users[4]))
questions.append(Question(title = 'question_title_5', content = 'question_content_5', topics = [topics[4], ], user = users[4]))

# Five answers: three from users[0], one each from users[1] and users[2].
answers = []
answers.append(Answer(content = 'answer_content_1', user = users[0]))
answers.append(Answer(content = 'answer_content_2', user = users[0]))
answers.append(Answer(content = 'answer_content_3', user = users[0]))
answers.append(Answer(content = 'answer_content_4', user = users[1]))
answers.append(Answer(content = 'answer_content_5', user = users[2]))
|
bsd-3-clause
|
svinotavr/JavascriptPhaserTutorials
|
Tutorial files/M12-Match-3-Game/08-update-grid/js/states/Game.js
|
460
|
var Match3 = Match3 || {};

// Phaser game state for the match-3 board demo.
Match3.GameState = {
    // Board constants, set before create() runs.
    init: function() {
        this.NUM_ROWS = 8;
        this.NUM_COLS = 8;
        this.NUM_VARIATIONS = 6;    // distinct block types
        this.BLOCK_SIZE = 35;       // pixels per block
        this.ANIMATION_TIME = 200;  // ms
    },
    create: function() {
        //game background
        this.background = this.add.sprite(0, 0, 'background');

        //board model
        this.board = new Match3.Board(this, this.NUM_ROWS, this.NUM_COLS, this.NUM_VARIATIONS);
        // Dump the initial grid to the console for debugging.
        this.board.consoleLog();
    }
};
|
bsd-3-clause
|
timo/zasim
|
test/testutil.py
|
3635
|
import numpy as np
from itertools import repeat, chain, product
def compare_ndim_arrays(arr1, arr2):
    """Dump both multi-dimensional arrays for visual comparison (Python 2)."""
    print "arr1:"
    print arr1
    print "arr2:"
    print arr2
def assert_ndim_arrays_equal(arr1, arr2):
    """Flatten both arrays, then delegate to assert_arrays_equal."""
    assert_arrays_equal(arr1.flatten(), arr2.flatten())
def generate_pretty_printed_comparison(arr1, arr2):
    """return a pretty-printed comparison of two arrays as well as its equality:
       (equal, l1, mid, l2)

    ``l1``/``l2`` render the two arrays side by side and ``mid`` is a marker
    line flagging each differing cell with a ``#``.
    """
    equal = True
    l1, mid, l2 = "arr1 ", "     ", "arr2 "
    for i in range(len(arr1)):
        if arr1[i] != arr2[i]:
            equal = False
        l1p = " % 2d" % arr1[i]
        l2p = " % 2d" % arr2[i]
        l1 += l1p
        l2 += l2p
        # Pad the shorter line up to the longer one so the columns stay
        # aligned.  (The original computed the padding as the *negative*
        # difference, which made `" " * n` a silent no-op.)
        if len(l1) > len(l2):
            l2 += " " * (len(l1) - len(l2))
        elif len(l1) < len(l2):
            l1 += " " * (len(l2) - len(l1))
        if arr1[i] == arr2[i]:
            mid += " " * len(l1p)
        else:
            # " #" is two characters, so pad the marker cell to the full
            # column width to keep subsequent markers aligned.
            mid += " #" + " " * (max(len(l1p), len(l2p)) - 2)
    return (equal, l1, mid, l2)
def compare_arrays(arr1, arr2):
    """Print a side-by-side comparison of two arrays (Python 2).

    Multi-dimensional arrays are delegated to compare_ndim_arrays; for 1-d
    arrays the marker line is only printed when the arrays differ.
    """
    if len(arr1.shape) > 1:
        compare_ndim_arrays(arr1, arr2)
        return
    (equal, l1, mid, l2) = generate_pretty_printed_comparison(arr1, arr2)
    if equal:
        print l1
    else:
        print "\n".join((l1, mid, l2))
def assert_arrays_equal(arr1, arr2):
    """assert the equality of two arrays.

    highlights different array cells if they differ.
    outputs the array if they are the same"""
    # are the arrays the same size?
    assert arr1.shape == arr2.shape
    # Multi-dimensional arrays are flattened and re-checked elsewhere.
    if len(arr1.shape) > 1:
        assert_ndim_arrays_equal(arr1, arr2)
        return
    (equal, l1, mid, l2) = generate_pretty_printed_comparison(arr1, arr2)
    # Print the comparison first so the diff is visible before the assert fires.
    if not equal:
        print l1
        print mid
        print l2
    else:
        print l1
    # are the arrays equal?
    assert equal
def pretty_print_binrule(rule_arr):
    """display a key for neighbourhood to value, oriented the same way as the
    arrays are displayed"""
    l1 = []
    l2 = []
    # Enumerate all 8 three-cell neighbourhood configurations (0,0,0)..(1,1,1);
    # bin(i)[2:] renders i in binary and "%03d" zero-pads it to three digits.
    for i, idx in enumerate(product(*([[0, 1]] * 3))):
        l1.append("%03d" % int(bin(i)[2:]))
        l2.append(str(int(rule_arr[idx])))
    print " ".join(l1)
    print " " + " ".join(l2)
class EvilRandom(object):
    """Deterministic stand-in for the ``random`` module.

    ``random()`` yields pre-scripted values from the supplied iterable so
    tests can control "random" decisions exactly.  Raises StopIteration
    when the script is exhausted.
    """
    def __init__(self, iterator):
        self.iterator = iter(iterator)
    def random(self):
        # Use the next() builtin instead of the Python-2-only
        # iterator.next() method: identical behaviour on Python 2.6+,
        # and also valid on Python 3.
        return next(self.iterator)
class ZerosThenOnesRandom(EvilRandom):
    """EvilRandom that returns 0.0 ``zeros`` times, then 1.0 forever."""
    def __init__(self, zeros):
        super(ZerosThenOnesRandom, self).__init__(
            chain(repeat(0.0, zeros), repeat(1.0)))
# Elementary CA rule numbers worth eyeballing in tests.
INTERESTING_BINRULES = [
    26, 30, 122, 184, # triangles of different sorts
    45, 54, # other pretty rules
    110, # this one's actually able to calculate things
    ]
# Known-good evolution snapshots, keyed by rule number: five successive
# generations of a 10-cell row (borders included).
TESTED_BINRULE = {
    110:
    [[1,0,0,1,0,1,0,1,1,0],
     [1,0,1,1,1,1,1,1,1,0],
     [1,1,1,0,0,0,0,0,1,1],
     [1,0,1,0,0,0,0,1,1,0],
     [1,1,1,0,0,0,1,1,1,1]]}
# Same data as numpy arrays, with and without the two border cells.
# (iteritems: this module targets Python 2.)
TESTED_BINRULE_WITH_BORDERS = dict((k, [np.array(arr) for arr in v]) for k, v in TESTED_BINRULE.iteritems())
TESTED_BINRULE_WITHOUT_BORDERS = dict((k, [np.array(arr[1:-1]) for arr in v]) for k, v in TESTED_BINRULE.iteritems())
# Four successive steps of a Game-of-Life glider on a 5x5 grid.
GLIDER = [
 [[0,1,0,0,0],
  [0,0,1,0,0],
  [1,1,1,0,0],
  [0,0,0,0,0],
  [0,0,0,0,0]],
 [[0,0,0,0,0],
  [1,0,1,0,0],
  [0,1,1,0,0],
  [0,1,0,0,0],
  [0,0,0,0,0]],
 [[0,0,0,0,0],
  [0,0,1,0,0],
  [1,0,1,0,0],
  [0,1,1,0,0],
  [0,0,0,0,0]],
 [[0,0,0,0,0],
  [0,1,0,0,0],
  [0,0,1,1,0],
  [0,1,1,0,0],
  [0,0,0,0,0]],]
# Convert to numpy arrays when available; fall back to plain lists.
try:
    GLIDER = [np.array(a) for a in GLIDER]
except:
    pass
|
bsd-3-clause
|
centrumholdings/cthulhubot
|
tests/example_project/tests/test_buildbot.py
|
1767
|
import os
from django.conf import settings
from djangosanetesting import HttpTestCase
from djangosanetesting.utils import get_live_server_path
from cthulhubot.models import Project, Buildmaster
from tests.helpers import create_project
from tests.helpers import BuildmasterTestCase
# test is spawning child that will not share transaction - test must be destructive
class TestBuildmaster(BuildmasterTestCase):
    """Destructive tests that drive a real buildmaster child process."""

    @staticmethod
    def _buildbot_env():
        """Environment for the spawned buildmaster: put cthulhubot and the
        example project on PYTHONPATH and point Django at the buildbot
        settings module.  (Shared by start_master and stop_master, which
        previously duplicated this dict.)"""
        import cthulhubot
        import example_project
        return {
            "PYTHONPATH": ':'.join([
                os.path.abspath(os.path.join(os.path.dirname(cthulhubot.__file__), os.pardir)),
                os.path.abspath(os.path.join(os.path.dirname(example_project.__file__), os.pardir))
            ]),
            "DJANGO_SETTINGS_MODULE": "example_project.buildbot_settings",
        }

    def assert_running(self):
        self.assert_true(self.buildmaster.is_running())

    def start_master(self):
        self.buildmaster.start(env=self._buildbot_env())

    def stop_master(self):
        self.buildmaster.stop(env=self._buildbot_env())

    def test_master_start(self):
        self.start_master()
        self.assert_running()

    def test_master_stop(self):
        self.start_master()
        self.stop_master()
        self.assert_false(self.buildmaster.is_running())

    def test_master_not_started_after_creation(self):
        self.assert_false(self.buildmaster.is_running())
|
bsd-3-clause
|
utvara/phpworkers
|
tests/Edo/Event/FactoryTest.php
|
3748
|
<?php
/**
* phpworkers
*
* LICENSE
*
* This source file is subject to the new BSD license that is bundled
* with this package in the file LICENSE.txt.
*
* @copyright Copyright (c) 2010 Slobodan Utvic and Julian Davchev
*/
require_once 'PHPUnit/Framework/TestCase.php';
require_once (dirname(dirname(dirname(__FILE__))) . "/TestConfiguration.php");
//require_once WORKSHOP_ROOT . "/Edo/Event.php";
require_once WORKSHOP_ROOT . "/Edo/Event/Engine.php";
require_once WORKSHOP_ROOT . "/Edo/Event/Factory.php";
/**
 * Edo_Event_Factory test case.
 *
 * Exercises event creation through the factory against a file-backed
 * engine, both with an explicit engine argument and with the engine set
 * as the process-wide default.
 */
class Edo_Event_FactoryTest extends PHPUnit_Framework_TestCase {

    /** @var mixed File engine fixture, recreated for every test. */
    protected $engine = null;

    /**
     * Prepares the environment before running a test.
     */
    protected function setUp() {
        $this->engine = FileEngineHelper::getNewFileEngine();
        parent::setUp ();
    }

    /**
     * Cleans up the environment after running a test.
     */
    protected function tearDown() {
        FileEngineHelper::revertSystem($this->engine);
        // Clear the default engine so tests cannot leak state into each other.
        Edo_Event_Engine::setDefaultEngine(null);
        $this->engine = null;
        parent::tearDown ();
    }

    /**
     * Creating from an array with an explicit engine round-trips the id.
     */
    public function testCreateAllArgs()
    {
        $event_data = array (
          'ref' => '/yapeeeee/2',
          'event' => 'create',
          'tag' =>
          array (
            0 => 'article',
            1 => 'marker',
          ),
          'time_started' => 1248086540,
        );
        $id = Edo_Event_Factory::create($event_data,'manager',$this->engine);
        $event = $this->engine->findEventById($id,'manager');
        $this->assertEquals($id,$event->id);
    }

    /**
     * With a default engine configured, the engine argument may be omitted;
     * the created event must not be locked.
     */
    public function testCreateDefaultEngine()
    {
        Edo_Event_Engine::setDefaultEngine($this->engine);
        $event_data = array (
          'ref' => '/yapeeeee/2',
          'event' => 'create',
          'tag' =>
          array (
            0 => 'article',
            1 => 'marker',
          ),
          'time_started' => 1248086540,
        );
        $id = Edo_Event_Factory::create($event_data,'manager');
        $isLocked = $this->engine->isLocked($id,'manager');
        $this->assertFalse($isLocked);
        $event = $this->engine->findEventById($id,'manager');
        $this->assertEquals($id,$event->id);
    }

    /**
     * The factory also accepts an Edo_Event object instead of an array.
     */
    public function testCreateEventOrData()
    {
        Edo_Event_Engine::setDefaultEngine($this->engine);
        $time_for_this_entry = time();
        $event = new Edo_Event();
        $event->ref = "/marker/2";
        $event->event = 'create';
        $event->tag = array('article','marker');
        $event->time_started = $time_for_this_entry;
        $id = Edo_Event_Factory::create($event,'manager');
        $isLocked = $this->engine->isLocked($id,'manager');
        $this->assertFalse($isLocked);
        $event = $this->engine->findEventById($id,'manager');
        $this->assertEquals($id,$event->id);
    }

    /**
     * Passing true as the fourth argument acquires the lock on creation.
     */
    public function testAckquireLockArgument()
    {
        Edo_Event_Engine::setDefaultEngine($this->engine);
        $time_for_this_entry = time();
        $event = new Edo_Event();
        $event->ref = "/marker/2";
        $event->event = 'create';
        $event->tag = array('article','marker');
        $event->time_started = $time_for_this_entry;
        $id = Edo_Event_Factory::create($event,'manager',null,true);
        $isLocked = $this->engine->isLocked($id,'manager');
        $this->assertTrue($isLocked);
    }

    /**
     * Invalid event data with no default engine must raise an engine exception.
     */
    public function testWrongInputCreatesExceptionCreatesDefaultEngine()
    {
        $this->setExpectedException('Edo_Event_Engine_Exception');
        Edo_Event_Engine::setDefaultEngine(null);
        $id = Edo_Event_Factory::create('moooo','manager',$this->engine,true);
    }
}
|
bsd-3-clause
|
dvyushin88/fantasy_ff
|
modules/admin/models/User.php
|
1541
|
<?php
namespace app\modules\admin\models;
use yii\helpers\ArrayHelper;
use app\modules\user\models\User as MainUser;
use Yii;
/**
 * Admin-side user model.
 *
 * Extends the main user model with a write-only password pair
 * (newPassword / newPasswordRepeat) used by the admin create/update forms.
 */
class User extends MainUser
{
    const SCENARIO_ADMIN_CREATE = 'adminCreate';
    const SCENARIO_ADMIN_UPDATE = 'adminUpdate';

    /** @var string|null New password entered in the admin form (not persisted directly). */
    public $newPassword;
    /** @var string|null Confirmation of the new password. */
    public $newPasswordRepeat;

    /**
     * Parent rules plus password rules: the pair is required only when an
     * admin creates a user; on update an empty password means "keep current".
     */
    public function rules()
    {
        return ArrayHelper::merge(parent::rules(), [
            [['newPassword', 'newPasswordRepeat'], 'required', 'on' => self::SCENARIO_ADMIN_CREATE],
            ['newPassword', 'string', 'min' => 6],
            ['newPasswordRepeat', 'compare', 'compareAttribute' => 'newPassword'],
        ]);
    }

    /**
     * Both admin scenarios validate the same attribute set.
     */
    public function scenarios()
    {
        $scenarios = parent::scenarios();
        $scenarios[self::SCENARIO_ADMIN_CREATE] = ['username', 'email', 'status', 'newPassword', 'newPasswordRepeat'];
        $scenarios[self::SCENARIO_ADMIN_UPDATE] = ['username', 'email', 'status', 'newPassword', 'newPasswordRepeat'];
        return $scenarios;
    }

    /**
     * Parent labels plus translated labels for the password pair.
     */
    public function attributeLabels()
    {
        return ArrayHelper::merge(parent::attributeLabels(), [
            'newPassword' => Yii::t('app', 'USER_NEW_PASSWORD'),
            'newPasswordRepeat' => Yii::t('app', 'USER_REPEAT_PASSWORD'),
        ]);
    }

    /**
     * Hash and store the new password (only when one was entered) before
     * the record is saved.
     */
    public function beforeSave($insert)
    {
        if (parent::beforeSave($insert)) {
            if (!empty($this->newPassword)) {
                $this->setPassword($this->newPassword);
            }
            return true;
        }
        return false;
    }
}
|
bsd-3-clause
|
VISTAS-IVES/pyvistas
|
source/vistas/core/graphics/bounding_box.py
|
2460
|
import numpy
from OpenGL.GL import *
from vistas.core.graphics.geometry import Geometry
from vistas.core.graphics.object import Object3D
from vistas.core.graphics.shader import ShaderProgram
from vistas.core.paths import get_builtin_shader
class BoundingBoxHelper(Object3D):
    """ Simple object to help visualize an Object3D's BoundingBox. """

    # Lazily-created shader program shared by all helper instances.
    shader = None

    # Pairs of vertex indices forming the 12 edges of the box
    # (rendered with GL_LINES; vertex numbering matches update() below).
    indices = numpy.array([
        0, 1,
        4, 5,
        2, 3,
        6, 7,
        0, 4,
        1, 5,
        2, 6,
        3, 7,
        0, 2,
        1, 3,
        4, 6,
        5, 7], dtype=GLint)

    def __init__(self, obj: Object3D):
        super().__init__()
        # Compile and link the bbox shader once, on first use.
        if BoundingBoxHelper.shader is None:
            BoundingBoxHelper.shader = ShaderProgram()
            BoundingBoxHelper.shader.attach_shader(get_builtin_shader('bbox_vert.glsl'), GL_VERTEX_SHADER)
            BoundingBoxHelper.shader.attach_shader(get_builtin_shader('bbox_frag.glsl'), GL_FRAGMENT_SHADER)
            BoundingBoxHelper.shader.link_program()
        self.obj = obj
        # 24 edge indices over 8 corner vertices, drawn as line segments.
        self.geometry = Geometry(24, 8, mode=GL_LINES)
        self.geometry.indices = self.indices
        self.update()

    def update(self):
        """ Update the vertex buffer based on the bounding box of the geometry we are attached to. """
        self.geometry.bounding_box = self.obj.bounding_box
        bbox = self.geometry.bounding_box
        # Box extents in world space: local bbox offset by the object's position.
        pos = self.obj.position
        x_min = bbox.min_x + pos.x
        x_max = bbox.max_x + pos.x
        y_min = bbox.min_y + pos.y
        y_max = bbox.max_y + pos.y
        z_min = bbox.min_z + pos.z
        z_max = bbox.max_z + pos.z
        self.geometry.vertices = numpy.array([
            x_min, y_min, z_min,    # 0
            x_max, y_min, z_min,    # 1
            x_min, y_max, z_min,    # 2
            x_max, y_max, z_min,    # 3
            x_min, y_min, z_max,    # 4
            x_max, y_min, z_max,    # 5
            x_min, y_max, z_max,    # 6
            x_max, y_max, z_max     # 7
        ], dtype=GLfloat)

    def render(self, color, camera):
        # Draw the 12 edges in the given color, then restore GL state.
        self.shader.pre_render(camera)
        self.shader.uniform3fv("color", 1, color.rgb.rgb_list)
        glBindVertexArray(self.geometry.vertex_array_object)
        glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, self.geometry.index_buffer)
        glDrawElements(GL_LINES, 24, GL_UNSIGNED_INT, None)
        glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0)
        glBindVertexArray(0)
        self.shader.post_render(camera)
|
bsd-3-clause
|
maxhutch/magma
|
src/core_dsbtype1cb.cpp
|
4809
|
/*
-- MAGMA (version 2.1.0) --
Univ. of Tennessee, Knoxville
Univ. of California, Berkeley
Univ. of Colorado, Denver
@date August 2016
@author Azzam Haidar
@generated from src/core_zhbtype1cb.cpp, normal z -> d, Tue Aug 30 09:38:19 2016
*/
#include "magma_internal.h"
#define A(m,n) (A + lda * (n) + ((m)-(n)))
#define V(m) (V + (m))
#define TAU(m) (TAU + (m))
/***************************************************************************//**
*
* @ingroup magma_hbtype1cb
*
* magma_dsbtype1cb is a kernel that will operate on a region (triangle) of data
* bounded by st and ed. This kernel eliminate a column by an column-wise
* annihiliation, then it apply a left+right update on the symmetric triangle.
* Note that the column to be eliminated is located at st-1.
*
* All detail are available on technical report or SC11 paper.
* Azzam Haidar, Hatem Ltaief, and Jack Dongarra. 2011.
* Parallel reduction to condensed forms for symmetric eigenvalue problems
* using aggregated fine-grained and memory-aware kernels. In Proceedings
* of 2011 International Conference for High Performance Computing,
* Networking, Storage and Analysis (SC '11). ACM, New York, NY, USA,
* Article 8, 11 pages.
* http://doi.acm.org/10.1145/2063384.2063394
*
*******************************************************************************
*
* @param[in] n
* The order of the matrix A.
*
* @param[in] nb
* The size of the band.
*
* @param[in, out] A
* A pointer to the matrix A of size (2*nb+1)-by-n.
*
* @param[in] lda
* The leading dimension of the matrix A. lda >= max(1,2*nb+1)
*
* @param[out] V
* double array, dimension 2*n if eigenvalue only
* requested or (ldv*blkcnt*Vblksiz) if Eigenvectors requested
* The Householder reflectors are stored in this array.
*
* @param[in] ldv
* The leading dimension of the matrix V. ldv >= TODO.
*
* @param[out] TAU
* double array, dimension (n).
* The scalar factors of the Householder reflectors are stored
* in this array.
*
* @param[in] st
* A pointer to the start index where this kernel will operate.
*
* @param[in] ed
* A pointer to the end index where this kernel will operate.
*
* @param[in] sweep
* The sweep number that is eliminated. it serve to calculate the
* pointer to the position where to store the Vs and Ts.
*
* @param[in] Vblksiz
* constant which correspond to the blocking used when applying the Vs.
* it serve to calculate the pointer to the position where to store the
* Vs and Ts.
*
* @param[in] wantz
* constant which indicate if Eigenvalue are requested or both
* Eigenvalue/Eigenvectors.
*
* @param[in] work
* Workspace of size nb.
*
*******************************************************************************
*
* @return
* \retval MAGMA_SUCCESS successful exit
* \retval < 0 if -i, the i-th argument had an illegal value
*
******************************************************************************/
// -----------------------------------------------------------------------------
// TYPE 1-BAND Lower-columnwise-Householder
extern "C" void
magma_dsbtype1cb(magma_int_t n, magma_int_t nb,
                 double *A, magma_int_t lda,
                 double *V, magma_int_t ldv,
                 double *TAU,
                 magma_int_t st, magma_int_t ed, magma_int_t sweep,
                 magma_int_t Vblksiz, magma_int_t wantz,
                 double *work)
{
    magma_int_t len;
    magma_int_t vpos, taupos;
    //magma_int_t blkid, tpos;
    magma_int_t ione = 1;
    double c_one = MAGMA_D_ONE;

    /* find the pointer to the Vs and Ts as stored by the bulgechasing
     * note that in case no eigenvector required V and T are stored
     * on a vector of size n
     * */
    if ( wantz == 0 ) {
        // Eigenvalues only: V/TAU are flat length-n scratch vectors,
        // double-buffered between even and odd sweeps via sweep%2.
        vpos = (sweep%2)*n + st;
        taupos = (sweep%2)*n + st;
    } else {
        magma_bulge_findVTAUpos(n, nb, Vblksiz, sweep, st, ldv, &vpos, &taupos);
        //findVTpos(n, nb, Vblksiz, sweep, st, &vpos, &taupos, &tpos, &blkid);
    }

    // Region length st..ed (inclusive); reflector's first entry is 1.
    len = ed-st+1;
    *(V(vpos)) = c_one;

    //magma_int_t len2 = len-1;
    //blasf77_dcopy( &len2, A(st+1, st-1), &ione, V(vpos+1), &ione );
    // Copy the column to be annihilated into the reflector storage and
    // zero it out in the band matrix.
    memcpy( V(vpos+1), A(st+1, st-1), (len-1)*sizeof(double) );
    memset( A(st+1, st-1), 0, (len-1)*sizeof(double) );

    /* Eliminate the col at st-1 */
    lapackf77_dlarfg( &len, A(st, st-1), V(vpos+1), &ione, TAU(taupos) );

    /* Apply left and right on A(st:ed,st:ed) */
    magma_dlarfy(len, A(st,st), lda-1, V(vpos), TAU(taupos), work);
}
#undef A
#undef V
#undef TAU
|
bsd-3-clause
|
UgCS/vsm-cpp-sdk
|
test/unit/ut_transport_detector.cpp
|
3358
|
// Copyright (c) 2018, Smart Projects Holdings Ltd
// All rights reserved.
// See LICENSE file for license details.
/*
* Tests for Singleton class.
*/
#include <ugcs/vsm/transport_detector.h>
#include <UnitTest++.h>
#include <future>
using namespace ugcs::vsm;
// End-to-end test of Transport_detector's proxy handling: two detectors
// are registered; the fake proxy accepts three connections, answering the
// handshake "VSMP\x02" twice and garbage once in between.  The test
// passes when the second detector's connect callback fires.
TEST(vsm_proxy)
{
    auto sp = Socket_processor::Get_instance();
    auto td = Transport_detector::Get_instance();
    auto proccer = Request_processor::Create("UT2 vsm_proxy");
    auto worker = Request_worker::Create("UT1 vsm_proxy", std::initializer_list<ugcs::vsm::Request_container::Ptr>{proccer});
    auto pr = Properties::Get_instance();

    // Bring up all processors before registering detectors.
    Timer_processor::Get_instance()->Enable();
    sp->Enable();
    worker->Enable();
    proccer->Enable();
    td->Enable();

    Socket_processor::Socket_listener::Ref listener;
    Socket_processor::Socket_listener::Ref server;
    std::promise<void> detected;

    // My transport-connected callback.
    auto h = [&](
        std::string,
        int,
        ugcs::vsm::Socket_address::Ptr,
        ugcs::vsm::Io_stream::Ref,
        int instance)
    {
        LOG("got connection for %d", instance);
        // Only completion of the second detector ends the test.
        if (instance == 2) {
            detected.set_value();
        }
    };

    // Set up dummy configuration
    pr->Set("connection.proxy.1.address","127.0.0.1");
    pr->Set("connection.proxy.1.port","12345");

    // Simulate proxy endpoint.
    sp->Listen("127.0.0.1", "12345",
        Make_socket_listen_callback(
            [&](Socket_processor::Stream::Ref l, Io_result)
            {
                listener = l;
            }));

    // Add two detectors for the proxy connection
    td->Add_detector(
        Transport_detector::Make_connect_handler(h, 1),
        proccer);
    td->Add_detector(
        Transport_detector::Make_connect_handler(h, 2),
        proccer);

    // Simulate proxy first time.
    sp->Accept(
        listener,
        Make_socket_accept_callback(
            [&](Socket_processor::Stream::Ref s, Io_result)
            {
                server = s;
            }));

    // Respond with success to eventual transport detector connection.
    // Should trigger first transport-connected callback.
    server->Write(Io_buffer::Create("VSMP\x02"));

    // Simulate proxy second time.
    sp->Accept(
        listener,
        Make_socket_accept_callback(
            [&](Socket_processor::Stream::Ref s, Io_result)
            {
                server = s;
            }));

    // Respond with invalid data. detector should establish new connection within a second.
    server->Write(Io_buffer::Create("warez"));

    // Simulate proxy third time.
    sp->Accept(
        listener,
        Make_socket_accept_callback(
            [&](Socket_processor::Stream::Ref s, Io_result)
            {
                server = s;
            }));

    // Should trigger second transport-connected callback and succeed the test.
    server->Write(Io_buffer::Create("VSMP\x02"));

    // Wait for test completion.
    auto ret = detected.get_future().wait_for(std::chrono::seconds(5));

    // Test the test result.
    CHECK (ret == std::future_status::ready);

    // Cleanup
    listener->Close();
    server->Close();
    td->Disable();
    proccer->Disable();
    worker->Disable();
    sp->Disable();
    Timer_processor::Get_instance()->Disable();
}
|
bsd-3-clause
|
notogawa/libift
|
src/unistd/fchdir.cpp
|
1724
|
// Copyright (c) 2012, Noriyuki OHKAWA a.k.a. notogawa.
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
//
// * Neither the name of Noriyuki OHKAWA and notogawa nor the names of other
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include <config.h>
#include <unistd.h>
#include "basic_impl.hpp"
// Fault-injection interposer for fchdir(2): the IFT_BASIC_IMPL macro
// generates an `int fchdir(int fd)` wrapper whose injected failure
// returns -1.
IFT_BASIC_IMPL(int, fchdir, (int), -1, (int fd), (fd))
|
bsd-3-clause
|
jotes/pontoon
|
pontoon/sync/tests/test_vcs.py
|
4053
|
from textwrap import dedent
from unittest.mock import patch
from pontoon.sync.vcs.repositories import VCSRepository
from pontoon.base.tests import CONTAINS, TestCase
class VCSRepositoryTests(TestCase):
    """Tests for VCSRepository.execute error reporting."""

    def test_execute_log_error(self):
        """
        If the return code from execute is non-zero and log_errors is
        True, log an error message.
        """
        repo = VCSRepository("/path")
        with patch("pontoon.sync.vcs.repositories.execute") as mock_execute, patch(
            "pontoon.sync.vcs.repositories.log"
        ) as mock_log:
            mock_execute.return_value = 1, "output", "stderr"
            # The (code, stdout, stderr) triple is passed through unchanged.
            assert repo.execute("command", cwd="working_dir", log_errors=True) == (
                1,
                "output",
                "stderr",
            )
            # The logged message must mention the error, command and cwd.
            mock_log.error.assert_called_with(
                CONTAINS("stderr", "command", "working_dir")
            )
class VCSChangedFilesTests(object):
    """
    Mixin class that unifies all tests for changed/removed files between repositories.
    Every subclass should provide two properties:
    * shell_output - a string that the mocked repository command returns.
    * repository_type - a type of the repository that will be used to perform the test.
    """

    shell_output = ""
    repository_type = None

    def setUp(self):
        self.vcsrepository = VCSRepository.for_type(self.repository_type, "/path")

    def execute_success(self, *args, **kwargs):
        """
        Should be called when repository commands returns contents without error.
        """
        return 0, self.shell_output, None

    def execute_failure(self, *args, **kwargs):
        """
        Returns an error for all tests cases that validate error handling.
        """
        return 1, "", None

    def test_changed_files(self):
        # Modified entries in shell_output are parsed into a file list.
        with patch.object(
            self.vcsrepository, "execute", side_effect=self.execute_success
        ) as mock_execute:
            changed_files = self.vcsrepository.get_changed_files("/path", "1")
            assert mock_execute.called
            assert changed_files == [
                "changed_file1.properties",
                "changed_file2.properties",
            ]

    def test_changed_files_error(self):
        # A failing repository command yields an empty list, not an exception.
        with patch.object(
            self.vcsrepository, "execute", side_effect=self.execute_failure
        ) as mock_execute:
            assert self.vcsrepository.get_changed_files("path", "1") == []
            assert mock_execute.called

    def test_removed_files(self):
        # Deleted/removed entries in shell_output are parsed into a file list.
        with patch.object(
            self.vcsrepository, "execute", side_effect=self.execute_success
        ) as mock_execute:
            removed_files = self.vcsrepository.get_removed_files("/path", "1")
            assert mock_execute.called
            assert removed_files == [
                "removed_file1.properties",
                "removed_file2.properties",
            ]

    def test_removed_files_error(self):
        # A failing repository command yields an empty list, not an exception.
        with patch.object(
            self.vcsrepository, "execute", side_effect=self.execute_failure
        ) as mock_execute:
            assert self.vcsrepository.get_removed_files("path", "1") == []
            assert mock_execute.called
class GitChangedFilesTest(VCSChangedFilesTests, TestCase):
    # git name-status output: M = modified, D = deleted.
    repository_type = "git"
    shell_output = dedent(
        """
        M changed_file1.properties
        M changed_file2.properties
        D removed_file1.properties
        D removed_file2.properties
        """
    )
class HgChangedFilesTest(VCSChangedFilesTests, TestCase):
    # hg status output: M = modified, R = removed.
    repository_type = "hg"
    shell_output = dedent(
        """
        M changed_file1.properties
        M changed_file2.properties
        R removed_file1.properties
        R removed_file2.properties
        """
    )
class SVNChangedFilesTest(VCSChangedFilesTests, TestCase):
    # svn status output: M = modified, D = deleted.
    repository_type = "svn"
    shell_output = dedent(
        """
        M changed_file1.properties
        M changed_file2.properties
        D removed_file1.properties
        D removed_file2.properties
        """
    )
|
bsd-3-clause
|
imerr/LibM2
|
game/DESC_MANAGER.cpp
|
5635
|
/* This file belongs to the LibM2 library (http://github.com/imermcmaps/LibM2)
* Copyright (c) 2013, iMer (www.imer.cc)
* All rights reserved.
* Licensed under the BSD 3-clause license (http://opensource.org/licenses/BSD-3-Clause)
*/
#include "DESC_MANAGER.hpp"
#include "../addr.hpp"
namespace libm2 {
LPDESC DESC_MANAGER::AcceptDesc(LPFDWATCH fdw, socket_t s) {
return ((LPDESC(*)(DESC_MANAGER * const, LPFDWATCH, socket_t))Addr::DESC_MANAGER::AcceptDesc__LPFDWATCH_socket_t)(this, fdw, s);
}
LPDESC DESC_MANAGER::AcceptP2PDesc(LPFDWATCH fdw, socket_t bind_fd) {
return ((LPDESC(*)(DESC_MANAGER * const, LPFDWATCH, socket_t))Addr::DESC_MANAGER::AcceptP2PDesc__LPFDWATCH_socket_t)(this, fdw, bind_fd);
}
void DESC_MANAGER::ConnectAccount(const std::string & login, LPDESC d) {
((void(*)(DESC_MANAGER * const, const std::string &, LPDESC))Addr::DESC_MANAGER::ConnectAccount__const_std_locale_string__LPDESC)(this, login, d);
}
LPCLIENT_DESC DESC_MANAGER::CreateConnectionDesc(LPFDWATCH fdw, const char * host, WORD port, int iPhaseWhenSucceed, bool bRetryWhenClosed) {
return ((LPCLIENT_DESC(*)(DESC_MANAGER * const, LPFDWATCH, const char *, WORD, int, bool))Addr::DESC_MANAGER::CreateConnectionDesc__LPFDWATCH_const_char__WORD_int_bool)(this, fdw, host, port, iPhaseWhenSucceed, bRetryWhenClosed);
}
DWORD DESC_MANAGER::CreateHandshake() {
return ((DWORD(*)(DESC_MANAGER * const))Addr::DESC_MANAGER::CreateHandshake)(this);
}
DWORD DESC_MANAGER::CreateLoginKey(LPDESC d) {
return ((DWORD(*)(DESC_MANAGER * const, LPDESC))Addr::DESC_MANAGER::CreateLoginKey__LPDESC)(this, d);
}
DESC_MANAGER::DESC_MANAGER() {
((void(*)(DESC_MANAGER * const))Addr::DESC_MANAGER::DESC_MANAGER)(this);
}
void DESC_MANAGER::Destroy() {
((void(*)(DESC_MANAGER * const))Addr::DESC_MANAGER::Destroy)(this);
}
void DESC_MANAGER::DestroyClosed() {
((void(*)(DESC_MANAGER * const))Addr::DESC_MANAGER::DestroyClosed)(this);
}
void DESC_MANAGER::DestroyDesc(LPDESC d, bool bEraseFromSet) {
((void(*)(DESC_MANAGER * const, LPDESC, bool))Addr::DESC_MANAGER::DestroyDesc__LPDESC_bool)(this, d, bEraseFromSet);
}
void DESC_MANAGER::DisconnectAccount(const std::string & login) {
((void(*)(DESC_MANAGER * const, const std::string &))Addr::DESC_MANAGER::DisconnectAccount__const_std_locale_string_)(this, login);
}
LPDESC DESC_MANAGER::FindByCharacterName(const char * name) {
return ((LPDESC(*)(DESC_MANAGER * const, const char *))Addr::DESC_MANAGER::FindByCharacterName__const_char_)(this, name);
}
LPDESC DESC_MANAGER::FindByHandle(DWORD handle) {
return ((LPDESC(*)(DESC_MANAGER * const, DWORD))Addr::DESC_MANAGER::FindByHandle__DWORD)(this, handle);
}
LPDESC DESC_MANAGER::FindByHandshake(DWORD dwHandshake) {
return ((LPDESC(*)(DESC_MANAGER * const, DWORD))Addr::DESC_MANAGER::FindByHandshake__DWORD)(this, dwHandshake);
}
LPDESC DESC_MANAGER::FindByLoginKey(DWORD dwKey) {
return ((LPDESC(*)(DESC_MANAGER * const, DWORD))Addr::DESC_MANAGER::FindByLoginKey__DWORD)(this, dwKey);
}
LPDESC DESC_MANAGER::FindByLoginName(const std::string & login) {
return ((LPDESC(*)(DESC_MANAGER * const, const std::string &))Addr::DESC_MANAGER::FindByLoginName__const_std_locale_string_)(this, login);
}
const std::tr1::unordered_set<DESC*> & DESC_MANAGER::GetClientSet() {
return ((const std::tr1::unordered_set<DESC*> &(*)(DESC_MANAGER * const))Addr::DESC_MANAGER::GetClientSet)(this);
}
void DESC_MANAGER::GetUserCount(int & iTotal, int ** paiEmpireUserCount, int & iLocalCount) {
((void(*)(DESC_MANAGER * const, int &, int **, int &))Addr::DESC_MANAGER::GetUserCount__int__int__int_)(this, iTotal, paiEmpireUserCount, iLocalCount);
}
// --- DESC_MANAGER binding thunks (init, crypto, P2P, teardown) --------------
void DESC_MANAGER::Initialize() {
    ((void(*)(DESC_MANAGER * const))Addr::DESC_MANAGER::Initialize)(this);
}
void DESC_MANAGER::IsP2PDescExist(const char * szHost, WORD wPort);  // (see below)
}
|
bsd-3-clause
|
yanbingbing/fly
|
library/Fly/Db/Adapter/Driver/ResultInterface.php
|
1166
|
<?php
/**
* Fly Framework
*
* @copyright Copyright (c) 2013 Bingbing. (http://yanbingbing.com)
*/
namespace Fly\Db\Adapter\Driver;
interface ResultInterface extends \Countable, \Iterator
{
    /**
     * Force buffering
     *
     * After buffering, the result can be iterated and counted without
     * depending on the underlying driver cursor.
     *
     * @return void
     */
    public function buffer();
    /**
     * Check if is buffered
     *
     * Null indicates the driver has not decided / does not distinguish.
     *
     * @return bool|null
     */
    public function isBuffered();
    /**
     * Is query result?
     *
     * True for row-producing statements (SELECT), false for statements that
     * only report affected rows (INSERT/UPDATE/DELETE).
     *
     * @return bool
     */
    public function isQueryResult();
    /**
     * Get affected rows
     *
     * @return int
     */
    public function getAffectedRows();
    /**
     * Get generated value
     *
     * Typically the last auto-increment id; null when not applicable.
     *
     * @return mixed|null
     */
    public function getGeneratedValue();
    /**
     * Get the resource
     *
     * The raw driver-level result handle.
     *
     * @return mixed
     */
    public function getResource();
    /**
     * Get field count
     *
     * @return int
     */
    public function getFieldCount();
    /**
     * Fetch a record from result
     *
     * @return false|array False when no more rows are available.
     */
    public function fetch();
    /**
     * Fetch all results as array
     *
     * @return array
     */
    public function fetchAll();
}
|
bsd-3-clause
|
Serulian/compiler
|
compilercommon/localpositionmapper.go
|
1568
|
// Copyright 2017 The Serulian Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package compilercommon
import (
"fmt"
"io/ioutil"
"strings"
)
// LocalFilePositionMapper implements the PositionMapper interface directly
// over the local file system. Every call re-reads the target file from disk
// and rebuilds the source position mapper — nothing is cached — so this
// implementation should *only* be used for testing.
type LocalFilePositionMapper struct{}
// RunePositionToLineAndCol translates an absolute rune offset in the file at
// `path` into a (line, column) pair. The file is read from disk on every
// call; on a read failure (-1, -1) is returned alongside the error.
func (pm LocalFilePositionMapper) RunePositionToLineAndCol(runePosition int, path InputSource, sourceOption SourceMappingOption) (int, int, error) {
	data, readErr := ioutil.ReadFile(string(path))
	if readErr != nil {
		return -1, -1, readErr
	}
	return CreateSourcePositionMapper(data).RunePositionToLineAndCol(runePosition)
}
// LineAndColToRunePosition translates a (line, column) pair in the file at
// `path` into an absolute rune offset. The file is read from disk on every
// call; on a read failure -1 is returned alongside the error.
func (pm LocalFilePositionMapper) LineAndColToRunePosition(lineNumber int, colPosition int, path InputSource, sourceOption SourceMappingOption) (int, error) {
	data, readErr := ioutil.ReadFile(string(path))
	if readErr != nil {
		return -1, readErr
	}
	return CreateSourcePositionMapper(data).LineAndColToRunePosition(lineNumber, colPosition)
}
// TextForLine returns the contents of the (zero-based) line `lineNumber` of
// the file at `path`. The file is re-read from disk on every call. An error
// is returned when the file cannot be read or the line number is out of
// range.
func (pm LocalFilePositionMapper) TextForLine(lineNumber int, path InputSource, sourceOption SourceMappingOption) (string, error) {
	contents, err := ioutil.ReadFile(string(path))
	if err != nil {
		return "", err
	}
	lines := strings.Split(string(contents), "\n")
	// Guard the lower bound as well: a negative index previously panicked
	// instead of returning an error. The message is kept unchanged for
	// compatibility with existing callers.
	if lineNumber < 0 || lineNumber >= len(lines) {
		return "", fmt.Errorf("Invalid line number")
	}
	return lines[lineNumber], nil
}
|
bsd-3-clause
|
lwright-sq/HTML_CodeSniffer
|
Standards/WCAG2AAA/Sniffs/Principle1/Guideline1_2/1_2_2.js
|
1641
|
/**
* +--------------------------------------------------------------------+
* | This HTML_CodeSniffer file is Copyright (c) |
* | Squiz Pty Ltd (ABN 77 084 670 600) |
* +--------------------------------------------------------------------+
* | IMPORTANT: Your use of this Software is subject to the terms of |
* | the Licence provided in the file licence.txt. If you cannot find |
* | this file please contact Squiz (www.squiz.com.au) so we may |
* | provide you a copy. |
* +--------------------------------------------------------------------+
*
*/
HTMLCS.addSniff('WCAG2AAA', 'Principle1.Guideline1_2.1_2_2', {
    /**
     * Determines the elements to register for processing.
     *
     * Each entry is an element name (or "_top" for the top tested element).
     *
     * @returns {Array} The list of elements.
     */
    register: function()
    {
        // Embedded-media containers that can carry pre-recorded
        // synchronised media.
        var embeddedMediaTags = [
            'object',
            'embed',
            'applet',
            'video'
        ];
        return embeddedMediaTags;
    },

    /**
     * Process the registered element by emitting a manual-check notice.
     *
     * @param {DOMNode} element The element registered.
     * @param {DOMNode} top     The top element of the tested code.
     */
    process: function(element, top)
    {
        var notice = 'If this embedded object contains pre-recorded synchronised media and is not provided as an alternative for text content, check that captions are provided for audio content.';
        HTMLCS.addMessage(HTMLCS.NOTICE, element, notice, 'G87,G93');
    }
});
|
bsd-3-clause
|
raquel-ucl/cartodb
|
lib/assets/javascripts/cartodb/table/geocoder_working.js
|
1094
|
/**
 * Geocoder modal window. Prevents the user from running it
 * twice at the same time.
*/
// Confirmation dialog shown when a geocoding job is already in progress:
// informs the user the geocoder is running and offers a single "Close" action.
cdb.admin.GeocoderWorking = cdb.admin.BaseDialog.extend({
    // User-facing strings, wrapped in _t() for translation.
    _TEXTS: {
        georeference: {
            title: _t('Geocoder is already running'),
            description: _t('If you want to georeference using another pattern, please cancel the current \
one (at the right bottom of your screen) and start again the process.'),
            ok: _t('Close')
        }
    },
    initialize: function(options) {
        var self = this;
        // Configure the shared confirm-dialog template: single OK button,
        // hidden cancel button, auto-clean on hide.
        _.extend(this.options, {
            title: self._TEXTS.georeference.title,
            description: self._TEXTS.georeference.description,
            template_name: 'old_common/views/confirm_dialog',
            clean_on_hide: true,
            enter_to_confirm: true,
            ok_button_classes: "right button grey",
            ok_title: self._TEXTS.georeference.ok,
            cancel_button_classes: "hide",
            modal_type: "confirmation",
            width: 500
        });
        // NOTE(review): the parent initialize is invoked without forwarding
        // `options`/`arguments`; presumably BaseDialog reads this.options
        // (already extended above) — confirm before changing.
        this.constructor.__super__.initialize.apply(this);
    }
});
|
bsd-3-clause
|
mattunderscorechampion/tree-root
|
trees-mutable/src/main/java/com/mattunderscore/trees/mutable/MutableTreeImpl.java
|
6104
|
/* Copyright © 2014 Matthew Champion
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of mattunderscore.com nor the
names of its contributors may be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL MATTHEW CHAMPION BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */
package com.mattunderscore.trees.mutable;
import java.util.Arrays;
import java.util.Iterator;
import net.jcip.annotations.GuardedBy;
import com.mattunderscore.simple.collections.FixedUncheckedSimpleCollection;
import com.mattunderscore.simple.collections.SimpleCollection;
import com.mattunderscore.trees.base.AbstractSettableNode;
import com.mattunderscore.trees.base.MutableChildIterator;
import com.mattunderscore.trees.construction.TypeKey;
/**
 * Initial attempt at thread safety is based on copy on mutation. When a child node is added or removed a shallow copy
* of the children is made with the modification present and the new child collection replaces the existing one. Any
* iterators accessing the old child collection see the previous state.
* <p>The problem is that modifications to grandchildren can still be seen because of the shallow copy. The problem with
* this is the modifications can be observed out of order. The modification to the grandchildren is made after the
* parent but seen first.</p>
* @author Matt Champion on 15/07/14.
*/
public final class MutableTreeImpl<E> extends AbstractSettableNode<E, MutableSettableNode<E>> implements MutableTree<E, MutableSettableNode<E>>, MutableSettableNode<E> {
    /** Current snapshot of the children; replaced wholesale on every mutation. */
    @GuardedBy("this")
    private SimpleCollection<MutableTreeImpl<E>> childList;

    /**
     * Creates a node holding {@code element} with no children.
     *
     * @param element the element for this node; a {@code null} element marks the tree as empty
     */
    public MutableTreeImpl(E element) {
        super(element);
        childList = new FixedUncheckedSimpleCollection<>(new Object[0]);
    }

    /**
     * Creates a node holding {@code element} with the supplied child snapshot.
     *
     * @param element the element for this node
     * @param childList the initial child collection (used directly, not copied)
     */
    MutableTreeImpl(E element, SimpleCollection<MutableTreeImpl<E>> childList) {
        super(element);
        this.childList = childList;
    }

    /**
     * Appends a new child node holding {@code e}. A fresh child array is built
     * under the lock and swapped in, so iterators over the previous snapshot
     * keep seeing the old state (copy-on-write).
     *
     * @param e the child element, must not be {@code null}
     * @return the newly created child node
     * @throws NullPointerException if {@code e} is {@code null}
     */
    @Override
    public MutableTreeImpl<E> addChild(E e) {
        if (e == null) {
            // Fixed message: the failure condition is a null child element,
            // not an empty tree, which the old message incorrectly claimed.
            throw new NullPointerException("Child element must not be null");
        }
        final MutableTreeImpl<E> child = new MutableTreeImpl<>(e);
        synchronized (this) {
            final SimpleCollection<MutableTreeImpl<E>> oldList = childList;
            final int size = oldList.size();
            final Object[] newArray = new Object[size + 1];
            int i = 0;
            final Iterator<MutableTreeImpl<E>> iterator = oldList.structuralIterator();
            while (iterator.hasNext()) {
                newArray[i] = iterator.next();
                i++;
            }
            newArray[size] = child;
            childList = new FixedUncheckedSimpleCollection<>(newArray);
        }
        return child;
    }

    /**
     * Removes {@code child} (matched by reference identity) from this node's
     * children, publishing a trimmed copy-on-write snapshot.
     *
     * @param child the child node to remove; {@code null} is tolerated and ignored
     * @return {@code true} if a child was removed
     */
    @Override
    public boolean removeChild(MutableSettableNode<E> child) {
        if (child == null) {
            return false;
        }
        synchronized (this) {
            final SimpleCollection<MutableTreeImpl<E>> oldList = childList;
            final int size = oldList.size();
            final Object[] retained = new Object[size];
            int seen = 0;
            int kept = 0;
            final Iterator<MutableTreeImpl<E>> iterator = oldList.structuralIterator();
            while (iterator.hasNext()) {
                final MutableSettableNode<E> currentNode = iterator.next();
                if (child != currentNode) {
                    retained[kept] = currentNode;
                    kept++;
                }
                seen++;
            }
            if (kept == seen) {
                // The child was not present; keep the existing snapshot.
                return false;
            }
            childList = new FixedUncheckedSimpleCollection<>(Arrays.copyOf(retained, size - 1));
            return true;
        }
    }

    /**
     * @return this node when the tree is non-empty, otherwise {@code null}
     */
    @Override
    public MutableTreeImpl<E> getRoot() {
        if (isEmpty()) {
            return null;
        }
        else {
            return this;
        }
    }

    /**
     * @return {@code true} when no element has been set (a {@code null}
     *         element marks the tree as empty)
     */
    @Override
    public boolean isEmpty() {
        return elementReference.get() == null;
    }

    /**
     * Sets (or replaces) the root element.
     *
     * @param root the new root element
     * @return this tree
     */
    @Override
    public MutableTreeImpl<E> setRoot(E root) {
        elementReference.set(root);
        return this;
    }

    /**
     * Construct a TypeKey for a specific element type.
     * @param <E> The element type
     * @return The type key
     */
    public static <E> TypeKey<MutableTreeImpl<E>> typeKey() {
        return new TypeKey<MutableTreeImpl<E>>() {};
    }

    /**
     * @return the number of children of this node (reads the current snapshot
     *         under the lock)
     */
    @Override
    public int getNumberOfChildren() {
        synchronized (this) {
            return childList.size();
        }
    }

    /**
     * @return a mutable iterator over the current child snapshot; removal
     *         through the iterator delegates back to this node
     */
    @Override
    public Iterator<MutableSettableNode<E>> childIterator() {
        synchronized (this) {
            return new MutableChildIterator<>(this, childList.iterator());
        }
    }
}
|
bsd-3-clause
|
BITechnologies/boo
|
src/Boo.Lang.Compiler/Ast/Impl/DepthFirstGuide.cs
|
59733
|
#region license
// Copyright (c) 2009 Rodrigo B. de Oliveira (rbo@acm.org)
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
// * Neither the name of Rodrigo B. de Oliveira nor the names of its
// contributors may be used to endorse or promote products derived from this
// software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
// FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
// DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
// OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
// THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#endregion
//
// DO NOT EDIT THIS FILE!
//
// This file was generated automatically by astgen.boo.
//
namespace Boo.Lang.Compiler.Ast
{
using System;
public delegate void NodeEvent<T>(T node) where T:Node;
public partial class DepthFirstGuide : IAstVisitor
{
public event NodeEvent<CompileUnit> OnCompileUnit;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnCompileUnit(Boo.Lang.Compiler.Ast.CompileUnit node)
{
{
var modules = node.Modules;
if (modules != null)
{
var innerList = modules.InnerList;
var count = innerList.Count;
for (var i=0; i<count; ++i)
innerList.FastAt(i).Accept(this);
}
}
var handler = OnCompileUnit;
if (handler != null)
handler(node);
}
public event NodeEvent<TypeMemberStatement> OnTypeMemberStatement;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnTypeMemberStatement(Boo.Lang.Compiler.Ast.TypeMemberStatement node)
{
{
var modifier = node.Modifier;
if (modifier != null)
modifier.Accept(this);
}
{
var typeMember = node.TypeMember;
if (typeMember != null)
typeMember.Accept(this);
}
var handler = OnTypeMemberStatement;
if (handler != null)
handler(node);
}
public event NodeEvent<ExplicitMemberInfo> OnExplicitMemberInfo;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnExplicitMemberInfo(Boo.Lang.Compiler.Ast.ExplicitMemberInfo node)
{
{
var interfaceType = node.InterfaceType;
if (interfaceType != null)
interfaceType.Accept(this);
}
var handler = OnExplicitMemberInfo;
if (handler != null)
handler(node);
}
public event NodeEvent<SimpleTypeReference> OnSimpleTypeReference;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnSimpleTypeReference(Boo.Lang.Compiler.Ast.SimpleTypeReference node)
{
var handler = OnSimpleTypeReference;
if (handler == null)
return;
handler(node);
}
public event NodeEvent<ArrayTypeReference> OnArrayTypeReference;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnArrayTypeReference(Boo.Lang.Compiler.Ast.ArrayTypeReference node)
{
{
var elementType = node.ElementType;
if (elementType != null)
elementType.Accept(this);
}
{
var rank = node.Rank;
if (rank != null)
rank.Accept(this);
}
var handler = OnArrayTypeReference;
if (handler != null)
handler(node);
}
public event NodeEvent<CallableTypeReference> OnCallableTypeReference;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnCallableTypeReference(Boo.Lang.Compiler.Ast.CallableTypeReference node)
{
{
var parameters = node.Parameters;
if (parameters != null)
{
var innerList = parameters.InnerList;
var count = innerList.Count;
for (var i=0; i<count; ++i)
innerList.FastAt(i).Accept(this);
}
}
{
var returnType = node.ReturnType;
if (returnType != null)
returnType.Accept(this);
}
var handler = OnCallableTypeReference;
if (handler != null)
handler(node);
}
public event NodeEvent<GenericTypeReference> OnGenericTypeReference;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnGenericTypeReference(Boo.Lang.Compiler.Ast.GenericTypeReference node)
{
{
var genericArguments = node.GenericArguments;
if (genericArguments != null)
{
var innerList = genericArguments.InnerList;
var count = innerList.Count;
for (var i=0; i<count; ++i)
innerList.FastAt(i).Accept(this);
}
}
var handler = OnGenericTypeReference;
if (handler != null)
handler(node);
}
public event NodeEvent<GenericTypeDefinitionReference> OnGenericTypeDefinitionReference;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnGenericTypeDefinitionReference(Boo.Lang.Compiler.Ast.GenericTypeDefinitionReference node)
{
var handler = OnGenericTypeDefinitionReference;
if (handler == null)
return;
handler(node);
}
public event NodeEvent<CallableDefinition> OnCallableDefinition;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnCallableDefinition(Boo.Lang.Compiler.Ast.CallableDefinition node)
{
{
var attributes = node.Attributes;
if (attributes != null)
{
var innerList = attributes.InnerList;
var count = innerList.Count;
for (var i=0; i<count; ++i)
innerList.FastAt(i).Accept(this);
}
}
{
var parameters = node.Parameters;
if (parameters != null)
{
var innerList = parameters.InnerList;
var count = innerList.Count;
for (var i=0; i<count; ++i)
innerList.FastAt(i).Accept(this);
}
}
{
var genericParameters = node.GenericParameters;
if (genericParameters != null)
{
var innerList = genericParameters.InnerList;
var count = innerList.Count;
for (var i=0; i<count; ++i)
innerList.FastAt(i).Accept(this);
}
}
{
var returnType = node.ReturnType;
if (returnType != null)
returnType.Accept(this);
}
{
var returnTypeAttributes = node.ReturnTypeAttributes;
if (returnTypeAttributes != null)
{
var innerList = returnTypeAttributes.InnerList;
var count = innerList.Count;
for (var i=0; i<count; ++i)
innerList.FastAt(i).Accept(this);
}
}
var handler = OnCallableDefinition;
if (handler != null)
handler(node);
}
public event NodeEvent<NamespaceDeclaration> OnNamespaceDeclaration;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnNamespaceDeclaration(Boo.Lang.Compiler.Ast.NamespaceDeclaration node)
{
var handler = OnNamespaceDeclaration;
if (handler == null)
return;
handler(node);
}
public event NodeEvent<Import> OnImport;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnImport(Boo.Lang.Compiler.Ast.Import node)
{
{
var expression = node.Expression;
if (expression != null)
expression.Accept(this);
}
{
var assemblyReference = node.AssemblyReference;
if (assemblyReference != null)
assemblyReference.Accept(this);
}
{
var alias = node.Alias;
if (alias != null)
alias.Accept(this);
}
var handler = OnImport;
if (handler != null)
handler(node);
}
public event NodeEvent<Module> OnModule;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnModule(Boo.Lang.Compiler.Ast.Module node)
{
{
var attributes = node.Attributes;
if (attributes != null)
{
var innerList = attributes.InnerList;
var count = innerList.Count;
for (var i=0; i<count; ++i)
innerList.FastAt(i).Accept(this);
}
}
{
var members = node.Members;
if (members != null)
{
var innerList = members.InnerList;
var count = innerList.Count;
for (var i=0; i<count; ++i)
innerList.FastAt(i).Accept(this);
}
}
{
var baseTypes = node.BaseTypes;
if (baseTypes != null)
{
var innerList = baseTypes.InnerList;
var count = innerList.Count;
for (var i=0; i<count; ++i)
innerList.FastAt(i).Accept(this);
}
}
{
var genericParameters = node.GenericParameters;
if (genericParameters != null)
{
var innerList = genericParameters.InnerList;
var count = innerList.Count;
for (var i=0; i<count; ++i)
innerList.FastAt(i).Accept(this);
}
}
{
var @namespace = node.Namespace;
if (@namespace != null)
@namespace.Accept(this);
}
{
var imports = node.Imports;
if (imports != null)
{
var innerList = imports.InnerList;
var count = innerList.Count;
for (var i=0; i<count; ++i)
innerList.FastAt(i).Accept(this);
}
}
{
var globals = node.Globals;
if (globals != null)
globals.Accept(this);
}
{
var assemblyAttributes = node.AssemblyAttributes;
if (assemblyAttributes != null)
{
var innerList = assemblyAttributes.InnerList;
var count = innerList.Count;
for (var i=0; i<count; ++i)
innerList.FastAt(i).Accept(this);
}
}
var handler = OnModule;
if (handler != null)
handler(node);
}
public event NodeEvent<ClassDefinition> OnClassDefinition;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnClassDefinition(Boo.Lang.Compiler.Ast.ClassDefinition node)
{
{
var attributes = node.Attributes;
if (attributes != null)
{
var innerList = attributes.InnerList;
var count = innerList.Count;
for (var i=0; i<count; ++i)
innerList.FastAt(i).Accept(this);
}
}
{
var members = node.Members;
if (members != null)
{
var innerList = members.InnerList;
var count = innerList.Count;
for (var i=0; i<count; ++i)
innerList.FastAt(i).Accept(this);
}
}
{
var baseTypes = node.BaseTypes;
if (baseTypes != null)
{
var innerList = baseTypes.InnerList;
var count = innerList.Count;
for (var i=0; i<count; ++i)
innerList.FastAt(i).Accept(this);
}
}
{
var genericParameters = node.GenericParameters;
if (genericParameters != null)
{
var innerList = genericParameters.InnerList;
var count = innerList.Count;
for (var i=0; i<count; ++i)
innerList.FastAt(i).Accept(this);
}
}
var handler = OnClassDefinition;
if (handler != null)
handler(node);
}
public event NodeEvent<StructDefinition> OnStructDefinition;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnStructDefinition(Boo.Lang.Compiler.Ast.StructDefinition node)
{
{
var attributes = node.Attributes;
if (attributes != null)
{
var innerList = attributes.InnerList;
var count = innerList.Count;
for (var i=0; i<count; ++i)
innerList.FastAt(i).Accept(this);
}
}
{
var members = node.Members;
if (members != null)
{
var innerList = members.InnerList;
var count = innerList.Count;
for (var i=0; i<count; ++i)
innerList.FastAt(i).Accept(this);
}
}
{
var baseTypes = node.BaseTypes;
if (baseTypes != null)
{
var innerList = baseTypes.InnerList;
var count = innerList.Count;
for (var i=0; i<count; ++i)
innerList.FastAt(i).Accept(this);
}
}
{
var genericParameters = node.GenericParameters;
if (genericParameters != null)
{
var innerList = genericParameters.InnerList;
var count = innerList.Count;
for (var i=0; i<count; ++i)
innerList.FastAt(i).Accept(this);
}
}
var handler = OnStructDefinition;
if (handler != null)
handler(node);
}
public event NodeEvent<InterfaceDefinition> OnInterfaceDefinition;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnInterfaceDefinition(Boo.Lang.Compiler.Ast.InterfaceDefinition node)
{
{
var attributes = node.Attributes;
if (attributes != null)
{
var innerList = attributes.InnerList;
var count = innerList.Count;
for (var i=0; i<count; ++i)
innerList.FastAt(i).Accept(this);
}
}
{
var members = node.Members;
if (members != null)
{
var innerList = members.InnerList;
var count = innerList.Count;
for (var i=0; i<count; ++i)
innerList.FastAt(i).Accept(this);
}
}
{
var baseTypes = node.BaseTypes;
if (baseTypes != null)
{
var innerList = baseTypes.InnerList;
var count = innerList.Count;
for (var i=0; i<count; ++i)
innerList.FastAt(i).Accept(this);
}
}
{
var genericParameters = node.GenericParameters;
if (genericParameters != null)
{
var innerList = genericParameters.InnerList;
var count = innerList.Count;
for (var i=0; i<count; ++i)
innerList.FastAt(i).Accept(this);
}
}
var handler = OnInterfaceDefinition;
if (handler != null)
handler(node);
}
public event NodeEvent<EnumDefinition> OnEnumDefinition;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnEnumDefinition(Boo.Lang.Compiler.Ast.EnumDefinition node)
{
{
var attributes = node.Attributes;
if (attributes != null)
{
var innerList = attributes.InnerList;
var count = innerList.Count;
for (var i=0; i<count; ++i)
innerList.FastAt(i).Accept(this);
}
}
{
var members = node.Members;
if (members != null)
{
var innerList = members.InnerList;
var count = innerList.Count;
for (var i=0; i<count; ++i)
innerList.FastAt(i).Accept(this);
}
}
{
var baseTypes = node.BaseTypes;
if (baseTypes != null)
{
var innerList = baseTypes.InnerList;
var count = innerList.Count;
for (var i=0; i<count; ++i)
innerList.FastAt(i).Accept(this);
}
}
{
var genericParameters = node.GenericParameters;
if (genericParameters != null)
{
var innerList = genericParameters.InnerList;
var count = innerList.Count;
for (var i=0; i<count; ++i)
innerList.FastAt(i).Accept(this);
}
}
var handler = OnEnumDefinition;
if (handler != null)
handler(node);
}
public event NodeEvent<EnumMember> OnEnumMember;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnEnumMember(Boo.Lang.Compiler.Ast.EnumMember node)
{
{
var attributes = node.Attributes;
if (attributes != null)
{
var innerList = attributes.InnerList;
var count = innerList.Count;
for (var i=0; i<count; ++i)
innerList.FastAt(i).Accept(this);
}
}
{
var initializer = node.Initializer;
if (initializer != null)
initializer.Accept(this);
}
var handler = OnEnumMember;
if (handler != null)
handler(node);
}
public event NodeEvent<Field> OnField;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnField(Boo.Lang.Compiler.Ast.Field node)
{
{
var attributes = node.Attributes;
if (attributes != null)
{
var innerList = attributes.InnerList;
var count = innerList.Count;
for (var i=0; i<count; ++i)
innerList.FastAt(i).Accept(this);
}
}
{
var type = node.Type;
if (type != null)
type.Accept(this);
}
{
var initializer = node.Initializer;
if (initializer != null)
initializer.Accept(this);
}
var handler = OnField;
if (handler != null)
handler(node);
}
public event NodeEvent<Property> OnProperty;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnProperty(Boo.Lang.Compiler.Ast.Property node)
{
{
var attributes = node.Attributes;
if (attributes != null)
{
var innerList = attributes.InnerList;
var count = innerList.Count;
for (var i=0; i<count; ++i)
innerList.FastAt(i).Accept(this);
}
}
{
var parameters = node.Parameters;
if (parameters != null)
{
var innerList = parameters.InnerList;
var count = innerList.Count;
for (var i=0; i<count; ++i)
innerList.FastAt(i).Accept(this);
}
}
{
var getter = node.Getter;
if (getter != null)
getter.Accept(this);
}
{
var setter = node.Setter;
if (setter != null)
setter.Accept(this);
}
{
var type = node.Type;
if (type != null)
type.Accept(this);
}
{
var explicitInfo = node.ExplicitInfo;
if (explicitInfo != null)
explicitInfo.Accept(this);
}
var handler = OnProperty;
if (handler != null)
handler(node);
}
public event NodeEvent<Event> OnEvent;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnEvent(Boo.Lang.Compiler.Ast.Event node)
{
{
var attributes = node.Attributes;
if (attributes != null)
{
var innerList = attributes.InnerList;
var count = innerList.Count;
for (var i=0; i<count; ++i)
innerList.FastAt(i).Accept(this);
}
}
{
var add = node.Add;
if (add != null)
add.Accept(this);
}
{
var remove = node.Remove;
if (remove != null)
remove.Accept(this);
}
{
var raise = node.Raise;
if (raise != null)
raise.Accept(this);
}
{
var type = node.Type;
if (type != null)
type.Accept(this);
}
var handler = OnEvent;
if (handler != null)
handler(node);
}
public event NodeEvent<Local> OnLocal;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnLocal(Boo.Lang.Compiler.Ast.Local node)
{
var handler = OnLocal;
if (handler == null)
return;
handler(node);
}
public event NodeEvent<BlockExpression> OnBlockExpression;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnBlockExpression(Boo.Lang.Compiler.Ast.BlockExpression node)
{
{
var parameters = node.Parameters;
if (parameters != null)
{
var innerList = parameters.InnerList;
var count = innerList.Count;
for (var i=0; i<count; ++i)
innerList.FastAt(i).Accept(this);
}
}
{
var returnType = node.ReturnType;
if (returnType != null)
returnType.Accept(this);
}
{
var body = node.Body;
if (body != null)
body.Accept(this);
}
var handler = OnBlockExpression;
if (handler != null)
handler(node);
}
public event NodeEvent<Method> OnMethod;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnMethod(Boo.Lang.Compiler.Ast.Method node)
{
{
var attributes = node.Attributes;
if (attributes != null)
{
var innerList = attributes.InnerList;
var count = innerList.Count;
for (var i=0; i<count; ++i)
innerList.FastAt(i).Accept(this);
}
}
{
var parameters = node.Parameters;
if (parameters != null)
{
var innerList = parameters.InnerList;
var count = innerList.Count;
for (var i=0; i<count; ++i)
innerList.FastAt(i).Accept(this);
}
}
{
var genericParameters = node.GenericParameters;
if (genericParameters != null)
{
var innerList = genericParameters.InnerList;
var count = innerList.Count;
for (var i=0; i<count; ++i)
innerList.FastAt(i).Accept(this);
}
}
{
var returnType = node.ReturnType;
if (returnType != null)
returnType.Accept(this);
}
{
var returnTypeAttributes = node.ReturnTypeAttributes;
if (returnTypeAttributes != null)
{
var innerList = returnTypeAttributes.InnerList;
var count = innerList.Count;
for (var i=0; i<count; ++i)
innerList.FastAt(i).Accept(this);
}
}
{
var body = node.Body;
if (body != null)
body.Accept(this);
}
{
var locals = node.Locals;
if (locals != null)
{
var innerList = locals.InnerList;
var count = innerList.Count;
for (var i=0; i<count; ++i)
innerList.FastAt(i).Accept(this);
}
}
{
var explicitInfo = node.ExplicitInfo;
if (explicitInfo != null)
explicitInfo.Accept(this);
}
var handler = OnMethod;
if (handler != null)
handler(node);
}
public event NodeEvent<Constructor> OnConstructor;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnConstructor(Boo.Lang.Compiler.Ast.Constructor node)
{
{
var attributes = node.Attributes;
if (attributes != null)
{
var innerList = attributes.InnerList;
var count = innerList.Count;
for (var i=0; i<count; ++i)
innerList.FastAt(i).Accept(this);
}
}
{
var parameters = node.Parameters;
if (parameters != null)
{
var innerList = parameters.InnerList;
var count = innerList.Count;
for (var i=0; i<count; ++i)
innerList.FastAt(i).Accept(this);
}
}
{
var genericParameters = node.GenericParameters;
if (genericParameters != null)
{
var innerList = genericParameters.InnerList;
var count = innerList.Count;
for (var i=0; i<count; ++i)
innerList.FastAt(i).Accept(this);
}
}
{
var returnType = node.ReturnType;
if (returnType != null)
returnType.Accept(this);
}
{
var returnTypeAttributes = node.ReturnTypeAttributes;
if (returnTypeAttributes != null)
{
var innerList = returnTypeAttributes.InnerList;
var count = innerList.Count;
for (var i=0; i<count; ++i)
innerList.FastAt(i).Accept(this);
}
}
{
var body = node.Body;
if (body != null)
body.Accept(this);
}
{
var locals = node.Locals;
if (locals != null)
{
var innerList = locals.InnerList;
var count = innerList.Count;
for (var i=0; i<count; ++i)
innerList.FastAt(i).Accept(this);
}
}
{
var explicitInfo = node.ExplicitInfo;
if (explicitInfo != null)
explicitInfo.Accept(this);
}
var handler = OnConstructor;
if (handler != null)
handler(node);
}
public event NodeEvent<Destructor> OnDestructor;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnDestructor(Boo.Lang.Compiler.Ast.Destructor node)
{
{
var attributes = node.Attributes;
if (attributes != null)
{
var innerList = attributes.InnerList;
var count = innerList.Count;
for (var i=0; i<count; ++i)
innerList.FastAt(i).Accept(this);
}
}
{
var parameters = node.Parameters;
if (parameters != null)
{
var innerList = parameters.InnerList;
var count = innerList.Count;
for (var i=0; i<count; ++i)
innerList.FastAt(i).Accept(this);
}
}
{
var genericParameters = node.GenericParameters;
if (genericParameters != null)
{
var innerList = genericParameters.InnerList;
var count = innerList.Count;
for (var i=0; i<count; ++i)
innerList.FastAt(i).Accept(this);
}
}
{
var returnType = node.ReturnType;
if (returnType != null)
returnType.Accept(this);
}
{
var returnTypeAttributes = node.ReturnTypeAttributes;
if (returnTypeAttributes != null)
{
var innerList = returnTypeAttributes.InnerList;
var count = innerList.Count;
for (var i=0; i<count; ++i)
innerList.FastAt(i).Accept(this);
}
}
{
var body = node.Body;
if (body != null)
body.Accept(this);
}
{
var locals = node.Locals;
if (locals != null)
{
var innerList = locals.InnerList;
var count = innerList.Count;
for (var i=0; i<count; ++i)
innerList.FastAt(i).Accept(this);
}
}
{
var explicitInfo = node.ExplicitInfo;
if (explicitInfo != null)
explicitInfo.Accept(this);
}
var handler = OnDestructor;
if (handler != null)
handler(node);
}
public event NodeEvent<ParameterDeclaration> OnParameterDeclaration;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnParameterDeclaration(Boo.Lang.Compiler.Ast.ParameterDeclaration node)
{
{
var type = node.Type;
if (type != null)
type.Accept(this);
}
{
var attributes = node.Attributes;
if (attributes != null)
{
var innerList = attributes.InnerList;
var count = innerList.Count;
for (var i=0; i<count; ++i)
innerList.FastAt(i).Accept(this);
}
}
var handler = OnParameterDeclaration;
if (handler != null)
handler(node);
}
public event NodeEvent<GenericParameterDeclaration> OnGenericParameterDeclaration;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnGenericParameterDeclaration(Boo.Lang.Compiler.Ast.GenericParameterDeclaration node)
{
{
var baseTypes = node.BaseTypes;
if (baseTypes != null)
{
var innerList = baseTypes.InnerList;
var count = innerList.Count;
for (var i=0; i<count; ++i)
innerList.FastAt(i).Accept(this);
}
}
var handler = OnGenericParameterDeclaration;
if (handler != null)
handler(node);
}
public event NodeEvent<Declaration> OnDeclaration;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnDeclaration(Boo.Lang.Compiler.Ast.Declaration node)
{
{
var type = node.Type;
if (type != null)
type.Accept(this);
}
var handler = OnDeclaration;
if (handler != null)
handler(node);
}
public event NodeEvent<Attribute> OnAttribute;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnAttribute(Boo.Lang.Compiler.Ast.Attribute node)
{
{
var arguments = node.Arguments;
if (arguments != null)
{
var innerList = arguments.InnerList;
var count = innerList.Count;
for (var i=0; i<count; ++i)
innerList.FastAt(i).Accept(this);
}
}
{
var namedArguments = node.NamedArguments;
if (namedArguments != null)
{
var innerList = namedArguments.InnerList;
var count = innerList.Count;
for (var i=0; i<count; ++i)
innerList.FastAt(i).Accept(this);
}
}
var handler = OnAttribute;
if (handler != null)
handler(node);
}
public event NodeEvent<StatementModifier> OnStatementModifier;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnStatementModifier(Boo.Lang.Compiler.Ast.StatementModifier node)
{
{
var condition = node.Condition;
if (condition != null)
condition.Accept(this);
}
var handler = OnStatementModifier;
if (handler != null)
handler(node);
}
public event NodeEvent<GotoStatement> OnGotoStatement;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnGotoStatement(Boo.Lang.Compiler.Ast.GotoStatement node)
{
{
var modifier = node.Modifier;
if (modifier != null)
modifier.Accept(this);
}
{
var label = node.Label;
if (label != null)
label.Accept(this);
}
var handler = OnGotoStatement;
if (handler != null)
handler(node);
}
public event NodeEvent<LabelStatement> OnLabelStatement;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnLabelStatement(Boo.Lang.Compiler.Ast.LabelStatement node)
{
{
var modifier = node.Modifier;
if (modifier != null)
modifier.Accept(this);
}
var handler = OnLabelStatement;
if (handler != null)
handler(node);
}
public event NodeEvent<Block> OnBlock;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnBlock(Boo.Lang.Compiler.Ast.Block node)
{
{
var modifier = node.Modifier;
if (modifier != null)
modifier.Accept(this);
}
{
var statements = node.Statements;
if (statements != null)
{
var innerList = statements.InnerList;
var count = innerList.Count;
for (var i=0; i<count; ++i)
innerList.FastAt(i).Accept(this);
}
}
var handler = OnBlock;
if (handler != null)
handler(node);
}
public event NodeEvent<DeclarationStatement> OnDeclarationStatement;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnDeclarationStatement(Boo.Lang.Compiler.Ast.DeclarationStatement node)
{
{
var modifier = node.Modifier;
if (modifier != null)
modifier.Accept(this);
}
{
var declaration = node.Declaration;
if (declaration != null)
declaration.Accept(this);
}
{
var initializer = node.Initializer;
if (initializer != null)
initializer.Accept(this);
}
var handler = OnDeclarationStatement;
if (handler != null)
handler(node);
}
public event NodeEvent<MacroStatement> OnMacroStatement;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnMacroStatement(Boo.Lang.Compiler.Ast.MacroStatement node)
{
{
var modifier = node.Modifier;
if (modifier != null)
modifier.Accept(this);
}
{
var arguments = node.Arguments;
if (arguments != null)
{
var innerList = arguments.InnerList;
var count = innerList.Count;
for (var i=0; i<count; ++i)
innerList.FastAt(i).Accept(this);
}
}
{
var body = node.Body;
if (body != null)
body.Accept(this);
}
var handler = OnMacroStatement;
if (handler != null)
handler(node);
}
public event NodeEvent<TryStatement> OnTryStatement;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnTryStatement(Boo.Lang.Compiler.Ast.TryStatement node)
{
{
var modifier = node.Modifier;
if (modifier != null)
modifier.Accept(this);
}
{
var protectedBlock = node.ProtectedBlock;
if (protectedBlock != null)
protectedBlock.Accept(this);
}
{
var exceptionHandlers = node.ExceptionHandlers;
if (exceptionHandlers != null)
{
var innerList = exceptionHandlers.InnerList;
var count = innerList.Count;
for (var i=0; i<count; ++i)
innerList.FastAt(i).Accept(this);
}
}
{
var failureBlock = node.FailureBlock;
if (failureBlock != null)
failureBlock.Accept(this);
}
{
var ensureBlock = node.EnsureBlock;
if (ensureBlock != null)
ensureBlock.Accept(this);
}
var handler = OnTryStatement;
if (handler != null)
handler(node);
}
public event NodeEvent<ExceptionHandler> OnExceptionHandler;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnExceptionHandler(Boo.Lang.Compiler.Ast.ExceptionHandler node)
{
{
var declaration = node.Declaration;
if (declaration != null)
declaration.Accept(this);
}
{
var filterCondition = node.FilterCondition;
if (filterCondition != null)
filterCondition.Accept(this);
}
{
var block = node.Block;
if (block != null)
block.Accept(this);
}
var handler = OnExceptionHandler;
if (handler != null)
handler(node);
}
public event NodeEvent<IfStatement> OnIfStatement;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnIfStatement(Boo.Lang.Compiler.Ast.IfStatement node)
{
{
var modifier = node.Modifier;
if (modifier != null)
modifier.Accept(this);
}
{
var condition = node.Condition;
if (condition != null)
condition.Accept(this);
}
{
var trueBlock = node.TrueBlock;
if (trueBlock != null)
trueBlock.Accept(this);
}
{
var falseBlock = node.FalseBlock;
if (falseBlock != null)
falseBlock.Accept(this);
}
var handler = OnIfStatement;
if (handler != null)
handler(node);
}
public event NodeEvent<UnlessStatement> OnUnlessStatement;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnUnlessStatement(Boo.Lang.Compiler.Ast.UnlessStatement node)
{
{
var modifier = node.Modifier;
if (modifier != null)
modifier.Accept(this);
}
{
var condition = node.Condition;
if (condition != null)
condition.Accept(this);
}
{
var block = node.Block;
if (block != null)
block.Accept(this);
}
var handler = OnUnlessStatement;
if (handler != null)
handler(node);
}
public event NodeEvent<ForStatement> OnForStatement;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnForStatement(Boo.Lang.Compiler.Ast.ForStatement node)
{
{
var modifier = node.Modifier;
if (modifier != null)
modifier.Accept(this);
}
{
var declarations = node.Declarations;
if (declarations != null)
{
var innerList = declarations.InnerList;
var count = innerList.Count;
for (var i=0; i<count; ++i)
innerList.FastAt(i).Accept(this);
}
}
{
var iterator = node.Iterator;
if (iterator != null)
iterator.Accept(this);
}
{
var block = node.Block;
if (block != null)
block.Accept(this);
}
{
var orBlock = node.OrBlock;
if (orBlock != null)
orBlock.Accept(this);
}
{
var thenBlock = node.ThenBlock;
if (thenBlock != null)
thenBlock.Accept(this);
}
var handler = OnForStatement;
if (handler != null)
handler(node);
}
public event NodeEvent<WhileStatement> OnWhileStatement;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnWhileStatement(Boo.Lang.Compiler.Ast.WhileStatement node)
{
{
var modifier = node.Modifier;
if (modifier != null)
modifier.Accept(this);
}
{
var condition = node.Condition;
if (condition != null)
condition.Accept(this);
}
{
var block = node.Block;
if (block != null)
block.Accept(this);
}
{
var orBlock = node.OrBlock;
if (orBlock != null)
orBlock.Accept(this);
}
{
var thenBlock = node.ThenBlock;
if (thenBlock != null)
thenBlock.Accept(this);
}
var handler = OnWhileStatement;
if (handler != null)
handler(node);
}
public event NodeEvent<BreakStatement> OnBreakStatement;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnBreakStatement(Boo.Lang.Compiler.Ast.BreakStatement node)
{
{
var modifier = node.Modifier;
if (modifier != null)
modifier.Accept(this);
}
var handler = OnBreakStatement;
if (handler != null)
handler(node);
}
public event NodeEvent<ContinueStatement> OnContinueStatement;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnContinueStatement(Boo.Lang.Compiler.Ast.ContinueStatement node)
{
{
var modifier = node.Modifier;
if (modifier != null)
modifier.Accept(this);
}
var handler = OnContinueStatement;
if (handler != null)
handler(node);
}
public event NodeEvent<ReturnStatement> OnReturnStatement;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnReturnStatement(Boo.Lang.Compiler.Ast.ReturnStatement node)
{
{
var modifier = node.Modifier;
if (modifier != null)
modifier.Accept(this);
}
{
var expression = node.Expression;
if (expression != null)
expression.Accept(this);
}
var handler = OnReturnStatement;
if (handler != null)
handler(node);
}
public event NodeEvent<YieldStatement> OnYieldStatement;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnYieldStatement(Boo.Lang.Compiler.Ast.YieldStatement node)
{
{
var modifier = node.Modifier;
if (modifier != null)
modifier.Accept(this);
}
{
var expression = node.Expression;
if (expression != null)
expression.Accept(this);
}
var handler = OnYieldStatement;
if (handler != null)
handler(node);
}
public event NodeEvent<RaiseStatement> OnRaiseStatement;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnRaiseStatement(Boo.Lang.Compiler.Ast.RaiseStatement node)
{
{
var modifier = node.Modifier;
if (modifier != null)
modifier.Accept(this);
}
{
var exception = node.Exception;
if (exception != null)
exception.Accept(this);
}
var handler = OnRaiseStatement;
if (handler != null)
handler(node);
}
public event NodeEvent<UnpackStatement> OnUnpackStatement;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnUnpackStatement(Boo.Lang.Compiler.Ast.UnpackStatement node)
{
{
var modifier = node.Modifier;
if (modifier != null)
modifier.Accept(this);
}
{
var declarations = node.Declarations;
if (declarations != null)
{
var innerList = declarations.InnerList;
var count = innerList.Count;
for (var i=0; i<count; ++i)
innerList.FastAt(i).Accept(this);
}
}
{
var expression = node.Expression;
if (expression != null)
expression.Accept(this);
}
var handler = OnUnpackStatement;
if (handler != null)
handler(node);
}
public event NodeEvent<ExpressionStatement> OnExpressionStatement;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnExpressionStatement(Boo.Lang.Compiler.Ast.ExpressionStatement node)
{
{
var modifier = node.Modifier;
if (modifier != null)
modifier.Accept(this);
}
{
var expression = node.Expression;
if (expression != null)
expression.Accept(this);
}
var handler = OnExpressionStatement;
if (handler != null)
handler(node);
}
public event NodeEvent<OmittedExpression> OnOmittedExpression;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnOmittedExpression(Boo.Lang.Compiler.Ast.OmittedExpression node)
{
var handler = OnOmittedExpression;
if (handler == null)
return;
handler(node);
}
public event NodeEvent<ExpressionPair> OnExpressionPair;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnExpressionPair(Boo.Lang.Compiler.Ast.ExpressionPair node)
{
{
var first = node.First;
if (first != null)
first.Accept(this);
}
{
var second = node.Second;
if (second != null)
second.Accept(this);
}
var handler = OnExpressionPair;
if (handler != null)
handler(node);
}
public event NodeEvent<MethodInvocationExpression> OnMethodInvocationExpression;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnMethodInvocationExpression(Boo.Lang.Compiler.Ast.MethodInvocationExpression node)
{
{
var target = node.Target;
if (target != null)
target.Accept(this);
}
{
var arguments = node.Arguments;
if (arguments != null)
{
var innerList = arguments.InnerList;
var count = innerList.Count;
for (var i=0; i<count; ++i)
innerList.FastAt(i).Accept(this);
}
}
{
var namedArguments = node.NamedArguments;
if (namedArguments != null)
{
var innerList = namedArguments.InnerList;
var count = innerList.Count;
for (var i=0; i<count; ++i)
innerList.FastAt(i).Accept(this);
}
}
var handler = OnMethodInvocationExpression;
if (handler != null)
handler(node);
}
public event NodeEvent<UnaryExpression> OnUnaryExpression;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnUnaryExpression(Boo.Lang.Compiler.Ast.UnaryExpression node)
{
{
var operand = node.Operand;
if (operand != null)
operand.Accept(this);
}
var handler = OnUnaryExpression;
if (handler != null)
handler(node);
}
public event NodeEvent<BinaryExpression> OnBinaryExpression;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnBinaryExpression(Boo.Lang.Compiler.Ast.BinaryExpression node)
{
{
var left = node.Left;
if (left != null)
left.Accept(this);
}
{
var right = node.Right;
if (right != null)
right.Accept(this);
}
var handler = OnBinaryExpression;
if (handler != null)
handler(node);
}
public event NodeEvent<ConditionalExpression> OnConditionalExpression;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnConditionalExpression(Boo.Lang.Compiler.Ast.ConditionalExpression node)
{
{
var condition = node.Condition;
if (condition != null)
condition.Accept(this);
}
{
var trueValue = node.TrueValue;
if (trueValue != null)
trueValue.Accept(this);
}
{
var falseValue = node.FalseValue;
if (falseValue != null)
falseValue.Accept(this);
}
var handler = OnConditionalExpression;
if (handler != null)
handler(node);
}
public event NodeEvent<ReferenceExpression> OnReferenceExpression;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnReferenceExpression(Boo.Lang.Compiler.Ast.ReferenceExpression node)
{
var handler = OnReferenceExpression;
if (handler == null)
return;
handler(node);
}
public event NodeEvent<MemberReferenceExpression> OnMemberReferenceExpression;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnMemberReferenceExpression(Boo.Lang.Compiler.Ast.MemberReferenceExpression node)
{
{
var target = node.Target;
if (target != null)
target.Accept(this);
}
var handler = OnMemberReferenceExpression;
if (handler != null)
handler(node);
}
public event NodeEvent<GenericReferenceExpression> OnGenericReferenceExpression;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnGenericReferenceExpression(Boo.Lang.Compiler.Ast.GenericReferenceExpression node)
{
{
var target = node.Target;
if (target != null)
target.Accept(this);
}
{
var genericArguments = node.GenericArguments;
if (genericArguments != null)
{
var innerList = genericArguments.InnerList;
var count = innerList.Count;
for (var i=0; i<count; ++i)
innerList.FastAt(i).Accept(this);
}
}
var handler = OnGenericReferenceExpression;
if (handler != null)
handler(node);
}
public event NodeEvent<QuasiquoteExpression> OnQuasiquoteExpression;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnQuasiquoteExpression(Boo.Lang.Compiler.Ast.QuasiquoteExpression node)
{
var handler = OnQuasiquoteExpression;
if (handler == null)
return;
handler(node);
}
public event NodeEvent<StringLiteralExpression> OnStringLiteralExpression;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnStringLiteralExpression(Boo.Lang.Compiler.Ast.StringLiteralExpression node)
{
var handler = OnStringLiteralExpression;
if (handler == null)
return;
handler(node);
}
public event NodeEvent<CharLiteralExpression> OnCharLiteralExpression;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnCharLiteralExpression(Boo.Lang.Compiler.Ast.CharLiteralExpression node)
{
var handler = OnCharLiteralExpression;
if (handler == null)
return;
handler(node);
}
public event NodeEvent<TimeSpanLiteralExpression> OnTimeSpanLiteralExpression;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnTimeSpanLiteralExpression(Boo.Lang.Compiler.Ast.TimeSpanLiteralExpression node)
{
var handler = OnTimeSpanLiteralExpression;
if (handler == null)
return;
handler(node);
}
public event NodeEvent<IntegerLiteralExpression> OnIntegerLiteralExpression;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnIntegerLiteralExpression(Boo.Lang.Compiler.Ast.IntegerLiteralExpression node)
{
var handler = OnIntegerLiteralExpression;
if (handler == null)
return;
handler(node);
}
public event NodeEvent<DecimalLiteralExpression> OnDecimalLiteralExpression;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnDecimalLiteralExpression(Boo.Lang.Compiler.Ast.DecimalLiteralExpression node)
{
var handler = OnDecimalLiteralExpression;
if (handler == null)
return;
handler(node);
}
public event NodeEvent<DoubleLiteralExpression> OnDoubleLiteralExpression;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnDoubleLiteralExpression(Boo.Lang.Compiler.Ast.DoubleLiteralExpression node)
{
var handler = OnDoubleLiteralExpression;
if (handler == null)
return;
handler(node);
}
public event NodeEvent<NullLiteralExpression> OnNullLiteralExpression;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnNullLiteralExpression(Boo.Lang.Compiler.Ast.NullLiteralExpression node)
{
var handler = OnNullLiteralExpression;
if (handler == null)
return;
handler(node);
}
public event NodeEvent<SelfLiteralExpression> OnSelfLiteralExpression;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnSelfLiteralExpression(Boo.Lang.Compiler.Ast.SelfLiteralExpression node)
{
var handler = OnSelfLiteralExpression;
if (handler == null)
return;
handler(node);
}
public event NodeEvent<SuperLiteralExpression> OnSuperLiteralExpression;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnSuperLiteralExpression(Boo.Lang.Compiler.Ast.SuperLiteralExpression node)
{
var handler = OnSuperLiteralExpression;
if (handler == null)
return;
handler(node);
}
public event NodeEvent<BoolLiteralExpression> OnBoolLiteralExpression;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnBoolLiteralExpression(Boo.Lang.Compiler.Ast.BoolLiteralExpression node)
{
var handler = OnBoolLiteralExpression;
if (handler == null)
return;
handler(node);
}
public event NodeEvent<RELiteralExpression> OnRELiteralExpression;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnRELiteralExpression(Boo.Lang.Compiler.Ast.RELiteralExpression node)
{
var handler = OnRELiteralExpression;
if (handler == null)
return;
handler(node);
}
public event NodeEvent<SpliceExpression> OnSpliceExpression;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnSpliceExpression(Boo.Lang.Compiler.Ast.SpliceExpression node)
{
{
var expression = node.Expression;
if (expression != null)
expression.Accept(this);
}
var handler = OnSpliceExpression;
if (handler != null)
handler(node);
}
public event NodeEvent<SpliceTypeReference> OnSpliceTypeReference;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnSpliceTypeReference(Boo.Lang.Compiler.Ast.SpliceTypeReference node)
{
{
var expression = node.Expression;
if (expression != null)
expression.Accept(this);
}
var handler = OnSpliceTypeReference;
if (handler != null)
handler(node);
}
public event NodeEvent<SpliceMemberReferenceExpression> OnSpliceMemberReferenceExpression;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnSpliceMemberReferenceExpression(Boo.Lang.Compiler.Ast.SpliceMemberReferenceExpression node)
{
{
var target = node.Target;
if (target != null)
target.Accept(this);
}
{
var nameExpression = node.NameExpression;
if (nameExpression != null)
nameExpression.Accept(this);
}
var handler = OnSpliceMemberReferenceExpression;
if (handler != null)
handler(node);
}
public event NodeEvent<SpliceTypeMember> OnSpliceTypeMember;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnSpliceTypeMember(Boo.Lang.Compiler.Ast.SpliceTypeMember node)
{
{
var attributes = node.Attributes;
if (attributes != null)
{
var innerList = attributes.InnerList;
var count = innerList.Count;
for (var i=0; i<count; ++i)
innerList.FastAt(i).Accept(this);
}
}
{
var typeMember = node.TypeMember;
if (typeMember != null)
typeMember.Accept(this);
}
{
var nameExpression = node.NameExpression;
if (nameExpression != null)
nameExpression.Accept(this);
}
var handler = OnSpliceTypeMember;
if (handler != null)
handler(node);
}
public event NodeEvent<SpliceTypeDefinitionBody> OnSpliceTypeDefinitionBody;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnSpliceTypeDefinitionBody(Boo.Lang.Compiler.Ast.SpliceTypeDefinitionBody node)
{
{
var attributes = node.Attributes;
if (attributes != null)
{
var innerList = attributes.InnerList;
var count = innerList.Count;
for (var i=0; i<count; ++i)
innerList.FastAt(i).Accept(this);
}
}
{
var expression = node.Expression;
if (expression != null)
expression.Accept(this);
}
var handler = OnSpliceTypeDefinitionBody;
if (handler != null)
handler(node);
}
public event NodeEvent<SpliceParameterDeclaration> OnSpliceParameterDeclaration;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnSpliceParameterDeclaration(Boo.Lang.Compiler.Ast.SpliceParameterDeclaration node)
{
{
var type = node.Type;
if (type != null)
type.Accept(this);
}
{
var attributes = node.Attributes;
if (attributes != null)
{
var innerList = attributes.InnerList;
var count = innerList.Count;
for (var i=0; i<count; ++i)
innerList.FastAt(i).Accept(this);
}
}
{
var parameterDeclaration = node.ParameterDeclaration;
if (parameterDeclaration != null)
parameterDeclaration.Accept(this);
}
{
var nameExpression = node.NameExpression;
if (nameExpression != null)
nameExpression.Accept(this);
}
var handler = OnSpliceParameterDeclaration;
if (handler != null)
handler(node);
}
public event NodeEvent<ExpressionInterpolationExpression> OnExpressionInterpolationExpression;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnExpressionInterpolationExpression(Boo.Lang.Compiler.Ast.ExpressionInterpolationExpression node)
{
{
var expressions = node.Expressions;
if (expressions != null)
{
var innerList = expressions.InnerList;
var count = innerList.Count;
for (var i=0; i<count; ++i)
innerList.FastAt(i).Accept(this);
}
}
var handler = OnExpressionInterpolationExpression;
if (handler != null)
handler(node);
}
public event NodeEvent<HashLiteralExpression> OnHashLiteralExpression;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnHashLiteralExpression(Boo.Lang.Compiler.Ast.HashLiteralExpression node)
{
{
var items = node.Items;
if (items != null)
{
var innerList = items.InnerList;
var count = innerList.Count;
for (var i=0; i<count; ++i)
innerList.FastAt(i).Accept(this);
}
}
var handler = OnHashLiteralExpression;
if (handler != null)
handler(node);
}
public event NodeEvent<ListLiteralExpression> OnListLiteralExpression;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnListLiteralExpression(Boo.Lang.Compiler.Ast.ListLiteralExpression node)
{
{
var items = node.Items;
if (items != null)
{
var innerList = items.InnerList;
var count = innerList.Count;
for (var i=0; i<count; ++i)
innerList.FastAt(i).Accept(this);
}
}
var handler = OnListLiteralExpression;
if (handler != null)
handler(node);
}
public event NodeEvent<CollectionInitializationExpression> OnCollectionInitializationExpression;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnCollectionInitializationExpression(Boo.Lang.Compiler.Ast.CollectionInitializationExpression node)
{
{
var collection = node.Collection;
if (collection != null)
collection.Accept(this);
}
{
var initializer = node.Initializer;
if (initializer != null)
initializer.Accept(this);
}
var handler = OnCollectionInitializationExpression;
if (handler != null)
handler(node);
}
public event NodeEvent<ArrayLiteralExpression> OnArrayLiteralExpression;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnArrayLiteralExpression(Boo.Lang.Compiler.Ast.ArrayLiteralExpression node)
{
{
var items = node.Items;
if (items != null)
{
var innerList = items.InnerList;
var count = innerList.Count;
for (var i=0; i<count; ++i)
innerList.FastAt(i).Accept(this);
}
}
{
var type = node.Type;
if (type != null)
type.Accept(this);
}
var handler = OnArrayLiteralExpression;
if (handler != null)
handler(node);
}
public event NodeEvent<GeneratorExpression> OnGeneratorExpression;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnGeneratorExpression(Boo.Lang.Compiler.Ast.GeneratorExpression node)
{
{
var expression = node.Expression;
if (expression != null)
expression.Accept(this);
}
{
var declarations = node.Declarations;
if (declarations != null)
{
var innerList = declarations.InnerList;
var count = innerList.Count;
for (var i=0; i<count; ++i)
innerList.FastAt(i).Accept(this);
}
}
{
var iterator = node.Iterator;
if (iterator != null)
iterator.Accept(this);
}
{
var filter = node.Filter;
if (filter != null)
filter.Accept(this);
}
var handler = OnGeneratorExpression;
if (handler != null)
handler(node);
}
public event NodeEvent<ExtendedGeneratorExpression> OnExtendedGeneratorExpression;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnExtendedGeneratorExpression(Boo.Lang.Compiler.Ast.ExtendedGeneratorExpression node)
{
{
var items = node.Items;
if (items != null)
{
var innerList = items.InnerList;
var count = innerList.Count;
for (var i=0; i<count; ++i)
innerList.FastAt(i).Accept(this);
}
}
var handler = OnExtendedGeneratorExpression;
if (handler != null)
handler(node);
}
public event NodeEvent<Slice> OnSlice;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnSlice(Boo.Lang.Compiler.Ast.Slice node)
{
{
var begin = node.Begin;
if (begin != null)
begin.Accept(this);
}
{
var end = node.End;
if (end != null)
end.Accept(this);
}
{
var step = node.Step;
if (step != null)
step.Accept(this);
}
var handler = OnSlice;
if (handler != null)
handler(node);
}
public event NodeEvent<SlicingExpression> OnSlicingExpression;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnSlicingExpression(Boo.Lang.Compiler.Ast.SlicingExpression node)
{
{
var target = node.Target;
if (target != null)
target.Accept(this);
}
{
var indices = node.Indices;
if (indices != null)
{
var innerList = indices.InnerList;
var count = innerList.Count;
for (var i=0; i<count; ++i)
innerList.FastAt(i).Accept(this);
}
}
var handler = OnSlicingExpression;
if (handler != null)
handler(node);
}
public event NodeEvent<TryCastExpression> OnTryCastExpression;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnTryCastExpression(Boo.Lang.Compiler.Ast.TryCastExpression node)
{
{
var target = node.Target;
if (target != null)
target.Accept(this);
}
{
var type = node.Type;
if (type != null)
type.Accept(this);
}
var handler = OnTryCastExpression;
if (handler != null)
handler(node);
}
public event NodeEvent<CastExpression> OnCastExpression;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnCastExpression(Boo.Lang.Compiler.Ast.CastExpression node)
{
{
var target = node.Target;
if (target != null)
target.Accept(this);
}
{
var type = node.Type;
if (type != null)
type.Accept(this);
}
var handler = OnCastExpression;
if (handler != null)
handler(node);
}
public event NodeEvent<TypeofExpression> OnTypeofExpression;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnTypeofExpression(Boo.Lang.Compiler.Ast.TypeofExpression node)
{
{
var type = node.Type;
if (type != null)
type.Accept(this);
}
var handler = OnTypeofExpression;
if (handler != null)
handler(node);
}
public event NodeEvent<CustomStatement> OnCustomStatement;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnCustomStatement(Boo.Lang.Compiler.Ast.CustomStatement node)
{
{
var modifier = node.Modifier;
if (modifier != null)
modifier.Accept(this);
}
var handler = OnCustomStatement;
if (handler != null)
handler(node);
}
public event NodeEvent<CustomExpression> OnCustomExpression;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnCustomExpression(Boo.Lang.Compiler.Ast.CustomExpression node)
{
var handler = OnCustomExpression;
if (handler == null)
return;
handler(node);
}
public event NodeEvent<StatementTypeMember> OnStatementTypeMember;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
void IAstVisitor.OnStatementTypeMember(Boo.Lang.Compiler.Ast.StatementTypeMember node)
{
	// Visit each attribute (count is snapshotted before the loop, as in
	// the original generated code), then the wrapped statement, then
	// raise the event.
	var attrs = node.Attributes;
	if (attrs != null)
	{
		var attrList = attrs.InnerList;
		var attrCount = attrList.Count;
		for (var idx = 0; idx < attrCount; ++idx)
			attrList.FastAt(idx).Accept(this);
	}
	var wrappedStatement = node.Statement;
	if (wrappedStatement != null)
		wrappedStatement.Accept(this);
	var memberHandler = OnStatementTypeMember;
	if (memberHandler != null)
		memberHandler(node);
}
}
}
|
bsd-3-clause
|
jneslen/pixeltenchi_raw
|
legacy/parenthelpcenter/askwilla/wp-content/themes/askwilla/page.php
|
801
|
<?php get_header(); // Theme header; presumably opens the first layout column — confirm against header.php ?>
<?php get_sidebar(); // Sidebar rendered before the main column in this theme ?>
</div>
<div id="column_2">
<div id="awheader_1">
</div>
<div id="awsubheader">
<div id="description"><?php bloginfo('description'); // Site tagline ?></div>
</div>
<div id="content" class="narrowcolumn">
<?php if (have_posts()) : while (have_posts()) : the_post(); // Standard WP loop; a static page yields one post ?>
<div class="post" id="post-<?php the_ID(); ?>">
<h2><?php the_title(); ?></h2>
<div class="entry">
<?php the_content('<p class="serif">Read the rest of this page »</p>'); // "more" link markup ?>
<?php wp_link_pages(array('before' => '<p><strong>Pages:</strong> ', 'after' => '</p>', 'next_or_number' => 'number')); // Numbered links for multi-page content ?>
</div>
</div>
<?php endwhile; endif; ?>
<?php edit_post_link('Edit this entry.', '<p>', '</p>'); // Visible only to users who may edit ?>
</div>
</div>
<?php get_footer(); ?>
|
bsd-3-clause
|
ernado/turn
|
reqfamily.go
|
1514
|
package turn
import (
"errors"
"gortc.io/stun"
)
// RequestedAddressFamily represents the REQUESTED-ADDRESS-FAMILY Attribute as
// defined in RFC 6156 Section 4.1.1.
type RequestedAddressFamily byte
const requestedFamilySize = 4
// GetFrom decodes REQUESTED-ADDRESS-FAMILY from message.
func (f *RequestedAddressFamily) GetFrom(m *stun.Message) error {
	raw, err := m.Get(stun.AttrRequestedAddressFamily)
	if err != nil {
		return err
	}
	// Attribute must be exactly 4 bytes (family + 3 RFFU bytes).
	if err := stun.CheckSize(stun.AttrRequestedAddressFamily, len(raw), requestedFamilySize); err != nil {
		return err
	}
	// Only the two families defined by RFC 6156 are accepted.
	family := RequestedAddressFamily(raw[0])
	if family != RequestedFamilyIPv4 && family != RequestedFamilyIPv6 {
		return errors.New("invalid value for requested family attribute")
	}
	*f = family
	return nil
}
// String implements fmt.Stringer, returning "IPv4", "IPv6", or
// "unknown" for any value not defined by RFC 6156.
func (f RequestedAddressFamily) String() string {
	switch f {
	case RequestedFamilyIPv4:
		return "IPv4"
	case RequestedFamilyIPv6:
		return "IPv6"
	default:
		return "unknown"
	}
}
// AddTo adds REQUESTED-ADDRESS-FAMILY to message.
func (f RequestedAddressFamily) AddTo(m *stun.Message) error {
	// 4-byte attribute: family in byte 0; bytes 1-3 are RFFU and MUST be
	// zero on transmission and ignored on reception (RFC 6156 §4.1.1).
	// make() already zeroes the buffer.
	buf := make([]byte, requestedFamilySize)
	buf[0] = byte(f)
	m.Add(stun.AttrRequestedAddressFamily, buf)
	return nil
}
// Values for RequestedAddressFamily as defined in RFC 6156 Section 4.1.1.
const (
RequestedFamilyIPv4 RequestedAddressFamily = 0x01
RequestedFamilyIPv6 RequestedAddressFamily = 0x02
)
|
bsd-3-clause
|
kiranisaac/beringei-1
|
beringei/lib/BucketMap.cpp
|
30407
|
/**
* Copyright (c) 2016-present, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*/
#include "BucketMap.h"
#include "BucketLogWriter.h"
#include "DataBlockReader.h"
#include "DataLog.h"
#include "GorillaStatsManager.h"
#include "GorillaTimeConstants.h"
#include "TimeSeries.h"
DEFINE_int32(
data_point_queue_size,
1000,
"The size of the qeueue that holds the data points in memory before they "
"can be handled. This queue is only used when shards are being added.");
DEFINE_int64(
missing_logs_threshold_secs,
600, // 10 minute default
"Count gaps longer than this as holes in the log files.");
namespace facebook {
namespace gorilla {
// When performing initial insertion, add this much buffer to the vector
// on each resize.
const int kRowsAtATime = 10000;
static const std::string kMsPerKeyListRead = ".ms_per_key_list_read";
static const std::string kMsPerLogFilesRead = ".ms_per_log_files_read";
static const std::string kMsPerBlockFileRead = ".ms_per_block_file_read";
static const std::string kMsPerQueueProcessing = ".ms_per_queue_processing";
static const std::string kDataPointQueueDropped = ".data_point_queue_dropped";
static const std::string kCorruptKeyFiles = ".corrupt_key_files";
static const std::string kCorruptLogFiles = ".corrupt_log_files";
static const std::string kUnknownKeysInLogFiles = ".unknown_keys_in_log_files";
static const std::string kUnknownKeysInBlockMetadataFiles =
".unknown_keys_in_block_metadata_files";
static const std::string kDataHoles = ".missing_blocks_and_logs";
static const std::string kMissingLogs = ".missing_seconds_of_log_data";
static const std::string kDeletionRaces = ".key_deletion_failures";
static const std::string kDuplicateKeys = ".duplicate_keys_in_key_list";
static const size_t kMaxAllowedKeyLength = 400;
static int16_t kInstagramCategoryId = 271;
const int BucketMap::kNotOwned = -1;
DECLARE_int32(max_allowed_timeseries_id);
// Constructs the map for one shard: `buckets` in-memory buckets of
// `windowSize` seconds each, backed by block/log files under
// `dataDirectory`. The map starts in `state` with no keys loaded;
// rows become usable only after the loading state machine advances
// (readKeyList -> readData -> readBlockFiles).
BucketMap::BucketMap(
    uint8_t buckets,
    uint64_t windowSize,
    int shardId,
    const std::string& dataDirectory,
    std::shared_ptr<KeyListWriter> keyWriter,
    std::shared_ptr<BucketLogWriter> logWriter,
    BucketMap::State state)
    : n_(buckets),
      windowSize_(windowSize),
      reliableDataStartTime_(0),
      lock_(),
      tableSize_(0),
      storage_(buckets, shardId, dataDirectory),
      state_(state),
      shardId_(shardId),
      dataDirectory_(dataDirectory),
      keyWriter_(keyWriter),
      logWriter_(logWriter),
      lastFinalizedBucket_(0) {}
// Insert the given data point, creating a new row if necessary.
// Returns the number of new rows created (0 or 1) and the number of
// data points successfully inserted (0 or 1) as a pair of ints.
// Returns {kNotOwned,kNotOwned} if this map is currently not owned.
// While the shard is still loading, the point is queued instead of
// applied directly.
std::pair<int, int> BucketMap::put(
    const std::string& key,
    const TimeValuePair& value,
    uint16_t category,
    bool skipStateCheck) {
  State state;
  uint32_t id;
  auto existingItem = getInternal(key, state, id);

  // State check can only skipped when processing data points from the
  // queue. Data points that come in externally during processing will
  // still be queued.
  if (skipStateCheck) {
    CHECK_EQ(PROCESSING_QUEUED_DATA_POINTS, state);
  } else {
    switch (state) {
      case UNOWNED:
        return {kNotOwned, kNotOwned};
      case PRE_OWNED:
      case READING_KEYS:
        queueDataPointWithKey(key, value, category);

        // Assume the data point will be added and no new keys will be
        // added. This might not be the case but these return values
        // are only used for counters.
        return {0, 1};
      case READING_KEYS_DONE:
      case READING_LOGS:
      case PROCESSING_QUEUED_DATA_POINTS:
        // Key map is usable now, so queue by id when the key is known
        // (cheaper than re-resolving the key later).
        if (existingItem) {
          queueDataPointWithId(id, value, category);
        } else {
          queueDataPointWithKey(key, value, category);
        }
        return {0, 1};
      case READING_BLOCK_DATA:
      case OWNED:
      case PRE_UNOWNED:
        // Continue normal processing. PRE_UNOWNED is still completely
        // considered to be owned.
        break;

        // No default case to let compiler warn if new states are added
        // without adding a case for them.
    }
  }

  if (existingItem) {
    bool added = putDataPointWithId(&existingItem->second, id, value, category);
    return {0, added ? 1 : 0};
  }

  uint32_t b = bucket(value.unixTime);

  // Prepare a row now to minimize critical section.
  auto newRow = std::make_shared<std::pair<std::string, BucketedTimeSeries>>();
  newRow->first = key;
  newRow->second.reset(n_);
  newRow->second.put(b, value, &storage_, -1, &category);

  int index = 0;
  {
    // Lock the map again.
    folly::RWSpinLock::WriteHolder writeGuard(lock_);

    // The value here doesn't matter because it will be replaced later.
    // NOTE: the map key points into the string owned by the row, so the
    // row must outlive its map entry.
    auto ret = map_.insert(std::make_pair(newRow->first.c_str(), -1));
    if (!ret.second) {
      // Nothing was inserted, just update the existing one.
      // (Another writer raced us and created this key first.)
      bool added = putDataPointWithId(
          &rows_[ret.first->second]->second,
          ret.first->second,
          value,
          category);
      return {0, added ? 1 : 0};
    }

    // Find a row in the vector: recycle a freed slot if possible,
    // otherwise grow the table.
    if (freeList_.size()) {
      index = freeList_.top();
      freeList_.pop();
    } else {
      tableSize_++;
      rows_.emplace_back();
      index = rows_.size() - 1;
    }
    rows_[index] = newRow;
    ret.first->second = index;
  }

  // Write the new key out to disk.
  keyWriter_->addKey(shardId_, index, newRow->first, category);
  logWriter_->logData(shardId_, index, value.unixTime, value.value);

  return {1, 1};
}
// Get a shared_ptr to a TimeSeries.
BucketMap::Item BucketMap::get(const std::string& key) {
  State state;
  uint32_t id;
  return getInternal(key, state, id);
}

// Get all the TimeSeries.
// Note: entries may be null shared_ptrs (slots on the free list).
void BucketMap::getEverything(std::vector<Item>& out) {
  out.reserve(tableSize_);
  folly::RWSpinLock::ReadHolder guard(lock_);
  out.insert(out.end(), rows_.begin(), rows_.end());
}

// Copies up to `count` rows starting at `offset` into `out`. Returns
// true when more rows remain after this batch, false on the last batch
// or an out-of-range offset.
// NOTE(review): `offset`/`count` are signed ints compared against
// size_t; presumably callers always pass non-negative values — confirm.
bool BucketMap::getSome(std::vector<Item>& out, int offset, int count) {
  out.reserve(count);
  folly::RWSpinLock::ReadHolder guard(lock_);
  if (offset >= rows_.size()) {
    return false;
  } else if (offset + count >= rows_.size()) {
    out.insert(out.end(), rows_.begin() + offset, rows_.end());
    return false;
  } else {
    out.insert(
        out.end(), rows_.begin() + offset, rows_.begin() + offset + count);
    return true;
  }
}
// Removes the row at `index`, but only if the slot still holds `item`;
// both the slot and the key map are re-verified under the write lock so
// a concurrently replaced row is never deleted by mistake. Freed slots
// are recycled through the free list.
void BucketMap::erase(int index, Item item) {
  folly::RWSpinLock::WriteHolder guard(lock_);
  if (rows_[index] != item || !item) {
    // The arguments provided are no longer valid.
    GorillaStatsManager::addStatValue(kDeletionRaces);
    return;
  }

  auto it = map_.find(item->first.c_str());
  if (it != map_.end() && it->second == index) {
    // The map still points to the right entry.
    map_.erase(it);
  } else {
    GorillaStatsManager::addStatValue(kDeletionRaces);
  }

  rows_[index].reset();
  freeList_.push(index);
}
// Maps a unix timestamp to its bucket number. `shardId` is unused here
// but kept for signature symmetry with the other static helpers.
uint32_t
BucketMap::bucket(uint64_t unixTime, uint64_t windowSize, int shardId) {
  return (uint32_t)(unixTime / windowSize);
}

uint32_t BucketMap::bucket(uint64_t unixTime) const {
  return bucket(unixTime, windowSize_, shardId_);
}

// Start timestamp (seconds) of the given bucket.
uint64_t
BucketMap::timestamp(uint32_t bucket, uint64_t windowSize, int shardId) {
  return bucket * windowSize;
}

uint64_t BucketMap::timestamp(uint32_t bucket) const {
  return timestamp(bucket, windowSize_, shardId_);
}

// Seconds covered by `buckets` consecutive buckets.
uint64_t BucketMap::duration(uint32_t buckets, uint64_t windowSize) {
  return buckets * windowSize;
}

uint64_t BucketMap::duration(uint32_t buckets) const {
  return duration(buckets, windowSize_);
}

// Number of whole buckets that fit in `duration` seconds.
uint32_t BucketMap::buckets(uint64_t duration, uint64_t windowSize) {
  return duration / windowSize;
}

uint32_t BucketMap::buckets(uint64_t duration) const {
  return buckets(duration, windowSize_);
}

// Accessor for the underlying block storage (has its own locking).
BucketStorage* BucketMap::getStorage() {
  return &storage_;
}
// Transitions the shard to `state`, enforcing the legal state machine
// (see isAllowedStateTransition); returns false and logs on an illegal
// transition. PRE_OWNED allocates the data point queue and starts the
// writers; UNOWNED releases all in-memory rows; OWNED stops the
// shard-add timer.
bool BucketMap::setState(BucketMap::State state) {
  Timer timer(true);

  // If we have to drop a shard, move the data here, then free all the memory
  // outside of any locks, as this can take a long time.
  std::unordered_map<const char*, int, CaseHash, CaseEq> tmpMap;
  std::priority_queue<int, std::vector<int>, std::less<int>> tmpQueue;
  std::vector<Item> tmpVec;
  std::vector<std::vector<uint32_t>> tmpDeviations;

  std::unique_lock<std::mutex> stateGuard(stateChangeMutex_);
  folly::RWSpinLock::WriteHolder guard(lock_);
  if (!isAllowedStateTransition(state_, state)) {
    LOG(WARNING) << "Illegal transition from " << state_ << " to " << state;
    return false;
  }

  if (state == PRE_OWNED) {
    addTimer_.start();
    keyWriter_->startShard(shardId_);
    logWriter_->startShard(shardId_);
    dataPointQueue_ = std::make_shared<folly::MPMCQueue<QueuedDataPoint>>(
        FLAGS_data_point_queue_size);

    // Deviations are indexed per minute.
    deviations_.resize(duration(n_) / kGorillaSecondsPerMinute);
  } else if (state == UNOWNED) {
    // Swap into the locals above so deallocation happens after `guard`
    // is released.
    tmpMap.swap(map_);
    tmpQueue.swap(freeList_);
    tmpVec.swap(rows_);
    tmpDeviations.swap(deviations_);
    tableSize_ = 0;

    // These operations do block, but only to enqueue flags, not drain the
    // queues to disk.
    keyWriter_->stopShard(shardId_);
    logWriter_->stopShard(shardId_);
  } else if (state == OWNED) {
    // Calling this won't hurt even if the timer isn't running.
    addTimer_.stop();
  }

  BucketMap::State oldState = state_;
  state_ = state;
  guard.reset();

  // Enable/disable storage outside the lock because it might take a
  // while and the storage object has its own locking.
  if (state == PRE_OWNED) {
    storage_.enable();
  } else if (state == UNOWNED) {
    storage_.clearAndDisable();
  }

  LOG(INFO) << "Changed state of shard " << shardId_ << " from " << oldState
            << " to " << state << " in " << timer.get() << "us";
  return true;
}
// Current ownership/loading state, read under the spin lock.
BucketMap::State BucketMap::getState() {
  folly::RWSpinLock::ReadHolder guard(lock_);
  return state_;
}

// Elapsed shard-add time in milliseconds (timer runs from PRE_OWNED
// until OWNED; see setState).
Timer::TimeVal BucketMap::getAddTime() {
  return addTimer_.get() / kGorillaUsecPerMs;
}

// Reverses a pending drop: PRE_UNOWNED -> OWNED. Returns false if the
// shard wasn't in PRE_UNOWNED.
bool BucketMap::cancelUnowning() {
  folly::RWSpinLock::WriteHolder guard(lock_);
  if (state_ != PRE_UNOWNED) {
    return false;
  }

  state_ = OWNED;
  return true;
}

// States only move forward, with the single exception of
// OWNED -> PRE_UNOWNED (whose reverse is handled by cancelUnowning).
bool BucketMap::isAllowedStateTransition(State from, State to) {
  return to > from || (from == OWNED && to == PRE_UNOWNED);
}
// Finalizes every bucket from the one after lastFinalizedBucket_ up to
// `lastBucketToFinalize`, inclusive: bumps each time series past the
// bucket and flushes the bucket to storage. Returns the number of
// buckets finalized (0 when not OWNED or nothing to do).
int BucketMap::finalizeBuckets(uint32_t lastBucketToFinalize) {
  if (getState() != OWNED) {
    return 0;
  }

  // This code assumes that only one thread will be calling this at a
  // time. If this isn't the case anymore, locks need to be added.
  uint32_t bucketToFinalize;
  if (lastFinalizedBucket_ == 0) {
    // First call after startup/shard move: start from the requested one.
    bucketToFinalize = lastBucketToFinalize;
  } else {
    bucketToFinalize = lastFinalizedBucket_ + 1;
  }

  if (bucketToFinalize <= lastFinalizedBucket_ ||
      bucketToFinalize > lastBucketToFinalize) {
    return 0;
  }

  // There might be more than one bucket to finalize if the server was
  // restarted or shards moved.
  int bucketsToFinalize = lastBucketToFinalize - bucketToFinalize + 1;
  std::vector<BucketMap::Item> timeSeriesData;
  getEverything(timeSeriesData);

  uint32_t droppedBatchCount = 0;
  for (uint32_t bucket = bucketToFinalize; bucket <= lastBucketToFinalize;
       bucket++) {
    for (int i = 0; i < timeSeriesData.size(); i++) {
      if (timeSeriesData[i].get()) {
        timeSeriesData[i]->second.setCurrentBucket(
            bucket + 1,
            getStorage(),
            i); // `i` is the id of the time series
      }
    }

    getStorage()->finalizeBucket(bucket);
  }

  lastFinalizedBucket_ = lastBucketToFinalize;
  return bucketsToFinalize;
}
// True when more than one bucket is waiting to be finalized, i.e.
// finalization has fallen behind. Never true before the first
// finalization (lastFinalizedBucket_ == 0).
bool BucketMap::isBehind(uint32_t bucketToFinalize) const {
  return lastFinalizedBucket_ != 0 &&
      bucketToFinalize > lastFinalizedBucket_ + 1;
}

// Process-shutdown path: stops the writers and marks the shard UNOWNED
// directly, deliberately keeping row memory (unlike setState(UNOWNED)).
void BucketMap::shutdown() {
  if (getState() == OWNED) {
    logWriter_->stopShard(shardId_);
    keyWriter_->stopShard(shardId_);

    // Set the state directly without calling setState which would try
    // to deallocate memory.
    std::unique_lock<std::mutex> stateGuard(stateChangeMutex_);
    folly::RWSpinLock::WriteHolder guard(lock_);
    state_ = UNOWNED;
  }
}
// Rewrites the on-disk key list keeping only live keys. The lambda acts
// as a generator: each call yields the next live (id, key, category)
// tuple, and a nullptr key signals the end. `i` starts at uint32_t(-1)
// so the first `i++` wraps to 0.
void BucketMap::compactKeyList() {
  std::vector<Item> items;
  getEverything(items);

  uint32_t i = -1;
  keyWriter_->compact(shardId_, [&]() {
    for (i++; i < items.size(); i++) {
      if (items[i].get()) {
        return std::make_tuple(
            i, items[i]->first.c_str(), items[i]->second.getCategory());
      }
    }
    return std::make_tuple<uint32_t, const char*, uint16_t>(0, nullptr, 0);
  });
}

// Deletes block files older than one full window behind now.
void BucketMap::deleteOldBlockFiles() {
  // Start far enough back that we can't possibly interfere with anything.
  storage_.deleteBucketsOlderThan(bucket(time(nullptr)) - n_ - 1);
}
// Registers every stat counter exported by this class with the stats
// manager. Presumably called once at process startup — confirm caller.
void BucketMap::startMonitoring() {
  GorillaStatsManager::addStatExportType(kMsPerKeyListRead, AVG);
  GorillaStatsManager::addStatExportType(kMsPerLogFilesRead, AVG);
  GorillaStatsManager::addStatExportType(kMsPerBlockFileRead, AVG);
  GorillaStatsManager::addStatExportType(kMsPerBlockFileRead, COUNT);
  GorillaStatsManager::addStatExportType(kMsPerQueueProcessing, AVG);
  GorillaStatsManager::addStatExportType(kDataPointQueueDropped, SUM);
  GorillaStatsManager::addStatExportType(kCorruptLogFiles, SUM);
  GorillaStatsManager::addStatExportType(kCorruptKeyFiles, SUM);
  GorillaStatsManager::addStatExportType(kUnknownKeysInLogFiles, SUM);
  GorillaStatsManager::addStatExportType(kUnknownKeysInBlockMetadataFiles, SUM);
  GorillaStatsManager::addStatExportType(kDataHoles, SUM);
  GorillaStatsManager::addStatExportType(kMissingLogs, SUM);
  GorillaStatsManager::addStatExportType(kMissingLogs, AVG);
  GorillaStatsManager::addStatExportType(kMissingLogs, COUNT);
  GorillaStatsManager::addStatExportType(kDeletionRaces, SUM);
  GorillaStatsManager::addStatExportType(kDuplicateKeys, SUM);
}
// Looks up `key` under the read lock and reports the current shard
// state via `state`. On a hit, stores the row id in `id` and returns
// the row; returns nullptr otherwise. No lookup is attempted while the
// shard is UNOWNED or keys are still being read, since the map is not
// populated yet.
BucketMap::Item
BucketMap::getInternal(const std::string& key, State& state, uint32_t& id) {
  folly::RWSpinLock::ReadHolder guard(lock_);
  state = state_;
  if (state_ >= UNOWNED && state_ <= READING_KEYS) {
    return nullptr;
  }

  auto found = map_.find(key.c_str());
  if (found == map_.end()) {
    return nullptr;
  }
  id = found->second;
  return rows_[id];
}
// Second phase of shard loading (after readKeyList): finds completed
// block files, replays log files newer than the last completed block,
// drains the queued data points, and leaves the shard in
// READING_BLOCK_DATA so callers can stream the blocks in via
// readBlockFiles.
void BucketMap::readData() {
  bool success = setState(READING_LOGS);
  CHECK(success) << "Setting state failed";
  Timer timer(true);

  DataBlockReader reader(shardId_, dataDirectory_);
  {
    std::unique_lock<std::mutex> guard(unreadBlockFilesMutex_);
    unreadBlockFiles_ = reader.findCompletedBlockFiles();
    if (unreadBlockFiles_.size() > 0) {
      checkForMissingBlockFiles();
      // Newest completed block defines where log replay starts.
      lastFinalizedBucket_ = *unreadBlockFiles_.rbegin();
    }
  }

  readLogFiles(lastFinalizedBucket_);
  GorillaStatsManager::addStatValue(
      kMsPerLogFilesRead, timer.reset() / kGorillaUsecPerMs);
  CHECK(getState() == READING_LOGS);
  success = setState(PROCESSING_QUEUED_DATA_POINTS);
  CHECK(success);

  // Skip state check when processing queued data points.
  processQueuedDataPoints(true);

  // There's a tiny chance that incoming data points will think that
  // the state is PROCESSING_QUEUED_DATA_POINTS and they will be
  // queued after the second call to processQueuedDataPoints.
  success = setState(READING_BLOCK_DATA);
  CHECK(success);

  // Process queued data points again, just to be sure that the queue
  // is empty because it is possible that something was inserted into
  // the queue after it was emptied and before the state was set to
  // READING_BLOCK_DATA.
  processQueuedDataPoints(false);
  GorillaStatsManager::addStatValue(
      kMsPerQueueProcessing, timer.reset() / kGorillaUsecPerMs);

  // Take a copy of the shared pointer to avoid freeing the memory
  // while holding the write lock. Not the most elegant solution but it
  // guarantees that freeing memory won't block anything else.
  std::shared_ptr<folly::MPMCQueue<QueuedDataPoint>> copy;
  {
    folly::RWSpinLock::WriteHolder guard(lock_);
    copy = dataPointQueue_;
    dataPointQueue_.reset();
  }

  // Probably not needed because this object will fall out of scope,
  // but I am afraid of compiler optimizations that might end up
  // freeing the memory inside the write lock.
  copy.reset();
}
// Reads one unread completed block file (newest first) and attaches its
// data blocks to the matching time series by id. Returns false once all
// files have been consumed, at which point the shard becomes OWNED;
// returns true when more files remain.
bool BucketMap::readBlockFiles() {
  uint32_t position;
  {
    std::unique_lock<std::mutex> guard(unreadBlockFilesMutex_);
    if (unreadBlockFiles_.empty()) {
      bool success = setState(OWNED);
      CHECK(success);

      // Done reading block files.
      return false;
    }

    position = *unreadBlockFiles_.rbegin();
    unreadBlockFiles_.erase(position);
  }

  std::vector<uint32_t> timeSeriesIds;
  std::vector<uint64_t> storageIds;

  LOG(INFO) << "Reading blockfiles for shard " << shardId_ << ": " << position;
  Timer timer(true);
  if (storage_.loadPosition(position, timeSeriesIds, storageIds)) {
    folly::RWSpinLock::ReadHolder guard(lock_);
    for (int i = 0; i < timeSeriesIds.size(); i++) {
      if (timeSeriesIds[i] < rows_.size() && rows_[timeSeriesIds[i]].get()) {
        rows_[timeSeriesIds[i]]->second.setDataBlock(
            position, n_, storageIds[i]);
      } else {
        // Metadata references a series that no longer exists in the map.
        GorillaStatsManager::addStatValue(kUnknownKeysInBlockMetadataFiles);
      }
    }

    GorillaStatsManager::addStatValue(
        kMsPerBlockFileRead, timer.reset() / kGorillaUsecPerMs);
    LOG(INFO) << "Done reading blockfiles for shard " << shardId_ << ": "
              << position;
  } else {
    // This could just be because we've read the data before, but that shouldn't
    // happen (it gets cleared on shard drop). Bump the counter anyway.
    LOG(ERROR) << "Failed to read blockfiles for shard " << shardId_ << ": "
               << position << ". Already loaded?";
  }

  return true;
}
// First phase of shard loading: populates rows_ and map_ from the
// persisted key list. A too-long key or too-large id aborts reading of
// the rest of that file (corruption). Empty slots and duplicate keys go
// to the free list.
void BucketMap::readKeyList() {
  LOG(INFO) << "Reading keys for shard " << shardId_;
  Timer timer(true);

  bool success = setState(READING_KEYS);
  CHECK(success) << "Setting state failed";

  // No reason to lock because nothing is touching the rows_ or map_
  // while this is running.

  // Read all the keys from disk into the vector.
  PersistentKeyList::readKeys(
      shardId_,
      dataDirectory_,
      [&](uint32_t id, const char* key, uint16_t category) {
        if (strlen(key) >= kMaxAllowedKeyLength) {
          LOG(ERROR) << "Key too long. Key file is corrupt for shard "
                     << shardId_;
          GorillaStatsManager::addStatValue(kCorruptKeyFiles);

          // Don't continue reading from this file anymore.
          return false;
        }

        if (id > FLAGS_max_allowed_timeseries_id) {
          LOG(ERROR) << "ID is too large. Key file is corrupt for shard "
                     << shardId_;
          GorillaStatsManager::addStatValue(kCorruptKeyFiles);

          // Don't continue reading from this file anymore.
          return false;
        }

        if (id >= rows_.size()) {
          // Grow with headroom to avoid a resize per key.
          rows_.resize(id + kRowsAtATime);
        }

        rows_[id].reset(new std::pair<std::string, BucketedTimeSeries>());
        rows_[id]->first = key;
        rows_[id]->second.reset(n_);
        rows_[id]->second.setCategory(category);
        return true;
      });

  tableSize_ = rows_.size();
  map_.reserve(rows_.size());

  // Put all the rows in either the map or the free list.
  for (int i = 0; i < rows_.size(); i++) {
    if (rows_[i].get()) {
      auto result = map_.insert({rows_[i]->first.c_str(), i});

      // Ignore keys that already exist.
      if (!result.second) {
        GorillaStatsManager::addStatValue(kDuplicateKeys);
        rows_[i].reset();
        freeList_.push(i);
      }
    } else {
      freeList_.push(i);
    }
  }

  LOG(INFO) << "Done reading keys for shard " << shardId_;
  GorillaStatsManager::addStatValue(
      kMsPerKeyListRead, timer.reset() / kGorillaUsecPerMs);

  success = setState(READING_KEYS_DONE);
  CHECK(success) << "Setting state failed";
}
// Replays every log file newer than the last completed block, feeding
// points into the in-memory buckets. Gaps longer than
// FLAGS_missing_logs_threshold_secs (between files or between the last
// point and now) are counted as data holes and push
// reliableDataStartTime_ forward past the hole.
void BucketMap::readLogFiles(uint32_t lastBlock) {
  LOG(INFO) << "Reading logs for shard " << shardId_;
  FileUtils files(shardId_, BucketLogWriter::kLogFilePrefix, dataDirectory_);

  uint32_t unknownKeys = 0;
  // Log file names are their start timestamps (see files.ls() usage).
  int64_t lastTimestamp = timestamp(lastBlock + 1);
  for (int64_t id : files.ls()) {
    if (id < timestamp(lastBlock + 1)) {
      LOG(INFO) << "Skipping log file " << id << " because it's already "
                << "covered by a block";
      continue;
    }

    auto file = files.open(id, "rb", 0);
    if (!file.file) {
      LOG(ERROR) << "Could not open logfile for reading";
      continue;
    }

    uint32_t b = bucket(id);
    DataLogReader::readLog(
        file, id, [&](uint32_t key, int64_t unixTime, double value) {
          if (unixTime < timestamp(b) || unixTime > timestamp(b + 1)) {
            LOG(ERROR) << "Unix time is out of the expected range: " << unixTime
                       << " [" << timestamp(b) << "," << timestamp(b + 1)
                       << "]";
            GorillaStatsManager::addStatValue(kCorruptLogFiles);

            // It's better to stop reading this log file here because
            // none of the data can be trusted after this.
            return false;
          }

          folly::RWSpinLock::ReadHolder guard(lock_);
          if (key < rows_.size() && rows_[key].get()) {
            TimeValuePair tv;
            tv.unixTime = unixTime;
            tv.value = value;
            rows_[key]->second.put(
                bucket(unixTime), tv, &storage_, key, nullptr);
          } else {
            unknownKeys++;
          }

          // Track the largest timestamp seen so far to detect holes.
          int64_t gap = unixTime - lastTimestamp;
          if (gap > FLAGS_missing_logs_threshold_secs &&
              lastTimestamp > timestamp(1)) {
            LOG(ERROR) << gap << " seconds of missing logs from "
                       << lastTimestamp << " to " << unixTime << " for shard "
                       << shardId_;
            GorillaStatsManager::addStatValue(kDataHoles, 1);
            GorillaStatsManager::addStatValue(kMissingLogs, gap);
            reliableDataStartTime_ = unixTime;
          }
          lastTimestamp = std::max(lastTimestamp, unixTime);
          return true;
        });
    fclose(file.file);
  }

  // Final check: a hole between the last logged point and "now" also
  // makes recent data unreliable.
  int64_t now = time(nullptr);
  int64_t gap = now - lastTimestamp;
  if (gap > FLAGS_missing_logs_threshold_secs && lastTimestamp > timestamp(1)) {
    LOG(ERROR) << gap << " seconds of missing logs from " << lastTimestamp
               << " to now (" << now << ") for shard " << shardId_;
    GorillaStatsManager::addStatValue(kDataHoles, 1);
    GorillaStatsManager::addStatValue(kMissingLogs, gap);
    reliableDataStartTime_ = now;
  }

  LOG(INFO) << "Done reading logs for shard " << shardId_;
  LOG(INFO) << unknownKeys << " unknown keys found";
  GorillaStatsManager::addStatValue(kUnknownKeysInLogFiles, unknownKeys);
}
// Queues a data point addressed by key (used while the shard is still
// loading and the key map can't be consulted). Empty keys are dropped.
void BucketMap::queueDataPointWithKey(
    const std::string& key,
    const TimeValuePair& value,
    uint16_t category) {
  if (key == "") {
    LOG(WARNING) << "Not queueing with empty key";
    return;
  }

  QueuedDataPoint dp;
  dp.key = key;
  dp.unixTime = value.unixTime;
  dp.value = value.value;
  dp.category = category;

  queueDataPoint(dp);
}

// Queues a data point addressed by its already-resolved time series id.
void BucketMap::queueDataPointWithId(
    uint32_t id,
    const TimeValuePair& value,
    uint16_t category) {
  QueuedDataPoint dp;

  // Leave key string empty to indicate that timeSeriesId is used.
  dp.timeSeriesId = id;
  dp.unixTime = value.unixTime;
  dp.value = value.value;
  dp.category = category;

  queueDataPoint(dp);
}

// Pushes `dp` onto the shared MPMC queue. If the queue is gone or full,
// the point is dropped, counted, and reliableDataStartTime_ is moved to
// now because data from before this moment may be incomplete.
void BucketMap::queueDataPoint(QueuedDataPoint& dp) {
  std::shared_ptr<folly::MPMCQueue<QueuedDataPoint>> queue;
  {
    folly::RWSpinLock::ReadHolder guard(lock_);
    queue = dataPointQueue_;
  }

  if (!queue) {
    LOG(ERROR) << "Queue was deleted!";
    GorillaStatsManager::addStatValue(kDataPointQueueDropped);
    reliableDataStartTime_ = time(nullptr);
    return;
  }

  if (!queue->write(std::move(dp))) {
    GorillaStatsManager::addStatValue(kDataPointQueueDropped);
    reliableDataStartTime_ = time(nullptr);
  }
}
// Drains the queued data points accumulated while the shard was
// loading. Points queued by id take a fast path straight to the row;
// points queued by key go through put(). `skipStateCheck` is true only
// for the first drain, done in the PROCESSING_QUEUED_DATA_POINTS state.
void BucketMap::processQueuedDataPoints(bool skipStateCheck) {
  std::shared_ptr<folly::MPMCQueue<QueuedDataPoint>> queue;
  {
    // Take a copy of the shared pointer for the queue. Even if this
    // shard is let go while processing the queue, nothing will cause
    // a segfault and the data points are just skipped.
    folly::RWSpinLock::ReadHolder guard(lock_);
    queue = dataPointQueue_;
  }

  if (!queue) {
    LOG(WARNING) << "Could not process data points. The queue was deleted!";
    return;
  }

  QueuedDataPoint dp;
  while (queue->read(dp)) {
    TimeValuePair value;
    value.unixTime = dp.unixTime;
    value.value = dp.value;

    if (dp.key.length() == 0) {
      // Time series id is known. It's possbible to take a few
      // shortcuts to make adding the data point faster.
      Item item;
      State state;
      {
        folly::RWSpinLock::ReadHolder guard(lock_);
        CHECK(dp.timeSeriesId < rows_.size());
        item = rows_[dp.timeSeriesId];
        state = state_;
      }
      if (!skipStateCheck && state != OWNED && state != PRE_UNOWNED) {
        // Extremely rare corner case. We just set the state to owned
        // and the queue should be really tiny or empty but still
        // state was changed.
        continue;
      }

      putDataPointWithId(&item->second, dp.timeSeriesId, value, dp.category);
    } else {
      // Run these through the normal workflow.
      put(dp.key, value, dp.category, skipStateCheck);
    }
  }
}
// Writes one value into the given time series and, if the bucket
// accepted it, appends the point to the on-disk log. Returns whether
// the point was added.
bool BucketMap::putDataPointWithId(
    BucketedTimeSeries* timeSeries,
    uint32_t timeSeriesId,
    const TimeValuePair& value,
    uint16_t category) {
  uint32_t b = bucket(value.unixTime);
  bool added = timeSeries->put(b, value, &storage_, timeSeriesId, &category);
  if (added) {
    logWriter_->logData(shardId_, timeSeriesId, value.unixTime, value.value);
  }
  return added;
}

// Earliest time from which data is known to be complete (0 if no holes
// or drops have been detected); pushed forward by queue drops and log
// gaps.
int64_t BucketMap::getReliableDataStartTime() {
  return reliableDataStartTime_;
}
// Counts holes in the sequence of completed block file ids and, if any
// are found, logs them, bumps the data-hole counter, and marks recent
// data unreliable.
// NOTE(review): the loop calls std::next(begin()) unconditionally, so
// this requires unreadBlockFiles_ to be non-empty; the only caller
// (readData) checks size() > 0 first.
void BucketMap::checkForMissingBlockFiles() {
  // Just look for holes in the progression of files.
  // Gaps between log and block files will be checked elsewhere.
  int missingFiles = 0;
  for (auto it = unreadBlockFiles_.begin();
       std::next(it) != unreadBlockFiles_.end();
       it++) {
    if (*it + 1 != *std::next(it)) {
      missingFiles++;
    }
  }

  if (missingFiles > 0) {
    uint32_t now = bucket(time(nullptr));
    std::stringstream error;
    error << missingFiles << " completed block files are missing. Got blocks";
    for (uint32_t id : unreadBlockFiles_) {
      error << " " << id;
    }
    error << ". Expected blocks in range [" << now - n_ << ", " << now - 1
          << "]"
          << " for shard " << shardId_;
    LOG(ERROR) << error.str();
    GorillaStatsManager::addStatValue(kDataHoles, missingFiles);
    reliableDataStartTime_ = time(nullptr);
  }
}
// Rebuilds the per-minute index of deviating time series: for each
// series, computes mean and variance over [deviationStartTime, endTime]
// and records, for every minute in [indexingStartTime, endTime], the
// series whose value strays at least `minimumSigma` standard deviations
// from its own mean. Returns the number of (minute, series) entries
// indexed; 0 when the shard isn't OWNED.
int BucketMap::indexDeviatingTimeSeries(
    uint32_t deviationStartTime,
    uint32_t indexingStartTime,
    uint32_t endTime,
    double minimumSigma) {
  if (getState() != OWNED) {
    return 0;
  }

  int totalMinutes = duration(n_) / kGorillaSecondsPerMinute;
  CHECK_EQ(totalMinutes, deviations_.size());

  uint32_t begin = bucket(deviationStartTime);
  uint32_t end = bucket(endTime);
  std::vector<Item> timeSeriesData;
  getEverything(timeSeriesData);

  // Low estimate for the number of time series that have a deviation
  // to avoid constant reallocation.
  int initialSize = timeSeriesData.size() / pow(10, minimumSigma);

  // Built off-lock, then swapped in minute-by-minute at the end.
  std::vector<std::vector<uint32_t>> deviations(totalMinutes);
  for (int i = indexingStartTime; i <= endTime; i += kGorillaSecondsPerMinute) {
    deviations[i / kGorillaSecondsPerMinute % totalMinutes].reserve(
        initialSize);
  }

  for (int i = 0; i < timeSeriesData.size(); i++) {
    auto& timeSeries = timeSeriesData[i];
    if (!timeSeries.get()) {
      continue;
    }

    std::vector<TimeSeriesBlock> out;
    timeSeries->second.get(begin, end, out, getStorage());
    std::vector<TimeValuePair> values;
    for (auto& block : out) {
      TimeSeries::getValues(block, values, deviationStartTime, endTime);
    }

    if (values.size() == 0) {
      continue;
    }

    // Calculate the mean and standard deviation.
    double sum = 0;
    for (auto& v : values) {
      sum += v.value;
    }

    double avg = sum / values.size();
    double variance = 0.0;
    for (auto& value : values) {
      variance += (value.value - avg) * (value.value - avg);
    }
    variance /= values.size();

    // Constant series can never deviate.
    if (variance == 0) {
      continue;
    }

    // Index values that are over the limit.
    double stddev = sqrt(variance);
    double limit = minimumSigma * stddev;
    for (auto& v : values) {
      if (v.unixTime >= indexingStartTime && v.unixTime <= endTime &&
          fabs(v.value - avg) >= limit) {
        uint32_t time = (v.unixTime / kGorillaSecondsPerMinute) % totalMinutes;
        deviations[time].push_back(i);
      }
    }
  }

  folly::RWSpinLock::WriteHolder guard(lock_);
  int deviationsIndexed = 0;
  for (int i = indexingStartTime; i <= endTime; i += kGorillaSecondsPerMinute) {
    int pos = i / kGorillaSecondsPerMinute % totalMinutes;
    deviationsIndexed += deviations[pos].size();
    deviations_[pos] = std::move(deviations[pos]);
  }

  return deviationsIndexed;
}
// Returns the time series flagged as deviating for the minute that
// contains `unixTime` by the most recent indexDeviatingTimeSeries call.
// Returns an empty vector when the shard isn't fully OWNED. Rows whose
// index is no longer valid are silently skipped.
std::vector<BucketMap::Item> BucketMap::getDeviatingTimeSeries(
    uint32_t unixTime) {
  if (getState() != OWNED) {
    return {};
  }

  int totalMinutes = duration(n_) / kGorillaSecondsPerMinute;
  CHECK_EQ(totalMinutes, deviations_.size());

  std::vector<BucketMap::Item> deviations;
  int time = unixTime / kGorillaSecondsPerMinute % totalMinutes;

  folly::RWSpinLock::ReadHolder guard(lock_);
  // Fix: reserve the number of entries indexed for this minute, not the
  // number of minutes in the whole index (deviations_.size()).
  deviations.reserve(deviations_[time].size());
  for (auto& row : deviations_[time]) {
    if (row < rows_.size()) {
      deviations.push_back(rows_[row]);
    }
  }

  return deviations;
}
}
} // facebook::gorilla
|
bsd-3-clause
|
mikael-laine/kitchenrating
|
frontend/web/js/top10.js
|
1273
|
(function($){
    // Current filter values, keyed by each slider's data-key attribute.
    // POSTed as a whole whenever any slider value changes.
    var rangeObj = {
        rank_option1 : 0,
        rank_option2 : 0,
        rank_option3 : 0
    };

    // jQuery UI sliders moving in steps of 50. On change, mirror the
    // value into data-value and refresh the product list.
    $( ".rs_filter" ).slider({
        step:50,
        change: function( event, ui ) {
            $(this).attr('data-value', ui.value);
            renderChange();
        }
    });

    // Initialise each slider from its server-rendered data-value.
    $( ".rs_filter" ).each(function(){
        $(this).slider( "value", parseInt($(this).attr('data-value')) );
    })

    // Decrement button: nudge the sibling slider down one step (50).
    $( ".rs_filter_dec" ).on('click',function(e){
        e.preventDefault();
        var sliderSelect = $(this).closest('.rs_filter_group').find('.rs_filter');
        sliderSelect.slider('value',sliderSelect.slider("value")-50);
    })
    // Increment button: nudge the sibling slider up one step (50).
    $( ".rs_filter_inc" ).on('click',function(e){
        e.preventDefault();
        var sliderSelect = $(this).closest('.rs_filter_group').find('.rs_filter');
        sliderSelect.slider('value',sliderSelect.slider("value")+50);
    })

    // Collects all slider values; if anything changed since the last
    // call, POSTs the full filter set to the URL in the product list's
    // data-action attribute and swaps in the returned HTML.
    function renderChange() {
        var isChanged = false;
        $( ".rs_filter" ).each(function(){
            var key = $(this).attr('data-key');
            var value = $(this).attr('data-value');
            // attr() returns strings; loose != lets "50" match a stored 50.
            if (rangeObj[key] != value) {
                rangeObj[key] = value;
                isChanged = true;
            }
        });

        if (isChanged) {
            var actionUrl = $(".product_list").attr("data-action");
            $.ajax( {
                type: "POST",
                url : actionUrl,
                data : rangeObj,
                success : function (data) {
                    $(".product_list").html(data);
                },
                dataType: 'html'
            });
        }
    }
})(jQuery);
|
bsd-3-clause
|
kendzi/kendzi-math
|
kendzi-math-geometry/src/main/java/kendzi/math/geometry/triangle/Triangle3dUtil.java
|
1142
|
package kendzi.math.geometry.triangle;
import javax.vecmath.Point3d;
/**
 * Utils for 3d triangles.
 */
public class Triangle3dUtil {

    /** Non-instantiable utility class. */
    private Triangle3dUtil() {
    }

    /**
     * Calculates the area of the triangle with vertices {@code a}, {@code b}
     * and {@code c}, computed as half the magnitude of the cross product of
     * the edge vectors AB and AC.
     *
     * @see "http://math.stackexchange.com/a/128999"
     * @param a
     *            the point a
     * @param b
     *            the point b
     * @param c
     *            the point c
     * @return area of triangle (0 for degenerate/collinear vertices)
     */
    public static double area(Point3d a, Point3d b, Point3d c) {
        // Edge vectors AB and AC.
        double abx = b.x - a.x;
        double aby = b.y - a.y;
        double abz = b.z - a.z;

        double acx = c.x - a.x;
        double acy = c.y - a.y;
        double acz = c.z - a.z;

        // Components of the cross product AB x AC.
        double cx = aby * acz - abz * acy;
        double cy = abz * acx - abx * acz;
        double cz = abx * acy - aby * acx;

        return 0.5 * Math.sqrt(cx * cx + cy * cy + cz * cz);
    }
}
|
bsd-3-clause
|
ojii/django-nani
|
hvad/tests/related.py
|
12697
|
# -*- coding: utf-8 -*-
from django.core.exceptions import FieldError
from django.db import models
from django.db.models.query_utils import Q
from hvad.exceptions import WrongManager
from hvad.models import (TranslatedFields, TranslatableModelBase,
TranslatableModel)
from hvad.test_utils.context_managers import LanguageOverride
from hvad.test_utils.fixtures import (OneSingleTranslatedNormalMixin,
TwoNormalOneStandardMixin, TwoTranslatedNormalMixin)
from hvad.test_utils.testcase import NaniTestCase
from hvad.utils import get_translation_aware_manager
from testproject.app.models import Normal, Related, Standard, Other, Many
class NormalToNormalFKTest(NaniTestCase, OneSingleTranslatedNormalMixin):
    # Tests for a plain ForeignKey from one shared model to another
    # (Related.normal -> Normal); fixture mixin provides Normal pk=1.
    def test_relation(self):
        """
        'normal' (aka 'shared') relations are relations from the shared (or
        normal) model to another shared (or normal) model.
        They should behave like normal foreign keys in Django
        """
        normal = Normal.objects.language('en').get(pk=1)
        related = Related.objects.create(normal=normal)
        # Forward access exposes both shared and translated fields.
        self.assertEqual(related.normal.pk, normal.pk)
        self.assertEqual(related.normal.shared_field, normal.shared_field)
        self.assertEqual(related.normal.translated_field, normal.translated_field)
        # Reverse access works via the 'rel1' related name.
        self.assertTrue(related in normal.rel1.all())
    def test_failed_relation(self):
        """A dangling FK id must raise DoesNotExist on attribute access."""
        related = Related.objects.create()
        # 999 does not correspond to any Normal row.
        related.normal_id = 999
        related.save()
        self.assertRaises(Normal.DoesNotExist, getattr, related, 'normal')
class StandardToTransFKTest(NaniTestCase, TwoNormalOneStandardMixin):
    # Tests for a ForeignKey from a non-translatable model (Standard) to a
    # translatable one (Normal). Fixture mixin provides Normal pk=1 with 'en'
    # and 'ja' translations plus Standard pk=1 pointing at it.
    def test_relation(self):
        """The FK resolves to the active language's translation."""
        en = Normal.objects.language('en').get(pk=1)
        ja = Normal.objects.language('ja').get(pk=1)
        related = Standard.objects.get(pk=1)
        with LanguageOverride('en'):
            related = self.reload(related)
            self.assertEqual(related.normal.pk, en.pk)
            self.assertEqual(related.normal.shared_field, en.shared_field)
            self.assertEqual(related.normal.translated_field, en.translated_field)
            self.assertTrue(related in en.standards.all())
        with LanguageOverride('ja'):
            related = self.reload(related)
            self.assertEqual(related.normal.pk, ja.pk)
            self.assertEqual(related.normal.shared_field, ja.shared_field)
            self.assertEqual(related.normal.translated_field, ja.translated_field)
            self.assertTrue(related in ja.standards.all())
    def test_num_queries(self):
        """select_related covers shared fields; translated fields cost one
        extra query (the translation row is not joined in)."""
        with LanguageOverride('en'):
            en = Normal.objects.language('en').get(pk=1)
            with self.assertNumQueries(1):
                related = Standard.objects.select_related('normal').get(pk=1)
                self.assertEqual(related.normal.pk, en.pk)
            with self.assertNumQueries(0):
                self.assertEqual(related.normal.shared_field, en.shared_field)
            with self.assertNumQueries(1):
                self.assertEqual(related.normal.translated_field, en.translated_field)
    def test_lookup_by_pk(self):
        """Lookup across the FK by primary key works with the default manager."""
        en = Normal.objects.language('en').get(pk=1)
        by_pk = Standard.objects.get(normal__pk=en.pk)
        with LanguageOverride('en'):
            self.assertEqual(by_pk.normal.pk, en.pk)
            self.assertEqual(by_pk.normal.shared_field, en.shared_field)
            self.assertEqual(by_pk.normal.translated_field, en.translated_field)
            self.assertTrue(by_pk in en.standards.all())
    def test_lookup_by_shared_field(self):
        """Lookup across the FK by a shared field works with the default manager."""
        en = Normal.objects.language('en').get(pk=1)
        by_shared_field = Standard.objects.get(normal__shared_field=en.shared_field)
        with LanguageOverride('en'):
            self.assertEqual(by_shared_field.normal.pk, en.pk)
            self.assertEqual(by_shared_field.normal.shared_field, en.shared_field)
            self.assertEqual(by_shared_field.normal.translated_field, en.translated_field)
            self.assertTrue(by_shared_field in en.standards.all())
    def test_lookup_by_translated_field(self):
        """Lookup by a translated field requires the translation-aware manager."""
        en = Normal.objects.language('en').get(pk=1)
        translation_aware_manager = get_translation_aware_manager(Standard)
        with LanguageOverride('en'):
            by_translated_field = translation_aware_manager.get(normal__translated_field=en.translated_field)
            self.assertEqual(by_translated_field.normal.pk, en.pk)
            self.assertEqual(by_translated_field.normal.shared_field, en.shared_field)
            self.assertEqual(by_translated_field.normal.translated_field, en.translated_field)
            self.assertTrue(by_translated_field in en.standards.all())
    def test_lookup_by_translated_field_requires_translation_aware_manager(self):
        """The default manager must raise WrongManager for translated lookups."""
        en = Normal.objects.language('en').get(pk=1)
        with LanguageOverride('en'):
            self.assertRaises(WrongManager, Standard.objects.get,
                              normal__translated_field=en.translated_field)
    def test_lookup_by_non_existing_field(self):
        """A bogus field name still raises the usual FieldError."""
        en = Normal.objects.language('en').get(pk=1)
        with LanguageOverride('en'):
            self.assertRaises(FieldError, Standard.objects.get,
                              normal__non_existing_field=1)
    def test_lookup_by_translated_field_using_q_objects(self):
        """Translated-field lookups also work when wrapped in Q objects."""
        en = Normal.objects.language('en').get(pk=1)
        translation_aware_manager = get_translation_aware_manager(Standard)
        with LanguageOverride('en'):
            q = Q(normal__translated_field=en.translated_field)
            by_translated_field = translation_aware_manager.get(q)
            self.assertEqual(by_translated_field.normal.pk, en.pk)
            self.assertEqual(by_translated_field.normal.shared_field, en.shared_field)
            self.assertEqual(by_translated_field.normal.translated_field, en.translated_field)
            self.assertTrue(by_translated_field in en.standards.all())
    def test_filter_by_shared_field(self):
        """filter() on a shared field returns the expected queryset."""
        en = Normal.objects.language('en').get(pk=1)
        with LanguageOverride('en'):
            by_shared_field = Standard.objects.filter(normal__shared_field=en.shared_field)
            normals = [obj.normal.pk for obj in by_shared_field]
            expected = [en.pk]
            self.assertEqual(normals, expected)
            shared_fields = [obj.normal.shared_field for obj in by_shared_field]
            expected_fields = [en.shared_field]
            self.assertEqual(shared_fields, expected_fields)
            translated_fields = [obj.normal.translated_field for obj in by_shared_field]
            expected_fields = [en.translated_field]
            self.assertEqual(translated_fields, expected_fields)
            for obj in by_shared_field:
                self.assertTrue(obj in en.standards.all())
    def test_filter_by_translated_field(self):
        """filter() on a translated field works via the aware manager."""
        en = Normal.objects.language('en').get(pk=1)
        translation_aware_manager = get_translation_aware_manager(Standard)
        with LanguageOverride('en'):
            by_translated_field = translation_aware_manager.filter(normal__translated_field=en.translated_field)
            normals = [obj.normal.pk for obj in by_translated_field]
            expected = [en.pk]
            self.assertEqual(normals, expected)
            shared_fields = [obj.normal.shared_field for obj in by_translated_field]
            expected_fields = [en.shared_field]
            self.assertEqual(shared_fields, expected_fields)
            translated_fields = [obj.normal.translated_field for obj in by_translated_field]
            expected_fields = [en.translated_field]
            self.assertEqual(translated_fields, expected_fields)
            for obj in by_translated_field:
                self.assertTrue(obj in en.standards.all())
    def test_filter_by_translated_field_requires_translation_aware_manager(self):
        """The default manager must raise WrongManager for translated filters too."""
        en = Normal.objects.language('en').get(pk=1)
        with LanguageOverride('en'):
            self.assertRaises(WrongManager, Standard.objects.filter,
                              normal__translated_field=en.translated_field)
    def test_filter_by_translated_field_using_q_objects(self):
        """Translated-field filters also work when wrapped in Q objects."""
        en = Normal.objects.language('en').get(pk=1)
        translation_aware_manager = get_translation_aware_manager(Standard)
        with LanguageOverride('en'):
            q = Q(normal__translated_field=en.translated_field)
            by_translated_field = translation_aware_manager.filter(q)
            normals = [obj.normal.pk for obj in by_translated_field]
            expected = [en.pk]
            self.assertEqual(normals, expected)
            shared_fields = [obj.normal.shared_field for obj in by_translated_field]
            expected_fields = [en.shared_field]
            self.assertEqual(shared_fields, expected_fields)
            translated_fields = [obj.normal.translated_field for obj in by_translated_field]
            expected_fields = [en.translated_field]
            self.assertEqual(translated_fields, expected_fields)
            for obj in by_translated_field:
                self.assertTrue(obj in en.standards.all())
class TripleRelationTests(NaniTestCase):
    # Tests spanning two relations (Standard -> Normal -> Other) with both the
    # default and the translation-aware manager.
    def test_triple(self):
        """Two-hop lookups resolve correctly, and only the translation-aware
        manager is language-sensitive."""
        normal = Normal.objects.language('en').create(shared_field='SHARED', translated_field='English')
        other = Other.objects.create(normal=normal)
        standard = Standard.objects.create(normal=normal, normal_field='NORMAL FIELD')
        # Reverse lookups from Normal through each related model.
        obj = Normal.objects.language('en').get(standards__pk=standard.pk)
        self.assertEqual(obj.pk, normal.pk)
        obj = Normal.objects.language('en').get(others__pk=other.pk)
        self.assertEqual(obj.pk, normal.pk)
        # We created an english Normal object, so we want to make sure that we use 'en'
        with LanguageOverride('en'):
            obj = get_translation_aware_manager(Standard).get(normal__others__pk=other.pk)
            self.assertEqual(obj.pk, standard.pk)
        # If we don't use language 'en', it should give DoesNotExist, when using the
        # translation aware manager
        with LanguageOverride('ja'):
            manager = get_translation_aware_manager(Standard)
            self.assertRaises(Standard.DoesNotExist, manager.get, normal__others__pk=other.pk)
        # However, if we don't use the translation aware manager, we can query any
        # the shared fields in any language, and it should return the object,
        # even though there is no translated Normal objects
        with LanguageOverride('ja'):
            obj = Standard.objects.get(normal__others__pk=other.pk)
            self.assertEqual(obj.pk, standard.pk)
class ManyToManyTest(NaniTestCase, TwoTranslatedNormalMixin):
    # Tests for the Normal <-> Many many-to-many relation; fixture mixin
    # provides two translated Normal objects.
    def test_triple(self):
        """Querying across the M2M returns the same objects via the
        translation-aware queryset and via the plain related manager."""
        normal1 = Normal.objects.language('en').get(pk=1)
        many = normal1.manyrels.create(name="many1")
        with LanguageOverride('en'):
            # Get the Normal objects associated with the Many object "many1":
            normals = Normal.objects.language().filter(manyrels__id=many.pk).order_by("translated_field")
            self.assertEqual([n.pk for n in normals], [normal1.pk])
            # Same thing, another way:
            normals = many.normals.language() # This query is fetching Normal objects that are not associated with the Many object "many" !
            normals_plain = many.normals.all()
            # The two queries above should return the same objects, since all normals are translated
            self.assertEqual([n.pk for n in normals], [n.pk for n in normals_plain])
class ForwardDeclaringForeignKeyTests(NaniTestCase):
    # Regression tests for issue #22: a ForeignKey declared with a string
    # reference to a model defined later must not blow up at class-creation
    # time. The tests pass if the class bodies below execute without error.
    def test_issue_22(self):
        """Forward-declared FK inside TranslatedFields on a translatable model."""
        class ForwardRelated(TranslatableModel):
            shared_field = models.CharField(max_length=255)
            translations = TranslatedFields(
                translated = models.ForeignKey("ReverseRelated", related_name='rel', null=True),
            )
        class ReverseRelated(TranslatableModel):
            shared_field = models.CharField(max_length=255)
            translated_fields = TranslatedFields(
                translated = models.CharField(max_length=1)
            )
    def test_issue_22_non_translatable_model(self):
        """Forward-declared FK from a plain model to a translatable model."""
        class ForwardRelated2(models.Model):
            shared_field = models.CharField(max_length=255)
            fk = models.ForeignKey("ReverseRelated2", related_name='rel', null=True)
        class ReverseRelated2(TranslatableModel):
            shared_field = models.CharField(max_length=255)
            translated_fields = TranslatedFields(
                translated = models.CharField(max_length=1)
            )
|
bsd-3-clause
|
bogdal/django-filer
|
filer/migrations/0015_auto__add_field_file_who_can_read_local__add_field_file_who_can_edit_l.py
|
13812
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    def forwards(self, orm):
        """Adds the permission TextFields (who_can_read/who_can_edit and their
        *_local variants) to both filer_file and filer_folder.

        South-generated schema migration; all columns default to '' and allow
        blank values.
        """
        # Adding field 'File.who_can_read_local'
        db.add_column(u'filer_file', 'who_can_read_local',
                      self.gf('django.db.models.fields.TextField')(default='', blank=True),
                      keep_default=False)
        # Adding field 'File.who_can_edit_local'
        db.add_column(u'filer_file', 'who_can_edit_local',
                      self.gf('django.db.models.fields.TextField')(default='', blank=True),
                      keep_default=False)
        # Adding field 'File.who_can_read'
        db.add_column(u'filer_file', 'who_can_read',
                      self.gf('django.db.models.fields.TextField')(default='', blank=True),
                      keep_default=False)
        # Adding field 'File.who_can_edit'
        db.add_column(u'filer_file', 'who_can_edit',
                      self.gf('django.db.models.fields.TextField')(default='', blank=True),
                      keep_default=False)
        # Adding field 'Folder.who_can_read_local'
        db.add_column(u'filer_folder', 'who_can_read_local',
                      self.gf('django.db.models.fields.TextField')(default='', blank=True),
                      keep_default=False)
        # Adding field 'Folder.who_can_edit_local'
        db.add_column(u'filer_folder', 'who_can_edit_local',
                      self.gf('django.db.models.fields.TextField')(default='', blank=True),
                      keep_default=False)
        # Adding field 'Folder.who_can_read'
        db.add_column(u'filer_folder', 'who_can_read',
                      self.gf('django.db.models.fields.TextField')(default='', blank=True),
                      keep_default=False)
        # Adding field 'Folder.who_can_edit'
        db.add_column(u'filer_folder', 'who_can_edit',
                      self.gf('django.db.models.fields.TextField')(default='', blank=True),
                      keep_default=False)
    def backwards(self, orm):
        """Reverses forwards(): drops the eight permission columns added to
        filer_file and filer_folder."""
        # Deleting field 'File.who_can_read_local'
        db.delete_column(u'filer_file', 'who_can_read_local')
        # Deleting field 'File.who_can_edit_local'
        db.delete_column(u'filer_file', 'who_can_edit_local')
        # Deleting field 'File.who_can_read'
        db.delete_column(u'filer_file', 'who_can_read')
        # Deleting field 'File.who_can_edit'
        db.delete_column(u'filer_file', 'who_can_edit')
        # Deleting field 'Folder.who_can_read_local'
        db.delete_column(u'filer_folder', 'who_can_read_local')
        # Deleting field 'Folder.who_can_edit_local'
        db.delete_column(u'filer_folder', 'who_can_edit_local')
        # Deleting field 'Folder.who_can_read'
        db.delete_column(u'filer_folder', 'who_can_read')
        # Deleting field 'Folder.who_can_edit'
        db.delete_column(u'filer_folder', 'who_can_edit')
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'filer.clipboard': {
'Meta': {'object_name': 'Clipboard'},
'files': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'in_clipboards'", 'symmetrical': 'False', 'through': "orm['filer.ClipboardItem']", 'to': "orm['filer.File']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'filer_clipboards'", 'to': u"orm['auth.User']"})
},
'filer.clipboarditem': {
'Meta': {'object_name': 'ClipboardItem'},
'clipboard': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['filer.Clipboard']"}),
'file': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['filer.File']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'filer.file': {
'Meta': {'object_name': 'File'},
'_file_size': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'file': ('django.db.models.fields.files.FileField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'folder': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'all_files'", 'null': 'True', 'to': "orm['filer.Folder']"}),
'has_all_mandatory_data': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_public': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'modified_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255', 'blank': 'True'}),
'original_filename': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'owned_files'", 'null': 'True', 'to': u"orm['auth.User']"}),
'polymorphic_ctype': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'polymorphic_filer.file_set'", 'null': 'True', 'to': u"orm['contenttypes.ContentType']"}),
'sha1': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '40', 'blank': 'True'}),
'uploaded_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'who_can_edit': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'who_can_edit_local': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'who_can_read': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'who_can_read_local': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'})
},
'filer.folder': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('parent', 'name'),)", 'object_name': 'Folder'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'modified_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'filer_owned_folders'", 'null': 'True', 'to': u"orm['auth.User']"}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['filer.Folder']"}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'uploaded_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'who_can_edit': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'who_can_edit_local': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'who_can_read': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'who_can_read_local': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'})
},
'filer.folderpermission': {
'Meta': {'object_name': 'FolderPermission'},
'can_add_children': ('django.db.models.fields.SmallIntegerField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'can_edit': ('django.db.models.fields.SmallIntegerField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'can_read': ('django.db.models.fields.SmallIntegerField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'everybody': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'folder': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['filer.Folder']", 'null': 'True', 'blank': 'True'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'filer_folder_permissions'", 'null': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'type': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'filer_folder_permissions'", 'null': 'True', 'to': u"orm['auth.User']"})
},
'filer.image': {
'Meta': {'object_name': 'Image', '_ormbases': ['filer.File']},
'_height': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'_width': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'author': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'date_taken': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'default_alt_text': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'default_caption': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
u'file_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['filer.File']", 'unique': 'True', 'primary_key': 'True'}),
'must_always_publish_author_credit': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'must_always_publish_copyright': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'subject_location': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '64', 'null': 'True', 'blank': 'True'})
}
}
complete_apps = ['filer']
|
bsd-3-clause
|
mural/spm
|
db4oj/src/main/java/com/db4o/internal/Reflection4.java
|
6196
|
/* This file is part of the db4o object database http://www.db4o.com
Copyright (C) 2004 - 2010 Versant Corporation http://www.versant.com
db4o is free software; you can redistribute it and/or modify it under
the terms of version 3 of the GNU General Public License as published
by the Free Software Foundation.
db4o is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.
You should have received a copy of the GNU General Public License along
with this program. If not, see http://www.gnu.org/licenses/. */
package com.db4o.internal;
import java.lang.reflect.*;
import com.db4o.foundation.*;
/**
* @exclude
*
* Use the methods in this class for system classes only, since they
* are not ClassLoader or Reflector-aware.
*
* TODO: this class should go to foundation.reflect, along with ReflectException and ReflectPlatform
*/
public class Reflection4 {

    /**
     * Invokes the named zero-argument static method on {@code clazz}.
     *
     * @return the method's return value, or null when no such method exists
     */
    public static Object invokeStatic(Class clazz, String methodName) {
        return invoke(clazz, methodName, null, null, null);
    }

    /** Invokes the named zero-argument instance method on {@code obj}. */
    public static Object invoke (Object obj, String methodName) throws ReflectException {
        return invoke(obj.getClass(), methodName, null, null, obj );
    }

    /**
     * Invokes the named instance method on {@code obj}, deriving the formal
     * parameter types from the runtime classes of {@code params}.
     * NOTE: every element of {@code params} must be non-null (a null element
     * throws NullPointerException here) and of exactly the declared parameter
     * type, otherwise getDeclaredMethod will not find the method.
     */
    public static Object invoke (Object obj, String methodName, Object[] params) throws ReflectException {
        Class[] paramClasses = new Class[params.length];
        for (int i = 0; i < params.length; i++) {
            paramClasses[i] = params[i].getClass();
        }
        return invoke(obj.getClass(), methodName, paramClasses, params, obj );
    }

    /** Invokes the named instance method on {@code obj} with explicit parameter types. */
    public static Object invoke (Object obj, String methodName, Class[] paramClasses, Object[] params) throws ReflectException {
        return invoke(obj.getClass(), methodName, paramClasses, params, obj );
    }

    /** Invokes the named static method on {@code clazz} with explicit parameter types. */
    public static Object invoke (Class clazz, String methodName, Class[] paramClasses, Object[] params) throws ReflectException {
        return invoke(clazz, methodName, paramClasses, params, null);
    }

    private static Object invoke(Class clazz, String methodName, Class[] paramClasses, Object[] params, Object onObject) {
        return invoke(params, onObject, getMethod(clazz, methodName, paramClasses));
    }

    /**
     * Invokes the named method on a class resolved by name; returns null when
     * either the class or the method cannot be found.
     */
    public static Object invoke(String className, String methodName,
            Class[] paramClasses, Object[] params, Object onObject) throws ReflectException {
        Method method = getMethod(className, methodName, paramClasses);
        return invoke(params, onObject, method);
    }

    /**
     * Invokes {@code method} on {@code onObject} (null for static methods),
     * forcing accessibility first.
     *
     * @return the invocation result, or null when {@code method} is null
     * @throws ReflectException wrapping the target exception or any access error
     */
    public static Object invoke(Object[] params, Object onObject, Method method) throws ReflectException {
        if(method == null) {
            // Lookup failures are silently mapped to a null result by design.
            return null;
        }
        Platform4.setAccessible(method);
        try {
            return method.invoke(onObject, params);
        } catch (InvocationTargetException e) {
            // Unwrap so callers see the exception thrown by the target method.
            throw new ReflectException(e.getTargetException());
        } catch (IllegalArgumentException e) {
            throw new ReflectException(e);
        } catch (IllegalAccessException e) {
            throw new ReflectException(e);
        }
    }

    /**
     * Resolves {@code className} and looks up the method; null when the class
     * cannot be loaded or the method is not found.
     *
     * calling this method "method" will break C# conversion with the old converter
     */
    public static Method getMethod(String className, String methodName,
            Class[] paramClasses) {
        Class clazz = ReflectPlatform.forName(className);
        if (clazz == null) {
            return null;
        }
        return getMethod(clazz, methodName, paramClasses);
    }

    /**
     * Looks up the named method on {@code clazz}, walking up the superclass
     * chain so non-public inherited methods are found too.
     *
     * @return the method, or null when no class in the hierarchy declares it
     */
    public static Method getMethod(Class clazz, String methodName,
            Class[] paramClasses) {
        Class curclazz = clazz;
        while (curclazz != null) {
            try {
                return curclazz.getDeclaredMethod(methodName, paramClasses);
            } catch (Exception ignored) {
                // Not declared on this class - keep walking up the hierarchy.
            }
            curclazz = curclazz.getSuperclass();
        }
        return null;
    }

    /** Convenience overload for a single-parameter invocation. */
    public static Object invoke(final Object obj, String methodName,
            Class signature, Object value) throws ReflectException {
        return invoke(obj, methodName, new Class[] { signature }, new Object[] { value });
    }

    /**
     * Looks up the named field on {@code clazz} or any superclass and makes it
     * accessible.
     *
     * @return the field, or null when no class in the hierarchy declares it
     */
    public static Field getField(final Class clazz, final String name) {
        Class curclazz = clazz;
        while (curclazz != null) {
            try {
                // getDeclaredField either returns a non-null Field or throws
                // NoSuchFieldException, so no null check is needed afterwards.
                Field field = curclazz.getDeclaredField(name);
                Platform4.setAccessible(field);
                return field;
            } catch (Exception ignored) {
                // Not declared on this class - keep walking up the hierarchy.
            }
            curclazz = curclazz.getSuperclass();
        }
        return null;
    }

    /**
     * Reads the named field from {@code obj}, searching the class hierarchy.
     *
     * @throws ReflectException when the field does not exist (NPE from the
     *         null getField result) or cannot be read
     */
    public static Object getFieldValue(final Object obj, final String fieldName)
            throws ReflectException {
        try {
            return getField(obj.getClass(), fieldName).get(obj);
        } catch (Exception e) {
            throw new ReflectException(e);
        }
    }

    /**
     * Creates a new instance of the same class as {@code template} via its
     * public no-arg constructor.
     *
     * @throws ReflectException when no such constructor exists or it fails
     */
    public static Object newInstance(Object template) {
        try {
            return template.getClass().newInstance();
        } catch (Exception e) {
            throw new ReflectException(e);
        }
    }

    /** Returns a debug string of {@code obj} and its fields, two levels deep. */
    public static String dump(Object obj){
        return dumpPreventRecursion(obj, new IdentitySet4(), 2);
    }

    /**
     * Recursive worker for {@link #dump(Object)}; {@code dumped} breaks cycles
     * and {@code stackLimit} bounds the recursion depth.
     */
    private static String dumpPreventRecursion(Object obj, IdentitySet4 dumped, int stackLimit) {
        stackLimit--;
        if(obj == null){
            return "null";
        }
        Class clazz = obj.getClass();
        if(Platform4.isSimple(clazz)){
            return obj.toString();
        }
        StringBuffer sb = new StringBuffer();
        sb.append(clazz.getName());
        sb.append(" (");
        sb.append(System.identityHashCode(obj));
        sb.append(")");
        if(dumped.contains(obj) || stackLimit <= 0){
            return sb.toString();
        }
        dumped.add(obj);
        Field[] fields = clazz.getDeclaredFields();
        for (Field field : fields) {
            Platform4.setAccessible(field);
            try {
                // A field that reads the same via null and via obj is static;
                // skip it. (Field.getModifiers() wouldn't sharpen to C#.)
                if( field.get(null) == field.get(obj) ){
                    continue;
                }
            } catch (Exception ignored) {
                // Instance fields throw on get(null) - fall through and dump.
            }
            sb.append("\n");
            sb.append("\t");
            sb.append(field.getName());
            sb.append(": ");
            try {
                sb.append(dumpPreventRecursion(field.get(obj), dumped, stackLimit));
            } catch (Exception e) {
                sb.append("Exception caught: ");
                sb.append(e);
            }
        }
        return sb.toString();
    }
}
|
bsd-3-clause
|
michalliu/chromium-depot_tools
|
fetch.py
|
10248
|
#!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Tool to perform checkouts in one easy command line!
Usage:
fetch <recipe> [--property=value [--property2=value2 ...]]
This script is a wrapper around various version control and repository
checkout commands. It requires a |recipe| name, fetches data from that
recipe in depot_tools/recipes, and then performs all necessary inits,
checkouts, pulls, fetches, etc.
Optional arguments may be passed on the command line in key-value pairs.
These parameters will be passed through to the recipe's main method.
"""
import json
import optparse
import os
import pipes
import subprocess
import sys
import textwrap
from distutils import spawn
SCRIPT_PATH = os.path.dirname(os.path.abspath(__file__))
#################################################
# Checkout class definitions.
#################################################
class Checkout(object):
  """Base class for implementing different types of checkouts.

  Attributes:
    |base|: the absolute path of the directory in which this script is run.
    |spec|: the spec for this checkout as returned by the recipe. Different
        subclasses will expect different keys in this dictionary.
    |root|: the directory into which the checkout will be performed, as returned
        by the recipe. This is a relative path from |base|.
  """
  def __init__(self, options, spec, root):
    self.base = os.getcwd()
    self.options = options
    self.spec = spec
    self.root = root
  # Template methods; subclasses override the ones they support.
  def exists(self):
    pass
  def init(self):
    pass
  def sync(self):
    pass
  def run(self, cmd, **kwargs):
    """Echoes |cmd| and runs it; in --dry-run mode only echoes (returns 0)."""
    print 'Running: %s' % (' '.join(pipes.quote(x) for x in cmd))
    if self.options.dry_run:
      return 0
    return subprocess.check_call(cmd, **kwargs)
class GclientCheckout(Checkout):
  """Checkout flavor that knows how to invoke gclient."""
  def run_gclient(self, *cmd, **kwargs):
    """Runs gclient with |cmd|; falls back to the bundled gclient.py when no
    gclient executable is on PATH."""
    if spawn.find_executable('gclient'):
      prefix = ('gclient',)
    else:
      prefix = (sys.executable, os.path.join(SCRIPT_PATH, 'gclient.py'))
    return self.run(prefix + cmd, **kwargs)
class GitCheckout(Checkout):
  """Checkout flavor that knows how to invoke git."""
  def run_git(self, *cmd, **kwargs):
    """Runs git with |cmd|; on Windows without git on PATH, uses the bundled
    git.bat instead."""
    git_path = 'git'
    if sys.platform == 'win32' and not spawn.find_executable('git'):
      git_path = os.path.join(SCRIPT_PATH, 'git.bat')
    return self.run((git_path,) + cmd, **kwargs)
class SvnCheckout(Checkout):
  """Checkout flavor that knows how to invoke svn."""
  def run_svn(self, *cmd, **kwargs):
    """Runs svn with |cmd|; on Windows without svn on PATH, uses the bundled
    svn_bin/svn.exe instead."""
    svn_path = 'svn'
    if sys.platform == 'win32' and not spawn.find_executable('svn'):
      svn_path = os.path.join(SCRIPT_PATH, 'svn_bin', 'svn.exe')
    return self.run((svn_path,) + cmd, **kwargs)
class GclientGitCheckout(GclientCheckout, GitCheckout):
  """gclient-managed git checkout: builds a .gclient spec, syncs, then tweaks
  git config for submodules and tags."""
  def __init__(self, options, spec, root):
    super(GclientGitCheckout, self).__init__(options, spec, root)
    assert 'solutions' in self.spec
    # Serialize the recipe spec into the literal text of a .gclient file.
    keys = ['solutions', 'target_os', 'target_os_only']
    gclient_spec = '\n'.join('%s = %s' % (key, self.spec[key])
                             for key in keys if key in self.spec)
    self.spec['gclient_spec'] = gclient_spec
  def exists(self):
    # The checkout exists if the target directory is already present.
    return os.path.exists(os.path.join(os.getcwd(), self.root))
  def init(self):
    # Configure and do the gclient checkout.
    self.run_gclient('config', '--spec', self.spec['gclient_spec'])
    sync_cmd = ['sync']
    if self.options.nohooks:
      sync_cmd.append('--nohooks')
    if self.options.no_history:
      sync_cmd.append('--no-history')
    if self.spec.get('with_branch_heads', False):
      sync_cmd.append('--with_branch_heads')
    self.run_gclient(*sync_cmd)
    # Configure git.
    wd = os.path.join(self.base, self.root)
    if self.options.dry_run:
      print 'cd %s' % wd
    # Hide submodule noise in git status output.
    self.run_git(
        'submodule', 'foreach',
        'git config -f $toplevel/.git/config submodule.$name.ignore all',
        cwd=wd)
    # Also fetch tags from origin.
    self.run_git(
        'config', '--add', 'remote.origin.fetch',
        '+refs/tags/*:refs/tags/*', cwd=wd)
    self.run_git('config', 'diff.ignoreSubmodules', 'all', cwd=wd)
class GclientGitSvnCheckout(GclientGitCheckout, SvnCheckout):
  """Git checkout via gclient, with git-svn configured on top."""

  def __init__(self, options, spec, root):
    super(GclientGitSvnCheckout, self).__init__(options, spec, root)

  def init(self):
    # Ensure we are authenticated with subversion for all submodules.
    git_svn_dirs = json.loads(self.spec.get('submodule_git_svn_spec', '{}'))
    git_svn_dirs.update({self.root: self.spec})
    for _, svn_spec in git_svn_dirs.iteritems():
      if svn_spec.get('svn_url'):
        try:
          self.run_svn('ls', '--non-interactive', svn_spec['svn_url'])
        except subprocess.CalledProcessError:
          # Abort before the (long) checkout so the user can authenticate.
          print 'Please run `svn ls %s`' % svn_spec['svn_url']
          return 1

    # Do the normal gclient+git checkout first.
    super(GclientGitSvnCheckout, self).init()

    # Configure git-svn for the root checkout and every submodule listed
    # in the spec.
    for path, svn_spec in git_svn_dirs.iteritems():
      real_path = os.path.join(*path.split('/'))
      if real_path != self.root:
        real_path = os.path.join(self.root, real_path)
      wd = os.path.join(self.base, real_path)
      if self.options.dry_run:
        print 'cd %s' % wd
      if svn_spec.get('auto'):
        # 'git auto-svn' derives the svn-remote config automatically.
        self.run_git('auto-svn', cwd=wd)
        continue
      self.run_git('svn', 'init', svn_spec['svn_url'], cwd=wd)
      # Replace the default fetch refspecs with the ones from the spec.
      self.run_git('config', '--unset-all', 'svn-remote.svn.fetch', cwd=wd)
      for svn_branch, git_ref in svn_spec.get('git_svn_fetch', {}).items():
        self.run_git('config', '--add', 'svn-remote.svn.fetch',
                     '%s:%s' % (svn_branch, git_ref), cwd=wd)
      for svn_branch, git_ref in svn_spec.get('git_svn_branches', {}).items():
        self.run_git('config', '--add', 'svn-remote.svn.branches',
                     '%s:%s' % (svn_branch, git_ref), cwd=wd)
      self.run_git('svn', 'fetch', cwd=wd)
# Maps the 'type' field of a recipe spec to the Checkout implementation
# that performs it (consumed by CheckoutFactory below).
CHECKOUT_TYPE_MAP = {
    'gclient': GclientCheckout,
    'gclient_git': GclientGitCheckout,
    'gclient_git_svn': GclientGitSvnCheckout,
    'git': GitCheckout,
}
def CheckoutFactory(type_name, options, spec, root):
  """Factory to build Checkout class instances."""
  klass = CHECKOUT_TYPE_MAP.get(type_name)
  if klass is None:
    raise KeyError('unrecognized checkout type: %s' % type_name)
  return klass(options, spec, root)
#################################################
# Utility function and file entry point.
#################################################
def usage(msg=None):
  """Print help (optionally preceded by an error message) and exit.

  Exits with status 1 when msg is given, 0 otherwise.
  """
  if msg:
    print 'Error:', msg
  print textwrap.dedent("""\
usage: %s [options] <recipe> [--property=value [--property2=value2 ...]]
This script can be used to download the Chromium sources. See
http://www.chromium.org/developers/how-tos/get-the-code
for full usage instructions.
Valid options:
-h, --help, help Print this message.
--nohooks Don't run hooks after checkout.
-n, --dry-run Don't run commands, only print them.
--no-history Perform shallow clones, don't fetch the full git history.
Valid fetch recipes:""") % os.path.basename(sys.argv[0])
  # Each .py file in the recipes directory is a selectable recipe.
  for fname in os.listdir(os.path.join(SCRIPT_PATH, 'recipes')):
    if fname.endswith('.py'):
      print ' ' + fname[:-3]
  sys.exit(bool(msg))
def handle_args(argv):
  """Gets the recipe name from the command line arguments.

  Returns (options, recipe, props) where options is an optparse.Values
  with dry_run/nohooks/no_history, recipe is the recipe name, and props
  is the list of --key=value recipe properties.  Calls usage() (which
  exits) on any argument error.  Mutates argv in place.
  """
  if len(argv) <= 1:
    usage('Must specify a recipe.')
  if argv[1] in ('-h', '--help', 'help'):
    usage()
  dry_run = False
  nohooks = False
  no_history = False
  # Consume leading option flags; the first non-flag argument is the recipe.
  while len(argv) >= 2:
    arg = argv[1]
    if not arg.startswith('-'):
      break
    argv.pop(1)
    if arg in ('-n', '--dry-run'):
      dry_run = True
    elif arg == '--nohooks':
      nohooks = True
    elif arg == '--no-history':
      no_history = True
    else:
      usage('Invalid option %s.' % arg)
  def looks_like_arg(arg):
    # Recipe properties must look like --key=value.
    return arg.startswith('--') and arg.count('=') == 1
  bad_parms = [x for x in argv[2:] if not looks_like_arg(x)]
  if bad_parms:
    usage('Got bad arguments %s' % bad_parms)
  recipe = argv[1]
  props = argv[2:]
  return (
      optparse.Values(
          {'dry_run':dry_run, 'nohooks':nohooks, 'no_history': no_history }),
      recipe,
      props)
def run_recipe_fetch(recipe, props, aliased=False):
  """Invoke a recipe's fetch method with the passed-through args
  and return its json output as a python object.

  Returns (spec, root) where spec is the recipe's fetch spec and root is
  the directory name it expects to check out into.  Exits the process if
  the recipe does not exist.
  """
  recipe_path = os.path.abspath(os.path.join(SCRIPT_PATH, 'recipes', recipe))
  if not os.path.exists(recipe_path + '.py'):
    print "Could not find a recipe for %s" % recipe
    sys.exit(1)
  cmd = [sys.executable, recipe_path + '.py', 'fetch'] + props
  result = subprocess.Popen(cmd, stdout=subprocess.PIPE).communicate()[0]
  spec = json.loads(result)
  if 'alias' in spec:
    # An aliased recipe redirects to another recipe with extra props.
    # Only one level of aliasing is allowed (hence the assert).
    assert not aliased
    return run_recipe_fetch(
        spec['alias']['recipe'], spec['alias']['props'] + props, aliased=True)
  # Ask the recipe for its checkout root in a second subprocess call.
  cmd = [sys.executable, recipe_path + '.py', 'root']
  result = subprocess.Popen(cmd, stdout=subprocess.PIPE).communicate()[0]
  root = json.loads(result)
  return spec, root
def run(options, spec, root):
  """Perform a checkout with the given type and configuration.

  Args:
    options: Options instance.
    spec: Checkout configuration returned by the the recipe's fetch_spec
        method (checkout type, repository url, etc.).
    root: The directory into which the repo expects to be checkout out.

  Returns a process exit status (0 on success, 1 on error or if a
  checkout already exists).
  """
  assert 'type' in spec
  checkout_type = spec['type']
  # The per-type configuration lives under e.g. 'gclient_git_spec'.
  checkout_spec = spec['%s_spec' % checkout_type]
  try:
    checkout = CheckoutFactory(checkout_type, options, checkout_spec, root)
  except KeyError:
    return 1
  if checkout.exists():
    # Refuse to touch an existing checkout; fetch only creates new ones.
    print 'You appear to already have a checkout. "fetch" is used only'
    print 'to get new checkouts. Use "gclient sync" to update the checkout.'
    print
    print 'Fetch also does not yet deal with partial checkouts, so if fetch'
    print 'failed, delete the checkout and start over (crbug.com/230691).'
    return 1
  return checkout.init()
def main():
  # Parse args, resolve the recipe into a concrete spec, then check out.
  options, recipe, props = handle_args(sys.argv)
  spec, root = run_recipe_fetch(recipe, props)
  return run(options, spec, root)


if __name__ == '__main__':
  sys.exit(main())
|
bsd-3-clause
|
fake-name/ReadableWebProxy
|
WebMirror/management/rss_parser_funcs/feed_parse_extractCloudManor.py
|
987
|
def extractCloudManor(item):
    """Map a Cloud Manor feed item to a release message.

    Returns None for non-chapter/preview posts, a release message for a
    recognized series tag, or False when no tag matches.
    """
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    if not (chp or vol or frag) or 'preview' in item['title'].lower():
        return None

    # (tag, release-series-name) pairs, checked in order.
    series_by_tag = [
        ('Book of Sun & Moon Swordplay', 'Book of Sun & Moon Swordplay'),
        ('It is a Straight Road', 'It is a Straight Road'),
        ('Pursuit of Liao Yue Murderer', 'Pursuit of Liao Yue Murderer'),
        ('Rice Pot Next Door', 'Rice Pot Next Door'),
        # Tag and canonical series name intentionally differ here.
        ('Man from Wild South', 'Man from the Wild South'),
    ]
    for tag, series in series_by_tag:
        if tag in item['tags']:
            return buildReleaseMessageWithType(
                item, series, vol, chp, frag=frag, postfix=postfix)
    return False
|
bsd-3-clause
|
kodiers/yii2build
|
frontend/controllers/ProfileController.php
|
5857
|
<?php
namespace frontend\controllers;
use Yii;
use frontend\models\Profile;
use frontend\models\search\ProfileSearch;
use yii\web\Controller;
use yii\web\NotFoundHttpException;
use yii\filters\VerbFilter;
use common\models\PermissionHelpers;
use common\models\RecordHelpers;
/**
* ProfileController implements the CRUD actions for Profile model.
*/
class ProfileController extends Controller
{
    /**
     * Access control: every action requires a logged-in user ('@'); the
     * second access filter additionally requires the user's status to be
     * 'Active'.  Delete is restricted to POST requests.
     */
    public function behaviors()
    {
        return [
            'access' => [
                'class' => \yii\filters\AccessControl::className(),
                'only' => ['index', 'view', 'create', 'update', 'delete'],
                'rules' => [
                    [
                        'actions' => ['index', 'view', 'create', 'update', 'delete'],
                        'allow' => true,
                        'roles' => ['@'],
                    ],
                ],
            ],
            'access2' => [
                'class' => \yii\filters\AccessControl::className(),
                'only' => ['index', 'view', 'create', 'update', 'delete'],
                'rules' => [
                    [
                        'actions' => ['index', 'view', 'create', 'update', 'delete'],
                        'allow' => true,
                        'roles' => ['@'],
                        'matchCallback' => function ($rule, $action) {
                            return PermissionHelpers::requireStatus('Active');
                        }
                    ],
                ],
            ],
            'verbs' => [
                'class' => VerbFilter::className(),
                'actions' => [
                    'delete' => ['post'],
                ],
            ],
        ];
    }

    /**
     * Shows the current user's profile, or redirects to the create form
     * when they have none yet.
     * @return mixed
     */
    public function actionIndex()
    {
        if ($already_exists = RecordHelpers::userHas('profile')) {
            return $this->render('view', ['model' => $this->findModel($already_exists)]);
        } else {
            return $this->redirect(['create']);
        }
    }

    /**
     * Displays the current user's profile, or redirects to the create form
     * when they have none.  Takes no id: the profile is always looked up
     * from the logged-in user.
     * @return mixed
     */
    public function actionView()
    {
        if ($already_exists = RecordHelpers::userHas('profile')) {
            return $this->render('view', ['model' => $this->findModel($already_exists),]);
        } else {
            return $this->redirect(['create']);
        }
    }

    /**
     * Creates a profile for the current user.
     * Only one profile per user: if one already exists it is shown instead.
     * On successful save the browser is redirected to the 'view' page.
     * @return mixed
     */
    public function actionCreate()
    {
        $model = new Profile();
        // Profiles are always owned by the logged-in user.
        $model->user_id = \Yii::$app->user->identity->id;
        if ($already_exists = RecordHelpers::userHas('profile')) {
            return $this->render('view', ['model' => $this->findModel($already_exists),]);
        } elseif ($model->load(Yii::$app->request->post()) && $model->save()) {
            return $this->redirect(['view']);
        } else {
            return $this->render('create', ['model' => $model]);
        }
    }

    /**
     * Updates the current user's profile.
     * Requires the user to be upgraded to 'Paid'.
     * @return mixed
     * @throws NotFoundHttpException if the user has no profile
     */
    public function actionUpdate()
    {
        PermissionHelpers::requireUpgradeTo('Paid');
        if ($model = Profile::find()->where(['user_id' => Yii::$app->user->identity->id])->one()) {
            if ($model->load(Yii::$app->request->post()) && $model->save()) {
                return $this->redirect(['view']);
            } else {
                return $this->render('update', ['model' => $model]);
            }
        } else {
            throw new NotFoundHttpException('No such profile');
        }
    }

    /**
     * Deletes the current user's profile and redirects to the site index.
     * @param integer $id route parameter; NOTE(review): ignored -- the
     *     profile belonging to the logged-in user is always the one deleted.
     * @return mixed
     */
    public function actionDelete($id)
    {
        $model = Profile::find()->where(['user_id' => Yii::$app->user->identity->id])->one();
        $this->findModel($model->id)->delete();
        return $this->redirect(['site/index']);
    }

    /**
     * Finds the Profile model based on its primary key value.
     * If the model is not found, a 404 HTTP exception will be thrown.
     * @param integer $id
     * @return Profile the loaded model
     * @throws NotFoundHttpException if the model cannot be found
     */
    protected function findModel($id)
    {
        if (($model = Profile::findOne($id)) !== null) {
            return $model;
        } else {
            throw new NotFoundHttpException('The requested page does not exist.');
        }
    }
}
|
bsd-3-clause
|
vagovszky/console
|
module/Import/Module.php
|
275
|
<?php
/**
 * This file is placed here for compatibility with Zend Framework's
 * ModuleManager.  It allows usage of this module even without Composer.
 * The original Module.php lives in the 'src' directory in order to
 * respect PSR-0 autoloading.
 */
require_once __DIR__ . '/src/Import/Module.php';
|
bsd-3-clause
|
hugelgupf/u-root
|
cmds/core/ntpdate/ntpdate.go
|
2152
|
// Copyright 2016-2017 the u-root Authors. All rights reserved
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// ntpdate uses NTP to adjust the system clock.
package main
import (
"bufio"
"flag"
"fmt"
"log"
"os"
"strings"
"syscall"
"time"
"github.com/beevik/ntp"
)
var (
	config  = flag.String("config", "/etc/ntp.conf", "NTP config file.")
	verbose = flag.Bool("verbose", false, "Verbose output")
	// debug is a no-op unless -verbose is set, in which case main()
	// replaces it with log.Printf.
	debug = func(string, ...interface{}) {}
)

const (
	// fallback is queried when the config file cannot be read.
	fallback = "time.google.com"
)
// parseServers extracts the host from every "server <host> ..." line of an
// ntp.conf-style config.  Options such as iburst are ignored.
// TODO(ganshun): figure out what options we want to support.
func parseServers(r *bufio.Reader) []string {
	var uri []string
	debug("Reading config file")
	for {
		l, err := r.ReadString('\n')
		debug("%v", l)
		if w := strings.Fields(l); len(w) > 1 && w[0] == "server" {
			uri = append(uri, w[1])
		}
		if err != nil {
			// Processing before the break also handles a final line
			// that doesn't end in \n.
			break
		}
	}
	return uri
}
// getTime queries the servers in order and returns the first successful
// NTP response.  Right now we return on the first valid time; better
// heuristics could be implemented here.
func getTime(servers []string) (t time.Time, err error) {
	for _, server := range servers {
		debug("Getting time from %v", server)
		t, err = ntp.Time(server)
		if err != nil {
			debug("Error getting time: %v", err)
			continue
		}
		debug("Got time %v", t)
		return t, nil
	}
	err = fmt.Errorf("unable to get any time from servers %v", servers)
	return t, err
}
func main() {
	var servers []string
	flag.Parse()
	if *verbose {
		debug = log.Printf
	}
	debug("Reading NTP servers from config file: %v", *config)
	f, err := os.Open(*config)
	if err == nil {
		defer f.Close()
		servers = parseServers(bufio.NewReader(f))
		debug("Found %v servers", len(servers))
	} else {
		// Config unreadable: fall back to a single well-known server.
		log.Printf("Unable to open config file: %v\nFalling back to : %v", err, fallback)
		servers = []string{fallback}
	}
	t, err := getTime(servers)
	if err != nil {
		log.Fatalf("Unable to get time: %v", err)
	}
	// Set the system wall clock (typically requires elevated privileges).
	tv := syscall.NsecToTimeval(t.UnixNano())
	if err = syscall.Settimeofday(&tv); err != nil {
		log.Fatalf("Unable to set system time: %v", err)
	}
}
|
bsd-3-clause
|
zendframework/zend-loader
|
test/TestAsset/TestPlugins/Foo.php
|
390
|
<?php
/**
* @see https://github.com/zendframework/zend-loader for the canonical source repository
* @copyright Copyright (c) 2005-2018 Zend Technologies USA Inc. (https://www.zend.com)
* @license https://github.com/zendframework/zend-loader/blob/master/LICENSE.md New BSD License
*/
namespace ZendTest\Loader\TestAsset\TestPlugins;
/**
 * Intentionally empty plugin class used as a test asset.
 *
 * @group Loader
 */
class Foo
{
}
|
bsd-3-clause
|
wayfinder/Wayfinder-S60-Navigator
|
CPP/Shared/Nav2ErrorNo.cpp
|
2280
|
/*
Copyright (c) 1999 - 2010, Vodafone Group Services Ltd
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
* Neither the name of the Vodafone Group Services Ltd nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#define LANGUAGE_NO
#include "master.loc"
#include "Nav2Error.h"
#include "Nav2ErrorXX.h"
namespace isab {
namespace Nav2Error {
// Error table built via X-macro expansion of Nav2Error.master: each
// NAV2ERROR_LINE entry expands to an {error id, message text} element.
static const Nav2ErrorElement nav2ErrorVector[] = {
#define NAV2ERROR_LINE(symbol, id, txt) {ErrorNbr(id), txt},
#define NAV2ERROR_LINE_LAST(symbol, id, txt) {ErrorNbr(id), txt}
#include "Nav2Error.master"
#undef NAV2ERROR_LINE
#undef NAV2ERROR_LINE_LAST
};
// Expose the statically generated Norwegian error table to the base class.
Nav2ErrorTableNo::Nav2ErrorTableNo() : Nav2ErrorTable()
{
   m_table = nav2ErrorVector;
   // Element count: sizeof(array) / sizeof(element).  The previous code
   // derived the element size from a pointer difference between adjacent
   // elements, which computes the same value in a roundabout way.
   m_tableSize = sizeof(nav2ErrorVector) / sizeof(nav2ErrorVector[0]);
}
} /* namespace Nav2Error */
} /* namespace isab */
|
bsd-3-clause
|
kansey/site-weather
|
backend/views/site/error.php
|
630
|
<?php
/**
 * Generic error view rendered by the site error action.
 *
 * @var $this yii\web\View
 * @var $name string       error title (exception name)
 * @var $message string    exception message, HTML-encoded below
 * @var $exception Exception
 */
use yii\helpers\Html;

$this->title = $name;
?>
<div class="site-error">
<h1><?= Html::encode($this->title) ?></h1>
<div class="alert alert-danger">
<?= nl2br(Html::encode($message)) ?>
</div>
<p>
Произошла ошибка во время обработки вашего запроса.
</p>
<p>
Пожалуйста, свяжитесь с нами, если вы думаете, что это ошибка сервера. Спасибо.
</p>
</div>
|
bsd-3-clause
|
sunlightlabs/sitegeist
|
sitegeist/data/nhgis/loading.py
|
742
|
import csv
import os
from django.conf import settings
from sitegeist.data.nhgis.models import Tract
def load():
    """Reload the Tract table from the cached NHGIS tract time-series CSV."""
    Tract.objects.all().delete()
    path = os.path.join(
        settings.SITEGEIST['DATA_CACHE'], 'nhgis', 'nhgis0001_ts_tract.csv')
    with open(path) as infile:
        reader = csv.DictReader(infile)
        # Skip the first data row as well as the DictReader header row --
        # presumably a second, descriptive header line in the NHGIS export;
        # TODO confirm against the actual file.
        next(reader, None)
        for rec in reader:
            Tract.objects.create(
                state=rec['STATEA'],
                county=rec['COUNTYA'],
                tract=rec['TRACT'],
                year=rec['YEAR'],
                units=int(rec['A41AA']),
            )


if __name__ == "__main__":
    load()
|
bsd-3-clause
|
vana14/code_example
|
web/index-test.php
|
886
|
<?php
// Web entry script for the *test* environment (Codeception API tests).
defined('APPLICATION_DIR') || define('APPLICATION_DIR', __DIR__ . '/..');
require(__DIR__ . '/../vendor/autoload.php');
require(__DIR__ . '/../helpers/helpers.php');
load_environment('test');
// NOTE: Make sure this file is not accessible when deployed to production
if (!env('TESTS_IGNORE_REMOTE') && !in_array(@$_SERVER['REMOTE_ADDR'], ['127.0.0.1', '::1'])) {
    die('You are not allowed to access this file.');
}
defined('YII_DEBUG') || define('YII_DEBUG', true);
defined('YII_ENV') || define('YII_ENV', 'test');
require(__DIR__ . '/../vendor/yiisoft/yii2/Yii.php');
$config = require(__DIR__ . '/../tests/codeception/config/api.php');
// c3 enables remote code-coverage collection for Codeception.
require(__DIR__ . '/../vendor/codeception/c3/c3.php');
// Allow tests to bypass API authentication via the IgnoreAuthentication header.
if (array_key_exists('HTTP_IGNOREAUTHENTICATION', $_SERVER)) {
    defined('API_IGNORE_AUTH') || define('API_IGNORE_AUTH', true);
}
(new yii\web\Application($config))->run();
|
bsd-3-clause
|
orobardet/miranda
|
module/Application/src/Application/Model/BaseAttributesTrait.php
|
653
|
<?php
namespace Application\Model;
/**
 * Free-form name/value attribute bag mixed into model classes.
 */
trait BaseAttributesTrait
{
    /** @var array attribute name => value */
    protected $baseAttributes = array();

    /**
     * Set (or overwrite) an attribute.
     *
     * @param string $name
     * @param mixed  $value
     */
    public function setBaseAttribute($name, $value)
    {
        $this->baseAttributes[$name] = $value;
    }

    /**
     * @param string $name
     * @return bool true when the attribute exists (even with a null value)
     */
    public function hasBaseAttribute($name)
    {
        return array_key_exists($name, $this->baseAttributes);
    }

    /**
     * Get an attribute value, or $default when it is not set.
     *
     * @param string $name
     * @param mixed  $default
     * @return mixed
     */
    public function getBaseAttribute($name, $default = null)
    {
        // Reuse hasBaseAttribute() so the existence check lives in one place
        // (the original duplicated the array_key_exists() call).
        return $this->hasBaseAttribute($name) ? $this->baseAttributes[$name] : $default;
    }

    /**
     * Remove an attribute; a no-op when it does not exist.
     *
     * @param string $name
     */
    public function unsetBaseAttribute($name)
    {
        // unset() on a missing key is already a silent no-op in PHP,
        // so the previous existence guard was redundant.
        unset($this->baseAttributes[$name]);
    }
}
|
bsd-3-clause
|
evanchsa/jing-trang
|
mod/rng-validate/src/main/com/thaiopensource/relaxng/impl/AttributePattern.java
|
2509
|
package com.thaiopensource.relaxng.impl;
import org.xml.sax.Locator;
import org.xml.sax.SAXException;
/**
 * RELAX NG {@code attribute} pattern: matches an attribute whose name is in
 * {@code nameClass} and whose value matches {@code p}.
 */
class AttributePattern extends Pattern {
  private final NameClass nameClass;
  /** Pattern the attribute's value must match. */
  private final Pattern p;
  /** Source location of the attribute definition, for error reporting. */
  private final Locator loc;

  AttributePattern(NameClass nameClass, Pattern value, Locator loc) {
    // Attributes are not nullable and contribute empty content.
    super(false,
          EMPTY_CONTENT_TYPE,
          combineHashCode(ATTRIBUTE_HASH_CODE,
                          nameClass.hashCode(),
                          value.hashCode()));
    this.nameClass = nameClass;
    this.p = value;
    this.loc = loc;
  }

  Pattern expand(SchemaPatternBuilder b) {
    // Rebuild only if expanding the value pattern changed it.
    Pattern ep = p.expand(b);
    if (ep != p)
      return b.makeAttribute(nameClass, ep, loc);
    else
      return this;
  }

  /**
   * Enforces the RELAX NG restrictions on where attributes may occur;
   * throws with a message key naming the violated restriction.
   */
  void checkRestrictions(int context, DuplicateAttributeDetector dad, Alphabet alpha)
    throws RestrictionViolationException {
    switch (context) {
    case START_CONTEXT:
      throw new RestrictionViolationException("start_contains_attribute");
    case ELEMENT_CONTEXT:
      if (nameClass.isOpen())
        throw new RestrictionViolationException("open_name_class_not_repeated");
      break;
    case ELEMENT_REPEAT_GROUP_CONTEXT:
      throw new RestrictionViolationException("one_or_more_contains_group_contains_attribute");
    case ELEMENT_REPEAT_INTERLEAVE_CONTEXT:
      throw new RestrictionViolationException("one_or_more_contains_interleave_contains_attribute");
    case LIST_CONTEXT:
      throw new RestrictionViolationException("list_contains_attribute");
    case ATTRIBUTE_CONTEXT:
      throw new RestrictionViolationException("attribute_contains_attribute");
    case DATA_EXCEPT_CONTEXT:
      throw new RestrictionViolationException("data_except_contains_attribute");
    }
    dad.addAttribute(nameClass);
    try {
      p.checkRestrictions(ATTRIBUTE_CONTEXT, null, null);
    }
    catch (RestrictionViolationException e) {
      // Attach this attribute's location if the nested violation lacks one.
      e.maybeSetLocator(loc);
      throw e;
    }
  }

  boolean samePattern(Pattern other) {
    if (!(other instanceof AttributePattern))
      return false;
    AttributePattern ap = (AttributePattern)other;
    // NOTE(review): the value pattern is compared by identity (==), which
    // presumes patterns are interned by the builder -- confirm.
    return nameClass.equals(ap.nameClass)&& p == ap.p;
  }

  void checkRecursion(int depth) throws SAXException {
    p.checkRecursion(depth);
  }

  void accept(PatternVisitor visitor) {
    visitor.visitAttribute(nameClass, p);
  }

  Object apply(PatternFunction f) {
    return f.caseAttribute(this);
  }

  Pattern getContent() {
    return p;
  }

  NameClass getNameClass() {
    return nameClass;
  }

  Locator getLocator() {
    return loc;
  }
}
|
bsd-3-clause
|
imply/chuu
|
chrome/test/data/extensions/platform_apps/web_view/navigation/embedder.js
|
7739
|
// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// Shared state for the webview navigation tests.
var embedder = {};
embedder.tests = {};
embedder.baseGuestURL = '';
embedder.guestURL = '';

// Entry point invoked by the C++ test harness to start a named test.
window.runTest = function(testName) {
  if (!embedder.test.testList[testName]) {
    console.log('Incorrect testName: ' + testName);
    embedder.test.fail();
    return;
  }

  // Run the test.
  embedder.test.testList[testName]();
};
// window.* exported functions end.
/**
 * Creates a fresh 100x100 <webview> inside the tag container and returns
 * it; fails the test if the element could not be created.
 * @private
 */
embedder.setUpGuest_ = function() {
  document.querySelector('#webview-tag-container').innerHTML =
      '<webview style="width: 100px; height: 100px;"></webview>';
  var webview = document.querySelector('webview');
  if (!webview) {
    embedder.test.fail('No <webview> element created');
  }
  return webview;
};
// Builds a data: URL for a guest page whose <title> is |title|.
embedder.getHTMLForGuestWithTitle_ = function(title) {
  return 'data:text/html,' +
      '<html><head><title>' + title + '</title></head>' +
      '<body>hello world</body>' +
      '</html>';
};
// Minimal assertion helpers; pass/fail is reported to the C++ harness
// via chrome.test.sendMessage.
embedder.test = {
  succeed: function() {
    chrome.test.sendMessage('DoneNavigationTest.PASSED');
  },

  fail: function() {
    chrome.test.sendMessage('DoneNavigationTest.FAILED');
  },

  assertEq: function(a, b) {
    if (a != b) {
      console.log('assertion failed: ' + a + ' != ' + b);
      embedder.test.fail();
    }
  },

  assertTrue: function(condition) {
    if (!condition) {
      console.log('assertion failed: true != ' + condition);
      embedder.test.fail();
    }
  },

  assertFalse: function(condition) {
    if (condition) {
      console.log('assertion failed: false != ' + condition);
      embedder.test.fail();
    }
  }
};
// Tests begin.
function testNavigation() {
var webview = embedder.setUpGuest_();
var step = 1;
console.log('run step: ' + step);
// Verify that canGoBack and canGoForward work as expected.
var runStep2 = function() {
step = 2;
console.log('run step: ' + step);
webview.executeScript({
code: 'document.title'
}, function(results) {
embedder.test.assertEq('step1', results[0]);
embedder.test.assertFalse(webview.canGoBack());
embedder.test.assertFalse(webview.canGoForward());
webview.src = embedder.getHTMLForGuestWithTitle_('step2');
});
};
// Verify that canGoBack and canGoForward work as expected.
var runStep3 = function() {
step = 3;
console.log('run step: ' + step);
webview.executeScript({
code: 'document.title'
}, function(results) {
embedder.test.assertEq('step2', results[0]);
embedder.test.assertTrue(webview.canGoBack());
embedder.test.assertFalse(webview.canGoForward());
webview.back();
});
};
// Verify that webview.back works as expected.
var runStep4 = function() {
step = 4;
console.log('run step: ' + step);
webview.executeScript({
code: 'document.title'
}, function(results) {
embedder.test.assertEq('step1', results[0]);
embedder.test.assertFalse(webview.canGoBack());
embedder.test.assertTrue(webview.canGoForward());
webview.forward();
});
};
// Verify that webview.forward works as expected.
var runStep5 = function() {
step = 5;
console.log('run step: ' + step);
webview.executeScript({
code: 'document.title'
}, function(results) {
embedder.test.assertEq('step2', results[0]);
embedder.test.assertTrue(webview.canGoBack());
embedder.test.assertFalse(webview.canGoForward());
webview.src = embedder.getHTMLForGuestWithTitle_('step3');
});
};
// Navigate one more time to allow for interesting uses of webview.go.
var runStep6 = function() {
step = 6;
console.log('run step: ' + step);
webview.executeScript({
code: 'document.title'
}, function(results) {
embedder.test.assertEq('step3', results[0]);
embedder.test.assertTrue(webview.canGoBack());
embedder.test.assertFalse(webview.canGoForward());
webview.go(-2);
});
};
// Verify that webview.go works as expected. Test the forward key.
var runStep7 = function() {
step = 7;
console.log('run step: ' + step);
webview.executeScript({
code: 'document.title'
}, function(results) {
embedder.test.assertEq('step1', results[0]);
embedder.test.assertFalse(webview.canGoBack());
embedder.test.assertTrue(webview.canGoForward());
embedder.test.succeed();
});
};
var onLoadStop = function(e) {
switch (step) {
case 1:
runStep2();
break;
case 2:
runStep3();
break;
case 3:
runStep4();
break;
case 4:
runStep5();
break;
case 5:
runStep6();
break;
case 6:
runStep7();
break;
default:
console.log('unexpected step: ' + step);
embedder.test.fail();
}
};
webview.addEventListener('loadstop', onLoadStop);
webview.src = embedder.getHTMLForGuestWithTitle_('step1');
}
// Like testNavigation, but history is driven by browser back/forward keys:
// the C++ side injects the key events after receiving the
// 'ReadyForBackKey' / 'ReadyForForwardKey' messages sent below.
function testBackForwardKeys() {
  var webview = embedder.setUpGuest_();
  var step = 1;
  console.log('run step: ' + step);

  // Verify that canGoBack and canGoForward work as expected.
  var runStep2 = function() {
    step = 2;
    console.log('run step: ' + step);
    webview.executeScript({
      code: 'document.title'
    }, function(results) {
      embedder.test.assertEq('step1', results[0]);
      embedder.test.assertFalse(webview.canGoBack());
      embedder.test.assertFalse(webview.canGoForward());
      webview.src = embedder.getHTMLForGuestWithTitle_('step2');
    });
  };

  // Verify that webview.go works as expected. Test the forward key.
  var runStep3 = function() {
    step = 3;
    console.log('run step: ' + step);
    webview.executeScript({
      code: 'document.title'
    }, function(results) {
      embedder.test.assertEq('step2', results[0]);
      embedder.test.assertTrue(webview.canGoBack());
      embedder.test.assertFalse(webview.canGoForward());
      // Focus the webview to make sure it gets the forward key.
      webview.focus();
      chrome.test.sendMessage('ReadyForBackKey');
    });
  };

  var runStep4 = function() {
    step = 4;
    console.log('run step: ' + step);
    webview.executeScript({
      code: 'document.title'
    }, function(results) {
      embedder.test.assertEq('step1', results[0]);
      embedder.test.assertFalse(webview.canGoBack());
      embedder.test.assertTrue(webview.canGoForward());
      chrome.test.sendMessage('ReadyForForwardKey');
    });
  };

  var runStep5 = function() {
    step = 5;
    console.log('run step: ' + step);
    webview.executeScript({
      code: 'document.title'
    }, function(results) {
      embedder.test.assertEq('step2', results[0]);
      embedder.test.assertTrue(webview.canGoBack());
      embedder.test.assertFalse(webview.canGoForward());
      embedder.test.succeed();
    });
  };

  // Each guest load advances the state machine by one step.
  var onLoadStop = function(e) {
    switch (step) {
      case 1:
        runStep2();
        break;
      case 2:
        runStep3();
        break;
      case 3:
        runStep4();
        break;
      case 4:
        runStep5();
        break;
      default:
        console.log('unexpected step: ' + step);
        embedder.test.fail();
    }
  };

  webview.addEventListener('loadstop', onLoadStop);
  webview.src = embedder.getHTMLForGuestWithTitle_('step1');
}
// Registry of tests runnable via window.runTest().
embedder.test.testList = {
  'testNavigation': testNavigation,
  'testBackForwardKeys': testBackForwardKeys
};

// Tell the C++ harness the app has launched; it then calls runTest().
onload = function() {
  chrome.test.getConfig(function(config) {
    chrome.test.sendMessage("Launched");
  });
};
|
bsd-3-clause
|
frankpaul142/optica-los-andes
|
models/Local.php
|
1576
|
<?php
namespace app\models;
use Yii;
/**
* This is the model class for table "local".
*
* @property integer $id
* @property integer $city_id
* @property string $name
* @property string $address
* @property string $schedule
* @property string $phone
* @property string $cellphone
* @property string $maps
* @property string $status
*
* @property City $city
*/
class Local extends \yii\db\ActiveRecord
{
    /**
     * @inheritdoc
     */
    public static function tableName()
    {
        return 'local';
    }

    /**
     * @inheritdoc
     * Lengths mirror the column sizes of the 'local' table.
     */
    public function rules()
    {
        return [
            [['city_id', 'name', 'address', 'schedule'], 'required'],
            [['city_id'], 'integer'],
            [['status'], 'string'],
            [['name'], 'string', 'max' => 150],
            [['address', 'schedule', 'maps'], 'string', 'max' => 255],
            [['phone'], 'string', 'max' => 9],
            [['cellphone'], 'string', 'max' => 10]
        ];
    }

    /**
     * @inheritdoc
     */
    public function attributeLabels()
    {
        return [
            'id' => 'ID',
            'city_id' => 'City ID',
            'name' => 'Name',
            'address' => 'Address',
            'schedule' => 'Schedule',
            'phone' => 'Phone',
            'cellphone' => 'Cellphone',
            'maps' => 'Maps',
            'status' => 'Status',
        ];
    }

    /**
     * Relation to the owning city (local.city_id -> city.id).
     * @return \yii\db\ActiveQuery
     */
    public function getCity()
    {
        return $this->hasOne(City::className(), ['id' => 'city_id']);
    }
}
|
bsd-3-clause
|
tsungtingkuo/stakenet
|
src/stock/edge/StockEdgeValueTransformer.java
|
251
|
package stock.edge;
import org.apache.commons.collections15.Transformer;
/**
 * Commons-Collections transformer mapping a {@code StockEdge} to its
 * numeric value (e.g. for use as an edge weight/label supplier).
 */
public class StockEdgeValueTransformer implements Transformer<StockEdge, Number> {

	@Override
	public Number transform(StockEdge e) {
		// Delegate directly to the edge's stored value.
		return e.getValue();
	}
}
|
bsd-3-clause
|
anthonykoerber/strand
|
src/mm-docs-shell/mm-docs-shell.js
|
4203
|
/**
* @license
* Copyright (c) 2015 MediaMath Inc. All rights reserved.
* This code may only be used under the BSD style license found at http://mediamath.github.io/strand/LICENSE.txt
*/
(function (scope) {

  // Documentation-site shell: fixed side nav on wide viewports, slide-in
  // nav panel with a click blocker on viewports narrower than minWidth px.
  scope.DocsShell = Polymer({

    is: 'mm-docs-shell',

    behaviors: [
      StrandTraits.Stylable,
      StrandTraits.WindowNotifier
    ],

    properties: {
      // *Calc properties hold computed CSS lengths (number + unit) and are
      // rewritten by showNav()/hideNav().
      mobileHeaderHeightCalc: {
        type: Number,
        value: 0
      },
      logoAreaHeightCalc: {
        type: Number,
        value: 0
      },
      paddingLeftCalc: {
        type: Number,
        value: 0
      },
      navLeftCalc: {
        type: Number,
        value: 0
      },
      // CSS unit appended to all numeric layout values.
      unit: {
        type: String,
        value: "em"
      },
      mobileHeader: {
        type: String,
        value: "Docs"
      },
      menuIconColor: {
        type: String,
        value: "#333333"
      },
      menuIconWidth: {
        type: Number,
        value: 20
      },
      menuIconHeight: {
        type: Number,
        value: 20
      },
      logoAreaHeight: {
        type: Number,
        value: 8.25
      },
      navWidth: {
        type: Number,
        value: 16.563
      },
      navLeft: {
        type: Number,
        value: 0
      },
      mobileHeaderHeight: {
        type: Number,
        value: 3.75
      },
      blockerOpacity: {
        type: Number,
        value: 0.33
      },
      // Viewport width (px) below which the nav collapses to mobile mode.
      minWidth: {
        type: Number,
        value: 768
      }
    },

    ready: function() {
      // set some defaults:
      this.logoAreaHeightCalc = this.logoAreaHeight + this.unit;
      // start with correct positioning:
      if (this.shouldHideNav) {
        this.paddingLeftCalc = 0 + this.unit;
        this.navLeftCalc = -this.navWidth + this.unit;
        this.mobileHeaderHeightCalc = this.mobileHeaderHeight + this.unit;
      } else {
        this.paddingLeftCalc = this.navWidth + this.unit;
        this.navLeftCalc = 0 + this.unit;
        this.mobileHeaderHeightCalc = 0 + this.unit;
      }
    },

    domReady: function() {
      this.resize();
    },

    // Hamburger-menu tap (mobile): open the nav.
    _menuTap: function(e) {
      console.log("_menuTap: ", e);
      e.preventDefault();
      this.showNav();
    },

    // Tap on the translucent blocker: close the nav.
    _blockerTap: function(e) {
      console.log("_blockerTap: ", e);
      e.preventDefault();
      this.hideNav();
    },

    // Slides the nav in; in mobile mode also shows the blocker and locks
    // body scrolling.
    showNav: function() {
      this.navLeftCalc = (0 + this.unit);
      this.paddingLeftCalc = !this.shouldHideNav ? (this.navWidth + this.unit) : (0 + this.unit);
      this.mobileHeaderHeightCalc = !this.shouldHideNav ? (0 + this.unit) : (this.mobileHeaderHeight + this.unit);
      this.$.blocker.style.visibility = this.shouldHideNav ? "visible" : "hidden";
      this.$.blocker.style.opacity = this.shouldHideNav ? this.blockerOpacity : 0;
      document.body.style.overflow = this.shouldHideNav ? "hidden" : "auto";
    },

    // Slides the nav out (mobile) or pins it open (desktop); always hides
    // the blocker and restores scrolling.
    hideNav: function() {
      this.navLeftCalc = this.shouldHideNav ? (-this.navWidth + this.unit) : (0 + this.unit);
      this.paddingLeftCalc = this.shouldHideNav ? (0 + this.unit) : (this.navWidth + this.unit);
      this.mobileHeaderHeightCalc = this.shouldHideNav ? (this.mobileHeaderHeight + this.unit) : (0 + this.unit);
      this.$.blocker.style.visibility = "hidden";
      this.$.blocker.style.opacity = 0;
      document.body.style.overflow = "auto";
    },

    // True when the viewport is narrower than minWidth (mobile mode).
    get shouldHideNav() {
      return window.innerWidth < this.minWidth;
    },

    resizeHandler: function(e) {
      this.debounce("resize", this.resize);
    },

    // Re-applies the correct nav state after a viewport resize.
    resize: function() {
      if (this.shouldHideNav) {
        this.async(this.hideNav);
      } else {
        this.async(this.showNav);
      }
    },

    // Styling
    _updateMobileHead: function(mobileHeaderHeightCalc) {
      return this.styleBlock({
        height: mobileHeaderHeightCalc
      });
    },

    _updateMobileHeadBox: function(mobileHeaderHeight, unit) {
      return this.styleBlock({
        height: mobileHeaderHeight + unit
      });
    },

    _updateMainContent: function(paddingLeftCalc) {
      return this.styleBlock({
        paddingLeft: paddingLeftCalc
      });
    },

    _updateMainNav: function(navWidth, unit, navLeftCalc) {
      return this.styleBlock({
        width: navWidth + unit,
        left: navLeftCalc
      });
    },

    _updateLogoArea: function(navWidth, unit, logoAreaHeight) {
      return this.styleBlock({
        width: navWidth + unit,
        height: logoAreaHeight + unit
      });
    },

    _updateNavContent: function(logoAreaHeight, unit) {
      return this.styleBlock({
        height: "calc(100% - " + logoAreaHeight + unit + ")"
      });
    }
  });

})(window.Strand = window.Strand || {});
|
bsd-3-clause
|
wayfinder/Wayfinder-CppCore-v2
|
cpp/Targets/Nav2API/Shared/src/Nav2APIImpl.cpp
|
40928
|
/*
Copyright (c) 1999 - 2010, Vodafone Group Services Ltd
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
* Neither the name of the Vodafone Group Services Ltd nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#define CORE_LOGGING_MODULE_NAME "Nav2APIImpl"
//#define CORE_LOGGING_MODULE_LEVEL CL_ALL_FILE
#include "CoreDebugPrint.h"
#include "FileSystem.h"
#include "LogFiles.h"
#include "Nav2APIImpl.h"
#include "Nav2.h"
#include "LanguageHelper.h"
#include "MC2SimpleString.h"
#include "WFAPISync.h"
#include "Interface.h"
#include "InterfaceImpl.h"
#include "StartupData.h"
#include "Nav2StatusListener.h"
#include "AsynchronousStatus.h"
#include "Nav2APIChannel.h"
#include "TextLanguage.h"
#include "ReadyToRunListener.h"
#include "DistanceUnit.h"
#include "ProgramVersion.h"
// Nav2
#include "Thread.h"
#include "Monitor.h"
#include "Mutex.h"
#include "GuiProtMess.h"
#include "GuiProtEnums.h"
#include "GuiParameterEnums.h"
#include "GuiProtRouteMess.h"
#include "Nav2Logging.h"
// Status codes
#include "RouteStatusCode.h"
#include "NetworkStatusCode.h"
#include "FavouriteStatusCode.h"
#include "LocationStatusCode.h"
// Interfaces
#include "RouteInterface.h"
#include "NavigationInterface.h"
#include "NetworkInterface.h"
#include "LocationInterface.h"
#include "FavouriteInterface.h"
#include "SettingsInterface.h"
#include "SearchInterface.h"
#include "ImageInterface.h"
#include "BillingInterface.h"
#include "TunnelInterface.h"
// Impls
#include "RouteInterfaceImpl.h"
#include "NavigationInterfaceImpl.h"
#include "NetworkInterfaceImpl.h"
#include "LocationInterfaceImpl.h"
#include "FavouriteInterfaceImpl.h"
#include "SettingsInterfaceImpl.h"
#include "MapRequesterInterfaceImpl.h"
#include "SearchInterfaceImpl.h"
#include "CategoryTreeInterfaceImpl.h"
#include "OneSearchInterfaceImpl.h"
#include "ImageInterfaceImpl.h"
#include "BillingInterfaceImpl.h"
#include "TunnelInterfaceImpl.h"
#include <memory>
//#include <iostream>
#include <assert.h>
#include <ctype.h>
#include <string>
#include "MapLibAPI.h"
#include "MapLibStartupListener.h"
#include "ConfigInterface.h"
#include "DetailedConfigInterface.h"
#include "MapLib.h"
#include "RouteID.h"
#include "QueueThread.h"
#include "CoreFunctionPrologue.h"
#include "NotificationCenter.h"
using namespace std;
namespace WFAPI {
// Constructor: wires up all interface facades around this implementation.
// Heavy-weight startup (Nav2 itself, channels, session data, the queue
// thread) is deferred to start(); everything NULL-initialized here is
// created there.
// Message ids start at 1 and advance by 2 (GUI-originated messages use
// odd ids — see sendGuiMessage); request ids start at 1.
Nav2APIImpl::Nav2APIImpl(Nav2API* api)
   : m_nextRequestID(1), m_nextMessageID(1),m_nav2(NULL), m_mapLibAPI(NULL),
     m_guiChannel(NULL), // Set this when Nav2 exists...
     m_bluetoothChannel(NULL),
     m_nbrSynchonous(0),
     m_favouriteInterface(new FavouriteInterface(this)),
     m_imageInterface(new ImageInterface(this)),
     m_locationInterface(new LocationInterface(this)),
     m_navigationInterface(new NavigationInterface(this)),
     m_networkInterface(new NetworkInterface(this)),
     m_routeInterface(new RouteInterface(this)),
     m_searchInterface(new SearchInterface(this)),
     m_settingsInterface(new SettingsInterface(this)),
     m_billingInterface(new BillingInterface(this)),
     m_tunnelInterface(new TunnelInterface(this)),
     m_mapRequesterInterfaceImpl(new MapRequesterInterfaceImpl(this)),
     m_sessionData(NULL),
     m_readyToRunListener(NULL),
     m_queueThread(NULL),
     m_shutDown(false),
     m_mutex(new isab::Mutex()),
     m_api(api),
     m_statusNotifier(NULL)
{
   coreprintln_init();
   m_notificationCenter = new NotificationCenter;
   // The map requester needs network access through the network interface.
   m_mapRequesterInterfaceImpl->setNetworkInterfaceImpl(m_networkInterface->getImpl());
   // Register the "fire and forget" message types (no reply tracking).
   initNoReplyMessageSet();
   // All category parameter tags start out as not-yet-received.
   resetCategoryTags();
}
// Destructor: releases everything this object owns. Assumes stop() /
// deleteNav2() have already run (m_nav2 is not deleted here).
Nav2APIImpl::~Nav2APIImpl()
{
//   coretracefunc();
   coreprintln_deinit();
   // (Use auto_ptr?)
   delete m_readyToRunListener;
   delete m_guiChannel;
   delete m_bluetoothChannel;
   delete m_favouriteInterface;
   delete m_imageInterface;
   delete m_locationInterface;
   delete m_navigationInterface;
   delete m_networkInterface;
   delete m_routeInterface;
   delete m_searchInterface;
   delete m_settingsInterface;
   delete m_billingInterface;
   delete m_tunnelInterface;
   delete m_sessionData;
   delete m_mapRequesterInterfaceImpl;
   delete m_mutex;
   delete m_queueThread;
   delete m_notificationCenter;
   delete m_statusNotifier;
}
// Populates m_noReplyMessages with the message types that are sent
// "fire and forget": nav2Request skips the request-information
// bookkeeping for these, so no reply is ever waited for or matched.
void
Nav2APIImpl::initNoReplyMessageSet()
{
   // Insert the messages that should be fire and forget, we should not
   // wait for a reply for these messages.
   m_noReplyMessages.insert(isab::GuiProtEnums::PREPARE_SOUNDS_REPLY);
   m_noReplyMessages.insert(isab::GuiProtEnums::PLAY_SOUNDS_REPLY);
   m_noReplyMessages.insert(isab::GuiProtEnums::CONNECT_GPS);
   m_noReplyMessages.insert(isab::GuiProtEnums::DISCONNECT_GPS);
   m_noReplyMessages.insert(isab::GuiProtEnums::INVALIDATE_ROUTE);
}
// Dispatch point for every reply Nav2 sends up on the GUI channel.
//
// Routing rules:
//  * GET_MULTI_VECTOR_MAP_REPLY is queued on m_mapLibMsgs for MapLib.
//  * A reply to a synchronous request is delivered at once; when the last
//    outstanding synchronous request completes, asynchronous replies
//    stored in m_msgQueue in the meantime are flushed.
//  * Other replies are delivered asynchronously, queued behind an
//    outstanding synchronous request, or parked in m_msgs for a later
//    doCallbacks() when a ReadyToRunListener is installed.
//  * Messages with no registered request go to unsolicitedReply().
//
// m_mutex is deliberately released around every callback (ready(),
// receiveAsynchronousReply(), unsolicitedReply()) because callbacks may
// re-enter this API and would otherwise deadlock on the mutex.
void
Nav2APIImpl::nav2Reply(isab::Buffer& buf)
{
   //nav2log << "Nav2APIImpl::nav2Reply Starts. readPos " << buf.getReadPos() << endl;
   buf.setReadPos(buf.getReadPos() + 4); // Start after version and length
   //nav2log << "Nav2APIImpl::nav2Reply After setReadPos. readPos " << buf.getReadPos() << endl;
   auto_ptr<isab::GuiProtMess> guiProtMess(
      isab::GuiProtMess::createMsgFromBuf(&buf));
   if (guiProtMess.get() == NULL) {
      nav2log << "Nav2APIImpl::nav2Reply NULL GuiProtMess." << endl;
      // Safety for unlikely cases
      return;
   }
   WFAPISync sync(m_mutex);
   if (guiProtMess->getMessageType() ==
       isab::GuiProtEnums::GET_MULTI_VECTOR_MAP_REPLY) {
      // MapLib first!
      m_mapLibMsgs.push_back(guiProtMess.release());
      if (m_readyToRunListener != NULL) {
         // Possible callbacks from ready call leads to mutex unlocking.
         m_mutex->unlock();
         m_readyToRunListener->ready();
         m_mutex->lock();
      } else {
         nav2log << "Got premature maplib message. Storing it for later."
                 << endl;
      }
      // Else, wait for connectMapLib to be called and call ready() from there.
      return;
   }
   // Match the reply to its originating request via the GUI message id.
   wf_uint16 guiProtID = guiProtMess->getMessageID();
   RequestInformationMap::iterator it = m_requestInformation.find(
      guiProtID);
   //nav2log << "Nav2APIImpl::nav2Reply guiID " << guiProtID
   //        << " found? " << (it != m_requestInformation.end()) << endl;
   if (it != m_requestInformation.end()) {
      nav2log << "Nav2APIImpl::nav2Reply reqID "
              << it->second.getRequestID().getID() << endl;
      if (it->second.isSynchronous()) {
         nav2log << "Nav2APIImpl::nav2Reply isSynchronous" << endl;
         // Synchronous
         m_nbrSynchonous--;
         // Ownership of the message passes to the receiving interface.
         it->second.getInterface()->receiveSynchronousReply(
            guiProtMess.release(), it->second.getRequestID());
         m_requestInformation.erase(it);
         if (m_nbrSynchonous == 0) {
            // Reply all the stored asynchronous replies
            if (m_readyToRunListener != NULL) {
               if (!m_msgQueue.empty()) {
                  // Request callback
                  while (!m_msgQueue.empty()) {
                     m_msgs.push_back(m_msgQueue.front());
                     m_msgQueue.pop_front();
                  }
                  m_mutex->unlock();
                  m_readyToRunListener->ready();
                  m_mutex->lock();
               } // Else nothing to do.
            } else {
               // No listener: deliver the queued replies inline.
               while (!m_msgQueue.empty()) {
                  nav2log << "Nav2APIImpl::nav2Reply msgQueue "
                          << m_msgQueue.size() << endl;
                  auto_ptr<isab::GuiProtMess> asynMess(m_msgQueue.front());
                  m_msgQueue.pop_front();
                  wf_uint16 asynID = asynMess->getMessageID();
                  it = m_requestInformation.find(asynID);
                  if (it != m_requestInformation.end()) {
                     RequestInformation reqInfo(it->second);
                     m_requestInformation.erase(it);
                     m_mutex->unlock();
                     reqInfo.getInterface()->receiveAsynchronousReply(
                        *asynMess, reqInfo.getRequestID());
                     m_mutex->lock();
                  } else {
                     m_mutex->unlock();
                     unsolicitedReply(*asynMess);
                     m_mutex->lock();
                  }
                  asynMess->deleteMembers();
               }
            } // End else no callback required
         }
      } else {
         if (m_readyToRunListener != NULL) {
            // Store message for later synchronised callback.
            m_msgs.push_back(guiProtMess.release());
            m_mutex->unlock();
            m_readyToRunListener->ready();
            m_mutex->lock();
         } else {
            // Asynchronous
            nav2log << "Nav2APIImpl::nav2Reply Asynchronous" << endl;
            if (m_nbrSynchonous > 0) {
               // Store the asynchronous reply until the synchronous reply
               // comes.
               nav2log << "Nav2APIImpl::nav2Reply storing it, type "
                       << int(guiProtMess->getMessageType()) << endl;
               m_msgQueue.push_back(guiProtMess.release());
            } else {
               // Copy out the request info before unlocking; the map
               // entry must be gone before the callback runs.
               RequestInformation reqInfo(it->second);
               m_requestInformation.erase(it);
               m_mutex->unlock();
               nav2log << "Nav2APIImpl::nav2Reply replying" << endl;
               reqInfo.getInterface()->receiveAsynchronousReply(
                  *guiProtMess, reqInfo.getRequestID());
               m_mutex->lock();
            }
         } // End else no callback required
      }
   } else { // End if we have a receiver for the guiProtMess
      //nav2log << "Nav2APIImpl::nav2Reply unsolicited" << endl;
      if (m_nbrSynchonous > 0) {
         // Store the asynchronous reply until the synchronous reply comes.
         nav2log << "Nav2APIImpl::nav2Reply unsolicited storing it, type "
                 << int(guiProtMess->getMessageType()) << endl;
         m_msgQueue.push_back(guiProtMess.release());
      } else {
         if (m_readyToRunListener != NULL) {
            // Store message for later synchronised callback.
            m_msgs.push_back(guiProtMess.release());
            m_mutex->unlock();
            m_readyToRunListener->ready();
            m_mutex->lock();
         } else {
            m_mutex->unlock();
            unsolicitedReply(*guiProtMess);
            m_mutex->lock();
         }
      }
   }
   // Only messages still owned here (not release()d above) are destroyed.
   if (guiProtMess.get() != NULL) {
      guiProtMess->deleteMembers();
   }
}
// Serializes the message and writes it to the GUI channel, stamping it
// with a freshly allocated message id. Returns that id so the caller can
// match the eventual reply.
wf_uint16
Nav2APIImpl::sendGuiMessage(isab::GuiProtMess* guiProtMess)
{
   // Nav2 convention: GUI-originated messages use odd ids, hence +2.
   const wf_uint16 msgID = m_nextMessageID;
   m_nextMessageID += 2;
   guiProtMess->setMessageId(msgID);
   isab::Buffer buf;
   guiProtMess->serialize(&buf);
   m_guiChannel->getChannel()->writeData(buf.accessRawData(),
                                         buf.getLength());
   return msgID;
}
// Sends a GuiProtMess down to Nav2 and registers reply bookkeeping.
//
// @param guiProtMess The message to send; a message id is assigned to it.
// @param interface   The interface impl that will receive the reply.
// @param synchronous True for blocking calls; increments m_nbrSynchonous
//                    so asynchronous replies are queued until it drains.
// @param requestID   Optional caller-supplied request id; a new one is
//                    allocated when NULL.
// @return The request id in use, or INVALID_REQUEST_ID when shutting
//         down or when a synchronous request is (illegally) issued from
//         the QueueThread itself.
RequestID
Nav2APIImpl::nav2Request(isab::GuiProtMess* guiProtMess, 
                         InterfaceImpl* interface,
                         bool synchronous,
                         RequestID* requestID)
{
   if (m_shutDown) {
      // Is in progress of shutting down, no more messages can
      // be sent.
      return RequestID(RequestID::INVALID_REQUEST_ID);
   }
   WFAPISync sync(m_mutex);
   if (synchronous) {
      // Check current threadid against QueueThread's!
      // If QueueThread return error!
      // (A synchronous wait on the QueueThread would deadlock, since the
      // QueueThread is the one pumping replies.)
      if (isab::Thread::currentThread() == m_queueThread) {
         // This is bad!
         nav2log << "Nav2APIImpl::nav2Request queueThread called me!" 
                 << endl;
         assert(isab::Thread::currentThread() != m_queueThread);
         return RequestID(RequestID::INVALID_REQUEST_ID);
      }
      m_nbrSynchonous++;
   }
   nav2log << "Nav2APIImpl::nav2Request, pre sendGuiMessage" << endl;
   wf_uint16 msgID = sendGuiMessage(guiProtMess);
   nav2log << "Nav2APIImpl::nav2Request, post sendGuiMessage" << endl;
   RequestID usedRequestID(0); // Value set in if-statement
   if (requestID == NULL) {
      usedRequestID = makeRequestID();
   } else {
      usedRequestID = *requestID;
   }
   // Check if the message type of the message to send is located in
   // the set of message types that should be fire and forget.
   if (m_noReplyMessages.find(guiProtMess->getMessageType()) ==
       m_noReplyMessages.end()) {
      // The message type was not found in the m_noReplyMessages, this
      // means that we should add information to the m_requestInformation.
      nav2log << "IntImpl::nav2Request synchronous " << synchronous 
              << " guiID " << msgID << " reqID " << usedRequestID.getID() 
              << endl;
      m_requestInformation.insert(
         make_pair(msgID, 
                   RequestInformation(interface, usedRequestID, 
                                      synchronous)));
   }
   nav2log << "Nav2APIImpl::nav2Request nbr requests "
           << m_requestInformation.size() << " IDs";
   for (RequestInformationMap::const_iterator it = 
           m_requestInformation.begin(); it != m_requestInformation.end();
        ++it) {
      nav2log << " " << it->second.getRequestID().getID();
   }
   nav2log << endl;
   nav2log << "Nav2APIImpl::nav2Request, pre return" << endl;
   return usedRequestID;
}
// Allocates the next request id; ids are handed out strictly increasing.
RequestID
Nav2APIImpl::makeRequestID()
{
   RequestID id(m_nextRequestID);
   ++m_nextRequestID;
   return id;
}
// Helper function.
// Removes all queued messages whose GUI message id equals guiID.
// Fix: the removed GuiProtMess objects are now destroyed as well
// (deleteMembers() followed by delete, matching the cleanup pattern used
// everywhere else in this file); previously the pointers were only erased
// from the queue, leaking the messages.
void cleanMsgQueueFromID(Nav2APIImpl::MsgQueue& msgs,
                         wf_uint16 guiID) {
   for (Nav2APIImpl::MsgQueue::iterator it = msgs.begin();
        it != msgs.end(); /*advances in code*/) {
      if ((*it)->getMessageID() == guiID) {
         isab::GuiProtMess* mess = *it;
         it = msgs.erase(it);
         mess->deleteMembers();
         delete mess;
      } else {
         ++it;
      }
   }
}
// Drops all bookkeeping for a given request id: the request-information
// entries and any messages for it still parked in m_msgs / m_msgQueue.
// The request itself is not aborted in Nav2; a late reply will simply be
// treated as unsolicited.
void
Nav2APIImpl::cancelRequest(RequestID requestID)
{
   WFAPISync sync(m_mutex);
   for (RequestInformationMap::iterator guiProtMessRequestIDIterator = 
           m_requestInformation.begin() ;
        guiProtMessRequestIDIterator != m_requestInformation.end() ; ) {
      if (guiProtMessRequestIDIterator->second.getRequestID() == requestID) {
         // Also check m_msgs and m_msgQueue
         wf_uint16 guiID = guiProtMessRequestIDIterator->first;
         cleanMsgQueueFromID(m_msgs, guiID);
         cleanMsgQueueFromID(m_msgQueue, guiID);
         if (guiProtMessRequestIDIterator->second.isSynchronous()) {
            // This should not happen! We never return requestID for 
            // synchronous calls!
            m_nbrSynchonous--;
            // if zero then send stored!
         }
         // Post-increment before erase keeps the iterator valid.
         m_requestInformation.erase(guiProtMessRequestIDIterator++);
         // No break here as there can be more than one request with
         // the same requestID
      } else {
         ++guiProtMessRequestIDIterator;
      }
   }
}
// RequestInformation: value type pairing an outstanding request with the
// interface that should receive its reply and whether the caller blocks.
Nav2APIImpl::RequestInformation::RequestInformation(InterfaceImpl* interface,
                                                    RequestID requestID,
                                                    bool synchronous)
      : m_interface(interface), m_requestID(requestID),
        m_synchronous(synchronous)
{
}

Nav2APIImpl::RequestInformation::~RequestInformation()
{
}

// The interface impl the reply is routed to (not owned).
InterfaceImpl*
Nav2APIImpl::RequestInformation::getInterface()
{
   return m_interface;
}

// The public request id handed back to the API caller.
RequestID
Nav2APIImpl::RequestInformation::getRequestID() const
{
   return m_requestID;
}

// True when the originating call blocks waiting for this reply.
bool
Nav2APIImpl::RequestInformation::isSynchronous() const
{
   return m_synchronous;
}
// Attaches a MapLibAPI instance and flushes any map replies that arrived
// before MapLib was connected (nav2Reply stores those in m_mapLibMsgs).
// Fix: guard the m_readyToRunListener dereference — connectMapLib can be
// called before start() has installed the listener, in which case the
// original code would dereference NULL; the pending messages then stay
// queued until doCallbacks() is driven later (same policy as nav2Reply).
void
Nav2APIImpl::connectMapLib(MapLibAPI* mapLibAPI) {
   WFAPISync sync(m_mutex);
   nav2log << "Nav2APIImpl::connectMapLib" << endl;
   m_mapLibAPI = mapLibAPI;
   m_mapLibAPI->setStartupListener(m_queueThread);
   // Check if any pending messages exists for MapLib to poll.
   if (! m_mapLibMsgs.empty()) {
      nav2log << "Nav2APIImpl::connectMapLib - sending premature maplib messages."
              << endl;
      if (m_readyToRunListener != NULL) {
         // ready() may call back into this API, so release the lock
         // around the callback (same pattern as nav2Reply).
         m_mutex->unlock();
         m_readyToRunListener->ready();
         m_mutex->lock();
      }
   }
}
// Accessors for the public interface facades. All returned references are
// owned by this object and remain valid for its lifetime.

FavouriteInterface&
Nav2APIImpl::getFavouriteInterface() {
   return *m_favouriteInterface;
}

ImageInterface&
Nav2APIImpl::getImageInterface() {
   return *m_imageInterface;
}

LocationInterface&
Nav2APIImpl::getLocationInterface() {
   return *m_locationInterface;
}

NavigationInterface&
Nav2APIImpl::getNavigationInterface() {
   return *m_navigationInterface;
}

NetworkInterface&
Nav2APIImpl::getNetworkInterface() {
   return *m_networkInterface;
}

RouteInterface&
Nav2APIImpl::getRouteInterface() {
   return *m_routeInterface;
}

SearchInterface&
Nav2APIImpl::getSearchInterface() {
   return *m_searchInterface;
}

SettingsInterface&
Nav2APIImpl::getSettingsInterface() {
   return *m_settingsInterface;
}

BillingInterface&
Nav2APIImpl::getBillingInterface() {
   return *m_billingInterface;
}

TunnelInterface&
Nav2APIImpl::getTunnelInterface() {
   return *m_tunnelInterface;
}

MapRequesterInterfaceImpl&
Nav2APIImpl::getMapRequesterInterfaceImpl()
{
   return *m_mapRequesterInterfaceImpl;
}
// This should be moved to shared code
// Right now this is a copy of MapPlotter::getBitMapExtension.
// Maps the public ImageExtension enum onto the file-name suffix used for
// cached images; PNG doubles as the fallback for unknown values.
const char*
Nav2APIImpl::getImageExtension(ImageExtension imageExtension) const
{
   switch (imageExtension) {
      case MIF:
         return "mif";
      case SVG:
         return "svg";
      case PNG:
      default:
         return "png";
   }
}
// Maps the public ImageDimension enum onto the pixel dimensions used by
// the combined-search image handling. Unknown values map to 0x0.
CombinedSearch::CSImageDimension
Nav2APIImpl::getImageDimension(ImageDimension imageDimension) const
{
   // All supported dimensions are square, so one side length suffices.
   int side;
   switch (imageDimension) {
      case DIMENSION_20X20:
         side = 20;
         break;
      case DIMENSION_40X40:
         side = 40;
         break;
      case DIMENSION_50X50:
         side = 50;
         break;
      case DIMENSION_75X75:
         side = 75;
         break;
      default:
         side = 0;
         break;
   }
   CombinedSearch::CSImageDimension dimension;
   dimension.width = side;
   dimension.height = side;
   return dimension;
}
// Boots the whole Nav2 stack: session data, Nav2 itself, the GUI and
// bluetooth channels, search/image configuration, the audio syntax, and
// finally the QueueThread that pumps messages. Success is reported later
// via startupComplete() from the QueueThread; immediate failures are
// pushed to m_statusNotifier.
void
Nav2APIImpl::start(StartupData* startupData,
                   const WFString& audioTypeDirName,
                   const ProgramVersion& programVersion,
                   const WFString& clientType,
                   const WFString& clientTypeOptions,
                   bool httpProto,
                   const WFString& httpUserAgent,
                   const WFString& httpRequestString,
                   const WFString& httpFixedHost,
                   bool useTracking,
                   bool wayfinderIDStartUp, 
                   const HardwareIDArray& hwids,
                   ReadyToRunListener* readyListener,
                   Nav2StatusNotifier* statusNotifier)
{
   CORE_FUNCTION_PROLOGUE_SET_CONTEXT();
   
   if(startupData != NULL) {
      m_sessionData = new InternalSessionData(startupData);
   }
   
   m_statusNotifier = statusNotifier;

   // NOTE(review): startupData is null-checked above but dereferenced
   // unconditionally here (and m_sessionData below) — confirm callers
   // never pass NULL, otherwise this crashes.
   m_networkInterface->getImpl()->setConnectionManager(
      startupData->getConnectionManager());
   
   // Nav2 startup stuff
   bool startedOk = true;

   // Create
   isab::Nav2Error::Nav2ErrorTable* errorTable = NULL;
   isab::AudioCtrlLanguage* audioSyntax = NULL;
   LanguageHelper::createErrorTableAndSyntax(
      LanguageHelper::textLanguageToNav2(m_sessionData->getTextLanguage()),
      errorTable, audioSyntax);

   isab::Nav2StartupData startData(
      isab::GuiProtEnums::Gold, 
      m_sessionData->getCommonDataStoragePath().c_str(),
      m_sessionData->getParameterStoragePath().c_str(),
      errorTable, audioSyntax);

   // Set the data path to the search interface
   WFString imageStoragePath = m_sessionData->getImageStoragePath();
   m_searchInterface->getImpl()->setImagePath(imageStoragePath.c_str());
   const char* imageExtension = getImageExtension(startupData->getImageExtension());

   // Configure image path/extension/dimension for plain search,
   // category-tree search and one-search alike.
   WFString categoryImagePath = m_sessionData->getCategoryImageStoragePath();
   m_searchInterface->getImpl()->
      setCategoryImagePath(categoryImagePath.c_str());
   m_searchInterface->getImpl()->
      setImageExt(getImageExtension(startupData->getImageExtension()));
   m_searchInterface->getImpl()->
      setImageDimension(getImageDimension(startupData->getImageDimension()));

   WFString categoryTreeImagePath =
      m_sessionData->getCategoryTreeImageStoragePath();
   m_searchInterface->getCategoryTreeImpl()->
      setCategoryImagePath(categoryTreeImagePath.c_str());
   m_searchInterface->getCategoryTreeImpl()->setImageExt(imageExtension);
   m_searchInterface->getCategoryTreeImpl()->
      setImageDimension(getImageDimension(startupData->getImageDimension()));

   m_searchInterface->getOneSearchImpl()->
      setImagePath(categoryTreeImagePath.c_str());
   m_searchInterface->getOneSearchImpl()->setImageExt(imageExtension);
   
   // Program version
   startData.setProgramVersion(programVersion.majorV,
                               programVersion.minorV,
                               programVersion.buildV);
   // Client type
   startData.clientType = clientType.c_str();
   startData.clientTypeOptions = clientTypeOptions.c_str();

   // HW IDs
   for (wf_uint32 i = 0; i < hwids.size(); ++i) {
      startData.hardwareIDs.push_back(new isab::HWID(hwids[i].key.c_str(), 
                                                     hwids[i].type.c_str()));
   }

   // HTTP settings
   startData.setHttpProto(httpProto);
   if (!httpUserAgent.empty()) {
      startData.setHttpUserAgent(httpUserAgent.c_str());
   }
   if (!httpRequestString.empty()) {
      startData.httpRequest = httpRequestString.c_str();
   }
   if (!httpFixedHost.empty()) {
      startData.httpHostOverride = httpFixedHost.c_str();
   }
   startData.setUseTracking(useTracking);
   startData.wayfinderIDStartUP = wayfinderIDStartUp;

   // Creates and starts Nav2
   m_nav2 =
      new isab::Nav2Release(startData,
                            m_networkInterface->getImpl()->getConnectionManager(),
                            m_notificationCenter);
   
   // Set this for later use
   m_guiChannel = new Nav2APIChannel(m_nav2->getGuiChannel());

   // Set this for later use, this should be sent to the gps interface
   // for writing nmea data to nav2
   m_bluetoothChannel = new Nav2APIChannel(m_nav2->getBluetoothChannel());

   m_locationInterface->getImpl()->setBtChannel(m_bluetoothChannel);

   // Set this for later use.
   m_readyToRunListener = readyListener;

   // GuiProtEnums::LOAD_AUDIO_SYNTAX with _XX language path
   // Make audio path from getResourceStoragePath and VoiceLanguage
   MC2SimpleString audioPath(
      m_sessionData->getResourceStoragePath().c_str());
   // Add the directory name of the type of sounds used.
   audioPath.append("/");
   audioPath.append(audioTypeDirName.c_str());
   audioPath.append("/");
   // Directory component is upper-case, file component lower-case.
   audioPath.append(getVoiceLanguageAsString(m_sessionData->getVoiceLanguage(),
                                             true).c_str());
   audioPath.append("/");
   // Add the syntax file
   audioPath.append(getVoiceLanguageAsString(m_sessionData->getVoiceLanguage(),
                                             false).c_str());
   audioPath.append(".syn");
   isab::GenericGuiMess* loadAudio = new isab::GenericGuiMess(
      isab::GuiProtEnums::LOAD_AUDIO_SYNTAX, audioPath.c_str());
   vector<isab::GuiProtMess*> initialMsgs;
   initialMsgs.push_back(loadAudio);

   // Set language in Nav2 too
   isab::GeneralParameterMess* setLang = new isab::GeneralParameterMess(
      isab::GuiProtEnums::paramLanguage, 
      wf_int32(LanguageHelper::textLanguageToNav2(
                  m_sessionData->getTextLanguage())));
   initialMsgs.push_back(setLang);

   // Let QueueThread wait until startup is done and call startupcomplete

   // Start QueueThread if all is ok to far
   if (startedOk) {
      m_queueThread = new QueueThread(
         m_guiChannel->getChannel(), this, initialMsgs, 
         m_sessionData->getTextLanguage());
      if (m_queueThread->start() != isab::Thread::GOOD_START) {
         startedOk = false;
         // Remove the remains
         delete m_queueThread;
         m_queueThread = NULL;
      }
   }

   if (!startedOk) {
      m_statusNotifier->addError(AsynchronousStatus(
                                    RequestID::INVALID_REQUEST_ID,
                                    GENERAL_ERROR, "", ""));
   } // Else wait for QueueThread to call startupComplete in this class.
}
// Registers a listener for Nav2 status events. Silently a no-op until
// start() has installed the status notifier.
void Nav2APIImpl::addStatusListener(Nav2StatusListener* statusListener)
{
   if(m_statusNotifier) {
      m_statusNotifier->addListener(statusListener);
   }
}

// Unregisters a previously added status listener (no-op before start()).
void Nav2APIImpl::removeStatusListener(Nav2StatusListener* statusListener)
{
   if(m_statusNotifier) {
      m_statusNotifier->removeListener(statusListener);
   }
}
void
Nav2APIImpl::stop()
{
coretracefunc();
m_shutDown = true;
m_queueThread->terminate();
}
// Destroys the Nav2 instance and announces STOP_COMPLETE to listeners.
// Called once the QueueThread has wound down.
void
Nav2APIImpl::deleteNav2()
{
   coretracefunc();
   delete m_nav2;
   m_nav2 = NULL;
   coreprintln("Issuing STOP_COMPLETE!");
   m_statusNotifier->addMessage(Nav2StatusNotifier::STOP_COMPLETE, OK);
}
// Called by the QueueThread when Nav2 startup finishes: forwards either
// STARTUP_COMPLETE or an error to the status notifier.
void
Nav2APIImpl::startupComplete(StatusCode status)
{
   nav2log << "Nav2APIMPL::startupcomplete" << endl;
   if (status != OK) {
      coreprintln("Nav2APIImpl::startupcomplete, status NOT OK");
      m_statusNotifier->addError(AsynchronousStatus(
                                    RequestID::INVALID_REQUEST_ID,
                                    status, "", ""));
      return;
   }
   coreprintln("Nav2APIImpl::startupcomplete, status OK");
   m_statusNotifier->addMessage(Nav2StatusNotifier::STARTUP_COMPLETE,
                                status);
}
// Called when MapLib startup finishes: forwards MAPLIB_STARTUP_COMPLETE
// or an error to the status notifier.
void
Nav2APIImpl::mapLibStartupComplete(StatusCode status)
{
   if (status != OK) {
      m_statusNotifier->addError(AsynchronousStatus(
                                    RequestID::INVALID_REQUEST_ID,
                                    status, "", ""));
      return;
   }
   m_statusNotifier->addMessage(Nav2StatusNotifier::MAPLIB_STARTUP_COMPLETE,
                                status);
}
// Routes messages that have no registered request: errors go to the
// status notifier, unsolicited updates are fanned out to the matching
// interface impl with INVALID_REQUEST_ID.
// Fix: the NEW_VERSION_MSG case was missing its break and fell through
// into default, logging every new-version message as "unsolicited
// unknown" after handling it.
void
Nav2APIImpl::unsolicitedReply(const isab::GuiProtMess& guiProtMess)
{
   // Else check for errors or unsolicited updates
   switch (guiProtMess.getMessageType()) {
      case isab::GuiProtEnums::MESSAGETYPE_ERROR:
      {
         const isab::ErrorMess* errorMsg = static_cast<const isab::ErrorMess*>(
            &guiProtMess);
         // Call errorListener
         nav2log << "Nav2APIImpl::nav2Reply unsolicited error" << endl;
         wf_uint32 statusCode = nav2ErrorNumberToStatusCode(
            errorMsg->getErrorNumber());
         // if(statusCode >= NET_ERR_UNKNOWN &&
         //    statusCode <= NET_EVENT_CLOSE_IND) {
         //    m_networkInterface->getImpl()->
         //       reportNetworkError(AsynchronousStatus(
         //                             RequestID::INVALID_REQUEST_ID,
         //                             statusCode, "", ""));
         // } else {
         // Substitute empty strings for any NULL fields before reporting.
         const char* errStr = errorMsg->getErrorString();
         const char* errUrl = errorMsg->getErrorURL();
         const char* errData = errorMsg->getErrorData();
         m_statusNotifier->addError(AsynchronousStatus(
                                       RequestID::INVALID_REQUEST_ID,
                                       statusCode,
                                       errStr != NULL ? errStr : "",
                                       errUrl != NULL ? errUrl : "",
                                       errData != NULL ? errData : ""));
         //}
         break;
      }
      // Route updates...
      case isab::GuiProtEnums::STARTED_NEW_ROUTE:
      case isab::GuiProtEnums::REROUTE:
      {
         nav2log << "Nav2APIImpl::nav2Reply unsolicited route" << endl;
         // Set routeID in MapLib too
         if (getConnectedMapLib() != NULL && guiProtMess.getMessageType() ==
             isab::GuiProtEnums::STARTED_NEW_ROUTE) {
            const isab::StartedNewRouteMess* dataMessage =
               static_cast<const isab::StartedNewRouteMess*>(&guiProtMess);
            getConnectedMapLib()->getMapLib()->setRouteID(
               RouteID(dataMessage->getRouteId()));
         }
         // Send message to
         m_routeInterface->getImpl()->receiveAsynchronousReply(
            guiProtMess, RequestID(RequestID::INVALID_REQUEST_ID));
         break;
      }
      // Navigation updates...
      case isab::GuiProtEnums::PREPARE_SOUNDS:
      case isab::GuiProtEnums::PLAY_SOUNDS:
      case isab::GuiProtEnums::SOUND_FILE_LIST:
      case isab::GuiProtEnums::UPDATE_ROUTE_INFO:
      {
         nav2log << "Nav2APIImpl::nav2Reply unsolicited navigation"
                 << endl;
         m_navigationInterface->getImpl()->receiveAsynchronousReply(
            guiProtMess, RequestID(RequestID::INVALID_REQUEST_ID));
         break;
      }
      // Favourite updates...
      case isab::GuiProtEnums::FAVORITES_CHANGED:
      {
         nav2log << "Nav2APIImpl::nav2Reply unsolicited favourites"
                 << endl;
         m_favouriteInterface->getImpl()->receiveAsynchronousReply(
            guiProtMess, RequestID(RequestID::INVALID_REQUEST_ID));
         break;
      }
      // GPS updates...
      case isab::GuiProtEnums::UPDATE_POSITION_INFO:
      case isab::GuiProtEnums::SATELLITE_INFO:
      {
         nav2log << "Nav2APIImpl::nav2Reply unsolicited location "
                 << int(guiProtMess.getMessageType()) << endl;
         m_locationInterface->getImpl()->receiveAsynchronousReply(
            guiProtMess, RequestID(RequestID::INVALID_REQUEST_ID));
         break;
      }
      // Search
      case isab::GuiProtEnums::SEARCH_RESULT_CHANGED:
      {
         nav2log << "Nav2APIImpl::nav2Reply unsolicited search"
                 << endl;
         // Uncomment when Impl is done!
         //m_searchInterface->getImpl()->receiveAsynchronousReply(
         //   guiProtMess, RequestID(RequestID::INVALID_REQUEST_ID));
         break;
      }
      case isab::GuiProtEnums::PROGRESS_INDICATOR:
         // These are quite annoying to look at.
         // Ignore for now.
         break;
      // Settings updates...(We ignore these? Get them when needed.)
      // PARAMETER_CHANGED
      case isab::GuiProtEnums::SET_GENERAL_PARAMETER: {
         const isab::GeneralParameterMess* mess =
            static_cast<const isab::GeneralParameterMess*> (
               &guiProtMess);
         handleSetParameter(*mess);
         break;
      }
      case isab::GuiProtEnums::NEW_VERSION_MSG: {
         m_settingsInterface->getImpl()->receiveAsynchronousReply(
            guiProtMess, RequestID(RequestID::INVALID_REQUEST_ID));
         break; // Added: previously fell through to default's log line.
      }
      default:
         nav2log << "Nav2APIImpl::nav2Reply unsolicited unknown"
                 << " message type " << int(guiProtMess.getMessageType())
                 << endl;
         break;
   }
}
// Translates an internal isab::Nav2Error error number into a public WFAPI
// status code. Unmapped errors fall back to GENERAL_ERROR (and log).
wf_uint32
Nav2APIImpl::nav2ErrorNumberToStatusCode(wf_uint32 errorNumber) const
{
   wf_uint32 res = GENERAL_ERROR;
   switch (isab::Nav2Error::ErrorNbr(errorNumber)) {
      // General
      case isab::Nav2Error::NSC_EXPIRED_USER:
         res = EXPIRED_ERROR;
         break;
      case isab::Nav2Error::NSC_AUTHORIZATION_FAILED:
         res = UNAUTHORIZED_ERROR;
         break;
      case isab::Nav2Error::NSC_SERVER_NOT_OK:
         res = GENERAL_SERVER_ERROR;
         break;
      case isab::Nav2Error::NSC_SERVER_OUTSIDE_MAP:
         res = OUTSIDE_MAP_ERROR;
         break;
      case isab::Nav2Error::NSC_SERVER_UNAUTHORIZED_MAP:
         res = OUTSIDE_ALLOWED_MAP_ERROR;
         break;
      // Route errors
      case isab::Nav2Error::NSC_NO_ROUTE_RIGHTS:
         res = NO_ROUTE_RIGHT_ERROR;
         break;
      case isab::Nav2Error::NSC_NO_GPS_WARN:
         res = NO_GPS_WARN;
         break;
      case isab::Nav2Error::NSC_NO_GPS_ERR:
         res = NO_GPS_ERROR;
         break;
      case isab::Nav2Error::NSC_SERVER_ROUTE_TOO_LONG:
         res = TOO_FAR_FOR_VEHICLE;
         break;
      case isab::Nav2Error::NSC_SERVER_NO_ROUTE_FOUND:
         res = NO_ROUTE_FOUND;
         break;
      case isab::Nav2Error::NSC_SERVER_BAD_ORIGIN:
         res = PROBLEM_WITH_ORIGIN;
         break;
      case isab::Nav2Error::NSC_SERVER_BAD_DESTINATION:
         res = PROBLEM_WITH_DEST;
         break;
      case isab::Nav2Error::NAVTASK_ALREADY_DOWNLOADING_ROUTE:
         res = ALREADY_DOWNLOADING_ROUTE;
         break;
      case isab::Nav2Error::NAVTASK_ROUTE_INVALID:
         res = ROUTE_INVALID;
         break;
      // Several internal route problems collapse to one public code.
      case isab::Nav2Error::NAVTASK_NSC_OUT_OF_SYNC:
      case isab::Nav2Error::NAVTASK_INTERNAL_ERROR:
      case isab::Nav2Error::NAVTASK_CONFUSED:
      case isab::Nav2Error::NAVTASK_NO_ROUTE:
         res = GENERAL_ROUTE_ERROR;
         break;
      case isab::Nav2Error::NAVTASK_FAR_AWAY:
         res = TOO_FAR_AWAY;
         break;
      // Network transport failed
      case isab::Nav2Error::NSC_TRANSPORT_FAILED:
         res = NETWORK_TRANSPORT_FAILED;
         break;
      case isab::Nav2Error::DEST_SYNC_ALREADY_IN_PROGRESS:
         res = FAVOURITE_SYNC_ALREADY_IN_PROGRESS;
         break;
      // Network
      case isab::Nav2Error::NSC_TCP_INTERNAL_ERROR:
      case isab::Nav2Error::NSC_TCP_INTERNAL_ERROR2:
      case isab::Nav2Error::NSC_SERVER_COMM_TIMEOUT_CONNECTED:
      case isab::Nav2Error::NSC_SERVER_COMM_TIMEOUT_CONNECTING:
      case isab::Nav2Error::NSC_SERVER_COMM_TIMEOUT_DISCONNECTING:
      case isab::Nav2Error::NSC_SERVER_COMM_TIMEOUT_CLEAR:
      case isab::Nav2Error::NSC_SERVER_COMM_TIMEOUT_WAITING_FOR_USER:
      case isab::Nav2Error::NSC_FAKE_CONNECT_TIMEOUT:
      case isab::Nav2Error::NSC_SERVER_REQUEST_TIMEOUT:
         res = NETWORK_TIMEOUT_ERROR;
         break;
      case isab::Nav2Error::NSC_NO_NETWORK_AVAILABLE:
      case isab::Nav2Error::NSC_SERVER_NOT_FOUND:
      case isab::Nav2Error::NSC_SERVER_UNREACHABLE:
      case isab::Nav2Error::NSC_SERVER_NOT_RESPONDING:
      case isab::Nav2Error::NSC_SERVER_CONNECTION_BROKEN:
      case isab::Nav2Error::NSC_FLIGHT_MODE:
         res = GENERAL_NETWORK_ERROR;
         break;
      case isab::Nav2Error::GUIPROT_FAILED_GET_TOP_REGION_LIST:
         res = GENERAL_ERROR;
         break;
      case isab::Nav2Error::GUIPROT_FAILED_GET_SIMPLE_PARAMETER:
         res = GENERAL_ERROR;
         break;
      case isab::Nav2Error::GUIPROT_FAILED_SET_SIMPLE_PARAMETER:
         res = GENERAL_ERROR;
         break;
      case isab::Nav2Error::GUIPROT_FAILED_GET_FAVORITES:
      case isab::Nav2Error::GUIPROT_FAILED_GET_FAVORITES_ALL_DATA:
      case isab::Nav2Error::GUIPROT_FAILED_SORT_FAVORITES:
      case isab::Nav2Error::GUIPROT_FAILED_SYNC_FAVORITES:
      case isab::Nav2Error::GUIPROT_FAILED_GET_FAVORITE_INFO:
         res = GENERAL_FAVOURITE_ERROR;
         break;
      case isab::Nav2Error::GUIPROT_FAILED_ADD_FAVORITE:
      case isab::Nav2Error::GUIPROT_FAILED_ADD_FAVORITE_FROM_SEARCH:
         res = FAILED_ADD_FAVOURITE;
         break;
      case isab::Nav2Error::GUIPROT_FAILED_REMOVE_FAVORITE:
         res = FAILED_REMOVE_FAVOURITE;
         break;
      case isab::Nav2Error::GUIPROT_FAILED_CHANGE_FAVORITE:
         res = FAILED_CHANGE_FAVOURITE;
         break;
      case isab::Nav2Error::GUIPROT_FAILED_ROUTE_TO_FAVORITE:
         // Not used feature
         res = GENERAL_ERROR;
         break;
      case isab::Nav2Error::GUIPROT_FAILED_DISCONNECT_GPS:
         res = LBS_STOP_FAILED;
         break;
      case isab::Nav2Error::GUIPROT_FAILED_CONNECT_GPS:
         res = LBS_STARTUP_FAILED;
         break;
      default:
         coreprintln("Serious problem, hit default case: isab::Nav2Error::ErrorNbr(errorNumber)=%d", isab::Nav2Error::ErrorNbr(errorNumber));
         break;
   }
   return res;
}
// The MapLib instance connected via connectMapLib(), or NULL before then.
MapLibAPI* 
Nav2APIImpl::getConnectedMapLib()
{
   return m_mapLibAPI; 
}

// The owning public Nav2API facade (not owned by this object).
Nav2API*
Nav2APIImpl::getNav2API()
{
   return m_api;
}
// Drains both callback queues: regular replies first, then MapLib map
// messages. Driven by the client thread after a ready() notification.
void
Nav2APIImpl::doCallbacks()
{
   internalDoCallbacks(m_msgs);
   internalDoCallbacks(m_mapLibMsgs);
}
// Returns the language code for a voice language, upper-case as stored
// or lower-cased on request (used to build the audio resource path).
//
// @param lang      The voice language to look up.
// @param uppercase When false, the code is lower-cased.
// Fix: tolower() is passed through an unsigned char cast — calling it
// with a plain char that holds a negative value (possible for non-ASCII
// bytes where char is signed) is undefined behavior per the C standard.
WFString
Nav2APIImpl::getVoiceLanguageAsString(VoiceLanguage::VoiceLanguage lang,
                                      bool uppercase)
{
   WFString res = LanguageHelper::getLanguageCodeForVoice(lang);
   if (!uppercase) {
      // Lower case it
      std::string tmp;
      tmp.reserve(res.length());
      for (unsigned int l = 0; l < res.length(); ++l) {
         tmp.append(1, static_cast<char>(
                       tolower(static_cast<unsigned char>(res[l]))));
      }
      res = tmp.c_str();
   }
   return res;
}
// Delivers every message in the given queue: matched replies go to the
// registered interface, unmatched ones to unsolicitedReply(). The mutex
// is released around each delivery since callbacks may re-enter the API.
void
Nav2APIImpl::internalDoCallbacks(MsgQueue& msgs)
{
   WFAPISync sync(m_mutex);
   while (!msgs.empty()) {
//      nav2log << "Nav2APIImpl::doCallbacks msgs "
//              << msgs.size() << endl;
      auto_ptr<isab::GuiProtMess> mess(msgs.front());
      msgs.pop_front();
      wf_uint16 guiID = mess->getMessageID();
      RequestInformationMap::iterator it = m_requestInformation.find(guiID);
      if (it != m_requestInformation.end()) {
         // Copy out the info and drop the map entry before unlocking.
         RequestInformation reqInfo(it->second);
         m_requestInformation.erase(it);
         m_mutex->unlock();
//         nav2log << "Nav2APIImpl::doCallbacks replying" << endl;
         reqInfo.getInterface()->receiveAsynchronousReply(
            *mess, reqInfo.getRequestID());
         m_mutex->lock();
      } else {
         m_mutex->unlock();
         unsolicitedReply(*mess);
         m_mutex->lock();
      }
      mess->deleteMembers();
   }
}
void
Nav2APIImpl::handleSetParameter(const isab::GeneralParameterMess& mess)
{
   // Reacts to parameter updates pushed from Nav2: forwards the ones other
   // interfaces care about, and tracks which of the five category tables
   // have arrived so listeners are notified exactly once per full refresh.
   switch(mess.getParamId()) {
      case isab::GuiProtEnums::paramDistanceMode:
      {
         // Send to NavigationInterface
         DistanceUnit distanceUnit =
            SettingsInterfaceImpl::nav2DistanceUnitToWFAPI(
               mess.getIntegerData()[0]);
         getNavigationInterface().getImpl()->setDistanceUnit(distanceUnit);
         break;
      }
      case isab::GuiProtEnums::paramTopRegionList:
         getSearchInterface().getImpl()->handleSetTopRegionList();
         break;
      case isab::GuiProtEnums::paramCategoryChecksum:
         // Category data arrives as five separate parameters; each case
         // marks its tag, and allCategoriesUpdated() below fires the
         // notification once all five have been seen.
         m_categoryState[CHECKSUM_TAG] = true;
         getSearchInterface().getImpl()->handleSetCategoryChecksum(
            *mess.getIntegerData());
         break;
      case isab::GuiProtEnums::paramCategoryIds:
         m_categoryState[ID_TAG] = true;
         break;
      case isab::GuiProtEnums::paramCategoryNames:
         m_categoryState[NAMES_TAG] = true;
         break;
      case isab::GuiProtEnums::paramCategoryIcons:
         m_categoryState[ICONS_TAG] = true;
         break;
      case isab::GuiProtEnums::paramCategoryIntIds:
         m_categoryState[INTS_TAG] = true;
         break;
      default:
         // Parameter we do not handle; just log it.
         nav2log << "Nav2APIImpl::handleSetParameter unsolicited "
                 << "parameter ID 0x" << hex << mess.getParamId() << dec
                 << endl;
   }
   if(allCategoriesUpdated()) {
      m_searchInterface->getImpl()->notifyCategoriesUpdated();
      resetCategoryTags();
   }
}
void Nav2APIImpl::resetCategoryTags()
{
   // Clear every category-arrival flag so the next refresh cycle starts
   // from a clean slate.
   int tag = 0;
   while (tag < NUM_TAGS) {
      m_categoryState[tag] = false;
      ++tag;
   }
}
bool Nav2APIImpl::allCategoriesUpdated() const
{
   // True only when every category-arrival flag has been set.
   bool all = true;
   for (int tag = 0; tag < NUM_TAGS; ++tag) {
      all = all && m_categoryState[tag];
   }
   return all;
}
WGS84Coordinate
Nav2APIImpl::getCurrentPosition()
{
   // Delegates to the location interface implementation.
   return m_locationInterface->getImpl()->getCurrentPosition();
}
const InternalSessionData*
Nav2APIImpl::getSessionData() const
{
   // Read-only access to the session data held by this implementation.
   return m_sessionData;
}
// The notification center shared by the API objects of this session.
NotificationCenter* WFAPI::Nav2APIImpl::getNotificationCenter()
{
   return m_notificationCenter;
}
} // End namespace WFAPI
|
bsd-3-clause
|
pombredanne/django-narcissus
|
narcissus/garden/urls.py
|
304
|
# URL routes for the narcissus "garden" app.
from django.conf.urls.defaults import patterns, include, url
from narcissus.garden.views import HomeView, PetalCreateView

urlpatterns = patterns('',
    # Landing page.
    url(r'^$', HomeView.as_view(), name='narcissus-home'),
    # Create a new petal; the captured group is passed positionally to the
    # view (presumably a petal-type slug — confirm against PetalCreateView).
    url(r'^new/([\w-]+)/$', PetalCreateView.as_view(),
        name='narcissus-new-petal'),
)
|
bsd-3-clause
|
chad/rubinius
|
lib/decompiler.rb
|
3613
|
require 'kernel/core/iseq' unless defined? RUBY_ENGINE and RUBY_ENGINE == 'rbx'
# Like String#unpack('N'), but returns a Fixnum, rather than an array containing a string
class String
  # Decode the first four bytes of this string as a 32-bit unsigned integer.
  #
  # endian:: ?b (the default) selects big-endian; any other value selects
  #          little-endian.
  #
  # Implemented with String#unpack so it behaves identically on Ruby 1.8
  # (where str[i] returned a Fixnum) and on 1.9+ (where str[i] returns a
  # one-character String, which made the original byte-arithmetic version
  # raise NoMethodError on String#|).
  def unpack_int(endian = ?b)
    if ?b == endian
      # 'N' = 32-bit unsigned, big-endian (network) byte order
      unpack('N').first
    else
      # 'V' = 32-bit unsigned, little-endian byte order
      unpack('V').first
    end
  end
end
# Reads a Rubinius .rbc (compiled bytecode) file and re-emits its marshalled
# contents as plain Ruby data (symbols, integers, strings, nested arrays).
class MarshalEmitter
  # ver::   rbc format version read from the file header
  # str::   the raw file contents
  # start:: byte offset at which the marshalled stream begins
  def initialize(ver, str, start=0)
    @rbc_version = ver
    @string = str
    @index = start
    @decoder = InstructionSequence::Encoder.new
  end

  attr_reader :rbc_version

  # One-byte tags identifying the type of the next marshalled element.
  TagNames = {
    ?n => :nil,
    ?t => :true,
    ?f => :false,
    ?i => :int,
    ?s => :string,
    ?d => :float,
    ?B => :bignum,
    ?x => :symbol,
    ?b => :bytes,
    ?I => :instructions,
    ?p => :tuple,
    ?A => :array,
    ?m => :method,
    ?M => :method2,
    ?r => :object,
    ?S => :send_site
  }

  # Tags that carry no payload after the tag byte.
  NoBody = [:nil, :true, :false]

  # Open +file+, verify the 'RBIX' magic, and return an emitter positioned
  # at the first marshalled element (offset 28, just past the header).
  def self.process_rbc(file)
    # Binary mode needs to be specified on Win
    str = ""
    File.open(file, 'rb') do |f|
      str = f.read
    end

    raise "Not a Rubinius compiled file" unless 'RBIX' == str[0..3]
    ver = str[4..7].unpack_int

    return new(ver, str, 28)
  end

  # Decode the next element at @index. Tagless types come back as a bare
  # symbol; bignums as an Integer; everything else as [tag_name, body].
  def process
    tag = @string[@index]
    @index += 1

    name = TagNames[tag]
    raise "Unrecognised tag '" << (tag || '\0') << "' at #{@index} (#{sprintf('%#x', @index)})" unless name

    if NoBody.include? name
      name
    else
      body = __send__ "process_#{name}"
      if name == :bignum
        body.to_i
      else
        [name, body]
      end
    end
  end

  # 5 bytes: a sign byte (?n means negative) then a 4-byte big-endian value.
  def process_int
    body = @string[@index, 5]
    @index += 5
    sign = body[0]
    int = body[1..-1].unpack_int
    if sign == ?n
      int = -int
    end
    return int
  end

  # 4-byte big-endian length followed by that many bytes of payload.
  def process_string
    sz = @string[@index,4].unpack_int
    @index += 4
    body = @string[@index, sz]
    @index += sz
    return body
  end

  # Like process_string but the payload has a trailing NUL that is skipped.
  def process_num
    body = process_string
    @index += 1  # Discard trailing \0
    body
  end
  alias :process_float :process_num
  alias :process_bignum :process_num

  alias :process_symbol :process_string
  alias :process_bytes :process_string
  alias :process_send_site :process_string

  # 4-byte element count followed by that many recursively decoded elements.
  def process_tuple
    sz = @string[@index,4].unpack_int
    @index += 4
    body = []
    sz.times do
      body << process()
    end

    body
  end
  alias :process_method :process_tuple
  alias :process_object :process_tuple

  # Support for version 2 of compiled method, which replaces size with a version number
  def process_method2
    ver = @string[@index,4].unpack_int
    @index += 4
    body = []
    # Version 1 compiled methods always have exactly 16 fields.
    sz = 16 if 1 == ver
    raise "Unsupported version (#{ver}) of CompiledMethod" unless sz
    sz.times do
      body << process()
    end
    body
  end

  # A one-byte endian marker, then the raw iseq bytes, decoded into
  # [opcode_symbol, operands...] rows.
  def process_instructions
    endian = @string[@index]
    @index += 1
    # NOTE(review): the endian byte is read but never used — presumably the
    # decoder assumes a fixed byte order; confirm against the writer side.
    body = process_string()
    body = @decoder.decode_iseq(body)
    body.map! {|i| [i.first.opcode].concat i[1..-1]}
    body
  end
end
# If file is run, dump content of .rbc to STDOUT
if __FILE__ == $0
  if ARGV.size > 0
    require 'pp'
    # Pretty-print the decoded contents of each .rbc file named on the
    # command line, one after another.
    while rbc = ARGV.shift
      emit = MarshalEmitter.process_rbc(rbc)
      STDOUT.puts "\nContent of #{rbc}:"
      pp emit.process
    end
  else
    STDOUT.puts "Usage: #{__FILE__} <rbc_file> [<rbc_file> ...]"
  end
end
|
bsd-3-clause
|
mudunuriRaju/tlr-live
|
frontend/web/js/angular/i18n/angular-locale_ckb-arab-ir.js
|
6135
|
'use strict';
angular.module("ngLocale", [], ["$provide", function ($provide) {
var PLURAL_CATEGORY = {ZERO: "zero", ONE: "one", TWO: "two", FEW: "few", MANY: "many", OTHER: "other"};
function getDecimals(n) {
  // Count of digits after the decimal point in n's string form
  // (0 when there is no decimal point).
  var str = String(n);
  var dot = str.indexOf('.');
  return dot < 0 ? 0 : str.length - dot - 1;
}
function getVF(n, opt_precision) {
  // v: number of visible fraction digits — an explicit precision wins,
  // otherwise derived from the value itself and capped at 3.
  var v = (opt_precision === undefined) ? Math.min(getDecimals(n), 3) : opt_precision;
  // f: the visible fraction digits as an integer.
  var base = Math.pow(10, v);
  var f = ((n * base) | 0) % base;
  return {v: v, f: f};
}
$provide.value("$locale", {
"DATETIME_FORMATS": {
"AMPMS": [
"\u0628.\u0646",
"\u062f.\u0646"
],
"DAY": [
"\u06cc\u06d5\u06a9\u0634\u06d5\u0645\u0645\u06d5",
"\u062f\u0648\u0648\u0634\u06d5\u0645\u0645\u06d5",
"\u0633\u06ce\u0634\u06d5\u0645\u0645\u06d5",
"\u0686\u0648\u0627\u0631\u0634\u06d5\u0645\u0645\u06d5",
"\u067e\u06ce\u0646\u062c\u0634\u06d5\u0645\u0645\u06d5",
"\u06be\u06d5\u06cc\u0646\u06cc",
"\u0634\u06d5\u0645\u0645\u06d5"
],
"ERANAMES": [
"\u067e\u06ce\u0634 \u0632\u0627\u06cc\u06cc\u0646",
"\u0632\u0627\u06cc\u06cc\u0646\u06cc"
],
"ERAS": [
"\u067e\u06ce\u0634 \u0632\u0627\u06cc\u06cc\u06cc\u0646",
"\u0632"
],
"FIRSTDAYOFWEEK": 5,
"MONTH": [
"\u06a9\u0627\u0646\u0648\u0648\u0646\u06cc \u062f\u0648\u0648\u06d5\u0645",
"\u0634\u0648\u0628\u0627\u062a",
"\u0626\u0627\u0632\u0627\u0631",
"\u0646\u06cc\u0633\u0627\u0646",
"\u0626\u0627\u06cc\u0627\u0631",
"\u062d\u0648\u0632\u06d5\u06cc\u0631\u0627\u0646",
"\u062a\u06d5\u0645\u0648\u0648\u0632",
"\u0626\u0627\u0628",
"\u0626\u06d5\u06cc\u0644\u0648\u0648\u0644",
"\u062a\u0634\u0631\u06cc\u0646\u06cc \u06cc\u06d5\u06a9\u06d5\u0645",
"\u062a\u0634\u0631\u06cc\u0646\u06cc \u062f\u0648\u0648\u06d5\u0645",
"\u06a9\u0627\u0646\u0648\u0646\u06cc \u06cc\u06d5\u06a9\u06d5\u0645"
],
"SHORTDAY": [
"\u06cc\u06d5\u06a9\u0634\u06d5\u0645\u0645\u06d5",
"\u062f\u0648\u0648\u0634\u06d5\u0645\u0645\u06d5",
"\u0633\u06ce\u0634\u06d5\u0645\u0645\u06d5",
"\u0686\u0648\u0627\u0631\u0634\u06d5\u0645\u0645\u06d5",
"\u067e\u06ce\u0646\u062c\u0634\u06d5\u0645\u0645\u06d5",
"\u06be\u06d5\u06cc\u0646\u06cc",
"\u0634\u06d5\u0645\u0645\u06d5"
],
"SHORTMONTH": [
"\u06a9\u0627\u0646\u0648\u0648\u0646\u06cc \u062f\u0648\u0648\u06d5\u0645",
"\u0634\u0648\u0628\u0627\u062a",
"\u0626\u0627\u0632\u0627\u0631",
"\u0646\u06cc\u0633\u0627\u0646",
"\u0626\u0627\u06cc\u0627\u0631",
"\u062d\u0648\u0632\u06d5\u06cc\u0631\u0627\u0646",
"\u062a\u06d5\u0645\u0648\u0648\u0632",
"\u0626\u0627\u0628",
"\u0626\u06d5\u06cc\u0644\u0648\u0648\u0644",
"\u062a\u0634\u0631\u06cc\u0646\u06cc \u06cc\u06d5\u06a9\u06d5\u0645",
"\u062a\u0634\u0631\u06cc\u0646\u06cc \u062f\u0648\u0648\u06d5\u0645",
"\u06a9\u0627\u0646\u0648\u0646\u06cc \u06cc\u06d5\u06a9\u06d5\u0645"
],
"STANDALONEMONTH": [
"\u06a9\u0627\u0646\u0648\u0648\u0646\u06cc \u062f\u0648\u0648\u06d5\u0645",
"\u0634\u0648\u0628\u0627\u062a",
"\u0626\u0627\u0632\u0627\u0631",
"\u0646\u06cc\u0633\u0627\u0646",
"\u0626\u0627\u06cc\u0627\u0631",
"\u062d\u0648\u0632\u06d5\u06cc\u0631\u0627\u0646",
"\u062a\u06d5\u0645\u0648\u0648\u0632",
"\u0626\u0627\u0628",
"\u0626\u06d5\u06cc\u0644\u0648\u0648\u0644",
"\u062a\u0634\u0631\u06cc\u0646\u06cc \u06cc\u06d5\u06a9\u06d5\u0645",
"\u062a\u0634\u0631\u06cc\u0646\u06cc \u062f\u0648\u0648\u06d5\u0645",
"\u06a9\u0627\u0646\u0648\u0646\u06cc \u06cc\u06d5\u06a9\u06d5\u0645"
],
"WEEKENDRANGE": [
4,
5
],
"fullDate": "y MMMM d, EEEE",
"longDate": "d\u06cc MMMM\u06cc y",
"medium": "y MMM d HH:mm:ss",
"mediumDate": "y MMM d",
"mediumTime": "HH:mm:ss",
"short": "y-MM-dd HH:mm",
"shortDate": "y-MM-dd",
"shortTime": "HH:mm"
},
"NUMBER_FORMATS": {
"CURRENCY_SYM": "Rial",
"DECIMAL_SEP": "\u066b",
"GROUP_SEP": "\u066c",
"PATTERNS": [
{
"gSize": 3,
"lgSize": 3,
"maxFrac": 3,
"minFrac": 0,
"minInt": 1,
"negPre": "-",
"negSuf": "",
"posPre": "",
"posSuf": ""
},
{
"gSize": 3,
"lgSize": 3,
"maxFrac": 2,
"minFrac": 2,
"minInt": 1,
"negPre": "-\u00a4\u00a0",
"negSuf": "",
"posPre": "\u00a4\u00a0",
"posSuf": ""
}
]
},
"id": "ckb-arab-ir",
"pluralCat": function (n, opt_precision) {
var i = n | 0;
var vf = getVF(n, opt_precision);
if (i == 1 && vf.v == 0) {
return PLURAL_CATEGORY.ONE;
}
return PLURAL_CATEGORY.OTHER;
}
});
}]);
|
bsd-3-clause
|
MaddTheSane/MacPaf
|
src/com/redbugz/maf/jdom/EventJDOM.java
|
7002
|
package com.redbugz.maf.jdom;
//import java.text.DateFormat;
//import java.text.ParseException;
//import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.GregorianCalendar;
import org.apache.log4j.Logger;
import org.apache.log4j.helpers.DateTimeDateFormat;
import org.jdom.Element;
import org.joda.time.DateTime;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
import com.redbugz.maf.Event;
import com.redbugz.maf.Family;
import com.redbugz.maf.Place;
import com.redbugz.maf.util.StringUtils;
/**
* Created by IntelliJ IDEA.
* User: logan
* Date: Mar 16, 2003
* Time: 3:34:57 PM
* To change this template use Options | File Templates.
*/
public class EventJDOM implements Event {
    private static final Logger log = Logger.getLogger(EventJDOM.class);

    // Fallback date returned by getDate() when no date string is present or
    // parsing fails. NOTE(review): this arbitrary default (23 Jul 2000) is
    // inherited from the original code — confirm callers expect it rather
    // than null.
    private Date date = new GregorianCalendar(2000, 6, 23).getTime();
    private Place place = Place.UNKNOWN_PLACE;

    // GEDCOM-style date format, used for both parsing and printing.
    DateTimeFormatter dateFormat = DateTimeFormat.forPattern("dd MMM yyyy");

    // Backing JDOM element holding all event data (date, place, age, ...).
    Element element = new Element("EVEN");

    // Individual Event Types
    public static final String ADOPTION = "ADOP";
    public static final String BIRTH = "BIRT";
    public static final String CHRISTENING = "CHR";
    public static final String DEATH = "DEAT";
    public static final String BURIAL = "BURI";
    public static final String CREMATION = "CREM";

    // Family Event Types
    public static final String MARRIAGE = "MARR";
    public static final String DIVORCE = "DIV";

    // Child element names
    public static final String DATE = "DATE";
    public static final String EVENT = "EVEN";
    public static final String TYPE = "TYPE";
    public static final String PLACE = "PLAC";
    public static final String ADDR = "ADDR";
    public static final String AGE = "AGE";
    public static final String AGENCY = "AGNC";
    public static final String CAUSE = "CAUS";

    /**
     * Returns the event date parsed from the DATE child element, or the
     * last successfully parsed (or default) date when the string is absent
     * or unparseable.
     */
    public Date getDate() {
        try {
            if (!StringUtils.isEmpty(getDateString())) {
                // BUG FIX: parse with the same "dd MMM yyyy" formatter used by
                // setDate()/setDateString(). The previous implementation used
                // log4j's DateTimeDateFormat, whose "dd MMM yyyy HH:mm:ss,SSS"
                // pattern never matches the dates this class writes, so every
                // call logged a parse error and returned the stale default.
                date = dateFormat.parseDateTime(getDateString()).toDate();
            }
        }
        catch (Exception e) {
            log.error("Date Parse error: " + e.getMessage());
        }
        return date;
    }

    /**
     * Wraps an existing event element; a null element is replaced by a new,
     * empty EVEN element.
     */
    public EventJDOM(Element element) {
        if (element == null) {
            element = new Element(EVENT);
        }
        this.element = element;
        place = new PlaceJDOM(element.getChild(PlaceJDOM.PLACE));
    }

    /** Creates an event from a raw date string and a place. */
    public EventJDOM(String dateString, Place place) {
        setDateString(dateString);
        setPlace(place);
    }

    /**
     * Creates an event from a Date and a place.
     * NOTE(review): unlike the other constructors this one only sets the
     * fields and does not write DATE/PLAC children into the backing
     * element — confirm this asymmetry is intended.
     */
    public EventJDOM(Date date, Place place) {
        this.date = date;
        this.place = place;
    }

    /** Creates an empty event backed by a fresh EVEN element. */
    public EventJDOM() {
        element = new Element(EVENT);
    }

    /**
     * Copy constructor: shares the backing element of another EventJDOM;
     * for foreign Event implementations, copies the date and place.
     */
    public EventJDOM(Event originalEvent) {
        if (originalEvent instanceof EventJDOM) {
            element = ((EventJDOM) originalEvent).getElement();
        }
        else {
            setDate(originalEvent.getDate());
            setPlace(originalEvent.getPlace());
        }
    }

    public static EventJDOM createBirthEventInstance(Family family) {
        return new EventJDOM(new Element(BIRTH));
    }

    public static EventJDOM createChristeningEventInstance(Family family) {
        return new EventJDOM(new Element(CHRISTENING));
    }

    public static EventJDOM createDeathEventInstance() {
        return new EventJDOM(new Element(DEATH));
    }

    public static EventJDOM createBurialEventInstance() {
        return new EventJDOM(new Element(BURIAL));
    }

    public static EventJDOM createCremationEventInstance() {
        return new EventJDOM(new Element(CREMATION));
    }

    public static EventJDOM createAdoptionEventInstance(Family family, boolean adoptedByHusband, boolean adoptedByWife) {
        return new EventJDOM(new Element(ADOPTION));
    }

    public static EventJDOM createMarriageEventInstance() {
        return new EventJDOM(new Element(MARRIAGE));
    }

    public static EventJDOM createDivorceEventInstance() {
        return new EventJDOM(new Element(DIVORCE).setText("Y"));
    }

    /** The backing JDOM element. Mutations to it are visible to this event. */
    public Element getElement() {
        return element;
    }

    /** Formats the given Date as "dd MMM yyyy" and stores it. */
    public void setDate(Date date) {
        setDateString(dateFormat.print(new DateTime(date)));
    }

    /** The raw date string from the DATE child element, never null. */
    public String getDateString() {
        String dateString = element.getChildTextTrim(DATE);
        if (dateString == null) {
            dateString = "";
        }
        return dateString;
    }

    /**
     * Stores the raw date string in the DATE child element and refreshes the
     * cached Date.
     * BUG FIX: the cached date is now parsed from the incoming dateString;
     * previously the OLD value still stored in the element was parsed, so
     * the cache always lagged one update behind.
     */
    public void setDateString(String dateString) {
        try {
            if (!StringUtils.isEmpty(dateString)) {
                date = dateFormat.parseDateTime(dateString).toDate();
            } else {
                date = null;
            }
        }
        catch (Exception e) {
            date = null;
            log.error("Date Parse error: " + e.getMessage());
        }
        element.removeChildren(DATE);
        if (StringUtils.notEmpty(dateString)) {
            element.addContent(new Element(DATE).setText(dateString));
        }
    }

    /** The event place, lazily wrapped from the PLAC child element. */
    public Place getPlace() {
        if (place == null) {
            place = new PlaceJDOM(element.getChild(PLACE));
        }
        return place;
    }

    /**
     * Replaces the place; an UnknownPlace clears the PLAC child instead of
     * writing one.
     */
    public void setPlace(Place place) {
        this.place = place;
        element.removeChildren(PLACE);
        if (! (place instanceof Place.UnknownPlace)) {
            element.addContent(new PlaceJDOM(place).getElement());
        }
    }

    // todo: add address support to events
    // public Address getAddress() {
    //     return address;
    // }
    //
    // public void setAddress(Address address) {
    //     this.address = address;
    //     element.removeChildren(ADDRESS);
    //     element.addContent(new AddressJDOM(address).getElement());
    // }

    /**
     * @return Returns the age.
     * NOTE(review): throws NumberFormatException when the AGE child is
     * absent or non-numeric — confirm callers guard against that.
     */
    public int getAge() {
        return Integer.parseInt(getAgeString());
    }

    /**
     * @param age The age to set.
     */
    public void setAge(int age) {
        setAgeString(String.valueOf(age));
    }

    /**
     * @return Returns the ageString, never null.
     */
    public String getAgeString() {
        String ageString = element.getChildTextTrim(AGE);
        if (ageString == null) {
            ageString = "";
        }
        return ageString;
    }

    /**
     * @param ageString The ageString to set; empty clears the AGE child.
     */
    public void setAgeString(String ageString) {
        element.removeChildren(AGE);
        if (StringUtils.notEmpty(ageString)) {
            element.addContent(new Element(AGE).setText(ageString));
        }
    }

    /**
     * @return Returns the cause, never null.
     */
    public String getCause() {
        String cause = element.getChildTextTrim(CAUSE);
        if (cause == null) {
            cause = "";
        }
        return cause;
    }

    /**
     * @param cause The cause to set; empty clears the CAUS child.
     */
    public void setCause(String cause) {
        element.removeChildren(CAUSE);
        if (StringUtils.notEmpty(cause)) {
            element.addContent(new Element(CAUSE).setText(cause));
        }
    }

    /**
     * @return Returns the type (TYPE child text), never null.
     */
    public String getType() {
        String source = element.getChildTextTrim(TYPE);
        if (source == null) {
            source = "";
        }
        return source;
    }

    /** The GEDCOM tag of the backing element (e.g. BIRT, DEAT, MARR). */
    public String getEventTypeString() {
        return element.getName();
    }

    /**
     * @param type The type to set.
     */
    public void setType(String type) {
        element.removeChildren(TYPE);
        element.addContent(new Element(TYPE).setText(type));
    }
}
|
bsd-3-clause
|
hongqipiaoyang/houtai
|
commands/QueController.php
|
6625
|
<?php
// 问题时间插入
namespace app\commands;
use Yii;
use yii\console\Controller;
use app\models\QueModel;
use app\models\MatchInfoModel;
use app\models\BaskMatchInfoModel;
class QueController extends Controller{

    // Only adjust questions for matches starting within this many hours.
    private $time = 2;

    /**
     * Console entry point: refresh match/guess times and append a summary
     * line to ./log/log.txt.
     */
    public function actionIndex(){
        $log = '';
        $log .= 'start_time: '.date('Y-m-d H:i:s');
        $log .= $this->_checkMatchTime();
        $log .= 'end_time:'.date('Y-m-d H:i:s')."\n\r";
        file_put_contents('./log/log.txt', $log, FILE_APPEND);
    }

    /**
     * Sync the authoritative kick-off time for upcoming matches and derive
     * the guessing windows (football 15-45' / 45-60', basketball 2nd
     * quarter). Returns a human-readable log fragment.
     */
    private function _checkMatchTime(){
        $time = $this->time;
        $filed_que = ['id','match_time','match_id','type'];
        $where_que = ['and',['or','time_option=1','time_option=-1'],'match_time-'.time().'<= 3600*'.$time];
        // Questions whose match starts within the window.
        $data = QueModel::find()->select($filed_que)->where($where_que)->asArray()->all();

        $ball_match_id = $bask_match_id = $ball_match_id_tump = $bask_match_id_tump = array();
        // Split football (type 1) from basketball (other types).
        foreach($data as $k=>$v){
            if($v['type'] == 1){
                $ball_match_id[] = $v['match_id'];
            }else{
                $bask_match_id[] = $v['match_id'];
            }
            $match_time[$v['match_id']] = $v['match_time'];
        }

        $filed_match = ['MatchID','MatchDateTime'];
        $ball_time_arr = MatchInfoModel::find()->select($filed_match)->where(['MatchID'=>$ball_match_id])->asArray()->all();
        $bask_time_arr = BaskMatchInfoModel::find()->select($filed_match)->where(['MatchID'=>$bask_match_id])->asArray()->all();

        // match_id => authoritative kick-off timestamp. Initialised so the
        // lookups below cannot hit undefined variables when one sport has
        // no pending matches (BUG FIX).
        $ball_time = $bask_time = array();
        foreach($ball_time_arr as $v){ $ball_time[$v['MatchID']] = strtotime($v['MatchDateTime']); }
        foreach($bask_time_arr as $v){ $bask_time[$v['MatchID']] = strtotime($v['MatchDateTime']); }

        $ball_sql = 'match_time= CASE match_id ';              // football kick-off
        $ball_15_45_start_sql = 'start_time= CASE match_id ';  // football 15-45' guess start
        $ball_15_45_end_sql = 'end_time= CASE match_id ';      // football 15-45' guess end
        $ball_45_60_start_sql = 'start_time= CASE match_id ';  // football 45-60' guess start
        $ball_45_60_end_sql = 'end_time= CASE match_id ';      // football 45-60' guess end
        $bask_sql = 'match_time= CASE match_id ';              // basketball tip-off
        $bask_2_sql = 'start_time= CASE match_id ';            // basketball 2nd-quarter guess start

        foreach($data as $k=>$v){
            // isset() guards (BUG FIX): a question whose match has no
            // authoritative time would otherwise emit "THEN " with an empty
            // value and break the generated SQL.
            if($v['type']== 1 && isset($ball_time[$v['match_id']]) && $v['match_time'] != $ball_time[$v['match_id']]){
                $ball_15_45_start_sql .= ' WHEN '.$v['match_id'].' THEN '.($ball_time[$v['match_id']]-60*5);
                $ball_15_45_end_sql .= ' WHEN '.$v['match_id'].' THEN '.($ball_time[$v['match_id']]+60*13);
                $ball_45_60_start_sql .= ' WHEN '.$v['match_id'].' THEN '.($ball_time[$v['match_id']]+60*14);
                $ball_45_60_end_sql .= ' WHEN '.$v['match_id'].' THEN '.($ball_time[$v['match_id']]+60*55);
                $ball_sql .= ' WHEN '.$v['match_id'].' THEN '.$ball_time[$v['match_id']];
                $ball_match_id_tump[] = $v['match_id']; // football matches needing an update
            } else if($v['type']== -1 && isset($bask_time[$v['match_id']]) && $v['match_time'] != $bask_time[$v['match_id']]){
                $bask_sql .= ' WHEN '.$v['match_id'].' THEN '.$bask_time[$v['match_id']];
                // BUG FIX: this was appended to $bask_sql1, which was never
                // initialised; $bask_2_sql (declared above) was intended.
                $bask_2_sql .= ' WHEN '.$v['match_id'].' THEN '.($bask_time[$v['match_id']]-60*5);
                $bask_match_id_tump[] = $v['match_id']; // basketball matches needing an update
            }
        }

        $log = '';
        if($ball_match_id_tump){
            $match_id = implode(',', $ball_match_id_tump);
            $log .= ' 足球更新的数量:'.($this->_updateDate($ball_sql.' END ', $match_id));
            // BUG FIX: the two SET clauses must be comma-separated
            // ("start_time=... END, end_time=... END").
            $log .= ' 15-45场比赛竞猜时间:'.($this->_updateDate($ball_15_45_start_sql.' END, '.$ball_15_45_end_sql.' END ', $match_id,'and time_option=-1'));
            $log .= ' 45-60场比赛竞猜时间:'.($this->_updateDate($ball_45_60_start_sql.' END, '.$ball_45_60_end_sql.' END ', $match_id,'and time_option=-2'));
        }else{
            $log .= ' 没有需要更新的足球的问题';
        }

        if($bask_match_id_tump){
            $update_sql = $bask_sql.' END ';
            $match_id = implode(',', $bask_match_id_tump);
            $log .= ' 篮球更新的数量:'.($this->_updateDate($update_sql, $match_id));
            // BUG FIX: this used $ball_sql1, which does not exist anywhere;
            // the basketball 2nd-quarter CASE built above was intended.
            $update_sql = $bask_2_sql.' END ';
            $log .= ' 第二节场比赛竞猜开始时间:'.($this->_updateDate($update_sql, $match_id,'and time_option=1'));
        }else{
            $log .= ' 没有需要更新的篮球的问题';
        }
        return $log;
    }

    /**
     * NOTE(review): experimental, unused and incomplete — it references
     * $ball_time, which is not in scope here. Left unchanged.
     */
    private function _checksql($time_field,$true_time,$data,$time_con,$type){
        $sql = $time_field.'= CASE match_id ';
        foreach($data as $k=>$v){
            if($v['type'] == $type && $v['match_time'] != $ball_time[$v['match_id']]){
                $sql .= ' WHEN '.$v['match_id'].' THEN '.$time_con;
                $match_id[] = $v['match_id'];
            }
        }
        return [$sql,$match_id];
    }

    /**
     * Run a bulk UPDATE against sl_que_content for the given match ids.
     * BUG FIX: spaces after SET and before WHERE were missing, producing
     * invalid SQL such as "UPDATE sl_que_content SETmatch_time= CASE ...".
     */
    private function _updateDate($update_sql,$match_id,$addwhere = ''){
        $sql_true = 'UPDATE sl_que_content SET '.$update_sql.' WHERE match_id IN ('.$match_id.') and check_state=1 and stop_state=0 '.$addwhere.';';
        return Yii::$app->db->createCommand($sql_true)->execute();
    }

    /**
     * NOTE(review): looks unfinished — the fetched ids are never used.
     */
    public function actionTime6190(){
        // $field = [''];
        $where = ['and','time_option=-3','start_time <> 0','match_time-'.(time()).'< 90'];
        $match_id = QueModel::find()->select('match_id')->distinct()->where($where)->asArray()->column();
    }
}
|
bsd-3-clause
|
luizrjunior/ClubeLuxuria
|
module/Curtidas/Module.php
|
1064
|
<?php
namespace Curtidas;
use Curtidas\Service\CurtidasService;
use Zend\Mvc\ModuleRouteListener;
use Zend\Mvc\MvcEvent;
class Module {

    /**
     * Application bootstrap hook: attaches the ModuleRouteListener to the
     * shared event manager so module-prefixed controller names resolve.
     */
    public function onBootstrap(MvcEvent $e) {
        $eventManager = $e->getApplication()->getEventManager();
        $moduleRouteListener = new ModuleRouteListener();
        $moduleRouteListener->attach($eventManager);
    }

    /** Module configuration (routes, controllers, view settings). */
    public function getConfig() {
        return include __DIR__ . '/config/module.config.php';
    }

    /** Standard autoloader mapping for this module's src/ tree. */
    public function getAutoloaderConfig() {
        return array(
            'Zend\Loader\StandardAutoloader' => array(
                'namespaces' => array(
                    __NAMESPACE__ => __DIR__ . '/src/' . __NAMESPACE__,
                ),
            ),
        );
    }

    /**
     * Service factories: CurtidasService is constructed with the Doctrine
     * entity manager injected from the service locator.
     */
    public function getServiceConfig() {
        return array(
            'factories' => array(
                'Curtidas\Service\CurtidasService' => function($em) {
                    return new CurtidasService($em->get('Doctrine\ORM\EntityManager'));
                }
            )
        );
    }
}
|
bsd-3-clause
|
devhood/erp-base
|
module/Main/src/Main/Entity/Tblcity.php
|
1863
|
<?php
namespace Main\Entity;
use Doctrine\ORM\Mapping as ORM;
/**
* Tblcity
*
* @ORM\Table(name="tblCity", indexes={@ORM\Index(name="fk_tblCity_tblProvince1_idx", columns={"intProvinceID"})})
* @ORM\Entity
*/
class Tblcity
{
    /**
     * Surrogate primary key (auto-increment).
     *
     * @var integer
     *
     * @ORM\Column(name="intCityID", type="integer", nullable=false)
     * @ORM\Id
     * @ORM\GeneratedValue(strategy="IDENTITY")
     */
    private $intcityid;

    /**
     * City display name.
     *
     * @var string
     *
     * @ORM\Column(name="strCityName", type="string", length=45, nullable=false)
     */
    private $strcityname;

    /**
     * Owning province (many cities to one province).
     *
     * @var \Main\Entity\Tblprovince
     *
     * @ORM\ManyToOne(targetEntity="Main\Entity\Tblprovince")
     * @ORM\JoinColumns({
     *   @ORM\JoinColumn(name="intProvinceID", referencedColumnName="intProvinceID")
     * })
     */
    private $intprovinceid;

    /**
     * Get intcityid
     *
     * No setter is provided: the id is database-generated.
     *
     * @return integer
     */
    public function getIntcityid()
    {
        return $this->intcityid;
    }

    /**
     * Set strcityname
     *
     * @param string $strcityname
     * @return Tblcity fluent interface
     */
    public function setStrcityname($strcityname)
    {
        $this->strcityname = $strcityname;

        return $this;
    }

    /**
     * Get strcityname
     *
     * @return string
     */
    public function getStrcityname()
    {
        return $this->strcityname;
    }

    /**
     * Set intprovinceid
     *
     * @param \Main\Entity\Tblprovince $intprovinceid null detaches the city from any province
     * @return Tblcity fluent interface
     */
    public function setIntprovinceid(\Main\Entity\Tblprovince $intprovinceid = null)
    {
        $this->intprovinceid = $intprovinceid;

        return $this;
    }

    /**
     * Get intprovinceid
     *
     * @return \Main\Entity\Tblprovince
     */
    public function getIntprovinceid()
    {
        return $this->intprovinceid;
    }
}
|
bsd-3-clause
|
rackerlabs/django-DefectDojo
|
dojo/unittests/tools/test_sonatype_parser.py
|
997
|
from django.test import TestCase
from dojo.models import Test
from dojo.tools.sonatype.parser import SonatypeParser
class TestSonatypeParser(TestCase):
    """Fixture-driven tests for SonatypeParser.get_findings()."""

    def test_parse_file_with_one_vuln(self):
        # 'with' guarantees the fixture handle is closed even if parsing
        # raises (the original leaked the handle on exceptions).
        with open("dojo/unittests/scans/sonatype/one_vuln.json") as testfile:
            findings = SonatypeParser().get_findings(testfile, Test())
        self.assertEqual(1, len(findings))

    def test_parse_file_with_many_vulns(self):
        with open("dojo/unittests/scans/sonatype/many_vulns.json") as testfile:
            findings = SonatypeParser().get_findings(testfile, Test())
        self.assertEqual(3, len(findings))

    def test_parse_file_with_long_file_path(self):
        with open("dojo/unittests/scans/sonatype/long_file_path.json") as testfile:
            findings = SonatypeParser().get_findings(testfile, Test())
        self.assertEqual(2, len(findings))
|
bsd-3-clause
|
rutgersmobile/android-client
|
app/src/main/java/edu/rutgers/css/Rutgers/channels/food/model/DiningMenuAdapter.java
|
1043
|
package edu.rutgers.css.Rutgers.channels.food.model;
import android.content.Context;
import android.support.annotation.NonNull;
import edu.rutgers.css.Rutgers.api.food.model.DiningMenu;
import edu.rutgers.css.Rutgers.model.SectionedListAdapter;
/**
* Sectioned adapter for dining hall menus. The sections are the food categories and the
* items are the food items.
*/
public class DiningMenuAdapter extends SectionedListAdapter<DiningMenu.Genre, String> {

    /**
     * @param context        host context, used by the base adapter for inflation
     * @param itemResource   layout resource for a single food-item row
     * @param headerResource layout resource for a category header row
     * @param textViewId     id of a TextView within those layouts (presumably
     *                       where the base adapter binds the text — confirm
     *                       against SectionedListAdapter)
     */
    public DiningMenuAdapter(@NonNull Context context, int itemResource, int headerResource, int textViewId) {
        super(context, itemResource, headerResource, textViewId);
    }

    /** Section header label: the food category's name. */
    @Override
    public String getSectionHeader(DiningMenu.Genre section) {
        return section.getGenreName();
    }

    /** Row label: the food item at {@code position} within the category. */
    @Override
    public String getSectionItem(DiningMenu.Genre section, int position) {
        return section.getItems().get(position);
    }

    /** Number of food items in the category. */
    @Override
    public int getSectionItemCount(DiningMenu.Genre section) {
        return section.getItems().size();
    }
}
|
bsd-3-clause
|
rjschof/gem5
|
src/base/loader/dtb_object.cc
|
6176
|
/*
* Copyright (c) 2013 The Regents of The University of Michigan
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met: redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer;
* redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution;
* neither the name of the copyright holders nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* Authors: Anthony Gutierrez
*/
#include "base/loader/dtb_object.hh"
#include <sys/mman.h>
#include <unistd.h>
#include <cassert>
#include "sim/byteswap.hh"
#include "fdt.h"
#include "libfdt.h"
ObjectFile *
DtbObject::tryFile(const std::string &fname, size_t len, uint8_t *data)
{
    // Bail out early unless the buffer starts with the flattened device
    // tree magic number.
    if (fdt_magic((void*)data) != FDT_MAGIC)
        return NULL;

    // A DTB blob carries no architecture or OS information.
    return new DtbObject(fname, len, data,
                         ObjectFile::UnknownArch, ObjectFile::UnknownOpSys);
}
DtbObject::DtbObject(const std::string &_filename, size_t _len, uint8_t *_data,
                     Arch _arch, OpSys _opSys)
    : ObjectFile(_filename, _len, _data, _arch, _opSys)
{
    // Expose the whole blob as the "text" section; a flattened device
    // tree has no data or bss sections.
    text.baseAddr = 0;
    text.size = len;
    text.fileImage = fileData;

    data.baseAddr = 0;
    data.size = 0;
    data.fileImage = NULL;

    bss.baseAddr = 0;
    bss.size = 0;
    bss.fileImage = NULL;

    // fileData is a mapping (presumably created by the loader), so the
    // destructor must munmap it rather than delete[] it.
    fileDataMmapped = true;
}
DtbObject::~DtbObject()
{
    // Make sure to clean up memory properly depending
    // on how buffer was allocated: delete[] for the heap copy created by
    // addBootCmdLine(), munmap for the original mapping.
    if (fileData && !fileDataMmapped) {
        delete [] fileData;
        fileData = NULL;
    } else if (fileData) {
        munmap(fileData, len);
        fileData = NULL;
    }
}
bool
DtbObject::addBootCmdLine(const char* _args, size_t len)
{
    // Write _args (len bytes plus NUL) into the "bootargs" property of the
    // /chosen node, creating /chosen if necessary. On success the object's
    // buffer is replaced by a newly allocated, repacked blob.
    // NOTE(review): the parameter 'len' shadows the member 'len'; member
    // accesses below deliberately use this->len.
    const char* root_path = "/";
    const char* node_name = "chosen";
    const char* full_path_node_name = "/chosen";
    const char* property_name = "bootargs";

    // Make a new buffer that has extra space to add nodes/properties
    int newLen = 2*this->len;
    uint8_t* fdt_buf_w_space = new uint8_t[newLen];
    // Copy and unpack flattened device tree into new buffer
    int ret = fdt_open_into((void*)fileData, (void*)fdt_buf_w_space, (newLen));
    if (ret < 0) {
        warn("Error resizing buffer of flattened device tree, "
             "errno: %d\n", ret);
        delete [] fdt_buf_w_space;
        return false;
    }

    // First try finding the /chosen node in the dtb
    int offset = fdt_path_offset((void*)fdt_buf_w_space, full_path_node_name);
    if (offset < 0) {
        // try adding the node by walking dtb tree to proper insertion point
        offset = fdt_path_offset((void*)fdt_buf_w_space, root_path);
        offset = fdt_add_subnode((void*)fdt_buf_w_space, offset, node_name);
        // if we successfully add the subnode, get the offset
        if (offset >= 0)
            offset = fdt_path_offset((void*)fdt_buf_w_space, full_path_node_name);
        if (offset < 0) {
            warn("Error finding or adding \"chosen\" subnode to flattened "
                 "device tree, errno: %d\n", offset);
            delete [] fdt_buf_w_space;
            return false;
        }
    }

    // Set the bootargs property in the /chosen node (len+1 to include the
    // terminating NUL of the argument string).
    ret = fdt_setprop((void*)fdt_buf_w_space, offset, property_name,
                      (const void*)_args, len+1);
    if (ret < 0) {
        warn("Error setting \"bootargs\" property to flattened device tree, "
             "errno: %d\n", ret);
        delete [] fdt_buf_w_space;
        return false;
    }

    // Repack the dtb for kernel use
    ret = fdt_pack((void*)fdt_buf_w_space);
    if (ret < 0) {
        warn("Error re-packing flattened device tree structure, "
             "errno: %d\n", ret);
        delete [] fdt_buf_w_space;
        return false;
    }

    // NOTE(review): size is left at the enlarged buffer length rather than
    // the packed size — confirm consumers tolerate the trailing slack.
    text.size = newLen;
    text.fileImage = fdt_buf_w_space;

    // clean up old buffer and set to new fdt blob; the replacement is
    // heap-allocated, so flag it for delete[] in the destructor.
    munmap(fileData, this->len);
    fileData = fdt_buf_w_space;
    fileDataMmapped = false;
    this->len = newLen;

    return true;
}
Addr
DtbObject::findReleaseAddr()
{
    // Read the "cpu-release-addr" property of /cpus/cpu@0 (spin-table
    // secondary-CPU boot protocol). The property may hold one 32-bit cell
    // or two; for an 8-byte property both branches below run, combining
    // the big-endian high and low words into a 64-bit address.
    void *fd = (void*)fileData;

    int offset = fdt_path_offset(fd, "/cpus/cpu@0");
    int len;

    const void* temp = fdt_getprop(fd, offset, "cpu-release-addr", &len);
    Addr rel_addr = 0;

    if (len > 3)
        rel_addr = betoh(*static_cast<const uint32_t*>(temp));
    if (len == 8)
        rel_addr = (rel_addr << 32) | betoh(*(static_cast<const uint32_t*>(temp)+1));

    return rel_addr;
}
bool
DtbObject::loadAllSymbols(SymbolTable *symtab, Addr base, Addr offset,
                          Addr addr_mask)
{
    // A DTB blob carries no symbols; nothing to load.
    return false;
}
bool
DtbObject::loadGlobalSymbols(SymbolTable *symtab, Addr base, Addr offset,
                             Addr addr_mask)
{
    // nothing to do here — device trees have no symbol information
    return false;
}
bool
DtbObject::loadLocalSymbols(SymbolTable *symtab, Addr base, Addr offset,
                            Addr addr_mask)
{
    // nothing to do here — device trees have no symbol information
    return false;
}
|
bsd-3-clause
|
smithfarm/mfile-www
|
share/js/core/tests/lib.js
|
5444
|
// *************************************************************************
// Copyright (c) 2014-2017, SUSE LLC
//
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// 3. Neither the name of SUSE LLC nor the names of its contributors may be
// used to endorse or promote products derived from this software without
// specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
// *************************************************************************
//
// tests/lib.js
//
"use strict";

// QUnit tests for the core library helpers: hairCut, isInteger, privCheck.
define ([
    'QUnit',
    'cf',
    'current-user',
    'lib'
], function (
    QUnit,
    cf,
    currentUser,
    lib
) {
    return function () {

        // hairCut: strips every property not named in the whitelist.
        QUnit.test('internal library functions: hairCut', function (assert) {
            var keep = ['a', 'b', 'c'],
                subject = { a: 1, b: 2, c: 3, bogusProp: "bogus" };
            keep.forEach(function (key) {
                assert.ok(subject.hasOwnProperty(key), key);
            });
            assert.ok(subject.hasOwnProperty("bogusProp"), "bogusProp present");
            lib.hairCut(subject, keep);
            keep.forEach(function (key) {
                assert.ok(subject.hasOwnProperty(key), key + " still there");
            });
            assert.strictEqual(subject.hasOwnProperty("bogusProp"), false, "no bogus property anymore");
        });

        // isInteger: accepts integral numbers, rejects non-numeric strings.
        QUnit.test('internal library functions: isInteger', function (assert) {
            [-1, 0, 1].forEach(function (n) {
                assert.ok(lib.isInteger(n), n + " is an integer");
            });
            assert.notOk(lib.isInteger("foo"), "foo is not an integer");
            assert.notOk(lib.isInteger(""), "empty string is not an integer");
        });

        // privCheck: a user's ACL check passes exactly when the user's own
        // privilege level is at or above the ACL level.  Walk the full
        // 4x4 user-level x ACL-level matrix.
        QUnit.test('internal library functions: privCheck', function (assert) {
            var privs = ['passerby', 'inactive', 'active', 'admin'];
            assert.strictEqual(currentUser('priv'), null, "starting currentUserPriv is null");
            privs.forEach(function (userPriv, userRank) {
                currentUser('priv', userPriv);
                assert.strictEqual(currentUser('priv'), userPriv,
                    "set currentUserPriv to " + userPriv);
                privs.forEach(function (acl, aclRank) {
                    assert.strictEqual(lib.privCheck(acl), aclRank <= userRank,
                        "user " + userPriv + ", ACL " + acl);
                });
            });
            // Reset for subsequent test modules.
            currentUser('priv', null);
        });

    };
});
|
bsd-3-clause
|
ohboyohboyohboy/antlr3
|
java/RubyTarget.java
|
18911
|
/*
[The "BSD license"]
Copyright (c) 2010 Kyle Yetter
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. The name of the author may not be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.antlr.codegen;
import org.antlr.Tool;
import org.antlr.tool.Grammar;
import org.stringtemplate.v4.AttributeRenderer;
import org.stringtemplate.v4.ST;
import org.stringtemplate.v4.STGroup;
import java.io.IOException;
import java.util.*;
/**
 * ANTLR code-generation target that emits Ruby recognizers.  Supplies a
 * Ruby-specific StringTemplate renderer ({@link RubyRenderer}) and hooks for
 * sharing the named "all" action block between a combined grammar's parser
 * and its implicit lexer.
 */
public class RubyTarget extends Target
{
/** A set of ruby keywords which are used to escape labels and method names
 * which will cause parse errors in the ruby source
 */
public static final Set<String> rubyKeywords =
new HashSet<String>() {
{
add( "alias" ); add( "END" ); add( "retry" );
add( "and" ); add( "ensure" ); add( "return" );
add( "BEGIN" ); add( "false" ); add( "self" );
add( "begin" ); add( "for" ); add( "super" );
add( "break" ); add( "if" ); add( "then" );
add( "case" ); add( "in" ); add( "true" );
add( "class" ); add( "module" ); add( "undef" );
add( "def" ); add( "next" ); add( "unless" );
add( "defined?" ); add( "nil" ); add( "until" );
add( "do" ); add( "not" ); add( "when" );
add( "else" ); add( "or" ); add( "while" );
add( "elsif" ); add( "redo" ); add( "yield" );
add( "end" ); add( "rescue" );
}
};
// Maps a combined grammar's name to its named "all" action map so the
// implicit lexer generated afterwards can reuse it; see the long note in
// genRecognizerFile() below.
public static Map<String, Map<String, Object>> sharedActionBlocks = new HashMap<String, Map<String, Object>>();
/**
 * StringTemplate attribute renderer implementing the Ruby-specific format
 * names used by the templates: snakecase, camelcase, subcamelcase,
 * constant, platform, lexerRule, constantPath, rubyString, label, symbol,
 * tokenLabel.
 */
public class RubyRenderer implements AttributeRenderer
{
// Escape text for each of the 256 single-byte character values when it
// appears inside a double-quoted Ruby string literal.
protected String[] rubyCharValueEscape = new String[256];
public RubyRenderer() {
// 0x00-0x0f need a leading zero in the two-digit hex escape.
for ( int i = 0; i < 16; i++ ) {
rubyCharValueEscape[ i ] = "\\x0" + Integer.toHexString( i );
}
for ( int i = 16; i < 32; i++ ) {
rubyCharValueEscape[ i ] = "\\x" + Integer.toHexString( i );
}
// Printable ASCII is emitted verbatim.
for ( char i = 32; i < 127; i++ ) {
rubyCharValueEscape[ i ] = Character.toString( i );
}
for ( int i = 127; i < 256; i++ ) {
rubyCharValueEscape[ i ] = "\\x" + Integer.toHexString( i );
}
// Named escapes override the generic forms installed above.
rubyCharValueEscape['\n'] = "\\n";
rubyCharValueEscape['\r'] = "\\r";
rubyCharValueEscape['\t'] = "\\t";
rubyCharValueEscape['\b'] = "\\b";
rubyCharValueEscape['\f'] = "\\f";
rubyCharValueEscape['\\'] = "\\\\";
rubyCharValueEscape['"'] = "\\\"";
}
/**
 * Formats {@code o.toString()} according to {@code formatName}.  A null
 * format name returns the value unformatted; an empty string is returned
 * as-is.
 *
 * @throws IllegalArgumentException if formatName is not one of the
 *         Ruby-specific format names handled below
 */
@Override
public String toString( Object o, String formatName, Locale locale ) {
if ( formatName==null ) {
return o.toString();
}
String idString = o.toString();
if ( idString.length() == 0 ) return idString;
if ( formatName.equals( "snakecase" ) ) {
return snakecase( idString );
} else if ( formatName.equals( "camelcase" ) ) {
return camelcase( idString );
} else if ( formatName.equals( "subcamelcase" ) ) {
return subcamelcase( idString );
} else if ( formatName.equals( "constant" ) ) {
return constantcase( idString );
} else if ( formatName.equals( "platform" ) ) {
return platform( idString );
} else if ( formatName.equals( "lexerRule" ) ) {
return lexerRule( idString );
} else if ( formatName.equals( "constantPath" ) ) {
return constantPath( idString );
} else if ( formatName.equals( "rubyString" ) ) {
return rubyString( idString );
} else if ( formatName.equals( "label" ) ) {
return label( idString );
} else if ( formatName.equals( "symbol" ) ) {
return symbol( idString );
} else if ( formatName.equals( "tokenLabel" ) ) {
return tokenLabel( idString );
} else {
throw new IllegalArgumentException( "Unsupported format name" );
}
}
/** given an input string, which is presumed
 * to contain a word, which may potentially be camelcased,
 * and convert it to snake_case underscore style.
 *
 * algorithm --
 *  iterate through the string with a sliding window 3 chars wide
 *
 * example -- aGUIWhatNot
 *   c   c+1 c+2  action
 *   a   G        << 'a' << '_'  // a lower-upper word edge
 *   G   U   I    << 'g'
 *   U   I   W    << 'w'
 *   I   W   h    << 'i' << '_'  // the last character in an acronym run of uppers
 *   W   h        << 'w'
 *   ... and so on
 */
private String snakecase( String value ) {
StringBuilder output_buffer = new StringBuilder();
int l = value.length();
int cliff = l - 1;
char cur;
char next;
char peek;
if ( value.length() == 0 ) return value;
if ( l == 1 ) return value.toLowerCase();
for ( int i = 0; i < cliff; i++ ) {
cur = value.charAt( i );
next = value.charAt( i + 1 );
if ( Character.isLetter( cur ) ) {
output_buffer.append( Character.toLowerCase( cur ) );
if ( Character.isDigit( next ) || Character.isWhitespace( next ) ) {
output_buffer.append( '_' );
} else if ( Character.isLowerCase( cur ) && Character.isUpperCase( next ) ) {
// at camelcase word edge
output_buffer.append( '_' );
} else if ( ( i < cliff - 1 ) && Character.isUpperCase( cur ) && Character.isUpperCase( next ) ) {
// cur is part of an acronym
peek = value.charAt( i + 2 );
if ( Character.isLowerCase( peek ) ) {
/* if next is the start of word (indicated when peek is lowercase)
then the acronym must be completed by appending an underscore */
output_buffer.append( '_' );
}
}
} else if ( Character.isDigit( cur ) ) {
output_buffer.append( cur );
if ( Character.isLetter( next ) ) {
output_buffer.append( '_' );
}
} else if ( Character.isWhitespace( cur ) ) {
// do nothing
} else {
output_buffer.append( cur );
}
}
// Flush the final character (skipped by the window loop above).
cur = value.charAt( cliff );
if ( ! Character.isWhitespace( cur ) ) {
output_buffer.append( Character.toLowerCase( cur ) );
}
return output_buffer.toString();
}
// snake_case upper-cased: used for Ruby constant names.
private String constantcase( String value ) {
return snakecase( value ).toUpperCase();
}
// Wraps a name in double underscores (e.g. FILE -> __FILE__).
private String platform( String value ) {
return ( "__" + value + "__" );
}
// Renders value as a Ruby symbol: ":name" for plain identifiers
// (optionally ending in ?, ! or =), otherwise a %s(...) symbol literal.
private String symbol( String value ) {
if ( value.matches( "[a-zA-Z_]\\w*[\\?\\!\\=]?" ) ) {
return ( ":" + value );
} else {
return ( "%s(" + value + ")" );
}
}
// Lexer rule name -> Ruby method name: snakecase plus a '!' suffix; the
// synthetic "Tokens" rule becomes "token!".
private String lexerRule( String value ) {
// System.out.print( "lexerRule( \"" + value + "\") => " );
if ( value.equals( "Tokens" ) ) {
// System.out.println( "\"token!\"" );
return "token!";
} else {
// String result = snakecase( value ) + "!";
// System.out.println( "\"" + result + "\"" );
return ( snakecase( value ) + "!" );
}
}
// Strips surrounding angle brackets from a token label.
private String tokenLabel( String value ) {
return value.replaceAll( "^<|>$", "" );
}
// Converts a dotted path (a.b.C) into a Ruby constant path (a::b::C).
private String constantPath( String value ) {
return value.replaceAll( "\\.", "::" );
}
// Emits value as a double-quoted Ruby string, escaping each character via
// the lookup table.  NOTE(review): a char >= 256 would index out of
// bounds; presumably unreachable since getMaxCharValue() caps the target
// at 0xFF -- confirm.
private String rubyString( String value ) {
StringBuilder output_buffer = new StringBuilder();
int len = value.length();
output_buffer.append( '"' );
for ( int i = 0; i < len; i++ ) {
output_buffer.append( rubyCharValueEscape[ value.charAt( i ) ] );
}
output_buffer.append( '"' );
return output_buffer.toString();
}
// Converts snake_case / whitespace-separated words to CamelCase; digits
// pass through unchanged and start a new word after them.
private String camelcase( String value ) {
StringBuilder output_buffer = new StringBuilder();
int cliff = value.length();
char cur;
char next;
boolean at_edge = true;
if ( value.length() == 0 ) return value;
if ( cliff == 1 ) return value.toUpperCase();
for ( int i = 0; i < cliff; i++ ) {
cur = value.charAt( i );
if ( Character.isWhitespace( cur ) ) {
at_edge = true;
continue;
} else if ( cur == '_' ) {
at_edge = true;
continue;
} else if ( Character.isDigit( cur ) ) {
output_buffer.append( cur );
at_edge = true;
continue;
}
if ( at_edge ) {
output_buffer.append( Character.toUpperCase( cur ) );
if ( Character.isLetter( cur ) ) at_edge = false;
} else {
output_buffer.append( cur );
}
}
return output_buffer.toString();
}
// Escapes names that would collide with Ruby keywords or be parsed as
// constants (capitalized); FILE and LINE get dedicated spellings.
private String label( String value ) {
if ( rubyKeywords.contains( value ) ) {
return platform( value );
} else if ( Character.isUpperCase( value.charAt( 0 ) ) &&
( !value.equals( "FILE" ) ) &&
( !value.equals( "LINE" ) ) ) {
return platform( value );
} else if ( value.equals( "FILE" ) ) {
return "_FILE_";
} else if ( value.equals( "LINE" ) ) {
return "_LINE_";
} else {
return value;
}
}
// camelcase with the first character lower-cased.
private String subcamelcase( String value ) {
value = camelcase( value );
if ( value.length() == 0 )
return value;
Character head = Character.toLowerCase( value.charAt( 0 ) );
String tail = value.substring( 1 );
return head.toString().concat( tail );
}
}
@Override
protected void genRecognizerFile(
Tool tool,
CodeGenerator generator,
Grammar grammar,
ST outputFileST
) throws IOException
{
/*
Below is an experimental attempt at providing a few named action blocks
that are printed in both lexer and parser files from combined grammars.
ANTLR appears to first generate a parser, then generate an independent lexer,
and then generate code from that. It keeps the combo/parser grammar object
and the lexer grammar object, as well as their respective code generator and
target instances, completely independent. So, while a bit hack-ish, this is
a solution that should work without having to modify Terrence Parr's
core tool code.
- sharedActionBlocks is a class variable containing a hash map
- if this method is called with a combo grammar, and the action map
in the grammar contains an entry for the named scope "all",
add an entry to sharedActionBlocks mapping the grammar name to
the "all" action map.
- if this method is called with an `implicit lexer'
(one that's extracted from a combo grammar), check to see if
there's an entry in sharedActionBlocks for the lexer's grammar name.
- if there is an action map entry, place it in the lexer's action map
- the recognizerFile template has code to place the
"all" actions appropriately
problems:
- This solution assumes that the parser will be generated
before the lexer. If that changes at some point, this will
not work.
- I have not investigated how this works with delegation yet
Kyle Yetter - March 25, 2010
*/
if ( grammar.type == Grammar.COMBINED ) {
Map<String, Map<String, Object>> actions = grammar.getActions();
if ( actions.containsKey( "all" ) ) {
sharedActionBlocks.put( grammar.name, actions.get( "all" ) );
}
} else if ( grammar.implicitLexer ) {
if ( sharedActionBlocks.containsKey( grammar.name ) ) {
Map<String, Map<String, Object>> actions = grammar.getActions();
actions.put( "all", sharedActionBlocks.get( grammar.name ) );
}
}
// Install the Ruby renderer for all String attributes before writing.
STGroup group = generator.getTemplates();
RubyRenderer renderer = new RubyRenderer();
try {
group.registerRenderer( Class.forName( "java.lang.String" ), renderer );
} catch ( ClassNotFoundException e ) {
// this shouldn't happen
System.err.println( "ClassNotFoundException: " + e.getMessage() );
e.printStackTrace( System.err );
}
String fileName =
generator.getRecognizerFileName( grammar.name, grammar.type );
generator.write( outputFileST, fileName );
}
/**
 * Converts an ANTLR char literal (e.g. {@code '\n'}) into the Ruby hex
 * integer literal emitted by the templates (e.g. {@code "0xa"}).
 * Unrecognized escapes are reported to stdout and yield {@code "0x0"}.
 */
@Override
public String getTargetCharLiteralFromANTLRCharLiteral(
CodeGenerator generator,
String literal
)
{
int code_point = 0;
// Strip the surrounding single quotes.
literal = literal.substring( 1, literal.length() - 1 );
if ( literal.charAt( 0 ) == '\\' ) {
switch ( literal.charAt( 1 ) ) {
case '\\':
case '"':
case '\'':
code_point = literal.codePointAt( 1 );
break;
case 'n':
code_point = 10;
break;
case 'r':
code_point = 13;
break;
case 't':
code_point = 9;
break;
case 'b':
code_point = 8;
break;
case 'f':
code_point = 12;
break;
case 'u': // Assume unnnn
code_point = Integer.parseInt( literal.substring( 2 ), 16 );
break;
default:
System.out.println( "1: hey you didn't account for this: \"" + literal + "\"" );
break;
}
} else if ( literal.length() == 1 ) {
code_point = literal.codePointAt( 0 );
} else {
System.out.println( "2: hey you didn't account for this: \"" + literal + "\"" );
}
return ( "0x" + Integer.toHexString( code_point ) );
}
@Override
public int getMaxCharValue( CodeGenerator generator )
{
// Versions before 1.9 do not support unicode
return 0xFF;
}
/**
 * Returns the display name for a token type; literal names (quoted) are
 * replaced with a computed token name.
 */
@Override
public String getTokenTypeAsTargetLabel( CodeGenerator generator, int ttype )
{
String name = generator.grammar.getTokenDisplayName( ttype );
// If name is a literal, return the token type instead
if ( name.charAt( 0 )=='\'' ) {
return generator.grammar.computeTokenNameFromLiteral( ttype, name );
}
return name;
}
/**
 * Accepts the named action scopes the Ruby templates understand: the
 * universal scopes (all, token, module, overrides) plus the scopes
 * matching the grammar type.
 */
@Override
public boolean isValidActionScope( int grammarType, String scope ) {
if ( scope.equals( "all" ) ) {
return true;
}
if ( scope.equals( "token" ) ) {
return true;
}
if ( scope.equals( "module" ) ) {
return true;
}
if ( scope.equals( "overrides" ) ) {
return true;
}
switch ( grammarType ) {
case Grammar.LEXER:
if ( scope.equals( "lexer" ) ) {
return true;
}
break;
case Grammar.PARSER:
if ( scope.equals( "parser" ) ) {
return true;
}
break;
case Grammar.COMBINED:
if ( scope.equals( "parser" ) ) {
return true;
}
if ( scope.equals( "lexer" ) ) {
return true;
}
break;
case Grammar.TREE_PARSER:
if ( scope.equals( "treeparser" ) ) {
return true;
}
break;
}
return false;
}
// Emits an int char value for generated tables.  0xFFFF is presumably the
// EOF/invalid-token sentinel and is rendered as Ruby -1 -- TODO(review)
// confirm against the Ruby templates.
@Override
public String encodeIntAsCharEscape( final int v ) {
final int intValue;
if ( v == 65535 ) {
intValue = -1;
} else {
intValue = v;
}
return String.valueOf( intValue );
}
}
|
bsd-3-clause
|
youtube/cobalt
|
third_party/skia/infra/bots/recipe_modules/flavor/ios.py
|
3255
|
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Disable warning about setting self.device_dirs in install(); we need to.
# pylint: disable=W0201
from . import default
"""iOS flavor, used for running code on iOS."""
class iOSFlavor(default.DefaultFlavor):
  """Flavor for running Skia test binaries on attached iOS devices.

  Apps (dm, nanobench) are installed as bundles via ideviceinstaller and
  launched by bundle id with idevice-app-runner; file transfer goes through
  the ios_* helper scripts in platform_tools/ios/bin.
  """

  def __init__(self, m):
    super(iOSFlavor, self).__init__(m)
    # Device-side paths.  bin_dir is unused because binaries are installed
    # as app bundles rather than pushed as loose files.
    self.device_dirs = default.DeviceDirs(
        bin_dir='[unused]',
        dm_dir='dm',
        perf_data_dir='perf',
        resource_dir='resources',
        images_dir='images',
        lotties_dir='lotties',
        skp_dir='skps',
        svg_dir='svgs',
        mskp_dir='mskp',
        tmp_dir='tmp')

  def install(self):
    """Prepare the device and (re)install the dm and nanobench apps."""
    # Set up the device
    self.m.run(self.m.step, 'setup_device', cmd=['ios.py'], infra_step=True)
    # Install the app.
    for app_name in ['dm', 'nanobench']:
      app_package = self.host_dirs.bin_dir.join('%s.app' % app_name)

      def uninstall_app(attempt):
        # If app ID changes, upgrade will fail, so try uninstalling.
        self.m.run(self.m.step,
                   'uninstall_' + app_name,
                   cmd=['ideviceinstaller', '-U',
                        'com.google.%s' % app_name],
                   infra_step=True,
                   # App may not be installed.
                   abort_on_failure=False, fail_build_on_failure=False)

      num_attempts = 2
      self.m.run.with_retry(self.m.step, 'install_' + app_name, num_attempts,
                            cmd=['ideviceinstaller', '-i', app_package],
                            between_attempts_fn=uninstall_app,
                            infra_step=True)

  def step(self, name, cmd, env=None, **kwargs):
    """Run cmd on the device by launching cmd[0]'s app bundle.

    NOTE: env and **kwargs are accepted for interface compatibility with
    other flavors but are not forwarded to the device.
    """
    bundle_id = 'com.google.%s' % cmd[0]
    # BUG FIX: stringify args with a list comprehension instead of map();
    # under Python 3 map() returns an iterator and `list + map object`
    # raises TypeError.  Identical behavior under Python 2.
    args = [str(arg) for arg in cmd[1:]]
    self.m.run(self.m.step, name,
               cmd=['idevice-app-runner', '-s', bundle_id, '--args'] + args)

  def _run_ios_script(self, script, first, *rest):
    """Invoke platform_tools/ios/bin/ios_<script> with the given args."""
    full = self.m.path['start_dir'].join(
        'skia', 'platform_tools', 'ios', 'bin', 'ios_' + script)
    self.m.run(self.m.step,
               name='%s %s' % (script, first),
               cmd=[full, first] + list(rest),
               infra_step=True)

  def copy_file_to_device(self, host, device):
    self._run_ios_script('push_file', host, device)

  def copy_directory_contents_to_device(self, host, device):
    self._run_ios_script('push_if_needed', host, device)

  def copy_directory_contents_to_host(self, device, host):
    self._run_ios_script('pull_if_needed', device, host)

  def remove_file_on_device(self, path):
    self._run_ios_script('rm', path)

  def create_clean_device_dir(self, path):
    # Remove then recreate to guarantee an empty directory.
    self._run_ios_script('rm', path)
    self._run_ios_script('mkdir', path)

  def read_file_on_device(self, path, **kwargs):
    """Return the contents of path on the device, or None on failure."""
    full = self.m.path['start_dir'].join(
        'skia', 'platform_tools', 'ios', 'bin', 'ios_cat_file')
    rv = self.m.run(self.m.step,
                    name='cat_file %s' % path,
                    cmd=[full, path],
                    stdout=self.m.raw_io.output(),
                    infra_step=True,
                    **kwargs)
    return rv.stdout.rstrip() if rv and rv.stdout else None
|
bsd-3-clause
|
ric2b/Vivaldi-browser
|
chromium/media/ffmpeg/ffmpeg_regression_tests.cc
|
18981
|
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// Regression tests for FFmpeg. Test files can be found in the internal media
// test data directory:
//
// https://chrome-internal.googlesource.com/chrome/data/media
//
// Simply add the custom_dep below to your gclient and sync:
//
// "src/media/test/data/internal":
// "https://chrome-internal.googlesource.com/chrome/data/media"
//
// Many of the files here do not cause issues outside of tooling, so you'll need
// to run this test under ASAN, TSAN, and Valgrind to ensure that all issues are
// caught.
//
// Test cases labeled FLAKY may not always pass, but they should never crash or
// cause any kind of warnings or errors under tooling.
#include <string>
#include "base/bind.h"
#include "media/test/pipeline_integration_test_base.h"
namespace media {
// All regression files live under the internal (non-public) media test data
// directory; see the file header comment for the gclient dep that checks
// it out.
const char kRegressionTestDataPathPrefix[] = "internal/";
// Parameters for one FFmpeg regression test: the media file plus the
// expected pipeline statuses.
struct RegressionTestData {
  // Prepends the internal test-data path prefix to |filename|.
  RegressionTestData(const char* filename,
                     PipelineStatus init_status,
                     PipelineStatus end_status,
                     base::TimeDelta seek_time)
      : filename(std::string(kRegressionTestDataPathPrefix) + filename),
        init_status(init_status),
        end_status(end_status),
        seek_time(seek_time) {}

  std::string filename;
  // Expected status when initializing the pipeline.
  PipelineStatus init_status;
  // Expected status once playback finishes (or fails).
  PipelineStatus end_status;
  // |seek_time| is the time to seek to at the end of the test if the pipeline
  // successfully reaches that point in the test. If kNoTimestamp, the actual
  // seek time will be GetStartTime().
  base::TimeDelta seek_time;
};
// Used for tests which just need to run without crashing or tooling errors, but
// which may have undefined PipelineStatus results.
struct FlakyRegressionTestData {
  // Prepends the internal test-data path prefix to |filename|.
  FlakyRegressionTestData(const char* filename)
      : filename(std::string(kRegressionTestDataPathPrefix) + filename) {
  }

  std::string filename;
};
// Parameterized fixture for tests with well-defined expected statuses.
class FFmpegRegressionTest
    : public testing::TestWithParam<RegressionTestData>,
      public PipelineIntegrationTestBase {
};
// Parameterized fixture for tests whose status is undefined; they only need
// to avoid crashes and tooling (ASAN/TSAN/Valgrind) errors.
class FlakyFFmpegRegressionTest
    : public testing::TestWithParam<FlakyRegressionTestData>,
      public PipelineIntegrationTestBase {
};
// Registers a regression test with an explicit post-playback seek time.
#define FFMPEG_TEST_CASE_SEEKING(name, fn, init_status, end_status, seek_time) \
  INSTANTIATE_TEST_SUITE_P(name, FFmpegRegressionTest, \
                           testing::Values(RegressionTestData( \
                               fn, init_status, end_status, seek_time)))
// Registers a regression test that seeks to the stream start time
// (kNoTimestamp => GetStartTime(); see RegressionTestData::seek_time).
#define FFMPEG_TEST_CASE(name, fn, init_status, end_status) \
  FFMPEG_TEST_CASE_SEEKING(name, fn, init_status, end_status, kNoTimestamp)
// Registers a flaky test: it must only run without crashes/tooling errors.
#define FLAKY_FFMPEG_TEST_CASE(name, fn) \
  INSTANTIATE_TEST_SUITE_P(FLAKY_##name, FlakyFFmpegRegressionTest, \
                           testing::Values(FlakyRegressionTestData(fn)))
// Test cases from issues.
FFMPEG_TEST_CASE(Cr47325, "security/47325.mp4", PIPELINE_OK, PIPELINE_OK);
FFMPEG_TEST_CASE(Cr47761, "crbug47761.ogg", PIPELINE_OK, PIPELINE_OK);
FFMPEG_TEST_CASE(Cr50045, "crbug50045.mp4", PIPELINE_OK, PIPELINE_OK);
FFMPEG_TEST_CASE(Cr62127,
"crbug62127.webm",
PIPELINE_ERROR_DECODE,
PIPELINE_ERROR_DECODE);
FFMPEG_TEST_CASE(Cr93620, "security/93620.ogg", PIPELINE_OK, PIPELINE_OK);
FFMPEG_TEST_CASE(Cr100492,
"security/100492.webm",
DECODER_ERROR_NOT_SUPPORTED,
DECODER_ERROR_NOT_SUPPORTED);
FFMPEG_TEST_CASE(Cr100543, "security/100543.webm", PIPELINE_OK, PIPELINE_OK);
FFMPEG_TEST_CASE(Cr101458,
"security/101458.webm",
PIPELINE_ERROR_DECODE,
PIPELINE_ERROR_DECODE);
FFMPEG_TEST_CASE(Cr108416, "security/108416.webm", PIPELINE_OK, PIPELINE_OK);
FFMPEG_TEST_CASE(Cr110849,
"security/110849.mkv",
DEMUXER_ERROR_COULD_NOT_OPEN,
DEMUXER_ERROR_NO_SUPPORTED_STREAMS);
FFMPEG_TEST_CASE(Cr112384,
"security/112384.webm",
DEMUXER_ERROR_COULD_NOT_PARSE,
DEMUXER_ERROR_COULD_NOT_PARSE);
FFMPEG_TEST_CASE(Cr112976,
"security/112976.ogg",
PIPELINE_ERROR_DECODE,
PIPELINE_ERROR_DECODE);
FFMPEG_TEST_CASE(Cr116927,
"security/116927.ogv",
DEMUXER_ERROR_NO_SUPPORTED_STREAMS,
DEMUXER_ERROR_NO_SUPPORTED_STREAMS);
FFMPEG_TEST_CASE(Cr117912,
"security/117912.webm",
DEMUXER_ERROR_COULD_NOT_OPEN,
DEMUXER_ERROR_COULD_NOT_OPEN);
FFMPEG_TEST_CASE(Cr123481, "security/123481.ogv", PIPELINE_OK, PIPELINE_OK);
FFMPEG_TEST_CASE(Cr132779,
"security/132779.webm",
DEMUXER_ERROR_COULD_NOT_PARSE,
DEMUXER_ERROR_COULD_NOT_PARSE);
FFMPEG_TEST_CASE(Cr140165,
"security/140165.ogg",
DEMUXER_ERROR_COULD_NOT_PARSE,
DEMUXER_ERROR_COULD_NOT_PARSE);
FFMPEG_TEST_CASE(Cr140647,
"security/140647.ogv",
DEMUXER_ERROR_COULD_NOT_OPEN,
DEMUXER_ERROR_COULD_NOT_OPEN);
FFMPEG_TEST_CASE(Cr142738,
"crbug142738.ogg",
DEMUXER_ERROR_COULD_NOT_PARSE,
DEMUXER_ERROR_COULD_NOT_PARSE);
FFMPEG_TEST_CASE(Cr152691,
"security/152691.mp3",
PIPELINE_ERROR_DECODE,
PIPELINE_ERROR_DECODE);
FFMPEG_TEST_CASE(Cr161639,
"security/161639.m4a",
PIPELINE_ERROR_DECODE,
PIPELINE_ERROR_DECODE);
FFMPEG_TEST_CASE(Cr222754,
"security/222754.mp4",
DEMUXER_ERROR_NO_SUPPORTED_STREAMS,
DEMUXER_ERROR_NO_SUPPORTED_STREAMS);
FFMPEG_TEST_CASE(Cr234630a, "security/234630a.mov", PIPELINE_OK, PIPELINE_OK);
FFMPEG_TEST_CASE(Cr234630b,
"security/234630b.mov",
DEMUXER_ERROR_NO_SUPPORTED_STREAMS,
DEMUXER_ERROR_NO_SUPPORTED_STREAMS);
FFMPEG_TEST_CASE(Cr242786, "security/242786.webm", PIPELINE_OK, PIPELINE_OK);
// Test for out-of-bounds access with slightly corrupt file (detection logic
// thinks it's a MONO file, but actually contains STEREO audio).
FFMPEG_TEST_CASE(Cr275590,
"security/275590.m4a",
PIPELINE_ERROR_DECODE,
PIPELINE_ERROR_DECODE);
FFMPEG_TEST_CASE(Cr444522,
"security/444522.mp4",
DEMUXER_ERROR_COULD_NOT_OPEN,
DEMUXER_ERROR_COULD_NOT_OPEN);
FFMPEG_TEST_CASE(Cr444539,
"security/444539.m4a",
DEMUXER_ERROR_COULD_NOT_OPEN,
DEMUXER_ERROR_COULD_NOT_OPEN);
FFMPEG_TEST_CASE(Cr444546,
"security/444546.mp4",
DEMUXER_ERROR_COULD_NOT_OPEN,
DEMUXER_ERROR_COULD_NOT_OPEN);
FFMPEG_TEST_CASE(Cr447860,
"security/447860.webm",
PIPELINE_ERROR_DECODE,
PIPELINE_ERROR_DECODE);
FFMPEG_TEST_CASE(Cr449958,
"security/449958.webm",
PIPELINE_OK,
PIPELINE_ERROR_DECODE);
FFMPEG_TEST_CASE(Cr536601,
"security/536601.m4a",
PIPELINE_ERROR_DECODE,
PIPELINE_ERROR_DECODE);
FFMPEG_TEST_CASE(Cr532967,
"security/532967.webm",
PIPELINE_ERROR_DECODE,
PIPELINE_ERROR_DECODE);
// TODO(tguilbert): update PIPELINE_ERROR_DECODE to
// AUDIO_RENDERER_ERROR_IMPLICIT_CONFIG_CHANGE once the status is created.
FFMPEG_TEST_CASE(Cr599625,
"security/599625.mp4",
PIPELINE_ERROR_DECODE,
PIPELINE_ERROR_DECODE);
FFMPEG_TEST_CASE(Cr635422,
"security/635422.ogg",
DEMUXER_ERROR_COULD_NOT_OPEN,
DEMUXER_ERROR_COULD_NOT_OPEN);
FFMPEG_TEST_CASE(Cr637428, "security/637428.ogg", PIPELINE_OK, PIPELINE_OK);
FFMPEG_TEST_CASE(Cr639961,
"security/639961.flac",
PIPELINE_ERROR_INITIALIZATION_FAILED,
PIPELINE_ERROR_INITIALIZATION_FAILED);
FFMPEG_TEST_CASE(Cr640889,
"security/640889.flac",
PIPELINE_ERROR_DECODE,
PIPELINE_ERROR_DECODE);
FFMPEG_TEST_CASE(Cr640912,
"security/640912.flac",
PIPELINE_ERROR_DECODE,
PIPELINE_ERROR_DECODE);
// TODO(liberato): before crbug.com/658440 was fixed, this would fail if run
// twice under ASAN. If run once, then it doesn't. However, it still catches
// issues in crbug.com/662118, so it's included anyway.
FFMPEG_TEST_CASE(Cr658440, "security/658440.flac", PIPELINE_OK, PIPELINE_OK);
FFMPEG_TEST_CASE(Cr665305,
"crbug665305.flac",
DEMUXER_ERROR_COULD_NOT_PARSE,
DEMUXER_ERROR_COULD_NOT_PARSE);
FFMPEG_TEST_CASE_SEEKING(Cr666770,
"security/666770.mp4",
PIPELINE_ERROR_DECODE,
PIPELINE_ERROR_DECODE,
base::TimeDelta::FromSecondsD(0.0843));
FFMPEG_TEST_CASE(Cr666874,
"security/666874.mp3",
DEMUXER_ERROR_COULD_NOT_OPEN,
DEMUXER_ERROR_COULD_NOT_OPEN);
FFMPEG_TEST_CASE(Cr667063, "security/667063.mp4", PIPELINE_OK, PIPELINE_OK);
FFMPEG_TEST_CASE(Cr668346,
"security/668346.flac",
PIPELINE_ERROR_INITIALIZATION_FAILED,
PIPELINE_ERROR_INITIALIZATION_FAILED);
FFMPEG_TEST_CASE(Cr670190,
"security/670190.ogg",
DECODER_ERROR_NOT_SUPPORTED,
DECODER_ERROR_NOT_SUPPORTED);
// General MP4 test cases.
FFMPEG_TEST_CASE(MP4_0,
"security/aac.10419.mp4",
DEMUXER_ERROR_COULD_NOT_OPEN,
DEMUXER_ERROR_COULD_NOT_OPEN);
FFMPEG_TEST_CASE(MP4_1,
"security/clockh264aac_200021889.mp4",
DEMUXER_ERROR_COULD_NOT_OPEN,
DEMUXER_ERROR_COULD_NOT_OPEN);
FFMPEG_TEST_CASE(MP4_2,
"security/clockh264aac_200701257.mp4",
PIPELINE_ERROR_DECODE,
PIPELINE_ERROR_DECODE);
FFMPEG_TEST_CASE(MP4_5,
"security/clockh264aac_3022500.mp4",
DEMUXER_ERROR_NO_SUPPORTED_STREAMS,
DEMUXER_ERROR_NO_SUPPORTED_STREAMS);
FFMPEG_TEST_CASE(MP4_6,
"security/clockh264aac_344289.mp4",
PIPELINE_ERROR_DECODE,
PIPELINE_ERROR_DECODE);
FFMPEG_TEST_CASE(MP4_7,
"security/clockh264mp3_187697.mp4",
PIPELINE_OK,
PIPELINE_OK);
FFMPEG_TEST_CASE(MP4_8,
"security/h264.705767.mp4",
DEMUXER_ERROR_COULD_NOT_PARSE,
DEMUXER_ERROR_COULD_NOT_PARSE);
FFMPEG_TEST_CASE(MP4_9,
"security/smclockmp4aac_1_0.mp4",
DEMUXER_ERROR_COULD_NOT_OPEN,
DEMUXER_ERROR_COULD_NOT_OPEN);
FFMPEG_TEST_CASE(MP4_11, "security/null1.mp4", PIPELINE_OK, PIPELINE_OK);
FFMPEG_TEST_CASE(MP4_16,
"security/looping2.mov",
DEMUXER_ERROR_COULD_NOT_OPEN,
DEMUXER_ERROR_COULD_NOT_OPEN);
FFMPEG_TEST_CASE(MP4_17, "security/assert2.mov", PIPELINE_OK, PIPELINE_OK);
// This test is a valid file, so should always pass correctly.
FFMPEG_TEST_CASE(MP4_18,
"security/negative_timestamp.mp4",
PIPELINE_OK,
PIPELINE_OK);
// General OGV test cases.
FFMPEG_TEST_CASE(OGV_1,
"security/out.163.ogv",
DECODER_ERROR_NOT_SUPPORTED,
DECODER_ERROR_NOT_SUPPORTED);
FFMPEG_TEST_CASE(OGV_2,
"security/out.391.ogv",
DECODER_ERROR_NOT_SUPPORTED,
DECODER_ERROR_NOT_SUPPORTED);
FFMPEG_TEST_CASE(OGV_5,
"security/smclocktheora_1_0.ogv",
DECODER_ERROR_NOT_SUPPORTED,
DECODER_ERROR_NOT_SUPPORTED);
FFMPEG_TEST_CASE(OGV_7,
"security/smclocktheora_1_102.ogv",
DECODER_ERROR_NOT_SUPPORTED,
DECODER_ERROR_NOT_SUPPORTED);
FFMPEG_TEST_CASE(OGV_8,
"security/smclocktheora_1_104.ogv",
DECODER_ERROR_NOT_SUPPORTED,
DECODER_ERROR_NOT_SUPPORTED);
FFMPEG_TEST_CASE(OGV_9,
"security/smclocktheora_1_110.ogv",
DECODER_ERROR_NOT_SUPPORTED,
DECODER_ERROR_NOT_SUPPORTED);
FFMPEG_TEST_CASE(OGV_10,
"security/smclocktheora_1_179.ogv",
DECODER_ERROR_NOT_SUPPORTED,
DECODER_ERROR_NOT_SUPPORTED);
FFMPEG_TEST_CASE(OGV_11,
"security/smclocktheora_1_20.ogv",
DECODER_ERROR_NOT_SUPPORTED,
DECODER_ERROR_NOT_SUPPORTED);
FFMPEG_TEST_CASE(OGV_12,
"security/smclocktheora_1_723.ogv",
DECODER_ERROR_NOT_SUPPORTED,
DECODER_ERROR_NOT_SUPPORTED);
FFMPEG_TEST_CASE(OGV_14,
"security/smclocktheora_2_10405.ogv",
PIPELINE_ERROR_DECODE,
PIPELINE_ERROR_DECODE);
FFMPEG_TEST_CASE(OGV_15,
"security/smclocktheora_2_10619.ogv",
DECODER_ERROR_NOT_SUPPORTED,
DECODER_ERROR_NOT_SUPPORTED);
FFMPEG_TEST_CASE(OGV_16,
"security/smclocktheora_2_1075.ogv",
DECODER_ERROR_NOT_SUPPORTED,
DECODER_ERROR_NOT_SUPPORTED);
FFMPEG_TEST_CASE(OGV_17,
"security/vorbis.482086.ogv",
DEMUXER_ERROR_COULD_NOT_OPEN,
DEMUXER_ERROR_COULD_NOT_OPEN);
FFMPEG_TEST_CASE(OGV_18,
"security/wav.711.ogv",
DECODER_ERROR_NOT_SUPPORTED,
DECODER_ERROR_NOT_SUPPORTED);
FFMPEG_TEST_CASE(OGV_19,
"security/null1.ogv",
DECODER_ERROR_NOT_SUPPORTED,
DECODER_ERROR_NOT_SUPPORTED);
FFMPEG_TEST_CASE(OGV_20,
"security/null2.ogv",
DECODER_ERROR_NOT_SUPPORTED,
DECODER_ERROR_NOT_SUPPORTED);
FFMPEG_TEST_CASE(OGV_21,
"security/assert1.ogv",
DECODER_ERROR_NOT_SUPPORTED,
DECODER_ERROR_NOT_SUPPORTED);
FFMPEG_TEST_CASE(OGV_22,
"security/assert2.ogv",
DECODER_ERROR_NOT_SUPPORTED,
DECODER_ERROR_NOT_SUPPORTED);
FFMPEG_TEST_CASE(OGV_23,
"security/assert2.ogv",
DECODER_ERROR_NOT_SUPPORTED,
DECODER_ERROR_NOT_SUPPORTED);
// General WebM test cases.
FFMPEG_TEST_CASE(WEBM_0, "security/memcpy.webm", PIPELINE_OK, PIPELINE_OK);
FFMPEG_TEST_CASE(WEBM_1, "security/no-bug.webm", PIPELINE_OK, PIPELINE_OK);
FFMPEG_TEST_CASE(WEBM_2,
"security/uninitialize.webm",
DEMUXER_ERROR_NO_SUPPORTED_STREAMS,
DEMUXER_ERROR_NO_SUPPORTED_STREAMS);
FFMPEG_TEST_CASE(WEBM_4,
"security/out.webm.68798.1929",
DECODER_ERROR_NOT_SUPPORTED,
DECODER_ERROR_NOT_SUPPORTED);
FFMPEG_TEST_CASE(WEBM_5, "frame_size_change.webm", PIPELINE_OK, PIPELINE_OK);
// General MKV test cases.
FFMPEG_TEST_CASE(MKV_0,
"security/nested_tags_lang.mka.627.628",
PIPELINE_OK,
PIPELINE_ERROR_DECODE);
FFMPEG_TEST_CASE(MKV_1,
"security/nested_tags_lang.mka.667.628",
PIPELINE_OK,
PIPELINE_ERROR_DECODE);
// Allocate gigabytes of memory, likely can't be run on 32bit machines.
FFMPEG_TEST_CASE(BIG_MEM_1,
"security/bigmem1.mov",
DEMUXER_ERROR_COULD_NOT_OPEN,
DEMUXER_ERROR_COULD_NOT_OPEN);
FFMPEG_TEST_CASE(BIG_MEM_2,
"security/looping1.mov",
DEMUXER_ERROR_COULD_NOT_OPEN,
DEMUXER_ERROR_COULD_NOT_OPEN);
FFMPEG_TEST_CASE(BIG_MEM_5,
"security/looping5.mov",
DEMUXER_ERROR_COULD_NOT_OPEN,
DEMUXER_ERROR_COULD_NOT_OPEN);
FLAKY_FFMPEG_TEST_CASE(BIG_MEM_3, "security/looping3.mov");
FLAKY_FFMPEG_TEST_CASE(BIG_MEM_4, "security/looping4.mov");
// Flaky under threading or for other reasons. Per rbultje, most of these will
// never be reliable since FFmpeg does not guarantee consistency in error cases.
// We only really care that these don't cause crashes or errors under tooling.
FLAKY_FFMPEG_TEST_CASE(Cr99652, "security/99652.webm");
FLAKY_FFMPEG_TEST_CASE(Cr100464, "security/100464.webm");
FLAKY_FFMPEG_TEST_CASE(Cr111342, "security/111342.ogm");
FLAKY_FFMPEG_TEST_CASE(Cr368980, "security/368980.mp4");
FLAKY_FFMPEG_TEST_CASE(OGV_0, "security/big_dims.ogv");
FLAKY_FFMPEG_TEST_CASE(OGV_3, "security/smclock_1_0.ogv");
FLAKY_FFMPEG_TEST_CASE(OGV_4, "security/smclock.ogv.1.0.ogv");
FLAKY_FFMPEG_TEST_CASE(OGV_6, "security/smclocktheora_1_10000.ogv");
FLAKY_FFMPEG_TEST_CASE(OGV_13, "security/smclocktheora_1_790.ogv");
FLAKY_FFMPEG_TEST_CASE(MP4_3, "security/clockh264aac_300413969.mp4");
FLAKY_FFMPEG_TEST_CASE(MP4_4, "security/clockh264aac_301350139.mp4");
FLAKY_FFMPEG_TEST_CASE(MP4_12, "security/assert1.mov");
FLAKY_FFMPEG_TEST_CASE(WEBM_3, "security/out.webm.139771.2965");
// Init status flakes between PIPELINE_OK and PIPELINE_ERROR_DECODE, and gives
// PIPELINE_ERROR_DECODE later if initialization was PIPELINE_OK.
FLAKY_FFMPEG_TEST_CASE(Cr666794, "security/666794.webm");
// Not really flaky, but can't pass the seek test.
FLAKY_FFMPEG_TEST_CASE(MP4_10, "security/null1.m4a");
FLAKY_FFMPEG_TEST_CASE(Cr112670, "security/112670.mp4");
// Uses ASSERTs to prevent sharded tests from hanging on failure.
TEST_P(FFmpegRegressionTest, BasicPlayback) {
  const auto& test_case = GetParam();
  if (test_case.init_status != PIPELINE_OK) {
    // Initialization is expected to fail; the exact error code is not
    // stable across FFmpeg revisions, so only verify that startup failed.
    EXPECT_NE(PIPELINE_OK, Start(test_case.filename, kUnreliableDuration));
    return;
  }
  ASSERT_EQ(PIPELINE_OK, Start(test_case.filename, kUnreliableDuration));
  Play();
  ASSERT_EQ(test_case.end_status, WaitUntilEndedOrError());
  if (test_case.end_status != PIPELINE_OK)
    return;
  // Playback is expected to finish okay; confirm we actually ended.
  ASSERT_TRUE(ended_);
  // Tack a seek on the end to catch any seeking issues.
  Seek(test_case.seek_time == kNoTimestamp ? GetStartTime()
                                           : test_case.seek_time);
}
// Flaky cases: success is simply "no crash"; any pipeline status is fine.
TEST_P(FlakyFFmpegRegressionTest, BasicPlayback) {
  if (Start(GetParam().filename, kUnreliableDuration) != PIPELINE_OK)
    return;
  Play();
  WaitUntilEndedOrError();
}
} // namespace media
|
bsd-3-clause
|
caiges/populous
|
populous/podcasts/urls.py
|
956
|
# URLconf for the populous.podcasts app: show listing, per-show episode
# lists, three feed flavours, a sitemap, and episode detail pages.
#
# NOTE(review): ``django.conf.urls.defaults`` was removed in Django 1.6 and
# the string ``view=`` arguments rely on the prefix form of ``patterns()``;
# confirm the Django version this app targets.
from django.conf.urls.defaults import *
urlpatterns = patterns('populous.podcasts.views',
    # Show list of all shows
    url(r'^$', view='show_list', name='podcast_shows'),
    # Episode list of one show
    url(r'^(?P<slug>[-\w]+)/$', view='episode_list', name='podcast_episodes'),
    # Episode list feed by show (RSS 2.0 and iTunes)
    url(r'^(?P<slug>[-\w]+)/feed/$', view='show_list_feed', name='podcast_feed'),
    # Episode list feed by show (Atom)
    url(r'^(?P<slug>[-\w]+)/atom/$', view='show_list_atom', name='podcast_atom'),
    # Episode list feed by show (Media RSS)
    url(r'^(?P<slug>[-\w]+)/media/$', view='show_list_media', name='podcast_media'),
    # Episode sitemap list of one show
    url(r'^(?P<slug>[-\w]+)/sitemap.xml$', view='episode_sitemap', name='podcast_sitemap'),
    # Episode detail of one show
    url(r'^(?P<show_slug>[-\w]+)/(?P<episode_slug>[-\w]+)/$', view='episode_detail', name='podcast_episode'),
)
|
bsd-3-clause
|
prooph/pdo-event-store
|
tests/Projection/MariaDbProjectionManagerTest.php
|
4117
|
<?php
/**
* This file is part of prooph/pdo-event-store.
* (c) 2016-2021 Alexander Miertsch <kontakt@codeliner.ws>
* (c) 2016-2021 Sascha-Oliver Prolic <saschaprolic@googlemail.com>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
declare(strict_types=1);
namespace ProophTest\EventStore\Pdo\Projection;
use PDO;
use Prooph\Common\Messaging\FQCNMessageFactory;
use Prooph\EventStore\EventStore;
use Prooph\EventStore\EventStoreDecorator;
use Prooph\EventStore\Pdo\Exception\InvalidArgumentException;
use Prooph\EventStore\Pdo\Exception\RuntimeException;
use Prooph\EventStore\Pdo\MariaDbEventStore;
use Prooph\EventStore\Pdo\PersistenceStrategy\MariaDbPersistenceStrategy;
use Prooph\EventStore\Pdo\Projection\MariaDbProjectionManager;
use ProophTest\EventStore\Pdo\TestUtil;
use ProophTest\EventStore\Projection\AbstractProjectionManagerTest;
use Prophecy\PhpUnit\ProphecyTrait;
/**
 * Integration tests for {@see MariaDbProjectionManager}, run against a real
 * MariaDB/MySQL database through the pdo_mysql driver.  Shared behavioural
 * tests are inherited from AbstractProjectionManagerTest.
 *
 * @group mariadb
 */
class MariaDbProjectionManagerTest extends AbstractProjectionManagerTest
{
    use ProphecyTrait;

    /**
     * Manager under test, created fresh for every test in setUp().
     *
     * @var MariaDbProjectionManager
     */
    protected $projectionManager;

    /**
     * Event store backing the projection manager.
     *
     * @var MariaDbEventStore
     */
    private $eventStore;

    /**
     * Shared PDO connection to the test database.
     *
     * @var PDO
     */
    private $connection;

    /**
     * Re-declared only to tag the inherited test @large (longer duration).
     *
     * @test
     * @large
     */
    public function it_fetches_projection_names(): void
    {
        // Overwrite parent test for different test duration
        parent::it_fetches_projection_names();
    }

    /**
     * Re-declared only to tag the inherited test @large (longer duration).
     *
     * @test
     * @large
     */
    public function it_fetches_projection_names_using_regex(): void
    {
        // Overwrite parent test for different test duration
        parent::it_fetches_projection_names_using_regex();
    }

    // Creates the default schema and wires a MariaDbEventStore with a
    // prophesized persistence strategy before each test.
    protected function setUp(): void
    {
        if (TestUtil::getDatabaseDriver() !== 'pdo_mysql') {
            throw new \RuntimeException('Invalid database driver');
        }
        $this->connection = TestUtil::getConnection();
        TestUtil::initDefaultDatabaseTables($this->connection);
        $persistenceStrategy = $this->prophesize(MariaDbPersistenceStrategy::class)->reveal();
        $this->eventStore = new MariaDbEventStore(
            new FQCNMessageFactory(),
            $this->connection,
            $persistenceStrategy
        );
        $this->projectionManager = new MariaDbProjectionManager($this->eventStore, $this->connection);
    }

    // Drops the tables created by setUp() so tests stay independent.
    protected function tearDown(): void
    {
        TestUtil::tearDownDatabase();
    }

    /**
     * Constructor must reject event stores that are not MariaDbEventStore.
     *
     * @test
     */
    public function it_throws_exception_when_invalid_event_store_instance_passed(): void
    {
        $this->expectException(\Prooph\EventStore\Exception\InvalidArgumentException::class);
        $eventStore = $this->prophesize(EventStore::class);
        new MariaDbProjectionManager($eventStore->reveal(), $this->connection);
    }

    /**
     * Decorated event stores are unwrapped before the type check; the inner
     * store must still be a MariaDbEventStore.
     *
     * @test
     */
    public function it_throws_exception_when_invalid_wrapped_event_store_instance_passed(): void
    {
        $this->expectException(InvalidArgumentException::class);
        $eventStore = $this->prophesize(EventStore::class);
        $wrappedEventStore = $this->prophesize(EventStoreDecorator::class);
        $wrappedEventStore->getInnerEventStore()->willReturn($eventStore->reveal())->shouldBeCalled();
        new MariaDbProjectionManager($wrappedEventStore->reveal(), $this->connection);
    }

    /**
     * A missing `projections` table surfaces as a RuntimeException.
     *
     * @test
     */
    public function it_throws_exception_when_fetching_projecton_names_with_missing_db_table(): void
    {
        $this->expectException(RuntimeException::class);
        $this->connection->exec('DROP TABLE projections;');
        $this->projectionManager->fetchProjectionNames(null, 200, 0);
    }

    /**
     * Same as above for the regex variant of the lookup.
     *
     * @test
     */
    public function it_throws_exception_when_fetching_projecton_names_regex_with_missing_db_table(): void
    {
        $this->expectException(RuntimeException::class);
        $this->connection->exec('DROP TABLE projections;');
        $this->projectionManager->fetchProjectionNamesRegex('^foo', 200, 0);
    }
}
|
bsd-3-clause
|
myles/django-issues
|
src/issues/testsettings.py
|
404
|
# Minimal Django settings used when running the issues app's test suite.

DEBUG = True
# Fix: Django reads TEMPLATE_DEBUG; the original name DEBUG_TEMPLATE was a
# typo Django silently ignores.  The old name is kept for anything that may
# have imported it.
TEMPLATE_DEBUG = True
DEBUG_TEMPLATE = TEMPLATE_DEBUG

SITE_ID = 1

DATABASE_ENGINE = 'sqlite3'
DATABASE_NAME = '/tmp/django-issues-devel.db'

INSTALLED_APPS = [
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.sites',
    'django.contrib.admin',
    'django.contrib.comments',  # fix: was listed twice; duplicate removed
    'django.contrib.admindocs',
    'issues',
]

ROOT_URLCONF = 'issues.testurls'
|
bsd-3-clause
|
antony-wilson/ceda_opensearch
|
docs/autogen/html/classceda__opensearch_1_1views_1_1OpenSearch.js
|
387
|
// Doxygen auto-generated navigation data for the OpenSearch view class page.
// Each entry is [member name, target anchor URL, child entries (null = leaf)].
// Do not edit by hand; regenerate the docs instead.
var classceda__opensearch_1_1views_1_1OpenSearch =
[
    [ "dispatch", "classceda__opensearch_1_1views_1_1OpenSearch.html#a7123a1c4be17f67395c85b885fa3f968", null ],
    [ "get", "classceda__opensearch_1_1views_1_1OpenSearch.html#a483dbafbf3fc0ef4fb07b1d75e558e4c", null ],
    [ "options", "classceda__opensearch_1_1views_1_1OpenSearch.html#a32a7bf385b20e0d900a9acd419bc80f3", null ]
];
|
bsd-3-clause
|
chandanachk/rakiya_maga
|
views/site/index.php
|
1687
|
<?php
/* @var $this yii\web\View */
// Landing page for the job site: placeholder hero content plus a search
// form whose button triggers the AJAX handler registered at the bottom.
$this->title = 'JOB SEARCH';
?>
<?php
use yii\helpers\Html;
use yii\widgets\ActiveForm;
use yii\helpers\Url;

// Build the search form shell; the actual field is rendered below.
$form = ActiveForm::begin([
    'id' => 'active-form',
    'options' => [
        'class' => 'form-horizontal',
        'enctype' => 'multipart/form-data'
    ],
])
/* ADD FORM FIELDS */
?>
<div class="site-index">
    <div class="jumbotron">
        <h1>JOB SITE HOME PAGE</h1>
        <p class="lead">COMMING SOON...</p>
        <!-- <p><a class="btn btn-lg btn-success" href="">SEARCH JOB</a></p> -->
    </div>
    <div class="control-group buttons">
        <?php
        // Text input bound to $model->name, used as the search term.
        echo $form->field($model, 'name')->textInput()->hint('Search Job')->label('Search');
        ?>
        <div class="form-actions">
            <?php echo Html::button( $content = 'Button', $options = ['id'=>'btnSearch','class'=>'btnSearch'] ); ?>
        </div>
    </div>
    <?php ActiveForm::end();?>
    <div class="body-content">
        <div class="row">
            <div class="col-lg-4">
            </div>
            <div class="col-lg-4">
            </div>
            <div class="col-lg-4">
            </div>
        </div>
    </div>
</div>
<?php
// Click handler for the search button, registered via registerJs().
// NOTE(review): inside the handler `form` is the clicked *button*, not the
// <form> element, so form.attr('action') is undefined and the POST goes to
// the current URL -- confirm that is the intended endpoint.
$script = <<< JS
$('#btnSearch').on('click', function(e) {
    alert("sdfgdsf");
    var form = $(this);
    $.ajax({
        url:form.attr('action'),
        type:'POST',
        contentType: "application/json; charset=utf-8",
        dataType: "json",
        data: {id: '12321', 'other': 'dfsdfsf'},
        success: function(data) {
            alert(data);
        }
    });
});
JS;
$this->registerJs($script);
?>
|
bsd-3-clause
|
uonafya/jphes-core
|
dhis-2/dhis-services/dhis-service-dxf2/src/main/java/org/hisp/dhis/dxf2/events/trackedentity/TrackedEntityInstanceService.java
|
4452
|
package org.hisp.dhis.dxf2.events.trackedentity;
/*
* Copyright (c) 2004-2016, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import org.hisp.dhis.dxf2.common.ImportOptions;
import org.hisp.dhis.dxf2.importsummary.ImportSummaries;
import org.hisp.dhis.dxf2.importsummary.ImportSummary;
import org.hisp.dhis.trackedentity.TrackedEntityInstanceQueryParams;
import java.io.IOException;
import java.io.InputStream;
import java.util.List;
/**
 * DXF2 import/export service for tracked entity instances: read access plus
 * XML/JSON/object-based create, update and delete operations, each reporting
 * results through {@link ImportSummary} / {@link ImportSummaries}.
 *
 * @author Morten Olav Hansen &lt;mortenoh@gmail.com&gt;
 */
public interface TrackedEntityInstanceService
{
    /** Number of instances to import between session flushes. */
    int FLUSH_FREQUENCY = 50;

    // -------------------------------------------------------------------------
    // READ
    // -------------------------------------------------------------------------

    /** Returns tracked entity instances matching the given query parameters. */
    List<TrackedEntityInstance> getTrackedEntityInstances( TrackedEntityInstanceQueryParams params );

    /** Returns the number of instances matching the given query parameters. */
    int getTrackedEntityInstanceCount( TrackedEntityInstanceQueryParams params );

    /** Looks up a single instance by its UID. */
    TrackedEntityInstance getTrackedEntityInstance( String uid );

    /** Converts a persistence-layer instance to its DXF2 representation. */
    TrackedEntityInstance getTrackedEntityInstance( org.hisp.dhis.trackedentity.TrackedEntityInstance entityInstance );

    /**
     * Converts a persistence-layer instance to its DXF2 representation.
     *
     * @param expandRelative presumably controls inline expansion of related
     *        objects -- TODO confirm exact semantics against the implementation.
     */
    TrackedEntityInstance getTrackedEntityInstance( org.hisp.dhis.trackedentity.TrackedEntityInstance entityInstance, boolean expandRelative );

    // -------------------------------------------------------------------------
    // CREATE
    // -------------------------------------------------------------------------

    /** Imports instances from an XML stream. */
    ImportSummaries addTrackedEntityInstanceXml( InputStream inputStream, ImportOptions importOptions ) throws IOException;

    /** Imports instances from a JSON stream. */
    ImportSummaries addTrackedEntityInstanceJson( InputStream inputStream, ImportOptions importOptions ) throws IOException;

    /** Imports a list of already-deserialized instances. */
    ImportSummaries addTrackedEntityInstances( List<TrackedEntityInstance> trackedEntityInstances, ImportOptions importOptions );

    /** Imports a single instance. */
    ImportSummary addTrackedEntityInstance( TrackedEntityInstance trackedEntityInstance, ImportOptions importOptions );

    // -------------------------------------------------------------------------
    // UPDATE
    // -------------------------------------------------------------------------

    /** Updates the instance with the given id from an XML stream. */
    ImportSummary updateTrackedEntityInstanceXml( String id, InputStream inputStream, ImportOptions importOptions ) throws IOException;

    /** Updates the instance with the given id from a JSON stream. */
    ImportSummary updateTrackedEntityInstanceJson( String id, InputStream inputStream, ImportOptions importOptions ) throws IOException;

    /** Updates a list of already-deserialized instances. */
    ImportSummaries updateTrackedEntityInstances( List<TrackedEntityInstance> trackedEntityInstances, ImportOptions importOptions );

    /** Updates a single instance. */
    ImportSummary updateTrackedEntityInstance( TrackedEntityInstance trackedEntityInstance, ImportOptions importOptions );

    // -------------------------------------------------------------------------
    // DELETE
    // -------------------------------------------------------------------------

    /** Deletes a single instance by UID. */
    ImportSummary deleteTrackedEntityInstance( String uid );

    /** Deletes several instances by UID. */
    ImportSummaries deleteTrackedEntityInstances( List<String> uids );
}
|
bsd-3-clause
|
bogdandrutu/grpc
|
tools/codegen/core/gen_static_metadata.py
|
10508
|
#!/usr/bin/env python2.7
# Copyright 2015, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import hashlib
import itertools
import os
import sys
# configuration: a list of either strings or 2-tuples of strings
# a single string represents a static grpc_mdstr
# a 2-tuple represents a static grpc_mdelem (and appropriate grpc_mdstrs will
# also be created)
CONFIG = [
'grpc-timeout',
'grpc-internal-encoding-request',
':path',
'grpc-encoding',
'grpc-accept-encoding',
'user-agent',
':authority',
'host',
'grpc-message',
'grpc-status',
'grpc-census-bin',
'grpc-tracing-bin',
'',
('grpc-status', '0'),
('grpc-status', '1'),
('grpc-status', '2'),
('grpc-encoding', 'identity'),
('grpc-encoding', 'gzip'),
('grpc-encoding', 'deflate'),
('te', 'trailers'),
('content-type', 'application/grpc'),
(':method', 'POST'),
(':status', '200'),
(':status', '404'),
(':scheme', 'http'),
(':scheme', 'https'),
(':scheme', 'grpc'),
(':authority', ''),
(':method', 'GET'),
(':method', 'PUT'),
(':path', '/'),
(':path', '/index.html'),
(':status', '204'),
(':status', '206'),
(':status', '304'),
(':status', '400'),
(':status', '500'),
('accept-charset', ''),
('accept-encoding', ''),
('accept-encoding', 'gzip, deflate'),
('accept-language', ''),
('accept-ranges', ''),
('accept', ''),
('access-control-allow-origin', ''),
('age', ''),
('allow', ''),
('authorization', ''),
('cache-control', ''),
('content-disposition', ''),
('content-encoding', ''),
('content-language', ''),
('content-length', ''),
('content-location', ''),
('content-range', ''),
('content-type', ''),
('cookie', ''),
('date', ''),
('etag', ''),
('expect', ''),
('expires', ''),
('from', ''),
('host', ''),
('if-match', ''),
('if-modified-since', ''),
('if-none-match', ''),
('if-range', ''),
('if-unmodified-since', ''),
('last-modified', ''),
('load-reporting', ''),
('link', ''),
('location', ''),
('max-forwards', ''),
('proxy-authenticate', ''),
('proxy-authorization', ''),
('range', ''),
('referer', ''),
('refresh', ''),
('retry-after', ''),
('server', ''),
('set-cookie', ''),
('strict-transport-security', ''),
('transfer-encoding', ''),
('user-agent', ''),
('vary', ''),
('via', ''),
('www-authenticate', ''),
]
COMPRESSION_ALGORITHMS = [
'identity',
'deflate',
'gzip',
]
# utility: mangle the name of a config
def mangle(elem):
    """Derive a C identifier for a config entry.

    A plain string becomes ``grpc_mdstr_<name>``; a (key, value) tuple
    becomes ``grpc_mdelem_<key>_<value>``.  Characters without a C-safe
    spelling are translated via the table below; runs of underscores are
    collapsed and multi-character replacements are set off by underscores.
    """
    replacements = {
        '-': '_',
        ':': '',
        '/': 'slash',
        '.': 'dot',
        ',': 'comma',
        ' ': '_',
    }

    def sanitize(text):
        # The empty string still needs a usable identifier fragment.
        if not text:
            return 'empty'
        out = ''
        for ch in text:
            piece = replacements.get(ch, ch.lower())
            if not piece:
                continue  # character dropped entirely (e.g. ':')
            prev_is_underscore = out[-1] == '_' if out else True
            if prev_is_underscore and piece == '_':
                continue  # collapse consecutive underscores
            if len(piece) > 1:
                # Word-replacements get underscore separators around them.
                if not prev_is_underscore:
                    out += '_'
                out += piece + '_'
            else:
                out += piece
        if out[-1] == '_':
            out = out[:-1]
        return out

    if isinstance(elem, tuple):
        return 'grpc_mdelem_%s_%s' % (sanitize(elem[0]), sanitize(elem[1]))
    return 'grpc_mdstr_%s' % sanitize(elem)
# utility: generate some hash value for a string
def fake_hash(elem):
    """Short, stable tag for *elem*: the first 8 hex digits of its md5."""
    digest = hashlib.md5(elem).hexdigest()
    return digest[:8]
# utility: print a big comment block into a set of files
def put_banner(files, banner):
    # Emits the same C-style /* ... */ banner into every open file handle.
    # NOTE: uses Python 2 print-chevron syntax; this script targets python2.7.
    for f in files:
        print >>f, '/*'
        for line in banner:
            print >>f, ' * %s' % line
        print >>f, ' */'
        print >>f
# build a list of all the strings we need
all_strs = set()
all_elems = set()
static_userdata = {}
# Collect every metadata string and (key, value) element named in CONFIG.
for elem in CONFIG:
    if isinstance(elem, tuple):
        all_strs.add(elem[0])
        all_strs.add(elem[1])
        all_elems.add(elem)
    else:
        all_strs.add(elem)
# Pre-build one grpc-accept-encoding element for every non-empty subset of
# the supported compression algorithms; static_userdata records the subset
# mask (offset so it is never zero).
compression_elems = []
for mask in range(1, 1<<len(COMPRESSION_ALGORITHMS)):
    val = ','.join(COMPRESSION_ALGORITHMS[alg]
                   for alg in range(0, len(COMPRESSION_ALGORITHMS))
                   if (1 << alg) & mask)
    elem = ('grpc-accept-encoding', val)
    all_strs.add(val)
    all_elems.add(elem)
    compression_elems.append(elem)
    static_userdata[elem] = 1 + (mask | 1)
# Deterministic output ordering, keyed by the mangled C identifier.
all_strs = sorted(list(all_strs), key=mangle)
all_elems = sorted(list(all_elems), key=mangle)
# output configuration
args = sys.argv[1:]
H = None
C = None
D = None
if args:
if 'header' in args:
H = sys.stdout
else:
H = open('/dev/null', 'w')
if 'source' in args:
C = sys.stdout
else:
C = open('/dev/null', 'w')
if 'dictionary' in args:
D = sys.stdout
else:
D = open('/dev/null', 'w')
else:
H = open(os.path.join(
os.path.dirname(sys.argv[0]), '../../../src/core/lib/transport/static_metadata.h'), 'w')
C = open(os.path.join(
os.path.dirname(sys.argv[0]), '../../../src/core/lib/transport/static_metadata.c'), 'w')
D = open(os.path.join(
os.path.dirname(sys.argv[0]), '../../../test/core/end2end/fuzzers/hpack.dictionary'), 'w')
# copy-paste copyright notice from this file
with open(sys.argv[0]) as my_source:
copyright = []
for line in my_source:
if line[0] != '#': break
for line in my_source:
if line[0] == '#':
copyright.append(line)
break
for line in my_source:
if line[0] != '#':
break
copyright.append(line)
put_banner([H,C], [line[2:].rstrip() for line in copyright])
hex_bytes = [ord(c) for c in "abcdefABCDEF0123456789"]
def esc_dict(line):
    """Render a list of byte values as a double-quoted, C-style escaped
    string suitable for a fuzzer dictionary entry.  Printable ASCII is kept
    literal (with `"` backslash-escaped); everything else becomes \\xNN."""
    pieces = ['"']
    for byte in line:
        if 32 <= byte < 127 and byte != ord('"'):
            pieces.append(chr(byte))
        elif byte == ord('"'):
            pieces.append('\\"')
        else:
            pieces.append('\\x%02X' % byte)
    pieces.append('"')
    return ''.join(pieces)
put_banner([H,C],
"""WARNING: Auto-generated code.
To make changes to this file, change
tools/codegen/core/gen_static_metadata.py, and then re-run it.
See metadata.h for an explanation of the interface here, and metadata.c for
an explanation of what's going on.
""".splitlines())
print >>H, '#ifndef GRPC_CORE_LIB_TRANSPORT_STATIC_METADATA_H'
print >>H, '#define GRPC_CORE_LIB_TRANSPORT_STATIC_METADATA_H'
print >>H
print >>H, '#include "src/core/lib/transport/metadata.h"'
print >>H
print >>C, '#include "src/core/lib/transport/static_metadata.h"'
print >>C
print >>H, '#define GRPC_STATIC_MDSTR_COUNT %d' % len(all_strs)
print >>H, 'extern grpc_mdstr grpc_static_mdstr_table[GRPC_STATIC_MDSTR_COUNT];'
for i, elem in enumerate(all_strs):
print >>H, '/* "%s" */' % elem
print >>H, '#define %s (&grpc_static_mdstr_table[%d])' % (mangle(elem).upper(), i)
print >>H
print >>C, 'grpc_mdstr grpc_static_mdstr_table[GRPC_STATIC_MDSTR_COUNT];'
print >>C
print >>D, '# hpack fuzzing dictionary'
for i, elem in enumerate(all_strs):
print >>D, '%s' % (esc_dict([len(elem)] + [ord(c) for c in elem]))
for i, elem in enumerate(all_elems):
print >>D, '%s' % (esc_dict([0, len(elem[0])] + [ord(c) for c in elem[0]] +
[len(elem[1])] + [ord(c) for c in elem[1]]))
print >>H, '#define GRPC_STATIC_MDELEM_COUNT %d' % len(all_elems)
print >>H, 'extern grpc_mdelem grpc_static_mdelem_table[GRPC_STATIC_MDELEM_COUNT];'
print >>H, 'extern uintptr_t grpc_static_mdelem_user_data[GRPC_STATIC_MDELEM_COUNT];'
for i, elem in enumerate(all_elems):
print >>H, '/* "%s": "%s" */' % elem
print >>H, '#define %s (&grpc_static_mdelem_table[%d])' % (mangle(elem).upper(), i)
print >>H
print >>C, 'grpc_mdelem grpc_static_mdelem_table[GRPC_STATIC_MDELEM_COUNT];'
print >>C, 'uintptr_t grpc_static_mdelem_user_data[GRPC_STATIC_MDELEM_COUNT] = {'
print >>C, ' %s' % ','.join('%d' % static_userdata.get(elem, 0) for elem in all_elems)
print >>C, '};'
print >>C
def str_idx(s):
    """Index of string *s* in the global all_strs table (None if absent)."""
    try:
        return all_strs.index(s)
    except ValueError:
        return None
def md_idx(m):
    """Index of element *m* in the global all_elems table (None if absent)."""
    try:
        return all_elems.index(m)
    except ValueError:
        return None
print >>H, 'extern const uint8_t grpc_static_metadata_elem_indices[GRPC_STATIC_MDELEM_COUNT*2];'
print >>C, 'const uint8_t grpc_static_metadata_elem_indices[GRPC_STATIC_MDELEM_COUNT*2] = {'
print >>C, ','.join('%d' % str_idx(x) for x in itertools.chain.from_iterable([a,b] for a, b in all_elems))
print >>C, '};'
print >>C
print >>H, 'extern const char *const grpc_static_metadata_strings[GRPC_STATIC_MDSTR_COUNT];'
print >>C, 'const char *const grpc_static_metadata_strings[GRPC_STATIC_MDSTR_COUNT] = {'
print >>C, '%s' % ',\n'.join(' "%s"' % s for s in all_strs)
print >>C, '};'
print >>C
print >>H, 'extern const uint8_t grpc_static_accept_encoding_metadata[%d];' % (1 << len(COMPRESSION_ALGORITHMS))
print >>C, 'const uint8_t grpc_static_accept_encoding_metadata[%d] = {' % (1 << len(COMPRESSION_ALGORITHMS))
print >>C, '0,%s' % ','.join('%d' % md_idx(elem) for elem in compression_elems)
print >>C, '};'
print >>C
print >>H, '#define GRPC_MDELEM_ACCEPT_ENCODING_FOR_ALGORITHMS(algs) (&grpc_static_mdelem_table[grpc_static_accept_encoding_metadata[(algs)]])'
print >>H, '#endif /* GRPC_CORE_LIB_TRANSPORT_STATIC_METADATA_H */'
H.close()
C.close()
|
bsd-3-clause
|
rescrv/ygor
|
ygor-cli.cc
|
3016
|
// Copyright (c) 2013-2014, Robert Escriva
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
// * Neither the name of ygor nor the names of its contributors may be used
// to endorse or promote products derived from this software without
// specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
// STL
#include <vector>
// e
#include <e/subcommand.h>
int
main(int argc, const char* argv[])
{
    // Register every subcommand this front-end knows about; commented-out
    // entries are not yet exposed through the CLI.
    std::vector<e::subcommand> cmds;
    cmds.push_back(e::subcommand("armnod", "Generate random strings"));
    cmds.push_back(e::subcommand("guacamole", "Generate random bytes"));
    cmds.push_back(e::subcommand("configure", "Configure an experiment for ygor"));
    cmds.push_back(e::subcommand("run", "Have ygor run an experiment"));
    cmds.push_back(e::subcommand("inspect", "Inspect a data file"));
    cmds.push_back(e::subcommand("cdf", "Generate a CDF of the data"));
    //cmds.push_back(e::subcommand("percentiles", "Compute percentile values for data"));
    cmds.push_back(e::subcommand("merge", "Merge multiple data files"));
    //cmds.push_back(e::subcommand("summarize", "Generate a summary of the data"));
    cmds.push_back(e::subcommand("timeseries", "Generate a timeseries of the data"));
    //cmds.push_back(e::subcommand("t-test", "Run the Student's t-test on multiple data files"));
    // Hand off to libe's dispatcher; presumably it resolves argv[1] to a
    // "ygor-<cmd>" executable via $YGOR_EXEC_PATH or the compiled-in
    // YGOR_EXEC_DIR -- see e/subcommand.h for the exact contract.
    return dispatch_to_subcommands(argc, argv,
                                   "ygor", "ygor",
                                   PACKAGE_VERSION,
                                   "ygor-",
                                   "YGOR_EXEC_PATH", YGOR_EXEC_DIR,
                                   &cmds.front(), cmds.size());
}
|
bsd-3-clause
|
crscardellino/dnnwsd
|
dnnwsd/pipeline/ladder.py
|
8100
|
# -*- coding: utf-8 -*-
import copy
import logging
import numpy as np
import os
import shutil
import tensorflow as tf
import unicodedata
from ..experiment.ladder import LadderNetworksExperiment
from ..utils.setup_logging import setup_logging
setup_logging()
logger = logging.getLogger(__name__)
def _write_results(results, evaluations, population_growths, labels, results_path):
    """Average per-repetition metrics and write everything to *results_path*.

    ``results`` is a list (one entry per repetition) of dicts of metric
    curves; each train/test/validation accuracy, mcp and lcr curve is
    averaged across repetitions and written as one text file per metric.
    Each repetition's evaluation sentences go to ``evaluations/NN.txt`` and
    the mean per-label population growth to ``population_growth``.

    NOTE(review): any existing *results_path* directory is deleted first.
    """
    if os.path.exists(results_path):
        shutil.rmtree(results_path)
    os.makedirs(results_path)
    os.makedirs(os.path.join(results_path, 'evaluations'))
    train_accuracy = []
    train_mcp = []
    train_lcr = []
    test_accuracy = []
    test_mcp = []
    test_lcr = []
    validation_accuracy = []
    validation_mcp = []
    validation_lcr = []
    # Gather each metric's curve from every repetition.
    for result in results:
        train_accuracy.append(result['train_accuracy'])
        train_mcp.append(result['train_mcp'])
        train_lcr.append(result['train_lcr'])
        test_accuracy.append(result['test_accuracy'])
        test_mcp.append(result['test_mcp'])
        test_lcr.append(result['test_lcr'])
        validation_accuracy.append(result['validation_accuracy'])
        validation_mcp.append(result['validation_mcp'])
        validation_lcr.append(result['validation_lcr'])
    # Average across repetitions (axis 0 = repetition index).
    train_accuracy = np.array(train_accuracy, dtype=np.float32).mean(axis=0)
    train_mcp = np.array(train_mcp, dtype=np.float32).mean(axis=0)
    train_lcr = np.array(train_lcr, dtype=np.float32).mean(axis=0)
    test_accuracy = np.array(test_accuracy, dtype=np.float32).mean(axis=0)
    test_mcp = np.array(test_mcp, dtype=np.float32).mean(axis=0)
    test_lcr = np.array(test_lcr, dtype=np.float32).mean(axis=0)
    validation_accuracy = np.array(validation_accuracy, dtype=np.float32).mean(axis=0)
    validation_mcp = np.array(validation_mcp, dtype=np.float32).mean(axis=0)
    validation_lcr = np.array(validation_lcr, dtype=np.float32).mean(axis=0)
    np.savetxt(os.path.join(results_path, 'train_accuracy'), train_accuracy, fmt='%.2f')
    np.savetxt(os.path.join(results_path, 'train_mcp'), train_mcp, fmt='%.2f')
    np.savetxt(os.path.join(results_path, 'train_lcr'), train_lcr, fmt='%.2f')
    np.savetxt(os.path.join(results_path, 'test_accuracy'), test_accuracy, fmt='%.2f')
    np.savetxt(os.path.join(results_path, 'test_mcp'), test_mcp, fmt='%.2f')
    np.savetxt(os.path.join(results_path, 'test_lcr'), test_lcr, fmt='%.2f')
    np.savetxt(os.path.join(results_path, 'validation_accuracy'), validation_accuracy, fmt='%.2f')
    np.savetxt(os.path.join(results_path, 'validation_mcp'), validation_mcp, fmt='%.2f')
    np.savetxt(os.path.join(results_path, 'validation_lcr'), validation_lcr, fmt='%.2f')
    # One file per repetition, grouping evaluation sentences by iteration.
    for eidx, evaluation in enumerate(evaluations):
        epath = os.path.join(results_path, 'evaluations', '{:02d}.txt'.format(eidx))
        with open(epath, 'w') as f:
            for epoch, sentences in enumerate(evaluation):
                f.write("{}\n".format("="*13))
                f.write("Iteration {:03d}\n".format(epoch))
                f.write("{}\n".format("="*13))
                for sentence in sentences:
                    # NOTE(review): .encode on write targets Python 2 byte
                    # files; would fail on a Python 3 text-mode handle.
                    f.write(sentence.encode("utf-8") + "\n")
                f.write("\n\n")
    # CSV-ish dump: epoch,label,mean-count per label, averaged over reps.
    with open(os.path.join(results_path, "population_growth"), "w") as f:
        pg_mean = np.mean(np.array(population_growths), axis=0)
        for epoch, pg in enumerate(pg_mean):
            for idx, label in enumerate(labels):
                f.write(u"{:02d},{},{:.0f}\n".format(epoch, label, pg[idx]).encode("utf-8"))
class LadderNetworksPipeline(object):
_experiments = {
'vec': 'Word Vectors',
'vecpos': 'Word Vectors with PoS'
}
    def __init__(self, dataset_directory, dataset_indexes, lemmas_path,
                 results_directory, layers, denoising_cost, **kwargs):
        """Configure the ladder-networks pipeline.

        :param dataset_directory: root containing one pickled dataset per
            experiment kind ('vec'/'vecpos') and lemma index.
        :param dataset_indexes: lemma indexes to run experiments for.
        :param lemmas_path: UTF-8 file with one lemma per line; read here,
            NFC-normalized and split on newlines.
        :param results_directory: root where per-lemma results are written.
        :param layers: per-experiment-kind layer sizes (copied per run).
        :param denoising_cost: per-experiment-kind denoising costs.

        Optional keyword arguments (defaults in parentheses): repetitions
        (5), epochs (10), noise_std (0.3), starter_learning_rate (0.02),
        train_ratio (0.8), test_ratio (0.1), validation_ratio (0.1),
        evaluation_amount (10), population_growth_count (1000).
        """
        self._dataset_directory = dataset_directory
        self._dataset_indexes = dataset_indexes
        self._results_directory = results_directory
        with open(lemmas_path, 'r') as f:
            self._lemmas = unicodedata.normalize("NFC", f.read().decode("utf-8")).strip().split("\n")
        self._layers = layers
        self._denoising_cost = denoising_cost
        self._repetitions = kwargs.pop('repetitions', 5)
        self._epochs = kwargs.pop('epochs', 10)
        self._noise_std = kwargs.pop('noise_std', 0.3)
        self._starter_learning_rate = kwargs.pop('starter_learning_rate', 0.02)
        self._train_ratio = kwargs.pop('train_ratio', 0.8)
        self._test_ratio = kwargs.pop('test_ratio', 0.1)
        self._validation_ratio = kwargs.pop('validation_ratio', 0.1)
        self._evaluation_amount = kwargs.pop('evaluation_amount', 10)
        self._population_growth_count = kwargs.pop('population_growth_count', 1000)
def run(self):
for dataset_index in self._dataset_indexes:
logger.info(u"Running set of experiments for lemma {}".format(self._lemmas[dataset_index]))
for experiment, experiment_name in self._experiments.iteritems():
logger.info(u"Running {} experiments".format(experiment_name))
dataset_path = os.path.join(self._dataset_directory, experiment, "{:03d}.p".format(dataset_index))
results_path = os.path.join(self._results_directory, "{:03d}".format(dataset_index), experiment)
results = []
evaluations = []
population_growths = []
labels = None
for repetition in xrange(self._repetitions):
logger.info(u"Running repetition {}".format(repetition + 1))
with tf.Graph().as_default() as g:
layers = copy.copy(self._layers[experiment])
denoising_cost = copy.copy(self._denoising_cost[experiment])
ladder_experiment = LadderNetworksExperiment(
dataset_path, layers, denoising_cost,
epochs=self._epochs, noise_std=self._noise_std,
starter_learning_rate=self._starter_learning_rate,
evaluation_amount=self._evaluation_amount,
population_growth_count=self._population_growth_count, train_ratio=self._train_ratio,
test_ratio=self._test_ratio, validation_ratio=self._validation_ratio
)
if not labels:
labels = ladder_experiment.dataset.labels
ladder_experiment.run()
logger.info(u"Finished experiments for repetition {} - {} experiment - lemma {}".format(
repetition+1, experiment_name, self._lemmas[dataset_index]
))
results.append(
copy.deepcopy(ladder_experiment.results)
)
evaluations.append([]) # repetition evaluations
for (epoch, sentences) in enumerate(ladder_experiment.evaluation_sentences):
evaluations[repetition].append([]) # epoch evaluations
for (eval_sent, y_pred) in sentences:
raw_sentence = ladder_experiment.dataset[eval_sent]
sense = ladder_experiment.dataset.labels[y_pred]
evaluations[repetition][epoch].append(
u"{} -- {}".format(sense, raw_sentence)
)
population_growths.append(np.array(ladder_experiment.population_growth)) # repetition population growth
del ladder_experiment
del g
logger.info(u"Finished all the {} experiment repetitions".format(experiment_name))
_write_results(results, evaluations, population_growths, labels, results_path)
logger.info(u"Finished all the experiments for lemma {}".format(self._lemmas[dataset_index]))
|
bsd-3-clause
|
TeamCohen/MinorThird
|
src/main/java/iitb/Model/GenericModel.java
|
11451
|
/** GenericModel.java
*
* @author Sunita Sarawagi
* @since 1.0
* @version 1.3
*/
package iitb.Model;
import gnu.trove.list.array.TIntArrayList;
import iitb.CRF.DataSequence;
import java.util.Arrays;
import java.util.BitSet;
import java.util.StringTokenizer;
/**
 * A state-transition graph over the states that encode one label of a CRF.
 *
 * <p>Several canned topologies can be built from a textual spec
 * ("k-chain"/"k-long", "k-parallel", "boundary"/"BCEU", "BCE", "BI",
 * "naive"), or an arbitrary graph can be assembled with
 * {@link #addEdge(int, int, int)} followed by
 * {@link #setEdgeStartPointers()} and {@link #fillStartEnd()}.
 *
 * <p>Invariant relied upon throughout: {@code _edges} is sorted by starting
 * node id, and {@code edgeStart[s]} is the index in {@code _edges} of the
 * first edge leaving state {@code s} (or a value {@code >= _edges.length}
 * when the state has no outgoing edges).
 */
public class GenericModel extends Model {
    private static final long serialVersionUID = 6363225834538701213L;
    /** Number of states in this model. */
    int _numStates;
    /** Edges; have to be sorted by their starting node id. */
    Edge _edges[];
    /** edgeStart[i] = index in {@link #_edges} where edges out of node i start. */
    int edgeStart[];
    /** States at which a label segment may begin. */
    int startStates[];
    /** States at which a label segment may end. */
    int endStates[];
    /** Label encoded by this model, or -1 to map each state to itself. */
    int myLabel = -1;
    /** Returns the label for state {@code s}. */
    public int label(int s) {return (myLabel == -1)?s:myLabel;}
    /**
     * Builds one of the canned topologies.
     *
     * @param spec      topology name: "naive" (alias for "1-chain"),
     *                  "k-chain"/"k-long", "k-parallel", "boundary"/"BCEU",
     *                  "BCE", or "BI"
     * @param thisLabel the label this model encodes, or -1
     * @throws Exception if the spec is not recognized
     */
    public GenericModel(String spec, int thisLabel) throws Exception {
        super(1);
        name = spec;
        myLabel = thisLabel;
        if (spec.equals("naive"))
            spec="1-chain";
        if (spec.endsWith("-chain") || spec.endsWith("-long")) {
            // A chain of len states; every state also has a shortcut edge to
            // the last state, and the last state has a self-loop.
            StringTokenizer tok = new StringTokenizer(spec,"-");
            int len = Integer.parseInt(tok.nextToken());
            _numStates = len;
            startStates = new int[1];
            startStates[0] = 0;
            edgeStart = new int[_numStates];
            if (len == 1) {
                _edges = new Edge[1];
                _edges[0] = new Edge(0,0);
                endStates = new int[1];
                endStates[0] = 0;
                edgeStart[0] = 0;
            } else {
                _edges = new Edge[2*(len-1)];
                for (int i = 0; i < len-1; i++) {
                    _edges[2*i] = new Edge(i,i+1);
                    _edges[2*i+1] = new Edge(i,len-1);
                    edgeStart[i] = 2*i;
                }
                // The very last slot is replaced by a self-loop on state len-2.
                _edges[_edges.length-1] = new Edge(len-2,len-2);
                endStates = new int[2];
                endStates[0] = 0; // to allow one word entities.
                endStates[1] = len-1;
            }
        } else if (spec.endsWith("parallel")) {
            // len independent chains of lengths 1..len, laid out consecutively.
            StringTokenizer tok = new StringTokenizer(spec,"-");
            int len = Integer.parseInt(tok.nextToken());
            _numStates = len*(len+1)/2;
            _edges = new Edge[len*(len-1)/2 + 1];
            edgeStart = new int[_numStates];
            startStates = new int[len];
            endStates = new int[len];
            int node = 0;
            int e = 0;
            for (int i = 0; i < len; i++) {
                node += i;
                for (int j = 0; j < i; j++) {
                    _edges[e++] = new Edge(node+j,node+j+1);
                    edgeStart[node+j] = e-1;
                }
                startStates[i] = node;
                endStates[i] = node + i;
            }
            node += len;
            // Self-loop on the second-to-last state of the longest chain.
            _edges[e++] = new Edge(_numStates-2, _numStates-2);
            assert (e == _edges.length);
            assert (node == _numStates);
        } else if (spec.equals("boundary") || (spec.equals("BCEU"))) {
            // this implements a model where each label is either of a
            // Unique word (state 0) or broken into a Start state
            // (state 1) with a single token, Continuation state
            // (state 2) with multiple tokens (only state with
            // self-loop) and end state (state 3) with a single token.
            // The number of states is thus 4, and number of edges 4
            _numStates = 4;
            _edges = new Edge[4];
            _edges[0] = new Edge(1,2);
            _edges[1] = new Edge(1,3);
            _edges[2] = new Edge(2,2);
            _edges[3] = new Edge(2,3);
            startStates = new int[2];
            startStates[0] = 0;
            startStates[1] = 1;
            endStates = new int[2];
            endStates[0] = 0;
            endStates[1] = 3;
            edgeStart = new int[_numStates];
            edgeStart[0] = 4; // no outgoing edges (sentinel past edge array)
            edgeStart[1] = 0;
            edgeStart[2] = 2;
            edgeStart[3] = 4; // no outgoing edges
        } else if (spec.equals("BCE")) {
            // only "U" state missing, so "B" can also be an ending state.
            _numStates = 3;
            _edges = new Edge[4];
            _edges[0] = new Edge(0,1);
            _edges[1] = new Edge(0,2);
            _edges[2] = new Edge(1,1);
            _edges[3] = new Edge(1,2);
            startStates = new int[1];
            startStates[0] = 0;
            endStates = new int[2];
            endStates[0] = 0;
            endStates[1] = 2;
            edgeStart = new int[_numStates];
            edgeStart[0] = 0;
            edgeStart[1] = 2;
            edgeStart[2] = 4; // no outgoing edges
        } else if (spec.equals("BI")){
            // there is a start state with a transition to an I state.
            // start state can also be the end state to allow for one token labels.
            _numStates = 2;
            _edges = new Edge[2];
            _edges[0] = new Edge(0,1);
            _edges[1] = new Edge(1,1);
            startStates = new int[1];
            startStates[0] = 0;
            endStates = new int[2];
            endStates[0] = 0;
            endStates[1] = 1;
            edgeStart = new int[_numStates];
            edgeStart[0] = 0;
            edgeStart[1] = 1;
        } else {
            throw new Exception("Unknown graph type: " + spec);
        }
    }
    /**
     * Sorts {@link #_edges} by start node id and rebuilds {@link #edgeStart}.
     * Call after assembling an arbitrary graph via {@link #addEdge}.
     */
    public void setEdgeStartPointers() {
        // sort the edges in increasing order of start node ids..
        Arrays.sort(_edges);
        edgeStart = new int[_numStates];
        for (int i = 0; i < edgeStart.length; i++)
            // BUG FIX: the "no outgoing edges" sentinel must lie past the end
            // of the EDGE array, not the state count.  The old value
            // (_numStates) silently dropped a state's out-edges whenever the
            // model had more edges than states and the state's first edge
            // index was >= _numStates: the traversal loops in pathToEnd()
            // would then start at an edge belonging to an earlier state and
            // exit immediately.
            edgeStart[i] = _edges.length;
        for (int i = 0; i < _edges.length; i++) {
            if (edgeStart[_edges[i].start] > i)
                edgeStart[_edges[i].start] = i;
        }
    }
    /**
     * Derives {@link #startStates} (no incoming edges) and
     * {@link #endStates} (no outgoing edges) from the edge list.
     */
    public void fillStartEnd() {
        BitSet isStart = new BitSet(_numStates);
        BitSet isEnd = new BitSet(_numStates);
        // Assume every state is both, then clear the bits edges disprove.
        isStart.flip(0,_numStates);
        isEnd.flip(0,_numStates);
        for (int i = 0; i < _edges.length; i++) {
            isStart.set(_edges[i].end,false);
            isEnd.set(_edges[i].start,false);
        }
        startStates = new int[isStart.cardinality()];
        int prev = 0;
        for (int i = 0; i < startStates.length; i++) {
            startStates[i] = isStart.nextSetBit(prev);
            prev = startStates[i]+1;
        }
        endStates = new int[isEnd.cardinality()];
        prev = 0;
        for (int i = 0; i < endStates.length; i++) {
            endStates[i] = isEnd.nextSetBit(prev);
            prev = endStates[i]+1;
        }
    }
    /** Replaces the edge array with the given (Edge-valued) objects. */
    public void setEdges(Object[] edges) {
        _edges = new Edge[edges.length];
        for (int i = 0; i < _edges.length; i++)
            _edges[i] = (Edge)(edges[i]);
    }
    /** Stores an edge from state {@code st} to state {@code end} at slot {@code edgeNum}. */
    public void addEdge(int edgeNum, int st, int end) {
        _edges[edgeNum] = new Edge(st,end);
    }
    /**
     * Creates an empty model; populate via {@link #addEdge}, then call
     * {@link #setEdgeStartPointers()} and {@link #fillStartEnd()}.
     */
    public GenericModel(int numNodes, int numEdges) throws Exception {
        super(numNodes);
        _numStates = numNodes;
        _edges = new Edge[numEdges];
    }
    public int numStates() {return _numStates;}
    public int numEdges() {return _edges.length;}
    public int numStartStates() {return startStates.length;}
    /** Returns the i-th start state, or -1 when out of range. */
    public int startState(int i) {return (i < numStartStates())?startStates[i]:-1;}
    public int numEndStates() {
        return endStates.length;
    }
    /** Returns the i-th end state, or -1 when out of range. */
    public int endState(int i) {
        return (i < numEndStates())?endStates[i]:-1;
    }
    public boolean isEndState(int i) {
        // TODO -- convert this to binary search
        for (int k = 0; k < endStates.length; k++)
            if (endStates[k] == i)
                return true;
        return false;
    }
    public boolean isStartState(int i) {
        // TODO -- convert this to binary search
        for (int k = 0; k < startStates.length; k++)
            if (startStates[k] == i)
                return true;
        return false;
    }
    /** No-op: per-sequence state mapping is not needed for this model. */
    public void stateMappings(DataSequence data) throws Exception {
        return;
    }
    /**
     * Assigns states to positions {@code start..start+len-1} of {@code data}
     * along some start-to-end path of length {@code len}.
     *
     * @throws Exception if no such path exists
     */
    public void stateMappings(DataSequence data, int len, int start) throws Exception {
        for (int i = 0; i < numStartStates(); i++) {
            if (pathToEnd(data,startState(i),len-1,start+1)) {
                data.set_y(start,startState(i));
                return;
            }
        }
        throw new Exception("No path in graph");
    }
    /**
     * Depth-first search for a path of {@code lenLeft} more steps from state
     * {@code s} to some end state, writing the states into {@code data}.
     */
    boolean pathToEnd(DataSequence data, int s, int lenLeft, int start) {
        if (lenLeft == 0) {
            return isEndState(s);
        }
        for (int e = edgeStart[s]; (e < numEdges()) && (_edges[e].start == s); e++) {
            int child = _edges[e].end;
            if (pathToEnd(data,child,lenLeft-1,start+1)) {
                data.set_y(start,child);
                return true;
            }
        }
        return false;
    }
    /**
     * Returns the state occupied at offset {@code posFromStart} on some
     * start-to-end path of length {@code len}.
     *
     * @throws Exception if no such path exists
     */
    public int stateMappingGivenLength(int label, int len, int posFromStart) throws Exception {
        for (int i = 0; i < numStartStates(); i++) {
            int stateId = pathToEnd(startState(i),len-1,posFromStart-1);
            if (stateId >= 0) {
                if (posFromStart==0)
                    return startState(i);
                return stateId;
            }
        }
        throw new Exception("No path in graph");
    }
    /**
     * Fills {@code stateIds} with the states of some start-to-end path of
     * length {@code len}.
     *
     * @throws Exception if no such path exists
     */
    public void stateMappingGivenLength(int label, int len, TIntArrayList stateIds)
    throws Exception {
        stateIds.clear();
        for (int i = 0; i < len; i++,stateIds.add(0));
        for (int i = 0; i < numStartStates(); i++) {
            if (pathToEnd(startState(i),len-1,1,stateIds)) {
                stateIds.setQuick(0,startState(i));
                return;
            }
        }
        throw new Exception("No path in graph");
    }
    /** DFS variant that records the visited states into {@code stateIds}. */
    boolean pathToEnd(int s, int lenLeft, int start, TIntArrayList stateIds) {
        assert (lenLeft >= 0);
        if (lenLeft == 0) {
            return isEndState(s);
        }
        for (int e = edgeStart[s]; (e < numEdges()) && (_edges[e].start == s); e++) {
            int child = _edges[e].end;
            if (pathToEnd(child,lenLeft-1,start+1,stateIds)) {
                stateIds.setQuick(start,child);
                return true;
            }
        }
        return false;
    }
    /**
     * DFS variant that returns the state at offset {@code posFromStart}
     * along the found path, or -1 when no path exists.
     */
    private int pathToEnd(int s, int lenLeft, int posFromStart) {
        if (lenLeft == 0) {
            return isEndState(s)?s:-1;
        }
        for (int e = edgeStart[s]; (e < numEdges()) && (_edges[e].start == s); e++) {
            int child = _edges[e].end;
            int stateId = pathToEnd(child,lenLeft-1,posFromStart-1);
            if (stateId >= 0) {
                if (posFromStart == 0)
                    return child;
                return stateId;
            }
        }
        return -1;
    }
    /** Iterates over all edges of the model, in array order. */
    public class GenericEdgeIterator implements EdgeIterator {
        int edgeNum;
        Edge edges[];
        GenericEdgeIterator(Edge[] e) {
            edges = e;
            start();
        }
        public void start() {
            edgeNum = 0;
        }
        public boolean hasNext() {
            return (edgeNum < edges.length);
        }
        public Edge next() {
            edgeNum++;
            return edges[edgeNum-1];
        }
        /* (non-Javadoc)
         * @see iitb.Model.EdgeIterator#nextIsOuter()
         */
        public boolean nextIsOuter() {
            return true;
        }
    }
    public EdgeIterator edgeIterator() {
        return new GenericEdgeIterator(_edges);
    }
}
|
bsd-3-clause
|
vivo-project/Vitro
|
api/src/test/java/edu/cornell/mannlib/vitro/webapp/auth/policy/SelfEditingPolicy_2_Test.java
|
10319
|
/* $This file is distributed under the terms of the license in LICENSE$ */
package edu.cornell.mannlib.vitro.webapp.auth.policy;
import static edu.cornell.mannlib.vitro.webapp.auth.requestedAction.RequestedAction.SOME_LITERAL;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import stubs.edu.cornell.mannlib.vitro.webapp.auth.policy.bean.PropertyRestrictionBeanStub;
import stubs.javax.servlet.ServletContextStub;
import org.apache.jena.ontology.OntModel;
import org.apache.jena.ontology.OntModelSpec;
import org.apache.jena.rdf.model.ModelFactory;
import edu.cornell.mannlib.vitro.testing.AbstractTestClass;
import edu.cornell.mannlib.vitro.webapp.auth.identifier.ArrayIdentifierBundle;
import edu.cornell.mannlib.vitro.webapp.auth.identifier.Identifier;
import edu.cornell.mannlib.vitro.webapp.auth.identifier.IdentifierBundle;
import edu.cornell.mannlib.vitro.webapp.auth.identifier.common.HasProfile;
import edu.cornell.mannlib.vitro.webapp.auth.policy.ifaces.Authorization;
import edu.cornell.mannlib.vitro.webapp.auth.policy.ifaces.PolicyDecision;
import edu.cornell.mannlib.vitro.webapp.auth.requestedAction.propstmt.AddObjectPropertyStatement;
import edu.cornell.mannlib.vitro.webapp.auth.requestedAction.propstmt.EditDataPropertyStatement;
import edu.cornell.mannlib.vitro.webapp.auth.requestedAction.propstmt.EditObjectPropertyStatement;
import edu.cornell.mannlib.vitro.webapp.beans.Individual;
import edu.cornell.mannlib.vitro.webapp.beans.IndividualImpl;
import edu.cornell.mannlib.vitro.webapp.beans.Property;
import edu.cornell.mannlib.vitro.webapp.dao.VitroVocabulary;
/**
 * Exercises {@link SelfEditingPolicy}: a self-editor may add or edit
 * statements that involve their own URI, but only with predicates and
 * objects outside the restricted administrative namespace.
 */
public class SelfEditingPolicy_2_Test extends AbstractTestClass {
    private static final Log log = LogFactory.getLog(SelfEditingPolicy_2_Test.class);

    /** Arbitrary namespace in which a self-editor is permitted to edit. */
    private static final String SAFE_NS = "http://test.mannlib.cornell.edu/ns/01#";

    /** The administrative namespace; editing here must be refused. */
    private static final String ADMIN_NS = VitroVocabulary.vitroURI;

    /** URI identifying the self-editor. */
    private static final String SELFEDITOR_URI = SAFE_NS + "individual000";

    /** A resource and a predicate that are fair game for editing. */
    private static final String SAFE_RESOURCE = SAFE_NS + "individual123";
    private static final String SAFE_PREDICATE = SAFE_NS + "hasHairStyle";

    /** Resources and predicates that must stay off limits. */
    private static final String ADMIN_RESOURCE = ADMIN_NS + "individual666";
    private static final String ADMIN_PREDICATE_1 = ADMIN_NS + "hasSuperPowers";
    private static final String ADMIN_PREDICATE_2 = ADMIN_NS + "mayPrintMoney";
    private static final String ADMIN_PREDICATE_3 = ADMIN_NS
            + "getsOutOfJailFree";
    private static final String ADMIN_PREDICATE_4 = ADMIN_NS + "canDeleteModel";

    /** The policy under test. */
    SelfEditingPolicy policy;

    /** An individual acting as the self-editor. */
    Individual selfEditor;

    /** Identifier bundle carrying the self-editor's profile. */
    IdentifierBundle idBundle;

    /**
     * Placeholder model passed into each requested action; the
     * SelfEditingPolicy never inspects its contents.
     */
    private OntModel ontModel;

    @Before
    public void setUp() {
        ServletContextStub ctx = new ServletContextStub();
        // Register the admin namespace as restricted before building the policy.
        PropertyRestrictionBeanStub.getInstance(new String[] { ADMIN_NS });
        policy = new SelfEditingPolicy(ctx);
        Assert.assertNotNull(policy);
        selfEditor = new IndividualImpl();
        selfEditor.setURI(SELFEDITOR_URI);
        idBundle = new ArrayIdentifierBundle(new HasProfile(SELFEDITOR_URI));
        ontModel = ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM);
    }

    // ----------------------------------------------------------------------
    // General behavior
    // ----------------------------------------------------------------------

    @Test
    public void nullRequestedAction() {
        PolicyDecision decision = policy.isAuthorized(idBundle, null);
        Assert.assertNotNull(decision);
        Assert.assertEquals(Authorization.INCONCLUSIVE, decision.getAuthorized());
    }

    @Test
    public void nullIdentifierBundle() {
        AddObjectPropertyStatement action = new AddObjectPropertyStatement(
                ontModel, SELFEDITOR_URI, new Property(SAFE_PREDICATE), SAFE_RESOURCE);
        PolicyDecision decision = policy.isAuthorized(null, action);
        Assert.assertNotNull(decision);
        Assert.assertEquals(Authorization.INCONCLUSIVE, decision.getAuthorized());
    }

    @Test
    public void noSelfEditorIdentifier() {
        idBundle.clear();
        idBundle.add(new Identifier() {
            // deliberately empty: not a self-editing identifier
        });
        assertAddObjectPropStmt(SELFEDITOR_URI, SAFE_PREDICATE, SAFE_RESOURCE,
                Authorization.INCONCLUSIVE);
    }

    // ----------------------------------------------------------------------
    // AddObjectPropertyStatement
    // ----------------------------------------------------------------------

    @Test
    public void addObjectPropStmtSuccess1() {
        assertAddObjectPropStmt(SELFEDITOR_URI, SAFE_PREDICATE, SAFE_RESOURCE,
                Authorization.AUTHORIZED);
    }

    @Test
    public void addObjectPropStmtSuccess2() {
        assertAddObjectPropStmt(SAFE_RESOURCE, SAFE_PREDICATE, SELFEDITOR_URI,
                Authorization.AUTHORIZED);
    }

    @Test
    public void addObjectPropStmtUnsafePredicate1() {
        assertAddObjectPropStmt(SELFEDITOR_URI, ADMIN_PREDICATE_1,
                SAFE_RESOURCE, Authorization.INCONCLUSIVE);
    }

    @Test
    public void addObjectPropStmtUnsafePredicate2() {
        assertAddObjectPropStmt(SAFE_RESOURCE, ADMIN_PREDICATE_1,
                SELFEDITOR_URI, Authorization.INCONCLUSIVE);
    }

    @Test
    public void addObjectPropStmtUnsafePredicate3() {
        assertAddObjectPropStmt(SELFEDITOR_URI, ADMIN_PREDICATE_2,
                SAFE_RESOURCE, Authorization.INCONCLUSIVE);
    }

    @Test
    public void addObjectPropStmtUnsafePredicate4() {
        assertAddObjectPropStmt(SELFEDITOR_URI, ADMIN_PREDICATE_3,
                SAFE_RESOURCE, Authorization.INCONCLUSIVE);
    }

    @Test
    public void addObjectPropStmtUnsafePredicate5() {
        assertAddObjectPropStmt(SELFEDITOR_URI, ADMIN_PREDICATE_4,
                SAFE_RESOURCE, Authorization.INCONCLUSIVE);
    }

    // ----------------------------------------------------------------------
    // EditObjectPropertyStatement
    // ----------------------------------------------------------------------

    @Test
    public void editObjectPropStmtSuccess1() {
        assertEditObjPropStmt(SELFEDITOR_URI, SAFE_PREDICATE, SAFE_RESOURCE,
                Authorization.AUTHORIZED);
    }

    @Test
    public void editObjectPropStmtSuccess2() {
        assertEditObjPropStmt(SAFE_RESOURCE, SAFE_PREDICATE, SELFEDITOR_URI,
                Authorization.AUTHORIZED);
    }

    @Test
    public void editObjectPropStmtEditorNotInvolved() {
        // The self-editor appears nowhere in the statement.
        assertEditObjPropStmt(SAFE_RESOURCE, SAFE_PREDICATE, SAFE_RESOURCE,
                Authorization.INCONCLUSIVE);
    }

    @Test
    public void editObjectPropStmtUnsafeResource() {
        assertEditObjPropStmt(SELFEDITOR_URI, SAFE_PREDICATE, ADMIN_RESOURCE,
                Authorization.INCONCLUSIVE);
    }

    @Test
    public void editObjectPropStmtUnsafePredicate1() {
        assertEditObjPropStmt(SELFEDITOR_URI, ADMIN_PREDICATE_4, SAFE_RESOURCE,
                Authorization.INCONCLUSIVE);
    }

    @Test
    public void editObjectPropStmtUnsafePredicate2() {
        assertEditObjPropStmt(SAFE_RESOURCE, ADMIN_PREDICATE_4, SELFEDITOR_URI,
                Authorization.INCONCLUSIVE);
    }

    @Test
    public void editObjectPropStmtUnsafeBoth() {
        assertEditObjPropStmt(SELFEDITOR_URI, ADMIN_PREDICATE_4,
                ADMIN_RESOURCE, Authorization.INCONCLUSIVE);
    }

    // ----------------------------------------------------------------------
    // EditDataPropertyStatement
    // ----------------------------------------------------------------------

    @Test
    public void editDataPropSuccess() {
        assertEditDataPropStmt(SELFEDITOR_URI, SAFE_PREDICATE, "junk",
                Authorization.AUTHORIZED);
    }

    @Test
    public void editDataPropUnsafePredicate() {
        assertEditDataPropStmt(SELFEDITOR_URI, ADMIN_PREDICATE_1, "junk",
                Authorization.INCONCLUSIVE);
    }

    @Test
    public void editDataPropUnsafeResource() {
        assertEditDataPropStmt(ADMIN_RESOURCE, SAFE_PREDICATE, null,
                Authorization.INCONCLUSIVE);
    }

    @Test
    public void editDataPropNoCloseRelation() {
        assertEditDataPropStmt(SAFE_RESOURCE, SAFE_PREDICATE, null,
                Authorization.INCONCLUSIVE);
    }

    @Test
    public void editDataPropModelProhibited() {
        // The predicate lives in the restricted namespace.
        assertEditDataPropStmt(SAFE_RESOURCE, ADMIN_PREDICATE_1, null,
                Authorization.INCONCLUSIVE);
    }

    // ------------------------------------------------------------------------
    // Support methods
    // ------------------------------------------------------------------------

    /**
     * Build an {@link AddObjectPropertyStatement}, ask the policy about it,
     * and check the decision against what we expect.
     */
    private void assertAddObjectPropStmt(String uriOfSub, String uriOfPred,
            String uriOfObj, Authorization expectedAuthorization) {
        AddObjectPropertyStatement action = new AddObjectPropertyStatement(
                ontModel, uriOfSub, new Property(uriOfPred), uriOfObj);
        PolicyDecision decision = policy.isAuthorized(idBundle, action);
        log.debug(decision);
        Assert.assertNotNull(decision);
        Assert.assertEquals(expectedAuthorization, decision.getAuthorized());
    }

    /**
     * Build an {@link EditObjectPropertyStatement}, ask the policy about it,
     * and check the decision against what we expect.
     */
    private void assertEditObjPropStmt(String uriOfSub, String uriOfPred,
            String uriOfObj, Authorization expectedAuthorization) {
        EditObjectPropertyStatement action = new EditObjectPropertyStatement(
                ontModel, uriOfSub, new Property(uriOfPred), uriOfObj);
        PolicyDecision decision = policy.isAuthorized(idBundle, action);
        log.debug(decision);
        Assert.assertNotNull(decision);
        Assert.assertEquals(expectedAuthorization, decision.getAuthorized());
    }

    /**
     * Build an {@link EditDataPropertyStatement}, ask the policy about it,
     * and check the decision against what we expect.
     */
    private void assertEditDataPropStmt(String individualURI,
            String datapropURI, String data, Authorization expectedAuthorization) {
        EditDataPropertyStatement action = new EditDataPropertyStatement(
                ontModel, individualURI, datapropURI, SOME_LITERAL);
        PolicyDecision decision = policy.isAuthorized(idBundle, action);
        log.debug(decision);
        Assert.assertNotNull(decision);
        Assert.assertEquals(expectedAuthorization, decision.getAuthorized());
    }
}
|
bsd-3-clause
|
OpenAMEE/askamee
|
config/environments/development.rb
|
1324
|
Askamee::Application.configure do
  # Settings specified here will take precedence over those in config/application.rb

  # In the development environment your application's code is reloaded on
  # every request. This slows down response time but is perfect for development
  # since you don't have to restart the web server when you make code changes.
  config.cache_classes = false

  # Log error messages when you accidentally call methods on nil.
  config.whiny_nils = true

  # Show full error reports and disable caching
  config.consider_all_requests_local = true
  config.action_controller.perform_caching = false

  # Don't care if the mailer can't send
  config.action_mailer.raise_delivery_errors = false

  # Print deprecation notices to the Rails logger
  config.active_support.deprecation = :log

  # Only use best-standards-support built into browsers
  config.action_dispatch.best_standards_support = :builtin

  # Do not compress assets
  config.assets.compress = false

  # Expands the lines which load the assets
  config.assets.debug = true

  # Keep any cached fragments in process memory during development.
  # (Fix: a duplicate `config.action_controller.perform_caching = false`
  # line was removed from here -- caching is already disabled above, and
  # the old comment incorrectly claimed to "Allow caching".)
  config.cache_store = :memory_store

  # Permit cross-origin GET requests from any origin (development only;
  # do not copy this wide-open CORS policy into production).
  config.middleware.use Rack::Cors do
    allow do
      origins '*'
      resource '*', :headers => :any, :methods => :get
    end
  end
end
|
bsd-3-clause
|
Crystalnix/house-of-life-chromium
|
chrome/browser/sync/js_sync_manager_observer.cc
|
4177
|
// Copyright (c) 2011 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/browser/sync/js_sync_manager_observer.h"
#include <cstddef>
#include "base/logging.h"
#include "base/values.h"
#include "chrome/browser/sync/js_arg_list.h"
#include "chrome/browser/sync/js_event_router.h"
#include "chrome/browser/sync/sessions/session_state.h"
#include "chrome/browser/sync/syncable/model_type.h"
namespace browser_sync {
JsSyncManagerObserver::JsSyncManagerObserver(JsEventRouter* parent_router)
: parent_router_(parent_router) {
DCHECK(parent_router_);
}
JsSyncManagerObserver::~JsSyncManagerObserver() {}
void JsSyncManagerObserver::OnChangesApplied(
syncable::ModelType model_type,
const sync_api::BaseTransaction* trans,
const sync_api::SyncManager::ChangeRecord* changes,
int change_count) {
ListValue return_args;
return_args.Append(Value::CreateStringValue(
syncable::ModelTypeToString(model_type)));
ListValue* change_values = new ListValue();
return_args.Append(change_values);
for (int i = 0; i < change_count; ++i) {
change_values->Append(changes[i].ToValue(trans));
}
parent_router_->RouteJsEvent("onChangesApplied", JsArgList(&return_args));
}
void JsSyncManagerObserver::OnChangesComplete(
syncable::ModelType model_type) {
ListValue return_args;
return_args.Append(Value::CreateStringValue(
syncable::ModelTypeToString(model_type)));
parent_router_->RouteJsEvent("onChangesComplete", JsArgList(&return_args));
}
void JsSyncManagerObserver::OnSyncCycleCompleted(
const sessions::SyncSessionSnapshot* snapshot) {
ListValue return_args;
return_args.Append(snapshot->ToValue());
parent_router_->RouteJsEvent("onSyncCycleCompleted",
JsArgList(&return_args));
}
void JsSyncManagerObserver::OnAuthError(
const GoogleServiceAuthError& auth_error) {
ListValue return_args;
return_args.Append(auth_error.ToValue());
parent_router_->RouteJsEvent("onAuthError", JsArgList(&return_args));
}
void JsSyncManagerObserver::OnUpdatedToken(const std::string& token) {
ListValue return_args;
return_args.Append(Value::CreateStringValue("<redacted>"));
parent_router_->RouteJsEvent("onUpdatedToken", JsArgList(&return_args));
}
void JsSyncManagerObserver::OnPassphraseRequired(
sync_api::PassphraseRequiredReason reason) {
ListValue return_args;
return_args.Append(Value::CreateStringValue(
sync_api::PassphraseRequiredReasonToString(reason)));
parent_router_->RouteJsEvent("onPassphraseRequired",
JsArgList(&return_args));
}
void JsSyncManagerObserver::OnPassphraseAccepted(
const std::string& bootstrap_token) {
ListValue return_args;
return_args.Append(Value::CreateStringValue("<redacted>"));
parent_router_->RouteJsEvent("onPassphraseAccepted",
JsArgList(&return_args));
}
void JsSyncManagerObserver::OnEncryptionComplete(
const syncable::ModelTypeSet& encrypted_types) {
ListValue return_args;
return_args.Append(syncable::ModelTypeSetToValue(encrypted_types));
parent_router_->RouteJsEvent("onEncryptionComplete",
JsArgList(&return_args));
}
void JsSyncManagerObserver::OnMigrationNeededForTypes(
const syncable::ModelTypeSet& types) {
ListValue return_args;
return_args.Append(syncable::ModelTypeSetToValue(types));
parent_router_->RouteJsEvent("onMigrationNeededForTypes",
JsArgList(&return_args));
}
void JsSyncManagerObserver::OnInitializationComplete() {
parent_router_->RouteJsEvent("onInitializationComplete", JsArgList());
}
void JsSyncManagerObserver::OnStopSyncingPermanently() {
parent_router_->RouteJsEvent("onStopSyncingPermanently", JsArgList());
}
void JsSyncManagerObserver::OnClearServerDataSucceeded() {
parent_router_->RouteJsEvent("onClearServerDataSucceeded", JsArgList());
}
void JsSyncManagerObserver::OnClearServerDataFailed() {
parent_router_->RouteJsEvent("onClearServerDataFailed", JsArgList());
}
} // namespace browser_sync
|
bsd-3-clause
|
googlearchive/py-gfm
|
tests/test_strikethrough.py
|
1357
|
# Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
import gfm
from test_case import TestCase
class TestStrikethrough(TestCase):
    """Tests for the GFM strikethrough extension (``~~text~~`` -> ``<del>``)."""

    def setUp(self):
        # Fresh extension instance per test; the empty list is the (unused)
        # extension-configuration argument.
        self.strikethrough = gfm.StrikethroughExtension([])

    def test_double_tilde_strikes(self):
        # Exactly two tildes on each side produce a <del> element.
        self.assert_renders("""
        <p>This is <del>struck</del>, and so <del>is this</del>.</p>
        """, """
        This is ~~struck~~, and so ~~is this~~.
        """, [self.strikethrough])

    def test_single_tilde_doesnt_strike(self):
        # Single tildes are literal text, and a single tilde inside a
        # double-tilde span does not terminate it.
        self.assert_renders("""
        <p>This is ~not struck~, and this tilde <del>doesn't~stop~it</del>.</p>
        """, """
        This is ~not struck~, and this tilde ~~doesn't~stop~it~~.
        """, [self.strikethrough])

    def test_strikethrough_nests(self):
        # Strikethrough composes with emphasis in either nesting order.
        self.assert_renders("""
        <p><del><strong>outer</strong></del>, <strong><del>inner</del></strong>.</p>
        """, """
        ~~**outer**~~, **~~inner~~**.
        """, [self.strikethrough])

    def test_extra_tildes_dont_cause_strikethrough(self):
        # Without the extension enabled (note the empty extension list),
        # runs of three or more tildes render as plain text.
        self.assert_renders("""
        <p>~~~foo~~~, ~~~bar~~, ~~~~baz~~~~</p>
        """, """
        ~~~foo~~~, ~~~bar~~, ~~~~baz~~~~
        """, [])
|
bsd-3-clause
|
srickardti/openthread
|
src/core/thread/announce_sender.cpp
|
5373
|
/*
* Copyright (c) 2016-2018, The OpenThread Authors.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. Neither the name of the copyright holder nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
/**
* @file
* This file implements the AnnounceSender.
*/
#include "announce_sender.hpp"
#include <openthread/platform/radio.h>
#include "common/code_utils.hpp"
#include "common/instance.hpp"
#include "common/locator-getters.hpp"
#include "common/logging.hpp"
#include "common/random.hpp"
#include "meshcop/meshcop.hpp"
#include "meshcop/meshcop_tlvs.hpp"
#include "radio/radio.hpp"
namespace ot {
AnnounceSenderBase::AnnounceSenderBase(Instance &aInstance, Timer::Handler aHandler)
: InstanceLocator(aInstance)
, mChannelMask()
, mPeriod(0)
, mJitter(0)
, mCount(0)
, mChannel(0)
, mTimer(aInstance, aHandler, this)
{
}
void AnnounceSenderBase::SendAnnounce(Mac::ChannelMask aChannelMask, uint8_t aCount, uint32_t aPeriod, uint16_t aJitter)
{
VerifyOrExit(aPeriod != 0, OT_NOOP);
VerifyOrExit(aJitter < aPeriod, OT_NOOP);
aChannelMask.Intersect(Get<Mac::Mac>().GetSupportedChannelMask());
VerifyOrExit(!aChannelMask.IsEmpty(), OT_NOOP);
mChannelMask = aChannelMask;
mCount = aCount;
mPeriod = aPeriod;
mJitter = aJitter;
mChannel = Mac::ChannelMask::kChannelIteratorFirst;
mTimer.Start(Random::NonCrypto::AddJitter(mPeriod, mJitter));
otLogInfoMle("Starting periodic MLE Announcements tx, mask %s, count %u, period %u, jitter %u",
aChannelMask.ToString().AsCString(), aCount, aPeriod, aJitter);
exit:
return;
}
void AnnounceSenderBase::HandleTimer(void)
{
otError error;
error = mChannelMask.GetNextChannel(mChannel);
if (error == OT_ERROR_NOT_FOUND)
{
if (mCount != 0)
{
mCount--;
VerifyOrExit(mCount != 0, OT_NOOP);
}
mChannel = Mac::ChannelMask::kChannelIteratorFirst;
error = mChannelMask.GetNextChannel(mChannel);
}
OT_ASSERT(error == OT_ERROR_NONE);
Get<Mle::MleRouter>().SendAnnounce(mChannel, false);
mTimer.Start(Random::NonCrypto::AddJitter(mPeriod, mJitter));
exit:
return;
}
#if OPENTHREAD_CONFIG_ANNOUNCE_SENDER_ENABLE
AnnounceSender::AnnounceSender(Instance &aInstance)
: AnnounceSenderBase(aInstance, AnnounceSender::HandleTimer)
{
}
void AnnounceSender::HandleTimer(Timer &aTimer)
{
aTimer.GetOwner<AnnounceSender>().AnnounceSenderBase::HandleTimer();
}
void AnnounceSender::CheckState(void)
{
Mle::MleRouter & mle = Get<Mle::MleRouter>();
uint32_t interval = kRouterTxInterval;
uint32_t period;
Mac::ChannelMask channelMask;
switch (mle.GetRole())
{
case Mle::kRoleRouter:
case Mle::kRoleLeader:
interval = kRouterTxInterval;
break;
case Mle::kRoleChild:
#if OPENTHREAD_FTD
if (mle.IsRouterEligible() && mle.IsRxOnWhenIdle())
{
interval = kReedTxInterval;
break;
}
#endif
// fall through
case Mle::kRoleDisabled:
case Mle::kRoleDetached:
Stop();
ExitNow();
}
VerifyOrExit(Get<MeshCoP::ActiveDataset>().GetChannelMask(channelMask) == OT_ERROR_NONE, Stop());
period = interval / channelMask.GetNumberOfChannels();
if (period < kMinTxPeriod)
{
period = kMinTxPeriod;
}
VerifyOrExit(!IsRunning() || (period != GetPeriod()) || (GetChannelMask() != channelMask), OT_NOOP);
SendAnnounce(channelMask, 0, period, kMaxJitter);
exit:
return;
}
void AnnounceSender::Stop(void)
{
AnnounceSenderBase::Stop();
otLogInfoMle("Stopping periodic MLE Announcements tx");
}
// Notifier hook: a role change can alter whether (and how fast)
// announcements should be sent, so re-run the state check.
void AnnounceSender::HandleNotifierEvents(Events aEvents)
{
    if (aEvents.Contains(kEventThreadRoleChanged))
    {
        CheckState();
    }
}
#endif // OPENTHREAD_CONFIG_ANNOUNCE_SENDER_ENABLE
} // namespace ot
|
bsd-3-clause
|
jasonbaker/envbuilder
|
envbuilder/help.py
|
1227
|
import sys
from envbuilder.command import BuiltinCommand, Command
from envbuilder.custom import _CustomCommand
from envbuilder.args import Arguments
from envbuilder.sh import output_packages
class Help(BuiltinCommand):
"""
Get help.
"""
name = 'help'
def run(self, *args, **kwargs):
# This method should never be called. We are assuming that
# parse_args will get called and will exit the program
raise NotImplementedError
def parse_args(self, raw_args):
if len(raw_args) >= 1:
secondary_command_name = raw_args[0]
secondary_command_cls = Command.lookup_command(secondary_command_name)
secondary_command = secondary_command_cls()
secondary_command.print_help()
else:
self.print_main_help()
sys.exit(0)
def print_main_help(self):
print "SYNTAX: envb [command]"
output_packages(Command.builtin_cmd_mapping, 'Common commands')
print '\nFor more info type "envb help <command>"'
def print_help(self):
args = sys.argv[1:]
args.append('help')
next_cmd = ' '.join(args)
print "Run envbuilder %s for more info." % next_cmd
|
bsd-3-clause
|
hithroc/TelegramFlitter
|
TelegramFlitter/CardSearchBot.cs
|
1685
|
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Telegram.Bot;
using Telegram.Bot.Args;
using Telegram.Bot.Types.InlineQueryResults;
namespace TelegramFlitter
{
class CardSearchBot
{
    private TelegramBotClient bot;
    private Card[] cards;

    public CardSearchBot(string token, Card[] cards)
    {
        this.cards = cards;
        bot = new TelegramBotClient(token);
        bot.OnInlineQuery += BotOnInlineQuery;
    }

    // Answers an inline query with up to 30 cards whose full name contains
    // the query text (case-insensitive substring match).
    private async void BotOnInlineQuery(object sender, InlineQueryEventArgs args)
    {
        string needle = args.InlineQuery.Query.ToLower();
        var matches = cards
            .AsParallel()
            .Where(card => card.FullName.ToLower().Contains(needle))
            .Take(30)
            .Select(ConstructResult)
            .ToArray();
        await bot.AnswerInlineQueryAsync(args.InlineQuery.Id, matches);
    }

    // Builds an inline photo result for one card. Problem cards are
    // landscape (480x344); every other card type is portrait (344x480).
    private static InlineQueryResult ConstructResult(Card card)
    {
        bool isProblem = card.Type == CardType.Problem;
        int width = isProblem ? 480 : 344;
        int height = isProblem ? 344 : 480;
        return new InlineQueryResultPhoto
        {
            ThumbUrl = card.ImageUrl,
            Url = card.ImageUrl,
            Id = card.Set + card.Number,
            ThumbWidth = width,
            ThumbHeight = height,
            Width = width,
            Height = height
        };
    }

    public void Run()
    {
        bot.StartReceiving();
    }

    public void Stop()
    {
        bot.StopReceiving();
    }
}
}
|
bsd-3-clause
|
axhm3a/ZendFrameworkCertification
|
library/Zend/Cache/Frontend/Class.php
|
6103
|
<?php
/**
* Zend Framework
*
* LICENSE
*
* This source file is subject to the new BSD license that is bundled
* with this package in the file LICENSE.txt.
* It is also available through the world-wide-web at this URL:
* http://framework.zend.com/license/new-bsd
* If you did not receive a copy of the license and are unable to
* obtain it through the world-wide-web, please send an email
* to license@zend.com so we can send you a copy immediately.
*
* @category Zend
* @package Zend_Cache
* @subpackage Frontend
* @copyright Copyright (c) 2005-2008 Zend Technologies USA Inc. (http://www.zend.com)
* @license http://framework.zend.com/license/new-bsd New BSD License
*/
/**
* Zend_Cache_Core
*/
require_once 'Zend/Cache/Core.php';
/**
* @package Zend_Cache
* @subpackage Frontend
* @copyright Copyright (c) 2005-2008 Zend Technologies USA Inc. (http://www.zend.com)
* @license http://framework.zend.com/license/new-bsd New BSD License
*/
class Zend_Cache_Frontend_Class extends Zend_Cache_Core
{
    // ------------------
    // --- Properties ---
    // ------------------

    /**
     * Available options
     *
     * ====> (mixed) cached_entity :
     * - if set to a class name, we will cache an abstract class and will use only static calls
     * - if set to an object, we will cache this object methods
     *
     * ====> (boolean) cache_by_default :
     * - if true, method calls will be cached by default
     *
     * ====> (array) cached_methods :
     * - an array of method names which will be cached (even if cache_by_default = false)
     *
     * ====> (array) non_cached_methods :
     * - an array of method names which won't be cached (even if cache_by_default = true)
     *
     * @var array available options
     */
    protected $_specificOptions = array(
        'cached_entity' => null,
        'cache_by_default' => true,
        'cached_methods' => array(),
        'non_cached_methods' => array()
    );

    /**
     * Tags array
     *
     * @var array
     */
    private $_tags = array();

    /**
     * SpecificLifetime value
     *
     * false => no specific life time
     *
     * @var int
     */
    private $_specificLifetime = false;

    /**
     * The cached object or the name of the cached abstract class
     *
     * @var mixed
     */
    private $_cachedEntity = null;

    /**
     * The class name of the cached object or cached abstract class
     *
     * Used to differentiate between different classes with the same method calls.
     *
     * @var string
     */
    private $_cachedEntityLabel = '';

    // ----------------------
    // --- Public methods ---
    // ----------------------

    /**
     * Constructor
     *
     * Validates the mandatory 'cached_entity' option and forces automatic
     * serialization (cached results are array(output, return-value) pairs).
     *
     * @param array $options associative array of options
     */
    public function __construct($options = array())
    {
        // each() was removed in PHP 8; a plain foreach is the equivalent
        // iteration over a freshly passed-in options array.
        foreach ($options as $name => $value) {
            $this->setOption($name, $value);
        }
        if (is_null($this->_specificOptions['cached_entity'])) {
            Zend_Cache::throwException('cached_entity must be set !');
        } else {
            if (!is_string($this->_specificOptions['cached_entity']) && !is_object($this->_specificOptions['cached_entity'])) {
                Zend_Cache::throwException('cached_entity must be an object or a class name');
            }
        }
        $this->_cachedEntity = $this->_specificOptions['cached_entity'];
        // The label keys the cache ids: the class name itself for static
        // (abstract-class) caching, or the object's class for instances.
        if (is_string($this->_cachedEntity)) {
            $this->_cachedEntityLabel = $this->_cachedEntity;
        } else {
            $ro = new ReflectionObject($this->_cachedEntity);
            $this->_cachedEntityLabel = $ro->getName();
        }
        $this->setOption('automatic_serialization', true);
    }

    /**
     * Set a specific life time
     *
     * @param int $specificLifetime
     */
    public function setSpecificLifetime($specificLifetime = false)
    {
        $this->_specificLifetime = $specificLifetime;
    }

    /**
     * Set the cache array
     *
     * @param array $tags
     */
    public function setTagsArray($tags = array())
    {
        $this->_tags = $tags;
    }

    /**
     * Main method : call the specified method or get the result from cache
     *
     * Both the method's return value and anything it echoes are cached and
     * replayed on a hit.
     *
     * @param string $name method name
     * @param array $parameters method parameters
     * @return mixed result
     */
    public function __call($name, $parameters)
    {
        $cacheBool1 = $this->_specificOptions['cache_by_default'];
        $cacheBool2 = in_array($name, $this->_specificOptions['cached_methods']);
        $cacheBool3 = in_array($name, $this->_specificOptions['non_cached_methods']);
        $cache = (($cacheBool1 || $cacheBool2) && (!$cacheBool3));
        if (!$cache) {
            // Caching is disabled for this method: call it directly.
            return call_user_func_array(array($this->_cachedEntity, $name), $parameters);
        }
        $id = $this->_makeId($name, $parameters);
        if ($this->test($id)) {
            // A cache is available
            $result = $this->load($id);
            $output = $result[0];
            $return = $result[1];
        } else {
            // A cache is not available: invoke the method while capturing
            // its echoed output, then store (output, return) together.
            ob_start();
            ob_implicit_flush(false);
            $return = call_user_func_array(array($this->_cachedEntity, $name), $parameters);
            $output = ob_get_contents();
            ob_end_clean();
            $data = array($output, $return);
            $this->save($data, $id, $this->_tags, $this->_specificLifetime);
        }
        echo $output;
        return $return;
    }

    // ------------------------------------
    // --- Private or protected methods ---
    // ------------------------------------

    /**
     * Make a cache id from the method name and parameters
     *
     * @param string $name method name
     * @param array $parameters method parameters
     * @return string cache id
     */
    private function _makeId($name, $parameters)
    {
        return md5($this->_cachedEntityLabel . '__' . $name . '__' . serialize($parameters));
    }
}
|
bsd-3-clause
|
8v060htwyc/whois
|
whois-api/src/test/java/net/ripe/db/whois/api/rest/CrossOriginFilterTest.java
|
4743
|
package net.ripe.db.whois.api.rest;
import com.google.common.net.HttpHeaders;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;
import javax.ws.rs.HttpMethod;
import javax.ws.rs.container.ContainerRequestContext;
import javax.ws.rs.container.ContainerResponseContext;
import javax.ws.rs.core.MultivaluedHashMap;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.core.UriInfo;
import static org.mockito.Matchers.anyObject;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
/**
 * Unit tests for {@code CrossOriginFilter}.
 *
 * Verifies that the CORS response headers ({@code Access-Control-Allow-*})
 * are set only when the request's {@code Origin} header is a ripe.net
 * origin, for both simple GET requests and OPTIONS preflight requests, and
 * that malformed origins are rejected.
 */
@RunWith(MockitoJUnitRunner.class)
public class CrossOriginFilterTest {

    @Mock
    private ContainerRequestContext requestContext;
    @Mock
    private ContainerResponseContext responseContext;
    @Mock
    private UriInfo uriInfo;
    @Mock
    private MultivaluedMap<String, Object> responseHeaders;

    private CrossOriginFilter subject;

    @Before
    public void setup() {
        when(requestContext.getUriInfo()).thenReturn(uriInfo);
        when(requestContext.getHeaders()).thenReturn(new MultivaluedHashMap());
        when(responseContext.getHeaders()).thenReturn(responseHeaders);
        this.subject = new CrossOriginFilter();
    }

    @Test
    public void get_request_from_apps_db_ripe_net_is_allowed() throws Exception {
        configureRequestContext(HttpMethod.GET, "https://apps.db.ripe.net", "/some/path");
        subject.filter(requestContext, responseContext);
        // The allowed origin is echoed back verbatim (not a wildcard).
        verify(responseHeaders).putSingle(HttpHeaders.ACCESS_CONTROL_ALLOW_ORIGIN, "https://apps.db.ripe.net");
    }

    @Test
    public void get_request_from_outside_ripe_net_is_not_allowed() throws Exception {
        configureRequestContext(HttpMethod.GET, "https://www.foo.net", "/some/path");
        subject.filter(requestContext, responseContext);
        verify(responseHeaders, never()).putSingle(eq(HttpHeaders.ACCESS_CONTROL_ALLOW_ORIGIN), anyObject());
    }

    @Test
    public void preflight_request_from_apps_db_ripe_net_is_allowed() throws Exception {
        // OPTIONS preflight: allowed origins must also receive the
        // allowed-methods and allowed-headers CORS headers.
        configureRequestContext(HttpMethod.OPTIONS, "https://apps.db.ripe.net", "/some/path");
        when(requestContext.getHeaderString(HttpHeaders.ACCESS_CONTROL_REQUEST_METHOD)).thenReturn(HttpMethod.POST);
        when(requestContext.getHeaderString(HttpHeaders.ACCESS_CONTROL_REQUEST_HEADERS)).thenReturn(HttpHeaders.X_USER_IP);
        subject.filter(requestContext, responseContext);
        verify(responseHeaders).putSingle(HttpHeaders.ACCESS_CONTROL_ALLOW_ORIGIN, "https://apps.db.ripe.net");
        verify(responseHeaders).putSingle(HttpHeaders.ACCESS_CONTROL_ALLOW_METHODS, "GET,POST,PUT,DELETE,HEAD");
        verify(responseHeaders).putSingle(HttpHeaders.ACCESS_CONTROL_ALLOW_HEADERS, "X-Requested-With,Content-Type,Accept,Origin");
    }

    @Test
    public void preflight_request_from_outside_ripe_net_is_not_allowed() throws Exception {
        configureRequestContext(HttpMethod.OPTIONS, "https://www.foo.net", "/some/path");
        when(requestContext.getHeaderString(HttpHeaders.ACCESS_CONTROL_REQUEST_METHOD)).thenReturn(HttpMethod.POST);
        when(requestContext.getHeaderString(HttpHeaders.ACCESS_CONTROL_REQUEST_HEADERS)).thenReturn(HttpHeaders.X_USER_IP);
        subject.filter(requestContext, responseContext);
        verify(responseHeaders, never()).putSingle(eq(HttpHeaders.ACCESS_CONTROL_ALLOW_ORIGIN), anyObject());
        verify(responseHeaders, never()).putSingle(eq(HttpHeaders.ACCESS_CONTROL_ALLOW_METHODS), anyObject());
        verify(responseHeaders, never()).putSingle(eq(HttpHeaders.ACCESS_CONTROL_ALLOW_HEADERS), anyObject());
    }

    @Test
    public void malformed_origin() throws Exception {
        // An unparseable Origin value must not produce CORS headers.
        configureRequestContext(HttpMethod.GET, "?invalid?", "/some/path");
        subject.filter(requestContext, responseContext);
        verify(responseHeaders, never()).putSingle(eq(HttpHeaders.ACCESS_CONTROL_ALLOW_ORIGIN), anyObject());
    }

    @Test
    public void host_and_port() throws Exception {
        // An explicit port in the Origin is preserved in the echoed header.
        configureRequestContext(HttpMethod.GET, "http://host.ripe.net:8443", "/some/path");
        subject.filter(requestContext, responseContext);
        verify(responseHeaders).putSingle(HttpHeaders.ACCESS_CONTROL_ALLOW_ORIGIN, "http://host.ripe.net:8443");
    }

    // helper methods

    // Stubs the mocked request with the given HTTP method, Origin header
    // value, and request path.
    private void configureRequestContext(final String method, final String origin, final String path) {
        when(requestContext.getMethod()).thenReturn(method);
        when(requestContext.getHeaderString(HttpHeaders.ORIGIN)).thenReturn(origin);
        when(uriInfo.getPath()).thenReturn(path);
    }
}
|
bsd-3-clause
|
figment/niflib
|
src/obj/NiBinaryVoxelData.cpp
|
7386
|
/* Copyright (c) 2006, NIF File Format Library and Tools
All rights reserved. Please see niflib.h for license. */
//-----------------------------------NOTICE----------------------------------//
// Some of this file is automatically filled in by a Python script. Only //
// add custom code in the designated areas or it will be overwritten during //
// the next update. //
//-----------------------------------NOTICE----------------------------------//
//--BEGIN FILE HEAD CUSTOM CODE--//
//--END CUSTOM CODE--//
#include "../../include/FixLink.h"
#include "../../include/ObjectRegistry.h"
#include "../../include/NIF_IO.h"
#include "../../include/obj/NiBinaryVoxelData.h"
using namespace Niflib;
//Definition of TYPE constant
const Type NiBinaryVoxelData::TYPE("NiBinaryVoxelData", &NiObject::TYPE );
// Default constructor: zero-initializes all scalar fields; the vector
// members start empty and are populated by Read(). (Generated code -- only
// the marked custom-code areas are meant to be hand-edited.)
NiBinaryVoxelData::NiBinaryVoxelData() : unknownShort1((unsigned short)0), unknownShort2((unsigned short)0), unknownShort3((unsigned short)0), numUnknownVectors((unsigned int)0), numUnknownBytes2((unsigned int)0) {
    //--BEGIN CONSTRUCTOR CUSTOM CODE--//
    //--END CUSTOM CODE--//
}
// Destructor: nothing to release beyond the members' own destructors.
NiBinaryVoxelData::~NiBinaryVoxelData() {
    //--BEGIN DESTRUCTOR CUSTOM CODE--//
    //--END CUSTOM CODE--//
}
// Returns the run-time type descriptor registered for this class.
const Type & NiBinaryVoxelData::GetType() const {
    return TYPE;
}
// Factory hook used by the object registry to instantiate this type.
NiObject * NiBinaryVoxelData::Create() {
    return new NiBinaryVoxelData;
}
// Deserializes this record from a NIF stream. Field order mirrors Write()
// exactly; each element count is read first and used to size the
// corresponding vector before its elements are streamed in.
// (Generated code -- only the marked custom-code areas are hand-editable.)
void NiBinaryVoxelData::Read( istream& in, list<unsigned int> & link_stack, const NifInfo & info ) {
    //--BEGIN PRE-READ CUSTOM CODE--//
    //--END CUSTOM CODE--//

    NiObject::Read( in, link_stack, info );
    NifStream( unknownShort1, in, info );
    NifStream( unknownShort2, in, info );
    NifStream( unknownShort3, in, info );
    // Fixed-size 7-element float array.
    for (unsigned int i1 = 0; i1 < 7; i1++) {
        NifStream( unknown7Floats[i1], in, info );
    };
    // Fixed-size 7x12 byte matrix.
    for (unsigned int i1 = 0; i1 < 7; i1++) {
        for (unsigned int i2 = 0; i2 < 12; i2++) {
            NifStream( unknownBytes1[i1][i2], in, info );
        };
    };
    NifStream( numUnknownVectors, in, info );
    unknownVectors.resize(numUnknownVectors);
    for (unsigned int i1 = 0; i1 < unknownVectors.size(); i1++) {
        NifStream( unknownVectors[i1], in, info );
    };
    NifStream( numUnknownBytes2, in, info );
    unknownBytes2.resize(numUnknownBytes2);
    for (unsigned int i1 = 0; i1 < unknownBytes2.size(); i1++) {
        NifStream( unknownBytes2[i1], in, info );
    };
    // Fixed-size 5-element int array.
    for (unsigned int i1 = 0; i1 < 5; i1++) {
        NifStream( unknown5Ints[i1], in, info );
    };

    //--BEGIN POST-READ CUSTOM CODE--//
    //--END CUSTOM CODE--//
}
// Serializes this record to a NIF stream; field order mirrors Read().
// The count members are refreshed from the current vector sizes before
// being written (assigning them in a const method suggests they are
// declared mutable -- a common pattern in this generated code; confirm in
// the header). (Generated code -- edit only the custom-code areas.)
void NiBinaryVoxelData::Write( ostream& out, const map<NiObjectRef,unsigned int> & link_map, list<NiObject *> & missing_link_stack, const NifInfo & info ) const {
    //--BEGIN PRE-WRITE CUSTOM CODE--//
    //--END CUSTOM CODE--//

    NiObject::Write( out, link_map, missing_link_stack, info );
    // Keep the on-disk counts in sync with the vectors' actual sizes.
    numUnknownBytes2 = (unsigned int)(unknownBytes2.size());
    numUnknownVectors = (unsigned int)(unknownVectors.size());
    NifStream( unknownShort1, out, info );
    NifStream( unknownShort2, out, info );
    NifStream( unknownShort3, out, info );
    for (unsigned int i1 = 0; i1 < 7; i1++) {
        NifStream( unknown7Floats[i1], out, info );
    };
    for (unsigned int i1 = 0; i1 < 7; i1++) {
        for (unsigned int i2 = 0; i2 < 12; i2++) {
            NifStream( unknownBytes1[i1][i2], out, info );
        };
    };
    NifStream( numUnknownVectors, out, info );
    for (unsigned int i1 = 0; i1 < unknownVectors.size(); i1++) {
        NifStream( unknownVectors[i1], out, info );
    };
    NifStream( numUnknownBytes2, out, info );
    for (unsigned int i1 = 0; i1 < unknownBytes2.size(); i1++) {
        NifStream( unknownBytes2[i1], out, info );
    };
    for (unsigned int i1 = 0; i1 < 5; i1++) {
        NifStream( unknown5Ints[i1], out, info );
    };

    //--BEGIN POST-WRITE CUSTOM CODE--//
    //--END CUSTOM CODE--//
}
// Renders a human-readable dump of every field. Unless `verbose` is set,
// array output stops after MAXARRAYDUMP entries (with a truncation notice).
// Counts are refreshed from the vector sizes first, as in Write().
// (Generated code -- the duplicated truncation checks per loop are a
// generator artifact; edit only the custom-code areas.)
std::string NiBinaryVoxelData::asString( bool verbose ) const {
    //--BEGIN PRE-STRING CUSTOM CODE--//
    //--END CUSTOM CODE--//

    stringstream out;
    unsigned int array_output_count = 0;
    out << NiObject::asString();
    numUnknownBytes2 = (unsigned int)(unknownBytes2.size());
    numUnknownVectors = (unsigned int)(unknownVectors.size());
    out << " Unknown Short 1: " << unknownShort1 << endl;
    out << " Unknown Short 2: " << unknownShort2 << endl;
    out << " Unknown Short 3: " << unknownShort3 << endl;
    array_output_count = 0;
    for (unsigned int i1 = 0; i1 < 7; i1++) {
        if ( !verbose && ( array_output_count > MAXARRAYDUMP ) ) {
            out << "<Data Truncated. Use verbose mode to see complete listing.>" << endl;
            break;
        };
        if ( !verbose && ( array_output_count > MAXARRAYDUMP ) ) {
            break;
        };
        out << " Unknown 7 Floats[" << i1 << "]: " << unknown7Floats[i1] << endl;
        array_output_count++;
    };
    array_output_count = 0;
    for (unsigned int i1 = 0; i1 < 7; i1++) {
        if ( !verbose && ( array_output_count > MAXARRAYDUMP ) ) {
            out << "<Data Truncated. Use verbose mode to see complete listing.>" << endl;
            break;
        };
        for (unsigned int i2 = 0; i2 < 12; i2++) {
            if ( !verbose && ( array_output_count > MAXARRAYDUMP ) ) {
                break;
            };
            out << " Unknown Bytes 1[" << i2 << "]: " << unknownBytes1[i1][i2] << endl;
            array_output_count++;
        };
    };
    out << " Num Unknown Vectors: " << numUnknownVectors << endl;
    array_output_count = 0;
    for (unsigned int i1 = 0; i1 < unknownVectors.size(); i1++) {
        if ( !verbose && ( array_output_count > MAXARRAYDUMP ) ) {
            out << "<Data Truncated. Use verbose mode to see complete listing.>" << endl;
            break;
        };
        if ( !verbose && ( array_output_count > MAXARRAYDUMP ) ) {
            break;
        };
        out << " Unknown Vectors[" << i1 << "]: " << unknownVectors[i1] << endl;
        array_output_count++;
    };
    out << " Num Unknown Bytes 2: " << numUnknownBytes2 << endl;
    array_output_count = 0;
    for (unsigned int i1 = 0; i1 < unknownBytes2.size(); i1++) {
        if ( !verbose && ( array_output_count > MAXARRAYDUMP ) ) {
            out << "<Data Truncated. Use verbose mode to see complete listing.>" << endl;
            break;
        };
        if ( !verbose && ( array_output_count > MAXARRAYDUMP ) ) {
            break;
        };
        out << " Unknown Bytes 2[" << i1 << "]: " << unknownBytes2[i1] << endl;
        array_output_count++;
    };
    array_output_count = 0;
    for (unsigned int i1 = 0; i1 < 5; i1++) {
        if ( !verbose && ( array_output_count > MAXARRAYDUMP ) ) {
            out << "<Data Truncated. Use verbose mode to see complete listing.>" << endl;
            break;
        };
        if ( !verbose && ( array_output_count > MAXARRAYDUMP ) ) {
            break;
        };
        out << " Unknown 5 Ints[" << i1 << "]: " << unknown5Ints[i1] << endl;
        array_output_count++;
    };
    return out.str();

    //--BEGIN POST-STRING CUSTOM CODE--//
    //--END CUSTOM CODE--//
}
// Resolves object references after loading. This record declares no links
// of its own, so only the base-class pass runs.
void NiBinaryVoxelData::FixLinks( const map<unsigned int,NiObjectRef> & objects, list<unsigned int> & link_stack, list<NiObjectRef> & missing_link_stack, const NifInfo & info ) {
    //--BEGIN PRE-FIXLINKS CUSTOM CODE--//
    //--END CUSTOM CODE--//

    NiObject::FixLinks( objects, link_stack, missing_link_stack, info );

    //--BEGIN POST-FIXLINKS CUSTOM CODE--//
    //--END CUSTOM CODE--//
}
// Returns the counted references held by this object (none beyond the
// base class's).
std::list<NiObjectRef> NiBinaryVoxelData::GetRefs() const {
    list<Ref<NiObject> > refs;
    refs = NiObject::GetRefs();
    return refs;
}
// Returns the uncounted (weak) pointers held by this object (none beyond
// the base class's).
std::list<NiObject *> NiBinaryVoxelData::GetPtrs() const {
    list<NiObject *> ptrs;
    ptrs = NiObject::GetPtrs();
    return ptrs;
}
//--This object has no eligable attributes. No example implementation generated--//
//--BEGIN MISC CUSTOM CODE--//
//--END CUSTOM CODE--//
|
bsd-3-clause
|
jmmease/pandas
|
pandas/tests/reshape/test_merge.py
|
64541
|
# pylint: disable=E1103
import pytest
from datetime import datetime, date
from numpy.random import randn
from numpy import nan
import numpy as np
import random
import pandas as pd
from pandas.compat import lrange, lzip
from pandas.core.reshape.concat import concat
from pandas.core.reshape.merge import merge, MergeError
from pandas.util.testing import assert_frame_equal, assert_series_equal
from pandas.core.dtypes.dtypes import CategoricalDtype
from pandas.core.dtypes.common import is_categorical_dtype, is_object_dtype
from pandas import DataFrame, Index, MultiIndex, Series, Categorical
import pandas.util.testing as tm
from pandas.api.types import CategoricalDtype as CDT
N = 50
NGROUPS = 8
def get_test_data(ngroups=NGROUPS, n=N):
    """Return an array of ``n`` group labels from ``range(ngroups)``, shuffled.

    Labels 0..ngroups-1 are tiled evenly; when ``ngroups`` does not divide
    ``n``, the first ``n % ngroups`` labels are appended once more so the
    result has exactly ``n`` elements.
    """
    groups = lrange(ngroups)
    arr = np.asarray(np.tile(groups, n // ngroups))
    shortfall = n - len(arr)
    if shortfall > 0:
        arr = np.asarray(list(arr) + groups[:shortfall])
    random.shuffle(arr)
    return arr
class TestMerge(object):
def setup_method(self, method):
    # Shared fixtures: df/df2 merge on two integer key columns; left/right
    # merge a 'key' column against a string index.
    # aggregate multiple columns
    self.df = DataFrame({'key1': get_test_data(),
                         'key2': get_test_data(),
                         'data1': np.random.randn(N),
                         'data2': np.random.randn(N)})
    # exclude a couple keys for fun
    self.df = self.df[self.df['key2'] > 1]
    self.df2 = DataFrame({'key1': get_test_data(n=N // 5),
                          'key2': get_test_data(ngroups=NGROUPS // 2,
                                                n=N // 5),
                          'value': np.random.randn(N // 5)})
    self.left = DataFrame({'key': ['a', 'b', 'c', 'd', 'e', 'e', 'a'],
                           'v1': np.random.randn(7)})
    self.right = DataFrame({'v2': np.random.randn(4)},
                           index=['d', 'b', 'c', 'a'])
def test_merge_inner_join_empty(self):
    # GH 15328: inner index-merge of an empty frame with a non-empty one
    # yields an empty frame that keeps the non-empty side's column dtype.
    df_empty = pd.DataFrame()
    df_a = pd.DataFrame({'a': [1, 2]}, index=[0, 1], dtype='int64')
    result = pd.merge(df_empty, df_a, left_index=True, right_index=True)
    expected = pd.DataFrame({'a': []}, index=[], dtype='int64')
    assert_frame_equal(result, expected)
def test_merge_common(self):
    # With no `on` given, merge must default to the common columns.
    joined = merge(self.df, self.df2)
    exp = merge(self.df, self.df2, on=['key1', 'key2'])
    tm.assert_frame_equal(joined, exp)
def test_merge_index_singlekey_right_vs_left(self):
    # A left merge (key vs. index) must mirror the symmetric right merge,
    # both with sort=False and sort=True.
    left = DataFrame({'key': ['a', 'b', 'c', 'd', 'e', 'e', 'a'],
                      'v1': np.random.randn(7)})
    right = DataFrame({'v2': np.random.randn(4)},
                      index=['d', 'b', 'c', 'a'])
    merged1 = merge(left, right, left_on='key',
                    right_index=True, how='left', sort=False)
    merged2 = merge(right, left, right_on='key',
                    left_index=True, how='right', sort=False)
    assert_frame_equal(merged1, merged2.loc[:, merged1.columns])
    merged1 = merge(left, right, left_on='key',
                    right_index=True, how='left', sort=True)
    merged2 = merge(right, left, right_on='key',
                    left_index=True, how='right', sort=True)
    assert_frame_equal(merged1, merged2.loc[:, merged1.columns])
def test_merge_index_singlekey_inner(self):
    # Inner key-vs-index merge must equal join() restricted to the rows
    # present in the merge result.
    left = DataFrame({'key': ['a', 'b', 'c', 'd', 'e', 'e', 'a'],
                      'v1': np.random.randn(7)})
    right = DataFrame({'v2': np.random.randn(4)},
                      index=['d', 'b', 'c', 'a'])
    # inner join
    result = merge(left, right, left_on='key', right_index=True,
                   how='inner')
    expected = left.join(right, on='key').loc[result.index]
    assert_frame_equal(result, expected)
    result = merge(right, left, right_on='key', left_index=True,
                   how='inner')
    expected = left.join(right, on='key').loc[result.index]
    assert_frame_equal(result, expected.loc[:, result.columns])
def test_merge_misspecified(self):
    # Inconsistent/incomplete key specifications must raise ValueError.
    pytest.raises(ValueError, merge, self.left, self.right,
                  left_index=True)
    pytest.raises(ValueError, merge, self.left, self.right,
                  right_index=True)
    pytest.raises(ValueError, merge, self.left, self.left,
                  left_on='key', on='key')
    pytest.raises(ValueError, merge, self.df, self.df2,
                  left_on=['key1'], right_on=['key1', 'key2'])
def test_index_and_on_parameters_confusion(self):
    # Passing key lists where left_index/right_index booleans are expected
    # must raise ValueError.
    pytest.raises(ValueError, merge, self.df, self.df2, how='left',
                  left_index=False, right_index=['key1', 'key2'])
    pytest.raises(ValueError, merge, self.df, self.df2, how='left',
                  left_index=['key1', 'key2'], right_index=False)
    pytest.raises(ValueError, merge, self.df, self.df2, how='left',
                  left_index=['key1', 'key2'],
                  right_index=['key1', 'key2'])
def test_merge_overlap(self):
    # Self-merge on a duplicated key: row count equals the sum of squared
    # key frequencies, and the overlapping 'v1' column gets _x/_y suffixes.
    merged = merge(self.left, self.left, on='key')
    exp_len = (self.left['key'].value_counts() ** 2).sum()
    assert len(merged) == exp_len
    assert 'v1_x' in merged
    assert 'v1_y' in merged
def test_merge_different_column_key_names(self):
    # Outer merge on differently named keys keeps both key columns (NaN
    # where a side has no match) and suffixes the overlapping 'value'.
    left = DataFrame({'lkey': ['foo', 'bar', 'baz', 'foo'],
                      'value': [1, 2, 3, 4]})
    right = DataFrame({'rkey': ['foo', 'bar', 'qux', 'foo'],
                       'value': [5, 6, 7, 8]})
    merged = left.merge(right, left_on='lkey', right_on='rkey',
                        how='outer', sort=True)
    exp = pd.Series(['bar', 'baz', 'foo', 'foo', 'foo', 'foo', np.nan],
                    name='lkey')
    tm.assert_series_equal(merged['lkey'], exp)
    exp = pd.Series(['bar', np.nan, 'foo', 'foo', 'foo', 'foo', 'qux'],
                    name='rkey')
    tm.assert_series_equal(merged['rkey'], exp)
    exp = pd.Series([2, 3, 1, 1, 4, 4, np.nan], name='value_x')
    tm.assert_series_equal(merged['value_x'], exp)
    exp = pd.Series([6, np.nan, 5, 8, 5, 8, 7], name='value_y')
    tm.assert_series_equal(merged['value_y'], exp)
def test_merge_copy(self):
    # copy=True: mutating the merged frame must not touch the inputs.
    left = DataFrame({'a': 0, 'b': 1}, index=lrange(10))
    right = DataFrame({'c': 'foo', 'd': 'bar'}, index=lrange(10))
    merged = merge(left, right, left_index=True,
                   right_index=True, copy=True)
    merged['a'] = 6
    assert (left['a'] == 0).all()
    merged['d'] = 'peekaboo'
    assert (right['d'] == 'bar').all()
def test_merge_nocopy(self):
    # copy=False: the merged frame shares data with the inputs, so
    # mutations are visible in the originals.
    left = DataFrame({'a': 0, 'b': 1}, index=lrange(10))
    right = DataFrame({'c': 'foo', 'd': 'bar'}, index=lrange(10))
    merged = merge(left, right, left_index=True,
                   right_index=True, copy=False)
    merged['a'] = 6
    assert (left['a'] == 6).all()
    merged['d'] = 'peekaboo'
    assert (right['d'] == 'peekaboo').all()
def test_intelligently_handle_join_key(self):
    # #733, be a bit more 1337 about not returning unconsolidated DataFrame
    # Outer merge result must preserve the left frame's column order.
    left = DataFrame({'key': [1, 1, 2, 2, 3],
                      'value': lrange(5)}, columns=['value', 'key'])
    right = DataFrame({'key': [1, 1, 2, 3, 4, 5],
                       'rvalue': lrange(6)})
    joined = merge(left, right, on='key', how='outer')
    expected = DataFrame({'key': [1, 1, 1, 1, 2, 2, 3, 4, 5],
                          'value': np.array([0, 0, 1, 1, 2, 3, 4,
                                             np.nan, np.nan]),
                          'rvalue': [0, 1, 0, 1, 2, 2, 3, 4, 5]},
                         columns=['value', 'key', 'rvalue'])
    assert_frame_equal(joined, expected)
def test_merge_join_key_dtype_cast(self):
    # #8596
    # Pins the result dtype of the merge key in outer merges.
    df1 = DataFrame({'key': [1], 'v1': [10]})
    df2 = DataFrame({'key': [2], 'v1': [20]})
    df = merge(df1, df2, how='outer')
    assert df['key'].dtype == 'int64'
    df1 = DataFrame({'key': [True], 'v1': [1]})
    df2 = DataFrame({'key': [False], 'v1': [0]})
    df = merge(df1, df2, how='outer')
    # GH13169
    # this really should be bool
    assert df['key'].dtype == 'object'
    df1 = DataFrame({'val': [1]})
    df2 = DataFrame({'val': [2]})
    lkey = np.array([1])
    rkey = np.array([2])
    df = merge(df1, df2, left_on=lkey, right_on=rkey, how='outer')
    assert df['key_0'].dtype == 'int64'
def test_handle_join_key_pass_array(self):
    # Arrays may be passed directly as join keys; a synthesized key column
    # named 'key_0' appears in the result when both keys are arrays.
    left = DataFrame({'key': [1, 1, 2, 2, 3],
                      'value': lrange(5)}, columns=['value', 'key'])
    right = DataFrame({'rvalue': lrange(6)})
    key = np.array([1, 1, 2, 3, 4, 5])
    merged = merge(left, right, left_on='key', right_on=key, how='outer')
    merged2 = merge(right, left, left_on=key, right_on='key', how='outer')
    assert_series_equal(merged['key'], merged2['key'])
    assert merged['key'].notna().all()
    assert merged2['key'].notna().all()
    left = DataFrame({'value': lrange(5)}, columns=['value'])
    right = DataFrame({'rvalue': lrange(6)})
    lkey = np.array([1, 1, 2, 2, 3])
    rkey = np.array([1, 1, 2, 3, 4, 5])
    merged = merge(left, right, left_on=lkey, right_on=rkey, how='outer')
    tm.assert_series_equal(merged['key_0'], Series([1, 1, 1, 1, 2,
                                                    2, 3, 4, 5],
                                                   name='key_0'))
    left = DataFrame({'value': lrange(3)})
    right = DataFrame({'rvalue': lrange(6)})
    key = np.array([0, 1, 1, 2, 2, 3], dtype=np.int64)
    merged = merge(left, right, left_index=True, right_on=key, how='outer')
    tm.assert_series_equal(merged['key_0'], Series(key, name='key_0'))
def test_no_overlap_more_informative_error(self):
    # No common columns and no keys given: must raise MergeError rather
    # than a generic failure.
    dt = datetime.now()
    df1 = DataFrame({'x': ['a']}, index=[dt])
    df2 = DataFrame({'y': ['b', 'c']}, index=[dt, dt])
    pytest.raises(MergeError, merge, df1, df2)
def test_merge_non_unique_indexes(self):
    # Index merges with duplicated (and non-monotonic) datetime indexes
    # must agree with the join() reference in _check_merge.
    dt = datetime(2012, 5, 1)
    dt2 = datetime(2012, 5, 2)
    dt3 = datetime(2012, 5, 3)
    dt4 = datetime(2012, 5, 4)
    df1 = DataFrame({'x': ['a']}, index=[dt])
    df2 = DataFrame({'y': ['b', 'c']}, index=[dt, dt])
    _check_merge(df1, df2)
    # Not monotonic
    df1 = DataFrame({'x': ['a', 'b', 'q']}, index=[dt2, dt, dt4])
    df2 = DataFrame({'y': ['c', 'd', 'e', 'f', 'g', 'h']},
                    index=[dt3, dt3, dt2, dt2, dt, dt])
    _check_merge(df1, df2)
    df1 = DataFrame({'x': ['a', 'b']}, index=[dt, dt])
    df2 = DataFrame({'y': ['c', 'd']}, index=[dt, dt])
    _check_merge(df1, df2)
def test_merge_non_unique_index_many_to_many(self):
    # Many-to-many index merge (duplicates on both sides) must agree with
    # the join() reference in _check_merge.
    dt = datetime(2012, 5, 1)
    dt2 = datetime(2012, 5, 2)
    dt3 = datetime(2012, 5, 3)
    df1 = DataFrame({'x': ['a', 'b', 'c', 'd']},
                    index=[dt2, dt2, dt, dt])
    df2 = DataFrame({'y': ['e', 'f', 'g', ' h', 'i']},
                    index=[dt2, dt2, dt3, dt, dt])
    _check_merge(df1, df2)
def test_left_merge_empty_dataframe(self):
    # Left (resp. right) merge with an empty other side must return the
    # non-empty frame unchanged.
    left = DataFrame({'key': [1], 'value': [2]})
    right = DataFrame({'key': []})
    result = merge(left, right, on='key', how='left')
    assert_frame_equal(result, left)
    result = merge(right, left, on='key', how='right')
    assert_frame_equal(result, left)
def test_merge_left_empty_right_empty(self):
    # GH 10824: empty-with-empty merges produce an empty frame with the
    # union of columns, for every how and key specification.
    left = pd.DataFrame([], columns=['a', 'b', 'c'])
    right = pd.DataFrame([], columns=['x', 'y', 'z'])
    exp_in = pd.DataFrame([], columns=['a', 'b', 'c', 'x', 'y', 'z'],
                          index=pd.Index([], dtype=object),
                          dtype=object)
    for kwarg in [dict(left_index=True, right_index=True),
                  dict(left_index=True, right_on='x'),
                  dict(left_on='a', right_index=True),
                  dict(left_on='a', right_on='x')]:
        result = pd.merge(left, right, how='inner', **kwarg)
        tm.assert_frame_equal(result, exp_in)
        result = pd.merge(left, right, how='left', **kwarg)
        tm.assert_frame_equal(result, exp_in)
        result = pd.merge(left, right, how='right', **kwarg)
        tm.assert_frame_equal(result, exp_in)
        result = pd.merge(left, right, how='outer', **kwarg)
        tm.assert_frame_equal(result, exp_in)
def test_merge_left_empty_right_notempty(self):
    # GH 10824: empty left side -- inner/left are empty (check1); the
    # right/outer results keep the right side's rows with NaN on the left
    # columns (check2), with key-dependent contents of column 'a'.
    left = pd.DataFrame([], columns=['a', 'b', 'c'])
    right = pd.DataFrame([[1, 2, 3], [4, 5, 6], [7, 8, 9]],
                         columns=['x', 'y', 'z'])
    exp_out = pd.DataFrame({'a': np.array([np.nan] * 3, dtype=object),
                            'b': np.array([np.nan] * 3, dtype=object),
                            'c': np.array([np.nan] * 3, dtype=object),
                            'x': [1, 4, 7],
                            'y': [2, 5, 8],
                            'z': [3, 6, 9]},
                           columns=['a', 'b', 'c', 'x', 'y', 'z'])
    exp_in = exp_out[0:0]  # make empty DataFrame keeping dtype
    # result will have object dtype
    exp_in.index = exp_in.index.astype(object)

    def check1(exp, kwarg):
        result = pd.merge(left, right, how='inner', **kwarg)
        tm.assert_frame_equal(result, exp)
        result = pd.merge(left, right, how='left', **kwarg)
        tm.assert_frame_equal(result, exp)

    def check2(exp, kwarg):
        result = pd.merge(left, right, how='right', **kwarg)
        tm.assert_frame_equal(result, exp)
        result = pd.merge(left, right, how='outer', **kwarg)
        tm.assert_frame_equal(result, exp)

    for kwarg in [dict(left_index=True, right_index=True),
                  dict(left_index=True, right_on='x')]:
        check1(exp_in, kwarg)
        check2(exp_out, kwarg)
    kwarg = dict(left_on='a', right_index=True)
    check1(exp_in, kwarg)
    # Joining on the right index fills 'a' with the right's index values.
    exp_out['a'] = [0, 1, 2]
    check2(exp_out, kwarg)
    kwarg = dict(left_on='a', right_on='x')
    check1(exp_in, kwarg)
    exp_out['a'] = np.array([np.nan] * 3, dtype=object)
    check2(exp_out, kwarg)
def test_merge_left_notempty_right_empty(self):
    # GH 10824: empty right side -- inner/right are empty (check1);
    # left/outer keep the left rows with NaN right columns (check2).
    left = pd.DataFrame([[1, 2, 3], [4, 5, 6], [7, 8, 9]],
                        columns=['a', 'b', 'c'])
    right = pd.DataFrame([], columns=['x', 'y', 'z'])
    exp_out = pd.DataFrame({'a': [1, 4, 7],
                            'b': [2, 5, 8],
                            'c': [3, 6, 9],
                            'x': np.array([np.nan] * 3, dtype=object),
                            'y': np.array([np.nan] * 3, dtype=object),
                            'z': np.array([np.nan] * 3, dtype=object)},
                           columns=['a', 'b', 'c', 'x', 'y', 'z'])
    exp_in = exp_out[0:0]  # make empty DataFrame keeping dtype
    # result will have object dtype
    exp_in.index = exp_in.index.astype(object)

    def check1(exp, kwarg):
        result = pd.merge(left, right, how='inner', **kwarg)
        tm.assert_frame_equal(result, exp)
        result = pd.merge(left, right, how='right', **kwarg)
        tm.assert_frame_equal(result, exp)

    def check2(exp, kwarg):
        result = pd.merge(left, right, how='left', **kwarg)
        tm.assert_frame_equal(result, exp)
        result = pd.merge(left, right, how='outer', **kwarg)
        tm.assert_frame_equal(result, exp)

    for kwarg in [dict(left_index=True, right_index=True),
                  dict(left_index=True, right_on='x'),
                  dict(left_on='a', right_index=True),
                  dict(left_on='a', right_on='x')]:
        check1(exp_in, kwarg)
        check2(exp_out, kwarg)
def test_merge_nosort(self):
    # #2098, anything to do?
    # sort=False must preserve the left frame's key order.
    from datetime import datetime
    d = {"var1": np.random.randint(0, 10, size=10),
         "var2": np.random.randint(0, 10, size=10),
         "var3": [datetime(2012, 1, 12), datetime(2011, 2, 4),
                  datetime(2010, 2, 3), datetime(2012, 1, 12),
                  datetime(2011, 2, 4), datetime(2012, 4, 3),
                  datetime(2012, 3, 4), datetime(2008, 5, 1),
                  datetime(2010, 2, 3), datetime(2012, 2, 3)]}
    df = DataFrame.from_dict(d)
    var3 = df.var3.unique()
    var3.sort()
    new = DataFrame.from_dict({"var3": var3,
                               "var8": np.random.random(7)})
    result = df.merge(new, on="var3", sort=False)
    exp = merge(df, new, on='var3', sort=False)
    assert_frame_equal(result, exp)
    assert (df.var3.unique() == result.var3.unique()).all()
    def test_merge_nan_right(self):
        # join on a column against the other frame's default index;
        # unmatched keys produce NaN in the joined columns.
        df1 = DataFrame({"i1": [0, 1], "i2": [0, 1]})
        df2 = DataFrame({"i1": [0], "i3": [0]})
        result = df1.join(df2, on="i1", rsuffix="_")
        # expected is built with a None-named index column and then
        # round-tripped through set_index/reset_index to get the exact
        # column order the join produces
        expected = (DataFrame({'i1': {0: 0.0, 1: 1}, 'i2': {0: 0, 1: 1},
                               'i1_': {0: 0, 1: np.nan},
                               'i3': {0: 0.0, 1: np.nan},
                               None: {0: 0, 1: 0}})
                    .set_index(None)
                    .reset_index()[['i1', 'i2', 'i1_', 'i3']])
        assert_frame_equal(result, expected, check_dtype=False)

        # same join with float data; 0.7 is spelled as its exact binary
        # representation to match the stored value
        df1 = DataFrame({"i1": [0, 1], "i2": [0.5, 1.5]})
        df2 = DataFrame({"i1": [0], "i3": [0.7]})
        result = df1.join(df2, rsuffix="_", on='i1')
        expected = (DataFrame({'i1': {0: 0, 1: 1}, 'i1_': {0: 0.0, 1: nan},
                               'i2': {0: 0.5, 1: 1.5},
                               'i3': {0: 0.69999999999999996,
                                      1: nan}})
                    [['i1', 'i2', 'i1_', 'i3']])
        assert_frame_equal(result, expected)
def test_merge_type(self):
class NotADataFrame(DataFrame):
@property
def _constructor(self):
return NotADataFrame
nad = NotADataFrame(self.df)
result = nad.merge(self.df2, on='key1')
assert isinstance(result, NotADataFrame)
    def test_join_append_timedeltas(self):
        # NOTE(review): relies on DataFrame.append, which newer pandas
        # removed — confirm the target pandas version before porting.
        import datetime as dt
        from pandas import NaT

        # timedelta64 issues with join/merge
        # GH 5695
        # appending dict rows to an empty frame must produce proper
        # datetime64/timedelta64 columns, not object columns
        d = {'d': dt.datetime(2013, 11, 5, 5, 56), 't': dt.timedelta(0, 22500)}
        df = DataFrame(columns=list('dt'))
        df = df.append(d, ignore_index=True)
        result = df.append(d, ignore_index=True)
        expected = DataFrame({'d': [dt.datetime(2013, 11, 5, 5, 56),
                                    dt.datetime(2013, 11, 5, 5, 56)],
                              't': [dt.timedelta(0, 22500),
                                    dt.timedelta(0, 22500)]})
        assert_frame_equal(result, expected)

        # left join of timedelta64 columns: the unmatched row gets NaT
        td = np.timedelta64(300000000)
        lhs = DataFrame(Series([td, td], index=["A", "B"]))
        rhs = DataFrame(Series([td], index=["A"]))
        result = lhs.join(rhs, rsuffix='r', how="left")
        expected = DataFrame({'0': Series([td, td], index=list('AB')),
                              '0r': Series([td, NaT], index=list('AB'))})
        assert_frame_equal(result, expected)
    def test_other_datetime_unit(self):
        # GH 13389
        # Merging in a datetime64 column of any resolution: the frame
        # conversion normalizes it to nanoseconds.
        df1 = pd.DataFrame({'entity_id': [101, 102]})
        s = pd.Series([None, None], index=[101, 102], name='days')

        for dtype in ['datetime64[D]', 'datetime64[h]', 'datetime64[m]',
                      'datetime64[s]', 'datetime64[ms]', 'datetime64[us]',
                      'datetime64[ns]']:
            df2 = s.astype(dtype).to_frame('days')
            # coerces to datetime64[ns], thus should not be affected
            assert df2['days'].dtype == 'datetime64[ns]'

            result = df1.merge(df2, left_on='entity_id', right_index=True)

            # all-None input becomes NaT after the merge
            exp = pd.DataFrame({'entity_id': [101, 102],
                                'days': np.array(['nat', 'nat'],
                                                 dtype='datetime64[ns]')},
                               columns=['entity_id', 'days'])
            tm.assert_frame_equal(result, exp)
    def test_other_timedelta_unit(self):
        # GH 13389
        # Unlike datetime64 (see test_other_datetime_unit), timedelta64
        # columns keep their original resolution through the merge here.
        df1 = pd.DataFrame({'entity_id': [101, 102]})
        s = pd.Series([None, None], index=[101, 102], name='days')

        for dtype in ['timedelta64[D]', 'timedelta64[h]', 'timedelta64[m]',
                      'timedelta64[s]', 'timedelta64[ms]', 'timedelta64[us]',
                      'timedelta64[ns]']:
            df2 = s.astype(dtype).to_frame('days')
            # the unit is preserved, not coerced to nanoseconds
            assert df2['days'].dtype == dtype

            result = df1.merge(df2, left_on='entity_id', right_index=True)

            exp = pd.DataFrame({'entity_id': [101, 102],
                                'days': np.array(['nat', 'nat'],
                                                 dtype=dtype)},
                               columns=['entity_id', 'days'])
            tm.assert_frame_equal(result, exp)
    def test_overlapping_columns_error_message(self):
        # Duplicate column labels within each frame are allowed as long as
        # the merge key is unambiguous; a duplicated key column must raise.
        df = DataFrame({'key': [1, 2, 3],
                        'v1': [4, 5, 6],
                        'v2': [7, 8, 9]})
        df2 = DataFrame({'key': [1, 2, 3],
                         'v1': [4, 5, 6],
                         'v2': [7, 8, 9]})

        df.columns = ['key', 'foo', 'foo']
        df2.columns = ['key', 'bar', 'bar']
        expected = DataFrame({'key': [1, 2, 3],
                              'v1': [4, 5, 6],
                              'v2': [7, 8, 9],
                              'v3': [4, 5, 6],
                              'v4': [7, 8, 9]})
        expected.columns = ['key', 'foo', 'foo', 'bar', 'bar']
        assert_frame_equal(merge(df, df2), expected)

        # #2649, #10639
        # renaming so both frames have duplicate 'foo' key candidates must
        # raise rather than silently pick one
        df2.columns = ['key1', 'foo', 'foo']
        pytest.raises(ValueError, merge, df, df2)
    def test_merge_on_datetime64tz(self):
        # GH11405
        # merging on a tz-aware datetime key preserves the tz
        left = pd.DataFrame({'key': pd.date_range('20151010', periods=2,
                                                  tz='US/Eastern'),
                             'value': [1, 2]})
        right = pd.DataFrame({'key': pd.date_range('20151011', periods=3,
                                                   tz='US/Eastern'),
                              'value': [1, 2, 3]})

        expected = DataFrame({'key': pd.date_range('20151010', periods=4,
                                                   tz='US/Eastern'),
                              'value_x': [1, 2, np.nan, np.nan],
                              'value_y': [np.nan, 1, 2, 3]})
        result = pd.merge(left, right, on='key', how='outer')
        assert_frame_equal(result, expected)

        # tz-aware datetimes as merged *values* (not keys) must also keep
        # their dtype, with NaT filling the unmatched rows
        left = pd.DataFrame({'value': pd.date_range('20151010', periods=2,
                                                    tz='US/Eastern'),
                             'key': [1, 2]})
        right = pd.DataFrame({'value': pd.date_range('20151011', periods=2,
                                                     tz='US/Eastern'),
                              'key': [2, 3]})
        expected = DataFrame({
            'value_x': list(pd.date_range('20151010', periods=2,
                                          tz='US/Eastern')) + [pd.NaT],
            'value_y': [pd.NaT] + list(pd.date_range('20151011', periods=2,
                                                     tz='US/Eastern')),
            'key': [1, 2, 3]})
        result = pd.merge(left, right, on='key', how='outer')
        assert_frame_equal(result, expected)
        assert result['value_x'].dtype == 'datetime64[ns, US/Eastern]'
        assert result['value_y'].dtype == 'datetime64[ns, US/Eastern]'
def test_merge_non_unique_period_index(self):
# GH #16871
index = pd.period_range('2016-01-01', periods=16, freq='M')
df = DataFrame([i for i in range(len(index))],
index=index, columns=['pnum'])
df2 = concat([df, df])
result = df.merge(df2, left_index=True, right_index=True, how='inner')
expected = DataFrame(
np.tile(np.arange(16, dtype=np.int64).repeat(2).reshape(-1, 1), 2),
columns=['pnum_x', 'pnum_y'], index=df2.sort_index().index)
tm.assert_frame_equal(result, expected)
    def test_merge_on_periods(self):
        # merging on a Period key behaves like the datetime case: the
        # outer join unions the keys and NaN-fills the value columns
        left = pd.DataFrame({'key': pd.period_range('20151010', periods=2,
                                                    freq='D'),
                             'value': [1, 2]})
        right = pd.DataFrame({'key': pd.period_range('20151011', periods=3,
                                                     freq='D'),
                              'value': [1, 2, 3]})

        expected = DataFrame({'key': pd.period_range('20151010', periods=4,
                                                     freq='D'),
                              'value_x': [1, 2, np.nan, np.nan],
                              'value_y': [np.nan, 1, 2, 3]})
        result = pd.merge(left, right, on='key', how='outer')
        assert_frame_equal(result, expected)

        # Periods as merged *values*: NaT fills unmatched rows and the
        # columns come back as object dtype (asserted below)
        left = pd.DataFrame({'value': pd.period_range('20151010', periods=2,
                                                      freq='D'),
                             'key': [1, 2]})
        right = pd.DataFrame({'value': pd.period_range('20151011', periods=2,
                                                       freq='D'),
                              'key': [2, 3]})

        exp_x = pd.period_range('20151010', periods=2, freq='D')
        exp_y = pd.period_range('20151011', periods=2, freq='D')
        expected = DataFrame({'value_x': list(exp_x) + [pd.NaT],
                              'value_y': [pd.NaT] + list(exp_y),
                              'key': [1, 2, 3]})
        result = pd.merge(left, right, on='key', how='outer')
        assert_frame_equal(result, expected)
        # NOTE(review): newer pandas may preserve period dtype here —
        # verify against the pinned pandas version
        assert result['value_x'].dtype == 'object'
        assert result['value_y'].dtype == 'object'
    def test_indicator(self):
        # PR #10054. xref #7412 and closes #8790.
        # The ``indicator`` argument adds a categorical column recording
        # whether each row came from the left frame, the right frame, or
        # both; it accepts True (column named '_merge') or a string name.
        df1 = DataFrame({'col1': [0, 1], 'col_left': [
            'a', 'b'], 'col_conflict': [1, 2]})
        df1_copy = df1.copy()

        df2 = DataFrame({'col1': [1, 2, 3, 4, 5], 'col_right': [2, 2, 2, 2, 2],
                         'col_conflict': [1, 2, 3, 4, 5]})
        df2_copy = df2.copy()

        df_result = DataFrame({
            'col1': [0, 1, 2, 3, 4, 5],
            'col_conflict_x': [1, 2, np.nan, np.nan, np.nan, np.nan],
            'col_left': ['a', 'b', np.nan, np.nan, np.nan, np.nan],
            'col_conflict_y': [np.nan, 1, 2, 3, 4, 5],
            'col_right': [np.nan, 2, 2, 2, 2, 2]})
        # the indicator column is categorical with this fixed category order
        df_result['_merge'] = Categorical(
            ['left_only', 'both', 'right_only',
             'right_only', 'right_only', 'right_only'],
            categories=['left_only', 'right_only', 'both'])

        df_result = df_result[['col1', 'col_conflict_x', 'col_left',
                               'col_conflict_y', 'col_right', '_merge']]

        test = merge(df1, df2, on='col1', how='outer', indicator=True)
        assert_frame_equal(test, df_result)
        test = df1.merge(df2, on='col1', how='outer', indicator=True)
        assert_frame_equal(test, df_result)

        # No side effects
        assert_frame_equal(df1, df1_copy)
        assert_frame_equal(df2, df2_copy)

        # Check with custom name
        df_result_custom_name = df_result
        df_result_custom_name = df_result_custom_name.rename(
            columns={'_merge': 'custom_name'})

        test_custom_name = merge(
            df1, df2, on='col1', how='outer', indicator='custom_name')
        assert_frame_equal(test_custom_name, df_result_custom_name)
        test_custom_name = df1.merge(
            df2, on='col1', how='outer', indicator='custom_name')
        assert_frame_equal(test_custom_name, df_result_custom_name)

        # Check only accepts strings and booleans
        with pytest.raises(ValueError):
            merge(df1, df2, on='col1', how='outer', indicator=5)
        with pytest.raises(ValueError):
            df1.merge(df2, on='col1', how='outer', indicator=5)

        # Check result integrity: one-sided joins can never contain rows
        # flagged as coming only from the excluded side
        test2 = merge(df1, df2, on='col1', how='left', indicator=True)
        assert (test2._merge != 'right_only').all()
        test2 = df1.merge(df2, on='col1', how='left', indicator=True)
        assert (test2._merge != 'right_only').all()

        test3 = merge(df1, df2, on='col1', how='right', indicator=True)
        assert (test3._merge != 'left_only').all()
        test3 = df1.merge(df2, on='col1', how='right', indicator=True)
        assert (test3._merge != 'left_only').all()

        test4 = merge(df1, df2, on='col1', how='inner', indicator=True)
        assert (test4._merge == 'both').all()
        test4 = df1.merge(df2, on='col1', how='inner', indicator=True)
        assert (test4._merge == 'both').all()

        # Check if working name in df: a pre-existing column with the
        # indicator's name must raise
        for i in ['_right_indicator', '_left_indicator', '_merge']:
            df_badcolumn = DataFrame({'col1': [1, 2], i: [2, 2]})

            with pytest.raises(ValueError):
                merge(df1, df_badcolumn, on='col1',
                      how='outer', indicator=True)
            with pytest.raises(ValueError):
                df1.merge(df_badcolumn, on='col1', how='outer', indicator=True)

        # Check for name conflict with custom name
        df_badcolumn = DataFrame(
            {'col1': [1, 2], 'custom_column_name': [2, 2]})

        with pytest.raises(ValueError):
            merge(df1, df_badcolumn, on='col1', how='outer',
                  indicator='custom_column_name')
        with pytest.raises(ValueError):
            df1.merge(df_badcolumn, on='col1', how='outer',
                      indicator='custom_column_name')

        # Merge on multiple columns
        df3 = DataFrame({'col1': [0, 1], 'col2': ['a', 'b']})

        df4 = DataFrame({'col1': [1, 1, 3], 'col2': ['b', 'x', 'y']})

        hand_coded_result = DataFrame({'col1': [0, 1, 1, 3],
                                       'col2': ['a', 'b', 'x', 'y']})
        hand_coded_result['_merge'] = Categorical(
            ['left_only', 'both', 'right_only', 'right_only'],
            categories=['left_only', 'right_only', 'both'])

        test5 = merge(df3, df4, on=['col1', 'col2'],
                      how='outer', indicator=True)
        assert_frame_equal(test5, hand_coded_result)
        test5 = df3.merge(df4, on=['col1', 'col2'],
                          how='outer', indicator=True)
        assert_frame_equal(test5, hand_coded_result)
    def test_validation(self):
        # The ``validate`` argument checks key uniqueness on each side:
        # '1:1'/'one_to_one', 'one_to_many', 'many_to_one', 'many_to_many'.
        # Violations raise MergeError; bogus strings raise ValueError.
        # NOTE(review): uses DataFrame.append, removed in newer pandas —
        # confirm the target pandas version before porting.
        left = DataFrame({'a': ['a', 'b', 'c', 'd'],
                          'b': ['cat', 'dog', 'weasel', 'horse']},
                         index=range(4))

        right = DataFrame({'a': ['a', 'b', 'c', 'd', 'e'],
                           'c': ['meow', 'bark', 'um... weasel noise?',
                                 'nay', 'chirp']},
                          index=range(5))

        # Make sure no side effects.
        left_copy = left.copy()
        right_copy = right.copy()

        result = merge(left, right, left_index=True, right_index=True,
                       validate='1:1')
        assert_frame_equal(left, left_copy)
        assert_frame_equal(right, right_copy)

        # make sure merge still correct
        expected = DataFrame({'a_x': ['a', 'b', 'c', 'd'],
                              'b': ['cat', 'dog', 'weasel', 'horse'],
                              'a_y': ['a', 'b', 'c', 'd'],
                              'c': ['meow', 'bark', 'um... weasel noise?',
                                    'nay']},
                             index=range(4),
                             columns=['a_x', 'b', 'a_y', 'c'])

        # '1:1' and 'one_to_one' are synonyms
        result = merge(left, right, left_index=True, right_index=True,
                       validate='one_to_one')
        assert_frame_equal(result, expected)

        expected_2 = DataFrame({'a': ['a', 'b', 'c', 'd'],
                                'b': ['cat', 'dog', 'weasel', 'horse'],
                                'c': ['meow', 'bark', 'um... weasel noise?',
                                      'nay']},
                               index=range(4))

        result = merge(left, right, on='a', validate='1:1')
        assert_frame_equal(left, left_copy)
        assert_frame_equal(right, right_copy)
        assert_frame_equal(result, expected_2)

        result = merge(left, right, on='a', validate='one_to_one')
        assert_frame_equal(result, expected_2)

        # One index, one column
        expected_3 = DataFrame({'b': ['cat', 'dog', 'weasel', 'horse'],
                                'a': ['a', 'b', 'c', 'd'],
                                'c': ['meow', 'bark', 'um... weasel noise?',
                                      'nay']},
                               columns=['b', 'a', 'c'],
                               index=range(4))

        left_index_reset = left.set_index('a')
        result = merge(left_index_reset, right, left_index=True,
                       right_on='a', validate='one_to_one')
        assert_frame_equal(result, expected_3)

        # Dups on right: one_to_many is fine, one_to_one must raise
        right_w_dups = right.append(pd.DataFrame({'a': ['e'], 'c': ['moo']},
                                    index=[4]))
        merge(left, right_w_dups, left_index=True, right_index=True,
              validate='one_to_many')

        with pytest.raises(MergeError):
            merge(left, right_w_dups, left_index=True, right_index=True,
                  validate='one_to_one')

        with pytest.raises(MergeError):
            merge(left, right_w_dups, on='a', validate='one_to_one')

        # Dups on left: many_to_one is fine, one_to_one must raise
        left_w_dups = left.append(pd.DataFrame({'a': ['a'], 'c': ['cow']},
                                  index=[3]))
        merge(left_w_dups, right, left_index=True, right_index=True,
              validate='many_to_one')

        with pytest.raises(MergeError):
            merge(left_w_dups, right, left_index=True, right_index=True,
                  validate='one_to_one')

        with pytest.raises(MergeError):
            merge(left_w_dups, right, on='a', validate='one_to_one')

        # Dups on both: only many_to_many passes
        merge(left_w_dups, right_w_dups, on='a', validate='many_to_many')

        with pytest.raises(MergeError):
            merge(left_w_dups, right_w_dups, left_index=True,
                  right_index=True, validate='many_to_one')

        with pytest.raises(MergeError):
            merge(left_w_dups, right_w_dups, on='a',
                  validate='one_to_many')

        # Check invalid arguments
        with pytest.raises(ValueError):
            merge(left, right, on='a', validate='jibberish')

        # Two column merge, dups in both, but jointly no dups.
        left = DataFrame({'a': ['a', 'a', 'b', 'b'],
                          'b': [0, 1, 0, 1],
                          'c': ['cat', 'dog', 'weasel', 'horse']},
                         index=range(4))

        right = DataFrame({'a': ['a', 'a', 'b'],
                           'b': [0, 1, 0],
                           'd': ['meow', 'bark', 'um... weasel noise?']},
                          index=range(3))

        expected_multi = DataFrame({'a': ['a', 'a', 'b'],
                                    'b': [0, 1, 0],
                                    'c': ['cat', 'dog', 'weasel'],
                                    'd': ['meow', 'bark',
                                          'um... weasel noise?']},
                                   index=range(3))

        # single-key validation fails, but the composite key is unique
        with pytest.raises(MergeError):
            merge(left, right, on='a', validate='1:1')

        result = merge(left, right, on=['a', 'b'], validate='1:1')
        assert_frame_equal(result, expected_multi)
def _check_merge(x, y):
for how in ['inner', 'left', 'outer']:
result = x.join(y, how=how)
expected = merge(x.reset_index(), y.reset_index(), how=how,
sort=True)
expected = expected.set_index('index')
# TODO check_names on merge?
assert_frame_equal(result, expected, check_names=False)
class TestMergeMulti(object):
    """Tests for merging/joining on multiple keys and MultiIndexes."""

    def setup_method(self, method):
        # a 4x3 MultiIndex frame to join against, plus a flat frame whose
        # (key1, key2) pairs partially overlap it (including 'snap', which
        # has no match)
        # NOTE(review): ``labels=`` is the legacy MultiIndex kwarg (renamed
        # ``codes`` in later pandas) — keep in sync with the pinned version.
        self.index = MultiIndex(levels=[['foo', 'bar', 'baz', 'qux'],
                                        ['one', 'two', 'three']],
                                labels=[[0, 0, 0, 1, 1, 2, 2, 3, 3, 3],
                                        [0, 1, 2, 0, 1, 1, 2, 0, 1, 2]],
                                names=['first', 'second'])
        self.to_join = DataFrame(np.random.randn(10, 3), index=self.index,
                                 columns=['j_one', 'j_two', 'j_three'])

        # a little relevant example with NAs
        key1 = ['bar', 'bar', 'bar', 'foo', 'foo', 'baz', 'baz', 'qux',
                'qux', 'snap']
        key2 = ['two', 'one', 'three', 'one', 'two', 'one', 'two', 'two',
                'three', 'one']

        data = np.random.randn(len(key1))
        self.data = DataFrame({'key1': key1, 'key2': key2,
                               'data': data})

    def test_merge_on_multikey(self):
        # join on two columns against a MultiIndex must behave like a
        # manual take with the MultiIndex's indexer
        joined = self.data.join(self.to_join, on=['key1', 'key2'])

        join_key = Index(lzip(self.data['key1'], self.data['key2']))
        indexer = self.to_join.index.get_indexer(join_key)
        ex_values = self.to_join.values.take(indexer, axis=0)
        ex_values[indexer == -1] = np.nan
        expected = self.data.join(DataFrame(ex_values,
                                            columns=self.to_join.columns))

        # TODO: columns aren't in the same order yet
        assert_frame_equal(joined, expected.loc[:, joined.columns])

        # sort=True must match a stable sort of the unsorted result
        left = self.data.join(self.to_join, on=['key1', 'key2'], sort=True)
        right = expected.loc[:, joined.columns].sort_values(['key1', 'key2'],
                                                            kind='mergesort')
        assert_frame_equal(left, right)

    def test_left_join_multi_index(self):
        icols = ['1st', '2nd', '3rd']

        def bind_cols(df):
            # derive a deterministic checksum from the three key columns
            # (NaN maps to 0 via the self-inequality check)
            iord = lambda a: 0 if a != a else ord(a)
            f = lambda ts: ts.map(iord) - ord('a')
            return (f(df['1st']) + f(df['3rd']) * 1e2 +
                    df['2nd'].fillna(0) * 1e4)

        def run_asserts(left, right):
            for sort in [False, True]:
                res = left.join(right, on=icols, how='left', sort=sort)

                # a left join never loses rows
                assert len(left) < len(res) + 1
                assert not res['4th'].isna().any()
                assert not res['5th'].isna().any()

                # '5th' was built as -'4th', so they must cancel
                tm.assert_series_equal(
                    res['4th'], - res['5th'], check_names=False)
                result = bind_cols(res.iloc[:, :-2])
                tm.assert_series_equal(res['4th'], result, check_names=False)
                assert result.name is None

                if sort:
                    tm.assert_frame_equal(
                        res, res.sort_values(icols, kind='mergesort'))

                # the equivalent column-on-column merge must agree
                out = merge(left, right.reset_index(), on=icols,
                            sort=sort, how='left')

                res.index = np.arange(len(res))
                tm.assert_frame_equal(out, res)

        lc = list(map(chr, np.arange(ord('a'), ord('z') + 1)))
        left = DataFrame(np.random.choice(lc, (5000, 2)),
                         columns=['1st', '3rd'])
        left.insert(1, '2nd', np.random.randint(0, 1000, len(left)))

        i = np.random.permutation(len(left))
        right = left.iloc[i].copy()

        left['4th'] = bind_cols(left)
        right['5th'] = - bind_cols(right)
        right.set_index(icols, inplace=True)

        run_asserts(left, right)

        # inject some nulls
        left.loc[1::23, '1st'] = np.nan
        left.loc[2::37, '2nd'] = np.nan
        left.loc[3::43, '3rd'] = np.nan
        left['4th'] = bind_cols(left)

        i = np.random.permutation(len(left))
        right = left.iloc[i, :-1]
        right['5th'] = - bind_cols(right)
        right.set_index(icols, inplace=True)

        run_asserts(left, right)

    def test_merge_right_vs_left(self):
        # compare left vs right merge with multikey: swapping the frames
        # and the direction must give identical results
        for sort in [False, True]:
            merged1 = self.data.merge(self.to_join, left_on=['key1', 'key2'],
                                      right_index=True, how='left', sort=sort)

            merged2 = self.to_join.merge(self.data, right_on=['key1', 'key2'],
                                         left_index=True, how='right',
                                         sort=sort)

            merged2 = merged2.loc[:, merged1.columns]
            assert_frame_equal(merged1, merged2)

    def test_compress_group_combinations(self):
        # ~ 40000000 possible unique groups
        key1 = tm.rands_array(10, 10000)
        key1 = np.tile(key1, 2)
        key2 = key1[::-1]

        df = DataFrame({'key1': key1, 'key2': key2,
                        'value1': np.random.randn(20000)})

        df2 = DataFrame({'key1': key1[::2], 'key2': key2[::2],
                         'value2': np.random.randn(10000)})

        # just to hit the label compression code path
        merge(df, df2, how='outer')

    def test_left_join_index_preserve_order(self):
        # without sort=True, a left join keeps the left frame's row order
        left = DataFrame({'k1': [0, 1, 2] * 8,
                          'k2': ['foo', 'bar'] * 12,
                          'v': np.array(np.arange(24), dtype=np.int64)})

        index = MultiIndex.from_tuples([(2, 'bar'), (1, 'foo')])
        right = DataFrame({'v2': [5, 7]}, index=index)

        result = left.join(right, on=['k1', 'k2'])

        expected = left.copy()
        expected['v2'] = np.nan
        expected.loc[(expected.k1 == 2) & (expected.k2 == 'bar'), 'v2'] = 5
        expected.loc[(expected.k1 == 1) & (expected.k2 == 'foo'), 'v2'] = 7

        tm.assert_frame_equal(result, expected)

        tm.assert_frame_equal(
            result.sort_values(['k1', 'k2'], kind='mergesort'),
            left.join(right, on=['k1', 'k2'], sort=True))

        # test join with multi dtypes blocks
        left = DataFrame({'k1': [0, 1, 2] * 8,
                          'k2': ['foo', 'bar'] * 12,
                          'k3': np.array([0, 1, 2] * 8, dtype=np.float32),
                          'v': np.array(np.arange(24), dtype=np.int32)})

        index = MultiIndex.from_tuples([(2, 'bar'), (1, 'foo')])
        right = DataFrame({'v2': [5, 7]}, index=index)

        result = left.join(right, on=['k1', 'k2'])

        expected = left.copy()
        expected['v2'] = np.nan
        expected.loc[(expected.k1 == 2) & (expected.k2 == 'bar'), 'v2'] = 5
        expected.loc[(expected.k1 == 1) & (expected.k2 == 'foo'), 'v2'] = 7

        tm.assert_frame_equal(result, expected)

        tm.assert_frame_equal(
            result.sort_values(['k1', 'k2'], kind='mergesort'),
            left.join(right, on=['k1', 'k2'], sort=True))

        # do a right join for an extra test
        joined = merge(right, left, left_index=True,
                       right_on=['k1', 'k2'], how='right')
        tm.assert_frame_equal(joined.loc[:, expected.columns], expected)

    def test_left_join_index_multi_match_multiindex(self):
        left = DataFrame([
            ['X', 'Y', 'C', 'a'],
            ['W', 'Y', 'C', 'e'],
            ['V', 'Q', 'A', 'h'],
            ['V', 'R', 'D', 'i'],
            ['X', 'Y', 'D', 'b'],
            ['X', 'Y', 'A', 'c'],
            ['W', 'Q', 'B', 'f'],
            ['W', 'R', 'C', 'g'],
            ['V', 'Y', 'C', 'j'],
            ['X', 'Y', 'B', 'd']],
            columns=['cola', 'colb', 'colc', 'tag'],
            index=[3, 2, 0, 1, 7, 6, 4, 5, 9, 8])

        right = DataFrame([
            ['W', 'R', 'C', 0],
            ['W', 'Q', 'B', 3],
            ['W', 'Q', 'B', 8],
            ['X', 'Y', 'A', 1],
            ['X', 'Y', 'A', 4],
            ['X', 'Y', 'B', 5],
            ['X', 'Y', 'C', 6],
            ['X', 'Y', 'C', 9],
            ['X', 'Q', 'C', -6],
            ['X', 'R', 'C', -9],
            ['V', 'Y', 'C', 7],
            ['V', 'R', 'D', 2],
            ['V', 'R', 'D', -1],
            ['V', 'Q', 'A', -3]],
            columns=['col1', 'col2', 'col3', 'val'])

        right.set_index(['col1', 'col2', 'col3'], inplace=True)
        result = left.join(right, on=['cola', 'colb', 'colc'], how='left')

        # multiply-matched keys duplicate the left row; the original left
        # index labels are repeated accordingly
        expected = DataFrame([
            ['X', 'Y', 'C', 'a', 6],
            ['X', 'Y', 'C', 'a', 9],
            ['W', 'Y', 'C', 'e', nan],
            ['V', 'Q', 'A', 'h', -3],
            ['V', 'R', 'D', 'i', 2],
            ['V', 'R', 'D', 'i', -1],
            ['X', 'Y', 'D', 'b', nan],
            ['X', 'Y', 'A', 'c', 1],
            ['X', 'Y', 'A', 'c', 4],
            ['W', 'Q', 'B', 'f', 3],
            ['W', 'Q', 'B', 'f', 8],
            ['W', 'R', 'C', 'g', 0],
            ['V', 'Y', 'C', 'j', 7],
            ['X', 'Y', 'B', 'd', 5]],
            columns=['cola', 'colb', 'colc', 'tag', 'val'],
            index=[3, 3, 2, 0, 1, 1, 7, 6, 6, 4, 4, 5, 9, 8])

        tm.assert_frame_equal(result, expected)

        result = left.join(right, on=['cola', 'colb', 'colc'],
                           how='left', sort=True)

        tm.assert_frame_equal(
            result,
            expected.sort_values(['cola', 'colb', 'colc'], kind='mergesort'))

        # GH7331 - maintain left frame order in left merge
        right.reset_index(inplace=True)
        right.columns = left.columns[:3].tolist() + right.columns[-1:].tolist()
        result = merge(left, right, how='left', on=left.columns[:-1].tolist())
        expected.index = np.arange(len(expected))
        tm.assert_frame_equal(result, expected)

    def test_left_join_index_multi_match(self):
        left = DataFrame([
            ['c', 0],
            ['b', 1],
            ['a', 2],
            ['b', 3]],
            columns=['tag', 'val'],
            index=[2, 0, 1, 3])

        right = DataFrame([
            ['a', 'v'],
            ['c', 'w'],
            ['c', 'x'],
            ['d', 'y'],
            ['a', 'z'],
            ['c', 'r'],
            ['e', 'q'],
            ['c', 's']],
            columns=['tag', 'char'])

        right.set_index('tag', inplace=True)
        result = left.join(right, on='tag', how='left')

        expected = DataFrame([
            ['c', 0, 'w'],
            ['c', 0, 'x'],
            ['c', 0, 'r'],
            ['c', 0, 's'],
            ['b', 1, nan],
            ['a', 2, 'v'],
            ['a', 2, 'z'],
            ['b', 3, nan]],
            columns=['tag', 'val', 'char'],
            index=[2, 2, 2, 2, 0, 1, 1, 3])

        tm.assert_frame_equal(result, expected)

        result = left.join(right, on='tag', how='left', sort=True)
        tm.assert_frame_equal(
            result, expected.sort_values('tag', kind='mergesort'))

        # GH7331 - maintain left frame order in left merge
        result = merge(left, right.reset_index(), how='left', on='tag')
        expected.index = np.arange(len(expected))
        tm.assert_frame_equal(result, expected)

    def test_left_merge_na_buglet(self):
        # NaN join keys on the right must not corrupt the left join
        left = DataFrame({'id': list('abcde'), 'v1': randn(5),
                          'v2': randn(5), 'dummy': list('abcde'),
                          'v3': randn(5)},
                         columns=['id', 'v1', 'v2', 'dummy', 'v3'])
        right = DataFrame({'id': ['a', 'b', np.nan, np.nan, np.nan],
                           'sv3': [1.234, 5.678, np.nan, np.nan, np.nan]})

        merged = merge(left, right, on='id', how='left')

        rdf = right.drop(['id'], axis=1)
        expected = left.join(rdf)
        tm.assert_frame_equal(merged, expected)

    def test_merge_na_keys(self):
        # NaN values in the merge keys must behave like any other key
        # value: compare against the same merge with NaN sentinel-encoded
        data = [[1950, "A", 1.5],
                [1950, "B", 1.5],
                [1955, "B", 1.5],
                [1960, "B", np.nan],
                [1970, "B", 4.],
                [1950, "C", 4.],
                [1960, "C", np.nan],
                [1965, "C", 3.],
                [1970, "C", 4.]]

        frame = DataFrame(data, columns=["year", "panel", "data"])

        other_data = [[1960, 'A', np.nan],
                      [1970, 'A', np.nan],
                      [1955, 'A', np.nan],
                      [1965, 'A', np.nan],
                      [1965, 'B', np.nan],
                      [1955, 'C', np.nan]]
        other = DataFrame(other_data, columns=['year', 'panel', 'data'])

        result = frame.merge(other, how='outer')

        expected = frame.fillna(-999).merge(other.fillna(-999), how='outer')
        expected = expected.replace(-999, np.nan)

        tm.assert_frame_equal(result, expected)

    def test_join_multi_levels(self):
        # GH 3662
        # merge multi-levels: joining a single-index frame into a
        # two-level MultiIndex frame broadcasts along the extra level
        household = (
            DataFrame(
                dict(household_id=[1, 2, 3],
                     male=[0, 1, 0],
                     wealth=[196087.3, 316478.7, 294750]),
                columns=['household_id', 'male', 'wealth'])
            .set_index('household_id'))
        portfolio = (
            DataFrame(
                dict(household_id=[1, 2, 2, 3, 3, 3, 4],
                     asset_id=["nl0000301109", "nl0000289783", "gb00b03mlx29",
                               "gb00b03mlx29", "lu0197800237", "nl0000289965",
                               np.nan],
                     name=["ABN Amro", "Robeco", "Royal Dutch Shell",
                           "Royal Dutch Shell",
                           "AAB Eastern Europe Equity Fund",
                           "Postbank BioTech Fonds", np.nan],
                     share=[1.0, 0.4, 0.6, 0.15, 0.6, 0.25, 1.0]),
                columns=['household_id', 'asset_id', 'name', 'share'])
            .set_index(['household_id', 'asset_id']))
        result = household.join(portfolio, how='inner')
        expected = (
            DataFrame(
                dict(male=[0, 1, 1, 0, 0, 0],
                     wealth=[196087.3, 316478.7, 316478.7,
                             294750.0, 294750.0, 294750.0],
                     name=['ABN Amro', 'Robeco', 'Royal Dutch Shell',
                           'Royal Dutch Shell',
                           'AAB Eastern Europe Equity Fund',
                           'Postbank BioTech Fonds'],
                     share=[1.00, 0.40, 0.60, 0.15, 0.60, 0.25],
                     household_id=[1, 2, 2, 3, 3, 3],
                     asset_id=['nl0000301109', 'nl0000289783', 'gb00b03mlx29',
                               'gb00b03mlx29', 'lu0197800237',
                               'nl0000289965']))
            .set_index(['household_id', 'asset_id'])
            .reindex(columns=['male', 'wealth', 'name', 'share']))
        assert_frame_equal(result, expected)

        # equivalency
        result2 = (merge(household.reset_index(), portfolio.reset_index(),
                         on=['household_id'], how='inner')
                   .set_index(['household_id', 'asset_id']))
        assert_frame_equal(result2, expected)

        # the outer join additionally keeps household 4, which has no
        # asset (NaN level value)
        result = household.join(portfolio, how='outer')
        expected = (concat([
            expected,
            (DataFrame(
                dict(share=[1.00]),
                index=MultiIndex.from_tuples(
                    [(4, np.nan)],
                    names=['household_id', 'asset_id'])))
        ], axis=0).reindex(columns=expected.columns))
        assert_frame_equal(result, expected)

        # invalid cases: index names must line up for the join
        household.index.name = 'foo'

        def f():
            household.join(portfolio, how='inner')
        pytest.raises(ValueError, f)

        portfolio2 = portfolio.copy()
        # set_names returns a new Index; assign it back (the original code
        # discarded the result, leaving the names unchanged)
        portfolio2.index = portfolio2.index.set_names(['household_id', 'foo'])

        def f():
            portfolio2.join(portfolio, how='inner')
        pytest.raises(ValueError, f)

    def test_join_multi_levels2(self):
        # some more advanced merges
        # GH6360
        household = (
            DataFrame(
                dict(household_id=[1, 2, 2, 3, 3, 3, 4],
                     asset_id=["nl0000301109", "nl0000301109", "gb00b03mlx29",
                               "gb00b03mlx29", "lu0197800237", "nl0000289965",
                               np.nan],
                     share=[1.0, 0.4, 0.6, 0.15, 0.6, 0.25, 1.0]),
                columns=['household_id', 'asset_id', 'share'])
            .set_index(['household_id', 'asset_id']))

        log_return = DataFrame(dict(
            asset_id=["gb00b03mlx29", "gb00b03mlx29",
                      "gb00b03mlx29", "lu0197800237", "lu0197800237"],
            t=[233, 234, 235, 180, 181],
            log_return=[.09604978, -.06524096, .03532373, .03025441, .036997]
        )).set_index(["asset_id", "t"])

        expected = (
            DataFrame(dict(
                household_id=[2, 2, 2, 3, 3, 3, 3, 3],
                asset_id=["gb00b03mlx29", "gb00b03mlx29",
                          "gb00b03mlx29", "gb00b03mlx29",
                          "gb00b03mlx29", "gb00b03mlx29",
                          "lu0197800237", "lu0197800237"],
                t=[233, 234, 235, 233, 234, 235, 180, 181],
                share=[0.6, 0.6, 0.6, 0.15, 0.15, 0.15, 0.6, 0.6],
                log_return=[.09604978, -.06524096, .03532373,
                            .09604978, -.06524096, .03532373,
                            .03025441, .036997]
            ))
            .set_index(["household_id", "asset_id", "t"])
            .reindex(columns=['share', 'log_return']))

        # joining two MultiIndexes on a shared subset of levels is not
        # supported via DataFrame.join ...
        def f():
            household.join(log_return, how='inner')
        pytest.raises(NotImplementedError, f)

        # this is the equivalency
        result = (merge(household.reset_index(), log_return.reset_index(),
                        on=['asset_id'], how='inner')
                  .set_index(['household_id', 'asset_id', 't']))
        assert_frame_equal(result, expected)

        expected = (
            DataFrame(dict(
                household_id=[1, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 4],
                asset_id=["nl0000301109", "nl0000289783", "gb00b03mlx29",
                          "gb00b03mlx29", "gb00b03mlx29",
                          "gb00b03mlx29", "gb00b03mlx29", "gb00b03mlx29",
                          "lu0197800237", "lu0197800237",
                          "nl0000289965", None],
                t=[None, None, 233, 234, 235, 233, 234,
                   235, 180, 181, None, None],
                share=[1.0, 0.4, 0.6, 0.6, 0.6, 0.15,
                       0.15, 0.15, 0.6, 0.6, 0.25, 1.0],
                log_return=[None, None, .09604978, -.06524096, .03532373,
                            .09604978, -.06524096, .03532373,
                            .03025441, .036997, None, None]
            ))
            .set_index(["household_id", "asset_id", "t"]))

        # ... and neither is the outer variant
        def f():
            household.join(log_return, how='outer')
        pytest.raises(NotImplementedError, f)
@pytest.fixture
def df():
    # one column per dtype kind: object, categorical, int64, float64,
    # uint64 and int32 — consumed by the dtype-preservation tests below
    data = {
        'A': ['foo', 'bar'],
        'B': Series(['foo', 'bar']).astype('category'),
        'C': [1, 2],
        'D': [1.0, 2.0],
        'E': Series([1, 2], dtype='uint64'),
        'F': Series([1, 2], dtype='int32'),
    }
    return DataFrame(data)
class TestMergeDtypes(object):
    # Tests that merge preserves or coerces dtypes sensibly.

    def test_different(self, df):
        # we expect differences by kind
        # to be ok, while other differences should return object
        left = df
        for col in df.columns:
            # merge key 'A' (object) against each of the fixture's dtypes
            right = DataFrame({'A': df[col]})
            result = pd.merge(left, right, on='A')
            assert is_object_dtype(result.A.dtype)

    @pytest.mark.parametrize('d1', [np.int64, np.int32,
                                    np.int16, np.int8, np.uint8])
    @pytest.mark.parametrize('d2', [np.int64, np.float64,
                                    np.float32, np.float16])
    def test_join_multi_dtypes(self, d1, d2):
        # joining on (int-like, object) keys for every combination of
        # left/right value dtypes
        dtype1 = np.dtype(d1)
        dtype2 = np.dtype(d2)

        left = DataFrame({'k1': np.array([0, 1, 2] * 8, dtype=dtype1),
                          'k2': ['foo', 'bar'] * 12,
                          'v': np.array(np.arange(24), dtype=np.int64)})

        index = MultiIndex.from_tuples([(2, 'bar'), (1, 'foo')])
        right = DataFrame({'v2': np.array([5, 7], dtype=dtype2)}, index=index)

        result = left.join(right, on=['k1', 'k2'])

        expected = left.copy()

        # integer right values are upcast to float to accommodate NaN
        # for the unmatched rows
        if dtype2.kind == 'i':
            dtype2 = np.dtype('float64')
        expected['v2'] = np.array(np.nan, dtype=dtype2)
        expected.loc[(expected.k1 == 2) & (expected.k2 == 'bar'), 'v2'] = 5
        expected.loc[(expected.k1 == 1) & (expected.k2 == 'foo'), 'v2'] = 7

        tm.assert_frame_equal(result, expected)

        result = left.join(right, on=['k1', 'k2'], sort=True)
        expected.sort_values(['k1', 'k2'], kind='mergesort', inplace=True)
        tm.assert_frame_equal(result, expected)
@pytest.fixture
def left():
    # frame with a categorical merge key 'X' and an object column 'Y';
    # seeded so repeated fixture calls produce identical data
    np.random.seed(1234)
    x_values = np.random.choice(['foo', 'bar'], size=(10,))
    y_values = np.random.choice(['one', 'two', 'three'], size=(10,))
    return DataFrame({'X': Series(x_values).astype(CDT(['foo', 'bar'])),
                      'Y': y_values})
@pytest.fixture
def right():
    # small lookup table keyed by the same categorical 'X' as ``left``
    np.random.seed(1234)
    x_values = Series(['foo', 'bar']).astype(CDT(['foo', 'bar']))
    return DataFrame({'X': x_values, 'Z': [1, 2]})
class TestMergeCategorical(object):
def test_identical(self, left):
# merging on the same, should preserve dtypes
merged = pd.merge(left, left, on='X')
result = merged.dtypes.sort_index()
expected = Series([CategoricalDtype(),
np.dtype('O'),
np.dtype('O')],
index=['X', 'Y_x', 'Y_y'])
assert_series_equal(result, expected)
def test_basic(self, left, right):
# we have matching Categorical dtypes in X
# so should preserve the merged column
merged = pd.merge(left, right, on='X')
result = merged.dtypes.sort_index()
expected = Series([CategoricalDtype(),
np.dtype('O'),
np.dtype('int64')],
index=['X', 'Y', 'Z'])
assert_series_equal(result, expected)
def test_other_columns(self, left, right):
# non-merge columns should preserve if possible
right = right.assign(Z=right.Z.astype('category'))
merged = pd.merge(left, right, on='X')
result = merged.dtypes.sort_index()
expected = Series([CategoricalDtype(),
np.dtype('O'),
CategoricalDtype()],
index=['X', 'Y', 'Z'])
assert_series_equal(result, expected)
# categories are preserved
assert left.X.values.is_dtype_equal(merged.X.values)
assert right.Z.values.is_dtype_equal(merged.Z.values)
@pytest.mark.parametrize(
'change', [lambda x: x,
lambda x: x.astype(CDT(['foo', 'bar', 'bah'])),
lambda x: x.astype(CDT(ordered=True))])
@pytest.mark.parametrize('how', ['inner', 'outer', 'left', 'right'])
def test_dtype_on_merged_different(self, change, how, left, right):
# our merging columns, X now has 2 different dtypes
# so we must be object as a result
X = change(right.X.astype('object'))
right = right.assign(X=X)
assert is_categorical_dtype(left.X.values)
# assert not left.X.values.is_dtype_equal(right.X.values)
merged = pd.merge(left, right, on='X', how=how)
result = merged.dtypes.sort_index()
expected = Series([np.dtype('O'),
np.dtype('O'),
np.dtype('int64')],
index=['X', 'Y', 'Z'])
assert_series_equal(result, expected)
def test_self_join_multiple_categories(self):
# GH 16767
# non-duplicates should work with multiple categories
m = 5
df = pd.DataFrame({
'a': ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j'] * m,
'b': ['t', 'w', 'x', 'y', 'z'] * 2 * m,
'c': [letter
for each in ['m', 'n', 'u', 'p', 'o']
for letter in [each] * 2 * m],
'd': [letter
for each in ['aa', 'bb', 'cc', 'dd', 'ee',
'ff', 'gg', 'hh', 'ii', 'jj']
for letter in [each] * m]})
# change them all to categorical variables
df = df.apply(lambda x: x.astype('category'))
# self-join should equal ourselves
result = pd.merge(df, df, on=list(df.columns))
assert_frame_equal(result, df)
    def test_dtype_on_categorical_dates(self):
        # GH 16900
        # Merging on a categorical-of-dates key must keep the dates as
        # Timestamps in the result, not coerce them to ints.
        df = pd.DataFrame(
            [[date(2001, 1, 1), 1.1],
             [date(2001, 1, 2), 1.3]],
            columns=['date', 'num2']
        )
        df['date'] = df['date'].astype('category')

        df2 = pd.DataFrame(
            [[date(2001, 1, 1), 1.3],
             [date(2001, 1, 3), 1.4]],
            columns=['date', 'num4']
        )
        df2['date'] = df2['date'].astype('category')

        # Outer join: all three dates appear, missing sides filled with NaN.
        expected_outer = pd.DataFrame([
            [pd.Timestamp('2001-01-01'), 1.1, 1.3],
            [pd.Timestamp('2001-01-02'), 1.3, np.nan],
            [pd.Timestamp('2001-01-03'), np.nan, 1.4]],
            columns=['date', 'num2', 'num4']
        )
        result_outer = pd.merge(df, df2, how='outer', on=['date'])
        assert_frame_equal(result_outer, expected_outer)

        # Inner join: only the date present on both sides survives.
        expected_inner = pd.DataFrame(
            [[pd.Timestamp('2001-01-01'), 1.1, 1.3]],
            columns=['date', 'num2', 'num4']
        )
        result_inner = pd.merge(df, df2, how='inner', on=['date'])
        assert_frame_equal(result_inner, expected_inner)
@pytest.fixture
def left_df():
    # Left operand for TestMergeOnIndexes: one int column with a
    # deliberately reverse-ordered integer index (exercises sort=True/False).
    return DataFrame({'a': [20, 10, 0]}, index=[2, 1, 0])


@pytest.fixture
def right_df():
    # Right operand: overlaps left_df's index on 1 and 2, adds index 3,
    # so every join type produces a distinct result.
    return DataFrame({'b': [300, 100, 200]}, index=[3, 1, 2])
class TestMergeOnIndexes(object):
    # Exhaustive check of pd.merge with left_index=True/right_index=True
    # across all four join types, with and without sort. Expected frames
    # are spelled out by hand for the left_df/right_df fixtures.

    @pytest.mark.parametrize(
        "how, sort, expected",
        [('inner', False, DataFrame({'a': [20, 10],
                                     'b': [200, 100]},
                                    index=[2, 1])),
         ('inner', True, DataFrame({'a': [10, 20],
                                    'b': [100, 200]},
                                   index=[1, 2])),
         ('left', False, DataFrame({'a': [20, 10, 0],
                                    'b': [200, 100, np.nan]},
                                   index=[2, 1, 0])),
         ('left', True, DataFrame({'a': [0, 10, 20],
                                   'b': [np.nan, 100, 200]},
                                  index=[0, 1, 2])),
         ('right', False, DataFrame({'a': [np.nan, 10, 20],
                                     'b': [300, 100, 200]},
                                    index=[3, 1, 2])),
         ('right', True, DataFrame({'a': [10, 20, np.nan],
                                    'b': [100, 200, 300]},
                                   index=[1, 2, 3])),
         ('outer', False, DataFrame({'a': [0, 10, 20, np.nan],
                                     'b': [np.nan, 100, 200, 300]},
                                    index=[0, 1, 2, 3])),
         ('outer', True, DataFrame({'a': [0, 10, 20, np.nan],
                                    'b': [np.nan, 100, 200, 300]},
                                   index=[0, 1, 2, 3]))])
    def test_merge_on_indexes(self, left_df, right_df, how, sort, expected):
        # Merge purely on the indexes; values never participate as keys.
        result = pd.merge(left_df, right_df,
                          left_index=True,
                          right_index=True,
                          how=how,
                          sort=sort)
        tm.assert_frame_equal(result, expected)
|
bsd-3-clause
|
vojtasvoboda/GopayInline
|
src/Exception/GopayException.php
|
124
|
<?php

namespace Markette\GopayInline\Exception;

use RuntimeException;

/**
 * Base exception type for all errors raised by the GopayInline library.
 *
 * Extends RuntimeException, so callers may — but are not required to —
 * catch it; catch this one type to handle any library failure.
 */
class GopayException extends RuntimeException
{

}
|
bsd-3-clause
|
magiclabs/spree_payone
|
app/models/spree/payment_source/payone/payone_debit_payment_payment_source.rb
|
1043
|
# Spree payment method source for PAYONE debit payment.
module Spree
  module PaymentSource
    module Payone
      class PayoneDebitPaymentPaymentSource < ActiveRecord::Base
        has_many :payments, :as => :source

        # Actions the admin interface may offer for payments on this source.
        def actions
          %w{capture void credit}
        end

        # A payment can be captured while it is still pending or in checkout.
        def can_capture?(payment)
          %w{pending checkout}.include?(payment.state)
        end

        # A payment can be voided unless it was already voided, completed,
        # failed, or is currently being processed.
        def can_void?(payment)
          !%w{void completed failed processing}.include?(payment.state)
        end

        # A payment can be credited only when it completed, the order owes
        # credit, and a positive creditable amount remains.
        def can_credit?(payment)
          payment.state == 'completed' &&
            payment.order.payment_state == 'credit_owed' &&
            payment.credit_allowed > 0
        end
      end
    end
  end
end
|
bsd-3-clause
|
declarativitydotnet/p2
|
net/defrag.C
|
3422
|
// -*- c-basic-offset: 2; related-file-name: "frag.h" -*-
/*
* This file is distributed under the terms in the attached LICENSE file.
* If you do not find this file, copies can be found by writing to:
* Intel Research Berkeley, 2150 Shattuck Avenue, Suite 1300,
* Berkeley, CA, 94704. Attention: Intel License Inquiry.
* Or
* UC Berkeley EECS Computer Science Division, 387 Soda Hall #1776,
* Berkeley, CA, 94707. Attention: P2 Group.
*
*/
#include "defrag.h"
#include "val_str.h"
#include "val_tuple.h"
#include "val_null.h"
#include "val_int64.h"
#include "val_opaque.h"
#include "xdrbuf.h"
#include "netglobals.h"
DEFINE_ELEMENT_INITS(Defrag, "Defrag")

// Constructs a Defrag element with one input port (fragments pushed in)
// and one output port (reassembled tuples pulled out). No pull callback
// is outstanding initially.
Defrag::Defrag(string name)
  : Element(name, 1, 1),
    _pull_cb(0)
{
}

// Dataflow-style constructor: the element name is carried in args[2].
Defrag::Defrag(TuplePtr args)
  : Element(Val_Str::cast((*args)[2]), 1, 1), _pull_cb(0) { }
// Receives one fragment tuple on port 0, feeds it to the defragmenter,
// and wakes a blocked puller if a complete tuple became available.
// Always returns 1 (this element never exerts back-pressure upstream).
int Defrag::push(int port, TuplePtr t, b_cbv cb)
{
  // Is this the right port?
  assert(port == 0);

  defragment(t);

  // Unblock the puller if one is waiting and we now have output ready
  if (tuples_.size() > 0 && _pull_cb != 0) {
    ELEM_INFO( "push: wakeup puller");
    _pull_cb();
    _pull_cb = 0;
  }
  return 1;
}
// Hands out the next fully reassembled tuple, FIFO order. If none is
// ready, registers the caller's callback (at most one — a second
// concurrent pull is an underrun and its callback is dropped) and
// returns a null TuplePtr.
TuplePtr Defrag::pull(int port, b_cbv cb)
{
  // Is this the right port?
  assert(port == 0);

  // Do I have a reassembled tuple ready?
  if (tuples_.size() > 0) {
    ELEM_INFO( "pull: will succeed");
    TuplePtr t = tuples_.front();
    tuples_.pop_front();
    return t;
  }
  else {
    // I don't have a tuple. Do I have a pull callback already?
    if (_pull_cb == 0) {
      // Accept the callback; it fires on the next successful push
      ELEM_INFO( "pull: raincheck");
      _pull_cb = cb;
    }
    else {
      // I already have a pull callback — caller's cb is discarded
      ELEM_INFO( "pull: underrun");
    }
    return TuplePtr();
  }
}
/**
 * Accepts one fragment tuple and, once every chunk of its sequence has
 * arrived, reassembles the payload, unmarshals the packaged tuple, and
 * queues the expanded result on tuples_.
 *
 * Fragment layout (fields at SEQ..SEQ+3): sequence number, chunk offset,
 * total chunk count, opaque payload bytes.
 */
void Defrag::defragment(TuplePtr t)
{
  uint64_t seq_num = Val_Int64::cast((*t)[SEQ]);
  unsigned offset = Val_Int64::cast((*t)[SEQ+1]);
  unsigned chunks = Val_Int64::cast((*t)[SEQ+2]);

  // Duplicate detection must only examine fragments of THIS sequence.
  // The previous code iterated from find(seq_num) all the way to end(),
  // which walks past this key's entries into other sequences' fragments:
  // a fragment of a *different* sequence with the same offset would be
  // misreported as a duplicate and the new fragment silently dropped.
  // equal_range() bounds the scan to the matching key.
  std::pair<FragMap::iterator, FragMap::iterator> range =
    fragments_.equal_range(seq_num);
  for (FragMap::iterator iter = range.first; iter != range.second; iter++) {
    if (Val_Int64::cast((*iter->second)[SEQ+1]) == offset) {
      ELEM_INFO( "defragment: duplicate offset");
      return;
    }
  }
  fragments_.insert(std::make_pair(seq_num, t));

  if (fragments_.count(seq_num) == chunks) {
    // All chunks present: concatenate the payloads in offset order.
    FdbufPtr fb(new Fdbuf());
    for (unsigned i = 0; i < chunks; i++) {
      TuplePtr p;
      // Same fix as above: restrict the search for the chunk with
      // offset i to this sequence's fragments only.
      range = fragments_.equal_range(seq_num);
      for (FragMap::iterator iter = range.first; iter != range.second; iter++) {
        if (Val_Int64::cast((*iter->second)[SEQ+1]) == i) {
          p = iter->second;
          fragments_.erase(iter);
          break;
        }
      }
      assert(p);
      FdbufPtr payload = Val_Opaque::cast((*p)[SEQ+3]);
      fb->push_bytes(payload->cstr(), payload->length());
    }

    // Unmarshal and expand the packaged tuple
    XDR xd;
    xdrfdbuf_create(&xd, fb.get(), false, XDR_DECODE);
    TuplePtr unmarshal = Tuple::xdr_unmarshal(&xd);
    xdr_destroy(&xd);

    // Rebuild the output: copy leading fields, then splice the
    // unmarshaled fields in place of the 4 fragment-bookkeeping fields.
    TuplePtr defraged = Tuple::mk();
    for (unsigned i = 0 ; i < t->size(); ) {
      if (i == SEQ) {
        defraged->append((*t)[i]); // Add the sequence number
        for (unsigned j = 0; j < unmarshal->size(); j++) {
          defraged->append((*unmarshal)[j]);
        }
        i += 4; // Move all the way past the marshal fields
      }
      else {
        defraged->append((*t)[i++]);
      }
    }
    tuples_.push_back(defraged);
  }
}
|
bsd-3-clause
|
youtube/cobalt
|
third_party/llvm-project/compiler-rt/test/ubsan/TestCases/TypeCheck/Function/function.cpp
|
3447
|
// RUN: %clangxx -std=c++17 -fsanitize=function %s -O3 -g -o %t
// RUN: %run %t 2>&1 | FileCheck %s
// Verify that we can disable symbolization if needed:
// RUN: %env_ubsan_opts=symbolize=0 %run %t 2>&1 | FileCheck %s --check-prefix=NOSYM
// XFAIL: win32,win64
// Unsupported function flag
// UNSUPPORTED: openbsd

// NOTE(review): everything below this point is line-offset sensitive.
// The CHECK/NOSYM lines use FileCheck's [[@LINE+N]]/[[@LINE-N]] numeric
// expressions (one even reaches 11 lines backwards to f()'s definition),
// so inserting or deleting ANY line after this comment breaks the
// expected diagnostics. Add new commentary only above the #include.

#include <stdint.h>

void f() {}

void g(int x) {}

void make_valid_call() {
  // CHECK-NOT: runtime error: call to function g
  reinterpret_cast<void (*)(int)>(reinterpret_cast<uintptr_t>(g))(42);
}

void make_invalid_call() {
  // CHECK: function.cpp:[[@LINE+4]]:3: runtime error: call to function f() through pointer to incorrect function type 'void (*)(int)'
  // CHECK-NEXT: function.cpp:[[@LINE-11]]: note: f() defined here
  // NOSYM: function.cpp:[[@LINE+2]]:3: runtime error: call to function (unknown) through pointer to incorrect function type 'void (*)(int)'
  // NOSYM-NEXT: ({{.*}}+0x{{.*}}): note: (unknown) defined here
  reinterpret_cast<void (*)(int)>(reinterpret_cast<uintptr_t>(f))(42);
}

void f1(int) {}
void f2(unsigned int) {}
void f3(int) noexcept {}
void f4(unsigned int) noexcept {}

void check_noexcept_calls() {
  void (*p1)(int);
  p1 = &f1;
  p1(0);
  p1 = reinterpret_cast<void (*)(int)>(&f2);
  // CHECK: function.cpp:[[@LINE+2]]:3: runtime error: call to function f2(unsigned int) through pointer to incorrect function type 'void (*)(int)'
  // NOSYM: function.cpp:[[@LINE+1]]:3: runtime error: call to function (unknown) through pointer to incorrect function type 'void (*)(int)'
  p1(0);
  p1 = &f3;
  p1(0);
  p1 = reinterpret_cast<void (*)(int)>(&f4);
  // CHECK: function.cpp:[[@LINE+2]]:3: runtime error: call to function f4(unsigned int) through pointer to incorrect function type 'void (*)(int)'
  // NOSYM: function.cpp:[[@LINE+1]]:3: runtime error: call to function (unknown) through pointer to incorrect function type 'void (*)(int)'
  p1(0);

  void (*p2)(int) noexcept;
  p2 = reinterpret_cast<void (*)(int) noexcept>(&f1);
  // TODO: Unclear whether calling a non-noexcept function through a pointer to
  // nexcept function should cause an error.
  // CHECK-NOT: function.cpp:[[@LINE+2]]:3: runtime error: call to function f1(int) through pointer to incorrect function type 'void (*)(int) noexcept'
  // NOSYM-NOT: function.cpp:[[@LINE+1]]:3: runtime error: call to function (unknown) through pointer to incorrect function type 'void (*)(int) noexcept'
  p2(0);
  p2 = reinterpret_cast<void (*)(int) noexcept>(&f2);
  // CHECK: function.cpp:[[@LINE+2]]:3: runtime error: call to function f2(unsigned int) through pointer to incorrect function type 'void (*)(int) noexcept'
  // NOSYM: function.cpp:[[@LINE+1]]:3: runtime error: call to function (unknown) through pointer to incorrect function type 'void (*)(int) noexcept'
  p2(0);
  p2 = &f3;
  p2(0);
  p2 = reinterpret_cast<void (*)(int) noexcept>(&f4);
  // CHECK: function.cpp:[[@LINE+2]]:3: runtime error: call to function f4(unsigned int) through pointer to incorrect function type 'void (*)(int) noexcept'
  // NOSYM: function.cpp:[[@LINE+1]]:3: runtime error: call to function (unknown) through pointer to incorrect function type 'void (*)(int) noexcept'
  p2(0);
}

int main(void) {
  make_valid_call();
  make_invalid_call();
  check_noexcept_calls();
  // Check that no more errors will be printed.
  // CHECK-NOT: runtime error: call to function
  // NOSYM-NOT: runtime error: call to function
  make_invalid_call();
}
|
bsd-3-clause
|
sorig/shogun
|
src/shogun/features/RandomFourierDotFeatures.cpp
|
2578
|
/*
* This software is distributed under BSD 3-clause license (see LICENSE file).
*
* Authors: Evangelos Anagnostopoulos, Bjoern Esser
*/
#include <shogun/base/Parameter.h>
#include <shogun/mathematics/Math.h>
#include <shogun/features/RandomFourierDotFeatures.h>
namespace shogun {
enum KernelName;
// Default constructor: no kernel selected yet and no parameters; random
// coefficients are not generated here.
CRandomFourierDotFeatures::CRandomFourierDotFeatures()
{
	init(NOT_SPECIFIED, SGVector<float64_t>());
}

// Constructs the features over `features` with D random samples of the
// given kernel; draws fresh random coefficients.
CRandomFourierDotFeatures::CRandomFourierDotFeatures(CDotFeatures* features,
	int32_t D, KernelName kernel_name, SGVector<float64_t> params)
	: CRandomKitchenSinksDotFeatures(features, D)
{
	init(kernel_name, params);
	random_coeff = generate_random_coefficients();
}

// Same as above but reuses previously generated coefficients `coeff`
// (e.g. so train and test features share the same random projection).
CRandomFourierDotFeatures::CRandomFourierDotFeatures(CDotFeatures* features,
	int32_t D, KernelName kernel_name, SGVector<float64_t> params,
	SGMatrix<float64_t> coeff)
	: CRandomKitchenSinksDotFeatures(features, D, coeff)
{
	init(kernel_name, params);
}

// Loading from file is not supported.
CRandomFourierDotFeatures::CRandomFourierDotFeatures(CFile* loader)
{
	SG_NOTIMPLEMENTED;
}

// Copy constructor: base class copies the underlying features/coefficients;
// kernel choice and parameters are re-initialized from `orig`.
CRandomFourierDotFeatures::CRandomFourierDotFeatures(const CRandomFourierDotFeatures& orig)
	: CRandomKitchenSinksDotFeatures(orig)
{
	init(orig.kernel, orig.kernel_params);
}

CRandomFourierDotFeatures::~CRandomFourierDotFeatures()
{
}
// Shared initialization: records which kernel is approximated and its
// parameters, precomputes the output scaling constant, and registers the
// members with the parameter framework (for serialization/model-selection).
void CRandomFourierDotFeatures::init(KernelName kernel_name, SGVector<float64_t> params)
{
	kernel = kernel_name;
	kernel_params = params;

	// sqrt(2/D) scaling of the random Fourier features; guard D == 0
	// (default-constructed object) to avoid division by zero.
	constant = num_samples > 0 ? std::sqrt(2.0 / num_samples) : 1;
	SG_ADD(
	    &kernel_params, "kernel_params",
	    "The parameters of the kernel to approximate");
	SG_ADD((machine_int_t* ) &kernel, "kernel",
		"The kernel to approximate");
	SG_ADD(&constant, "constant", "A constant needed");
}
// Returns a deep copy made via the copy constructor; caller owns it.
CFeatures* CRandomFourierDotFeatures::duplicate() const
{
	return new CRandomFourierDotFeatures(*this);
}

// Class name used by Shogun's reflection/serialization machinery.
const char* CRandomFourierDotFeatures::get_name() const
{
	return "RandomFourierDotFeatures";
}
// Post-processes a raw dot product w·x for feature index par_idx into the
// random-Fourier value cos(w·x + b) * constant, where the phase offset b
// is stored in the last row of random_coeff.
float64_t CRandomFourierDotFeatures::post_dot(float64_t dot_result, index_t par_idx)
{
	dot_result += random_coeff(random_coeff.num_rows-1, par_idx);
	return std::cos(dot_result) * constant;
}
// Draws one random parameter vector: the first dim entries are the random
// projection direction w, the final entry is a random phase offset b drawn
// uniformly from [0, 2*pi). Only the Gaussian kernel is implemented; any
// other kernel value is a fatal error.
SGVector<float64_t> CRandomFourierDotFeatures::generate_random_parameter_vector()
{
	SGVector<float64_t> vec(feats->get_dim_feature_space()+1);
	switch (kernel)
	{
		case GAUSSIAN:
			for (index_t i=0; i<vec.vlen-1; i++)
			{
				// w_i = sqrt(2/width) * N(0,1), i.e. standard deviation
				// sqrt(2/kernel_params[0]) — kernel_params[0] is
				// presumably the Gaussian kernel width (TODO confirm
				// against the header's documentation).
				vec[i] = std::sqrt((float64_t)1 / kernel_params[0]) *
					std::sqrt(2.0) * CMath::normal_random(0.0, 1);
			}

			vec[vec.vlen-1] = CMath::random(0.0, 2 * CMath::PI);
			break;

		default:
			SG_SERROR("Unknown kernel\n");
	}
	return vec;
}
}
|
bsd-3-clause
|
cornernote/yii-menu-module
|
menu/components/MenuAccessFilter.php
|
860
|
<?php
/**
 * MenuAccessFilter
 *
 * Restricts filtered controller actions to the users listed in
 * MenuModule::$adminUsers.
 *
 * @author Brett O'Donnell <cornernote@gmail.com>
 * @author Zain Ul abidin <zainengineer@gmail.com>
 * @copyright 2013 Mr PHP
 * @link https://github.com/cornernote/yii-menu-module
 * @license BSD-3-Clause https://raw.github.com/cornernote/yii-menu-module/master/LICENSE
 *
 * @package yii-menu-module
 */
class MenuAccessFilter extends CFilter
{

    /**
     * Runs before the filtered action and allows it only for menu admins.
     *
     * @param CFilterChain $filterChain
     * @return bool whether the action should be executed
     * @throws CHttpException 403 if the current user is not a menu admin
     */
    protected function preFilter($filterChain)
    {
        /** @var MenuModule $module */
        $module = Yii::app()->getModule('menu');
        $currentUser = Yii::app()->getUser()->getName();
        if (in_array($currentUser, $module->adminUsers)) {
            return parent::preFilter($filterChain);
        }
        throw new CHttpException(403, 'You are not allowed to access this page.');
    }

}
|
bsd-3-clause
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.