repo_name (stringlengths 4-116) | path (stringlengths 4-379) | size (stringlengths 1-7) | content (stringlengths 3-1.05M) | license (stringclasses 15 values)
---|---|---|---|---|
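A minimal sketch of reading one row of a dump with this schema, assuming it is published as a Hugging Face dataset (the dataset id below is a placeholder, not part of this dump):

```python
# Hypothetical: load the dump and inspect the columns listed above.
from datasets import load_dataset

ds = load_dataset("org/code-dump", split="train")  # placeholder dataset id
row = ds[0]
print(row["repo_name"], row["path"], row["size"], row["license"])
print(row["content"][:200])  # first 200 characters of the stored file
```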
PressLabs/zipa
|
tests/test_resource_iter.py
|
845
|
import pytest
import httpretty
from requests.exceptions import HTTPError
from .fixtures import pretty_api
from zipa import api_test_com as t
@pytest.mark.httpretty
def test_iter_returns_single_object(pretty_api):
t.config.secure = False
for item in t.item['a']:
assert item.name == 'a'
@pytest.mark.httpretty
def test_iter_completes(pretty_api):
items = []
t.config.secure = False
for i in t.list:
items.append(i)
assert items == [{u'item1': u'name1'}, {u'item2': u'name2'},
{u'item3': u'name3'}, {u'item4': u'name4'},
{u'item5': u'name5'}]
@pytest.mark.httpretty
def test_iter_next_link_is_error(pretty_api):
items = []
t.config.secure = False
with pytest.raises(HTTPError):
for item in t.list.first:
items.append(item)
|
apache-2.0
|
missioncommand/mil-sym-java
|
renderer/mil-sym-renderer/src/main/java/sec/web/renderer/portable/RendererSystemTray.java
|
4584
|
package sec.web.renderer.portable;
import ArmyC2.C2SD.Utilities.RendererSettings;
import java.awt.AWTException;
import java.awt.Image;
import java.awt.Menu;
import java.awt.MenuItem;
import java.awt.PopupMenu;
import java.awt.SystemTray;
import java.awt.TrayIcon;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.io.IOException;
import java.net.URL;
import javax.swing.ImageIcon;
import javax.swing.JOptionPane;
import sec.web.renderer.SECRenderer;
public class RendererSystemTray {
public RendererSystemTray() {
}
public void createSystemTray() {
try {
ensureSysTraySupported();
String toolTip = "SEC MilStd 2525 Rendering Service";
final PopupMenu popup = new PopupMenu();
final TrayIcon trayIcon = new TrayIcon(createImage("images/globe.png", toolTip),toolTip);
final SystemTray sysTray = SystemTray.getSystemTray();
// create interaction
Menu displayMenu = new Menu("Display");
MenuItem aboutItem = new MenuItem("About");
MenuItem exitItem = new MenuItem("Exit");
// Add items to popup
popup.add(aboutItem);
popup.addSeparator();
popup.addSeparator();
//popup.add(displayMenu);
popup.add(exitItem);
trayIcon.setPopupMenu(popup);
//trayIcon.setToolTip(toolTip);
sysTray.add(trayIcon);
// Messages are only shown once the icon has been added to the tray.
trayIcon.displayMessage("caption", "text", TrayIcon.MessageType.ERROR);
trayIcon.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
JOptionPane.showMessageDialog(null, "SEC's Portable Renderer");
}
});
aboutItem.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
SECRenderer sr = SECRenderer.getInstance();
String message = "";
String std = getSymbologyStandardString();
message += "\nSymbology Standard set to: " + std;
if(sr.isSinglePointServerRunning())
message += "\nSingle Point Service is running on 127.0.0.1:" + String.valueOf(sr.getSinglePointServerPort());
if(sr.isMultiPointServerRunning())
message += "\nMulti Point Service is running on 127.0.0.1:" + String.valueOf(sr.getMultiPointServerPort());
JOptionPane.showMessageDialog(null, "This Service is capable of rendering milstd 2525chB & 2525C graphics with USAS additions" + message,"About",JOptionPane.PLAIN_MESSAGE);
}
});
exitItem.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
sysTray.remove(trayIcon);
SECRenderer.getInstance().stopSinglePointServer();
SECRenderer.getInstance().stopMultiPointServer();
System.exit(0);
}
});
} catch (IOException e) {
e.printStackTrace();
} catch (AWTException e) {
e.printStackTrace();
} catch (Exception e) {
e.printStackTrace();
}
}
private void ensureSysTraySupported() throws Exception {
if (!SystemTray.isSupported()) {
throw new Exception("System Tray is not supported");
}
}
private Image createImage(String path, String descr) throws IOException {
// Try a class-relative lookup first, then fall back to the class loader.
URL imageURL = RendererSystemTray.class.getResource(path);
if (imageURL == null) {
imageURL = RendererSystemTray.class.getClassLoader().getResource(path);
}
if (imageURL == null) {
throw new IOException("Resource not found:\t" + path);
}
return (new ImageIcon(imageURL, descr)).getImage();
}
private String getSymbologyStandardString()
{
String std = "2525B";
int symstd = RendererSettings.getInstance().getSymbologyStandard();
switch(symstd)
{
case RendererSettings.Symbology_2525B:
std = "2525B";
break;
case RendererSettings.Symbology_2525C:
std = "2525C";
break;
case 2://RendererSettings.Symbology_2525D:
std = "2525D";
break;//*/
}
return std;
}
}
|
apache-2.0
|
youdonghai/intellij-community
|
java/java-impl/src/com/intellij/codeInspection/streamMigration/MigrateToStreamFix.java
|
3125
|
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.codeInspection.streamMigration;
import com.intellij.codeInspection.LambdaCanBeMethodReferenceInspection;
import com.intellij.codeInspection.LocalQuickFix;
import com.intellij.codeInspection.ProblemDescriptor;
import com.intellij.codeInspection.SimplifyStreamApiCallChainsInspection;
import com.intellij.codeInspection.streamMigration.StreamApiMigrationInspection.StreamSource;
import com.intellij.openapi.project.Project;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiLoopStatement;
import com.intellij.psi.PsiStatement;
import com.intellij.psi.codeStyle.CodeStyleManager;
import com.intellij.psi.codeStyle.JavaCodeStyleManager;
import com.intellij.psi.impl.PsiDiamondTypeUtil;
import org.jetbrains.annotations.Nls;
import org.jetbrains.annotations.NotNull;
/**
* @author Tagir Valeev
*/
class MigrateToStreamFix implements LocalQuickFix {
private BaseStreamApiMigration myMigration;
protected MigrateToStreamFix(BaseStreamApiMigration migration) {
myMigration = migration;
}
@Nls
@NotNull
@Override
public String getName() {
return "Replace with "+myMigration.getReplacement();
}
@SuppressWarnings("DialogTitleCapitalization")
@NotNull
@Override
public String getFamilyName() {
return "Replace with Stream API equivalent";
}
@Override
public void applyFix(@NotNull Project project, @NotNull ProblemDescriptor descriptor) {
PsiElement element = descriptor.getPsiElement();
if (element instanceof PsiLoopStatement) {
PsiLoopStatement loopStatement = (PsiLoopStatement)element;
StreamSource source = StreamSource.tryCreate(loopStatement);
PsiStatement body = loopStatement.getBody();
if(body == null || source == null) return;
TerminalBlock tb = TerminalBlock.from(source, body);
PsiElement result = myMigration.migrate(project, body, tb);
if(result != null) {
tb.operations().forEach(StreamApiMigrationInspection.Operation::cleanUp);
simplifyAndFormat(project, result);
}
}
}
static void simplifyAndFormat(@NotNull Project project, PsiElement result) {
if (result == null) return;
LambdaCanBeMethodReferenceInspection.replaceAllLambdasWithMethodReferences(result);
PsiDiamondTypeUtil.removeRedundantTypeArguments(result);
result = SimplifyStreamApiCallChainsInspection.simplifyCollectionStreamCalls(result);
CodeStyleManager.getInstance(project).reformat(JavaCodeStyleManager.getInstance(project).shortenClassReferences(result));
}
}
|
apache-2.0
|
paineliu/tflearn
|
helen.py
|
1084
|
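# Overlays Helen facial-landmark annotations as white 2x2 markers onto
# 256x256 grayscale copies of the training images (paths are the author's
# local setup).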
import os
from PIL import Image
img_path = '/home/palm/deep/helen/train'
lab_path = '/home/palm/deep/helen/annotation'
filename = '/home/palm/deep/helen/trainnames.txt'
f = open(filename)
index = 1
for each in f:
each = each.strip()
img_file = os.path.join(img_path, each + '.jpg')
img = Image.open(img_file)
width, height = img.size
img = img.resize((256, 256))
img = img.convert('L')
lab_file = os.path.join(lab_path, str(index) + '.txt')
fl = open(lab_file)
for line in fl:
line = line.strip()
item = line.split(',')
if len(item) == 2:
x = int(float(item[0]) * 256 / width)
y = int(float(item[1]) * 256 / height)
if x > 0 and x < img.size[0] and y > 0 and y < img.size[1]:
img.putpixel((x, y), 0xffffff)
img.putpixel((x-1, y), 0xffffff)
img.putpixel((x, y-1), 0xffffff)
img.putpixel((x-1, y-1), 0xffffff)
else:
print(index, each, img.size, x, y)
index += 1
img.show()
break
|
apache-2.0
|
jayware/entity-essentials
|
entity-essentials-api/src/main/java/org/jayware/e2/component/api/PropertyDeclarationException.java
|
1582
|
/**
* Entity Essentials -- A Component-based Entity System
*
* Copyright (C) 2017 Elmar Schug <elmar.schug@jayware.org>,
* Markus Neubauer <markus.neubauer@jayware.org>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jayware.e2.component.api;
public class PropertyDeclarationException
extends RuntimeException
{
public PropertyDeclarationException(final String message)
{
super(message);
}
public PropertyDeclarationException(StackTraceElement invocation)
{
super(createMessage(invocation));
}
private static String createMessage(StackTraceElement invocation)
{
String message = "Illegal invocation of ComponentProperty.property(<type>)";
if (invocation != null)
{
message += " at " + invocation.getClassName() + "." + invocation.getMethodName() + "(" + invocation.getFileName() + ":" + invocation.getLineNumber() + ")";
}
return message + ". This operation is intended to declare properties within component classes!";
}
}
|
apache-2.0
|
serlo-org/serlo-abc
|
src/components/common/TextPicker.js
|
2860
|
import { addIndex, map, max, reduce, repeat } from 'ramda';
import React, { Component } from 'react';
import { TouchableOpacity, View, Text } from 'react-native';
import {
BLACK_TRANSPARENT,
PRIMARY_WEAK,
GREEN,
RED
} from '../../styles/colors';
import { DEFAULT } from '../../styles/text';
const mapIndexed = addIndex(map);
class TextPicker extends Component {
constructor(props) {
super(props);
this.state = {
optionsVisible: false
};
}
selectOption = key => () => {
this.setState({
optionsVisible: false
});
if (this.props.onChange) {
this.props.onChange(key);
}
};
togglePickerOptions = () => {
this.setState({
optionsVisible: !this.state.optionsVisible
});
};
styles = {
button: {
backgroundColor: PRIMARY_WEAK,
padding: 5,
borderRadius: 20,
elevation: 10,
shadowColor: BLACK_TRANSPARENT,
shadowOpacity: 1,
shadowRadius: 0,
shadowOffset: {
height: 4,
width: 4
},
minWidth: this.props.size || 25
},
text: DEFAULT
};
render() {
const optionLengths = map(option => option.length, this.props.options);
const maxLength = reduce(max, 1, optionLengths);
const defaultText = repeat(' ', maxLength).join('');
return (
<View
style={{
flex: 1,
flexDirection: 'column',
justifyContent: 'flex-end'
}}
>
{this.state.optionsVisible || this.props.showFeedback
? mapIndexed((option, key) => {
if (this.props.selectedValue === option) {
return null;
}
return (
<TouchableOpacity onPress={this.selectOption(key)} key={key}>
<View
style={[
this.styles.button,
this.props.showFeedback &&
this.props.feedback &&
this.props.feedback.correctChoice === key && {
backgroundColor: GREEN
}
]}
>
<Text style={this.styles.text}>{option}</Text>
</View>
</TouchableOpacity>
);
}, this.props.options)
: null}
<TouchableOpacity onPress={this.togglePickerOptions}>
<View
style={[
this.styles.button,
this.props.showFeedback &&
this.props.feedback && {
backgroundColor: RED
}
]}
>
<Text style={this.styles.text}>
{this.props.selectedValue || defaultText}
</Text>
</View>
</TouchableOpacity>
</View>
);
}
}
export default TextPicker;
|
apache-2.0
|
hazendaz/assertj-core
|
src/test/java/org/assertj/core/api/longarray/LongArrayAssert_endsWith_Test.java
|
1287
|
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* Copyright 2012-2021 the original author or authors.
*/
package org.assertj.core.api.longarray;
import static org.assertj.core.test.LongArrays.arrayOf;
import org.assertj.core.api.LongArrayAssert;
import org.assertj.core.api.LongArrayAssertBaseTest;
import static org.mockito.Mockito.verify;
/**
* Tests for <code>{@link LongArrayAssert#endsWith(long...)}</code>.
*
* @author Alex Ruiz
*/
class LongArrayAssert_endsWith_Test extends LongArrayAssertBaseTest {
@Override
protected LongArrayAssert invoke_api_method() {
return assertions.endsWith(6L, 8L);
}
@Override
protected void verify_internal_effects() {
verify(arrays).assertEndsWith(getInfo(assertions), getActual(assertions), arrayOf(6L, 8L));
}
}
|
apache-2.0
|
nlamirault/abraracourcix
|
server/abraracourcixd/abraracourcixd.go
|
3779
|
// Copyright (C) 2015-2018 Nicolas Lamirault <nicolas.lamirault@gmail.com>
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package abraracourcixd
import (
"fmt"
"net"
"net/http"
"github.com/golang/glog"
"github.com/prometheus/client_golang/prometheus"
"golang.org/x/net/context"
"github.com/nlamirault/abraracourcix/api"
"github.com/nlamirault/abraracourcix/config"
"github.com/nlamirault/abraracourcix/storage"
_ "github.com/nlamirault/abraracourcix/storage/badger"
_ "github.com/nlamirault/abraracourcix/storage/boltdb"
_ "github.com/nlamirault/abraracourcix/storage/leveldb"
_ "github.com/nlamirault/abraracourcix/storage/mongodb"
_ "github.com/nlamirault/abraracourcix/storage/redis"
"github.com/nlamirault/abraracourcix/tracing"
_ "github.com/nlamirault/abraracourcix/tracing/jaeger"
_ "github.com/nlamirault/abraracourcix/tracing/zipkin"
)
const (
apiVersion = "v2beta"
)
func getStorage(conf *config.Configuration) (storage.Storage, error) {
glog.V(0).Infof("Create the backend using: %s", conf.Storage)
db, err := storage.New(conf)
if err != nil {
return nil, err
}
err = db.Init()
if err != nil {
return nil, err
}
return db, nil
}
func StartServer(configFilename string) {
conf, err := config.LoadFileConfig(configFilename)
if err != nil {
glog.Fatalf("failed to load configuration: %v", err)
}
db, err := getStorage(conf)
if err != nil {
glog.Fatalf("failed to load configuration: %v", err)
}
glog.V(1).Infof("Backend used: %s", db.Name())
tracer, err := tracing.New(conf)
if err != nil {
glog.Fatalf("failed to initialize OpenTracing: %v", err)
}
glog.V(0).Infoln("Create the gRPC servers")
ctx := context.Background()
ctx, cancel := context.WithCancel(ctx)
defer cancel()
grpcAddr := fmt.Sprintf(":%d", conf.API.GrpcPort)
lis, err := net.Listen("tcp", grpcAddr)
if err != nil {
glog.Fatalf("failed to listen: %v", err)
}
glog.V(0).Infof("Listen on %s", grpcAddr)
glog.V(1).Info("Create the authentication system")
serverAuth, err := newServerAuthentication(conf)
if err != nil {
glog.Fatalf("Failed to create authentication: %v", err)
}
grpcServer, err := registerServer(db, serverAuth, tracer, conf, grpcAddr)
if err != nil {
glog.Fatalf("Failed to register gRPC server: %s", err.Error())
}
gwmux, err := registerGateway(ctx, fmt.Sprintf("localhost:%d", conf.API.GrpcPort))
if err != nil {
glog.Fatalf("Failed to register JSON gateway: %s", err.Error())
}
httpmux := http.NewServeMux()
httpmux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
w.Write([]byte(`<html>
<head><title>Abraracourcix</title></head>
<body>
<h1>Abraracourcix</h1>
</body>
</html>`))
})
httpmux.Handle(fmt.Sprintf("/%s/", apiVersion), gwmux)
httpmux.Handle("/metrics", prometheus.Handler())
httpmux.HandleFunc("/version", api.VersionHandler)
api.ServeStaticFile(httpmux)
api.ServeSwagger(httpmux)
glog.V(0).Infof("Start gRPC server on %s", grpcAddr)
go grpcServer.Serve(lis)
gwAddr := fmt.Sprintf(":%d", conf.API.RestPort)
srv := &http.Server{
Addr: gwAddr,
Handler: grpcHandlerFunc(grpcServer, httpmux),
}
glog.V(0).Infof("Start HTTP server on %s", gwAddr)
glog.Fatal(srv.ListenAndServe())
}
|
apache-2.0
|
luciferous/finagle
|
finagle-netty4/src/main/scala/com/twitter/finagle/netty4/ssl/Netty4SslConfigurations.scala
|
3311
|
package com.twitter.finagle.netty4.ssl
import com.twitter.finagle.ssl.{ApplicationProtocols, SslConfigurationException, TrustCredentials}
import com.twitter.util.{Return, Throw, Try}
import io.netty.handler.ssl.{ApplicationProtocolConfig, SslContextBuilder, SslProvider}
import io.netty.handler.ssl.ApplicationProtocolConfig.{
Protocol,
SelectedListenerFailureBehavior,
SelectorFailureBehavior
}
import io.netty.handler.ssl.util.InsecureTrustManagerFactory
import scala.collection.JavaConverters._
import scala.util.control.NonFatal
/**
* Convenience functions for setting values on a Netty `SslContextBuilder`
* which are applicable to both client and server engines.
*/
private[finagle] object Netty4SslConfigurations {
/**
* Configures the trust credentials of the `SslContextBuilder`. This
* method mutates the `SslContextBuilder`, and returns it as the result.
*
* @note TrustCredentials.Unspecified does not change the builder.
*/
def configureTrust(
builder: SslContextBuilder,
trustCredentials: TrustCredentials
): SslContextBuilder = {
trustCredentials match {
case TrustCredentials.Unspecified =>
builder // Do Nothing
case TrustCredentials.Insecure =>
builder.trustManager(InsecureTrustManagerFactory.INSTANCE)
case TrustCredentials.CertCollection(file) =>
builder.trustManager(file)
case TrustCredentials.TrustManagerFactory(trustManagerFactory) =>
builder.trustManager(trustManagerFactory)
}
}
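// Illustrative usage (not from this file): configureTrust(SslContextBuilder.forClient(),
// TrustCredentials.Insecure) would produce a builder that trusts any certificate chain.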
/**
* Configures the application protocols of the `SslContextBuilder`. This
* method mutates the `SslContextBuilder`, and returns it as the result.
*
* @note This also sets the `SelectorFailureBehavior` to NO_ADVERTISE,
* and the `SelectedListenerFailureBehavior` to ACCEPT as those are the
* only modes supported by both JDK and Native engines.
*/
def configureApplicationProtocols(
builder: SslContextBuilder,
applicationProtocols: ApplicationProtocols,
negotiationProtocol: Protocol
): SslContextBuilder = {
applicationProtocols match {
case ApplicationProtocols.Unspecified =>
builder // Do Nothing
case ApplicationProtocols.Supported(protos) =>
builder.applicationProtocolConfig(
new ApplicationProtocolConfig(
negotiationProtocol,
SelectorFailureBehavior.NO_ADVERTISE,
SelectedListenerFailureBehavior.ACCEPT,
protos.asJava
)
)
}
}
/**
* Configures the SSL provider with the JDK SSL provider if `forceJdk` is true.
*
* @note This is necessary in environments where the native engine could fail to load.
*/
def configureProvider(builder: SslContextBuilder, forceJdk: Boolean): SslContextBuilder =
if (forceJdk) builder.sslProvider(SslProvider.JDK)
else builder
/**
* Unwraps the `Try[SslContextBuilder]` and throws an `SslConfigurationException` for
* `NonFatal` errors.
*/
def unwrapTryContextBuilder(builder: Try[SslContextBuilder]): SslContextBuilder =
builder match {
case Return(sslContextBuilder) =>
sslContextBuilder
case Throw(NonFatal(nonFatal)) =>
throw new SslConfigurationException(nonFatal)
case Throw(throwable) =>
throw throwable
}
}
|
apache-2.0
|
CChengz/dot.r
|
workspace/fits/jexi-1.0b-all/src/com/crackj2ee/jexi/ui/ViewDecorator.java
|
3023
|
/*
* Created on 2004-7-25
* Author: Xuefeng, Copyright (C) 2004, Xuefeng.
*/
package com.crackj2ee.jexi.ui;
/**
* The view decorator.
*
* @author Xuefeng
*/
public abstract class ViewDecorator implements View {
// a reference to view component:
protected View component;
/**
* To create a wrapped view.
*
* @param component The view component that will be decorated (or wrapped).
*/
public ViewDecorator(View component) {
this.component = component;
}
/* (non-Javadoc)
* @see jexi.ui.View#onMouseMove(int, int)
*/
public void onMouseMove(int x, int y) {
this.component.onMouseMove(x, y);
}
/* (non-Javadoc)
* @see jexi.ui.View#onLButtonDown(int, int)
*/
public void onLButtonDown(int x, int y) {
this.component.onLButtonDown(x, y);
}
/* (non-Javadoc)
* @see jexi.ui.View#onLButtonUp(int, int)
*/
public void onLButtonUp(int x, int y) {
this.component.onLButtonUp(x, y);
}
/* (non-Javadoc)
* @see jexi.ui.View#onRButtonDown(int, int)
*/
public void onRButtonDown(int x, int y) {
this.component.onRButtonDown(x, y);
}
/* (non-Javadoc)
* @see jexi.ui.View#onRButtonUp(int, int)
*/
public void onRButtonUp(int x, int y) {
this.component.onRButtonUp(x, y);
}
/* (non-Javadoc)
* @see jexi.ui.View#onLButtonDblClick(int, int)
*/
public void onLButtonDblClick(int x, int y) {
this.component.onLButtonDblClick(x, y);
}
/* (non-Javadoc)
* @see jexi.ui.View#onSetCaret(int, int, int)
*/
public void onSetCaret(int x, int y, int height) {
this.component.onSetCaret(x, y, height);
}
/* (non-Javadoc)
* @see jexi.ui.View#init(jexi.core.Document)
*/
public void init(com.crackj2ee.jexi.core.Document document) {
this.component.init(document);
}
/* (non-Javadoc)
* @see jexi.ui.View#onKeyPressed(char)
*/
public void onKeyPressed(char c) {
this.component.onKeyPressed(c);
}
/* (non-Javadoc)
* @see jexi.ui.View#onFunctionKeyPressed(int, boolean, boolean, boolean)
*/
public void onFunctionKeyPressed(int keycode, boolean shift, boolean ctrl, boolean alt) {
this.component.onFunctionKeyPressed(keycode, shift, ctrl, alt);
}
/* (non-Javadoc)
* @see jexi.ui.View#onFormatChanged(java.lang.String, java.lang.Integer, java.lang.Boolean, java.lang.Boolean, java.lang.Boolean, jexi.ui.Color)
*/
public void onFormatChanged(String fontName, Integer fontSize,
Boolean bold, Boolean italic, Boolean underlined, Color color)
{
this.component.onFormatChanged(fontName, fontSize, bold, italic, underlined, color);
}
/* (non-Javadoc)
* @see jexi.ui.View#onInsertPictureFromFile(java.lang.String)
*/
public void onInsertPictureFromFile(String filename) {
this.component.onInsertPictureFromFile(filename);
}
}
|
apache-2.0
|
minimoog/virtualringbuffer
|
main.cpp
|
1576
|
#include "virtualringbuffer.h"
#include <string.h>
#include <stdio.h>
int main()
{
VirtualRingBuffer vbrb(8000);
char *indata = new char[2000];
char *outdata = new char[2000];
memset(indata, '1', 2000);
//simple test
memcpy(vbrb.writePointer(), indata, 2000);
vbrb.commit(2000);
memcpy(outdata, vbrb.readPointer(), 2000);
vbrb.decommit(2000);
if (memcmp(outdata, indata, 2000) != 0)
printf("failed on simple test\n");
//loop test
for (int i = 0; i < 100; ++i) {
memset(indata, '0' + i, 2000);
memcpy(vbrb.writePointer(), indata, 2000);
vbrb.commit(2000);
memcpy(outdata, vbrb.readPointer(), 2000);
vbrb.decommit(2000);
if (memcmp(outdata, indata, 2000) != 0)
printf("failed on loop test\n");
}
//wrapped test
//assuming a 4096-byte page, the requested 8000 bytes round up to 8192
//after five 2000-byte commits, reading yields 1808 bytes from the last write followed by 192 bytes left over from the first
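//worked numbers (illustrative): five commits total 10000 bytes, exceeding the
//8192-byte capacity by 10000 - 8192 = 1808, so the newest write ('4') has
//overwritten the first 1808 unread bytes; the remaining 192 bytes of the
//first write ('0') follow (1808 + 192 = 2000)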
for (int i = 0; i < 5; ++i) {
memset(indata, '0' + i, 2000);
memcpy(vbrb.writePointer(), indata, 2000);
vbrb.commit(2000);
}
memcpy(outdata, vbrb.readPointer(), 1808);
vbrb.decommit(1808);
for (int i = 0; i < 1808; ++i) {
if (outdata[i] != '4')
printf("failed on wrapped test\n");
}
memcpy(outdata, vbrb.readPointer(), 192);
vbrb.decommit(192);
for (int i = 0; i < 192; ++i) {
if (outdata[i] != '0')
printf("failed on wrapped test\n");
}
delete[] indata;
delete[] outdata;
return 0;
}
|
apache-2.0
|
Lycsona/blog
|
blog-server/src/App/BlogBundle/Entity/User.php
|
591
|
<?php
namespace App\BlogBundle\Entity;
use Doctrine\ORM\Mapping as ORM;
use JMS\Serializer\Annotation as JMS;
use FOS\UserBundle\Model\User as BaseUser;
/**
* @ORM\Entity(repositoryClass="App\BlogBundle\Repository\UserRepository")
* @ORM\Table(name="`user`")
*
* Defines the properties of the User entity to represent the application Users.
*/
class User extends BaseUser
{
/**
* @ORM\Id
* @ORM\Column(type="integer")
* @ORM\GeneratedValue(strategy="AUTO")
*/
protected $id;
public function __construct()
{
parent::__construct();
}
}
|
apache-2.0
|
vzabavnov/AVRTL
|
samples/BlinkLED/main.cpp
|
491
|
/*
* BlinkLED.cpp
*
* Created: 3/27/2017 2:41:39 PM
* Author : Vadim Zabavnov
*/
#ifndef F_CPU
#define F_CPU 800000UL
#endif
#include <avr/io.h>
#include <util/delay.h>
#include <atlexpr.h>
#include <atlstd.h>
int main(void)
{
const atl::std::DigitalPortB thePort;
const int pinMask = atl::expr::CreateBitMask<uint8_t, 3>();
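// Assumption (not verified against atlexpr.h): CreateBitMask<uint8_t, 3>()
// evaluates to (1 << 3), i.e. the mask for pin 3 of port B.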
thePort.Direction = atl::Output;
while (1)
{
thePort.Write(pinMask);
_delay_ms(1000);
thePort.Clear();
_delay_ms(1000);
}
}
|
apache-2.0
|
sdgdsffdsfff/tddl
|
tddl-executor/src/main/java/com/taobao/tddl/executor/function/scalar/Not.java
|
901
|
package com.taobao.tddl.executor.function.scalar;
import com.taobao.tddl.common.exception.NotSupportException;
import com.taobao.tddl.executor.function.ScalarFunction;
import com.taobao.tddl.optimizer.core.expression.ISelectable.DATA_TYPE;
/**
* @since 5.1.0
*/
public class Not extends ScalarFunction {
public void compute(Object[] args) {
result = this.computeInner(args);
}
public DATA_TYPE getReturnType() {
return DATA_TYPE.BOOLEAN_VAL;
}
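// SQL-style NOT: a numeric argument yields 1 when it equals zero and 0
// otherwise; a boolean argument is simply negated.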
private Comparable computeInner(Object[] args) {
if (args[0] instanceof Number) {
return ((Number) args[0]).longValue() == 0 ? 1 : 0;
}
if (args[0] instanceof Boolean) {
return !((Boolean) args[0]);
}
throw new NotSupportException("Not Function");
}
}
|
apache-2.0
|
canalplus/rx-player
|
src/compat/is_offline.ts
|
1766
|
/**
* Copyright 2015 CANAL+ Group
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Some browsers have a builtin API to know if it's connected at least to a
* LAN network, at most to the internet.
*
* /!\ This feature can be dangerous as you can both have false positives and
* false negatives.
*
* False positives:
* - you can still play local contents (on localhost) if isOffline == true
* - on some browsers isOffline might be true even if we're connected to a LAN
* or a router (it would mean we're just not able to connect to the
* Internet). So we can eventually play LAN contents if isOffline == true
*
* False negatives:
* - in some cases, we even might have isOffline at false when we do not have
* any connection:
* - in browsers that do not support the feature
* - in browsers running in some virtualization softwares where the
* network adapters are always connected.
*
* Use with these cases in mind.
* @returns {Boolean}
*/
export default function isOffline() : boolean {
/* eslint-disable @typescript-eslint/no-unnecessary-boolean-literal-compare */
return navigator.onLine === false;
/* eslint-enable @typescript-eslint/no-unnecessary-boolean-literal-compare */
}
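// Illustrative use, given the caveats above (handleOffline is hypothetical):
// if (isOffline()) { handleOffline(); } // treat the result as a hint, not a guarantee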
|
apache-2.0
|
OpenUniversity/ovirt-engine
|
frontend/webadmin/modules/uicommonweb/src/main/java/org/ovirt/engine/ui/uicommonweb/models/vms/ImportEntityData.java
|
2208
|
package org.ovirt.engine.ui.uicommonweb.models.vms;
import java.util.ArrayList;
import java.util.List;
import org.ovirt.engine.core.common.businessentities.ArchitectureType;
import org.ovirt.engine.core.common.businessentities.Cluster;
import org.ovirt.engine.core.common.businessentities.Quota;
import org.ovirt.engine.ui.uicommonweb.models.EntityModel;
import org.ovirt.engine.ui.uicommonweb.models.ListModel;
public abstract class ImportEntityData<E> extends EntityModel<E> {
boolean isExistsInSystem;
private EntityModel<Boolean> clone;
private ListModel<Cluster> cluster;
private ListModel<Quota> clusterQuota;
public ImportEntityData() {
setClone(new EntityModel<>(false));
setCluster(new ListModel<Cluster>());
setClusterQuota(new ListModel<Quota>());
}
public boolean isExistsInSystem() {
return isExistsInSystem;
}
public void setExistsInSystem(boolean isExistsInSystem) {
this.isExistsInSystem = isExistsInSystem;
}
public EntityModel<Boolean> getClone() {
return clone;
}
public void setClone(EntityModel<Boolean> clone) {
this.clone = clone;
}
public ListModel<Cluster> getCluster() {
return cluster;
}
public void setCluster(ListModel<Cluster> cluster) {
this.cluster = cluster;
}
public ListModel<Quota> getClusterQuota() {
return clusterQuota;
}
public void setClusterQuota(ListModel<Quota> clusterQuota) {
this.clusterQuota = clusterQuota;
}
public void selectClusterByName(String name) {
for (Cluster cluster : getCluster().getItems()) {
if (cluster.getName().equals(name)) {
getCluster().setSelectedItem(cluster);
break;
}
}
}
public List<String> getClusterNames() {
List<String> names = new ArrayList<>();
if (getCluster().getItems() != null) {
for (Cluster cluster : getCluster().getItems()) {
names.add(cluster.getName());
}
}
return names;
}
public abstract ArchitectureType getArchType();
public abstract String getName();
}
|
apache-2.0
|
ml4j/ml4j-impl
|
ml4j-base-impl/src/main/java/org/ml4j/nn/axons/AxonsGradientImpl.java
|
2374
|
/*
* Copyright 2017 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.ml4j.nn.axons;
import org.ml4j.Matrix;
public class AxonsGradientImpl implements AxonsGradient {
private TrainableAxons<?, ?, ?> axons;
private Matrix weightsGradient;
private Matrix leftToRightBiasGradient;
private Matrix rightToLeftBiasGradient;
/**
* @param axons The TrainableAxons that generated this
* gradient.
* @param weightsGradient The weights gradient.
* @param leftToRightBiasGradient Left to right bias gradient.
*/
public AxonsGradientImpl(TrainableAxons<?, ?, ?> axons, Matrix weightsGradient, Matrix leftToRightBiasGradient) {
super();
this.axons = axons;
this.weightsGradient = weightsGradient;
this.leftToRightBiasGradient = leftToRightBiasGradient;
}
/**
* @param axons The TrainableAxons that generated this
* gradient.
* @param weightsGradient The weights gradient.
* @param leftToRightBiasGradient Left to right bias gradient.
* @param rightToLeftBiasGradient Right to left bias gradient.
*/
public AxonsGradientImpl(TrainableAxons<?, ?, ?> axons, Matrix weightsGradient, Matrix leftToRightBiasGradient,
Matrix rightToLeftBiasGradient) {
super();
this.axons = axons;
this.weightsGradient = weightsGradient;
this.leftToRightBiasGradient = leftToRightBiasGradient;
this.rightToLeftBiasGradient = rightToLeftBiasGradient;
}
public TrainableAxons<?, ?, ?> getAxons() {
return axons;
}
@Override
public Matrix getWeightsGradient() {
return weightsGradient;
}
@Override
public Matrix getLeftToRightBiasGradient() {
return leftToRightBiasGradient;
}
@Override
public Matrix getRightToLeftBiasGradient() {
return rightToLeftBiasGradient;
}
}
|
apache-2.0
|
OpenUniversity/ovirt-engine
|
backend/manager/modules/bll/src/test/java/org/ovirt/engine/core/bll/common/predicates/RunningVmPredicateTest.java
|
1006
|
package org.ovirt.engine.core.bll.common.predicates;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertThat;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;
import org.ovirt.engine.core.common.businessentities.VM;
@RunWith(MockitoJUnitRunner.class)
public class RunningVmPredicateTest {
@Mock
private VM mockVm;
@Test
public void testApplyPositive() {
doTest(true);
}
@Test
public void testApplyNegative() {
doTest(false);
}
private void doTest(Boolean expectedResult) {
final RunningVmPredicate underTest = new RunningVmPredicate();
when(mockVm.isRunning()).thenReturn(expectedResult);
final boolean actual = underTest.test(mockVm);
verify(mockVm).isRunning();
assertThat(actual, is(expectedResult));
}
}
|
apache-2.0
|
devetude/BOJ-PSJ
|
src/boj_submitter/views/MainDialog.java
|
1621
|
package boj_submitter.views;
import java.awt.Dialog;
import java.awt.Dimension;
import java.awt.Font;
import java.awt.Frame;
import java.awt.Label;
import java.awt.Toolkit;
import java.awt.event.WindowEvent;
import java.awt.event.WindowAdapter;
/**
* Main dialog class
*
* @author devetude
*/
public class MainDialog extends Dialog {
private static final int WIDTH = 300;
private static final int HEIGHT = 100;
private static final String DIALOG_TITLE = "Notice";
/**
* Constructor
*
* @param owner
* @param msg
*/
public MainDialog(Frame owner, String msg) {
super(owner);
this.setTitle(DIALOG_TITLE);
this.setResizable(false);
this.setLayout(null);
Dimension windowSize = Toolkit.getDefaultToolkit().getScreenSize();
this.setBounds((windowSize.width - WIDTH) / 2, (windowSize.height - HEIGHT) / 2, WIDTH, HEIGHT);
Label msgLabel = new Label(msg);
msgLabel.setBounds(25, 50, 250, 20);
msgLabel.setFont(new Font(null, Font.PLAIN, 18));
msgLabel.setAlignment(Label.CENTER);
this.add(msgLabel);
this.addWindowListener(new WindowAdapter() {
@Override
public void windowClosing(WindowEvent e) {
dispose();
}
});
this.setVisible(true);
}
}
|
apache-2.0
|
dbondarchuk/MailWebCient
|
GmailWebClient/Scripts/internal/app.js
|
39
|
app = angular.module('mailApp', []);
|
apache-2.0
|
rackerlabs/finding_dory
|
dory/openstack/common/gettextutils.py
|
18030
|
# Copyright 2012 Red Hat, Inc.
# Copyright 2013 IBM Corp.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
gettext for openstack-common modules.
Usual usage in an openstack.common module:
from dory.openstack.common.gettextutils import _
"""
import copy
import gettext
import locale
from logging import handlers
import os
from babel import localedata
import six
_AVAILABLE_LANGUAGES = {}
# FIXME(dhellmann): Remove this when moving to oslo.i18n.
USE_LAZY = False
class TranslatorFactory(object):
"""Create translator functions
"""
def __init__(self, domain, localedir=None):
"""Establish a set of translation functions for the domain.
:param domain: Name of translation domain,
specifying a message catalog.
:type domain: str
:param localedir: Directory with translation catalogs.
:type localedir: str
"""
self.domain = domain
if localedir is None:
localedir = os.environ.get(domain.upper() + '_LOCALEDIR')
self.localedir = localedir
def _make_translation_func(self, domain=None):
"""Return a new translation function ready for use.
Takes into account whether or not lazy translation is being
done.
The domain can be specified to override the default from the
factory, but the localedir from the factory is always used
because we assume the log-level translation catalogs are
installed in the same directory as the main application
catalog.
"""
if domain is None:
domain = self.domain
t = gettext.translation(domain,
localedir=self.localedir,
fallback=True)
# Use the appropriate method of the translation object based
# on the python version.
m = t.gettext if six.PY3 else t.ugettext
def f(msg):
"""oslo.i18n.gettextutils translation function."""
if USE_LAZY:
return Message(msg, domain=domain)
return m(msg)
return f
@property
def primary(self):
"The default translation function."
return self._make_translation_func()
def _make_log_translation_func(self, level):
return self._make_translation_func(self.domain + '-log-' + level)
@property
def log_info(self):
"Translate info-level log messages."
return self._make_log_translation_func('info')
@property
def log_warning(self):
"Translate warning-level log messages."
return self._make_log_translation_func('warning')
@property
def log_error(self):
"Translate error-level log messages."
return self._make_log_translation_func('error')
@property
def log_critical(self):
"Translate critical-level log messages."
return self._make_log_translation_func('critical')
# NOTE(dhellmann): When this module moves out of the incubator into
# oslo.i18n, these global variables can be moved to an integration
# module within each application.
# Create the global translation functions.
_translators = TranslatorFactory('dory')
# The primary translation function using the well-known name "_"
_ = _translators.primary
# Translators for log levels.
#
# The abbreviated names are meant to reflect the usual use of a short
# name like '_'. The "L" is for "log" and the other letter comes from
# the level.
_LI = _translators.log_info
_LW = _translators.log_warning
_LE = _translators.log_error
_LC = _translators.log_critical
# NOTE(dhellmann): End of globals that will move to the application's
# integration module.
def enable_lazy():
"""Convenience function for configuring _() to use lazy gettext
Call this at the start of execution to enable the gettextutils._
function to use lazy gettext functionality. This is useful if
your project is importing _ directly instead of using the
gettextutils.install() way of importing the _ function.
"""
global USE_LAZY
USE_LAZY = True
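# Illustrative call order (not from the original module): an application entry
# point would opt in before any messages are built, e.g.:
#
#     from dory.openstack.common import gettextutils
#     gettextutils.enable_lazy()
#     gettextutils.install('dory')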
def install(domain):
"""Install a _() function using the given translation domain.
Given a translation domain, install a _() function using gettext's
install() function.
The main difference from gettext.install() is that we allow
overriding the default localedir (e.g. /usr/share/locale) using
a translation-domain-specific environment variable (e.g.
NOVA_LOCALEDIR).
Note that to enable lazy translation, enable_lazy must be
called.
:param domain: the translation domain
"""
from six import moves
tf = TranslatorFactory(domain)
moves.builtins.__dict__['_'] = tf.primary
class Message(six.text_type):
"""A Message object is a unicode object that can be translated.
Translation of Message is done explicitly using the translate() method.
For all non-translation intents and purposes, a Message is simply unicode,
and can be treated as such.
"""
def __new__(cls, msgid, msgtext=None, params=None,
domain='dory', *args):
"""Create a new Message object.
In order for translation to work gettext requires a message ID, this
msgid will be used as the base unicode text. It is also possible
for the msgid and the base unicode text to be different by passing
the msgtext parameter.
"""
# If the base msgtext is not given, we use the default translation
# of the msgid (which is in English) just in case the system locale is
# not English, so that the base text will be in that locale by default.
if not msgtext:
msgtext = Message._translate_msgid(msgid, domain)
# We want to initialize the parent unicode with the actual object that
# would have been plain unicode if 'Message' was not enabled.
msg = super(Message, cls).__new__(cls, msgtext)
msg.msgid = msgid
msg.domain = domain
msg.params = params
return msg
def translate(self, desired_locale=None):
"""Translate this message to the desired locale.
:param desired_locale: The desired locale to translate the message to,
if no locale is provided the message will be
translated to the system's default locale.
:returns: the translated message in unicode
"""
translated_message = Message._translate_msgid(self.msgid,
self.domain,
desired_locale)
if self.params is None:
# No need for more translation
return translated_message
# This Message object may have been formatted with one or more
# Message objects as substitution arguments, given either as a single
# argument, part of a tuple, or as one or more values in a dictionary.
# When translating this Message we need to translate those Messages too
translated_params = _translate_args(self.params, desired_locale)
translated_message = translated_message % translated_params
return translated_message
@staticmethod
def _translate_msgid(msgid, domain, desired_locale=None):
if not desired_locale:
system_locale = locale.getdefaultlocale()
# If the system locale is not available to the runtime use English
if not system_locale[0]:
desired_locale = 'en_US'
else:
desired_locale = system_locale[0]
locale_dir = os.environ.get(domain.upper() + '_LOCALEDIR')
lang = gettext.translation(domain,
localedir=locale_dir,
languages=[desired_locale],
fallback=True)
if six.PY3:
translator = lang.gettext
else:
translator = lang.ugettext
translated_message = translator(msgid)
return translated_message
def __mod__(self, other):
# When we mod a Message we want the actual operation to be performed
# by the parent class (i.e. unicode()), the only thing we do here is
# save the original msgid and the parameters in case of a translation
params = self._sanitize_mod_params(other)
unicode_mod = super(Message, self).__mod__(params)
modded = Message(self.msgid,
msgtext=unicode_mod,
params=params,
domain=self.domain)
return modded
def _sanitize_mod_params(self, other):
"""Sanitize the object being modded with this Message.
- Add support for modding 'None' so translation supports it
- Trim the modded object, which can be a large dictionary, to only
those keys that would actually be used in a translation
- Snapshot the object being modded, in case the message is
translated, it will be used as it was when the Message was created
"""
if other is None:
params = (other,)
elif isinstance(other, dict):
# Merge the dictionaries
# Copy each item in case one does not support deep copy.
params = {}
if isinstance(self.params, dict):
for key, val in self.params.items():
params[key] = self._copy_param(val)
for key, val in other.items():
params[key] = self._copy_param(val)
else:
params = self._copy_param(other)
return params
def _copy_param(self, param):
try:
return copy.deepcopy(param)
except Exception:
# Fallback to casting to unicode this will handle the
# python code-like objects that can't be deep-copied
return six.text_type(param)
def __add__(self, other):
msg = _('Message objects do not support addition.')
raise TypeError(msg)
def __radd__(self, other):
return self.__add__(other)
if six.PY2:
def __str__(self):
# NOTE(luisg): Logging in python 2.6 tries to str() log records,
# and it expects specifically a UnicodeError in order to proceed.
msg = _('Message objects do not support str() because they may '
'contain non-ascii characters. '
'Please use unicode() or translate() instead.')
raise UnicodeError(msg)
def get_available_languages(domain):
"""Lists the available languages for the given translation domain.
:param domain: the domain to get languages for
"""
if domain in _AVAILABLE_LANGUAGES:
return copy.copy(_AVAILABLE_LANGUAGES[domain])
localedir = '%s_LOCALEDIR' % domain.upper()
find = lambda x: gettext.find(domain,
localedir=os.environ.get(localedir),
languages=[x])
# NOTE(mrodden): en_US should always be available (and first in case
# order matters) since our in-line message strings are en_US
language_list = ['en_US']
# NOTE(luisg): Babel <1.0 used a function called list(), which was
# renamed to locale_identifiers() in >=1.0, the requirements master list
# requires >=0.9.6, uncapped, so defensively work with both. We can remove
# this check when the master list updates to >=1.0, and update all projects
list_identifiers = (getattr(localedata, 'list', None) or
getattr(localedata, 'locale_identifiers'))
locale_identifiers = list_identifiers()
for i in locale_identifiers:
if find(i) is not None:
language_list.append(i)
# NOTE(luisg): Babel>=1.0,<1.3 has a bug where some OpenStack supported
# locales (e.g. 'zh_CN', and 'zh_TW') aren't supported even though they
# are perfectly legitimate locales:
# https://github.com/mitsuhiko/babel/issues/37
# In Babel 1.3 they fixed the bug and they support these locales, but
# they are still not explicitly "listed" by locale_identifiers().
# That is why we add the locales here explicitly if necessary so that
# they are listed as supported.
aliases = {'zh': 'zh_CN',
'zh_Hant_HK': 'zh_HK',
'zh_Hant': 'zh_TW',
'fil': 'tl_PH'}
for (locale_, alias) in six.iteritems(aliases):
if locale_ in language_list and alias not in language_list:
language_list.append(alias)
_AVAILABLE_LANGUAGES[domain] = language_list
return copy.copy(language_list)
def translate(obj, desired_locale=None):
"""Gets the translated unicode representation of the given object.
If the object is not translatable it is returned as-is.
If the locale is None the object is translated to the system locale.
:param obj: the object to translate
:param desired_locale: the locale to translate the message to, if None the
default system locale will be used
:returns: the translated object in unicode, or the original object if
it could not be translated
"""
message = obj
if not isinstance(message, Message):
# If the object to translate is not already translatable,
# let's first get its unicode representation
message = six.text_type(obj)
if isinstance(message, Message):
# Even after unicoding() we still need to check if we are
# running with translatable unicode before translating
return message.translate(desired_locale)
return obj
def _translate_args(args, desired_locale=None):
"""Translates all the translatable elements of the given arguments object.
This method is used for translating the translatable values in method
arguments which include values of tuples or dictionaries.
If the object is not a tuple or a dictionary the object itself is
translated if it is translatable.
If the locale is None the object is translated to the system locale.
:param args: the args to translate
:param desired_locale: the locale to translate the args to, if None the
default system locale will be used
:returns: a new args object with the translated contents of the original
"""
if isinstance(args, tuple):
return tuple(translate(v, desired_locale) for v in args)
if isinstance(args, dict):
translated_dict = {}
for (k, v) in six.iteritems(args):
translated_v = translate(v, desired_locale)
translated_dict[k] = translated_v
return translated_dict
return translate(args, desired_locale)
class TranslationHandler(handlers.MemoryHandler):
"""Handler that translates records before logging them.
The TranslationHandler takes a locale and a target logging.Handler object
to forward LogRecord objects to after translating them. This handler
depends on Message objects being logged, instead of regular strings.
The handler can be configured declaratively in the logging.conf as follows:
[handlers]
keys = translatedlog, translator
[handler_translatedlog]
class = handlers.WatchedFileHandler
args = ('/var/log/api-localized.log',)
formatter = context
[handler_translator]
class = openstack.common.log.TranslationHandler
target = translatedlog
args = ('zh_CN',)
If the specified locale is not available in the system, the handler will
log in the default locale.
"""
def __init__(self, locale=None, target=None):
"""Initialize a TranslationHandler
:param locale: locale to use for translating messages
:param target: logging.Handler object to forward
LogRecord objects to after translation
"""
# NOTE(luisg): In order to allow this handler to be a wrapper for
# other handlers, such as a FileHandler, and still be able to
# configure it using logging.conf, this handler has to extend
# MemoryHandler because only the MemoryHandlers' logging.conf
# parsing is implemented such that it accepts a target handler.
handlers.MemoryHandler.__init__(self, capacity=0, target=target)
self.locale = locale
def setFormatter(self, fmt):
self.target.setFormatter(fmt)
def emit(self, record):
# We save the message from the original record to restore it
# after translation, so other handlers are not affected by this
original_msg = record.msg
original_args = record.args
try:
self._translate_and_log_record(record)
finally:
record.msg = original_msg
record.args = original_args
def _translate_and_log_record(self, record):
record.msg = translate(record.msg, self.locale)
# In addition to translating the message, we also need to translate
# arguments that were passed to the log method that were not part
# of the main message e.g., log.info(_('Some message %s'), this_one))
record.args = _translate_args(record.args, self.locale)
self.target.emit(record)
|
apache-2.0
|
reportportal/service-api
|
src/main/java/com/epam/ta/reportportal/core/imprt/impl/ImportStrategy.java
|
1162
|
/*
* Copyright 2019 EPAM Systems
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.epam.ta.reportportal.core.imprt.impl;
import com.epam.ta.reportportal.commons.ReportPortalUser;
import java.io.File;
/**
* Handler for processing launch importing.
*
* @author Pavel_Bortnik
*/
public interface ImportStrategy {
/**
* Processing launch importing.
*
* @param projectDetails project
* @param user user
* @param file zip file that contains xml test reports
* @param baseUrl base url
* @return launch uuid
*/
String importLaunch(ReportPortalUser.ProjectDetails projectDetails, ReportPortalUser user, File file, String baseUrl);
}
|
apache-2.0
|
sysdevone/gab-cmdline
|
src/main/java/com/gabstudios/cmdline/UnsupportedException.java
|
2332
|
/*****************************************************************************************
*
* Copyright 2016 Gregory Brown. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*
*****************************************************************************************
*/
package com.gabstudios.cmdline;
import java.util.ArrayList;
import java.util.List;
/**
* An exception that is used if an action is not supported.
*
*
* @author Gregory Brown (sysdevone)
*
*/
public class UnsupportedException extends RuntimeException
{
/**
* Serialized version number.
*/
private static final long serialVersionUID = 2473729829921263263L;
/**
* Holds possible suggestions to return to the user.
*/
private List<String> _suggestionList;
/**
* Constructor that takes a message.
*
* @param message
* A <code>String</code> message.
*/
protected UnsupportedException(final String message)
{
super(message);
}
/**
* Constructor that takes a message.
*
* @param message
* A <code>String</code> message.
* @param suggestionList
* A <code>List</code> of possible suggestions to return to the
* user if the command was misspelled
*/
public UnsupportedException(final String message,
final List<String> suggestionList)
{
super(message);
this._suggestionList = new ArrayList<String>(suggestionList);
}
/**
* Gets a <code>List</code> of suggestion alternatives.
*
* @return A <code>List</code> instance containing zero to many
* <code>String</code>instances.
*/
public List<String> getSuggestionList()
{
return (this._suggestionList);
}
}
|
apache-2.0
|
mezz64/home-assistant
|
homeassistant/components/envirophat/sensor.py
|
8627
|
"""Support for Enviro pHAT sensors."""
from __future__ import annotations
from datetime import timedelta
import importlib
import logging
import voluptuous as vol
from homeassistant.components.sensor import (
PLATFORM_SCHEMA,
SensorDeviceClass,
SensorEntity,
SensorEntityDescription,
)
from homeassistant.const import (
CONF_DISPLAY_OPTIONS,
CONF_NAME,
ELECTRIC_POTENTIAL_VOLT,
PRESSURE_HPA,
TEMP_CELSIUS,
)
from homeassistant.core import HomeAssistant
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from homeassistant.util import Throttle
_LOGGER = logging.getLogger(__name__)
DEFAULT_NAME = "envirophat"
CONF_USE_LEDS = "use_leds"
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=60)
SENSOR_TYPES: tuple[SensorEntityDescription, ...] = (
SensorEntityDescription(
key="light",
name="light",
icon="mdi:weather-sunny",
),
SensorEntityDescription(
key="light_red",
name="light_red",
icon="mdi:invert-colors",
),
SensorEntityDescription(
key="light_green",
name="light_green",
icon="mdi:invert-colors",
),
SensorEntityDescription(
key="light_blue",
name="light_blue",
icon="mdi:invert-colors",
),
SensorEntityDescription(
key="accelerometer_x",
name="accelerometer_x",
native_unit_of_measurement="G",
icon="mdi:earth",
),
SensorEntityDescription(
key="accelerometer_y",
name="accelerometer_y",
native_unit_of_measurement="G",
icon="mdi:earth",
),
SensorEntityDescription(
key="accelerometer_z",
name="accelerometer_z",
native_unit_of_measurement="G",
icon="mdi:earth",
),
SensorEntityDescription(
key="magnetometer_x",
name="magnetometer_x",
icon="mdi:magnet",
),
SensorEntityDescription(
key="magnetometer_y",
name="magnetometer_y",
icon="mdi:magnet",
),
SensorEntityDescription(
key="magnetometer_z",
name="magnetometer_z",
icon="mdi:magnet",
),
SensorEntityDescription(
key="temperature",
name="temperature",
native_unit_of_measurement=TEMP_CELSIUS,
device_class=SensorDeviceClass.TEMPERATURE,
),
SensorEntityDescription(
key="pressure",
name="pressure",
native_unit_of_measurement=PRESSURE_HPA,
icon="mdi:gauge",
),
SensorEntityDescription(
key="voltage_0",
name="voltage_0",
native_unit_of_measurement=ELECTRIC_POTENTIAL_VOLT,
icon="mdi:flash",
),
SensorEntityDescription(
key="voltage_1",
name="voltage_1",
native_unit_of_measurement=ELECTRIC_POTENTIAL_VOLT,
icon="mdi:flash",
),
SensorEntityDescription(
key="voltage_2",
name="voltage_2",
native_unit_of_measurement=ELECTRIC_POTENTIAL_VOLT,
icon="mdi:flash",
),
SensorEntityDescription(
key="voltage_3",
name="voltage_3",
native_unit_of_measurement=ELECTRIC_POTENTIAL_VOLT,
icon="mdi:flash",
),
)
SENSOR_KEYS: list[str] = [desc.key for desc in SENSOR_TYPES]
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_DISPLAY_OPTIONS, default=SENSOR_KEYS): [vol.In(SENSOR_KEYS)],
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_USE_LEDS, default=False): cv.boolean,
}
)
def setup_platform(
hass: HomeAssistant,
config: ConfigType,
add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up the Sense HAT sensor platform."""
try:
envirophat = importlib.import_module("envirophat")
except OSError:
_LOGGER.error("No Enviro pHAT was found")
return
data = EnvirophatData(envirophat, config.get(CONF_USE_LEDS))
display_options = config[CONF_DISPLAY_OPTIONS]
entities = [
EnvirophatSensor(data, description)
for description in SENSOR_TYPES
if description.key in display_options
]
add_entities(entities, True)
class EnvirophatSensor(SensorEntity):
"""Representation of an Enviro pHAT sensor."""
def __init__(self, data, description: SensorEntityDescription):
"""Initialize the sensor."""
self.entity_description = description
self.data = data
    def update(self):
        """Get the latest data and update the state."""
        self.data.update()
        # SENSOR_TYPES keys mirror the attribute names on EnvirophatData,
        # so the latest reading can be looked up directly by key.
        self._attr_native_value = getattr(self.data, self.entity_description.key)
class EnvirophatData:
"""Get the latest data and update."""
def __init__(self, envirophat, use_leds):
"""Initialize the data object."""
self.envirophat = envirophat
self.use_leds = use_leds
# sensors readings
self.light = None
self.light_red = None
self.light_green = None
self.light_blue = None
self.accelerometer_x = None
self.accelerometer_y = None
self.accelerometer_z = None
self.magnetometer_x = None
self.magnetometer_y = None
self.magnetometer_z = None
self.temperature = None
self.pressure = None
self.voltage_0 = None
self.voltage_1 = None
self.voltage_2 = None
self.voltage_3 = None
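    # Throttle caps hardware reads at one call per MIN_TIME_BETWEEN_UPDATES
    # (60 s); update() calls made sooner reuse the cached readings above.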
@Throttle(MIN_TIME_BETWEEN_UPDATES)
def update(self):
"""Get the latest data from Enviro pHAT."""
# Light sensor reading: 16-bit integer
self.light = self.envirophat.light.light()
if self.use_leds:
self.envirophat.leds.on()
# the three color values scaled against the overall light, 0-255
self.light_red, self.light_green, self.light_blue = self.envirophat.light.rgb()
if self.use_leds:
self.envirophat.leds.off()
# accelerometer readings in G
(
self.accelerometer_x,
self.accelerometer_y,
self.accelerometer_z,
) = self.envirophat.motion.accelerometer()
# raw magnetometer reading
(
self.magnetometer_x,
self.magnetometer_y,
self.magnetometer_z,
) = self.envirophat.motion.magnetometer()
# temperature resolution of BMP280 sensor: 0.01°C
self.temperature = round(self.envirophat.weather.temperature(), 2)
# pressure resolution of BMP280 sensor: 0.16 Pa, rounding to 0.1 Pa
# with conversion to 100 Pa = 1 hPa
self.pressure = round(self.envirophat.weather.pressure() / 100.0, 3)
# Voltage sensor, reading between 0-3.3V
(
self.voltage_0,
self.voltage_1,
self.voltage_2,
self.voltage_3,
) = self.envirophat.analog.read_all()
|
apache-2.0
|
amitdhiman000/MyOffers
|
MyOffers/management/commands/preload.py
|
543
|
from django.core.management.base import (BaseCommand, CommandError)
from myadmin.backenddb import (insert_custom_areas, insert_default_categories)
class Command(BaseCommand):
    help = 'Preloads default areas and categories into the database'
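    # Invoked as `python manage.py preload`; Django derives the command
    # name from this module's file name.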
def add_arguments(self, parser):
pass
# parser.add_argument('poll_id', nargs='+', type=int)
def handle(self, *args, **options):
# load default areas
insert_custom_areas('Bangalore', 'Karnataka', 'India')
# load default categories
insert_default_categories()
|
apache-2.0
|
aws/aws-sdk-java
|
aws-java-sdk-appconfig/src/main/java/com/amazonaws/services/appconfig/model/transform/DeploymentSummaryJsonUnmarshaller.java
|
5349
|
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.appconfig.model.transform;
import java.math.*;
import javax.annotation.Generated;
import com.amazonaws.services.appconfig.model.*;
import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*;
import com.fasterxml.jackson.core.JsonToken;
import static com.fasterxml.jackson.core.JsonToken.*;
/**
* DeploymentSummary JSON Unmarshaller
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DeploymentSummaryJsonUnmarshaller implements Unmarshaller<DeploymentSummary, JsonUnmarshallerContext> {
public DeploymentSummary unmarshall(JsonUnmarshallerContext context) throws Exception {
DeploymentSummary deploymentSummary = new DeploymentSummary();
int originalDepth = context.getCurrentDepth();
String currentParentElement = context.getCurrentParentElement();
int targetDepth = originalDepth + 1;
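        // Fields of this object sit one JSON level below the object itself
        // (targetDepth); the loop below consumes tokens until the parser
        // climbs back to originalDepth, which closes this object.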
JsonToken token = context.getCurrentToken();
if (token == null)
token = context.nextToken();
if (token == VALUE_NULL) {
return null;
}
while (true) {
if (token == null)
break;
if (token == FIELD_NAME || token == START_OBJECT) {
if (context.testExpression("DeploymentNumber", targetDepth)) {
context.nextToken();
deploymentSummary.setDeploymentNumber(context.getUnmarshaller(Integer.class).unmarshall(context));
}
if (context.testExpression("ConfigurationName", targetDepth)) {
context.nextToken();
deploymentSummary.setConfigurationName(context.getUnmarshaller(String.class).unmarshall(context));
}
if (context.testExpression("ConfigurationVersion", targetDepth)) {
context.nextToken();
deploymentSummary.setConfigurationVersion(context.getUnmarshaller(String.class).unmarshall(context));
}
if (context.testExpression("DeploymentDurationInMinutes", targetDepth)) {
context.nextToken();
deploymentSummary.setDeploymentDurationInMinutes(context.getUnmarshaller(Integer.class).unmarshall(context));
}
if (context.testExpression("GrowthType", targetDepth)) {
context.nextToken();
deploymentSummary.setGrowthType(context.getUnmarshaller(String.class).unmarshall(context));
}
if (context.testExpression("GrowthFactor", targetDepth)) {
context.nextToken();
deploymentSummary.setGrowthFactor(context.getUnmarshaller(Float.class).unmarshall(context));
}
if (context.testExpression("FinalBakeTimeInMinutes", targetDepth)) {
context.nextToken();
deploymentSummary.setFinalBakeTimeInMinutes(context.getUnmarshaller(Integer.class).unmarshall(context));
}
if (context.testExpression("State", targetDepth)) {
context.nextToken();
deploymentSummary.setState(context.getUnmarshaller(String.class).unmarshall(context));
}
if (context.testExpression("PercentageComplete", targetDepth)) {
context.nextToken();
deploymentSummary.setPercentageComplete(context.getUnmarshaller(Float.class).unmarshall(context));
}
if (context.testExpression("StartedAt", targetDepth)) {
context.nextToken();
deploymentSummary.setStartedAt(DateJsonUnmarshallerFactory.getInstance("iso8601").unmarshall(context));
}
if (context.testExpression("CompletedAt", targetDepth)) {
context.nextToken();
deploymentSummary.setCompletedAt(DateJsonUnmarshallerFactory.getInstance("iso8601").unmarshall(context));
}
} else if (token == END_ARRAY || token == END_OBJECT) {
if (context.getLastParsedParentElement() == null || context.getLastParsedParentElement().equals(currentParentElement)) {
if (context.getCurrentDepth() <= originalDepth)
break;
}
}
token = context.nextToken();
}
return deploymentSummary;
}
private static DeploymentSummaryJsonUnmarshaller instance;
public static DeploymentSummaryJsonUnmarshaller getInstance() {
if (instance == null)
instance = new DeploymentSummaryJsonUnmarshaller();
return instance;
}
}
|
apache-2.0
|
opencord/voltha
|
voltha/adapters/openolt/openolt_flow_mgr.py
|
74670
|
#
# Copyright 2018 the original author or authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import copy
from twisted.internet import reactor
import grpc
from google.protobuf.json_format import MessageToDict
import hashlib
import ast
from simplejson import dumps
from voltha.protos.openflow_13_pb2 import OFPXMC_OPENFLOW_BASIC, \
ofp_flow_stats, OFPMT_OXM, Flows, FlowGroups, \
OFPXMT_OFB_VLAN_VID
from voltha.protos.device_pb2 import Port
import voltha.core.flow_decomposer as fd
from voltha.adapters.openolt.protos import openolt_pb2
from voltha.protos import tech_profile_pb2
from voltha.registry import registry
from common.tech_profile.tech_profile import Direction, TechProfile
# Flow categories
HSIA_FLOW = "HSIA_FLOW"
HSIA_TRANSPARENT = "HSIA_TRANSPARENT-{}"
DHCP_FLOW = "DHCP_FLOW"
EAPOL_FLOW = "EAPOL_FLOW"
LLDP_FLOW = "LLDP_FLOW"
EAP_ETH_TYPE = 0x888e
LLDP_ETH_TYPE = 0x88cc
IPV4_ETH_TYPE = 0x800
IPv6_ETH_TYPE = 0x86dd
IGMP_PROTO = 2
# FIXME - see also BRDCM_DEFAULT_VLAN in broadcom_onu.py
DEFAULT_MGMT_VLAN = 4091
RESERVED_VLAN = 4095
# Openolt Flow
UPSTREAM = "upstream"
DOWNSTREAM = "downstream"
PACKET_TAG_TYPE = "pkt_tag_type"
UNTAGGED = "untagged"
SINGLE_TAG = "single_tag"
DOUBLE_TAG = "double_tag"
# Classifier
ETH_TYPE = 'eth_type'
TPID = 'tpid'
IP_PROTO = 'ip_proto'
IN_PORT = 'in_port'
VLAN_VID = 'vlan_vid'
VLAN_PCP = 'vlan_pcp'
UDP_DST = 'udp_dst'
UDP_SRC = 'udp_src'
IPV4_DST = 'ipv4_dst'
IPV4_SRC = 'ipv4_src'
METADATA = 'metadata'
OUTPUT = 'output'
# Action
POP_VLAN = 'pop_vlan'
PUSH_VLAN = 'push_vlan'
TRAP_TO_HOST = 'trap_to_host'
class OpenOltFlowMgr(object):
def __init__(self, log, stub, device_id, logical_device_id,
platform, resource_mgr, data_model):
self.data_model = data_model
self.log = log
self.stub = stub
self.device_id = device_id
self.logical_device_id = logical_device_id
self.platform = platform
self.logical_flows_proxy = registry('core').get_proxy(
'/logical_devices/{}/flows'.format(self.logical_device_id))
self.flows_proxy = registry('core').get_proxy(
'/devices/{}/flows'.format(self.device_id))
self.root_proxy = registry('core').get_proxy('/')
self.resource_mgr = resource_mgr
self.tech_profile = dict()
self._populate_tech_profile_per_pon_port()
self.retry_add_flow_list = []
def update_logical_flows(self, flows_to_add, flows_to_remove,
device_rules_map):
try:
self.update_children_flows(device_rules_map)
except Exception as e:
self.log.error('Error updating children flows', error=e)
self.log.debug('logical flows update', flows_to_add=flows_to_add,
flows_to_remove=flows_to_remove)
for flow in flows_to_add:
try:
self.add_flow(flow)
except Exception as e:
self.log.error('failed to add flow', flow=flow, e=e)
for flow in flows_to_remove:
try:
self.remove_flow(flow)
except Exception as e:
self.log.error('failed to remove flow', flow=flow, e=e)
self.repush_all_different_flows()
def add_flow(self, flow):
self.log.debug('add flow', flow=flow)
classifier_info = dict()
action_info = dict()
us_meter_id = None
ds_meter_id = None
for field in fd.get_ofb_fields(flow):
if field.type == fd.ETH_TYPE:
classifier_info[ETH_TYPE] = field.eth_type
self.log.debug('field-type-eth-type',
eth_type=classifier_info[ETH_TYPE])
if classifier_info[ETH_TYPE] == IPv6_ETH_TYPE:
self.log.debug('Not handling IPv6 flows')
return
elif field.type == fd.IP_PROTO:
classifier_info[IP_PROTO] = field.ip_proto
self.log.debug('field-type-ip-proto',
ip_proto=classifier_info[IP_PROTO])
elif field.type == fd.IN_PORT:
classifier_info[IN_PORT] = field.port
self.log.debug('field-type-in-port',
in_port=classifier_info[IN_PORT])
elif field.type == fd.VLAN_VID:
classifier_info[VLAN_VID] = field.vlan_vid & 0xfff
self.log.debug('field-type-vlan-vid',
vlan=classifier_info[VLAN_VID])
elif field.type == fd.VLAN_PCP:
classifier_info[VLAN_PCP] = field.vlan_pcp
self.log.debug('field-type-vlan-pcp',
pcp=classifier_info[VLAN_PCP])
elif field.type == fd.UDP_DST:
classifier_info[UDP_DST] = field.udp_dst
self.log.debug('field-type-udp-dst',
udp_dst=classifier_info[UDP_DST])
elif field.type == fd.UDP_SRC:
classifier_info[UDP_SRC] = field.udp_src
self.log.debug('field-type-udp-src',
udp_src=classifier_info[UDP_SRC])
elif field.type == fd.IPV4_DST:
classifier_info[IPV4_DST] = field.ipv4_dst
self.log.debug('field-type-ipv4-dst',
ipv4_dst=classifier_info[IPV4_DST])
            elif field.type == fd.IPV4_SRC:
                classifier_info[IPV4_SRC] = field.ipv4_src
                self.log.debug('field-type-ipv4-src',
                               ipv4_src=classifier_info[IPV4_SRC])
elif field.type == fd.METADATA:
classifier_info[METADATA] = field.table_metadata
self.log.debug('field-type-metadata',
metadata=classifier_info[METADATA])
else:
raise NotImplementedError('field.type={}'.format(
field.type))
for action in fd.get_actions(flow):
if action.type == fd.OUTPUT:
action_info[OUTPUT] = action.output.port
self.log.debug('action-type-output',
output=action_info[OUTPUT],
in_port=classifier_info[IN_PORT])
elif action.type == fd.POP_VLAN:
if fd.get_goto_table_id(flow) is None:
self.log.debug('being taken care of by ONU', flow=flow)
return
action_info[POP_VLAN] = True
self.log.debug('action-type-pop-vlan',
in_port=classifier_info[IN_PORT])
elif action.type == fd.PUSH_VLAN:
action_info[PUSH_VLAN] = True
action_info[TPID] = action.push.ethertype
self.log.debug('action-type-push-vlan',
push_tpid=action_info[TPID],
in_port=classifier_info[IN_PORT])
if action.push.ethertype != 0x8100:
self.log.error('unhandled-tpid',
ethertype=action.push.ethertype)
elif action.type == fd.SET_FIELD:
# action_info['action_type'] = 'set_field'
_field = action.set_field.field.ofb_field
assert (action.set_field.field.oxm_class ==
OFPXMC_OPENFLOW_BASIC)
self.log.debug('action-type-set-field',
field=_field, in_port=classifier_info[IN_PORT])
if _field.type == fd.VLAN_VID:
self.log.debug('set-field-type-vlan-vid',
vlan_vid=_field.vlan_vid & 0xfff)
action_info[VLAN_VID] = (_field.vlan_vid & 0xfff)
elif _field.type == fd.VLAN_PCP:
self.log.debug('set-field-type-vlan-pcp',
vlan_pcp=_field.vlan_pcp & 0x7)
action_info[VLAN_PCP] = (_field.vlan_pcp & 0x7)
else:
self.log.error('unsupported-action-set-field-type',
field_type=_field.type)
else:
self.log.error('unsupported-action-type',
action_type=action.type,
in_port=classifier_info[IN_PORT])
if fd.get_goto_table_id(flow) is not None \
and POP_VLAN not in action_info:
self.log.debug('being taken care of by ONU', flow=flow)
return
flow_metadata = fd.get_metadata_from_write_metadata(flow)
if OUTPUT not in action_info and flow_metadata is not None:
# find flow in the next table
next_flow = self.find_next_flow(flow, flow_metadata)
if next_flow is None:
return
action_info[OUTPUT] = fd.get_out_port(next_flow)
for field in fd.get_ofb_fields(next_flow):
if field.type == fd.VLAN_VID:
classifier_info[METADATA] = field.vlan_vid & 0xfff
self.log.debug('flow-ports',
classifier_inport=classifier_info[IN_PORT],
action_output=action_info[OUTPUT])
(port_no, intf_id, onu_id, uni_id) \
= self.platform.extract_access_from_flow(
classifier_info[IN_PORT], action_info[OUTPUT])
# LLDP flow has nothing to do with any particular subscriber.
# So, lets not care about the Tech-profile, meters etc.
# Just add the flow and return.
if ETH_TYPE in classifier_info and \
classifier_info[ETH_TYPE] == LLDP_ETH_TYPE:
self.log.debug('lldp flow add')
self.add_lldp_flow(flow, port_no)
return
        if ETH_TYPE in classifier_info and \
                classifier_info[ETH_TYPE] == IPV4_ETH_TYPE and \
                IP_PROTO in classifier_info and \
                classifier_info[IP_PROTO] == IGMP_PROTO:
            self.log.debug('igmp flow add ignored, not implemented yet')
            return
if IP_PROTO in classifier_info and \
classifier_info[IP_PROTO] == 17 and \
UDP_SRC in classifier_info and \
classifier_info[UDP_SRC] == 67:
self.log.debug('trap-dhcp-from-nni-flow')
self.add_dhcp_trap_nni(flow, classifier_info, port_no,
network_intf_id=0)
return
# Metadata 8 bytes:
# Most Significant 2 Bytes = Inner VLAN
# Next 2 Bytes = Tech Profile ID(TPID)
# Least Significant 4 Bytes = Port ID
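        # Worked example (illustrative values): metadata
        # 0x0001_0040_0000_0100 decodes to inner VLAN 1, TP ID 64 and
        # port ID 256.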
# Flow METADATA carries Tech-Profile (TP) ID and is mandatory in all
# subscriber related flows.
# Note: If we are here, assert that the flow_metadata is not None
assert flow_metadata is not None
# Retrieve the TP-ID if one exists for the subscriber already
tp_id = self.resource_mgr.get_tech_profile_id_for_onu(intf_id, onu_id, uni_id)
if tp_id is not None:
            # Assert that the tp_id received in flow metadata is the same
            # as the tp_id in use.
            # TODO:
            # For now, tp_id updates require that we tear down the service
            # and re-provision it, i.e., dynamic TP updates are not supported.
assert tp_id == fd.get_tp_id_from_metadata(flow_metadata), \
"tp-updates-not-supported"
else:
tp_id = fd.get_tp_id_from_metadata(flow_metadata)
self.log.info("received-tp-id-from-flow", tp_id=tp_id)
if self.platform.is_upstream(action_info[OUTPUT]):
us_meter_id = fd.get_meter_id_from_flow(flow)
else:
ds_meter_id = fd.get_meter_id_from_flow(flow)
self.divide_and_add_flow(intf_id, onu_id, uni_id, port_no,
classifier_info, action_info, flow, tp_id, us_meter_id, ds_meter_id)
def _is_uni_port(self, port_no):
try:
port = self.data_model.get_logical_port(self.logical_device_id,
'uni-{}'.format(port_no))
if port is not None:
return (not port.root_port), port.device_id
else:
return False, None
except Exception as e:
self.log.debug("port-not-found", e=e)
return False, None
def _is_upstream_flow(self, port_no):
return self._is_uni_port(port_no)[0]
def _is_downstream_flow(self, port_no):
return not self._is_upstream_flow(port_no)
def _clear_flow_id_from_rm(self, flow, flow_id, flow_direction):
try:
pon_intf, onu_id, uni_id, eth_type \
= self.platform.flow_extract_info(flow, flow_direction)
except ValueError:
self.log.error("failure-extracting-flow-info")
return
else:
if eth_type == LLDP_ETH_TYPE:
network_intf_id = self.data_model.olt_nni_intf_id()
onu_id = -1
uni_id = -1
self.resource_mgr.free_flow_id(network_intf_id, onu_id, uni_id, flow_id)
return
flows = self.resource_mgr.get_flow_id_info(pon_intf, onu_id, uni_id, flow_id)
assert (isinstance(flows, list))
self.log.debug("retrieved-flows", flows=flows)
            for idx in range(len(flows)):
                if flow_direction == flows[idx]['flow_type']:
                    flows.pop(idx)
                    self.update_flow_info_to_kv_store(pon_intf, onu_id,
                                                      uni_id, flow_id, flows)
                    break
if len(flows) > 0:
# There are still flows referencing the same flow_id.
# So the flow should not be freed yet.
# For ex: Case of HSIA where same flow is shared
# between DS and US.
return
self.resource_mgr.free_flow_id(pon_intf, onu_id, uni_id, flow_id)
flow_list = self.resource_mgr.get_current_flow_ids(pon_intf, onu_id, uni_id)
if flow_list is None:
tp_id = self.resource_mgr.get_tech_profile_id_for_onu(pon_intf, onu_id, uni_id)
tp_instance = self.get_tech_profile_instance(pon_intf, onu_id, uni_id, tp_id)
self.log.info("all-flows-cleared-for-onu")
self.log.info("initiate-sched-queue-teardown")
self.remove_us_scheduler_queues(pon_intf, onu_id, uni_id, tp_instance)
self.remove_ds_scheduler_queues(pon_intf, onu_id, uni_id, tp_instance)
def retry_add_flow(self, flow):
self.log.debug("retry-add-flow")
if flow.id in self.retry_add_flow_list:
self.retry_add_flow_list.remove(flow.id)
self.add_flow(flow)
def remove_flow(self, flow):
        self.log.debug('trying to remove device flows for logical flow',
                       logical_flow=flow)
device_flows_to_remove = []
device_flows = self.flows_proxy.get('/').items
for f in device_flows:
if f.cookie == flow.id:
device_flows_to_remove.append(f)
for f in device_flows_to_remove:
(id, direction) = self.decode_stored_id(f.id)
flow_to_remove = openolt_pb2.Flow(flow_id=id, flow_type=direction)
try:
self.stub.FlowRemove(flow_to_remove)
except grpc.RpcError as grpc_e:
if grpc_e.code() == grpc.StatusCode.NOT_FOUND:
self.log.debug('This flow does not exist on the switch, '
'normal after an OLT reboot',
flow=flow_to_remove)
else:
raise grpc_e
# once we have successfully deleted the flow on the device
# release the flow_id on resource pool and also clear any
# data associated with the flow_id on KV store.
self._clear_flow_id_from_rm(f, id, direction)
self.log.debug('flow removed from device', flow=f,
flow_key=flow_to_remove)
if len(device_flows_to_remove) > 0:
new_flows = []
flows_ids_to_remove = [f.id for f in device_flows_to_remove]
for f in device_flows:
if f.id not in flows_ids_to_remove:
new_flows.append(f)
self.flows_proxy.update('/', Flows(items=new_flows))
self.log.debug('flows removed from the data store',
flow_ids_removed=flows_ids_to_remove,
number_of_flows_removed=(len(device_flows) - len(
new_flows)), expected_flows_removed=len(
device_flows_to_remove))
else:
self.log.debug('no device flow to remove for this flow (normal '
'for multi table flows)', flow=flow)
def get_tp_path(self, intf_id, ofp_port_name, techprofile_id):
return self.tech_profile[intf_id]. \
get_tp_path(techprofile_id,
ofp_port_name)
def delete_tech_profile_instance(self, intf_id, onu_id, uni_id,
ofp_port_name=None):
# Remove the TP instance associated with the ONU
if ofp_port_name is None:
ofp_port_name = self.data_model.serial_number(intf_id, onu_id)
tp_id = self.resource_mgr.get_tech_profile_id_for_onu(intf_id, onu_id,
uni_id)
tp_path = self.get_tp_path(intf_id, ofp_port_name, tp_id)
self.log.debug(" tp-path-in-delete", tp_path=tp_path)
return self.tech_profile[intf_id].delete_tech_profile_instance(tp_path)
def is_no_l2_modification_flow(self, classifier, action):
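        # "No L2 modification" (transparent) flows match at least in_port,
        # metadata and vlan_vid, and their single action is output, i.e.
        # the subscriber VLAN tags pass through the OLT untouched.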
no_l2_classifier_set = {IN_PORT, METADATA, VLAN_VID}
no_l2_action_set = {OUTPUT}
incoming_classifier_set = set(classifier.keys())
incoming_action_set = set(action.keys())
if no_l2_classifier_set.issubset(incoming_classifier_set) and \
no_l2_action_set.issubset(incoming_action_set) and \
len(incoming_action_set) == 1:
return True
return False
def divide_and_add_flow(self, intf_id, onu_id, uni_id, port_no, classifier,
action, flow, tp_id, us_meter_id, ds_meter_id):
self.log.debug('sorting flow', intf_id=intf_id, onu_id=onu_id,
uni_id=uni_id, port_no=port_no,
classifier=classifier, action=action,
tp_id=tp_id, us_meter=us_meter_id,
ds_meter=ds_meter_id)
tp_instance = self.get_tech_profile_instance(intf_id, onu_id, uni_id, tp_id)
if tp_instance is None:
self.log.error("flow-not-added--tp-instance-unavailable")
return
pon_intf_onu_id = (intf_id, onu_id, uni_id)
alloc_id = \
self.resource_mgr.get_current_alloc_ids_for_onu(pon_intf_onu_id)
gem_ports = \
self.resource_mgr.get_current_gemport_ids_for_onu(pon_intf_onu_id)
if alloc_id is None or gem_ports is None:
self.log.error("alloc-id-or-gem-ports-unavailable",
alloc_id=alloc_id, gem_ports=gem_ports)
return
self.create_us_scheduler_queues(intf_id, onu_id, uni_id, tp_instance, us_meter_id)
self.create_ds_scheduler_queues(intf_id, onu_id, uni_id, tp_instance, ds_meter_id)
self.log.debug('Generated required alloc and gemport ids',
alloc_id=alloc_id, gemports=gem_ports)
ds_gem_port_attr_list = tp_instance.downstream_gem_port_attribute_list
us_gem_port_attr_list = tp_instance.upstream_gem_port_attribute_list
kwargs = dict()
kwargs['intf_id'] = intf_id
kwargs['onu_id'] = onu_id
kwargs['uni_id'] = uni_id
kwargs['port_no'] = port_no
kwargs['classifier'] = classifier
kwargs['action'] = action
kwargs['logical_flow'] = flow
kwargs['alloc_id'] = alloc_id
if IP_PROTO in classifier:
if classifier[IP_PROTO] == 17:
self.log.debug('dhcp flow add')
if VLAN_PCP in classifier:
gemport_id = self._get_gem_port_for_pcp(
classifier[VLAN_PCP], us_gem_port_attr_list
)
self.add_dhcp_trap_uni(intf_id, onu_id, uni_id, port_no,
classifier, action, flow, alloc_id,
gemport_id)
else:
self._install_flow_on_all_gemports(self.add_dhcp_trap_uni,
kwargs,
us_gem_port_attr_list)
            elif classifier[IP_PROTO] == IGMP_PROTO:
self.log.warn('igmp flow add ignored, not implemented yet')
return
else:
self.log.warn("Invalid-Classifier-to-handle",
classifier=classifier,
action=action)
return
elif ETH_TYPE in classifier:
if classifier[ETH_TYPE] == EAP_ETH_TYPE:
self.log.debug('eapol flow add')
                vlan_id = classifier.get(VLAN_VID)
                if vlan_id is None:
                    vlan_id = DEFAULT_MGMT_VLAN
if VLAN_PCP in classifier:
gemport_id = self._get_gem_port_for_pcp(
classifier[VLAN_PCP], us_gem_port_attr_list
)
self.add_eapol_flow(
intf_id, onu_id, uni_id, port_no, flow, alloc_id, gemport_id,
vlan_id=vlan_id)
else:
kwargs['vlan_id'] = vlan_id
self._install_flow_on_all_gemports(self.add_eapol_flow,
kwargs,
us_gem_port_attr_list)
elif PUSH_VLAN in action:
if VLAN_PCP in classifier:
gemport_id = self._get_gem_port_for_pcp(
classifier[VLAN_PCP], us_gem_port_attr_list
)
self.add_upstream_data_flow(intf_id, onu_id, uni_id, port_no, classifier,
action, flow, alloc_id, gemport_id)
else:
self._install_flow_on_all_gemports(self.add_upstream_data_flow,
kwargs, us_gem_port_attr_list
)
elif POP_VLAN in action:
if VLAN_PCP in classifier:
gemport_id = self._get_gem_port_for_pcp(
classifier[VLAN_PCP], ds_gem_port_attr_list
)
self.add_downstream_data_flow(intf_id, onu_id, uni_id, port_no, classifier,
action, flow, alloc_id, gemport_id)
else:
self._install_flow_on_all_gemports(self.add_downstream_data_flow,
kwargs, ds_gem_port_attr_list
)
elif self.is_no_l2_modification_flow(classifier, action) and \
self._is_upstream_flow(classifier[IN_PORT]):
kwargs['is_l2_mod_flow'] = False
if VLAN_PCP in classifier:
kwargs['gemport_id'] = self._get_gem_port_for_pcp(
classifier[VLAN_PCP], us_gem_port_attr_list
)
self.add_upstream_data_flow(**kwargs)
else:
self._install_flow_on_all_gemports(self.add_upstream_data_flow,
kwargs, us_gem_port_attr_list
)
elif self.is_no_l2_modification_flow(classifier, action) and \
self._is_downstream_flow(classifier[IN_PORT]):
kwargs['is_l2_mod_flow'] = False
if VLAN_PCP in classifier:
kwargs['gemport_id'] = self._get_gem_port_for_pcp(
classifier[VLAN_PCP], ds_gem_port_attr_list
)
self.add_downstream_data_flow(**kwargs)
else:
self._install_flow_on_all_gemports(self.add_downstream_data_flow,
kwargs, ds_gem_port_attr_list
)
else:
self.log.debug('Invalid-flow-type-to-handle',
classifier=classifier,
action=action, flow=flow)
return
# Download tech-profile to ONU
self.download_tech_profile(intf_id, onu_id, uni_id)
def download_tech_profile(self, intf_id, onu_id, uni_id):
(ofp_port_name, ofp_port_no) = \
self.data_model.get_ofp_port_name(intf_id, onu_id, uni_id)
if ofp_port_name is None:
self.log.error("port-name-not-found")
return
tp_id = self.resource_mgr.get_tech_profile_id_for_onu(intf_id, onu_id, uni_id)
tp_path = self.get_tp_path(intf_id, ofp_port_name, tp_id)
self.log.debug('Load-tech-profile-request-to-brcm-handler',
tp_path=tp_path)
self.data_model.onu_download_tech_profile(
intf_id, onu_id, uni_id, tp_path)
def get_scheduler(self, tech_profile_instance, direction, meter_id):
if direction == Direction.UPSTREAM:
scheduler = tech_profile_instance.us_scheduler
elif direction == Direction.DOWNSTREAM:
scheduler = tech_profile_instance.ds_scheduler
else:
raise Exception("invalid-direction")
meter_band = self.data_model.meter_band(meter_id)
traffic_shaping_info = None
if meter_band is not None:
cir = meter_band.bands[0].rate
cbs = meter_band.bands[0].burst_size
eir = meter_band.bands[1].rate
ebs = meter_band.bands[1].burst_size
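            # Peak rate/burst is derived as committed + excess, following
            # the usual MEF-style two-rate bandwidth-profile convention.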
pir = cir + eir
pbs = cbs + ebs
traffic_shaping_info = tech_profile_pb2.TrafficShapingInfo(
cir=cir,
cbs=cbs,
pir=pir,
pbs=pbs
)
scheduler_config = tech_profile_pb2.SchedulerConfig(
direction=TechProfile.get_parameter(
'direction', scheduler.direction),
additional_bw=TechProfile.get_parameter(
'additional_bw', scheduler.additional_bw),
priority=scheduler.priority,
weight=scheduler.weight,
sched_policy=TechProfile.get_parameter(
'q_sched_policy', scheduler.q_sched_policy)
)
traffic_scheduler = tech_profile_pb2.TrafficScheduler(
direction=scheduler.direction,
scheduler=scheduler_config,
alloc_id=scheduler.alloc_id,
traffic_shaping_info=traffic_shaping_info
)
return traffic_scheduler
@staticmethod
def get_traffic_queues(tech_profile_instance, direction):
if direction == Direction.UPSTREAM:
gemport_attribute_list = tech_profile_instance. \
upstream_gem_port_attribute_list
tp_scheduler_direction = tech_profile_instance.us_scheduler.direction
elif direction == Direction.DOWNSTREAM:
gemport_attribute_list = tech_profile_instance. \
downstream_gem_port_attribute_list
tp_scheduler_direction = tech_profile_instance.ds_scheduler.direction
else:
raise Exception("invalid-direction")
traffic_queues = list()
for i in range(len(gemport_attribute_list)):
traffic_queues.append(tech_profile_pb2.TrafficQueue(
direction=TechProfile.get_parameter('direction',
tp_scheduler_direction),
gemport_id=gemport_attribute_list[i].gemport_id,
pbit_map=gemport_attribute_list[i].pbit_map,
aes_encryption=ast.literal_eval(gemport_attribute_list[i].
aes_encryption),
sched_policy=TechProfile.get_parameter(
'sched_policy', gemport_attribute_list[i].
scheduling_policy),
priority=gemport_attribute_list[i].priority_q,
weight=gemport_attribute_list[i].weight,
discard_policy=TechProfile.get_parameter(
'discard_policy', gemport_attribute_list[i].
discard_policy)))
return traffic_queues
def create_us_scheduler_queues(self, intf_id, onu_id, uni_id, tp_instance, us_meter_id):
if us_meter_id is None:
self.log.debug("us-meter-unavailable--no-action")
return
kv_store_meter_id = self.resource_mgr.get_meter_id_for_onu(UPSTREAM,
intf_id,
onu_id, uni_id)
        # Let's make a simple assumption that if the meter-id is present
        # on the KV store, then the scheduler and queue configuration has
        # already been applied on the OLT device in the given direction.
if kv_store_meter_id is not None:
# TODO: Dynamic meter update not supported for now
# TODO: The subscriber has to be un-provisioned and re-provisioned for meter update
assert kv_store_meter_id == us_meter_id
self.log.debug("scheduler-already-created-in-us")
return
traffic_sched = self.get_scheduler(tp_instance, Direction.UPSTREAM, us_meter_id)
try:
ofp_port_no = self.platform.mk_uni_port_num(intf_id,
onu_id, uni_id)
self.stub.CreateTrafficSchedulers(
tech_profile_pb2.TrafficSchedulers(
intf_id=intf_id,
onu_id=onu_id,
uni_id=uni_id,
port_no=ofp_port_no,
traffic_scheds=[traffic_sched]
))
except grpc.RpcError as grpc_e:
if grpc_e.code() == grpc.StatusCode.ALREADY_EXISTS:
self.log.warn("us-scheduler-already-exists")
else:
self.log.error("failure-to-create-us-scheduler")
return
        # On receiving the CreateTrafficQueues request, the driver creates
        # the corresponding upstream queues.
try:
self.stub.CreateTrafficQueues(
tech_profile_pb2.TrafficQueues(
intf_id=intf_id,
onu_id=onu_id,
uni_id=uni_id,
port_no=ofp_port_no,
traffic_queues=
OpenOltFlowMgr.get_traffic_queues(tp_instance, Direction.UPSTREAM)
))
except grpc.RpcError as grpc_e:
            if grpc_e.code() == grpc.StatusCode.ALREADY_EXISTS:
                self.log.warn("us-queues-already-exists")
            else:
                self.log.error("failure-to-create-us-queues")
return
        # After we successfully applied the scheduler configuration on the
        # OLT device, store the meter id on the KV store for further reference.
self.resource_mgr.update_meter_id_for_onu(UPSTREAM, intf_id, onu_id, uni_id, us_meter_id)
def create_ds_scheduler_queues(self, intf_id, onu_id, uni_id, tp_instance, ds_meter_id):
if ds_meter_id is None:
self.log.debug("ds-meter-unavailable--no-action")
return
kv_store_meter_id = self.resource_mgr.get_meter_id_for_onu(DOWNSTREAM,
intf_id,
onu_id, uni_id)
        # Let's make a simple assumption that if the meter-id is present
        # on the KV store, then the scheduler and queue configuration has
        # already been applied on the OLT device in the given direction.
if kv_store_meter_id is not None:
# TODO: Dynamic meter update not supported for now
# TODO: The subscriber has to be un-provisioned and re-provisioned for meter update
assert kv_store_meter_id == ds_meter_id
self.log.debug("scheduler-already-created-in-ds")
return
traffic_sched = self.get_scheduler(tp_instance, Direction.DOWNSTREAM, ds_meter_id)
_, ofp_port_no = self.data_model.get_ofp_port_name(intf_id, onu_id, uni_id)
try:
self.stub.CreateTrafficSchedulers(
tech_profile_pb2.TrafficSchedulers(
intf_id=intf_id,
onu_id=onu_id,
uni_id=uni_id,
port_no=ofp_port_no,
traffic_scheds=[traffic_sched]
))
except grpc.RpcError as grpc_e:
if grpc_e.code() == grpc.StatusCode.ALREADY_EXISTS:
self.log.warn("ds-scheduler-already-exists")
else:
self.log.error("failure-to-create-ds-scheduler")
return
# On receiving the CreateTrafficQueues request, the driver should create corresponding
# downstream queues.
try:
self.stub.CreateTrafficQueues(
tech_profile_pb2.TrafficQueues(
intf_id=intf_id,
onu_id=onu_id,
uni_id=uni_id,
port_no=ofp_port_no,
traffic_queues=
OpenOltFlowMgr.get_traffic_queues(tp_instance, Direction.DOWNSTREAM)
))
except grpc.RpcError as grpc_e:
if grpc_e.code() == grpc.StatusCode.ALREADY_EXISTS:
self.log.warn("ds-queues-already-exists")
else:
self.log.error("failure-to-create-ds-queues")
return
# After we successfully applied the scheduler configuration on the OLT device,
# store the meter id on the KV store, for further reference
self.resource_mgr.update_meter_id_for_onu(DOWNSTREAM, intf_id, onu_id, uni_id, ds_meter_id)
def remove_us_scheduler_queues(self, intf_id, onu_id, uni_id, tp_instance):
us_meter_id = self.resource_mgr.get_meter_id_for_onu(UPSTREAM,
intf_id,
onu_id, uni_id)
traffic_sched = self.get_scheduler(tp_instance, Direction.UPSTREAM, us_meter_id)
_, ofp_port_no = self.data_model.get_ofp_port_name(intf_id, onu_id, uni_id)
try:
self.stub.RemoveTrafficQueues(
tech_profile_pb2.TrafficQueues(
intf_id=intf_id,
onu_id=onu_id,
uni_id=uni_id,
port_no=ofp_port_no,
traffic_queues=
OpenOltFlowMgr.get_traffic_queues(tp_instance, Direction.UPSTREAM)
))
self.log.debug("removed-upstream-Queues")
except grpc.RpcError as e:
self.log.error("failure-to-remove-us-queues", e=e)
try:
self.stub.RemoveTrafficSchedulers(
tech_profile_pb2.TrafficSchedulers(
intf_id=intf_id,
onu_id=onu_id,
uni_id=uni_id,
port_no=ofp_port_no,
traffic_scheds=[traffic_sched]
))
self.log.debug("removed-upstream-Schedulers")
except grpc.RpcError as e:
self.log.error("failure-to-remove-us-scheduler", e=e)
self.resource_mgr.remove_meter_id_for_onu(UPSTREAM, intf_id, onu_id, uni_id)
def remove_ds_scheduler_queues(self, intf_id, onu_id, uni_id, tp_instance):
ds_meter_id = self.resource_mgr.get_meter_id_for_onu(DOWNSTREAM,
intf_id,
onu_id, uni_id)
traffic_sched = self.get_scheduler(tp_instance, Direction.DOWNSTREAM, ds_meter_id)
_, ofp_port_no = self.data_model.get_ofp_port_name(intf_id, onu_id, uni_id)
try:
self.stub.RemoveTrafficQueues(
tech_profile_pb2.TrafficQueues(
intf_id=intf_id,
onu_id=onu_id,
uni_id=uni_id,
port_no=ofp_port_no,
traffic_queues=
OpenOltFlowMgr.get_traffic_queues(tp_instance, Direction.DOWNSTREAM)
))
self.log.debug("removed-downstream-Queues")
        except grpc.RpcError as grpc_e:
            self.log.error("failure-to-remove-ds-queues", e=grpc_e)
try:
self.stub.RemoveTrafficSchedulers(
tech_profile_pb2.TrafficSchedulers(
intf_id=intf_id,
onu_id=onu_id,
uni_id=uni_id,
port_no=ofp_port_no,
traffic_scheds=[traffic_sched]
))
self.log.debug("removed-downstream-Schedulers")
        except grpc.RpcError as grpc_e:
            self.log.error("failure-to-remove-ds-scheduler", e=grpc_e)
self.resource_mgr.remove_meter_id_for_onu(DOWNSTREAM, intf_id, onu_id, uni_id)
def get_tech_profile_instance(self, intf_id, onu_id, uni_id, tp_id):
(ofp_port_name, ofp_port_no) \
= self.data_model.get_ofp_port_name(intf_id, onu_id, uni_id)
if ofp_port_name is None:
self.log.error("port-name-not-found")
return None
# Check tech profile instance already exists for derived port name
tech_profile_instance = self.tech_profile[intf_id]. \
get_tech_profile_instance(tp_id, ofp_port_name)
if tech_profile_instance is None:
# create tech profile instance
tech_profile_instance = self.tech_profile[intf_id]. \
create_tech_profile_instance(tp_id, ofp_port_name,
intf_id)
if tech_profile_instance is None:
raise Exception('Tech-profile-instance-creation-failed')
self.resource_mgr.update_tech_profile_id_for_onu(intf_id, onu_id,
uni_id, tp_id)
# Fetch alloc id and gemports from tech profile instance
alloc_id = tech_profile_instance.us_scheduler.alloc_id
gem_port_ids = []
for i in range(len(
tech_profile_instance.upstream_gem_port_attribute_list)):
gem_port_ids.append(
tech_profile_instance.upstream_gem_port_attribute_list[i].
gemport_id)
# Update the allocated alloc_id and gem_port_id for the ONU/UNI to KV
# store
pon_intf_onu_id = (intf_id, onu_id, uni_id)
self.resource_mgr.resource_mgrs[intf_id].update_alloc_ids_for_onu(
pon_intf_onu_id,
list([alloc_id])
)
self.resource_mgr.resource_mgrs[intf_id].update_gemport_ids_for_onu(
pon_intf_onu_id,
gem_port_ids
)
self.resource_mgr.update_gemports_ponport_to_onu_map_on_kv_store(
gem_port_ids, intf_id, onu_id, uni_id
)
for gemport_id in gem_port_ids:
self.data_model.gemport_id_add(intf_id, onu_id, gemport_id)
else:
self.log.debug(
'Tech-profile-instance-already-exist-for-given port-name',
ofp_port_name=ofp_port_name)
return tech_profile_instance
def get_alloc_id_gem_port(self, intf_id, onu_id):
pon_intf_onu_id = (intf_id, onu_id)
        # If we have already allocated alloc_id and gem_ports earlier, return them
alloc_id = \
self.resource_mgr.get_current_alloc_ids_for_onu(pon_intf_onu_id)
gem_port_ids = \
self.resource_mgr.get_current_gemport_ids_for_onu(pon_intf_onu_id)
return alloc_id, gem_port_ids
def add_upstream_data_flow(self, intf_id, onu_id, uni_id, port_no, classifier,
action, logical_flow, alloc_id, gemport_id, is_l2_mod_flow=True):
if is_l2_mod_flow:
classifier[PACKET_TAG_TYPE] = SINGLE_TAG
else:
classifier[PACKET_TAG_TYPE] = DOUBLE_TAG
self.add_hsia_flow(intf_id, onu_id, uni_id, port_no, classifier,
action, UPSTREAM,
logical_flow, alloc_id, gemport_id)
def add_downstream_data_flow(self, intf_id, onu_id, uni_id, port_no, classifier,
action, logical_flow, alloc_id, gemport_id, is_l2_mod_flow=True):
if is_l2_mod_flow:
classifier[PACKET_TAG_TYPE] = DOUBLE_TAG
classifier[POP_VLAN] = True
action[VLAN_VID] = classifier[VLAN_VID]
else:
classifier[PACKET_TAG_TYPE] = DOUBLE_TAG
self.add_hsia_flow(intf_id, onu_id, uni_id, port_no, classifier,
action, DOWNSTREAM,
logical_flow, alloc_id, gemport_id)
def add_hsia_flow(self, intf_id, onu_id, uni_id, port_no, classifier,
action, direction, logical_flow, alloc_id, gemport_id):
flow_store_cookie = self._get_flow_store_cookie(classifier,
gemport_id)
if self.resource_mgr.is_flow_cookie_on_kv_store(intf_id, onu_id,
uni_id,
flow_store_cookie):
self.log.debug('flow-exists--not-re-adding')
else:
            # One of the OLT platforms (Broadcom BAL) requires that
            # symmetric flows use the same flow_id across UL and DL.
            # Since HSIA is currently the only symmetric flow, we re-use
            # the flow_id across both directions. The 'flow_category'
            # takes priority over flow_cookie when looking up an available
            # HSIA_FLOW id for the ONU.
flow_category = HSIA_FLOW
if self.is_no_l2_modification_flow(classifier, action):
flow_category = HSIA_TRANSPARENT.format(classifier[VLAN_VID])
            flow_id = self.resource_mgr.get_flow_id(intf_id, onu_id, uni_id,
                                                    flow_category=flow_category,
                                                    flow_pcp=classifier.get(VLAN_PCP))
if flow_id is None:
self.log.error("hsia-flow-unavailable")
return
flow = openolt_pb2.Flow(
access_intf_id=intf_id, onu_id=onu_id, uni_id=uni_id,
flow_id=flow_id, flow_type=direction, alloc_id=alloc_id,
network_intf_id=self.data_model.olt_nni_intf_id(),
gemport_id=gemport_id,
classifier=self.mk_classifier(classifier),
action=self.mk_action(action), priority=logical_flow.priority,
port_no=port_no, cookie=logical_flow.cookie)
if self.add_flow_to_device(flow, logical_flow, flow_store_cookie):
flow_info = self._get_flow_info_as_json_blob(flow,
flow_store_cookie,
flow_category)
self.update_flow_info_to_kv_store(flow.access_intf_id,
flow.onu_id, flow.uni_id,
flow.flow_id, flow_info)
def add_dhcp_trap_uni(self, intf_id, onu_id, uni_id, port_no, classifier,
action, logical_flow, alloc_id, gemport_id):
self.log.debug('add dhcp upstream trap', classifier=classifier,
intf_id=intf_id, onu_id=onu_id, uni_id=uni_id,
action=action)
action.clear()
action[TRAP_TO_HOST] = True
classifier[UDP_SRC] = 68
classifier[UDP_DST] = 67
classifier[PACKET_TAG_TYPE] = SINGLE_TAG
classifier.pop(VLAN_VID, None)
flow_store_cookie = self._get_flow_store_cookie(classifier,
gemport_id)
if self.resource_mgr.is_flow_cookie_on_kv_store(intf_id, onu_id,
uni_id,
flow_store_cookie):
self.log.debug('flow-exists--not-re-adding')
else:
flow_id = self.resource_mgr.get_flow_id(
intf_id, onu_id, uni_id,
flow_store_cookie=flow_store_cookie,
)
dhcp_flow = openolt_pb2.Flow(
onu_id=onu_id, uni_id=uni_id, flow_id=flow_id,
flow_type=UPSTREAM, access_intf_id=intf_id,
gemport_id=gemport_id, alloc_id=alloc_id,
network_intf_id=self.data_model.olt_nni_intf_id(),
priority=logical_flow.priority,
classifier=self.mk_classifier(classifier),
action=self.mk_action(action),
port_no=port_no,
cookie=logical_flow.cookie)
if self.add_flow_to_device(dhcp_flow, logical_flow, flow_store_cookie):
flow_info = self._get_flow_info_as_json_blob(dhcp_flow,
flow_store_cookie,
DHCP_FLOW)
self.update_flow_info_to_kv_store(dhcp_flow.access_intf_id,
dhcp_flow.onu_id,
dhcp_flow.uni_id,
dhcp_flow.flow_id,
flow_info)
def add_eapol_flow(self, intf_id, onu_id, uni_id, port_no, logical_flow,
alloc_id, gemport_id, vlan_id=DEFAULT_MGMT_VLAN, classifier=None, action=None):
uplink_classifier = dict()
uplink_classifier[ETH_TYPE] = EAP_ETH_TYPE
uplink_classifier[PACKET_TAG_TYPE] = SINGLE_TAG
uplink_classifier[VLAN_VID] = vlan_id
if classifier is not None:
uplink_classifier[VLAN_PCP] = classifier[VLAN_PCP]
uplink_action = dict()
uplink_action[TRAP_TO_HOST] = True
flow_store_cookie = self._get_flow_store_cookie(uplink_classifier,
gemport_id)
if self.resource_mgr.is_flow_cookie_on_kv_store(intf_id, onu_id,
uni_id,
flow_store_cookie):
self.log.debug('flow-exists--not-re-adding')
else:
# Add Upstream EAPOL Flow.
uplink_flow_id = self.resource_mgr.get_flow_id(
intf_id, onu_id, uni_id,
flow_store_cookie=flow_store_cookie
)
upstream_flow = openolt_pb2.Flow(
access_intf_id=intf_id, onu_id=onu_id, uni_id=uni_id,
flow_id=uplink_flow_id, flow_type=UPSTREAM, alloc_id=alloc_id,
network_intf_id=self.data_model.olt_nni_intf_id(),
gemport_id=gemport_id,
classifier=self.mk_classifier(uplink_classifier),
action=self.mk_action(uplink_action),
priority=logical_flow.priority,
port_no=port_no,
cookie=logical_flow.cookie)
logical_flow = copy.deepcopy(logical_flow)
logical_flow.match.oxm_fields.extend(fd.mk_oxm_fields([fd.vlan_vid(
vlan_id | 0x1000)]))
logical_flow.match.type = OFPMT_OXM
if self.add_flow_to_device(upstream_flow, logical_flow, flow_store_cookie):
flow_info = self._get_flow_info_as_json_blob(upstream_flow,
flow_store_cookie,
EAPOL_FLOW)
self.update_flow_info_to_kv_store(upstream_flow.access_intf_id,
upstream_flow.onu_id,
upstream_flow.uni_id,
upstream_flow.flow_id,
flow_info)
            # Add Downstream EAPOL Flow, only for the first EAP flow (BAL
            # requirement).
            # On one of the platforms (Broadcom BAL), re-adding the eapol
            # flow after a flow delete (onu reboot/disable cases) failed
            # when the same DL classifier vlan was used across multiple
            # ONUs. In order to generate a unique vlan, a combination of
            # intf_id, onu_id and uni_id is used.
            # uni_id defaults to 0, so add 1 to it.
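            # e.g. intf_id=1, onu_id=2, uni_id=0 -> 4090 - 1*2*(0+1) = 4088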
special_vlan_downstream_flow = 4090 - intf_id * onu_id * (uni_id + 1)
            # Assert that we never generate an invalid vlan under any condition
assert special_vlan_downstream_flow >= 2
downlink_classifier = dict()
downlink_classifier[PACKET_TAG_TYPE] = SINGLE_TAG
downlink_classifier[ETH_TYPE] = EAP_ETH_TYPE
downlink_classifier[VLAN_VID] = special_vlan_downstream_flow
downlink_action = dict()
downlink_action[PUSH_VLAN] = True
downlink_action[VLAN_VID] = vlan_id
flow_store_cookie = self._get_flow_store_cookie(
downlink_classifier, gemport_id)
if self.resource_mgr.is_flow_cookie_on_kv_store(
intf_id, onu_id, uni_id, flow_store_cookie):
self.log.debug('flow-exists--not-re-adding')
else:
downlink_flow_id = self.resource_mgr.get_flow_id(
intf_id, onu_id, uni_id,
flow_store_cookie=flow_store_cookie
)
downstream_flow = openolt_pb2.Flow(
access_intf_id=intf_id, onu_id=onu_id, uni_id=uni_id,
flow_id=downlink_flow_id, flow_type=DOWNSTREAM,
alloc_id=alloc_id,
network_intf_id=self.data_model.olt_nni_intf_id(),
gemport_id=gemport_id,
classifier=self.mk_classifier(downlink_classifier),
action=self.mk_action(downlink_action),
priority=logical_flow.priority,
port_no=port_no,
cookie=logical_flow.cookie)
downstream_logical_flow = ofp_flow_stats(
id=logical_flow.id, cookie=logical_flow.cookie,
table_id=logical_flow.table_id,
priority=logical_flow.priority, flags=logical_flow.flags)
downstream_logical_flow.match.oxm_fields.extend(
fd.mk_oxm_fields(
[fd.in_port(fd.get_out_port(logical_flow)),
fd.vlan_vid(special_vlan_downstream_flow | 0x1000)]))
downstream_logical_flow.match.type = OFPMT_OXM
downstream_logical_flow.instructions.extend(
fd.mk_instructions_from_actions([fd.output(
self.platform.mk_uni_port_num(intf_id, onu_id,
uni_id))]))
if self.add_flow_to_device(downstream_flow,
downstream_logical_flow, flow_store_cookie):
flow_info = self._get_flow_info_as_json_blob(
downstream_flow, flow_store_cookie, EAPOL_FLOW)
self.update_flow_info_to_kv_store(
downstream_flow.access_intf_id, downstream_flow.onu_id,
downstream_flow.uni_id, downstream_flow.flow_id,
flow_info)
def repush_all_different_flows(self):
# Check if the device is supposed to have flows, if so add them
# Recover static flows after a reboot
logical_flows = self.logical_flows_proxy.get('/').items
devices_flows = self.flows_proxy.get('/').items
logical_flows_ids_provisioned = [f.cookie for f in devices_flows]
for logical_flow in logical_flows:
try:
if logical_flow.id not in logical_flows_ids_provisioned:
self.add_flow(logical_flow)
except Exception as e:
self.log.exception('Problem reading this flow', e=e)
def reset_flows(self):
self.flows_proxy.update('/', Flows(items=[]))
self.log.debug("purged-all-device-flows")
self.logical_flows_proxy.update('/', Flows(items=[]))
self.log.debug("purged-all-logical-flows")
""" Add a downstream DHCP trap flow on the NNI interface
"""
def add_dhcp_trap_nni(self, logical_flow, classifier,
port_no, network_intf_id=0):
self.log.info("trap-dhcp-of-nni-flow")
classifier[PACKET_TAG_TYPE] = DOUBLE_TAG
action = dict()
action[TRAP_TO_HOST] = True
        # We manage the flow_id resource pool on a per PON port basis.
        # Since this situation is tricky, as a hack, we pass the NNI port
        # index (network_intf_id) as the PON port index for the flow_id
        # resource pool. Also, there is no ONU id available when trapping
        # DHCP packets on the NNI port, so use onu_id -1 (invalid).
# ****************** CAVEAT *******************
# This logic works if the NNI Port Id falls within the same valid
# range of PON Port Ids. If this doesn't work for some OLT Vendor
# we need to have a re-look at this.
# *********************************************
onu_id = -1
uni_id = -1
flow_store_cookie = self._get_flow_store_cookie(classifier)
if self.resource_mgr.is_flow_cookie_on_kv_store(
network_intf_id, onu_id, uni_id, flow_store_cookie):
self.log.debug('flow-exists--not-re-adding')
else:
flow_id = self.resource_mgr.get_flow_id(
network_intf_id, onu_id, uni_id,
flow_store_cookie=flow_store_cookie)
downstream_flow = openolt_pb2.Flow(
access_intf_id=-1, # access_intf_id not required
onu_id=onu_id, # onu_id not required
uni_id=uni_id, # uni_id not used
flow_id=flow_id,
flow_type=DOWNSTREAM,
network_intf_id=network_intf_id,
gemport_id=-1, # gemport_id not required
classifier=self.mk_classifier(classifier),
action=self.mk_action(action),
priority=logical_flow.priority,
port_no=port_no,
cookie=logical_flow.cookie)
self.log.debug('add dhcp downstream trap', classifier=classifier,
action=action, flow=downstream_flow,
port_no=port_no)
if self.add_flow_to_device(downstream_flow, logical_flow, flow_store_cookie):
flow_info = self._get_flow_info_as_json_blob(downstream_flow,
flow_store_cookie, DHCP_FLOW)
self.update_flow_info_to_kv_store(
network_intf_id, onu_id, uni_id, flow_id, flow_info)
def add_lldp_flow(self, logical_flow, port_no, network_intf_id=0):
classifier = dict()
classifier[ETH_TYPE] = LLDP_ETH_TYPE
classifier[PACKET_TAG_TYPE] = UNTAGGED
action = dict()
action[TRAP_TO_HOST] = True
# LLDP flow is installed to trap LLDP packets on the NNI port.
        # We manage the flow_id resource pool on a per PON port basis.
        # Since this situation is tricky, as a hack, we pass the NNI port
        # index (network_intf_id) as the PON port index for the flow_id
        # resource pool. Also, there is no ONU id available when trapping
        # LLDP packets on the NNI port, so use onu_id -1 (invalid).
# ****************** CAVEAT *******************
# This logic works if the NNI Port Id falls within the same valid
# range of PON Port Ids. If this doesn't work for some OLT Vendor
# we need to have a re-look at this.
# *********************************************
onu_id = -1
uni_id = -1
flow_store_cookie = self._get_flow_store_cookie(classifier)
if self.resource_mgr.is_flow_cookie_on_kv_store(
network_intf_id, onu_id, uni_id, flow_store_cookie):
self.log.debug('flow-exists--not-re-adding')
else:
flow_id = self.resource_mgr.get_flow_id(
network_intf_id, onu_id, uni_id, flow_store_cookie=flow_store_cookie)
downstream_flow = openolt_pb2.Flow(
access_intf_id=-1, # access_intf_id not required
onu_id=onu_id, # onu_id not required
uni_id=uni_id, # uni_id not used
flow_id=flow_id,
flow_type=DOWNSTREAM,
network_intf_id=network_intf_id,
gemport_id=-1, # gemport_id not required
classifier=self.mk_classifier(classifier),
action=self.mk_action(action),
priority=logical_flow.priority,
port_no=port_no,
cookie=logical_flow.cookie)
self.log.debug('add lldp downstream trap', classifier=classifier,
action=action, flow=downstream_flow,
port_no=port_no)
if self.add_flow_to_device(downstream_flow, logical_flow, flow_store_cookie):
flow_info = self._get_flow_info_as_json_blob(downstream_flow,
flow_store_cookie,
LLDP_FLOW)
self.update_flow_info_to_kv_store(
network_intf_id, onu_id, uni_id, flow_id, flow_info)
@staticmethod
def mk_classifier(classifier_info):
classifier = openolt_pb2.Classifier()
if ETH_TYPE in classifier_info:
classifier.eth_type = classifier_info[ETH_TYPE]
if IP_PROTO in classifier_info:
classifier.ip_proto = classifier_info[IP_PROTO]
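        # VLAN 4095 (RESERVED_VLAN) is treated as transparent here:
        # o_vid/i_vid are deliberately left unset so the device does not
        # match on them.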
if VLAN_VID in classifier_info and \
classifier_info[VLAN_VID] != RESERVED_VLAN:
classifier.o_vid = classifier_info[VLAN_VID]
if METADATA in classifier_info and \
classifier_info[METADATA] != RESERVED_VLAN:
classifier.i_vid = classifier_info[METADATA]
if VLAN_PCP in classifier_info:
classifier.o_pbits = classifier_info[VLAN_PCP]
if UDP_SRC in classifier_info:
classifier.src_port = classifier_info[UDP_SRC]
if UDP_DST in classifier_info:
classifier.dst_port = classifier_info[UDP_DST]
if IPV4_DST in classifier_info:
classifier.dst_ip = classifier_info[IPV4_DST]
if IPV4_SRC in classifier_info:
classifier.src_ip = classifier_info[IPV4_SRC]
if PACKET_TAG_TYPE in classifier_info:
if classifier_info[PACKET_TAG_TYPE] == SINGLE_TAG:
classifier.pkt_tag_type = SINGLE_TAG
elif classifier_info[PACKET_TAG_TYPE] == DOUBLE_TAG:
classifier.pkt_tag_type = DOUBLE_TAG
elif classifier_info[PACKET_TAG_TYPE] == UNTAGGED:
classifier.pkt_tag_type = UNTAGGED
else:
classifier.pkt_tag_type = 'none'
return classifier
def mk_action(self, action_info):
action = openolt_pb2.Action()
if POP_VLAN in action_info:
action.o_vid = action_info[VLAN_VID]
action.cmd.remove_outer_tag = True
elif PUSH_VLAN in action_info:
action.o_vid = action_info[VLAN_VID]
action.cmd.add_outer_tag = True
if VLAN_PCP in action_info:
action.o_pbits = action_info[VLAN_PCP]
elif TRAP_TO_HOST in action_info:
action.cmd.trap_to_host = True
else:
self.log.info('Invalid-action-field', action_info=action_info)
return
return action
def is_eap_enabled(self, intf_id, onu_id, uni_id):
flows = self.logical_flows_proxy.get('/').items
for flow in flows:
eap_flow = False
eap_intf_id = None
eap_onu_id = None
eap_uni_id = None
for field in fd.get_ofb_fields(flow):
if field.type == fd.ETH_TYPE:
if field.eth_type == EAP_ETH_TYPE:
eap_flow = True
if field.type == fd.IN_PORT:
eap_intf_id = self.platform.intf_id_from_uni_port_num(
field.port)
eap_onu_id = self.platform.onu_id_from_port_num(field.port)
eap_uni_id = self.platform.uni_id_from_port_num(field.port)
if eap_flow:
self.log.debug('eap flow detected', onu_id=onu_id,
uni_id=uni_id, intf_id=intf_id,
eap_intf_id=eap_intf_id, eap_onu_id=eap_onu_id,
eap_uni_id=eap_uni_id)
if eap_flow and intf_id == eap_intf_id \
and onu_id == eap_onu_id and uni_id == eap_uni_id:
return True, flow
return False, None
def get_subscriber_vlan(self, port):
        self.log.debug('looking for subscriber flow for port', port=port)
flows = self.logical_flows_proxy.get('/').items
for flow in flows:
in_port = fd.get_in_port(flow)
out_port = fd.get_out_port(flow)
if in_port == port and out_port is not None and \
self.platform.intf_id_to_port_type_name(out_port) \
== Port.ETHERNET_NNI:
fields = fd.get_ofb_fields(flow)
self.log.debug('subscriber flow found', fields=fields)
for field in fields:
if field.type == OFPXMT_OFB_VLAN_VID:
self.log.debug('subscriber vlan found',
vlan_id=field.vlan_vid)
return field.vlan_vid & 0x0fff
self.log.debug('No subscriber flow found', port=port)
return None
def add_flow_to_device(self, flow, logical_flow, flow_store_cookie=None):
self.log.debug('pushing flow to device', flow=flow)
try:
self.stub.FlowAdd(flow)
except grpc.RpcError as grpc_e:
if grpc_e.code() == grpc.StatusCode.ALREADY_EXISTS:
self.log.warn('flow already exists', e=grpc_e, flow=flow)
else:
self.log.error('failed to add flow',
logical_flow=logical_flow, flow=flow,
grpc_error=grpc_e)
# If the flow addition failed on the device, immediately
# free up the flow_id resource from the pool
                intf_id = flow.access_intf_id if flow.access_intf_id >= 0 else flow.network_intf_id
onu_id = flow.onu_id
uni_id = flow.uni_id
flow_id = flow.flow_id
self.resource_mgr.free_flow_id(intf_id, onu_id, uni_id, flow_id)
return False
else:
            intf_onu_id = (flow.access_intf_id if flow.access_intf_id >= 0
                           else flow.network_intf_id,
                           flow.onu_id, flow.uni_id)
logical_flow.intf_tuple.append(str(intf_onu_id))
if flow_store_cookie is not None:
logical_flow.flow_store_cookie = flow_store_cookie
self.register_flow(logical_flow, flow)
return True
def update_flow_info_to_kv_store(self, intf_id, onu_id, uni_id, flow_id,
flow):
self.resource_mgr.update_flow_id_info(intf_id, onu_id, uni_id,
flow_id, flow)
def register_flow(self, logical_flow, device_flow):
self.log.debug('registering flow in device',
logical_flow=logical_flow, device_flow=device_flow)
stored_flow = copy.deepcopy(logical_flow)
stored_flow.id = self.generate_stored_id(device_flow.flow_id,
device_flow.flow_type)
self.log.debug('generated device flow id', id=stored_flow.id,
flow_id=device_flow.flow_id,
direction=device_flow.flow_type)
stored_flow.cookie = logical_flow.id
flows = self.flows_proxy.get('/')
flows.items.extend([stored_flow])
self.flows_proxy.update('/', flows)
def find_next_flow(self, flow, metadata):
table_id = fd.get_goto_table_id(flow)
        # Prior to ONOS 1.13.5, metadata contained the UNI output port
        # number. In 1.13.5 and later, the lower 32 bits are the output
        # port number and the upper 32 bits are the inner-vid we are
        # looking for. Using just the lower 32 bits allows this code to
        # work with both pre- and post-1.13.5 ONOS OltPipeline.
port = metadata & 0xFFFFFFFF
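        # e.g. metadata 0x0000_000a_0000_0010 -> inner-vid 10, port 16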
if table_id is None:
return None
flows = self.logical_flows_proxy.get('/').items
next_flows = []
for f in flows:
if f.table_id == table_id:
# FIXME
if fd.get_in_port(f) == fd.get_in_port(flow) and \
fd.get_out_port(f) == port:
next_flows.append(f)
if len(next_flows) == 0:
self.log.warning('no next flow found, it may be a timing issue',
flow=flow, number_of_flows=len(flows))
if flow.id in self.retry_add_flow_list:
self.log.debug('flow is already in retry list',
flow_id=flow.id)
else:
self.retry_add_flow_list.append(flow.id)
reactor.callLater(5, self.retry_add_flow, flow)
return None
next_flows.sort(key=lambda f: f.priority, reverse=True)
return next_flows[0]
def update_children_flows(self, device_rules_map):
for device_id, (flows, groups) in device_rules_map.iteritems():
if device_id != self.device_id:
self.root_proxy.update('/devices/{}/flows'.format(device_id),
Flows(items=flows.values()))
self.root_proxy.update('/devices/{}/flow_groups'.format(
device_id), FlowGroups(items=groups.values()))
def clear_flows_and_scheduler_for_logical_port(self, child_device,
logical_port):
ofp_port_name = logical_port.ofp_port.name
port_no = logical_port.ofp_port.port_no
pon_port = child_device.proxy_address.channel_id
onu_id = child_device.proxy_address.onu_id
uni_id = self.platform.uni_id_from_port_num(port_no)
tp_id = self.resource_mgr.get_tech_profile_id_for_onu(pon_port, onu_id,
uni_id)
tech_profile_instance = self.tech_profile[pon_port]. \
get_tech_profile_instance(
tp_id,
ofp_port_name)
flow_ids = self.resource_mgr.get_current_flow_ids(pon_port, onu_id,
uni_id)
self.log.debug("outstanding-flows-to-be-cleared", flow_ids=flow_ids)
if flow_ids:
for flow_id in flow_ids:
flow_infos = self.resource_mgr.get_flow_id_info(pon_port, onu_id,
uni_id, flow_id)
for flow_info in flow_infos:
direction = flow_info['flow_type']
flow_to_remove = openolt_pb2.Flow(flow_id=flow_id,
flow_type=direction)
try:
self.stub.FlowRemove(flow_to_remove)
except grpc.RpcError as grpc_e:
if grpc_e.code() == grpc.StatusCode.NOT_FOUND:
self.log.debug('This flow does not exist on switch, '
'normal after an OLT reboot',
flow=flow_to_remove)
else:
raise grpc_e
self.remove_us_scheduler_queues(pon_port, onu_id, uni_id, tech_profile_instance)
self.remove_ds_scheduler_queues(pon_port, onu_id, uni_id, tech_profile_instance)
def generate_stored_id(self, flow_id, direction):
if direction == UPSTREAM:
self.log.debug('upstream flow, shifting id')
return 0x1 << 15 | flow_id
elif direction == DOWNSTREAM:
self.log.debug('downstream flow, not shifting id')
return flow_id
else:
self.log.warn('Unrecognized direction', direction=direction)
return flow_id
def decode_stored_id(self, id):
if id >> 15 == 0x1:
return id & 0x7fff, UPSTREAM
else:
return id, DOWNSTREAM
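    # Round-trip example with a hypothetical flow_id of 42:
    #   generate_stored_id(42, UPSTREAM) -> 0x802a (bit 15 set)
    #   decode_stored_id(0x802a)         -> (42, UPSTREAM)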
def _populate_tech_profile_per_pon_port(self):
for arange in self.resource_mgr.device_info.ranges:
for intf_id in arange.intf_ids:
self.tech_profile[intf_id] = \
self.resource_mgr.resource_mgrs[intf_id].tech_profile
# Make sure we have as many tech_profiles as there are pon ports on
# the device
assert len(self.tech_profile) \
== self.resource_mgr.device_info.pon_ports
def _get_flow_info_as_json_blob(self, flow, flow_store_cookie,
flow_category=None):
json_blob = MessageToDict(message=flow,
preserving_proto_field_name=True)
self.log.debug("flow-info", json_blob=json_blob)
json_blob['flow_store_cookie'] = flow_store_cookie
if flow_category is not None:
json_blob['flow_category'] = flow_category
# For flows which trap out of the NNI, the access_intf_id is invalid
# (set to -1). In such cases, we need to refer to the network_intf_id.
if flow.access_intf_id != -1:
flow_info = self.resource_mgr.get_flow_id_info(
flow.access_intf_id, flow.onu_id, flow.uni_id, flow.flow_id)
else:
# Case of LLDP trap flow from the NNI. We can't use
# flow.access_intf_id in that case, as it is invalid.
# We use flow.network_intf_id.
flow_info = self.resource_mgr.get_flow_id_info(
flow.network_intf_id, flow.onu_id, flow.uni_id, flow.flow_id)
if flow_info is None:
flow_info = list()
flow_info.append(json_blob)
else:
assert (isinstance(flow_info, list))
flow_info.append(json_blob)
return flow_info
@staticmethod
def _get_flow_store_cookie(classifier, gem_port=None):
assert isinstance(classifier, dict)
# We need unique flows per gem_port
if gem_port is not None:
to_hash = dumps(classifier, sort_keys=True) + str(gem_port)
else:
to_hash = dumps(classifier, sort_keys=True)
return hashlib.md5(to_hash).hexdigest()[:12]
@staticmethod
def _get_gem_port_for_pcp(pcp, get_gem_port_for_pcp):
"""
Return gem_port id corresponding to a given pcp bit
:param pcp: Represents the p_bit
:param get_gem_port_for_pcp: Represents a list of gemport_attributes (DS or US)
:return: Gemport ID servicing the given pcp if found, else None
"""
for gem_port_attr in get_gem_port_for_pcp:
# The pbit_map appears as "0b00011010" in the Tech-Profile instance.
# The initial '0b' has to be stripped.
# The remaining string is reversed, then enumerated and matched against pcp index.
for i, p in enumerate(reversed(gem_port_attr.pbit_map[2:])):
if i == pcp and p == '1':
return gem_port_attr.gemport_id
return None
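    # Worked example with a hypothetical pbit_map of "0b00001010":
    # stripping "0b" leaves "00001010", which reversed is "01010000",
    # so pcp values 1 and 3 map to this gemport and all others do not.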
@staticmethod
def _install_flow_on_all_gemports(func, kwargs, gem_attr_list):
for gem_attr in gem_attr_list:
# The pbit_map appears as "0b00011010" in the Tech-Profile instance.
# The initial '0b' has to be stripped.
# The remaining string is reversed, then enumerated and matched against pbit 1.
for i, p in enumerate(reversed(gem_attr.pbit_map[2:])):
if p == '1':
kwargs['classifier'][VLAN_PCP] = i
# Add the gemport corresponding to this PCP
kwargs['gemport_id'] = gem_attr.gemport_id
func(**kwargs)
|
apache-2.0
|
RenzoH89/hsac-fitnesse-fixtures
|
src/main/java/nl/hsac/fitnesse/junit/HsacFitNesseRunner.java
|
8302
|
package nl.hsac.fitnesse.junit;
import fitnesse.ContextConfigurator;
import fitnesse.FitNesseContext;
import fitnesse.components.PluginsClassLoader;
import fitnesse.junit.FitNesseRunner;
import fitnesse.wiki.WikiPage;
import nl.hsac.fitnesse.fixture.Environment;
import nl.hsac.fitnesse.fixture.slim.web.SeleniumDriverSetup;
import nl.hsac.fitnesse.fixture.util.FileUtil;
import nl.hsac.fitnesse.fixture.util.selenium.SeleniumHelper;
import nl.hsac.fitnesse.junit.selenium.LocalSeleniumDriverClassFactoryFactory;
import nl.hsac.fitnesse.junit.selenium.LocalSeleniumDriverFactoryFactory;
import nl.hsac.fitnesse.junit.selenium.SeleniumDriverFactoryFactory;
import nl.hsac.fitnesse.junit.selenium.SeleniumGridDriverFactoryFactory;
import nl.hsac.fitnesse.junit.selenium.SeleniumJsonGridDriverFactoryFactory;
import nl.hsac.fitnesse.junit.selenium.SimpleSeleniumGridDriverFactoryFactory;
import org.apache.commons.lang3.StringUtils;
import org.junit.runner.notification.RunNotifier;
import org.junit.runners.model.InitializationError;
import java.io.File;
import java.io.FileInputStream;
import java.util.ArrayList;
import java.util.List;
/**
* JUnit Runner to run a FitNesse suite or page as JUnit test.
*
* The suite/page to run must be specified either via the Java property
 * 'fitnesseSuiteToRun', or by adding a @Suite annotation to the test class.
* If both are present the system property is used.
*
* The Selenium driver used for tests may be overridden (from what is configured in the wiki)
* by specifying the property 'seleniumGridUrl' and either 'seleniumBrowser' or 'seleniumCapabilities'.
* The default timeout (in seconds) for Selenium tests may be overridden by specifying the property
* 'seleniumDefaultTimeout'.
*
* The HTML generated for each page is saved in target/fitnesse-results
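 *
 * Example (hypothetical test class, shown only as a sketch):
 * <pre>
 * &#64;RunWith(HsacFitNesseRunner.class)
 * &#64;FitNesseRunner.Suite("ExampleSuite.SliceToRun")
 * public class ExampleTest {
 * }
 * </pre>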
*/
public class HsacFitNesseRunner extends FitNesseRunner {
/** Output path for HTML results */
public final static String FITNESSE_RESULTS_PATH = "target/fitnesse-results";
/** Property to override suite to run */
public final static String SUITE_OVERRIDE_VARIABLE_NAME = "fitnesseSuiteToRun";
private final static String SELENIUM_DEFAULT_TIMEOUT_PROP = "seleniumDefaultTimeout";
protected final List<SeleniumDriverFactoryFactory> factoryFactories = new ArrayList<>();
public HsacFitNesseRunner(Class<?> suiteClass) throws InitializationError {
super(suiteClass);
try {
factoryFactories.add(new SimpleSeleniumGridDriverFactoryFactory());
factoryFactories.add(new SeleniumGridDriverFactoryFactory());
factoryFactories.add(new SeleniumJsonGridDriverFactoryFactory());
factoryFactories.add(new LocalSeleniumDriverFactoryFactory());
factoryFactories.add(new LocalSeleniumDriverClassFactoryFactory());
// we include images in output so build server will have single
// directory containing both HTML results and the images created by the tests
String outputDir = getOutputDir(suiteClass);
new File(outputDir).mkdirs();
Environment.getInstance().setFitNesseRoot(outputDir);
} catch (Exception e) {
throw new InitializationError(e);
}
}
@Override
protected String getSuiteName(Class<?> klass) throws InitializationError {
String name = System.getProperty(SUITE_OVERRIDE_VARIABLE_NAME);
if (StringUtils.isEmpty(name)) {
Suite nameAnnotation = klass.getAnnotation(Suite.class);
if (nameAnnotation == null) {
throw new InitializationError("There must be a @Suite annotation");
}
name = nameAnnotation.value();
}
return name;
}
@Override
protected String getFitNesseDir(Class<?> suiteClass) {
return "wiki";
}
@Override
protected String getOutputDir(Class<?> klass) throws InitializationError {
return FITNESSE_RESULTS_PATH;
}
@Override
protected String getFitNesseRoot(Class<?> suiteClass) {
return ContextConfigurator.DEFAULT_ROOT;
}
@Override
protected FitNesseContext createContext(Class<?> suiteClass) throws Exception {
// disable maven-classpath-plugin, we expect all jars to be loaded as part of this jUnit run
System.setProperty("fitnesse.wikitext.widgets.MavenClasspathSymbolType.Disable", "true");
new PluginsClassLoader(getFitNesseDir(suiteClass)).addPluginsToClassLoader();
return super.createContext(suiteClass);
}
@Override
protected void runPages(List<WikiPage> pages, RunNotifier notifier) {
boolean seleniumConfigOverridden = configureSeleniumIfNeeded();
try {
super.runPages(pages, notifier);
} finally {
if (seleniumConfigOverridden) {
try {
shutdownSelenium();
}
catch (Exception e) {
}
}
try {
Class<?> suiteClass = getTestClass().getJavaClass();
String outputDir = getOutputDir(suiteClass);
String suiteName = getSuiteName(suiteClass);
String filename = suiteName + ".html";
File overviewFile = new File(outputDir, filename);
if (overviewFile.exists()) {
String path = overviewFile.getAbsolutePath();
String overviewHtml = FileUtil.streamToString(new FileInputStream(path), path);
if (overviewHtml != null) {
String indexHtml = getIndexHtmlContent(overviewHtml);
FileUtil.writeFile(new File(outputDir, "index.html").getAbsolutePath(), indexHtml);
}
}
} catch (Exception e) {
}
}
}
/**
* Determines whether system properties should override Selenium configuration in wiki.
* If so Selenium will be configured according to property values, and locked so that wiki pages
* no longer control Selenium setup.
* @return true if Selenium was configured.
*/
protected boolean configureSeleniumIfNeeded() {
setSeleniumDefaultTimeOut();
try {
SeleniumHelper.DriverFactory factory = null;
SeleniumDriverFactoryFactory factoryFactory = getSeleniumDriverFactoryFactory();
if (factoryFactory != null) {
factory = factoryFactory.getDriverFactory();
if (factory != null) {
SeleniumDriverSetup.lockConfig();
Environment.getInstance().getSeleniumHelper().setDriverFactory(factory);
}
}
return factory != null;
} catch (Exception e) {
throw new RuntimeException("Error overriding Selenium config", e);
}
}
protected void setSeleniumDefaultTimeOut() {
String propValue = System.getProperty(SELENIUM_DEFAULT_TIMEOUT_PROP);
if (StringUtils.isNotEmpty(propValue)) {
try {
int timeoutSeconds = Integer.parseInt(propValue);
Environment.getInstance().getSeleniumHelper().setDefaultTimeoutSeconds(timeoutSeconds);
} catch (NumberFormatException e) {
throw new RuntimeException("Bad " + SELENIUM_DEFAULT_TIMEOUT_PROP + " system property: " + propValue, e);
}
}
}
protected SeleniumDriverFactoryFactory getSeleniumDriverFactoryFactory() {
SeleniumDriverFactoryFactory result = null;
for (SeleniumDriverFactoryFactory factory : factoryFactories) {
if (factory.willOverride()) {
result = factory;
break;
}
}
return result;
}
protected void shutdownSelenium() {
SeleniumDriverSetup.unlockConfig();
new SeleniumDriverSetup().stopDriver();
}
protected String getIndexHtmlContent(String overviewHtml) {
String result = overviewHtml;
String runSummary = SeleniumDriverSetup.getLastRunSummary();
if (runSummary != null) {
result = overviewHtml.replace("<table", runSummary + "<table");
}
return result;
}
}
|
apache-2.0
|
yeastrc/proxl-web-app
|
proxl_web_app/front_end/src/js/page_js/data_pages/project_search_ids_driven_pages/structure_page/linkable-positions-utils.js
|
9567
|
"use strict";
import {StructureAlignmentUtils} from "./structure-alignment-utils";
import {StructureUtils} from "./stucture-utils";
export class LinkablePositionUtils {
static getRenderedDistanceArray( renderedLinks, linkExclusionHandler ) {
let distanceArray = [ ];
let UDRsCounted = { };
if( renderedLinks.crosslinks ) {
for (let i = 0; i < renderedLinks['crosslinks'].length; i++) {
const link = renderedLinks['crosslinks'][i]['link'];
if( !linkExclusionHandler || !linkExclusionHandler.isLinkExcluded( link ) ) {
LinkablePositionUtils.addCrosslinkToCountedUDRs({UDRsCounted, link})
distanceArray.push(parseFloat(renderedLinks['crosslinks'][i]['link']['length']));
}
}
}
if( renderedLinks.looplinks ) {
for (let i = 0; i < renderedLinks['looplinks'].length; i++) {
const link = renderedLinks['looplinks'][i]['link'];
if( !LinkablePositionUtils.isUDRinRenderedUDRs( {
renderedUDRs: UDRsCounted,
protein1: link.protein1,
protein2: link.protein1,
position1: link.position1,
position2: link.position2 } )) {
if( !linkExclusionHandler || !linkExclusionHandler.isLinkExcluded( link ) ) {
distanceArray.push(parseFloat(renderedLinks['looplinks'][i]['link']['length']));
}
}
}
}
return distanceArray;
}
static getRenderedUDRs({ renderedLinks, linkExclusionHandler }) {
let renderedUDRs = { };
if( renderedLinks.crosslinks ) {
for (let i = 0; i < renderedLinks['crosslinks'].length; i++) {
const link = renderedLinks['crosslinks'][i]['link'];
if( !linkExclusionHandler || !linkExclusionHandler.isLinkExcluded( link ) ) {
LinkablePositionUtils.addCrosslinkToCountedUDRs({UDRsCounted: renderedUDRs, link})
}
}
}
if( renderedLinks.looplinks ) {
for (let i = 0; i < renderedLinks['looplinks'].length; i++) {
const link = renderedLinks['looplinks'][i]['link'];
if( !linkExclusionHandler || !linkExclusionHandler.isLinkExcluded( link ) ) {
LinkablePositionUtils.addLooplinkToCountedUDRs({UDRsCounted: renderedUDRs, link})
}
}
}
return renderedUDRs;
}
static addCrosslinkToCountedUDRs({UDRsCounted,link}) {
const protein1 = link['protein1'];
const protein2 = link['protein2'];
const position1 = link['position1'];
const position2 = link['position2'];
        // add this to the list of UDRs we've counted so we can calculate the total unique UDRs among cross- and loop-links
if( !( protein1 in UDRsCounted ) ) { UDRsCounted[ protein1 ] = { }; }
if( !( protein2 in UDRsCounted[ protein1 ] ) ) { UDRsCounted[ protein1 ][ protein2 ] = { }; }
if( !( position1 in UDRsCounted[ protein1 ][ protein2 ] ) ) { UDRsCounted[ protein1 ][ protein2 ][ position1 ] = { }; };
if( !( position2 in UDRsCounted[ protein1 ][ protein2 ][ position1 ] ) ) { UDRsCounted[ protein1 ][ protein2 ][ position1 ][ position2 ] = { }; };
}
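    // Illustrative shape of the nested map, using hypothetical ids: after
    // counting a crosslink between protein 12 position 34 and protein 56
    // position 78, UDRsCounted is { 12: { 56: { 34: { 78: {} } } } }.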
static addLooplinkToCountedUDRs({UDRsCounted,link}) {
const protein1 = link['protein1'];
const protein2 = link['protein1'];
const position1 = link['position1'];
const position2 = link['position2'];
        // add this to the list of UDRs we've counted so we can calculate the total unique UDRs among cross- and loop-links
if( !( protein1 in UDRsCounted ) ) { UDRsCounted[ protein1 ] = { }; }
if( !( protein2 in UDRsCounted[ protein1 ] ) ) { UDRsCounted[ protein1 ][ protein2 ] = { }; }
if( !( position1 in UDRsCounted[ protein1 ][ protein2 ] ) ) { UDRsCounted[ protein1 ][ protein2 ][ position1 ] = { }; };
if( !( position2 in UDRsCounted[ protein1 ][ protein2 ][ position1 ] ) ) { UDRsCounted[ protein1 ][ protein2 ][ position1 ][ position2 ] = { }; };
}
static isUDRinRenderedUDRs({ renderedUDRs, protein1, protein2, position1, position2 } ) {
// console.log( renderedUDRs );
// console.log( protein1 );
// console.log( protein2 );
// console.log( position1 );
// console.log( position2 );
// console.log( "=============" );
// console.log( renderedUDRs[ protein1 ] );
// console.log( renderedUDRs[ protein1 ][ protein2 ] );
// console.log( renderedUDRs[ protein1 ][ protein2 ][ position1 ] );
// console.log( renderedUDRs[ protein1 ][ protein2 ][ position1 ][ position2 ] );
// console.log( "=============" );
if( protein1 in renderedUDRs ) {
if( protein2 in renderedUDRs[ protein1 ] &&
position1 in renderedUDRs[ protein1 ][ protein2 ] &&
position2 in renderedUDRs[ protein1 ][ protein2 ][ position1 ] ) {
return true;
}
// test swapping positions if it's within the same protein
if( protein1 === protein2 ) {
if( protein2 in renderedUDRs[ protein1 ] &&
position2 in renderedUDRs[ protein1 ][ protein2 ] &&
position1 in renderedUDRs[ protein1 ][ protein2 ][ position2 ] ) {
return true;
}
}
} else if( protein1 !== protein2 && protein2 in renderedUDRs ) {
if( protein1 in renderedUDRs[ protein2 ] &&
position2 in renderedUDRs[ protein2 ][ protein1 ] &&
position1 in renderedUDRs[ protein2 ][ protein1 ][ position2 ] ) {
return true;
}
}
return false;
}
static getDistanceArrayFromLinkablePositions( {
data,
visibleProteinsMap,
onlyShortest,
alignments,
structure,
linkExclusionHandler,
renderedLinks
}) {
const pdbDistanceArray = [ ];
for( let i = 0; i < data.length; i++ ) {
// all the actually-rendered UDRs. Do not include the distance for a non-rendered UDR
const renderedUDRs = LinkablePositionUtils.getRenderedUDRs({ renderedLinks, linkExclusionHandler } );
const protein1 = parseInt(data[ i ][ 'protein1' ]);
const protein2 = parseInt(data[ i ][ 'protein2' ]);
const position1 = parseInt(data[ i ][ 'position1' ]);
const position2 = parseInt(data[ i ][ 'position2' ]);
            // don't include excluded links
if( ( linkExclusionHandler.isCrosslinkExcluded( protein1, position1, protein2, position2 ) ||
linkExclusionHandler.isLooplinkExcluded( protein1, position1, position2 ) ) &&
!LinkablePositionUtils.isUDRinRenderedUDRs( { renderedUDRs, protein1, protein2, position1, position2 } ) ) {
continue;
}
const chains1 = visibleProteinsMap[ protein1 ];
const chains2 = visibleProteinsMap[ protein2 ];
let shortestDistance = -1;
if( !chains1 || chains1 == undefined || chains1.length < 1 ) {
console.log( "ERROR: Got no chains for protein: " + protein1 );
return;
}
if( !chains2 || chains2 == undefined || chains2.length < 1 ) {
console.log( "ERROR: Got no chains for protein: " + protein2 );
return;
}
for( let j = 0; j < chains1.length; j++ ) {
const chain1 = chains1[ j ];
const coordsArray1 = StructureAlignmentUtils.findCACoords( protein1, position1, [ chain1 ], alignments, structure );
if( coordsArray1 == undefined || coordsArray1.length < 1 ) { continue; }
for( let k = 0; k < chains2.length; k++ ) {
const chain2 = chains2[ k ];
if( chain1 == chain2 && protein1 == protein2 && position1 == position2 ) { continue; }
const coordsArray2 = StructureAlignmentUtils.findCACoords( protein2, position2, [ chain2 ], alignments, structure );
                    if( coordsArray2 == undefined || coordsArray2.length < 1 ) { continue; }
const distance = StructureUtils.calculateDistance( coordsArray1[ 0 ], coordsArray2[ 0 ] );
if( !onlyShortest ) {
pdbDistanceArray.push( distance );
} else {
if( shortestDistance === -1 || shortestDistance > distance ) {
shortestDistance = distance;
}
}
}
}
if( onlyShortest ) {
if( shortestDistance != -1 ) {
pdbDistanceArray.push(shortestDistance);
}
}
}
return pdbDistanceArray;
}
}
|
apache-2.0
|
tylerchen/springmvc-mybatis-modules-project
|
security/src/main/java/com/foreveross/common/module/security/application/impl/SecurityAllInOneApplicationImpl.java
|
11251
|
/*******************************************************************************
* Copyright (c) 2014-2-28 @author <a href="mailto:iffiff1@hotmail.com">Tyler Chen</a>.
* All rights reserved.
*
* Contributors:
* <a href="mailto:iffiff1@hotmail.com">Tyler Chen</a> - initial API and implementation
******************************************************************************/
package com.foreveross.common.module.security.application.impl;
import java.io.IOException;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import org.springframework.security.access.AccessDeniedException;
import org.springframework.security.access.ConfigAttribute;
import org.springframework.security.access.SecurityConfig;
import org.springframework.security.access.SecurityMetadataSource;
import org.springframework.security.authentication.BadCredentialsException;
import org.springframework.security.authentication.UsernamePasswordAuthenticationToken;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.AuthenticationException;
import org.springframework.security.core.GrantedAuthority;
import org.springframework.security.core.authority.SimpleGrantedAuthority;
import org.springframework.security.core.userdetails.UsernameNotFoundException;
import org.springframework.security.web.FilterInvocation;
import org.springframework.security.web.access.intercept.FilterInvocationSecurityMetadataSource;
import com.foreveross.common.module.security.application.SecurityAllInOneApplication;
import com.foreveross.common.module.security.application.SecurityAuthorizationDataApplication;
/**
* @author <a href="mailto:iffiff1@hotmail.com">Tyler Chen</a>
* @since 2014-2-28
*/
public class SecurityAllInOneApplicationImpl implements
SecurityAllInOneApplication {
SecurityAuthorizationDataApplication securityAuthorizationDataApplication;
Map<String, Object> resourceCache = new HashMap<String, Object>(1024);
Map<String, Object> userCache = new HashMap<String, Object>(1024);
public void setResourceCache(Map<String, Object> resourceCache) {
this.resourceCache = resourceCache;
}
public void setUserCache(Map<String, Object> userCache) {
this.userCache = userCache;
}
public void setSecurityAuthorizationDataApplication(
SecurityAuthorizationDataApplication securityAuthorizationDataApplication) {
this.securityAuthorizationDataApplication = securityAuthorizationDataApplication;
}
public Map<String, Object> findAccountByUsernameInMap(String username) {
return securityAuthorizationDataApplication
.findAccountByUsernameInMap(username);
}
public List<String> findRoleNameByUsername(String username) {
return securityAuthorizationDataApplication
.findRoleNameByUsername(username);
}
public Map<String, List<String>> findAllResourceNameAndRoleNameInResourceRolesMap() {
return securityAuthorizationDataApplication
.findAllResourceNameAndRoleNameInResourceRolesMap();
}
//===================================
@SuppressWarnings("unchecked")
public Authentication authenticate_AuthenticationProvider(
Authentication authentication) throws AuthenticationException {
String username = authentication.getPrincipal().toString();
String password = authentication.getCredentials().toString();
Map<String, Object> userDetails = new HashMap<String, Object>();
{
Map<String, Object> account = findAccountByUsernameInMap(username);
if (account == null || account.isEmpty()) {
throw new UsernameNotFoundException("Username not found.");
}
if (!password.equals(encode_PasswordEncoder((String) account
.get("PASSWORD")))) {
throw new BadCredentialsException("Password is not correct.");
}
userDetails.putAll(account);
}
{
List<GrantedAuthority> gAuthoritys = new ArrayList<GrantedAuthority>();
for (String role : findRoleNameByUsername(username)) {
SimpleGrantedAuthority gai = new SimpleGrantedAuthority(role);
gAuthoritys.add(gai);
}
userDetails.put("grantedAuthority", gAuthoritys);
userCache.put(username, userDetails);
}
{
UsernamePasswordAuthenticationToken result = new UsernamePasswordAuthenticationToken(
userDetails, authentication.getCredentials(),
(List<GrantedAuthority>) userDetails
.get("grantedAuthority"));
result.setDetails(authentication.getDetails());
return result;
}
}
public boolean supports_AuthenticationProvider(Class<?> paramClass) {
return true;
}
//===================================
public void afterPropertiesSet_InitializingBean() throws Exception {
}
//===================================
public String encode_PasswordEncoder(String password) {
return password;
}
//===================================org.springframework.security.web.access.intercept.FilterInvocationSecurityMetadataSource
public Collection<ConfigAttribute> getAllConfigAttributes_FilterInvocationSecurityMetadataSource() {
return null;
}
@SuppressWarnings("unchecked")
public Collection<ConfigAttribute> getAttributes_FilterInvocationSecurityMetadataSource(
Object object) throws IllegalArgumentException {
if (resourceCache.isEmpty()) {
Map<String, List<String>> map = findAllResourceNameAndRoleNameInResourceRolesMap();
resourceCache.putAll(map);
}
String url = ((FilterInvocation) object).getRequestUrl();
StringBuilder tempUrl = new StringBuilder(url);
{
int position = tempUrl.indexOf("?");
if (position != -1) {
url = url.substring(0, position);
                tempUrl.delete(position, tempUrl.length());
}
}
Collection<ConfigAttribute> attris = new ArrayList<ConfigAttribute>();
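        // Fall back from the most specific pattern to the most general one;
        // e.g. a (hypothetical) request "/a/b/c" is looked up as
        // "/a/b/c" -> "/a/b/*" -> "/a/b/**" -> "/a/**" -> "/**".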
while (true) {
List<String> roles = (List<String>) resourceCache.get(tempUrl
.toString());
if (roles != null) {
for (String role : roles) {
attris.add(new SecurityConfig(role));
}
return attris;
} else {
if (tempUrl.charAt(tempUrl.length() - 1) == '*') {// process "/*" and "/**" situation
if (tempUrl.charAt(tempUrl.length() - 2) == '/') {// process "/??/*" -> "/??/**"
tempUrl.append('*');
continue;
                    } else {// process "/??/x/**" -> "/??/**"
int lastSpash = tempUrl.lastIndexOf("/");
if (lastSpash > -1
&& (lastSpash = tempUrl.lastIndexOf("/",
lastSpash - 1)) > -1) {
tempUrl.replace(lastSpash + 1, tempUrl.length(),
"**");
continue;
}
}
} else {// process "/??/url" -> "/??/*"
int lastSpash = tempUrl.lastIndexOf("/");
if (lastSpash > -1) {
tempUrl.replace(lastSpash + 1, tempUrl.length(), "*");
continue;
}
}
}
break;
}
if (attris.isEmpty()) {
attris.add(new SecurityConfig("ROLE_NOBODY"));
}
return attris;
}
public boolean supports_FilterInvocationSecurityMetadataSource(
Class<?> clazz) {
return true;
}
//=======================================org.springframework.security.access.intercept.AbstractSecurityInterceptor
FilterInvocationSecurityMetadataSource securityMetadataSource;
public void setSecurityMetadataSource(
FilterInvocationSecurityMetadataSource securityMetadataSource) {
this.securityMetadataSource = securityMetadataSource;
}
public Class<? extends Object> getSecureObjectClass_AbstractSecurityInterceptor() {
return FilterInvocation.class;
}
public SecurityMetadataSource obtainSecurityMetadataSource_AbstractSecurityInterceptor() {
return this.securityMetadataSource;
}
//=======================================javax.servlet.Filter
public void destroy_Filter() {
}
public void doFilter_Filter(ServletRequest request,
ServletResponse response, FilterChain chain) throws IOException,
ServletException {
FilterInvocation fi = new FilterInvocation(request, response, chain);
//InterceptorStatusToken token = null;
try {
//token = super.beforeInvocation(fi);
fi.getChain().doFilter(fi.getRequest(), fi.getResponse());
} finally {
//super.afterInvocation(token, null);
}
}
public void init_Filter(FilterConfig config) throws ServletException {
}
//=======================================org.springframework.security.access.AccessDecisionManager
@SuppressWarnings("unchecked")
public void decide_AccessDecisionManager(Authentication authentication,
Object url, Collection<ConfigAttribute> configAttributes) {
if (configAttributes == null || configAttributes.isEmpty()) {
return;
}
Map<String, Object> userDetails = (Map<String, Object>) authentication
.getPrincipal();
if (userDetails == null || userDetails.isEmpty()) {
throw new AccessDeniedException(MessageFormat.format(
"Denied to access [{0}][{1}]", url, userDetails
.get("USERNAME")));
}
if ("1".equals(userDetails.get("SUPER"))) {
return;
}
for (ConfigAttribute configAttribute : configAttributes) {
for (GrantedAuthority gAuthority : (List<GrantedAuthority>) userDetails
.get("grantedAuthority")) {
if (configAttribute.getAttribute().trim().equals(
gAuthority.getAuthority().trim())) {
return;
}
}
}
for (ConfigAttribute configAttribute : configAttributes) {
for (GrantedAuthority gAuthority : authentication.getAuthorities()) {
if (configAttribute.getAttribute().trim().equals(
gAuthority.getAuthority().trim())) {
return;
}
}
}
throw new AccessDeniedException(MessageFormat.format(
"Denied to access [{0}]", url));
}
public boolean supports_AccessDecisionManager(
ConfigAttribute configAttribute) {
return true;
}
public boolean supports_AccessDecisionManager(Class<?> clazz) {
return true;
}
public static void main(String[] args) {
String url = "/a/b/c/d/e?a=1";
System.out.println(Arrays.toString(url.split("/")));
System.out.println(url.substring(0, url.lastIndexOf('/', url
.lastIndexOf('/') - 1)));
StringBuilder tempUrl = new StringBuilder(url);
while (true) {
System.out.println(tempUrl);
{
if (tempUrl.charAt(tempUrl.length() - 1) == '*') {// process "/*" and "/**" situation
if (tempUrl.charAt(tempUrl.length() - 2) == '/') {// process "/*"
tempUrl.append('*');
continue;
} else {// process "/**" situation
int lastSpash = tempUrl.lastIndexOf("/");
if (lastSpash > -1
&& (lastSpash = tempUrl.lastIndexOf("/",
lastSpash - 1)) > -1) {
tempUrl.replace(lastSpash + 1, tempUrl.length(),
"**");
continue;
}
}
} else {// process "/url" situation
int lastSpash = tempUrl.lastIndexOf("/");
if (lastSpash > -1) {
tempUrl.replace(lastSpash + 1, tempUrl.length(), "*");
continue;
}
}
}
break;
}
}
}
|
apache-2.0
|
Tacticalmint/04_BattleTank
|
BattleTank/Source/BattleTank/Private/TankAimingComponent.cpp
|
1642
|
// Fill out your copyright notice in the Description page of Project Settings.
#include "TankAimingComponent.h"
#include "TankBarrel.h"
#include "TankTurret.h"
#include "Engine.h"
// Sets default values for this component's properties
UTankAimingComponent::UTankAimingComponent()
{
// Set this component to be initialized when the game starts, and to be ticked every frame. You can turn these features
// off to improve performance if you don't need them.
PrimaryComponentTick.bCanEverTick = false;
}
void UTankAimingComponent::SetBarrelReference(UTankBarrel* BarrelToSet)
{
Barrel = BarrelToSet;
}
void UTankAimingComponent::SetTurretReference(UTankTurret* TurretToSet)
{
Turret = TurretToSet;
}
void UTankAimingComponent::AimAt(FVector HitLocation, float LaunchSpeed)
{
if (!Barrel) { return; }
FVector OutLaunchVelocity;
FVector StartLocation = Barrel->GetSocketLocation(FName("Projectile"));
bool bHaveAimSolution = UGameplayStatics::SuggestProjectileVelocity(this, OutLaunchVelocity, StartLocation, HitLocation, LaunchSpeed, false, 0, 0, ESuggestProjVelocityTraceOption::DoNotTrace);
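	// In the call above, the arguments after LaunchSpeed are: bHighArc=false
	// (prefer the flatter trajectory), CollisionRadius=0, OverrideGravityZ=0
	// (use world gravity), and DoNotTrace (obstructions are ignored).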
//calculate the OutLaunchVelocity
if (bHaveAimSolution)
{
auto AimDirection = OutLaunchVelocity.GetSafeNormal();
MoveBarrelTowards(AimDirection);
}
}
void UTankAimingComponent::MoveBarrelTowards(FVector AimDirection)
{
//work out difference between current barrel rotation and aim direction
auto BarrelRotator = Barrel->GetForwardVector().Rotation();
auto AimAsRotator = AimDirection.Rotation();
auto DeltaRotator = AimAsRotator - BarrelRotator;
Barrel->Elevate(DeltaRotator.Pitch);
Turret->Rotate(DeltaRotator.Yaw);
}
|
apache-2.0
|
SAP/openui5
|
src/sap.m/test/sap/m/qunit/rules/Button.qunit.js
|
1113
|
/*global QUnit */
sap.ui.define([
"sap/m/Button",
"sap/m/Page",
"sap/m/Panel",
"sap/ui/core/IconPool",
"test-resources/sap/ui/support/TestHelper"
], function (Button, Page, Panel, IconPool, testRule) {
"use strict";
QUnit.module("Button rule tests", {
setup: function () {
this.page = new Page({
content: [
new Panel({
id: "buttonTestsContext",
content: [
new Button(),
new Button({
icon: IconPool.getIconURI("add"),
text: "Add",
tooltip: "Add"
}),
new Button({
icon: IconPool.getIconURI("add"),
tooltip: "Add"
}),
new Button({
icon: IconPool.getIconURI("add"),
text: "Add"
}),
new Button({
icon: IconPool.getIconURI("add")
})
]
})
]
});
this.page.placeAt("qunit-fixture");
},
teardown: function () {
this.page.destroy();
}
});
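	// Of the five buttons in the fixture above, only the last one (icon
	// only, with neither text nor tooltip) should violate the rule, hence
	// the single expected issue below.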
testRule({
executionScopeType: "subtree",
executionScopeSelectors: "buttonTestsContext",
libName: "sap.m",
ruleId: "onlyIconButtonNeedsTooltip",
expectedNumberOfIssues: 1
});
});
|
apache-2.0
|
paulseawa/p4ic4idea
|
plugin/src/net/groboclown/idea/p4ic/v2/server/connection/ServerStatusController.java
|
3419
|
/* *************************************************************************
* (c) Copyright 2015 Zilliant Inc. All rights reserved. *
* *************************************************************************
* *
* THIS MATERIAL IS PROVIDED "AS IS." ZILLIANT INC. DISCLAIMS ALL *
* WARRANTIES OF ANY KIND WITH REGARD TO THIS MATERIAL, INCLUDING, *
* BUT NOT LIMITED TO ANY IMPLIED WARRANTIES OF NONINFRINGEMENT, *
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. *
* *
* Zilliant Inc. shall not be liable for errors contained herein *
* or for incidental or consequential damages in connection with the *
* furnishing, performance, or use of this material. *
* *
* Zilliant Inc. assumes no responsibility for the use or reliability *
* of interconnected equipment that is not furnished by Zilliant Inc, *
* or the use of Zilliant software with such equipment. *
* *
* This document or software contains trade secrets of Zilliant Inc. as *
* well as proprietary information which is protected by copyright. *
* All rights are reserved. No part of this document or software may be *
* photocopied, reproduced, modified or translated to another language *
 * without the prior written consent of Zilliant Inc.                   *
* *
* ANY USE OF THIS SOFTWARE IS SUBJECT TO THE TERMS AND CONDITIONS *
* OF A SEPARATE LICENSE AGREEMENT. *
* *
* The information contained herein has been prepared by Zilliant Inc. *
* solely for use by Zilliant Inc., its employees, agents and customers. *
* Dissemination of the information and/or concepts contained herein to *
* other parties is prohibited without the prior written consent of *
* Zilliant Inc.. *
* *
* (c) Copyright 2015 Zilliant Inc. All rights reserved. *
* *
* *************************************************************************/
package net.groboclown.idea.p4ic.v2.server.connection;
/**
* An extension to the {@link ServerConnectedController} that is used by the
* {@link ClientExec} to announce when the server is connected or disconnected.
* This allows for a better, more central location for handling the connection
* information distribution.
*/
public interface ServerStatusController extends ServerConnectedController {
/**
* The server is now actually connected.
*/
void onConnected();
/**
* The server could not be reached.
*/
void onDisconnected();
/**
* A configuration problem occurred.
*/
void onConfigInvalid();
}
|
apache-2.0
|
Guille1406/The-Legend-of-Zelda-Hyrule-Conquest
|
Project/Dev_class11_handout/Motor2D/Particle_Explosion.cpp
|
7094
|
#include "Particle_Explosion.h"
#include "Particle.h"
P_Explosion::P_Explosion(Arrow* element, iPoint* object, iPoint position_static, SDL_Rect initial_rect, Explosion_Type type, iPoint perimeter_object, iPoint timelife_particle, fPoint speed_particle, Part_Direction p_direction, int num_particles, int num_textures)
{
if (element != nullptr)
{
pos.x = element->pos.x;
pos.y = element->pos.y;
arrow_to_follow = element;
object = nullptr;
}
else if (object != nullptr)
{
pos.x = object->x;
pos.y = object->y;
object_follow = object;
arrow_to_follow = nullptr;
}
else
{
pos.x = position_static.x;
pos.y = position_static.y;
object_follow = nullptr;
arrow_to_follow = nullptr;
}
//
timelife = timelife_particle;
number_particles = num_particles;
godelete = false;
size_rect = initial_rect.w;
n_textures = num_textures;
type_explosion = type;
//CIRCLE
if (type == Explosion_Type::CIRCLE)
{
pos.x -= num_particles * 2;
pos.y -= num_particles * 3;
fPoint save_pos = pos;
speed = speed_particle;
float part_entre = (num_particles - 4) / 4;
float speed_modify = -speed.y / part_entre;
int time_quart = num_particles / 2;
int num_test = 0;
float r = num_particles;
	float pr = 2; // pr is the assumed pixel aspect ratio, roughly equal to 2
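	// The nested loops below sample the implicit ellipse
	// ((i*pr)/r)^2 + (j/r)^2 = 1; points whose value d lands in the
	// band (0.95, 1.08) are treated as lying on the ring, which gives
	// a roughly circular outline once the ~2:1 pixel aspect is applied.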
for (int i = -r; i <= r; i++) // loop for horizontal movement
{
for (int j = -r; j <= r; j++) // loop for vertical movement
{
float d = ((i*pr) / r)*((i*pr) / r) + (j / r)*(j / r); //multiplying the i variable with pr to equalize pixel-width with the height
if (d >0.95 && d<1.08) // approximation
{
for (int k = 0; k < num_particles / 4; k++)
{
Particle* temp = new Particle(pos, iPoint(3, 3), timelife, speed, p_direction, initial_rect, size_rect, num_textures, true);
particle.push_back(temp);
num_test++;
}
pos.x += 3;
}
else
{
pos.x += 2;
}
}
pos.x = save_pos.x;
pos.y += 3;
}
number_particles = num_test;
}
else if (type == Explosion_Type::SEMICIRCLE_UPPER)
{
pos.x -= num_particles * 2;
pos.y -= num_particles * 3;
fPoint save_pos = pos;
speed = speed_particle;
float part_entre = (num_particles - 4) / 4;
float speed_modify = -speed.y / part_entre;
int time_quart = num_particles / 2;
int num_test = 0;
float r = num_particles;
	float pr = 2; // pr is the assumed pixel aspect ratio, roughly equal to 2
for (int i = -r; i <= 0; i++) // loop for horizontal movement
{
for (int j = -r; j <= r; j++) // loop for vertical movement
{
float d = ((i*pr) / r)*((i*pr) / r) + (j / r)*(j / r); //multiplying the i variable with pr to equalize pixel-width with the height
if (d > 0.95 && d < 1.08) // approximation
{
for (int k = 0; k < num_particles / 4; k++)
{
Particle* temp = new Particle(pos, iPoint(3, 3), timelife, speed, p_direction, initial_rect, size_rect, num_textures, true);
particle.push_back(temp);
num_test++;
}
pos.x += 3;
}
else
{
pos.x += 2;
}
}
pos.x = save_pos.x;
pos.y += 3;
}
number_particles = num_test;
}
else if (type == Explosion_Type::SEMICIRCLE_LOWER)
{
pos.x -= num_particles * 2;
pos.y -= num_particles * 3;
fPoint save_pos = pos;
speed = speed_particle;
float part_entre = (num_particles - 4) / 4;
float speed_modify = -speed.y / part_entre;
int time_quart = num_particles / 2;
int num_test = 0;
float r = num_particles;
	float pr = 2; // pr is the assumed pixel aspect ratio, roughly equal to 2
for (int i = -r; i <= 0; i++) // loop for horizontal movement
{
for (int j = -r; j <= r; j++) // loop for vertical movement
{
float d = ((i*pr) / r)*((i*pr) / r) + (j / r)*(j / r); //multiplying the i variable with pr to equalize pixel-width with the height
if (d > 0.95 && d < 1.08) // approximation
{
for (int k = 0; k < num_particles / 4; k++)
{
Particle* temp = new Particle(pos, iPoint(3, 3), timelife, speed, p_direction, initial_rect, size_rect, num_textures, true);
particle.push_back(temp);
num_test++;
}
pos.x += 3;
}
else
{
pos.x += 2;
}
}
pos.x = save_pos.x;
pos.y -= 3;
}
number_particles = num_test;
}
else if (type == Explosion_Type::CROSS)
{
speed = speed_particle;
int num_line = num_particles / 2;
int space_x = (perimeter_object.x * 2) / num_line;
int temp_x = -num_particles;
int space_y = (perimeter_object.y * 2) / num_line;
int temp_y = -num_particles;
int mid_pos = pos.x;
pos.x += temp_x;
bool setpos_y = false;
for (int i = 0; i < num_particles; i++)//
{
if (i < num_line)
{
if (i < num_line / 2)
{
speed.x = -60;
speed.y = 0;
Particle* temp = new Particle(pos, iPoint(0, 0), timelife, speed, p_direction, initial_rect, size_rect, num_textures, true);
particle.push_back(temp);
}
else
{
speed.x = 60;
speed.y = 0;
Particle* temp = new Particle(pos, iPoint(0, 0), timelife, speed, p_direction, initial_rect, size_rect, num_textures, true);
particle.push_back(temp);
}
pos.x += space_x;
setpos_y = true;
}
else
{
pos.x = mid_pos;
if (setpos_y)
{
pos.y += temp_y;
setpos_y = false;
}
if (i < num_line + num_line / 2)
{
speed.y = -60;
speed.x = 0;
Particle* temp = new Particle(pos, iPoint(0, 0), timelife, speed, p_direction, initial_rect, size_rect, num_textures, true);
particle.push_back(temp);
}
else
{
speed.y = 60;
speed.x = 0;
Particle* temp = new Particle(pos, iPoint(0, 0), timelife, speed, p_direction, initial_rect, size_rect, num_textures, true);
particle.push_back(temp);
}
pos.y += space_y;
}
}
}
else if (type == Explosion_Type::RANDOM)
{
speed = speed_particle;
for (int i = 0; i < num_particles; i++)//
{
Particle* temp = new Particle(pos, perimeter_object, timelife, speed, p_direction, initial_rect, size_rect, num_textures, true);
particle.push_back(temp);
}
}
}
P_Explosion::~P_Explosion()
{
for (std::vector<Particle*>::iterator item = particle.begin(); item != particle.cend(); ++item)
RELEASE(*item);
particle.clear();
}
bool P_Explosion::Update(float dt)
{
MoveParticles();
return true;
}
bool P_Explosion::PostUpdate()
{
render(pos);
return true;
}
void P_Explosion::render(fPoint pos)
{
int num_dead = 0;
//Check if the particle dead
for (int i = 0; i < number_particles; i++)
{
if (particle[i]->isDead())
{
num_dead++;
}
}
if (num_dead == number_particles)
{
godelete = true;
}
//Draw particles
for (int i = 0; i < number_particles; i++)
{
particle[i]->render();
}
}
void P_Explosion::MoveParticles()
{
if (type_explosion == RANDOM)
{
for (int i = 0; i < number_particles; i++)
{
particle[i]->SetSpeedGreavity(fPoint(0, 5));
}
}
for (int i = 0; i < number_particles; i++)
{
float temp = App->GetDT();
particle[i]->Move(fPoint(particle[i]->GetSpeed().x * temp, particle[i]->GetSpeed().y * temp));
}
}
|
apache-2.0
|
googleapis/java-vision
|
proto-google-cloud-vision-v1p3beta1/src/main/java/com/google/cloud/vision/v1p3beta1/ImportProductSetsRequestOrBuilder.java
|
2839
|
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/vision/v1p3beta1/product_search_service.proto
package com.google.cloud.vision.v1p3beta1;
public interface ImportProductSetsRequestOrBuilder
extends
// @@protoc_insertion_point(interface_extends:google.cloud.vision.v1p3beta1.ImportProductSetsRequest)
com.google.protobuf.MessageOrBuilder {
/**
*
*
* <pre>
* Required. The project in which the ProductSets should be imported.
* Format is `projects/PROJECT_ID/locations/LOC_ID`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
java.lang.String getParent();
/**
*
*
* <pre>
* Required. The project in which the ProductSets should be imported.
* Format is `projects/PROJECT_ID/locations/LOC_ID`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
com.google.protobuf.ByteString getParentBytes();
/**
*
*
* <pre>
* Required. The input content for the list of requests.
* </pre>
*
* <code>
* .google.cloud.vision.v1p3beta1.ImportProductSetsInputConfig input_config = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the inputConfig field is set.
*/
boolean hasInputConfig();
/**
*
*
* <pre>
* Required. The input content for the list of requests.
* </pre>
*
* <code>
* .google.cloud.vision.v1p3beta1.ImportProductSetsInputConfig input_config = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The inputConfig.
*/
com.google.cloud.vision.v1p3beta1.ImportProductSetsInputConfig getInputConfig();
/**
*
*
* <pre>
* Required. The input content for the list of requests.
* </pre>
*
* <code>
* .google.cloud.vision.v1p3beta1.ImportProductSetsInputConfig input_config = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
com.google.cloud.vision.v1p3beta1.ImportProductSetsInputConfigOrBuilder getInputConfigOrBuilder();
}
|
apache-2.0
|
techwyseintl/Northwood
|
newapp/lib/authenticated_system.rb
|
4495
|
module AuthenticatedSystem
protected
  # Returns true if the user is logged in, false otherwise.
# Preloads @current_user with the user model if they're logged in.
def logged_in?
current_user != :false
end
# Accesses the current user from the session.
def current_user
@current_user ||= (session[:user] && User.find_by_id(session[:user])) || :false
end
# Store the given user in the session.
def current_user=(new_user)
session[:user] = (new_user.nil? || new_user.is_a?(Symbol)) ? nil : new_user.id
@current_user = new_user
end
# Check if the user is authorized.
#
# Override this method in your controllers if you want to restrict access
# to only a few actions or if you want to check if the user
# has the correct rights.
#
# Example:
#
# # only allow nonbobs
# def authorize?
# current_user.login != "bob"
# end
def authorized?
true
end
# Filter method to enforce a login requirement.
#
# To require logins for all actions, use this in your controllers:
#
# before_filter :login_required
#
# To require logins for specific actions, use this in your controllers:
#
# before_filter :login_required, :only => [ :edit, :update ]
#
# To skip this in a subclassed controller:
#
# skip_before_filter :login_required
#
def login_required
email, passwd = get_auth_data
self.current_user ||= User.authenticate(email, passwd) || :false if email && passwd
logged_in? && authorized? ? true : access_denied
end
# Redirect as appropriate when an access request fails.
#
# The default action is to redirect to the login screen.
#
# Override this method in your controllers if you want to have special
# behavior in case the user is not authorized
# to access the requested action. For example, a popup window might
# simply close itself.
def access_denied
respond_to do |accepts|
accepts.html do
store_location
if self.logged_in?
respond_with_403
else
redirect_to login_url
end
end
accepts.xml do
headers["Status"] = "Unauthorized"
headers["WWW-Authenticate"] = %(Basic realm="Web Password")
render :text => "Couldn't authenticate you", :status => '401 Unauthorized'
end
end
false
end
# Store the URI of the current request in the session.
#
# We can return to this location by calling #redirect_back_or_default.
def store_location
session[:return_to] = request.request_uri
end
# Redirect to the URI stored by the most recent store_location call or
# to the passed default.
def redirect_back_or_default(default)
session[:return_to] ? redirect_to_url(session[:return_to]) : redirect_to(default)
session[:return_to] = nil
end
  # Return a link to the URI stored by the most recent store_location call or
# to the passed default.
def link_to_back_or_default(default)
link = session[:return_to] ? session[:return_to] : default
session[:return_to] = nil
link
end
# Inclusion hook to make #current_user and #logged_in?
# available as ActionView helper methods.
def self.included(base)
base.send :helper_method, :current_user, :logged_in?
end
# When called with before_filter :login_from_cookie will check for an :auth_token
  # cookie and log the user back in if appropriate
def login_from_cookie
return unless cookies[:auth_token] && !logged_in?
user = User.find_by_remember_token(cookies[:auth_token])
if user && user.remember_token?
user.remember_me
self.current_user = user
cookies[:auth_token] = { :value => self.current_user.remember_token , :expires => self.current_user.remember_token_expires_at }
flash[:notice] = "Logged in successfully"
end
end
private
@@http_auth_headers = %w(X-HTTP_AUTHORIZATION HTTP_AUTHORIZATION Authorization)
# gets BASIC auth info
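  # e.g. an (illustrative) header value "Basic <base64 of 'user@example.com:secret'>"
  # would yield ["user@example.com", "secret"]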
def get_auth_data
auth_key = @@http_auth_headers.detect { |h| request.env.has_key?(h) }
auth_data = request.env[auth_key].to_s.split unless auth_key.blank?
return auth_data && auth_data[0] == 'Basic' ? Base64.decode64(auth_data[1]).split(':')[0..1] : [nil, nil]
end
end
|
apache-2.0
|
strapdata/elassandra5-rc
|
plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/SizeMappingTests.java
|
7253
|
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper.size;
import java.util.Collection;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.NumberFieldMapper;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.LegacyNumberFieldMapper;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.SourceToParse;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.plugin.mapper.MapperSizePlugin;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.test.InternalSettingsPlugin;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.nullValue;
import static org.hamcrest.Matchers.instanceOf;
import org.apache.lucene.index.IndexableField;
public class SizeMappingTests extends ESSingleNodeTestCase {
@Override
protected Collection<Class<? extends Plugin>> getPlugins() {
return pluginList(MapperSizePlugin.class, InternalSettingsPlugin.class);
}
public void testSizeEnabled() throws Exception {
IndexService service = createIndex("test", Settings.EMPTY, "type", "_size", "enabled=true");
DocumentMapper docMapper = service.mapperService().documentMapper("type");
BytesReference source = XContentFactory.jsonBuilder()
.startObject()
.field("field", "value")
.endObject()
.bytes();
ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", source, XContentType.JSON));
boolean stored = false;
boolean points = false;
for (IndexableField field : doc.rootDoc().getFields("_size")) {
stored |= field.fieldType().stored();
points |= field.fieldType().pointDimensionCount() > 0;
}
assertTrue(stored);
assertTrue(points);
}
public void testSizeDisabled() throws Exception {
IndexService service = createIndex("test", Settings.EMPTY, "type", "_size", "enabled=false");
DocumentMapper docMapper = service.mapperService().documentMapper("type");
BytesReference source = XContentFactory.jsonBuilder()
.startObject()
.field("field", "value")
.endObject()
.bytes();
ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", source, XContentType.JSON));
assertThat(doc.rootDoc().getField("_size"), nullValue());
}
public void testSizeNotSet() throws Exception {
IndexService service = createIndex("test", Settings.EMPTY, "type");
DocumentMapper docMapper = service.mapperService().documentMapper("type");
BytesReference source = XContentFactory.jsonBuilder()
.startObject()
.field("field", "value")
.endObject()
.bytes();
ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", source, XContentType.JSON));
assertThat(doc.rootDoc().getField("_size"), nullValue());
}
public void testThatDisablingWorksWhenMerging() throws Exception {
IndexService service = createIndex("test", Settings.EMPTY, "type", "_size", "enabled=true");
DocumentMapper docMapper = service.mapperService().documentMapper("type");
assertThat(docMapper.metadataMapper(SizeFieldMapper.class).enabled(), is(true));
String disabledMapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("_size").field("enabled", false).endObject()
.endObject().endObject().string();
docMapper = service.mapperService().merge("type", new CompressedXContent(disabledMapping),
MapperService.MergeReason.MAPPING_UPDATE, false);
assertThat(docMapper.metadataMapper(SizeFieldMapper.class).enabled(), is(false));
}
public void testBWCMapper() throws Exception {
{
// IntPoint && docvalues=true for V_5_0_0_alpha5
IndexService service = createIndex("foo", Settings.EMPTY, "bar", "_size", "enabled=true");
DocumentMapper docMapper = service.mapperService().documentMapper("bar");
SizeFieldMapper mapper = docMapper.metadataMapper(SizeFieldMapper.class);
assertThat(mapper.enabled(), is(true));
MappedFieldType ft = mapper.fieldType();
assertThat(ft.hasDocValues(), is(true));
assertThat(mapper.fieldType(), instanceOf(NumberFieldMapper.NumberFieldType.class));
}
{
// IntPoint with docvalues=false if version > V_5_0_0_alpha2 && version < V_5_0_0_beta1
IndexService service = createIndex("foo2",
Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_5_0_0_alpha4.id).build(),
"bar", "_size", "enabled=true");
DocumentMapper docMapper = service.mapperService().documentMapper("bar");
SizeFieldMapper mapper = docMapper.metadataMapper(SizeFieldMapper.class);
assertThat(mapper.enabled(), is(true));
assertThat(mapper.fieldType().hasDocValues(), is(false));
assertThat(mapper.fieldType(), instanceOf(NumberFieldMapper.NumberFieldType.class));
}
{
// LegacyIntField with docvalues=false if version < V_5_0_0_alpha2
IndexService service = createIndex("foo3",
Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_5_0_0_alpha1.id).build(),
"bar", "_size", "enabled=true");
DocumentMapper docMapper = service.mapperService().documentMapper("bar");
SizeFieldMapper mapper = docMapper.metadataMapper(SizeFieldMapper.class);
assertThat(mapper.enabled(), is(true));
assertThat(mapper.fieldType().hasDocValues(), is(false));
assertThat(mapper.fieldType(), instanceOf(LegacyNumberFieldMapper.NumberFieldType.class));
}
}
}
|
apache-2.0
|
lastaflute/lastaflute-test-fortress
|
src/main/java/org/docksidestage/remote/fortress/wx/routing/restlike/lmlike/RemoteFortressWxRoutingRestlikeLmlikeBhv.java
|
1551
|
/*
* Copyright 2015-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language
* governing permissions and limitations under the License.
*/
package org.docksidestage.remote.fortress.wx.routing.restlike.lmlike;
import org.lastaflute.web.servlet.request.RequestManager;
/**
* The behavior for remote API of wx.routing.restlike.lmlike.
* <p>
* You can implement your original methods here.
* This class remains when re-generating.
* </p>
* @author FreeGen
*/
public class RemoteFortressWxRoutingRestlikeLmlikeBhv extends BsRemoteFortressWxRoutingRestlikeLmlikeBhv {
// ===================================================================================
// Constructor
// ===========
/**
* @param requestManager The manager of request, LastaFlute component. (NotNull)
*/
public RemoteFortressWxRoutingRestlikeLmlikeBhv(RequestManager requestManager) {
super(requestManager);
}
}
|
apache-2.0
|
ConsecroMUD/ConsecroMUD
|
com/suscipio_solutions/consecro_mud/Abilities/Prayers/Prayer_Sanctum.java
|
5096
|
package com.suscipio_solutions.consecro_mud.Abilities.Prayers;
import java.util.List;
import java.util.Vector;
import com.suscipio_solutions.consecro_mud.Abilities.interfaces.Ability;
import com.suscipio_solutions.consecro_mud.Common.interfaces.CMMsg;
import com.suscipio_solutions.consecro_mud.Locales.interfaces.Room;
import com.suscipio_solutions.consecro_mud.MOBS.interfaces.MOB;
import com.suscipio_solutions.consecro_mud.core.CMClass;
import com.suscipio_solutions.consecro_mud.core.CMLib;
import com.suscipio_solutions.consecro_mud.core.CMParms;
import com.suscipio_solutions.consecro_mud.core.CMath;
import com.suscipio_solutions.consecro_mud.core.interfaces.Environmental;
import com.suscipio_solutions.consecro_mud.core.interfaces.Physical;
@SuppressWarnings("rawtypes")
public class Prayer_Sanctum extends Prayer
{
@Override public String ID() { return "Prayer_Sanctum"; }
private final static String localizedName = CMLib.lang().L("Sanctum");
@Override public String name() { return localizedName; }
private final static String localizedStaticDisplay = CMLib.lang().L("(Sanctum)");
@Override public String displayText() { return localizedStaticDisplay; }
@Override public int classificationCode(){return Ability.ACODE_PRAYER|Ability.DOMAIN_WARDING;}
@Override public int abstractQuality(){ return Ability.QUALITY_OK_OTHERS;}
@Override protected int canAffectCode(){return CAN_ROOMS;}
@Override public long flags(){return Ability.FLAG_HOLY|Ability.FLAG_UNHOLY;}
protected boolean inRoom(MOB mob, Room R)
{
if(!CMLib.law().doesAnyoneHavePrivilegesHere(mob, text(), R))
{
mob.tell(L("You feel your muscles unwilling to cooperate."));
return false;
}
return true;
}
@Override
public boolean okMessage(final Environmental myHost, final CMMsg msg)
{
if(affected==null)
return super.okMessage(myHost,msg);
final Room R=(Room)affected;
if((msg.targetMinor()==CMMsg.TYP_ENTER)
&&(msg.target()==R)
&&(!msg.source().Name().equals(text()))
&&(msg.source().getClanRole(text())==null)
&&((msg.source().amFollowing()==null)
||((!msg.source().amFollowing().Name().equals(text()))
&&(msg.source().amFollowing().getClanRole(text())==null)))
&&(!CMLib.law().doesHavePriviledgesHere(msg.source(),R)))
{
msg.source().tell(L("You feel your muscles unwilling to cooperate."));
return false;
}
if((CMath.bset(msg.sourceMajor(),CMMsg.MASK_MALICIOUS))
||(CMath.bset(msg.targetMajor(),CMMsg.MASK_MALICIOUS))
||(CMath.bset(msg.othersMajor(),CMMsg.MASK_MALICIOUS)))
{
if((msg.source()!=null)
&&(msg.target()!=null)
&&(msg.source()!=affected)
&&(msg.source()!=msg.target()))
{
if(affected instanceof MOB)
{
final MOB mob=(MOB)affected;
if((CMLib.flags().aliveAwakeMobile(mob,true))
&&(!mob.isInCombat()))
{
String t="No fighting!";
if(text().indexOf(';')>0)
{
final List<String> V=CMParms.parseSemicolons(text(),true);
t=V.get(CMLib.dice().roll(1,V.size(),-1));
}
CMLib.commands().postSay(mob,msg.source(),t,false,false);
}
else
return super.okMessage(myHost,msg);
}
else
{
String t="You feel too peaceful here.";
if(text().indexOf(';')>0)
{
final List<String> V=CMParms.parseSemicolons(text(),true);
t=V.get(CMLib.dice().roll(1,V.size(),-1));
}
msg.source().tell(t);
}
final MOB victim=msg.source().getVictim();
if(victim!=null) victim.makePeace();
msg.source().makePeace();
msg.modify(msg.source(),msg.target(),msg.tool(),CMMsg.NO_EFFECT,"",CMMsg.NO_EFFECT,"",CMMsg.NO_EFFECT,"");
return false;
}
}
return super.okMessage(myHost,msg);
}
@Override
public boolean invoke(MOB mob, Vector commands, Physical givenTarget, boolean auto, int asLevel)
{
final Physical target=mob.location();
if(target==null) return false;
if(target.fetchEffect(ID())!=null)
{
mob.tell(L("This place is already a sanctum."));
return false;
}
if(!super.invoke(mob,commands,givenTarget,auto,asLevel))
return false;
final boolean success=proficiencyCheck(mob,0,auto);
if(success)
{
final CMMsg msg=CMClass.getMsg(mob,target,this,verbalCastCode(mob,target,auto),auto?"":L("^S<S-NAME> @x1 to make this place a sanctum.^?",prayForWord(mob)));
if(mob.location().okMessage(mob,msg))
{
mob.location().send(mob,msg);
setMiscText(mob.Name());
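// Editor's note: clan-owned property keys the effect to the clan name; other owned
// rooms get a permanent (non-uninvokable) effect persisted to the database.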
if((target instanceof Room)
&&(CMLib.law().doesOwnThisProperty(mob,((Room)target))))
{
final String landOwnerName=CMLib.law().getLandOwnerName((Room)target);
if(CMLib.clans().getClan(landOwnerName)!=null)
{
setMiscText(landOwnerName);
beneficialAffect(mob,target,asLevel,0);
}
else
{
target.addNonUninvokableEffect((Ability)this.copyOf());
CMLib.database().DBUpdateRoom((Room)target);
}
}
else
beneficialAffect(mob,target,asLevel,0);
}
}
else
beneficialWordsFizzle(mob,target,L("<S-NAME> @x1 to make this place a sanctum, but <S-IS-ARE> not answered.",prayForWord(mob)));
return success;
}
}
|
apache-2.0
|
klarna/HiveRunner
|
src/test/java/com/klarna/hiverunner/CommentTest.java
|
1388
|
/**
* Copyright (C) 2013-2021 Klarna AB
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.klarna.hiverunner;
import static org.junit.jupiter.api.Assertions.assertEquals;
import java.util.Arrays;
import java.util.List;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import com.klarna.hiverunner.annotations.HiveSQL;
@ExtendWith(HiveRunnerExtension.class)
public class CommentTest {
@HiveSQL(files = {"CommentTest/comment.sql"})
public HiveShell hiveShell;
@Test
public void testPrecedingFullLineComment() {
List<String> results = hiveShell.executeQuery("set x");
assertEquals(Arrays.asList("x=1"), results);
}
@Test
public void testFullLineCommentInsideDeclaration() {
List<String> results = hiveShell.executeQuery("set y");
assertEquals(Arrays.asList("y=\"", "\""), results);
}
}
|
apache-2.0
|
finmath/finmath-experiments
|
src/main/java/net/finmath/experiments/montecarlo/BrownianMotionTests.java
|
2942
|
/*
* (c) Copyright Christian P. Fries, Germany. All rights reserved. Contact: email@christian-fries.de.
*
* Created on 10.02.2004
*/
package net.finmath.experiments.montecarlo;
import java.text.DecimalFormat;
import net.finmath.montecarlo.BrownianMotionLazyInit;
import net.finmath.montecarlo.RandomVariableFromDoubleArray;
import net.finmath.stochastic.RandomVariable;
import net.finmath.time.TimeDiscretization;
import net.finmath.time.TimeDiscretizationFromArray;
/**
* @author Christian Fries
*
*/
public class BrownianMotionTests {
static final DecimalFormat formatterReal2 = new DecimalFormat("0.00");
static final DecimalFormat formatterSci4 = new DecimalFormat(" 0.0000E00;-0.0000E00");
public static void main(String[] args)
{
// The parameters
final int numberOfPaths = 10000;
final int seed = 53252;
final double lastTime = 4.0;
final double dt = 0.1;
// Create the time discretization
final TimeDiscretization timeDiscretization = new TimeDiscretizationFromArray(0.0, (int)(lastTime/dt), dt);
// Test the quality of the Brownian motion
final BrownianMotionLazyInit brownian = new BrownianMotionLazyInit(
timeDiscretization,
1,
numberOfPaths,
seed
);
System.out.println("Average, variance and other properties of a BrownianMotionLazyInit.\nTime step size (dt): " + dt + " Number of path: " + numberOfPaths + "\n");
System.out.println(" " + "\t" + " int dW " + "\t" + " " + "\t" + "int dW dW" + "\t" + " ");
System.out.println("time " + "\t" + " mean " + "\t" + " var " + "\t" + " mean " + "\t" + " var ");
RandomVariable brownianMotionRealization = new RandomVariableFromDoubleArray(0.0);
RandomVariable sumOfSquaredIncrements = new RandomVariableFromDoubleArray(0.0);
for(int timeIndex=0; timeIndex<timeDiscretization.getNumberOfTimeSteps(); timeIndex++) {
final RandomVariable brownianIncrement = brownian.getBrownianIncrement(timeIndex,0);
// Calculate W(t+dt) from dW; RandomVariable is immutable, so the result must be reassigned
brownianMotionRealization = brownianMotionRealization.add(brownianIncrement);
final double time = timeDiscretization.getTime(timeIndex);
final double mean = brownianMotionRealization.getAverage();
final double variance = brownianMotionRealization.getVariance();
// Calculate x = \int dW(t) * dW(t)
final RandomVariable squaredIncrements = brownianIncrement.squared();
sumOfSquaredIncrements = sumOfSquaredIncrements.add(squaredIncrements);
final double meanOfSumOfSquaredIncrements = sumOfSquaredIncrements.getAverage();
final double varianceOfSumOfSquaredIncrements = sumOfSquaredIncrements.getVariance();
System.out.println(
        formatterReal2.format(time) + "\t" +
        formatterSci4.format(mean) + "\t" +
        formatterSci4.format(variance) + "\t" +
        formatterSci4.format(meanOfSumOfSquaredIncrements) + "\t" +
        formatterSci4.format(varianceOfSumOfSquaredIncrements) + "\t" +
        ""
);
}
}
}
|
apache-2.0
|
reportportal/commons-model
|
src/main/java/com/epam/ta/reportportal/ws/model/launch/UpdateLaunchRQ.java
|
2173
|
/*
* Copyright 2019 EPAM Systems
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.epam.ta.reportportal.ws.model.launch;
import com.epam.ta.reportportal.ws.model.attribute.ItemAttributeResource;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.fasterxml.jackson.annotation.JsonProperty;
import io.swagger.annotations.ApiModelProperty;
import javax.validation.Valid;
import javax.validation.constraints.Size;
import java.util.Set;
import static com.epam.ta.reportportal.ws.model.ValidationConstraints.MAX_PARAMETERS_LENGTH;
/**
* Domain object for updating launch object.
*
* @author Aliaksei_Makayed
*/
@JsonInclude(Include.NON_NULL)
public class UpdateLaunchRQ {
@JsonProperty("mode")
@ApiModelProperty(allowableValues = "DEFAULT, DEBUG")
private Mode mode;
@JsonProperty("description")
private String description;
@Size(max = MAX_PARAMETERS_LENGTH)
@Valid
@JsonProperty("attributes")
private Set<ItemAttributeResource> attributes;
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public Set<ItemAttributeResource> getAttributes() {
return attributes;
}
public void setAttributes(Set<ItemAttributeResource> attributes) {
this.attributes = attributes;
}
public Mode getMode() {
return mode;
}
public void setMode(Mode mode) {
this.mode = mode;
}
@Override
public String toString() {
final StringBuilder sb = new StringBuilder("UpdateLaunchRQ{");
sb.append("mode=").append(mode);
sb.append('}');
return sb.toString();
}
}
|
apache-2.0
|
xisberto/workschedule
|
src/net/xisberto/work_schedule/history/InstrucionDialog.java
|
2630
|
/*******************************************************************************
* Copyright 2014 xisberto
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package net.xisberto.work_schedule.history;
import net.xisberto.work_schedule.R;
import net.xisberto.work_schedule.settings.Settings;
import android.app.AlertDialog;
import android.app.Dialog;
import android.content.DialogInterface;
import android.content.DialogInterface.OnClickListener;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.widget.CheckBox;
import com.actionbarsherlock.app.SherlockDialogFragment;
public class InstrucionDialog extends SherlockDialogFragment {
public interface InstructionCallback {
public void onInstructionsAccepted();
}
private InstructionCallback callback;
private View view;
public static InstrucionDialog newInstance(InstructionCallback callback) {
InstrucionDialog dialog = new InstrucionDialog();
dialog.callback = callback;
return dialog;
}
public void setInstructionCallback(InstructionCallback call) {
this.callback = call;
}
@Override
public Dialog onCreateDialog(Bundle savedInstanceState) {
OnClickListener clickCallback = new OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
switch (which) {
case DialogInterface.BUTTON_POSITIVE:
CheckBox checkBox = (CheckBox) view
.findViewById(R.id.check_show_instructions);
if (checkBox.isChecked()) {
Settings.getInstance(getActivity()).setShowInstructions(false);
}
callback.onInstructionsAccepted();
break;
default:
break;
}
}
};
view = LayoutInflater.from(getActivity()).inflate(R.layout.dialog_instructions,
null);
AlertDialog.Builder builder = new AlertDialog.Builder(getActivity());
builder.setTitle(R.string.app_name).setView(view)
.setPositiveButton(android.R.string.ok, clickCallback)
.setNegativeButton(android.R.string.cancel, clickCallback);
return builder.create();
}
}
|
apache-2.0
|
tellesnobrega/storm_plugin
|
sahara/service/edp/workflow_creator/java_workflow.py
|
1791
|
# Copyright (c) 2013 RedHat Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from sahara.service.edp.workflow_creator import base_workflow
from sahara.utils import xmlutils as x
class JavaWorkflowCreator(base_workflow.OozieWorkflowCreator):
def __init__(self):
super(JavaWorkflowCreator, self).__init__('java')
    def build_workflow_xml(self, main_class,
                           prepare=None,
                           job_xml=None,
                           configuration=None,
                           java_opts=None,
                           arguments=None,
                           files=None, archives=None):
        # use None defaults to avoid shared mutable default arguments
        prepare = prepare or {}
        arguments = arguments or []
        files = files or []
        archives = archives or []
        for k, v in prepare.items():
            self._add_to_prepare_element(k, v)
self._add_job_xml_element(job_xml)
self._add_configuration_elements(configuration)
x.add_text_element_to_tag(self.doc, self.tag_name,
'main-class', main_class)
if java_opts:
x.add_text_element_to_tag(self.doc, self.tag_name,
'java-opts', java_opts)
for arg in arguments:
x.add_text_element_to_tag(self.doc, self.tag_name,
'arg', arg)
self._add_files_and_archives(files, archives)
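# Editor's note: a minimal usage sketch with hypothetical values ('org.example.Main'
# and the settings below are illustrative, not part of sahara):
if __name__ == '__main__':
    creator = JavaWorkflowCreator()
    creator.build_workflow_xml(
        'org.example.Main',
        configuration={'mapred.reduce.tasks': '1'},
        java_opts='-Xmx512m',
        arguments=['input', 'output'])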
|
apache-2.0
|
PavelZubaha/pzubaha
|
chapter_002/tracker/src/main/java/ru/pzubaha/gui/HeaderBar.java
|
1227
|
package ru.pzubaha.gui;
/**
* Chapter 2. OOP.
* Lesson 4. Polymorphism.
*
* Class HeaderBar renders the header part of the GUI menu.
* Class contains solution of task 396.
*
* @author Pavel Zubaha (mailto:Apximar@gmail.com)
* @since 25.05.2017
* @version 2
*/
public class HeaderBar {
/**
* Header for menu.
*/
private StringBuilder headerMenu = new StringBuilder();
/**
* Holds the copied id.
*/
private String copiedId = null;
/**
* Constructor.
*/
public HeaderBar() {
this.headerMenu.append(String.format("%n==================%n T R A C K E R%n==================%n"));
}
/**
* getting StringBuilder instance of header menu.
* @return header of menu.
*/
public StringBuilder getHeaderBar() {
StringBuilder result = new StringBuilder();
result.append(this.headerMenu);
if (this.copiedId != null) {
result.append(String.format("Copied ID: %s%n", copiedId));
}
return result;
}
/**
* for setting copiedId.
* @param copiedId - copied by user id.
*/
protected void setCopiedId(String copiedId) {
this.copiedId = copiedId;
}
/**
* for getting copied Id.
* @return - copied by user id.
*/
protected String getCopiedId() {
return copiedId;
}
}
|
apache-2.0
|
equella/Equella
|
Source/Plugins/Core/com.equella.admin/src/com/dytech/edge/admin/script/ifmodel/Equals.java
|
1096
|
/*
* Licensed to The Apereo Foundation under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
*
* The Apereo Foundation licenses this file to you under the Apache License,
* Version 2.0, (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.dytech.edge.admin.script.ifmodel;
public class Equals implements Equality {
public Equals() {
// Nothing to see here, move along...
}
@Override
public String toScript() {
return "==";
}
@Override
public String toEasyRead() {
return "<b>=</b>";
}
}
|
apache-2.0
|
java-prolog-connectivity/jpc
|
src/main/java/org/jpc/error/RepresentationError.java
|
198
|
package org.jpc.error;
import org.jpc.term.Compound;
public class RepresentationError extends IsoPrologError {
public RepresentationError(Compound exceptionTerm) {
super(exceptionTerm);
}
}
|
apache-2.0
|
AndriyBas/CBLMessenger
|
01-Basic/app/src/main/java/com/explain/cblmessenger/utils/Utils.java
|
2137
|
package com.explain.cblmessenger.utils;
import android.content.Context;
import android.net.ConnectivityManager;
import android.net.NetworkInfo;
import android.widget.Toast;
import com.explain.cblmessenger.CBLMessenger;
import com.explain.cblmessenger.Const;
import com.explain.cblmessenger.common.logger.Log;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import java.text.ParseException;
import java.util.Date;
/**
* has some global utility functions
* Created by bamboo on 23.08.14.
*/
public class Utils {
/**
* Check if the network is connected
*
* @param c Context of the caller
* @return true - network is connected, false - else
*/
public static boolean isNetworkConnected(Context c) {
ConnectivityManager connectivityManager = (ConnectivityManager)
c.getSystemService(Context.CONNECTIVITY_SERVICE);
NetworkInfo info = connectivityManager.getActiveNetworkInfo();
if (info != null && info.isConnected()) {
return true;
}
return false;
}
/**
* @return current GMT/UTC time as java Date Object
*/
public static Date getGMTDateTimeAsDate() {
// use joda time to easily get global time
return new DateTime(DateTimeZone.UTC).toDate();
}
/**
* @return current GMT/UTC time in format [yyyy-MM-dd HH:mm:ss]
*/
public static String getGMTDateTimeAsString() {
// get global time
Date date = getGMTDateTimeAsDate();
// format it
final String gmtTime = Const.SDF.format(date);
Log.d("time", gmtTime);
return gmtTime;
}
public static Date stringDateToDate(String strDate) {
Date dateToReturn = null;
try {
dateToReturn = Const.SDF.parse(strDate);
} catch (ParseException e) {
e.printStackTrace();
}
return dateToReturn;
}
/**
* @param message the text to show in a short toast
*/
public static void toast(String message) {
Toast.makeText(CBLMessenger.getAppContext(), message, Toast.LENGTH_SHORT)
.show();
}
}
|
apache-2.0
|
V119/spidersManager
|
src/com/sicdlib/service/IStopWordsService.java
|
136
|
package com.sicdlib.service;
import java.util.List;
public interface IStopWordsService {
public List<String> getAllStopWords();
}
|
apache-2.0
|
google/grr
|
grr/server/grr_response_server/databases/mem_test_base.py
|
359
|
#!/usr/bin/env python
"""Base class for all memory database tests."""
from grr_response_server.databases import db_test_mixin
from grr_response_server.databases import mem
class MemoryDBTestBase(db_test_mixin.DatabaseSetupMixin):
def CreateDatabase(self):
return mem.InMemoryDB(), None
def CreateBlobStore(self):
return self.CreateDatabase()
|
apache-2.0
|
his-eg/plugfy
|
src/sample/net/sf/plugfy/sample/SampleFieldInstance.java
|
658
|
/*
* Copyright 2013
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may use this file in compliance with the Apache License, Version 2.0.
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.sf.plugfy.sample;
/**
* a sample field type
*
* @author hendrik
*/
public class SampleFieldInstance extends SampleField {
// empty
}
|
apache-2.0
|
leafclick/intellij-community
|
platform/platform-impl/src/com/intellij/ide/actions/QuickChangeSchemesAction.java
|
1218
|
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.ide.actions;
import com.intellij.featureStatistics.FeatureUsageTracker;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.project.DumbAware;
import com.intellij.openapi.project.Project;
import org.jetbrains.annotations.NotNull;
public class QuickChangeSchemesAction extends QuickSwitchSchemeAction implements DumbAware {
@Override
protected void fillActions(Project project, @NotNull DefaultActionGroup group, @NotNull DataContext dataContext) {
final AnAction[] actions = getGroup().getChildren(null);
for (AnAction action : actions) {
group.add(action);
}
}
@Override
protected String getPopupTitle(@NotNull AnActionEvent e) {
return "Switch...";
}
@Override
public void actionPerformed(@NotNull AnActionEvent e) {
super.actionPerformed(e);
FeatureUsageTracker.getInstance().triggerFeatureUsed("ui.scheme.quickswitch");
}
private static DefaultActionGroup getGroup() {
return (DefaultActionGroup)ActionManager.getInstance().getAction(IdeActions.GROUP_CHANGE_SCHEME);
}
}
|
apache-2.0
|
googleapis/google-api-php-client-services
|
src/CloudNaturalLanguage/AnalyzeEntitiesResponse.php
|
1504
|
<?php
/*
* Copyright 2014 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
namespace Google\Service\CloudNaturalLanguage;
class AnalyzeEntitiesResponse extends \Google\Collection
{
protected $collection_key = 'entities';
protected $entitiesType = Entity::class;
protected $entitiesDataType = 'array';
/**
* @var string
*/
public $language;
/**
* @param Entity[]
*/
public function setEntities($entities)
{
$this->entities = $entities;
}
/**
* @return Entity[]
*/
public function getEntities()
{
return $this->entities;
}
/**
* @param string
*/
public function setLanguage($language)
{
$this->language = $language;
}
/**
* @return string
*/
public function getLanguage()
{
return $this->language;
}
}
// Adding a class alias for backwards compatibility with the previous class name.
class_alias(AnalyzeEntitiesResponse::class, 'Google_Service_CloudNaturalLanguage_AnalyzeEntitiesResponse');
|
apache-2.0
|
hmrc/amls-frontend
|
app/utils/JsonMapping.scala
|
3104
|
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package utils
import jto.validation._
import jto.validation.ValidationError
import play.api.libs.json.{PathNode => _, _}
import cats.data.Validated.{Invalid, Valid}
trait JsonMapping {
import play.api.libs.json
import play.api.libs.json.{JsPath, JsValue, Reads, Writes, JsSuccess, JsError}
import jto.validation.{KeyPathNode, IdxPathNode, PathNode}
def nodeToJsNode(n: PathNode): json.PathNode = {
n match {
case KeyPathNode(key) =>
json.KeyPathNode(key)
case IdxPathNode(idx) =>
json.IdxPathNode(idx)
}
}
private def pathToJsPath(p: Path): JsPath =
JsPath(p.path.map(nodeToJsNode _))
def convertError(error: ValidationError): play.api.libs.json.JsonValidationError = {
play.api.libs.json.JsonValidationError(error.message, error.args)
}
implicit def convertValidationErrors(errors: Seq[ValidationError]): Seq[play.api.libs.json.JsonValidationError] = {
errors.map(convertError(_))
}
implicit def errorConversion(errs: Seq[(Path, Seq[ValidationError])]): Seq[(JsPath, Seq[play.api.libs.json.JsonValidationError])] =
errs map {
case (path, errors) =>
(pathToJsPath(path), convertValidationErrors(errors))
}
implicit def genericJsonR[A]
(implicit
rule: Rule[JsValue, A]
): Reads[A] =
Reads {
json =>
rule.validate(json) match {
case Valid(x) => JsSuccess(x)
case Invalid(error) => JsError(error)
}
}
implicit def genericJsonW[A]
(implicit
write: Write[A, JsValue]
): Writes[A] =
Writes {
a =>
write.writes(a)
}
// This is here to prevent NoSuchMethodErrors from the validation library
implicit def pickInJson[II <: JsValue, O](p: Path)(implicit r: RuleLike[JsValue, O]): Rule[II, O] = {
def search(path: Path, json: JsValue): Option[JsValue] = path.path match {
case KeyPathNode(k) :: t =>
json match {
case JsObject(js) =>
js.find(_._1 == k).flatMap(kv => search(Path(t), kv._2))
case _ => None
}
case IdxPathNode(i) :: t =>
json match {
case JsArray(js) => js.lift(i).flatMap(j => search(Path(t), j))
case _ => None
}
case Nil => Some(json)
}
Rule[II, JsValue] { json =>
search(p, json) match {
case None => Invalid(Seq(Path -> Seq(ValidationError("error.required"))))
case Some(js) => Valid(js)
}
}.andThen(r)
}
}
object JsonMapping extends JsonMapping
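// Editor's note: a hypothetical usage sketch, not part of the original file. With an
// implicit Rule[JsValue, A] in scope, `genericJsonR[A]` supplies play-json's Reads[A],
// so `Json.parse(s).validate[A]` works after `import utils.JsonMapping._`; genericJsonW
// does the same for Writes via an implicit Write[A, JsValue].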
|
apache-2.0
|
mwl/gwt-upload
|
core/src/main/java/gwtupload/server/UploadAction.java
|
8110
|
/*
* Copyright 2010 Manuel Carrasco Moñino. (manolo at apache/org)
* http://code.google.com/p/gwtupload
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package gwtupload.server;
import static gwtupload.shared.UConsts.TAG_CANCELED;
import static gwtupload.shared.UConsts.TAG_ERROR;
import gwtupload.server.exceptions.UploadActionException;
import gwtupload.server.exceptions.UploadCanceledException;
import gwtupload.shared.UConsts;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
import javax.servlet.ServletConfig;
import javax.servlet.ServletContext;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.fileupload.FileItem;
import org.apache.commons.lang3.tuple.Pair;
/**
* <p>Class used to manipulate the data received in the server side.</p>
*
* The user has to implement the method executeAction which receives the list of the FileItems
* sent to the server. Each FileItem represents a file or a form field.
*
* <p>Note: Temporary files are not deleted until the user calls removeSessionFiles(request).</p>
*
* @author Manolo Carrasco Moñino
*
*/
public class UploadAction extends UploadServlet {
private static final long serialVersionUID = -6790246163691420791L;
private boolean removeSessionFiles = false;
private boolean removeData = false;
@Override
public void init(ServletConfig config) throws ServletException {
super.init(config);
ServletContext ctx = config.getServletContext();
removeSessionFiles = "true".equalsIgnoreCase(ctx.getInitParameter("removeSessionFiles"));
removeData = "true".equalsIgnoreCase(ctx.getInitParameter("removeData"));
logger.info("UPLOAD-ACTION init: removeSessionFiles=" + removeSessionFiles + ", removeData=" + removeData);
}
/**
* Returns the content of a file as an InputStream if it is found in the
* FileItem vector.
*
* @param sessionFiles collection of files sent by the client
* @param parameter field name or file name of the desired file
* @return an InputStream
*/
public static InputStream getFileStream(List<FileItem> sessionFiles, String parameter) throws IOException {
FileItem item = findFileItem(sessionFiles, parameter);
return item == null ? null : item.getInputStream();
}
/**
* Returns the value of a text field present in the FileItem collection.
*
* @param sessionFiles collection of fields sent by the client
* @param fieldName field name
* @return the string value
*/
public static String getFormField(List<FileItem> sessionFiles, String fieldName) {
FileItem item = findItemByFieldName(sessionFiles, fieldName);
return item == null || !item.isFormField() ? null : item.getString();
}
/**
* This method is called when all data is received in the server.
*
* Temporary files are not deleted until the user calls removeSessionFileItems(request)
*
* Override this method to customize the behavior
*
* @param request
* @param sessionFiles
*
* @return the text/html message to be sent to the client.
* In the case of null the standard response configured for this
* action will be sent.
*
* @throws UploadActionException
* In the case of error
*
*/
public String executeAction(HttpServletRequest request, List<FileItem> sessionFiles) throws UploadActionException {
return null;
}
/**
* This method is called when a received file is requested to be removed and
* is in the collection of items stored in session.
* If the item doesn't exist in session, this method is not called.
*
* After it, the item is removed from the session items collection.
*
* Override this method to customize the behavior
*
* @param request
* @param item The item in session
*
* @throws UploadActionException
* In the case of an error, the exception message is returned to
* the client and the item is not deleted from session
*
*/
public void removeItem(HttpServletRequest request, FileItem item) throws UploadActionException {
}
/**
* This method is called when a received file is requested to be removed.
* After it, the item is removed from the session items collection.
*
* Override this method to customize the behavior
*
* @param request
* @param fieldName The name of the filename input
*
* @throws UploadActionException
* In the case of an error, the exception message is returned to
* the client and the item is not deleted from session
*
*/
public void removeItem(HttpServletRequest request, String fieldName) throws UploadActionException {
}
protected void doGet(HttpServletRequest request, HttpServletResponse response) throws IOException, ServletException {
String parameter = request.getParameter(UConsts.PARAM_REMOVE);
if (parameter != null) {
try {
removeItem(request, parameter);
FileItem item = super.findFileItem(getMySessionFileItems(request), parameter);
if (item != null) {
removeItem(request, item);
}
} catch (Exception e) {
renderXmlResponse(request, response, "<" + TAG_ERROR + ">" + e.getMessage() + "</" + TAG_ERROR + ">");
return;
}
super.removeUploadedFile(request, response);
} else {
super.doGet(request, response);
}
}
protected void doPost(HttpServletRequest request, HttpServletResponse response) throws IOException, ServletException {
String error = null;
String message = null;
perThreadRequest.set(request);
try {
// Receive the files and form elements, updating the progress status
error = super.parsePostRequest(request, response);
if (error == null) {
// Call to the user code
message = executeAction(request, getMyLastReceivedFileItems(request));
}
} catch (UploadCanceledException e) {
renderXmlResponse(request, response, "<" + TAG_CANCELED + ">true</" + TAG_CANCELED + ">");
return;
} catch (UploadActionException e) {
logger.info("ExecuteUploadActionException when receiving a file.", e);
error = e.getMessage();
} catch (Exception e) {
logger.info("Unknown Exception when receiving a file.", e);
error = e.getMessage();
} finally {
perThreadRequest.set(null);
}
AbstractUploadListener listener = getCurrentListener(request);
if (error != null) {
renderXmlResponse(request, response, "<" + TAG_ERROR + ">" + error + "</" + TAG_ERROR + ">");
if (listener != null) {
listener.setException(new RuntimeException(error));
}
UploadServlet.removeSessionFileItems(request);
} else {
List<Pair<String, String>> stat = new ArrayList<Pair<String, String>>();
getFileItemsSummary(request, stat);
if (message != null) {
// see issue #139
stat.add(Pair.of("message", "<![CDATA[" + message + "]]>"));
}
renderXmlResponse(request, response, statusToString(stat), true);
}
finish(request);
if (removeSessionFiles) {
removeSessionFileItems(request, removeData);
}
}
}
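// Editor's note: a minimal usage sketch (hypothetical, not part of the original library).
// As the class javadoc describes, users subclass UploadAction and implement executeAction;
// the "comment" field name below is illustrative only.
class SampleUploadAction extends UploadAction {
  private static final long serialVersionUID = 1L;
  @Override
  public String executeAction(HttpServletRequest request, List<FileItem> sessionFiles) throws UploadActionException {
    // getFormField returns null when the field is absent or is not a form field
    String comment = getFormField(sessionFiles, "comment");
    return "received " + sessionFiles.size() + " items, comment=" + comment;
  }
}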
|
apache-2.0
|
phrocker/sharkbite
|
native-iterators-jni/src/main/java/org/poma/accumulo/WrappedIterator.java
|
2520
|
package org.poma.accumulo;
import org.apache.accumulo.core.data.ArrayByteSequence;
import org.apache.accumulo.core.data.ByteSequence;
import org.apache.accumulo.core.data.Range;
import org.apache.accumulo.core.data.Value;
import org.apache.accumulo.core.iterators.IteratorEnvironment;
import org.apache.accumulo.core.iterators.SortedKeyValueIterator;
import pysharkbite.Key;
import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import java.util.Map;
import java.util.stream.Collectors;
public class WrappedIterator implements SortedKeyValueIterator<Key,Value>{
private final SortedKeyValueIterator<org.apache.accumulo.core.data.Key, Value> sortedKeyValueIterator;
WrappedIterator(SortedKeyValueIterator<org.apache.accumulo.core.data.Key, Value> sortedKeyValueIterator){
this.sortedKeyValueIterator=sortedKeyValueIterator;
}
public void seek(Range range, boolean inclusive, Collection<String> fams) throws IOException {
Collection<ByteSequence> families = fams.stream().map( x -> new ArrayByteSequence(x)).collect(Collectors.toList());
sortedKeyValueIterator.seek(range,families,inclusive);
}
public void seek(Range range, boolean inclusive) throws IOException {
Collection<ByteSequence> families = Collections.EMPTY_LIST;
sortedKeyValueIterator.seek(range,families,inclusive);
}
public void seek(Range range) throws IOException {
Collection<ByteSequence> families = Collections.EMPTY_LIST;
sortedKeyValueIterator.seek(range,families,false);
}
/***
* wrapped methods
*/
@Override
public Key getTopKey() {
return new Key(sortedKeyValueIterator.getTopKey());
}
public Value getTopValue() {
return sortedKeyValueIterator.getTopValue();
}
@Override
public SortedKeyValueIterator<Key, Value> deepCopy(IteratorEnvironment iteratorEnvironment) {
return null;
}
@Override
public void init(SortedKeyValueIterator<Key, Value> sortedKeyValueIterator, Map<String, String> map, IteratorEnvironment iteratorEnvironment) throws IOException {
}
public boolean hasTop() {
return sortedKeyValueIterator.hasTop();
}
public void next() throws IOException {
sortedKeyValueIterator.next();
}
public void seek(Range range, Collection<ByteSequence> columnFamilies, boolean inclusive) throws IOException {
sortedKeyValueIterator.seek(range,columnFamilies,inclusive);
}
}
|
apache-2.0
|
FreemapSlovakia/freemap-v3-react
|
src/components/gallery/galleryTileRenderrer.ts
|
4869
|
import color from 'color';
import { GalleryColorizeBy } from 'fm3/actions/galleryActions';
import { LatLon } from 'fm3/types/common';
import { LatLng } from 'leaflet';
type Marble = LatLon & {
rating: number;
userId: number;
createdAt: number;
takenAt?: number | null;
};
type Props = {
tile: HTMLCanvasElement | OffscreenCanvas;
zoom: number;
dpr: number;
colorizeBy: GalleryColorizeBy | null;
data: Marble[];
myUserId: number | null;
size: { x: number; y: number };
pointB: LatLng;
pointA: LatLng;
};
export function renderGalleryTile({
tile,
zoom,
dpr,
colorizeBy,
data,
myUserId,
size,
pointB,
pointA,
}: Props): void {
const ctx = (tile as any).getContext('2d');
if (!ctx) {
throw Error('no context');
}
const zk = Math.min(1, 1.1 ** zoom / 3);
ctx.scale(dpr, dpr);
ctx.strokeStyle = '#000';
ctx.fillStyle = '#ff0';
ctx.lineWidth = zk; // zoom > 9 ? 1.5 : 1;
const k = 2 ** zoom;
const s = new Set();
let items: Marble[];
if (colorizeBy === 'userId') {
items = data
.map((a) => ({ sort: Math.random(), value: a }))
.sort((a, b) => a.sort - b.sort)
.map((a) => a.value);
} else if (
colorizeBy === 'takenAt' ||
colorizeBy === 'createdAt' ||
colorizeBy === 'rating'
) {
items = data
.map((a) => ({ sort: Number(a[colorizeBy]), value: a }))
.sort((a, b) => a.sort - b.sort)
.map((a) => a.value);
} else if (colorizeBy === 'mine') {
items = data
.map((a) => ({
sort: a.userId === myUserId ? 1 : 0,
value: a,
}))
.sort((a, b) => a.sort - b.sort)
.map((a) => a.value);
} else {
items = data;
}
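// Editor's note: the ordering above matters twice: it sets draw order, and it decides
// which marble survives the density filter below (later items win, since the list is
// reversed before deduplication and reversed back afterwards).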
// remove "dense" pictures
const marbles: Marble[] = items
.reverse()
.map(({ lat, lon, ...rest }) => {
return {
lat: Math.round(lat * k),
lon: Math.round(lon * k),
...rest,
};
})
.filter(({ lat, lon }) => {
const key = `${lat},${lon}`;
const has = s.has(key);
if (!has) {
s.add(key);
}
return !has;
})
.map(({ lat, lon, ...rest }) => ({
lat: lat / k,
lon: lon / k,
...rest,
}))
.reverse();
for (const { lat, lon } of marbles) {
const y =
size.y - ((lat - pointB.lat) / (pointA.lat - pointB.lat)) * size.y;
const x = ((lon - pointA.lng) / (pointB.lng - pointA.lng)) * size.x;
ctx.beginPath();
ctx.arc(x, y, 4 * zk, 0, 2 * Math.PI);
ctx.stroke();
}
ctx.lineWidth = 0.25 * zk; // zoom > 9 ? 1.5 : 1;
const now = Date.now() / 1000;
for (const { lat, lon, rating, createdAt, takenAt, userId } of marbles) {
const y =
size.y - ((lat - pointB.lat) / (pointA.lat - pointB.lat)) * size.y;
const x = ((lon - pointA.lng) / (pointB.lng - pointA.lng)) * size.x;
ctx.beginPath();
ctx.arc(x, y, 3.5 * zk, 0, 2 * Math.PI);
switch (colorizeBy) {
case 'userId':
ctx.fillStyle = color.lch(90, 70, -userId * 11313).hex();
break;
case 'rating':
ctx.fillStyle = color
.hsv(60, 100, (Math.tanh(rating - 2.5) + 1) * 50)
.hex();
break;
case 'takenAt':
ctx.fillStyle = takenAt
? color
.hsl(
60,
100,
// 100 - ((now - takenAt) * 10) ** 0.2,
100 - ((now - takenAt) * 100) ** 0.185,
)
.hex()
: '#a22';
break;
case 'season':
{
if (!takenAt) {
ctx.fillStyle = '#800';
break;
}
const hs = 366 / 4;
const winter = [70, -5, -52];
const spring = [70, -62, 42];
const summer = [90, -4, 74];
const fall = [70, 48, 43];
// 2847600
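// Editor's note: 31557600 s is one Julian year (365.25 days), so x below is the
// day-of-year; the 1206000 s (~14 day) offset presumably aligns the season boundaries.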
const x = ((takenAt - 1206000) % 31557600) / 60 / 60 / 24;
const fill = (from: number[], to: number[], n: number) => {
ctx.fillStyle = color
.lab(...[0, 1, 2].map((i) => from[i] * (1 - n) + to[i] * n))
.hex();
};
if (x < hs) {
fill(winter, spring, x / hs);
} else if (x < 2 * hs) {
fill(spring, summer, (x - hs) / hs);
} else if (x < 3 * hs) {
fill(summer, fall, (x - 2 * hs) / hs);
} else {
fill(fall, winter, (x - 3 * hs) / hs);
}
}
break;
case 'createdAt':
ctx.fillStyle = color
.hsl(
60,
100,
// 100 - ((now - createdAt) * 10) ** 0.2,
100 - ((now - createdAt) * 100) ** 0.185,
)
.hex();
break;
case 'mine':
ctx.fillStyle = userId === myUserId ? '#ff0' : '#fa4';
break;
}
ctx.fill();
ctx.stroke();
}
}
|
apache-2.0
|
buffetboy2001/model-argument-switching
|
src/main/java/com/github/buffetboy2001/mas/algorithms/FixedStepIntegrationArguments.java
|
885
|
/**
*
*/
package com.github.buffetboy2001.mas.algorithms;
import com.github.buffetboy2001.mas.interfaces.IFixedStepIntegratorArguments;
/**
* This class does not contain a setter, so the value cannot be updated within
* the lifetime of the object.
*
* @author SBOWMAN
*
*/
public class FixedStepIntegrationArguments<NUMERICTYPE extends Number> extends FixedStepIterativeAlgorithmArguments<NUMERICTYPE> implements
IFixedStepIntegratorArguments<NUMERICTYPE> {
/**
* Constructor.
*
* @param clazz
* The numeric class, must extend from {@link Number}.
* @param fixedStepSize
* The step size to be used for integration arguments.
*/
public FixedStepIntegrationArguments(Class<NUMERICTYPE> clazz, NUMERICTYPE fixedStepSize) {
super(clazz, fixedStepSize);
}
}
|
apache-2.0
|
arshvin/scripts
|
mvn_cleaner/mvn-hash-cleaner.py
|
5629
|
__author__ = 'ivan.v.polyakov'
import re
import os, os.path
from datetime import datetime
import argparse
parser = argparse.ArgumentParser(description="Maven's repo cleaner of artifacts with git-hash")
parser.add_argument("-p", "--repo-path", metavar="PATH", dest="path", action="store", required=True, type = str)
parser.add_argument("-d", "--dry-run", dest="dry", action="store_true", help = "Running with simulate mode")
parser.add_argument("--keep-default", metavar="INT", dest="rescued", action="store", type = int,
help = "quantity of rescued similar artifacts from erasing", default = 5)
parser.add_argument("--vip", metavar="PATTERN:INT", dest="vips", action="append", default=[],
help = "key:value pairs for overriding --keep-default parameter for custom artafacts")
parser.add_argument("--dont-touch", metavar="PATTERN", dest="dont_touch", action="append", default=[],
help = "artifacts that wont be erased")
parser.add_argument("-c", "--sort-by", metavar="CRITERIA", dest="criteria", choices = ['ctime','atime','mtime'],
default='ctime', help="criteria of sorting similar artifacts. Dafault is ctime")
args = parser.parse_args()
tmp_dict = {}
for item in args.vips:
tmp_dict[item.split(":")[0]] = int(item.split(":")[1])
args.vips = tmp_dict
pattern = "[a-z0-9]{40}"
sorting_criteria = {
"ctime":(os.path.getctime, "created"),
"atime":(os.path.getatime, "accessed last"),
"mtime":(os.path.getmtime, "modified last")
}
def walker(root):
for current_dir, dir_list, files_list in os.walk(root):
if len(dir_list) == 0:
yield current_dir
###########
#Must return true or false
has_hash = lambda(path): re.search(pattern, path, re.IGNORECASE)
is_empty = lambda(path): not bool(os.path.getsize(path))
def not_untoucheable(path):
result = not any([(item in path) for item in args.dont_touch])
if not result:
print "untouchable directory %s" % path
return result
###########
class Furnace():
    def __init__(self, criteria):
        self.criteria = criteria
        self.builds = {}  # instance attribute, so separate Furnace objects don't share state
def feed(self,path):
if not os.path.isdir(path): #Not interesting
return
        artifact_version = path.split(os.path.sep)[-2] + re.sub(pattern, "", os.path.basename(path))
        if artifact_version not in self.builds:
            self.builds[artifact_version] = []
        self.builds[artifact_version].append(path)
def eject_dross(self):
collection = []
        for artifact in self.builds:
            # sort each artifact's builds by the chosen time criterion and prune
            # everything beyond the configured keep count
candidates = self.builds[artifact]
candidates.sort(key = self.criteria, reverse=True)
if not any([(item in artifact) for item in args.vips]):
collection.extend(candidates[args.rescued:])
# For debug only
# for item in candidates[args.rescued:]:
# print "SELECTED %s %s" % (item, datetime.fromtimestamp(self.criteria(item)))
# for item in candidates[:args.rescued]:
# print "RESCUEd %s %s" % (item, datetime.fromtimestamp(self.criteria(item)))
else:
for vip in args.vips:
if vip in artifact:
collection.extend(candidates[args.vips[vip]:])
# For debug only
# for item in candidates[args.vips[vip]:]:
# print "SELECTED VIP %s " % item
# for item in candidates[:args.vips[vip]]:
# print "RESCUED VIP %s " % item
break
return collection
def delete_dir(root):
for current_dir, dir_list, files_list in os.walk(root):
if dir_list != []:
for directory in dir_list:
delete_dir(os.path.join(current_dir, directory))
else:
for file in files_list:
os.remove(os.path.join(current_dir,file))
print "build %s %s at %s was removed" % (current_dir,sorting_criteria[args.criteria][1],datetime.fromtimestamp(sorting_criteria[args.criteria][0](current_dir))
.strftime("%Y-%m-%d %H:%M:%S"))
os.rmdir(current_dir)
#####################################
#####################################
#### Let's have ZARUBA!!!
#####################################
#####################################
if args.dry:
print "SCRIPT WORKS IN SIMULATE MODE. NOTHING WILL BE REMOVED"
furnace = Furnace(sorting_criteria[args.criteria][0])
pipe = [has_hash, not_untoucheable]
for path in walker(args.path):
if is_empty(path):
if not args.dry:
try:
os.rmdir(path)
except Exception as e:
print(e)
print "removed empty dir '%s'" % path
elif (all((f(path) for f in pipe))):
furnace.feed(path)
else:
print "skipped directory %s" % path
if not args.dry:
for path in furnace.eject_dross():
try:
delete_dir(path)
except Exception as e:
print(e)
else: # --dry-run
for path in furnace.eject_dross():
print "build %s %s at %s was removed" % (path,sorting_criteria[args.criteria][1],
datetime.fromtimestamp(sorting_criteria[args.criteria][0](path)).strftime("%Y-%m-%d %H:%M:%S"))
|
apache-2.0
|
Jasig/SSP-Platform
|
uportal-war/src/main/java/org/jasig/portal/layout/dlm/remoting/registry/ChannelBean.java
|
3257
|
/**
* Licensed to Apereo under one or more contributor license
* agreements. See the NOTICE file distributed with this work
* for additional information regarding copyright ownership.
* Apereo licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a
* copy of the License at the following location:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.jasig.portal.layout.dlm.remoting.registry;
import java.io.Serializable;
import org.apache.commons.lang.builder.CompareToBuilder;
import org.apache.commons.lang.builder.EqualsBuilder;
import org.apache.commons.lang.builder.HashCodeBuilder;
public class ChannelBean implements Comparable<ChannelBean>, Serializable {
private String id;
private String description;
private String fname;
private String name;
private String state;
private String title;
private int typeId;
private String iconUrl;
public ChannelBean() {
}
public String getId() {
return id;
}
public void setId(String id) {
this.id = id;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public String getFname() {
return fname;
}
public void setFname(String fname) {
this.fname = fname;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getState() {
return state;
}
public void setState(String state) {
this.state = state;
}
public String getTitle() {
return title;
}
public void setTitle(String title) {
this.title = title;
}
public int getTypeId() {
return typeId;
}
public void setTypeId(int typeId) {
this.typeId = typeId;
}
public String getIconUrl() {
return iconUrl;
}
public void setIconUrl(String iconUrl) {
this.iconUrl = iconUrl;
}
public int compareTo(ChannelBean channel) {
return new CompareToBuilder().append(this.id, channel.getId())
.toComparison();
}
/**
* @see java.lang.Object#equals(Object)
*/
@Override
public boolean equals(Object object) {
if (object == this) {
return true;
}
if (!(object instanceof ChannelBean)) {
return false;
}
ChannelBean rhs = (ChannelBean) object;
return new EqualsBuilder().append(this.id, rhs.getId()).isEquals();
}
/**
* @see java.lang.Object#hashCode()
*/
@Override
public int hashCode() {
return new HashCodeBuilder(464270933, -1074792143).append(this.id)
.toHashCode();
}
}
|
apache-2.0
|
skwasjer/SilentHunter
|
src/SilentHunter.FileFormats/ChunkedFiles/Chunk.cs
|
3085
|
using System.IO;
using System.Threading.Tasks;
using SilentHunter.FileFormats.IO;
namespace SilentHunter.FileFormats.ChunkedFiles
{
/// <summary>
/// Represents a file chunk, which is identified via a strongly typed magic.
/// </summary>
/// <typeparam name="TMagic">The type of the magic.</typeparam>
public abstract class Chunk<TMagic> : IChunk<TMagic>
{
private long _size;
/// <summary>
/// Initializes a new instance of <see cref="Chunk{TMagic}" />.
/// </summary>
/// <param name="magic">The magic for this chunk.</param>
protected Chunk(TMagic magic)
{
Magic = magic;
}
/// <summary>
/// When implemented, deserializes the implemented class from specified <paramref name="stream" />.
/// </summary>
/// <param name="stream">The stream.</param>
Task IRawSerializable.DeserializeAsync(Stream stream)
{
return DeserializeAsync(stream);
}
/// <summary>
/// When implemented, serializes the implemented class to specified <paramref name="stream" />.
/// </summary>
/// <param name="stream">The stream.</param>
Task IRawSerializable.SerializeAsync(Stream stream)
{
return SerializeAsync(stream);
}
/// <summary>
/// Gets or sets the magic.
/// </summary>
object IChunk.Magic
{
get => Magic;
set => Magic = (TMagic)value;
}
/// <summary>
/// Gets or sets the magic.
/// </summary>
public TMagic Magic { get; set; }
long IChunk.Size
{
get => _size;
set => _size = value;
}
/// <summary>
/// Gets the size of the chunk.
/// </summary>
public virtual long Size => _size;
/// <summary>
/// Gets or sets the file offset.
/// </summary>
public long FileOffset { get; set; }
/// <summary>
/// Gets or sets the parent file.
/// </summary>
public IChunkFile ParentFile { get; set; }
/// <summary>
/// Returns a string that represents the current object.
/// </summary>
/// <returns>
/// A string that represents the current object.
/// </returns>
public override string ToString()
{
return string.Format("{0}: magic={1}, size={2}", GetType().Name, Magic, Size);
}
/// <summary>
/// Gets array of raw chunk bytes. This array is only filled if inheritors did not fully implement deserialization.
/// </summary>
public byte[] Bytes
{
get;
private set;
}
/// <summary>
/// When implemented, deserializes the implemented class from specified <paramref name="stream" />.
/// </summary>
/// <param name="stream">The stream.</param>
protected virtual Task DeserializeAsync(Stream stream)
{
_size = stream.Length - stream.Position;
var buffer = Bytes = new byte[_size];
return stream.ReadAsync(buffer, 0, (int)_size);
}
/// <summary>
/// When implemented, serializes the implemented class to specified <paramref name="stream" />.
/// </summary>
/// <param name="stream">The stream.</param>
protected virtual Task SerializeAsync(Stream stream)
{
if (Bytes == null || Bytes.Length == 0)
{
return Task.CompletedTask;
}
return stream.WriteAsync(Bytes, 0, Bytes.Length);
}
}
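	// Editor's note: a minimal sketch (hypothetical) of a concrete chunk type. Unless
	// DeserializeAsync is overridden, the base class buffers the raw chunk bytes.
	internal sealed class ExampleChunk : Chunk<string>
	{
		public ExampleChunk() : base("EXMP") { }
	}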
}
|
apache-2.0
|
rum-ata/pdt39
|
addressbook-web-tests/src/test/java/ru/stqa/pft/addressbook/tests/TestBase.java
|
1155
|
package ru.stqa.pft.addressbook.tests;
import org.openqa.selenium.remote.BrowserType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.AfterSuite;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.BeforeSuite;
import ru.stqa.pft.addressbook.appmanager.ApplicationManager;
import java.lang.reflect.Method;
import java.util.Arrays;
/**
* Created by Константин on 19.03.2017.
*/
public class TestBase {
Logger logger = LoggerFactory.getLogger(TestBase.class);
protected static final ApplicationManager app
= new ApplicationManager(System.getProperty("browser", BrowserType.FIREFOX));
@BeforeSuite
public void setUp() throws Exception {
app.init();
}
@AfterSuite (alwaysRun = true)
public void tearDown() {
app.stop();
}
@BeforeMethod
public void logTestStart(Method m, Object[] p){
logger.info("Start test " + m.getName() + " parameters" + Arrays.asList(p));
}
@AfterMethod (alwaysRun = true)
public void logTestStop(Method m){
logger.info("Stop test " + m.getName());
}
}
|
apache-2.0
|
mhidaka/playgroundthon
|
SampleProject/CSharp/Properties/AssemblyInfo.cs
|
1569
|
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
// General information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("FrameworkTestSuite")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("")]
[assembly: AssemblyProduct("FrameworkTestSuite")]
[assembly: AssemblyCopyright("Copyright © 2013")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]
// Setting ComVisible to false makes the types in this assembly invisible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]
// The following GUID is for the ID of the typelib if this project is exposed to COM
[assembly: Guid("487162bc-567a-440f-81a8-db526c2c7acd")]
// Version information for an assembly consists of the following four values:
//
//      Major Version
//      Minor Version
//      Build Number
//      Revision
//
// You can specify all the values or you can default the Build and Revision Numbers
// by using '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
[assembly: AssemblyVersion("1.0.0.0")]
[assembly: AssemblyFileVersion("1.0.0.0")]
|
apache-2.0
|
ali-ince/neo4j-dotnet-driver
|
Neo4j.Driver/Neo4j.Driver/Internal/Result/Record.cs
|
1490
|
// Copyright (c) 2002-2019 "Neo4j,"
// Neo4j Sweden AB [http://neo4j.com]
//
// This file is part of Neo4j.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
using System.Collections.Generic;
using Neo4j.Driver;
namespace Neo4j.Driver.Internal.Result
{
internal class Record : IRecord
{
public object this[int index] => Values[Keys[index]];
public object this[string key] => Values[key];
public IReadOnlyDictionary<string, object> Values { get; }
public IReadOnlyList<string> Keys { get; }
public Record(string[] keys, object[] values)
{
Throw.ProtocolException.IfNotEqual(keys.Length, values.Length, nameof(keys), nameof(values));
var valueKeys = new Dictionary<string, object>();
for (var i = 0; i < keys.Length; i++)
{
valueKeys.Add(keys[i], values[i]);
}
Values = valueKeys;
Keys = keys;
}
}
}
|
apache-2.0
|
bmhm/ffb.depot.client
|
src/main/java/de/bmarwell/ffb/depot/client/value/FfbDepotNummer.java
|
2307
|
/*
* Copyright 2018 The ffb.depot.client contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.bmarwell.ffb.depot.client.value;
import de.bmarwell.ffb.depot.client.FfbMobileClient;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonValue;
import java.util.Comparator;
import org.immutables.value.Value;
/**
 * This object holds an FFB depot number.
 *
 * <p>A type separate from {@link FfbLoginKennung} is needed because the login name can be changed,
 * whereas the depot number, which often lacks the <i>-01</i> suffix, is assigned permanently by the
 * FFB and cannot be changed.</p>
 */
@Value.Immutable
public interface FfbDepotNummer extends Comparable<FfbDepotNummer> {
/**
 * Creates a depot number as an immutable object that can be passed to the {@link FfbMobileClient} constructor.
 *
 * @param depotnummer
 *          the depot number as a string.
 *
 *          <p><b>Note:</b> it need not equal the login. The default login still carries a <i>-01</i>
 *          suffix, or may have been changed by the user to something entirely different; the depot
 *          number, by contrast, is assigned by the FFB and cannot be changed.</p>
 * @return a depot number object, immutable.
 */
@JsonCreator
static FfbDepotNummer of(final String depotnummer) {
return ImmutableFfbDepotNummer.of(depotnummer);
}
static FfbDepotNummer empty() {
return of("");
}
@Value.Parameter
@JsonValue
String getDepotNummer();
@Override
default int compareTo(final FfbDepotNummer other) {
final Comparator<FfbDepotNummer> comparator = Comparator.comparing(FfbDepotNummer::getDepotNummer);
return comparator.compare(this, other);
}
}
|
apache-2.0
|
wuyuehang/yuehan9
|
gfxnuts/src/unittest_noise2d_wood.cpp
|
2281
|
#include "noise2d.hpp"
#include "ppm.hpp"
int main()
{
noise2d a;
const int amp_factor = 8; // upscaling factor for the base noise tile
unsigned char *texture = new unsigned char [noise2d::MetaWidth*amp_factor*noise2d::MetaHeight*amp_factor*3];
const int NumLayers = 7; // octave count; const so the array below is standard C++, not a VLA
int layer_factor[NumLayers];
float base_frequency_factor = 0.2; // keep this modest; larger values make the base pattern busier
float s = 2.0;
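// Normalisation constant: the geometric amplitude series 1 + 1/s + ... + (1/s)^(NumLayers-1).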
float layer_accumulate = 1.0*(1-pow(1/s, NumLayers))/(1-1/s);
for (int i = 0; i < NumLayers; i++) {
layer_factor[i] = pow(s, i);
}
ppm b;
for (int i = 0; i < noise2d::MetaHeight*amp_factor; i++) {
for (int j = 0; j < noise2d::MetaWidth*amp_factor; j++) {
float noise_sum = 0.0;
for (int k = 0; k < NumLayers; k++) {
float fi = float(i)/noise2d::MetaWidth/amp_factor*noise2d::MetaWidth;
float fj = float(j)/noise2d::MetaHeight/amp_factor*noise2d::MetaHeight;
float v = a.lerp(fi*base_frequency_factor*layer_factor[k], fj*base_frequency_factor*layer_factor[k]);
//v = fabs(2.0*v-1.0);
noise_sum += 1.0/layer_factor[k]*v;
}
noise_sum /= layer_accumulate;
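// Scale up and keep only the fractional part, wrapping the noise into repeating bands ("wood rings").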
noise_sum *= 20;
noise_sum = noise_sum - int(noise_sum);
texture[3*i*noise2d::MetaWidth*amp_factor+j*3] = (unsigned char)(255*noise_sum);
texture[3*i*noise2d::MetaWidth*amp_factor+j*3+1] = (unsigned char)(255*noise_sum);
texture[3*i*noise2d::MetaWidth*amp_factor+j*3+2] = 0;
}
}
b.serialize("unittest_noise2d_wood_2048x2048_mixed_f.ppm", noise2d::MetaWidth*amp_factor, noise2d::MetaHeight*amp_factor, texture);
for (int i = 0; i < noise2d::MetaHeight*amp_factor; i++) {
for (int j = 0; j < noise2d::MetaWidth*amp_factor; j++) {
float fi = float(i)/noise2d::MetaWidth/amp_factor*noise2d::MetaWidth;
float fj = float(j)/noise2d::MetaHeight/amp_factor*noise2d::MetaHeight;
float noise_sum = a.lerp(fi*base_frequency_factor, fj*base_frequency_factor);
noise_sum *= 10;
noise_sum = noise_sum - int(noise_sum);
texture[3*i*noise2d::MetaWidth*amp_factor+j*3] = (unsigned char)(255*noise_sum);
texture[3*i*noise2d::MetaWidth*amp_factor+j*3+1] = (unsigned char)(255*noise_sum);
texture[3*i*noise2d::MetaWidth*amp_factor+j*3+2] = 0;
}
}
b.serialize("unittest_noise2d_wood_2048x2048_single_f.ppm", noise2d::MetaWidth*amp_factor, noise2d::MetaHeight*amp_factor, texture);
delete [] texture;
return 0;
}
|
apache-2.0
|
griffon/griffon
|
subprojects/griffon-core-api/src/main/java/griffon/core/resources/package-info.java
|
768
|
/*
* SPDX-License-Identifier: Apache-2.0
*
* Copyright 2008-2022 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Resource management and injection support.
*
* @since 2.0.0
*/
package griffon.core.resources;
|
apache-2.0
|
Alpistinho/SPICE16
|
Spice/Spice/CurrentSource.cpp
|
839
|
#include "stdafx.h"
#include "CurrentSource.h"
namespace spiceSimulator {
CurrentSource::CurrentSource(void)
{
}
CurrentSource::CurrentSource(Node* n1, Node* n2, double current, double phase, unsigned long newKey) {
node1 = n1;
node2 = n2;
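// Store the AC source as a complex phasor: I*e^(j*phase) = I*(cos(phase) + j*sin(phase)).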
value = std::complex<double>(current*std::cos(phase), current*std::sin(phase));
key = newKey;
}
CurrentSource::~CurrentSource(void)
{
}
ComponentType CurrentSource::getComponentType() {
return ComponentType::CurrentSource;
}
void CurrentSource::getFrequencyStamp(std::vector<std::vector<std::complex<double>>>* equationSystem, double frequency) {
unsigned long columns = (*equationSystem)[0].size(); // number of columns the matrix has
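// An independent current source stamps only the right-hand-side (last) column of the
// augmented system: +I in node1's row and -I in node2's row, per this code's sign convention.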
(*equationSystem)[node1->getNodeNumber()][columns-1] += value;
(*equationSystem)[node2->getNodeNumber()][columns-1] -= value;
}
}
|
apache-2.0
|
thommay/chef
|
chef/spec/unit/cookbook/metadata_spec.rb
|
13181
|
#
# Author:: Adam Jacob (<adam@opscode.com>)
# Copyright:: Copyright (c) 2008 Opscode, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require File.expand_path(File.join(File.dirname(__FILE__), "..", "..", "spec_helper"))
require 'chef/cookbook/metadata'
describe Chef::Cookbook::Metadata do
before(:each) do
@cookbook = Chef::Cookbook.new('test_cookbook')
@meta = Chef::Cookbook::Metadata.new(@cookbook)
end
describe "initialize" do
it "should return a Chef::Cookbook::Metadata object" do
@meta.should be_a_kind_of(Chef::Cookbook::Metadata)
end
it "should allow a cookbook as the first argument" do
lambda { Chef::Cookbook::Metadata.new(@cookbook) }.should_not raise_error
end
it "should allow an maintainer name for the second argument" do
lambda { Chef::Cookbook::Metadata.new(@cookbook, 'Bobo T. Clown') }.should_not raise_error
end
it "should set the maintainer name from the second argument" do
md = Chef::Cookbook::Metadata.new(@cookbook, 'Bobo T. Clown')
md.maintainer.should == 'Bobo T. Clown'
end
it "should allow an maintainer email for the third argument" do
lambda { Chef::Cookbook::Metadata.new(@cookbook, 'Bobo T. Clown', 'bobo@clown.co') }.should_not raise_error
end
it "should set the maintainer email from the third argument" do
md = Chef::Cookbook::Metadata.new(@cookbook, 'Bobo T. Clown', 'bobo@clown.co')
md.maintainer_email.should == 'bobo@clown.co'
end
it "should allow a license for the fourth argument" do
lambda { Chef::Cookbook::Metadata.new(@cookbook, 'Bobo T. Clown', 'bobo@clown.co', 'Clown License v1') }.should_not raise_error
end
it "should set the license from the fourth argument" do
md = Chef::Cookbook::Metadata.new(@cookbook, 'Bobo T. Clown', 'bobo@clown.co', 'Clown License v1')
md.license.should == 'Clown License v1'
end
end
describe "cookbook" do
it "should return the cookbook we were initialized with" do
@meta.cookbook.should eql(@cookbook)
end
end
describe "name" do
it "should return the name of the cookbook" do
@meta.name.should eql(@cookbook.name)
end
end
describe "platforms" do
it "should return the current platform hash" do
@meta.platforms.should be_a_kind_of(Hash)
end
end
describe "adding a supported platform" do
it "should support adding a supported platform with a single expression" do
@meta.supports("ubuntu", ">= 8.04")
@meta.platforms["ubuntu"].should == [ '>= 8.04' ]
end
it "should support adding a supported platform with multiple expressions" do
@meta.supports("ubuntu", ">= 8.04", "= 9.04")
@meta.platforms["ubuntu"].should == [ '>= 8.04', "= 9.04" ]
end
end
describe "meta-data attributes" do
params = {
:maintainer => "Adam Jacob",
:maintainer_email => "adam@opscode.com",
:license => "Apache v2.0",
:description => "Foobar!",
:long_description => "Much Longer\nSeriously",
:version => "0.6.0"
}
params.sort { |a,b| a.to_s <=> b.to_s }.each do |field, field_value|
describe field do
it "should be set-able via #{field}" do
@meta.send(field, field_value).should eql(field_value)
end
it "should be get-able via #{field}" do
@meta.send(field, field_value)
@meta.send(field).should eql(field_value)
end
end
end
describe "version transformation" do
it "should transform an '0.6' version to '0.6.0'" do
@meta.send(:version, "0.6").should eql("0.6.0")
end
it "should spit out '0.6.0' after transforming '0.6'" do
@meta.send(:version, "0.6")
@meta.send(:version).should eql("0.6.0")
end
end
end
describe "dependency specification" do
dep_types = {
:depends => [ :dependencies, "foo::bar", ">> 0.2" ],
:recommends => [ :recommendations, "foo::bar", ">> 0.2" ],
:suggests => [ :suggestions, "foo::bar", ">> 0.2" ],
:conflicts => [ :conflicting, "foo::bar", ">> 0.2" ],
:provides => [ :providing, "foo::bar", ">> 0.2" ],
:replaces => [ :replacing, "foo::bar", ">> 0.2" ],
}
dep_types.sort { |a,b| a.to_s <=> b.to_s }.each do |dep, dep_args|
check_with = dep_args.shift
describe dep do
it "should be set-able via #{dep}" do
@meta.send(dep, *dep_args).should == [dep_args[1]]
end
it "should be get-able via #{check_with}" do
@meta.send(dep, *dep_args)
@meta.send(check_with).should == { dep_args[0] => [dep_args[1]] }
end
end
end
end
describe "cookbook attributes" do
it "should allow you set an attributes metadata" do
attrs = {
"display_name" => "MySQL Databases",
"multiple_values" => true,
"type" => 'string',
"required" => false,
"recipes" => [ "mysql::server", "mysql::master" ],
"default" => [ ]
}
@meta.attribute("/db/mysql/databases", attrs).should == attrs
end
it "should not accept anything but a string for display_name" do
lambda {
@meta.attribute("db/mysql/databases", :display_name => "foo")
}.should_not raise_error(ArgumentError)
lambda {
@meta.attribute("db/mysql/databases", :display_name => Hash.new)
}.should raise_error(ArgumentError)
end
it "should not accept anything but a string for the description" do
lambda {
@meta.attribute("db/mysql/databases", :description => "foo")
}.should_not raise_error(ArgumentError)
lambda {
@meta.attribute("db/mysql/databases", :description => Hash.new)
}.should raise_error(ArgumentError)
end
it "should let multiple_values be true or false" do
lambda {
@meta.attribute("db/mysql/databases", :multiple_values => true)
}.should_not raise_error(ArgumentError)
lambda {
@meta.attribute("db/mysql/databases", :multiple_values => false)
}.should_not raise_error(ArgumentError)
lambda {
@meta.attribute("db/mysql/databases", :multiple_values => Hash.new)
}.should raise_error(ArgumentError)
end
it "should set multiple_values to false by default" do
@meta.attribute("db/mysql/databases", {})
@meta.attributes["db/mysql/databases"][:multiple_values].should == false
end
it "should let type be string, array or hash" do
lambda {
@meta.attribute("db/mysql/databases", :type => "string")
}.should_not raise_error(ArgumentError)
lambda {
@meta.attribute("db/mysql/databases", :type => "array")
}.should_not raise_error(ArgumentError)
lambda {
@meta.attribute("db/mysql/databases", :type => "hash")
}.should_not raise_error(ArgumentError)
lambda {
@meta.attribute("db/mysql/databases", :type => Array.new)
}.should raise_error(ArgumentError)
end
it "should let required be true or false" do
lambda {
@meta.attribute("db/mysql/databases", :required => true)
}.should_not raise_error(ArgumentError)
lambda {
@meta.attribute("db/mysql/databases", :required => false)
}.should_not raise_error(ArgumentError)
lambda {
@meta.attribute("db/mysql/databases", :required => Hash.new)
}.should raise_error(ArgumentError)
end
it "should set required to false by default" do
@meta.attribute("db/mysql/databases", {})
@meta.attributes["db/mysql/databases"][:required].should == false
end
it "should make sure recipes is an array" do
lambda {
@meta.attribute("db/mysql/databases", :recipes => [])
}.should_not raise_error(ArgumentError)
lambda {
@meta.attribute("db/mysql/databases", :required => Hash.new)
}.should raise_error(ArgumentError)
end
it "should set recipes to an empty array by default" do
@meta.attribute("db/mysql/databases", {})
@meta.attributes["db/mysql/databases"][:recipes].should == []
end
it "should allow the default value to be a string, array, or hash" do
lambda {
@meta.attribute("db/mysql/databases", :default => [])
}.should_not raise_error(ArgumentError)
lambda {
@meta.attribute("db/mysql/databases", :default => {})
}.should_not raise_error(ArgumentError)
lambda {
@meta.attribute("db/mysql/databases", :default => "alice in chains")
}.should_not raise_error(ArgumentError)
lambda {
@meta.attribute("db/mysql/databases", :required => :not_gonna_do_it)
}.should raise_error(ArgumentError)
end
end
describe "checking version expression" do
it "should accept >> 8.04" do
@meta._check_version_expression(">> 8.04").should == [ ">>", "8.04" ]
end
it "should accept >= 8.04" do
@meta._check_version_expression(">= 8.04").should == [ ">=", "8.04" ]
end
it "should accept = 8.04" do
@meta._check_version_expression("= 8.04").should == [ "=", "8.04" ]
end
it "should accept <= 8.04" do
@meta._check_version_expression("<= 8.04").should == [ "<=", "8.04" ]
end
it "should accept << 8.04" do
@meta._check_version_expression("<< 8.04").should == [ "<<", "8.04" ]
end
it "should raise an exception on an invalid version expression" do
lambda {
@meta._check_version_expression("tried to << love you")
}.should raise_error(ArgumentError)
end
end
describe "recipes" do
before(:each) do
@cookbook.recipe_files = [ "default.rb", "enlighten.rb" ]
@meta = Chef::Cookbook::Metadata.new(@cookbook)
end
it "should have the names of the recipes" do
@meta.recipes["test_cookbook"].should == ""
@meta.recipes["test_cookbook::enlighten"].should == ""
end
it "should let you set the description for a recipe" do
@meta.recipe "test_cookbook", "It, um... tests stuff?"
@meta.recipes["test_cookbook"].should == "It, um... tests stuff?"
end
it "should automatically provide each recipe" do
@meta.providing.has_key?("test_cookbook").should == true
@meta.providing.has_key?("test_cookbook::enlighten").should == true
end
end
describe "json" do
before(:each) do
@cookbook.recipe_files = [ "default.rb", "enlighten.rb" ]
@meta = Chef::Cookbook::Metadata.new(@cookbook)
@meta.version "1.0"
@meta.maintainer "Bobo T. Clown"
@meta.maintainer_email "bobo@example.com"
@meta.long_description "I have a long arm!"
@meta.supports :ubuntu, ">> 8.04"
@meta.depends "bobo", "= 1.0"
@meta.depends "bobotclown", "= 1.1"
@meta.recommends "snark", "<< 3.0"
@meta.suggests "kindness", ">> 2.0", "<< 4.0"
@meta.conflicts "hatred"
@meta.provides "foo(:bar, :baz)"
@meta.replaces "snarkitron"
@meta.recipe "test_cookbook::enlighten", "is your buddy"
@meta.attribute "bizspark/has_login",
:display_name => "You have nothing"
@meta.version "1.2.3"
end
describe "serialize" do
before(:each) do
@serial = JSON.parse(@meta.to_json)
end
it "should serialize to a json hash" do
JSON.parse(@meta.to_json).should be_a_kind_of(Hash)
end
%w{
name
description
long_description
maintainer
maintainer_email
license
platforms
dependencies
suggestions
recommendations
conflicting
providing
replacing
attributes
recipes
version
}.each do |t|
it "should include '#{t}'" do
@serial[t].should == @meta.send(t.to_sym)
end
end
end
describe "deserialize" do
before(:each) do
@deserial = Chef::Cookbook::Metadata.from_json(@meta.to_json)
end
it "should deserialize to a Chef::Cookbook::Metadata object" do
@deserial.should be_a_kind_of(Chef::Cookbook::Metadata)
end
%w{
name
description
long_description
maintainer
maintainer_email
license
platforms
dependencies
suggestions
recommendations
conflicting
providing
replacing
attributes
recipes
version
}.each do |t|
it "should match '#{t}'" do
@deserial.send(t.to_sym).should == @meta.send(t.to_sym)
end
end
end
end
end
|
apache-2.0
|
Aleatoribus/aleator.stream
|
diagnostics.php
|
5513
|
<!DOCTYPE html>
<html>
<head>
<title>Diagnostics</title>
<meta charset="UTF-8"/>
<link rel="icon" href="favicon.ico"/>
</head>
<body>
<?php
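// Diagnostics page: reports session and connection details, tests MKV playback,
// and round-trips a sample string through several OpenSSL ciphers via shell_exec().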
function get_browser_name(){
$user_agent = $_SERVER['HTTP_USER_AGENT'];
// strpos() can legitimately return 0 (a match at the very start of the string),
// which is falsy, so every check must compare against false explicitly.
if(strpos($user_agent, 'Opera') !== false || strpos($user_agent, 'OPR/') !== false){
return 'Opera';
}
else if(strpos($user_agent, 'Edge') !== false){
return 'Edge';
}
else if(strpos($user_agent, 'Chrome') !== false){
return 'Chrome';
}
else if(strpos($user_agent, 'Safari') !== false){
return 'Safari';
}
else if(strpos($user_agent, 'Firefox') !== false){
return 'Firefox';
}
else if(strpos($user_agent, 'MSIE') !== false || strpos($user_agent, 'Trident/7') !== false){
return 'Internet Explorer';
}
else{
return 'Unknown';
}
}
print '<h2>Session diagnostics</h2>';
session_start();
if(isset($_SESSION['username'])){
print '<p>You are logged in as: ' . htmlspecialchars($_SESSION['username']) . '</p>'; // escape user-supplied data before echoing it
}
else{
print '<p>You are not logged in</p>';
}
print '<p>Session ID: ' . session_id() . '</p>';
print '<p>We see your IP address as: ' . $_SERVER['REMOTE_ADDR'] . '</p>';
print '<p>You are accessing our website via: ' . $_SERVER['HTTP_HOST'] . ' on remote port: ' . $_SERVER['SERVER_PORT'] . ' and local port: ' . $_SERVER['REMOTE_PORT'] . '</p>';
print '<p>This virtual host is: ' . $_SERVER['SERVER_NAME'] . ' on ' . $_SERVER['SERVER_ADDR'] . ':' . $_SERVER['SERVER_PORT'] . '</p>';
if($_SERVER['HTTPS'] == "on"){
print '<p>You are using HTTPS connection security. We\'ve enabled this by force.</p>';
}
else if($_SERVER['HTTPS'] != "on" && $_SERVER['HTTP_HOST'] == "z54pzh3e2qg4phj5.onion"){
print '<p>You are not using HTTPS connection security, but you are connecting via TOR.</p>';
}
else{
print '<p>You are not using HTTPS connection security. This is very bad, please report this!</p>';
}
print '<p id="noscript">JavaScript is not enabled, or is being blocked on this page.</p>';
print '<p id="script"></p>';
print '<h2>MKV playback test</h2>';
print '<p>You\'re using: ' . get_browser_name() . '</p>';
if(get_browser_name() == 'Chrome'){
print '<p>The video below should play!</p>';
}
else{
print '<p>The video below might not play.</p>';
}
print '<video width="25%" height="25%" controls><source src="media/jellyfish.mkv" type="video/mp4"></video>';
print '<h2>OpenSSL string encryption test</h2>';
$plaintext = shell_exec('echo "The quick brown fox jumps over the lazy dog." >> /var/www/aleator.stream/tmp/diag.txt && cat /var/www/aleator.stream/tmp/diag.txt');
print '<p><strong>Original string: </strong>' . $plaintext . '</p>';
$aes128 = shell_exec('openssl aes-128-cbc -a -salt -in /var/www/aleator.stream/tmp/diag.txt -out /var/www/aleator.stream/tmp/diag-aes128.txt -pass pass:password && cat /var/www/aleator.stream/tmp/diag-aes128.txt && rm -f /var/www/aleator.stream/tmp/diag-aes128.txt');
print '<p><strong>AES-128-CBC: </strong>' . $aes128 . '</p>';
$aes192 = shell_exec('openssl aes-192-cbc -a -salt -in /var/www/aleator.stream/tmp/diag.txt -out /var/www/aleator.stream/tmp/diag-aes192.txt -pass pass:password && cat /var/www/aleator.stream/tmp/diag-aes192.txt && rm -f /var/www/aleator.stream/tmp/diag-aes192.txt');
print '<p><strong>AES-192-CBC: </strong>' . $aes192 . '</p>';
$aes256 = shell_exec('openssl aes-256-cbc -a -salt -in /var/www/aleator.stream/tmp/diag.txt -out /var/www/aleator.stream/tmp/diag-aes256.txt -pass pass:password && cat /var/www/aleator.stream/tmp/diag-aes256.txt && rm -f /var/www/aleator.stream/tmp/diag-aes256.txt');
print '<p><strong>AES-256-CBC: </strong>' . $aes256 . '</p>';
$camellia128 = shell_exec('openssl camellia-128-cbc -a -salt -in /var/www/aleator.stream/tmp/diag.txt -out /var/www/aleator.stream/tmp/diag-camellia128.txt -pass pass:password && cat /var/www/aleator.stream/tmp/diag-camellia128.txt && rm -f /var/www/aleator.stream/tmp/diag-camellia128.txt');
print '<p><strong>Camellia-128-CBC: </strong>' . $camellia128 . '</p>';
$camellia192 = shell_exec('openssl camellia-192-cbc -a -salt -in /var/www/aleator.stream/tmp/diag.txt -out /var/www/aleator.stream/tmp/diag-camellia192.txt -pass pass:password && cat /var/www/aleator.stream/tmp/diag-camellia192.txt && rm -f /var/www/aleator.stream/tmp/diag-camellia192.txt');
print '<p><strong>Camellia-192-CBC: </strong>' . $camellia192 . '</p>';
$camellia256 = shell_exec('openssl camellia-256-cbc -a -salt -in /var/www/aleator.stream/tmp/diag.txt -out /var/www/aleator.stream/tmp/diag-camellia256.txt -pass pass:password && cat /var/www/aleator.stream/tmp/diag-camellia256.txt && rm -f /var/www/aleator.stream/tmp/diag-camellia256.txt');
print '<p><strong>Camellia-256-CBC: </strong>' . $camellia256 . '</p>';
$bf = shell_exec('openssl bf-cbc -a -salt -in /var/www/aleator.stream/tmp/diag.txt -out /var/www/aleator.stream/tmp/diag-bf.txt -pass pass:password && cat /var/www/aleator.stream/tmp/diag-bf.txt && rm -f /var/www/aleator.stream/tmp/diag-bf.txt');
print '<p><strong>BF-CBC: </strong>' . $bf . '</p>';
shell_exec('rm -f /var/www/aleator.stream/tmp/diag.txt');
?>
<script>
function scriptTest(){
var noscript = document.getElementById('noscript');
var script = document.getElementById('script');
noscript.style.display = 'none';
script.innerHTML = "JavaScript is enabled.";
}
window.onload = scriptTest; // assign the function itself; scriptTest() here would run immediately and assign its return value
</script>
</body>
</html>
|
apache-2.0
|
zxylvlp/LockFree
|
TestConcurrentQueue.cpp
|
1055
|
/*
* TestConcurrentQueue.cpp
*
 * Created on: July 2, 2017
* Author: zxy
*/
#include "concurrent_queue.h"
#include "SimpleSpinLock.h"
#include <thread>
#include <vector>
#include <iostream>
#include <cstdint> // uint64_t
int main() {
ConcurrentQueue<uint64_t> cq;
SimpleSpinLock ssl;
std::vector<std::thread> threads;
for (int i=0;i<10;i++) {
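// Odd-indexed threads are producers (enqueue an ever-increasing counter);
// even-indexed threads are consumers (dequeue in a tight loop).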
std::thread t([i, &cq, &ssl]{
if (i & 1) {
uint64_t j = 0;
while (true) {
cq.enqueue(j, i);
//ssl.lock();
//std::cout << "e: " << j << " tid: " << i <<std::endl;
//ssl.unlock();
j++;
}
} else {
while (true) {
uint64_t j = cq.dequeue(i);
(void)j; // the value is only inspected by the commented-out logging below
//ssl.lock();
//std::cout << "d: " << j << " tid: " << i << std::endl;
//ssl.unlock();
}
}
});
threads.emplace_back(std::move(t));
}
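// Workers run forever; park the main thread so the process stays alive until killed.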
while (true);
}
|
apache-2.0
|
NakedObjectsGroup/NakedObjectsFramework
|
NakedFramework/NakedFramework.Metamodel/SemanticsProvider/ShortValueSemanticsProvider.cs
|
2175
|
// Copyright Naked Objects Group Ltd, 45 Station Road, Henley on Thames, UK, RG9 1AT
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License.
// You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0.
// Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and limitations under the License.
using System;
using System.Collections.Generic;
using System.Globalization;
using NakedFramework.Architecture.Facet;
using NakedFramework.Architecture.Spec;
using NakedFramework.Architecture.SpecImmutable;
using NakedFramework.Core.Error;
namespace NakedFramework.Metamodel.SemanticsProvider;
[Serializable]
public sealed class ShortValueSemanticsProvider : ValueSemanticsProviderAbstract<short>, IShortValueFacet {
private const short DefaultValueConst = 0;
private const bool Immutable = true;
public ShortValueSemanticsProvider(IObjectSpecImmutable spec, ISpecification holder)
: base(Type, holder, AdaptedType, Immutable, DefaultValueConst, spec) { }
public static Type Type => typeof(IShortValueFacet);
public static Type AdaptedType => typeof(short);
public static KeyValuePair<Type, Func<IObjectSpecImmutable, ISpecification, IValueSemanticsProvider>> Factory => new(AdaptedType, (o, s) => new ShortValueSemanticsProvider(o, s));
protected override short DoParse(string entry) {
try {
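// Accept an optional leading sign, thousands separators and a decimal point (e.g. "1,234").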
return short.Parse(entry, NumberStyles.AllowLeadingSign | NumberStyles.AllowDecimalPoint | NumberStyles.AllowThousands);
}
catch (FormatException) {
throw new InvalidEntryException(FormatMessage(entry));
}
catch (OverflowException) {
throw new InvalidEntryException(OutOfRangeMessage(entry, short.MinValue, short.MaxValue));
}
}
protected override string TitleStringWithMask(string mask, short value) => value.ToString(mask);
}
|
apache-2.0
|
mmartell/geode-native
|
tests/cli/NewFwkLib/FwkTest.cs
|
52569
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.Collections;
using System.Collections.Generic;
using System.Runtime.Serialization;
using System.Text;
using System.Xml;
#pragma warning disable 618
namespace Apache.Geode.Client.FwkLib
{
using Apache.Geode.DUnitFramework;
using Apache.Geode.Client.Tests;
using Apache.Geode.Client;
using NEWAPI = Apache.Geode.Client.Tests;
//using Region = Apache.Geode.Client.IRegion<Object, Object>;
/// <summary>
/// Exception thrown when 'Call' is invoked on a client thread/process/...
/// that has already exited (either due to some error/exception on the
/// client side or due to its 'Dispose' function being called).
/// </summary>
[Serializable]
public class FwkException : Exception
{
/// <summary>
/// Constructor to create an exception object with empty message.
/// </summary>
public FwkException()
: base()
{
}
/// <summary>
/// Constructor to create an exception object with the given message.
/// </summary>
/// <param name="message">The exception message.</param>
public FwkException(string message)
: base(message)
{
}
/// <summary>
/// Constructor to create an exception object with the given message
/// and with the given inner Exception.
/// </summary>
/// <param name="message">The exception message.</param>
/// <param name="innerException">The inner Exception object.</param>
public FwkException(string message, Exception innerException)
: base(message, innerException)
{
}
/// <summary>
/// Constructor to allow deserialization of this exception by .Net remoting
/// </summary>
public FwkException(SerializationInfo info, StreamingContext context)
: base(info, context)
{
}
}
[Serializable]
public struct FwkTaskData
{
#region Private members
private string m_regionTag;
private string m_name;
private int m_numClients;
private int m_numKeys;
private int m_valueSize;
private int m_numThreads;
#endregion
#region Constructor
public FwkTaskData(string regionTag, string name, int numClients,
int numKeys, int valueSize, int numThreads)
{
m_regionTag = regionTag;
m_name = name;
m_numClients = numClients;
m_numKeys = numKeys;
m_valueSize = valueSize;
m_numThreads = numThreads;
}
#endregion
#region Public methods and accessors
public string RegionTag
{
get
{
return m_regionTag;
}
}
public string Name
{
get
{
return m_name;
}
}
public int NumClients
{
get
{
return m_numClients;
}
}
public int NumKeys
{
get
{
return m_numKeys;
}
}
public int ValueSize
{
get
{
return m_valueSize;
}
}
public int NumThreads
{
get
{
return m_numThreads;
}
}
public string GetLogString()
{
return string.Format("{0}-{1}-Clients-{2}-Keys-{3}-VSize-{4}-Threads-{5}",
m_regionTag, m_name, m_numClients, m_numKeys, m_valueSize, m_numThreads);
}
public string GetCSVString()
{
return string.Format("{0},{1},{2},{3},{4},{5}",
m_regionTag, m_name, m_numClients, m_numKeys, m_valueSize, m_numThreads);
}
#endregion
}
public abstract class FwkTest<TKey, TVal> : FwkReadData
{
#region Private members
private static FwkTest<TKey, TVal> m_currentTest = null;
private FwkTaskData m_taskData;
private List<string> m_taskRecords;
private const NEWAPI.CredentialGenerator.ClassCode DefaultSecurityCode =
NEWAPI.CredentialGenerator.ClassCode.LDAP;
#endregion
#region Public accessors and constants
public static FwkTest<TKey, TVal> CurrentTest
{
get
{
return m_currentTest;
}
}
public const string JavaServerBB = "Cacheservers";
public const string EndPointTag = "ENDPOINT:";
public const string HeapLruLimitKey = "heapLruLimit";
public const string RedundancyLevelKey = "redundancyLevel";
public const string ConflateEventsKey = "conflateEvents";
public const string SecurityParams = "securityParams";
public const string SecurityScheme = "securityScheme";
public const string JavaServerEPCountKey = "ServerEPCount";
public const string EndPoints = "EndPoints";
public static Properties<string, string> PER_CLIENT_FOR_MULTIUSER = null;
#endregion
#region Protected members
protected FwkTaskData TaskData
{
get
{
return m_taskData;
}
}
#endregion
#region Public methods
public FwkTest()
: base()
{
m_currentTest = this;
m_taskData = new FwkTaskData();
m_taskRecords = new List<string>();
}
public virtual void FwkException(string message)
{
FwkSevere(message);
throw new FwkException(message);
}
public virtual void FwkException(string fmt, params object[] paramList)
{
FwkException(string.Format(fmt, paramList));
}
public virtual void FwkSevere(string message)
{
Util.Log(Util.LogLevel.Error, "FWKLIB:: Task[{0}] Severe: {1}",
TaskName, message);
}
public virtual void FwkSevere(string fmt, params object[] paramList)
{
FwkSevere(string.Format(fmt, paramList));
}
public virtual void FwkWarn(string message)
{
Util.Log(Util.LogLevel.Warning, "FWKLIB:: Task[{0}]: {1}",
TaskName, message);
}
public virtual void FwkWarn(string fmt, params object[] paramList)
{
FwkWarn(string.Format(fmt, paramList));
}
public virtual void FwkInfo(string message)
{
Util.Log(Util.LogLevel.Info, "FWKLIB:: Task[{0}]: {1}",
TaskName, message);
}
public virtual void FwkInfo(string fmt, params object[] paramList)
{
FwkInfo(string.Format(fmt, paramList));
}
public virtual void FwkAssert(bool condition, string message)
{
if (!condition)
{
FwkException(message);
}
}
public virtual void FwkAssert(bool condition, string fmt,
params object[] paramList)
{
if (!condition)
{
FwkException(fmt, paramList);
}
}
public static void LogException(string message)
{
throw new FwkException(message);
}
public static void LogException(string fmt, params object[] paramList)
{
LogException(string.Format(fmt, paramList));
}
public static void LogSevere(string message)
{
Util.Log(Util.LogLevel.Error, "FWKLIB:: Severe: {0}", message);
}
public static void LogSevere(string fmt, params object[] paramList)
{
LogSevere(string.Format(fmt, paramList));
}
public static void LogWarn(string message)
{
Util.Log(Util.LogLevel.Warning, "FWKLIB:: {0}", message);
}
public static void LogWarn(string fmt, params object[] paramList)
{
LogWarn(string.Format(fmt, paramList));
}
public static void LogInfo(string message)
{
Util.Log(Util.LogLevel.Info, "FWKLIB:: {0}", message);
}
public static void LogInfo(string fmt, params object[] paramList)
{
LogInfo(string.Format(fmt, paramList));
}
public static void LogAssert(bool condition, string message)
{
if (!condition)
{
LogException(message);
}
}
public static void LogAssert(bool condition, string fmt,
params object[] paramList)
{
if (!condition)
{
LogException(fmt, paramList);
}
}
public virtual IRegion<TKey, TVal> GetRootRegion()
{
string rootRegionData = GetStringValue("regionSpec");
//rootRegionData = rootRegionData + "New";
if (rootRegionData == null)
{
return null;
}
string rootRegionName = GetRegionName(rootRegionData);
try
{
return CacheHelper<TKey, TVal>.GetVerifyRegion(rootRegionName);
}
catch
{
return null;
}
}
public CredentialGenerator GetCredentialGenerator()
{
int schemeNumber;
try
{
schemeNumber = (int)Util.BBGet(string.Empty,
FwkReadData.TestRunNumKey);
}
catch (Exception)
{
schemeNumber = 1;
}
int schemeSkip = 1;
string securityScheme;
string bb = "GFE_BB";
string key = "scheme";
do
{
securityScheme = GetStringValue(SecurityScheme);
Util.BBSet(bb, key, securityScheme);
}
while (++schemeSkip <= schemeNumber);
NEWAPI.CredentialGenerator.ClassCode secCode;
try
{
secCode = (NEWAPI.CredentialGenerator.ClassCode)Enum.Parse(typeof(
NEWAPI.CredentialGenerator.ClassCode), securityScheme, true);
}
catch (Exception)
{
FwkWarn("Skipping unknown security scheme {0}. Using default " +
"security scheme {1}.", securityScheme, DefaultSecurityCode);
secCode = DefaultSecurityCode;
}
if (secCode == NEWAPI.CredentialGenerator.ClassCode.None)
{
return null;
}
NEWAPI.CredentialGenerator gen = NEWAPI.CredentialGenerator.Create(secCode, false);
if (gen == null)
{
FwkWarn("Skipping security scheme {0} with no generator. Using " +
"default security scheme.", secCode, DefaultSecurityCode);
secCode = DefaultSecurityCode;
gen = NEWAPI.CredentialGenerator.Create(secCode, false);
}
return gen;
}
public void GetClientSecurityProperties(ref Properties<string, string> props,
string regionName)
{
string securityParams = GetStringValue(SecurityParams);
NEWAPI.CredentialGenerator gen;//= GetCredentialGenerator();
if (securityParams == null || securityParams.Length == 0 ||
(gen = GetCredentialGenerator()) == null)
{
FwkInfo("Security is DISABLED.");
return;
}
FwkInfo("Security params is: " + securityParams);
FwkInfo("Security scheme: " + gen.GetClassCode());
string dataDir = Util.GetFwkLogDir(Util.SystemType) + "/data";
gen.Init(dataDir, dataDir);
if (props == null)
{
props = new Properties<string, string>();
}
Properties<string, string> credentials;
Random rnd = new Random();
if (securityParams.Equals("valid"))
{
FwkInfo("Getting valid credentials");
credentials = gen.GetValidCredentials(rnd.Next());
}
else if (securityParams.Equals("invalid"))
{
FwkInfo("Getting invalid credentials");
credentials = gen.GetInvalidCredentials(rnd.Next());
}
else
{
FwkInfo("Getting credentials for a list of operations");
List<OperationCode> opCodes = new List<OperationCode>();
while (securityParams != null && securityParams.Length > 0)
{
securityParams = securityParams.ToLower().Replace("_", "");
OperationCode opCode;
if (securityParams == "create" || securityParams == "update")
{
opCode = OperationCode.Put;
}
else
{
opCode = (OperationCode)Enum.Parse(typeof(
OperationCode), securityParams, true);
}
opCodes.Add(opCode);
securityParams = GetStringValue(SecurityParams);
FwkInfo("Next security params: {0}", securityParams);
}
// For now only XML based authorization is supported
NEWAPI.AuthzCredentialGenerator authzGen = new NEWAPI.XmlAuthzCredentialGenerator();
authzGen.Init(gen);
List<string> regionNameList = new List<string>();
if (regionName == null || regionName.Length == 0)
{
regionName = GetStringValue("regionPaths");
}
while (regionName != null && regionName.Length > 0)
{
regionNameList.Add(regionName);
regionName = GetStringValue("regionPaths");
}
string[] regionNames = null;
if (regionNameList.Count > 0)
{
regionNames = regionNameList.ToArray();
}
credentials = authzGen.GetAllowedCredentials(opCodes.ToArray(),
regionNames, rnd.Next());
}
PER_CLIENT_FOR_MULTIUSER = credentials;
NEWAPI.Utility.GetClientProperties(gen.AuthInit, credentials, ref props);
FwkInfo("Security properties entries: {0}", props);
}
private string[] GetRoundRobinEP()
{
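// Rotate through the server endpoints published on the blackboard so that successive
// callers start from a different endpoint (simple round-robin distribution).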
int contactnum = GetUIntValue("contactNum");
string label = "EndPoints";
int epCount = (int)Util.BBGet(JavaServerBB, JavaServerEPCountKey);
//int epCount = (int)Util.BBGet("GFE_BB", "EP_COUNT");
if (contactnum < 0)
contactnum = epCount;
string[] rbEP = new string[contactnum];
string currEPKey = "CURRENTEP_COUNT";
int currentEP = (int)Util.BBIncrement("GFERR_BB", currEPKey);
for (int i = 0; i < contactnum; i++)
{
if (currentEP > epCount)
{
Util.BBSet("GFERR_BB", currEPKey, 0);
currentEP = (int)Util.BBIncrement("GFERR_BB", currEPKey);
}
string key = label + "_" + currentEP.ToString();
string ep = (string)Util.BBGet(JavaServerBB, key);
rbEP[i] = ep;
FwkInfo("GetRoundRobinEP = {0} key = {1} currentEP = {2}", ep, key, currentEP);
// rbEP[i] = ep;
}
return rbEP;
}
private void XMLParseEndPoints(string ep, bool isServer, PoolFactory pf)
{
string[] eps = ep.Split(',');
if (isServer)
{
bool disableShufflingEP = GetBoolValue("disableShufflingEP"); // smoke perf test
if (disableShufflingEP)
{
string[] rbep = GetRoundRobinEP();
for (int cnt = 0; cnt < rbep.Length; cnt++)
{
FwkInfo("round robin endpoint = {0}", rbep[cnt]);
//string[] rep = rbep[cnt].Split(',');
//foreach (string endpoint in eps)
//{
string hostName = rbep[cnt].Split(':')[0];
int portNum = int.Parse(rbep[cnt].Split(':')[1]);
pf.AddServer(hostName, portNum);
//}
}
}
else
{
foreach (string endpoint in eps)
{
string hostName = endpoint.Split(':')[0];
int portNum = int.Parse(endpoint.Split(':')[1]);
pf.AddServer(hostName, portNum);
}
}
}
else
{
foreach (string endpoint in eps)
{
string hostName = endpoint.Split(':')[0];
int portNum = int.Parse(endpoint.Split(':')[1]);
pf.AddLocator(hostName, portNum);
}
}
}
private void CreateCache()
{
Properties<string,string> dsProps = new Properties<string,string>();
ResetKey("PdxReadSerialized");
bool pdxReadSerialized = GetBoolValue("PdxReadSerialized");
ResetKey("isDurable");
bool isDC = GetBoolValue("isDurable");
ResetKey("durableTimeout");
int durableTimeout = 300;
string durableClientId = "";
string conflateEvents = GetStringValue(ConflateEventsKey);
if (isDC)
{
durableTimeout = GetUIntValue("durableTimeout");
bool isFeeder = GetBoolValue("isFeeder");
if (isFeeder)
{
durableClientId = "Feeder";
// VJR: Setting FeederKey because listener cannot read boolean isFeeder
// FeederKey is used later on by Verify task to identify feeder's key in BB
durableClientId = String.Format("ClientName_{0}_Count", Util.ClientNum);
}
else
{
durableClientId = String.Format("ClientName_{0}", Util.ClientNum);
}
//Util.BBSet("DURABLEBB", durableClientId,0);
CacheHelper<TKey, TVal>.InitConfigForPoolDurable(durableClientId, durableTimeout, conflateEvents, false);
}
else if (pdxReadSerialized)
{
CacheHelper<TKey, TVal>.InitConfigPdxReadSerialized(dsProps, pdxReadSerialized);
}
else
CacheHelper<TKey, TVal>.InitConfigPool(dsProps);
}
public void CreateCacheConnect()
{
CreateCache();
}
public virtual void CreatePool()
{
CreateCache();
PoolFactory pf = CacheHelper<TKey, TVal>.DCache.GetPoolFactory();
ResetKey("poolSpec");
string poolRegionData = GetStringValue("poolSpec");
FwkInfo("PoolSpec is :{0}", poolRegionData);
string poolName = null;
SetPoolAttributes(pf, poolRegionData, ref poolName);
if (CacheHelper<TKey, TVal>.DCache.GetPoolManager().Find(poolName) == null)
{
Pool pool = pf.Create(poolName, CacheHelper<TKey, TVal>.DCache);
FwkInfo("Pool attributes are {0}:", PoolAttributesToString(pool));
}
}
public void SetPoolAttributes(PoolFactory pf, string spec, ref string poolName)
{
ReadXmlData(null, pf, spec, ref poolName);
}
private void SetThisPoolAttributes(PoolFactory pf,string key, string value)
{
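// Map a pool attribute name from the XML spec onto the corresponding PoolFactory setter.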
switch (key)
{
case "free-connection-timeout":
int fct = int.Parse(value);
pf.SetFreeConnectionTimeout(TimeSpan.FromSeconds(fct));
break;
case "idle-timeout":
int it = int.Parse(value);
pf.SetIdleTimeout(TimeSpan.FromSeconds(it));
break;
case "load-conditioning-interval":
int lci = int.Parse(value);
pf.SetLoadConditioningInterval(TimeSpan.FromSeconds(lci));
break;
case "max-connections":
int mxc = int.Parse(value);
pf.SetMaxConnections(mxc);
break;
case "min-connections":
int minc = int.Parse(value);
pf.SetMinConnections(minc);
break;
case "ping-interval":
int pi = int.Parse(value);
pf.SetPingInterval(TimeSpan.FromSeconds(pi));
break;
case "read-timeout":
int rt = int.Parse(value);
pf.SetReadTimeout(TimeSpan.FromSeconds(rt));
break;
case "retry-attempts":
int ra = int.Parse(value);
pf.SetRetryAttempts(ra);
break;
case "server-group":
pf.SetServerGroup(value);
break;
case "socket-buffer-size":
int bs = int.Parse(value);
pf.SetSocketBufferSize(bs);
break;
case "subscription-ack-interval":
int acki = int.Parse(value);
pf.SetSubscriptionAckInterval(TimeSpan.FromSeconds(acki));
break;
case "subscription-enabled":
if (value == "true")
{
pf.SetSubscriptionEnabled(true);
}
else
{
pf.SetSubscriptionEnabled(false);
}
break;
case "thread-local-connections":
if (value == "true")
{
pf.SetThreadLocalConnections(true);
}
else
{
pf.SetThreadLocalConnections(false);
}
break;
case "subscription-message-tracking-timeout":
int smtt = int.Parse(value);
pf.SetSubscriptionMessageTrackingTimeout(TimeSpan.FromSeconds(smtt));
break;
case "subscription-redundancy":
int sr = int.Parse(value);
pf.SetSubscriptionRedundancy(sr);
break;
case "locators":
string locatorAddress = (string)Util.BBGet(string.Empty, "LOCATOR_ADDRESS_POOL");
XMLParseEndPoints(locatorAddress, false, pf);
break;
case "servers":
string ServerEndPoints = (string)Util.BBGet("Cacheservers", "ENDPOINT:");
XMLParseEndPoints(ServerEndPoints, true, pf);
break;
}
}
/*
public PoolFactory CreatePoolFactoryAndSetAttribute()
{
PoolFactory pf = PoolManager.CreateFactory();
ResetKey("poolSpec");
string poolRegionData = GetStringValue("poolSpec");
//FwkInfo("PoolSpec is :{0}", poolRegionData);
//Properties prop = GetNewPoolAttributes(poolRegionData);
string poolName = null;
SetPoolAttributes(pf, poolRegionData, ref poolName);
return pf;
}*/
private void ReadXmlData(RegionFactory af, PoolFactory pf,string spec, ref string poolname)
{
const string DriverNodeName = "test-driver";
string xmlFile = Util.BBGet(string.Empty, "XMLFILE") as string;
XmlNode node = XmlNodeReaderWriter.GetInstance(xmlFile).GetNode(
'/' + DriverNodeName);
XmlNodeList xmlNodes = node.SelectNodes("data");
if (xmlNodes != null)
{
foreach (XmlNode xmlNode in xmlNodes)
{
XmlAttribute tmpattr = xmlNode.Attributes["name"];
if(tmpattr.Value == spec)
{
if (xmlNode.FirstChild.Name == "snippet")
{
string regionName;
if (xmlNode.FirstChild.FirstChild.Name == "region")
{
XmlAttribute nameattr = xmlNode.FirstChild.FirstChild.Attributes["name"];
regionName = nameattr.Value;
XmlNode attrnode = xmlNode.FirstChild.FirstChild.FirstChild;
if (attrnode.Name == "region-attributes")
{
XmlAttributeCollection attrcoll = attrnode.Attributes;
if (attrcoll != null)
{
foreach (XmlAttribute eachattr in attrcoll)
{
SetThisAttribute(eachattr.Name, eachattr, af, ref poolname);
}
}
if (attrnode.ChildNodes != null)
{
foreach (XmlNode tmpnode in attrnode.ChildNodes)
{
SetThisAttribute(tmpnode.Name, tmpnode, af, ref poolname);
}
}
else
{
throw new IllegalArgException("The xml file passed has an unknown format");
}
}
}
else if (xmlNode.FirstChild.FirstChild.Name == "pool")
{
XmlAttribute nameattr = xmlNode.FirstChild.FirstChild.Attributes["name"];
poolname = nameattr.Value;
// Now collect the pool attributes
Properties<string,string> prop = new Properties<string,string>();
XmlAttributeCollection attrcoll = xmlNode.FirstChild.FirstChild.Attributes;
if (attrcoll != null)
{
foreach (XmlAttribute eachattr in attrcoll)
{
SetThisPoolAttributes(pf, eachattr.Name, eachattr.Value);
}
}
else
{
throw new IllegalArgException("The xml file passed has an unknown format");
}
}
}
}
}
}
}
private static ExpirationAction StrToExpirationAction(string str)
{
return (ExpirationAction)Enum.Parse(typeof(ExpirationAction),
str.Replace("-", string.Empty), true);
}
public void SetRegionAttributes(RegionFactory af, string spec, ref string poolname)
{
ReadXmlData(af, null,spec, ref poolname);
}
public static void SetThisAttribute(string name, XmlNode node, RegionFactory af, ref string poolname)
{
string value = node.Value;
switch (name)
{
case "caching-enabled":
if (value == "true")
{
af.SetCachingEnabled(true);
}
else
{
af.SetCachingEnabled(false);
}
break;
case "load-factor":
float lf = float.Parse(value);
af.SetLoadFactor(lf);
break;
case "concurrency-level":
int cl = int.Parse(value);
af.SetConcurrencyLevel(cl);
break;
case "lru-entries-limit":
uint lel = uint.Parse(value);
af.SetLruEntriesLimit(lel);
break;
case "initial-capacity":
int ic = int.Parse(value);
af.SetInitialCapacity(ic);
break;
case "disk-policy":
if (value == "none")
{
af.SetDiskPolicy(DiskPolicyType.None);
}
else if (value == "overflows")
{
af.SetDiskPolicy(DiskPolicyType.Overflows);
}
else
{
throw new IllegalArgException("Unknown disk policy");
}
break;
case "concurrency-checks-enabled":
bool cce = bool.Parse(value);
af.SetConcurrencyChecksEnabled(cce);
break;
case "pool-name":
if (value.Length != 0)
{
af.SetPoolName(value);
}
else
{
af.SetPoolName(value);
}
poolname = value;
break;
case "region-time-to-live":
XmlNode nlrttl = node.FirstChild;
if (nlrttl.Name == "expiration-attributes")
{
XmlAttributeCollection exAttrColl = nlrttl.Attributes;
ExpirationAction action = StrToExpirationAction(exAttrColl["action"].Value);
string rttl = exAttrColl["timeout"].Value;
af.SetRegionTimeToLive(action, TimeSpan.FromSeconds(uint.Parse(rttl)));
}
else
{
throw new IllegalArgException("The xml file passed has an unknowk format");
}
break;
case "region-idle-time":
XmlNode nlrit = node.FirstChild;
if (nlrit.Name == "expiration-attributes")
{
XmlAttributeCollection exAttrColl = nlrit.Attributes;
ExpirationAction action = StrToExpirationAction(exAttrColl["action"].Value);
string rit = exAttrColl["timeout"].Value;
af.SetRegionIdleTimeout(action, TimeSpan.FromSeconds(uint.Parse(rit)));
}
else
{
throw new IllegalArgException("The xml file passed has an unknowk format");
}
break;
case "entry-time-to-live":
XmlNode nlettl = node.FirstChild;
if (nlettl.Name == "expiration-attributes")
{
XmlAttributeCollection exAttrColl = nlettl.Attributes;
ExpirationAction action = StrToExpirationAction(exAttrColl["action"].Value);
string ettl = exAttrColl["timeout"].Value;
af.SetEntryTimeToLive(action, TimeSpan.FromSeconds(uint.Parse(ettl)));
}
else
{
throw new IllegalArgException("The xml file passed has an unknowk format");
}
break;
case "entry-idle-time":
XmlNode nleit = node.FirstChild;
if (nleit.Name == "expiration-attributes")
{
XmlAttributeCollection exAttrColl = nleit.Attributes;
ExpirationAction action = StrToExpirationAction(exAttrColl["action"].Value);
string eit = exAttrColl["timeout"].Value;
af.SetEntryIdleTimeout(action, TimeSpan.FromSeconds(uint.Parse(eit)));
}
else
{
throw new IllegalArgException("The xml file passed has an unknowk format");
}
break;
case "cache-loader":
XmlAttributeCollection loaderattrs = node.Attributes;
string loaderlibrary = null;
string loaderfunction = null;
foreach (XmlAttribute tmpattr in loaderattrs)
{
if (tmpattr.Name == "library")
{
loaderlibrary = tmpattr.Value;
}
else if (tmpattr.Name == "function")
{
loaderfunction = tmpattr.Value;
}
else
{
throw new IllegalArgException("cahe-loader attributes in improper format");
}
}
if (loaderlibrary != null && loaderfunction != null)
{
if (loaderfunction.IndexOf('.') < 0)
{
Type myType = typeof(FwkTest<TKey, TVal>);
loaderfunction = myType.Namespace + '.' +
loaderlibrary + "<" + typeof(TKey) + "," + typeof(TVal) + ">." + loaderfunction;
loaderlibrary = myType.Assembly.FullName;
ICacheLoader<TKey, TVal> loader = null;
String createCacheLoader = myType.Namespace + '.' +
loaderlibrary + "<" + typeof(TKey) + "," + typeof(TVal) + ">." + "createCacheLoader";
Util.Log(Util.LogLevel.Info, "Instantiated Loader = {0} ", loaderfunction);
af.SetCacheLoader(loader);
}
//af.SetCacheLoader(loaderlibrary, loaderfunction);
}
break;
case "cache-listener":
XmlAttributeCollection listenerattrs = node.Attributes;
string listenerlibrary = null;
string listenerfunction = null;
foreach (XmlAttribute tmpattr in listenerattrs)
{
if (tmpattr.Name == "library")
{
listenerlibrary = tmpattr.Value;
}
else if (tmpattr.Name == "function")
{
listenerfunction = tmpattr.Value;
}
else
{
throw new IllegalArgException("cahe-listener attributes in improper format");
}
}
if (listenerlibrary != null && listenerfunction != null)
{
if (listenerfunction.IndexOf('.') < 0)
{
Type myType = typeof(FwkTest<TKey, TVal>);
listenerfunction = myType.Namespace + '.' +
listenerlibrary + "<" + typeof(TKey) + "," + typeof(TVal) + ">." + listenerfunction;
//Util.Log(Util.LogLevel.Info, "rjk1 cache listener in fwktest: myType.Namespace {0} " +
// " listenerlibrary {1} listenerfunction {2}", myType.Namespace, listenerlibrary, listenerfunction);
//listenerlibrary = myType.Assembly.FullName;
//Util.Log(Util.LogLevel.Info, "rjk cache listener in fwktest inside if condition: listenerlibrary {0} listenerfunction {1}", listenerlibrary, listenerfunction);
Util.Log(Util.LogLevel.Info, "listenerlibrary is {0} and listenerfunction is {1}", listenerlibrary, listenerfunction);
ICacheListener<TKey, TVal> listener = null;
String perfTestCacheListener = myType.Namespace + '.' +
listenerlibrary + "<" + typeof(TKey) + "," + typeof(TVal) + ">." + "createPerfTestCacheListener";
String conflationTestCacheListener = myType.Namespace + '.' +
listenerlibrary + "<" + typeof(TKey) + "," + typeof(TVal) + ">." + "createConflationTestCacheListener";
String latencyListener = myType.Namespace + '.' +
listenerlibrary + "<" + typeof(TKey) + "," + typeof(TVal) + ">." + "createLatencyListenerP";
String dupChecker = myType.Namespace + '.' +
listenerlibrary + "<" + typeof(TKey) + "," + typeof(TVal) + ">." + "createDupChecker";
String createDurableCacheListener = myType.Namespace + '.' +
listenerlibrary + "<" + typeof(TKey) + "," + typeof(TVal) + ">." + "createDurableCacheListener";
String createConflationTestCacheListenerDC = myType.Namespace + '.' +
listenerlibrary + "<" + typeof(TKey) + "," + typeof(TVal) + ">." + "createConflationTestCacheListenerDC";
String createDurablePerfListener = myType.Namespace + '.' +
listenerlibrary + "<" + typeof(TKey) + "," + typeof(TVal) + ">." + "createDurablePerfListener";
String CreateDurableCacheListenerSP = myType.Namespace + '.' +
listenerlibrary + "<" + typeof(TKey) + "," + typeof(TVal) + ">." + "createDurableCacheListenerSP";
String createLatencyListener = myType.Namespace + '.' +
listenerlibrary + "<" + typeof(TKey) + "," + typeof(TVal) + ">." + "createLatencyListener";
String createSilenceListener = myType.Namespace + '.' +
listenerlibrary + "<" + typeof(TKey) + "," + typeof(TVal) + ">." + "createSilenceListener";
String createDeltaValidationCacheListener = myType.Namespace + '.' +
listenerlibrary + "<" + typeof(TKey) + "," + typeof(TVal) + ">." + "createDeltaValidationCacheListener";
String createSilenceListenerPdx = myType.Namespace + '.' +
listenerlibrary + "<" + typeof(TKey) + "," + typeof(TVal) + ">." + "createSilenceListenerPdx";
if (String.Compare(listenerfunction, perfTestCacheListener, true) == 0) {
listener = new PerfTestCacheListener<TKey, TVal>();
}
else if (String.Compare(listenerfunction, conflationTestCacheListener, true) == 0) {
listener = new ConflationTestCacheListener<TKey, TVal>();
}
else if (String.Compare(listenerfunction, latencyListener, true) == 0) {
listener = new LatencyListener<TKey, TVal>();
}
else if (String.Compare(listenerfunction, dupChecker, true) == 0) {
listener = new DupChecker<TKey, TVal>();
}
else if (String.Compare(listenerfunction, createDurableCacheListener, true) == 0) {
listener = new DurableListener<TKey, TVal>();
}
else if (String.Compare(listenerfunction, createConflationTestCacheListenerDC, true) == 0) {
listener = new ConflationTestCacheListenerDC<TKey, TVal>();
}
else if (String.Compare(listenerfunction, createDurablePerfListener, true) == 0) {
listener = new DurablePerfListener<TKey, TVal>();
}
else if (String.Compare(listenerfunction, createSilenceListener, true) == 0)
{
listener = new SilenceListener<TKey, TVal>();
}
else if (String.Compare(listenerfunction, createSilenceListenerPdx, true) == 0)
{
listener = new PDXSilenceListener<TKey, TVal>();
}
else if (String.Compare(listenerfunction, createDeltaValidationCacheListener, true) == 0)
{
listener = new DeltaClientValidationListener<TKey, TVal>();
}
Util.Log(Util.LogLevel.Info, "Instantiated Listener = {0} ", listenerfunction);
af.SetCacheListener(listener);
}
//af.SetCacheListener(listenerlibrary, listenerfunction);
}
break;
case "cache-writer":
XmlAttributeCollection writerattrs = node.Attributes;
string writerlibrary = null;
string writerfunction = null;
foreach (XmlAttribute tmpattr in writerattrs)
{
if (tmpattr.Name == "library")
{
writerlibrary = tmpattr.Value;
}
else if (tmpattr.Name == "function")
{
writerfunction = tmpattr.Value;
}
else
{
throw new IllegalArgException("cahe-loader attributes in improper format");
}
}
if (writerlibrary != null && writerfunction != null)
{
if (writerfunction.IndexOf('.') < 0)
{
Type myType = typeof(FwkTest<TKey, TVal>);
writerfunction = myType.Namespace + '.' +
writerlibrary + "<" + typeof(TKey) + "," + typeof(TVal) + ">." + writerfunction;
writerlibrary = myType.Assembly.FullName;
}
af.SetCacheWriter(writerlibrary, writerfunction);
}
break;
case "persistence-manager":
string pmlibrary = null;
string pmfunction = null;
Properties<string, string> prop = new Properties<string, string>();
XmlAttributeCollection pmattrs = node.Attributes;
foreach (XmlAttribute attr in pmattrs)
{
if (attr.Name == "library")
{
pmlibrary = attr.Value;
}
else if (attr.Name == "function")
{
pmfunction = attr.Value;
}
else
{
throw new IllegalArgException("Persistence Manager attributes in wrong format: " + attr.Name);
}
}
if (node.FirstChild.Name == "properties")
{
XmlNodeList pmpropnodes = node.FirstChild.ChildNodes;
foreach (XmlNode propnode in pmpropnodes)
{
if (propnode.Name == "property")
{
XmlAttributeCollection keyval = propnode.Attributes;
XmlAttribute keynode = keyval["name"];
XmlAttribute valnode = keyval["value"];
if (keynode.Value == "PersistenceDirectory" || keynode.Value == "EnvironmentDirectory")
{
prop.Insert(keynode.Value, valnode.Value);
}
else if (keynode.Value == "CacheSizeGb" || keynode.Value == "CacheSizeMb"
|| keynode.Value == "PageSize" || keynode.Value == "MaxFileSize")
{
prop.Insert(keynode.Value, valnode.Value);
}
}
}
}
af.SetPersistenceManager(pmlibrary, pmfunction, prop);
break;
}
}
private static string ConvertStringArrayToString(string[] array)
{
//
// Concatenate all the elements into a StringBuilder.
//
StringBuilder builder = new StringBuilder();
foreach (string value in array)
{
builder.Append(value);
builder.Append('.');
}
return builder.ToString();
}
private string PoolAttributesToString(Pool attrs)
{
StringBuilder attrsSB = new StringBuilder();
attrsSB.Append(Environment.NewLine + "poolName: " +
attrs.Name);
attrsSB.Append(Environment.NewLine + "FreeConnectionTimeout: " +
attrs.FreeConnectionTimeout);
attrsSB.Append(Environment.NewLine + "LoadConditioningInterval: " +
attrs.LoadConditioningInterval);
attrsSB.Append(Environment.NewLine + "SocketBufferSize: " +
attrs.SocketBufferSize);
attrsSB.Append(Environment.NewLine + "ReadTimeout: " +
attrs.ReadTimeout);
attrsSB.Append(Environment.NewLine + "MinConnections: " +
attrs.MinConnections);
attrsSB.Append(Environment.NewLine + "MaxConnections: " +
attrs.MaxConnections);
attrsSB.Append(Environment.NewLine + "StatisticInterval: " +
attrs.StatisticInterval);
attrsSB.Append(Environment.NewLine + "RetryAttempts: " +
attrs.RetryAttempts);
attrsSB.Append(Environment.NewLine + "SubscriptionEnabled: " +
attrs.SubscriptionEnabled);
attrsSB.Append(Environment.NewLine + "SubscriptionRedundancy: " +
attrs.SubscriptionRedundancy);
attrsSB.Append(Environment.NewLine + "SubscriptionAckInterval: " +
attrs.SubscriptionAckInterval);
attrsSB.Append(Environment.NewLine + "SubscriptionMessageTrackingTimeout: " +
attrs.SubscriptionMessageTrackingTimeout);
attrsSB.Append(Environment.NewLine + "ServerGroup: " +
attrs.ServerGroup);
attrsSB.Append(Environment.NewLine + "IdleTimeout: " +
attrs.IdleTimeout);
attrsSB.Append(Environment.NewLine + "PingInterval: " +
attrs.PingInterval);
attrsSB.Append(Environment.NewLine + "ThreadLocalConnections: " +
attrs.ThreadLocalConnections);
attrsSB.Append(Environment.NewLine + "MultiuserAuthentication: " +
attrs.MultiuserAuthentication);
attrsSB.Append(Environment.NewLine + "PRSingleHopEnabled: " +
attrs.PRSingleHopEnabled);
attrsSB.Append(Environment.NewLine + "Locators: " );
if (attrs.Locators != null && attrs.Locators.Length > 0)
{
foreach (string value in attrs.Locators)
{
attrsSB.Append(value);
attrsSB.Append(',');
}
}
attrsSB.Append(Environment.NewLine + "Servers: " );
if (attrs.Servers != null && attrs.Servers.Length > 0)
{
foreach (string value in attrs.Servers)
{
attrsSB.Append(value);
attrsSB.Append(',');
}
}
attrsSB.Append(Environment.NewLine);
return attrsSB.ToString();
}
public virtual IRegion<TKey,TVal> CreateRootRegion()
{
return CreateRootRegion(null);
}
public virtual IRegion<TKey, TVal> CreateRootRegion(string regionName)
{
string rootRegionData = GetStringValue("regionSpec");
return CreateRootRegion(regionName, rootRegionData);
}
public virtual IRegion<TKey, TVal> CreateRootRegion(string regionName,
string rootRegionData)
{
string tagName = GetStringValue("TAG");
string endpoints = Util.BBGet(JavaServerBB, EndPointTag + tagName)
as string;
return CreateRootRegion(regionName, rootRegionData, endpoints);
}
public virtual IRegion<TKey, TVal> CreateRootRegion(string regionName,
string rootRegionData, string endpoints)
{
if (rootRegionData != null && rootRegionData.Length > 0)
{
string rootRegionName;
if (regionName == null || regionName.Length == 0)
{
rootRegionName = GetRegionName(rootRegionData);
}
else
{
rootRegionName = regionName;
}
if (rootRegionName != null && rootRegionName.Length > 0)
{
IRegion<TKey, TVal> region;
if ((region = CacheHelper<TKey, TVal>.GetRegion(rootRegionName)) == null)
{
Properties<string,string> dsProps = new Properties<string,string>();
GetClientSecurityProperties(ref dsProps, rootRegionName);
// Check for any setting of heap LRU limit
int heapLruLimit = GetUIntValue(HeapLruLimitKey);
if (heapLruLimit > 0)
{
dsProps.Insert("heap-lru-limit", heapLruLimit.ToString());
}
string conflateEvents = GetStringValue(ConflateEventsKey);
if (conflateEvents != null && conflateEvents.Length > 0)
{
dsProps.Insert("conflate-events", conflateEvents);
}
ResetKey("sslEnable");
bool isSslEnable = GetBoolValue("sslEnable");
if (isSslEnable)
{
dsProps.Insert("ssl-enabled", "true");
string keyStorePath = Util.GetFwkLogDir(Util.SystemType) + "/data/keystore";
string pubkey = keyStorePath + "/client_truststore.pem";
string privkey = keyStorePath + "/client_keystore.pem";
dsProps.Insert("ssl-keystore", privkey);
dsProps.Insert("ssl-truststore", pubkey);
}
//Properties rootAttrs = GetNewRegionAttributes(rootRegionData);
// Check if this is a thin-client region; if so set the endpoints
RegionFactory rootAttrs = null;
//RegionFactory rootAttrs = CacheHelper<TKey, TVal>.DCache.CreateRegionFactory(RegionShortcut.PROXY);
string m_isPool = null;
//SetRegionAttributes(rootAttrs, rootRegionData, ref m_isPool);
int redundancyLevel = 0;
redundancyLevel = GetUIntValue(RedundancyLevelKey);
//string m_isPool = rootAttrs.Find("pool-name");
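// POOLOPT (set by the test driver environment) selects how the client connects:
// "poolwithendpoints" adds servers to the pool, "poolwithlocator" adds locators (see CreatePool below).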
string mode = Util.GetEnvironmentVariable("POOLOPT");
if (endpoints != null && endpoints.Length > 0)
{
//redundancyLevel = GetUIntValue(RedundancyLevelKey);
//if (redundancyLevel > 0)
//{
if (mode == "poolwithendpoints" || mode == "poolwithlocator" )//|| m_isPool != null)
{
FwkInfo("Setting the pool-level configurations");
CacheHelper<TKey, TVal>.InitConfigPool(dsProps);
}
try
{
rootAttrs = CacheHelper<TKey, TVal>.DCache.CreateRegionFactory(RegionShortcut.PROXY);
}
catch (Exception e)
{
FwkException("GOT this {0}",e.Message);
}
FwkInfo("Creating region factory");
SetRegionAttributes(rootAttrs, rootRegionData, ref m_isPool);
}
rootAttrs = CreatePool(rootAttrs, redundancyLevel);
ResetKey("NumberOfRegion");
int numRegions = GetUIntValue("NumberOfRegion");
if (numRegions < 1)
numRegions = 1;
for (int i = 0; i < numRegions; i++)
{
if(i>0)
region = CacheHelper<TKey, TVal>.CreateRegion(rootRegionName+"_"+i, rootAttrs);
else
region = CacheHelper<TKey, TVal>.CreateRegion(rootRegionName, rootAttrs);
}
Apache.Geode.Client.RegionAttributes<TKey, TVal> regAttr = region.Attributes;
FwkInfo("Region attributes for {0}: {1}", rootRegionName,
CacheHelper<TKey, TVal>.RegionAttributesToString(regAttr));
}
return region;
}
}
return null;
}
private void ParseEndPoints(string ep, bool isServer, int redundancyLevel)
{
string poolName = "_Test_Pool";
PoolFactory pf = CacheHelper<TKey, TVal>.DCache.GetPoolFactory();
string[] eps = ep.Split(',');
foreach (string endpoint in eps)
{
string hostName = endpoint.Split(':')[0];
int portNum = int.Parse(endpoint.Split(':')[1]);
if (isServer)
{
FwkInfo("adding pool host port for server");
pf.AddServer(hostName, portNum);
}
else
{
FwkInfo("adding pool host port for server");
pf.AddLocator(hostName, portNum);
}
}
pf.SetSubscriptionEnabled(true);
ResetKey("multiUserMode");
bool multiUserMode = GetBoolValue("multiUserMode");
if (multiUserMode)
{
pf.SetMultiuserAuthentication(true);
FwkInfo("MultiUser Mode is set to true");
}
else
{
pf.SetMultiuserAuthentication(false);
FwkInfo("MultiUser Mode is set to false");
}
pf.SetFreeConnectionTimeout(TimeSpan.FromMilliseconds(180000));
pf.SetReadTimeout(TimeSpan.FromMilliseconds(180000));
pf.SetMinConnections(20);
pf.SetMaxConnections(30);
if (redundancyLevel > 0)
pf.SetSubscriptionRedundancy(redundancyLevel);
if (CacheHelper<TKey, TVal>.DCache.GetPoolManager().Find(poolName) == null)
{
Pool pool = pf.Create(poolName, CacheHelper<TKey, TVal>.DCache);
FwkInfo("Pool attributes are {0}:", PoolAttributesToString(pool));
}
FwkInfo("Create Pool complete with poolName= {0}", poolName);
}
public virtual RegionFactory CreatePool(RegionFactory attr, int redundancyLevel)
{
string mode = Util.GetEnvironmentVariable("POOLOPT");
if (mode == "poolwithendpoints")
{
string EndPoints = Util.BBGet(JavaServerBB, EndPointTag) as string;
ParseEndPoints(EndPoints, true, redundancyLevel);
attr = attr.SetPoolName("_Test_Pool");
}
else if (mode == "poolwithlocator")
{
string locatorAddress = (string)Util.BBGet(string.Empty, "LOCATOR_ADDRESS_POOL");
ParseEndPoints(locatorAddress, false, redundancyLevel);
attr = attr.SetPoolName("_Test_Pool");
}
return attr;
}
// TODO: format an appropriate line for logging.
public virtual void SetTaskRunInfo(string regionTag, string taskName,
int numKeys, int numClients, int valueSize, int numThreads)
{
m_taskData = new FwkTaskData(regionTag, taskName, numKeys, numClients,
valueSize, numThreads);
}
public virtual void AddTaskRunRecord(int iters, TimeSpan elapsedTime)
{
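// Throughput over the whole run; "micros" is the total elapsed time in microseconds.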
double opsPerSec = iters / elapsedTime.TotalSeconds;
double micros = elapsedTime.TotalMilliseconds * 1000;
string recordStr = string.Format("{0} -- {1} ops/sec, {2} ops, {3} micros",
m_taskData.GetLogString(), opsPerSec, iters, micros);
lock (((ICollection)m_taskRecords).SyncRoot)
{
m_taskRecords.Add(recordStr);
}
Util.RawLog(string.Format("[PerfSuite] {0}{1}", recordStr,
Environment.NewLine));
Util.RawLog(string.Format("[PerfData],{0},{1},{2},{3},{4}{5}",
m_taskData.GetCSVString(), opsPerSec, iters, micros,
DateTime.Now.ToString("G"), Environment.NewLine));
}
public virtual void RunTask(ClientTask task, int numThreads,
int iters, int timedInterval, int maxTime, object data)
{
ClientBase client = null;
try
{
if (numThreads > 1)
{
client = ClientGroup.Create(UnitThread.Create, numThreads);
}
else
{
client = new UnitThread();
}
UnitFnMethod<UnitFnMethod<int, object>, int, object> taskDeleg =
new UnitFnMethod<UnitFnMethod<int, object>, int, object>(
client.Call<int, object>);
task.StartRun();
IAsyncResult taskRes = taskDeleg.BeginInvoke(
task.DoTask, iters, data, null, null);
if (timedInterval > 0)
{
System.Threading.Thread.Sleep(timedInterval);
task.EndRun();
}
if (maxTime <= 0)
{
taskRes.AsyncWaitHandle.WaitOne();
}
else if (!taskRes.AsyncWaitHandle.WaitOne(maxTime, false))
{
throw new ClientTimeoutException("RunTask() timed out.");
}
taskDeleg.EndInvoke(taskRes);
task.EndRun();
}
finally
{
if (client != null)
{
client.Dispose();
}
}
}
public virtual void EndTask()
{
lock (((ICollection)m_taskRecords).SyncRoot)
{
if (m_taskRecords.Count > 0)
{
StringBuilder summarySB = new StringBuilder();
foreach (string taskRecord in m_taskRecords)
{
summarySB.Append(Environment.NewLine + '\t' + taskRecord);
}
FwkInfo("TIMINGS:: Summary: {0}", summarySB.ToString());
m_taskRecords.Clear();
}
}
ClearCachedKeys();
PopTaskName();
}
public QueryService CheckQueryService()
{
Pool/*<TKey, TVal>*/ pool = CacheHelper<TKey, TVal>.DCache.GetPoolManager().Find("_Test_Pool");
return pool.GetQueryService();
}
#endregion
}
}
|
apache-2.0
|
IdentityServer/IdentityServer4.Templates
|
src/IdentityServer4EntityFramework/Migrations/ConfigurationDb/ConfigurationDbContextModelSnapshot.cs
|
32305
|
// <auto-generated />
using System;
using IdentityServer4.EntityFramework.DbContexts;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
namespace IdentityServer4EntityFramework.Migrations.ConfigurationDb
{
[DbContext(typeof(ConfigurationDbContext))]
partial class ConfigurationDbContextModelSnapshot : ModelSnapshot
{
protected override void BuildModel(ModelBuilder modelBuilder)
{
#pragma warning disable 612, 618
modelBuilder
.HasAnnotation("ProductVersion", "3.1.0");
modelBuilder.Entity("IdentityServer4.EntityFramework.Entities.ApiResource", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("AllowedAccessTokenSigningAlgorithms")
.HasColumnType("TEXT")
.HasMaxLength(100);
b.Property<DateTime>("Created")
.HasColumnType("TEXT");
b.Property<string>("Description")
.HasColumnType("TEXT")
.HasMaxLength(1000);
b.Property<string>("DisplayName")
.HasColumnType("TEXT")
.HasMaxLength(200);
b.Property<bool>("Enabled")
.HasColumnType("INTEGER");
b.Property<DateTime?>("LastAccessed")
.HasColumnType("TEXT");
b.Property<string>("Name")
.IsRequired()
.HasColumnType("TEXT")
.HasMaxLength(200);
b.Property<bool>("NonEditable")
.HasColumnType("INTEGER");
b.Property<bool>("ShowInDiscoveryDocument")
.HasColumnType("INTEGER");
b.Property<DateTime?>("Updated")
.HasColumnType("TEXT");
b.HasKey("Id");
b.HasIndex("Name")
.IsUnique();
b.ToTable("ApiResources");
});
modelBuilder.Entity("IdentityServer4.EntityFramework.Entities.ApiResourceClaim", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<int>("ApiResourceId")
.HasColumnType("INTEGER");
b.Property<string>("Type")
.IsRequired()
.HasColumnType("TEXT")
.HasMaxLength(200);
b.HasKey("Id");
b.HasIndex("ApiResourceId");
b.ToTable("ApiResourceClaims");
});
modelBuilder.Entity("IdentityServer4.EntityFramework.Entities.ApiResourceProperty", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<int>("ApiResourceId")
.HasColumnType("INTEGER");
b.Property<string>("Key")
.IsRequired()
.HasColumnType("TEXT")
.HasMaxLength(250);
b.Property<string>("Value")
.IsRequired()
.HasColumnType("TEXT")
.HasMaxLength(2000);
b.HasKey("Id");
b.HasIndex("ApiResourceId");
b.ToTable("ApiResourceProperties");
});
modelBuilder.Entity("IdentityServer4.EntityFramework.Entities.ApiResourceScope", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<int>("ApiResourceId")
.HasColumnType("INTEGER");
b.Property<string>("Scope")
.IsRequired()
.HasColumnType("TEXT")
.HasMaxLength(200);
b.HasKey("Id");
b.HasIndex("ApiResourceId");
b.ToTable("ApiResourceScopes");
});
modelBuilder.Entity("IdentityServer4.EntityFramework.Entities.ApiResourceSecret", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<int>("ApiResourceId")
.HasColumnType("INTEGER");
b.Property<DateTime>("Created")
.HasColumnType("TEXT");
b.Property<string>("Description")
.HasColumnType("TEXT")
.HasMaxLength(1000);
b.Property<DateTime?>("Expiration")
.HasColumnType("TEXT");
b.Property<string>("Type")
.IsRequired()
.HasColumnType("TEXT")
.HasMaxLength(250);
b.Property<string>("Value")
.IsRequired()
.HasColumnType("TEXT")
.HasMaxLength(4000);
b.HasKey("Id");
b.HasIndex("ApiResourceId");
b.ToTable("ApiResourceSecrets");
});
modelBuilder.Entity("IdentityServer4.EntityFramework.Entities.ApiScope", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("Description")
.HasColumnType("TEXT")
.HasMaxLength(1000);
b.Property<string>("DisplayName")
.HasColumnType("TEXT")
.HasMaxLength(200);
b.Property<bool>("Emphasize")
.HasColumnType("INTEGER");
b.Property<bool>("Enabled")
.HasColumnType("INTEGER");
b.Property<string>("Name")
.IsRequired()
.HasColumnType("TEXT")
.HasMaxLength(200);
b.Property<bool>("Required")
.HasColumnType("INTEGER");
b.Property<bool>("ShowInDiscoveryDocument")
.HasColumnType("INTEGER");
b.HasKey("Id");
b.HasIndex("Name")
.IsUnique();
b.ToTable("ApiScopes");
});
modelBuilder.Entity("IdentityServer4.EntityFramework.Entities.ApiScopeClaim", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<int>("ScopeId")
.HasColumnType("INTEGER");
b.Property<string>("Type")
.IsRequired()
.HasColumnType("TEXT")
.HasMaxLength(200);
b.HasKey("Id");
b.HasIndex("ScopeId");
b.ToTable("ApiScopeClaims");
});
modelBuilder.Entity("IdentityServer4.EntityFramework.Entities.ApiScopeProperty", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<string>("Key")
.IsRequired()
.HasColumnType("TEXT")
.HasMaxLength(250);
b.Property<int>("ScopeId")
.HasColumnType("INTEGER");
b.Property<string>("Value")
.IsRequired()
.HasColumnType("TEXT")
.HasMaxLength(2000);
b.HasKey("Id");
b.HasIndex("ScopeId");
b.ToTable("ApiScopeProperties");
});
modelBuilder.Entity("IdentityServer4.EntityFramework.Entities.Client", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<int>("AbsoluteRefreshTokenLifetime")
.HasColumnType("INTEGER");
b.Property<int>("AccessTokenLifetime")
.HasColumnType("INTEGER");
b.Property<int>("AccessTokenType")
.HasColumnType("INTEGER");
b.Property<bool>("AllowAccessTokensViaBrowser")
.HasColumnType("INTEGER");
b.Property<bool>("AllowOfflineAccess")
.HasColumnType("INTEGER");
b.Property<bool>("AllowPlainTextPkce")
.HasColumnType("INTEGER");
b.Property<bool>("AllowRememberConsent")
.HasColumnType("INTEGER");
b.Property<string>("AllowedIdentityTokenSigningAlgorithms")
.HasColumnType("TEXT")
.HasMaxLength(100);
b.Property<bool>("AlwaysIncludeUserClaimsInIdToken")
.HasColumnType("INTEGER");
b.Property<bool>("AlwaysSendClientClaims")
.HasColumnType("INTEGER");
b.Property<int>("AuthorizationCodeLifetime")
.HasColumnType("INTEGER");
b.Property<bool>("BackChannelLogoutSessionRequired")
.HasColumnType("INTEGER");
b.Property<string>("BackChannelLogoutUri")
.HasColumnType("TEXT")
.HasMaxLength(2000);
b.Property<string>("ClientClaimsPrefix")
.HasColumnType("TEXT")
.HasMaxLength(200);
b.Property<string>("ClientId")
.IsRequired()
.HasColumnType("TEXT")
.HasMaxLength(200);
b.Property<string>("ClientName")
.HasColumnType("TEXT")
.HasMaxLength(200);
b.Property<string>("ClientUri")
.HasColumnType("TEXT")
.HasMaxLength(2000);
b.Property<int?>("ConsentLifetime")
.HasColumnType("INTEGER");
b.Property<DateTime>("Created")
.HasColumnType("TEXT");
b.Property<string>("Description")
.HasColumnType("TEXT")
.HasMaxLength(1000);
b.Property<int>("DeviceCodeLifetime")
.HasColumnType("INTEGER");
b.Property<bool>("EnableLocalLogin")
.HasColumnType("INTEGER");
b.Property<bool>("Enabled")
.HasColumnType("INTEGER");
b.Property<bool>("FrontChannelLogoutSessionRequired")
.HasColumnType("INTEGER");
b.Property<string>("FrontChannelLogoutUri")
.HasColumnType("TEXT")
.HasMaxLength(2000);
b.Property<int>("IdentityTokenLifetime")
.HasColumnType("INTEGER");
b.Property<bool>("IncludeJwtId")
.HasColumnType("INTEGER");
b.Property<DateTime?>("LastAccessed")
.HasColumnType("TEXT");
b.Property<string>("LogoUri")
.HasColumnType("TEXT")
.HasMaxLength(2000);
b.Property<bool>("NonEditable")
.HasColumnType("INTEGER");
b.Property<string>("PairWiseSubjectSalt")
.HasColumnType("TEXT")
.HasMaxLength(200);
b.Property<string>("ProtocolType")
.IsRequired()
.HasColumnType("TEXT")
.HasMaxLength(200);
b.Property<int>("RefreshTokenExpiration")
.HasColumnType("INTEGER");
b.Property<int>("RefreshTokenUsage")
.HasColumnType("INTEGER");
b.Property<bool>("RequireClientSecret")
.HasColumnType("INTEGER");
b.Property<bool>("RequireConsent")
.HasColumnType("INTEGER");
b.Property<bool>("RequirePkce")
.HasColumnType("INTEGER");
b.Property<bool>("RequireRequestObject")
.HasColumnType("INTEGER");
b.Property<int>("SlidingRefreshTokenLifetime")
.HasColumnType("INTEGER");
b.Property<bool>("UpdateAccessTokenClaimsOnRefresh")
.HasColumnType("INTEGER");
b.Property<DateTime?>("Updated")
.HasColumnType("TEXT");
b.Property<string>("UserCodeType")
.HasColumnType("TEXT")
.HasMaxLength(100);
b.Property<int?>("UserSsoLifetime")
.HasColumnType("INTEGER");
b.HasKey("Id");
b.HasIndex("ClientId")
.IsUnique();
b.ToTable("Clients");
});
modelBuilder.Entity("IdentityServer4.EntityFramework.Entities.ClientClaim", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<int>("ClientId")
.HasColumnType("INTEGER");
b.Property<string>("Type")
.IsRequired()
.HasColumnType("TEXT")
.HasMaxLength(250);
b.Property<string>("Value")
.IsRequired()
.HasColumnType("TEXT")
.HasMaxLength(250);
b.HasKey("Id");
b.HasIndex("ClientId");
b.ToTable("ClientClaims");
});
modelBuilder.Entity("IdentityServer4.EntityFramework.Entities.ClientCorsOrigin", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<int>("ClientId")
.HasColumnType("INTEGER");
b.Property<string>("Origin")
.IsRequired()
.HasColumnType("TEXT")
.HasMaxLength(150);
b.HasKey("Id");
b.HasIndex("ClientId");
b.ToTable("ClientCorsOrigins");
});
modelBuilder.Entity("IdentityServer4.EntityFramework.Entities.ClientGrantType", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<int>("ClientId")
.HasColumnType("INTEGER");
b.Property<string>("GrantType")
.IsRequired()
.HasColumnType("TEXT")
.HasMaxLength(250);
b.HasKey("Id");
b.HasIndex("ClientId");
b.ToTable("ClientGrantTypes");
});
modelBuilder.Entity("IdentityServer4.EntityFramework.Entities.ClientIdPRestriction", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<int>("ClientId")
.HasColumnType("INTEGER");
b.Property<string>("Provider")
.IsRequired()
.HasColumnType("TEXT")
.HasMaxLength(200);
b.HasKey("Id");
b.HasIndex("ClientId");
b.ToTable("ClientIdPRestrictions");
});
modelBuilder.Entity("IdentityServer4.EntityFramework.Entities.ClientPostLogoutRedirectUri", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<int>("ClientId")
.HasColumnType("INTEGER");
b.Property<string>("PostLogoutRedirectUri")
.IsRequired()
.HasColumnType("TEXT")
.HasMaxLength(2000);
b.HasKey("Id");
b.HasIndex("ClientId");
b.ToTable("ClientPostLogoutRedirectUris");
});
modelBuilder.Entity("IdentityServer4.EntityFramework.Entities.ClientProperty", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<int>("ClientId")
.HasColumnType("INTEGER");
b.Property<string>("Key")
.IsRequired()
.HasColumnType("TEXT")
.HasMaxLength(250);
b.Property<string>("Value")
.IsRequired()
.HasColumnType("TEXT")
.HasMaxLength(2000);
b.HasKey("Id");
b.HasIndex("ClientId");
b.ToTable("ClientProperties");
});
modelBuilder.Entity("IdentityServer4.EntityFramework.Entities.ClientRedirectUri", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<int>("ClientId")
.HasColumnType("INTEGER");
b.Property<string>("RedirectUri")
.IsRequired()
.HasColumnType("TEXT")
.HasMaxLength(2000);
b.HasKey("Id");
b.HasIndex("ClientId");
b.ToTable("ClientRedirectUris");
});
modelBuilder.Entity("IdentityServer4.EntityFramework.Entities.ClientScope", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<int>("ClientId")
.HasColumnType("INTEGER");
b.Property<string>("Scope")
.IsRequired()
.HasColumnType("TEXT")
.HasMaxLength(200);
b.HasKey("Id");
b.HasIndex("ClientId");
b.ToTable("ClientScopes");
});
modelBuilder.Entity("IdentityServer4.EntityFramework.Entities.ClientSecret", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<int>("ClientId")
.HasColumnType("INTEGER");
b.Property<DateTime>("Created")
.HasColumnType("TEXT");
b.Property<string>("Description")
.HasColumnType("TEXT")
.HasMaxLength(2000);
b.Property<DateTime?>("Expiration")
.HasColumnType("TEXT");
b.Property<string>("Type")
.IsRequired()
.HasColumnType("TEXT")
.HasMaxLength(250);
b.Property<string>("Value")
.IsRequired()
.HasColumnType("TEXT")
.HasMaxLength(4000);
b.HasKey("Id");
b.HasIndex("ClientId");
b.ToTable("ClientSecrets");
});
modelBuilder.Entity("IdentityServer4.EntityFramework.Entities.IdentityResource", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<DateTime>("Created")
.HasColumnType("TEXT");
b.Property<string>("Description")
.HasColumnType("TEXT")
.HasMaxLength(1000);
b.Property<string>("DisplayName")
.HasColumnType("TEXT")
.HasMaxLength(200);
b.Property<bool>("Emphasize")
.HasColumnType("INTEGER");
b.Property<bool>("Enabled")
.HasColumnType("INTEGER");
b.Property<string>("Name")
.IsRequired()
.HasColumnType("TEXT")
.HasMaxLength(200);
b.Property<bool>("NonEditable")
.HasColumnType("INTEGER");
b.Property<bool>("Required")
.HasColumnType("INTEGER");
b.Property<bool>("ShowInDiscoveryDocument")
.HasColumnType("INTEGER");
b.Property<DateTime?>("Updated")
.HasColumnType("TEXT");
b.HasKey("Id");
b.HasIndex("Name")
.IsUnique();
b.ToTable("IdentityResources");
});
modelBuilder.Entity("IdentityServer4.EntityFramework.Entities.IdentityResourceClaim", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<int>("IdentityResourceId")
.HasColumnType("INTEGER");
b.Property<string>("Type")
.IsRequired()
.HasColumnType("TEXT")
.HasMaxLength(200);
b.HasKey("Id");
b.HasIndex("IdentityResourceId");
b.ToTable("IdentityResourceClaims");
});
modelBuilder.Entity("IdentityServer4.EntityFramework.Entities.IdentityResourceProperty", b =>
{
b.Property<int>("Id")
.ValueGeneratedOnAdd()
.HasColumnType("INTEGER");
b.Property<int>("IdentityResourceId")
.HasColumnType("INTEGER");
b.Property<string>("Key")
.IsRequired()
.HasColumnType("TEXT")
.HasMaxLength(250);
b.Property<string>("Value")
.IsRequired()
.HasColumnType("TEXT")
.HasMaxLength(2000);
b.HasKey("Id");
b.HasIndex("IdentityResourceId");
b.ToTable("IdentityResourceProperties");
});
modelBuilder.Entity("IdentityServer4.EntityFramework.Entities.ApiResourceClaim", b =>
{
b.HasOne("IdentityServer4.EntityFramework.Entities.ApiResource", "ApiResource")
.WithMany("UserClaims")
.HasForeignKey("ApiResourceId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
});
modelBuilder.Entity("IdentityServer4.EntityFramework.Entities.ApiResourceProperty", b =>
{
b.HasOne("IdentityServer4.EntityFramework.Entities.ApiResource", "ApiResource")
.WithMany("Properties")
.HasForeignKey("ApiResourceId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
});
modelBuilder.Entity("IdentityServer4.EntityFramework.Entities.ApiResourceScope", b =>
{
b.HasOne("IdentityServer4.EntityFramework.Entities.ApiResource", "ApiResource")
.WithMany("Scopes")
.HasForeignKey("ApiResourceId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
});
modelBuilder.Entity("IdentityServer4.EntityFramework.Entities.ApiResourceSecret", b =>
{
b.HasOne("IdentityServer4.EntityFramework.Entities.ApiResource", "ApiResource")
.WithMany("Secrets")
.HasForeignKey("ApiResourceId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
});
modelBuilder.Entity("IdentityServer4.EntityFramework.Entities.ApiScopeClaim", b =>
{
b.HasOne("IdentityServer4.EntityFramework.Entities.ApiScope", "Scope")
.WithMany("UserClaims")
.HasForeignKey("ScopeId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
});
modelBuilder.Entity("IdentityServer4.EntityFramework.Entities.ApiScopeProperty", b =>
{
b.HasOne("IdentityServer4.EntityFramework.Entities.ApiScope", "Scope")
.WithMany("Properties")
.HasForeignKey("ScopeId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
});
modelBuilder.Entity("IdentityServer4.EntityFramework.Entities.ClientClaim", b =>
{
b.HasOne("IdentityServer4.EntityFramework.Entities.Client", "Client")
.WithMany("Claims")
.HasForeignKey("ClientId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
});
modelBuilder.Entity("IdentityServer4.EntityFramework.Entities.ClientCorsOrigin", b =>
{
b.HasOne("IdentityServer4.EntityFramework.Entities.Client", "Client")
.WithMany("AllowedCorsOrigins")
.HasForeignKey("ClientId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
});
modelBuilder.Entity("IdentityServer4.EntityFramework.Entities.ClientGrantType", b =>
{
b.HasOne("IdentityServer4.EntityFramework.Entities.Client", "Client")
.WithMany("AllowedGrantTypes")
.HasForeignKey("ClientId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
});
modelBuilder.Entity("IdentityServer4.EntityFramework.Entities.ClientIdPRestriction", b =>
{
b.HasOne("IdentityServer4.EntityFramework.Entities.Client", "Client")
.WithMany("IdentityProviderRestrictions")
.HasForeignKey("ClientId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
});
modelBuilder.Entity("IdentityServer4.EntityFramework.Entities.ClientPostLogoutRedirectUri", b =>
{
b.HasOne("IdentityServer4.EntityFramework.Entities.Client", "Client")
.WithMany("PostLogoutRedirectUris")
.HasForeignKey("ClientId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
});
modelBuilder.Entity("IdentityServer4.EntityFramework.Entities.ClientProperty", b =>
{
b.HasOne("IdentityServer4.EntityFramework.Entities.Client", "Client")
.WithMany("Properties")
.HasForeignKey("ClientId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
});
modelBuilder.Entity("IdentityServer4.EntityFramework.Entities.ClientRedirectUri", b =>
{
b.HasOne("IdentityServer4.EntityFramework.Entities.Client", "Client")
.WithMany("RedirectUris")
.HasForeignKey("ClientId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
});
modelBuilder.Entity("IdentityServer4.EntityFramework.Entities.ClientScope", b =>
{
b.HasOne("IdentityServer4.EntityFramework.Entities.Client", "Client")
.WithMany("AllowedScopes")
.HasForeignKey("ClientId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
});
modelBuilder.Entity("IdentityServer4.EntityFramework.Entities.ClientSecret", b =>
{
b.HasOne("IdentityServer4.EntityFramework.Entities.Client", "Client")
.WithMany("ClientSecrets")
.HasForeignKey("ClientId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
});
modelBuilder.Entity("IdentityServer4.EntityFramework.Entities.IdentityResourceClaim", b =>
{
b.HasOne("IdentityServer4.EntityFramework.Entities.IdentityResource", "IdentityResource")
.WithMany("UserClaims")
.HasForeignKey("IdentityResourceId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
});
modelBuilder.Entity("IdentityServer4.EntityFramework.Entities.IdentityResourceProperty", b =>
{
b.HasOne("IdentityServer4.EntityFramework.Entities.IdentityResource", "IdentityResource")
.WithMany("Properties")
.HasForeignKey("IdentityResourceId")
.OnDelete(DeleteBehavior.Cascade)
.IsRequired();
});
#pragma warning restore 612, 618
}
}
}
|
apache-2.0
|
daileyet/openlibs.utilities
|
src/main/java/com/openthinks/libs/utilities/handler/annotation/GroupRef.java
|
864
|
package com.openthinks.libs.utilities.handler.annotation;
import static java.lang.annotation.ElementType.FIELD;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
import java.lang.annotation.Documented;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
/**
* ClassName: GroupRef <br>
* Function: specifies the parent {@link Handler} the annotated field belongs to. <br>
* date: May 31, 2018 2:57:37 PM <br>
*
* @since JDK 1.8
*/
@Documented
@Retention(RUNTIME)
@Target({FIELD})
public @interface GroupRef {
/**
*
* name: the name of the target parent (a field name). <br>
*
* @return target parent name
*/
String name();
/**
*
* key:target parent mapped key. <br>
* <b>Notice:</b> used only when {@code name} does not take effect.
*
* @return string key
*/
String key() default Mapped.NULL;
}
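// A minimal usage sketch (the field and handler names below are hypothetical,
// shown only to illustrate how @GroupRef points a child handler field at its parent):
//
// public class Handlers {
//   Handler parent;
//
//   @GroupRef(name = "parent")
//   Handler child;
// }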
|
apache-2.0
|
aajjbb/contest-files
|
USACO/Crosswords.cpp
|
1481
|
#include <bits/stdc++.h>
template<typename T> T gcd(T a, T b) {
if(!b) return a;
return gcd(b, a % b);
}
template<typename T> T lcm(T a, T b) {
return a * b / gcd(a, b);
}
template<typename T> void chmin(T& a, T b) { a = (a > b) ? b : a; }
template<typename T> void chmax(T& a, T b) { a = (a < b) ? b : a; }
int in() { int x; scanf("%d", &x); return x; }
using namespace std;
typedef long long Int;
typedef unsigned uint;
const int MAXN = 55;
int N, M;
string S[MAXN];
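// A horizontal word of length >= 3 starts at (x, y) when the cell to the left
// is blocked (or off-grid) and the two cells to the right are open.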
bool try_hor(int x, int y) {
if (y - 1 >= 0 && S[x][y - 1] == '.') {
return false;
}
if (y + 2 >= M) return false;
for (int i = 1; i <= 2; i++) {
if (S[x][y + i] == '#') {
return false;
}
}
return true;
}
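// Same test for a vertical word: the cell above must be blocked (or off-grid)
// and the two cells below must be open.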
bool try_ver(int x, int y) {
if (x - 1 >= 0 && S[x - 1][y] == '.') {
return false;
}
if (x + 2 >= N) return false;
for (int i = 1; i <= 2; i++) {
if (S[x + i][y] == '#') {
return false;
}
}
return true;
}
int main(void) {
freopen("crosswords.in", "r", stdin);
freopen("crosswords.out", "w", stdout);
cin >> N >> M;
for (int i = 0; i < N; i++) {
cin >> S[i];
}
vector<pair<int, int> > ans;
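// Collect every open cell that starts a horizontal or vertical word (output is 1-based).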
for (int i = 0; i < N; i++) {
for (int j = 0; j < M; j++) {
if (S[i][j] == '#') continue;
if (try_hor(i, j) || try_ver(i, j)) {
ans.push_back(make_pair(i + 1, j + 1));
}
}
}
cout << ans.size() << "\n";
for (int i = 0; i < (int) ans.size(); i++) {
cout << ans[i].first << " " << ans[i].second << "\n";
}
return 0;
}
|
apache-2.0
|
thurt/arangodb
|
js/server/modules/@arangodb/formatter.js
|
8098
|
/*jshint strict: false, unused: false */
/*global FORMAT_DATETIME, PARSE_DATETIME */
////////////////////////////////////////////////////////////////////////////////
/// @brief formatter functions
///
/// @file
///
/// DISCLAIMER
///
/// Copyright 2011-2012 triagens GmbH, Cologne, Germany
///
/// Licensed under the Apache License, Version 2.0 (the "License");
/// you may not use this file except in compliance with the License.
/// You may obtain a copy of the License at
///
/// http://www.apache.org/licenses/LICENSE-2.0
///
/// Unless required by applicable law or agreed to in writing, software
/// distributed under the License is distributed on an "AS IS" BASIS,
/// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
/// See the License for the specific language governing permissions and
/// limitations under the License.
///
/// Copyright holder is triAGENS GmbH, Cologne, Germany
///
/// @author Dr. Frank Celler
/// @author Copyright 2011-2012, triAGENS GmbH, Cologne, Germany
////////////////////////////////////////////////////////////////////////////////
var arangodb = require("@arangodb");
// -----------------------------------------------------------------------------
// --SECTION-- formatter
// -----------------------------------------------------------------------------
////////////////////////////////////////////////////////////////////////////////
/// @brief formats a number
////////////////////////////////////////////////////////////////////////////////
exports.number = function (value, info, lang) {
var error;
var format;
var result;
if (info.hasOwnProperty('format')) {
format = info.format;
if (format === "%d") {
result = value.toFixed(0);
}
else if (format === "%f") {
result = String(value);
}
else {
error = new arangodb.ArangoError();
error.errorNum = arangodb.ERROR_NOT_IMPLEMENTED;
error.errorMessage = "format '" + format + "' not implemented";
throw error;
}
}
else {
result = value;
}
return result;
};
////////////////////////////////////////////////////////////////////////////////
/// @brief format a float value
////////////////////////////////////////////////////////////////////////////////
exports.formatFloat = function (value, args) {
if (undefined === value || null === value) {
return null;
}
if (undefined === args) {
args = {};
}
var decPlaces = isNaN(args.decPlaces = Math.abs(args.decPlaces)) ? 2 : args.decPlaces;
var decSeparator =
args.decSeparator === undefined ? "." : args.decSeparator;
var thouSeparator =
args.thouSeparator === undefined ? "," : args.thouSeparator;
var sign = value < 0 ? "-" : "";
var i = '';
i += parseInt(value = Math.abs(+value || 0).toFixed(decPlaces), 10);
var j = i.length;
j = (j > 3) ? (j % 3) : 0;
return sign + (j ? i.substr(0, j) + thouSeparator : "") +
i.substr(j).replace(/(\d{3})(?=\d)/g, "$1" + thouSeparator) +
(decPlaces ? decSeparator + Math.abs(value - i).toFixed(decPlaces).slice(2) : "");
};
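// Illustrative example (not part of the module's API surface), using the
// default "." and "," separators:
//   formatFloat(1234567.891, { decPlaces: 2 }) === "1,234,567.89"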
////////////////////////////////////////////////////////////////////////////////
/// @brief format a datetime value
////////////////////////////////////////////////////////////////////////////////
exports.formatDatetime = function (value, args) {
if (undefined === value || null === value) {
return null;
}
if (undefined === args) {
args = {};
}
if (undefined === args.pattern) {
args.pattern = "yyyy-MM-dd'T'HH:mm:ssZ";
}
if (undefined === args.timezone) {
args.timezone = null;
}
if (undefined === args.lang) {
args.lang = null;
}
return FORMAT_DATETIME(value, args.pattern, args.timezone, args.lang);
};
////////////////////////////////////////////////////////////////////////////////
/// @brief join array
////////////////////////////////////////////////////////////////////////////////
exports.joinNumbers = function (value, args) {
if (undefined === value || null === value) {
return null;
}
return value.join();
};
// -----------------------------------------------------------------------------
// --SECTION-- parser
// -----------------------------------------------------------------------------
////////////////////////////////////////////////////////////////////////////////
/// @brief parse a number
////////////////////////////////////////////////////////////////////////////////
exports.parseFloat = function (value, args) {
if (undefined === value || null === value) {
return null;
}
if (undefined === args) {
args = {};
}
var decPlaces = isNaN(args.decPlaces = Math.abs(args.decPlaces)) ? 2 : args.decPlaces;
var decSeparator = args.decSeparator === undefined ? "." : args.decSeparator;
var thouSeparator = args.thouSeparator === undefined ? "," : args.thouSeparator;
var str = "";
str += value;
str = str.split(thouSeparator).join(""); // remove every thousands separator, not just the first
if ("." !== decSeparator) {
str = str.replace(decSeparator, ".");
}
return parseFloat(str);
};
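// Illustrative example with the default separators:
//   parseFloat("1,234.56") returns 1234.56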
////////////////////////////////////////////////////////////////////////////////
/// @brief parse a datetime value
////////////////////////////////////////////////////////////////////////////////
exports.parseDatetime = function (value, args) {
if (undefined === value || null === value) {
return null;
}
if (undefined === args) {
args = {};
}
if (undefined === args.pattern) {
args.pattern = "yyyy-MM-dd'T'HH:mm:ssZ";
}
if (undefined === args.timezone) {
args.timezone = null;
}
if (undefined === args.lang) {
args.lang = null;
}
return PARSE_DATETIME(value, args.pattern, args.timezone, args.lang);
};
////////////////////////////////////////////////////////////////////////////////
/// @brief split array
////////////////////////////////////////////////////////////////////////////////
exports.splitNumbers = function (value, args) {
var result = [];
var i;
if (undefined === value || null === value) {
return null;
}
var values = value.split(",");
for (i = 0; i < values.length; ++i) {
result[i] = parseFloat(values[i]);
}
return result;
};
// -----------------------------------------------------------------------------
// --SECTION-- validators
// -----------------------------------------------------------------------------
////////////////////////////////////////////////////////////////////////////////
/// @brief validate not null
////////////////////////////////////////////////////////////////////////////////
exports.validateNotNull = function (value, args) {
if (undefined === value || null === value) {
return false;
}
return true;
};
////////////////////////////////////////////////////////////////////////////////
/// @brief validate >
////////////////////////////////////////////////////////////////////////////////
exports.validateGT = function (value, args) {
if (undefined === value) {
return false;
}
if (undefined === args) {
args = {};
}
var cmpValue = args.compareValue;
return value > cmpValue;
};
////////////////////////////////////////////////////////////////////////////////
/// @brief validate ==
////////////////////////////////////////////////////////////////////////////////
exports.validateEQ = function (value, args) {
if (undefined === value) {
return false;
}
if (undefined === args) {
args = {};
}
var cmpValue = args.compareValue;
return value === cmpValue;
};
// -----------------------------------------------------------------------------
// --SECTION-- END-OF-FILE
// -----------------------------------------------------------------------------
// Local Variables:
// mode: outline-minor
// outline-regexp: "^\\(/// @brief\\|/// @addtogroup\\|// --SECTION--\\|/// @page\\|/// @\\}\\)"
// End:
|
apache-2.0
|
masaki-yamakawa/geode
|
geode-assembly/src/distributedTest/java/org/apache/geode/management/internal/rest/DeploymentSemanticVersionJarDUnitTest.java
|
10908
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.management.internal.rest;
import static org.apache.geode.test.junit.assertions.ClusterManagementListResultAssert.assertManagementListResult;
import static org.apache.geode.test.junit.assertions.ClusterManagementRealizationResultAssert.assertManagementResult;
import static org.assertj.core.api.Assertions.assertThat;
import java.io.File;
import java.nio.file.Paths;
import java.util.Set;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.apache.geode.distributed.internal.InternalConfigurationPersistenceService;
import org.apache.geode.internal.classloader.ClassPathLoader;
import org.apache.geode.management.api.ClusterManagementService;
import org.apache.geode.management.api.RealizationResult;
import org.apache.geode.management.cluster.client.ClusterManagementServiceBuilder;
import org.apache.geode.management.configuration.Deployment;
import org.apache.geode.management.runtime.DeploymentInfo;
import org.apache.geode.test.compiler.JarBuilder;
import org.apache.geode.test.dunit.rules.ClusterStartupRule;
import org.apache.geode.test.dunit.rules.MemberVM;
import org.apache.geode.test.junit.assertions.ClusterManagementListResultAssert;
public class DeploymentSemanticVersionJarDUnitTest {
@ClassRule
public static TemporaryFolder stagingTempDir = new TemporaryFolder();
@Rule
public ClusterStartupRule cluster = new ClusterStartupRule();
private MemberVM locator0, locator1, server2;
private static File stagedDir;
private static File semanticJarVersion0, semanticJarVersion1, semanticJarVersion2,
semanticJarVersion0b, semanticJarVersion0c;
@BeforeClass
public static void beforeClass() throws Exception {
stagedDir = stagingTempDir.getRoot();
JarBuilder jarBuilder = new JarBuilder();
semanticJarVersion0 = new File(stagedDir, "def-1.0.jar");
jarBuilder.buildJar(semanticJarVersion0, createClassContent("version1", "Def"));
semanticJarVersion1 = new File(stagedDir, "def-1.1.jar");
jarBuilder.buildJar(semanticJarVersion1, createClassContent("version2", "Def"));
semanticJarVersion2 = new File(stagedDir, "def-1.2.jar");
jarBuilder.buildJar(semanticJarVersion2, createClassContent("version3", "Def"));
semanticJarVersion0b = new File(stagingTempDir.newFolder("v1b"), "def-1.0.jar");
jarBuilder.buildJar(semanticJarVersion0b, createClassContent("version1b", "Def"));
semanticJarVersion0c = new File(stagingTempDir.newFolder("v1c"), "def.jar");
jarBuilder.buildJar(semanticJarVersion0c, createClassContent("version1c", "Def"));
}
private ClusterManagementService client;
private Deployment deployment;
@Before
public void before() throws Exception {
locator0 = cluster.startLocatorVM(0, l -> l.withHttpService());
int locator0Port = locator0.getPort();
locator1 =
cluster.startLocatorVM(1, l -> l.withHttpService().withConnectionToLocator(locator0Port));
server2 = cluster.startServerVM(2, locator0Port, locator1.getPort());
client = new ClusterManagementServiceBuilder()
.setPort(locator0.getHttpPort())
.build();
deployment = new Deployment();
}
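// Builds the source of a trivial Function whose getVersion() returns the given
// string; each staged jar above compiles one of these classes.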
private static String createClassContent(String version, String functionName) {
return "package jddunit.function;" + "import org.apache.geode.cache.execute.Function;"
+ "import org.apache.geode.cache.execute.FunctionContext;" + "public class "
+ functionName + " implements Function {" + "public boolean hasResult() {return true;}"
+ "public String getId() {return \"" + functionName + "\";}"
+ "public String getVersion() {return \"" + version + "\";}"
+ "public void execute(FunctionContext context) {context.getResultSender().lastResult(\""
+ version + "\");}}";
}
@Test
public void deploy() {
deployment.setFile(semanticJarVersion0);
assertManagementResult(client.create(deployment)).isSuccessful();
MemberVM.invokeInEveryMember(() -> {
assertThat(Paths.get(".").resolve("cluster_config").resolve("cluster").toFile().list())
.containsExactly("def-1.0.jar");
Set<String> deployedJars = getDeployedJarsFromClusterConfig();
assertThat(deployedJars).containsExactly("def-1.0.jar");
}, locator0, locator1);
assertThat(server2.getWorkingDir().list()).containsExactly("def-1.0.v1.jar");
server2.invoke(() -> verifyLoadAndHasVersion("def", "jddunit.function.Def", "version1"));
deployment.setFile(semanticJarVersion1);
assertManagementResult(client.create(deployment)).isSuccessful();
MemberVM.invokeInEveryMember(() -> {
assertThat(Paths.get(".").resolve("cluster_config").resolve("cluster").toFile().list())
.containsExactly("def-1.1.jar");
Set<String> deployedJars = getDeployedJarsFromClusterConfig();
assertThat(deployedJars).containsExactly("def-1.1.jar");
}, locator0, locator1);
assertThat(server2.getWorkingDir().list())
.containsExactlyInAnyOrder("def-1.0.v1.jar", "def-1.1.v2.jar");
server2.invoke(() -> verifyLoadAndHasVersion("def", "jddunit.function.Def", "version2"));
MemberVM server3 = cluster.startServerVM(3, locator0.getPort(), locator1.getPort());
assertThat(server3.getWorkingDir().list())
.containsExactlyInAnyOrder("def-1.1.v1.jar");
server3.invoke(() -> verifyLoadAndHasVersion("def", "jddunit.function.Def", "version2"));
// stop server3 and then deploy def-1.2.jar
server3.stop(false);
deployment.setFile(semanticJarVersion2);
assertManagementResult(client.create(deployment)).isSuccessful();
MemberVM.invokeInEveryMember(() -> {
assertThat(Paths.get(".").resolve("cluster_config").resolve("cluster").toFile().list())
.containsExactly("def-1.2.jar");
Set<String> deployedJars = getDeployedJarsFromClusterConfig();
assertThat(deployedJars).containsExactly("def-1.2.jar");
}, locator0, locator1);
assertThat(server2.getWorkingDir().list()).containsExactlyInAnyOrder(
"def-1.0.v1.jar", "def-1.1.v2.jar", "def-1.2.v3.jar");
server2.invoke(() -> verifyLoadAndHasVersion("def", "jddunit.function.Def", "version3"));
// restart server3 and make sure it picks up def-1.2
server3 = cluster.startServerVM(3, locator0.getPort(), locator1.getPort());
assertThat(server3.getWorkingDir().list()).containsExactly("def-1.2.v1.jar");
server3.invoke(() -> verifyLoadAndHasVersion("def", "jddunit.function.Def", "version3"));
// redeploying def-1.2 does not fail, but reports the jar as already deployed
deployment.setFile(semanticJarVersion2);
assertManagementResult(client.create(deployment)).isSuccessful().hasMemberStatus().extracting(
RealizationResult::getMessage)
.containsExactlyInAnyOrder("Already deployed", "Already deployed");
MemberVM.invokeInEveryMember(() -> {
assertThat(Paths.get(".").resolve("cluster_config").resolve("cluster").toFile().list())
.containsExactly("def-1.2.jar");
Set<String> deployedJars = getDeployedJarsFromClusterConfig();
assertThat(deployedJars).containsExactly("def-1.2.jar");
}, locator0, locator1);
}
@Test
public void deploySameJarNameWithDifferentContent() throws Exception {
deployment.setFile(semanticJarVersion0);
assertManagementResult(client.create(deployment)).isSuccessful()
.hasMemberStatus()
.extracting(RealizationResult::getMessage).asString()
.contains("def-1.0.v1.jar");
deployment.setFile(semanticJarVersion0b);
assertManagementResult(client.create(deployment)).isSuccessful()
.hasMemberStatus()
.extracting(RealizationResult::getMessage).asString()
.contains("def-1.0.v2.jar");
}
@Test
public void deployWithPlainWillCleanSemanticVersion() throws Exception {
// deploy def-1.0.jar
deployment.setFile(semanticJarVersion0);
assertManagementResult(client.create(deployment)).isSuccessful()
.hasMemberStatus()
.extracting(RealizationResult::getMessage).asString()
.containsOnlyOnce("def-1.0.v1.jar");
// deploy def.jar
deployment.setFile(semanticJarVersion0c);
assertManagementResult(client.create(deployment)).isSuccessful()
.hasMemberStatus()
.extracting(RealizationResult::getMessage).asString()
.containsOnlyOnce("def.v2.jar");
MemberVM.invokeInEveryMember(() -> {
assertThat(Paths.get(".").resolve("cluster_config").resolve("cluster").toFile().list())
.containsExactly("def.jar");
Set<String> deployedJars = getDeployedJarsFromClusterConfig();
assertThat(deployedJars).containsExactly("def.jar");
}, locator0, locator1);
assertThat(server2.getWorkingDir().list())
.containsExactlyInAnyOrder("def-1.0.v1.jar", "def.v2.jar");
server2.invoke(() -> verifyLoadAndHasVersion("def", "jddunit.function.Def", "version1c"));
ClusterManagementListResultAssert<Deployment, DeploymentInfo> listAssert =
assertManagementListResult(client.list(new Deployment()));
listAssert.hasConfigurations().hasSize(1)
.extracting(Deployment::getFileName)
.containsExactly("def.jar");
listAssert.hasRuntimeInfos().hasSize(1)
.extracting(DeploymentInfo::getJarLocation).asString()
.containsOnlyOnce("def.v2.jar");
}
static Set<String> getDeployedJarsFromClusterConfig() {
InternalConfigurationPersistenceService cps =
ClusterStartupRule.getLocator().getConfigurationPersistenceService();
return cps.getConfiguration("cluster").getJarNames();
}
private static void verifyLoadAndHasVersion(String artifactId, String className, String version)
throws Exception {
assertThat(ClassPathLoader.getLatest().getJarDeploymentService()
.getDeployed(artifactId).isSuccessful()).isTrue();
Class<?> klass = ClassPathLoader.getLatest().forName(className);
assertThat(klass).isNotNull();
assertThat(klass.getMethod("getVersion").invoke(klass.newInstance())).isEqualTo(version);
}
}
|
apache-2.0
|
yamahata/tacker
|
tacker/plugins/common/constants.py
|
2143
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# service type constants:
CORE = "CORE"
DUMMY = "DUMMY"
LOADBALANCER = "LOADBALANCER"
FIREWALL = "FIREWALL"
VPN = "VPN"
METERING = "METERING"
L3_ROUTER_NAT = "L3_ROUTER_NAT"
SERVICEVM = "SERVICEVM"
# maps extension alias to service type
EXT_TO_SERVICE_MAPPING = {
'dummy': DUMMY,
'lbaas': LOADBALANCER,
'fwaas': FIREWALL,
'vpnaas': VPN,
'metering': METERING,
'router': L3_ROUTER_NAT,
'servicevm': SERVICEVM,
}
# TODO(salvatore-orlando): Move these (or derive them) from conf file
ALLOWED_SERVICES = [CORE, DUMMY, LOADBALANCER, FIREWALL, VPN, METERING,
L3_ROUTER_NAT]
COMMON_PREFIXES = {
CORE: "",
DUMMY: "/dummy_svc",
LOADBALANCER: "/lb",
FIREWALL: "/fw",
VPN: "/vpn",
METERING: "/metering",
L3_ROUTER_NAT: "",
SERVICEVM: "",
}
# Service operation status constants
ACTIVE = "ACTIVE"
DOWN = "DOWN"
PENDING_CREATE = "PENDING_CREATE"
PENDING_UPDATE = "PENDING_UPDATE"
PENDING_DELETE = "PENDING_DELETE"
INACTIVE = "INACTIVE"
ERROR = "ERROR"
ACTIVE_PENDING_STATUSES = (
ACTIVE,
PENDING_CREATE,
PENDING_UPDATE
)
# FWaaS firewall rule action
FWAAS_ALLOW = "allow"
FWAAS_DENY = "deny"
# L3 Protocol name constants
TCP = "tcp"
UDP = "udp"
ICMP = "icmp"
# Network Type constants
TYPE_FLAT = 'flat'
TYPE_GRE = 'gre'
TYPE_LOCAL = 'local'
TYPE_VXLAN = 'vxlan'
TYPE_VLAN = 'vlan'
TYPE_NONE = 'none'
# The maximum length of an interface name (in Linux)
MAX_DEV_NAME_LEN = 16
|
apache-2.0
|
bcopy/opc-ua-stack
|
stack-core/src/main/java/com/digitalpetri/opcua/stack/core/types/structured/PublishRequest.java
|
2594
|
package com.digitalpetri.opcua.stack.core.types.structured;
import com.digitalpetri.opcua.stack.core.Identifiers;
import com.digitalpetri.opcua.stack.core.serialization.DelegateRegistry;
import com.digitalpetri.opcua.stack.core.serialization.UaDecoder;
import com.digitalpetri.opcua.stack.core.serialization.UaEncoder;
import com.digitalpetri.opcua.stack.core.serialization.UaRequestMessage;
import com.digitalpetri.opcua.stack.core.types.builtin.NodeId;
public class PublishRequest implements UaRequestMessage {
public static final NodeId TypeId = Identifiers.PublishRequest;
public static final NodeId BinaryEncodingId = Identifiers.PublishRequest_Encoding_DefaultBinary;
public static final NodeId XmlEncodingId = Identifiers.PublishRequest_Encoding_DefaultXml;
protected final RequestHeader _requestHeader;
protected final SubscriptionAcknowledgement[] _subscriptionAcknowledgements;
public PublishRequest(RequestHeader _requestHeader, SubscriptionAcknowledgement[] _subscriptionAcknowledgements) {
this._requestHeader = _requestHeader;
this._subscriptionAcknowledgements = _subscriptionAcknowledgements;
}
public RequestHeader getRequestHeader() { return _requestHeader; }
public SubscriptionAcknowledgement[] getSubscriptionAcknowledgements() { return _subscriptionAcknowledgements; }
@Override
public NodeId getTypeId() { return TypeId; }
@Override
public NodeId getBinaryEncodingId() { return BinaryEncodingId; }
@Override
public NodeId getXmlEncodingId() { return XmlEncodingId; }
public static void encode(PublishRequest publishRequest, UaEncoder encoder) {
encoder.encodeSerializable("RequestHeader", publishRequest._requestHeader);
encoder.encodeArray("SubscriptionAcknowledgements", publishRequest._subscriptionAcknowledgements, encoder::encodeSerializable);
}
public static PublishRequest decode(UaDecoder decoder) {
RequestHeader _requestHeader = decoder.decodeSerializable("RequestHeader", RequestHeader.class);
SubscriptionAcknowledgement[] _subscriptionAcknowledgements = decoder.decodeArray("SubscriptionAcknowledgements", decoder::decodeSerializable, SubscriptionAcknowledgement.class);
return new PublishRequest(_requestHeader, _subscriptionAcknowledgements);
}
static {
DelegateRegistry.registerEncoder(PublishRequest::encode, PublishRequest.class, BinaryEncodingId, XmlEncodingId);
DelegateRegistry.registerDecoder(PublishRequest::decode, PublishRequest.class, BinaryEncodingId, XmlEncodingId);
}
}
|
apache-2.0
|
matrix-org/synapse
|
synapse/config/captcha.py
|
2233
|
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ._base import Config
class CaptchaConfig(Config):
section = "captcha"
def read_config(self, config, **kwargs):
self.recaptcha_private_key = config.get("recaptcha_private_key")
self.recaptcha_public_key = config.get("recaptcha_public_key")
self.enable_registration_captcha = config.get(
"enable_registration_captcha", False
)
self.recaptcha_siteverify_api = config.get(
"recaptcha_siteverify_api",
"https://www.recaptcha.net/recaptcha/api/siteverify",
)
self.recaptcha_template = self.read_template("recaptcha.html")
def generate_config_section(self, **kwargs):
return """\
## Captcha ##
# See docs/CAPTCHA_SETUP.md for full details of configuring this.
# This homeserver's ReCAPTCHA public key. Must be specified if
# enable_registration_captcha is enabled.
#
#recaptcha_public_key: "YOUR_PUBLIC_KEY"
# This homeserver's ReCAPTCHA private key. Must be specified if
# enable_registration_captcha is enabled.
#
#recaptcha_private_key: "YOUR_PRIVATE_KEY"
# Uncomment to enable ReCaptcha checks when registering, preventing signup
# unless a captcha is answered. Requires a valid ReCaptcha
# public/private key. Defaults to 'false'.
#
#enable_registration_captcha: true
# The API endpoint to use for verifying m.login.recaptcha responses.
# Defaults to "https://www.recaptcha.net/recaptcha/api/siteverify".
#
#recaptcha_siteverify_api: "https://my.recaptcha.site"
"""
|
apache-2.0
|
markkerzner/nn_kove
|
hadoop/src/hdfs/org/apache/hadoop/hdfs/server/namenode/mapdb/Engine.java
|
8666
|
/*
* Copyright (c) 2012 Jan Kotek
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.namenode.mapdb;
/**
* Centerpiece for record management, `Engine` is a simple key value store.
* Engine is a low-level interface and is not meant to be used directly
* by the user. For most operations the user should use the {@link DB} class.
*
* In this store the key is a primitive `long` number, typically a pointer into the index table.
* The value is a class instance. To turn a value into/from binary form a serializer is
* required as an extra argument for most operations.
*
* Unlike other DBs MapDB does not expect the user to (de)serialize data before
* it is passed as an argument. Instead MapDB controls (de)serialization itself.
* This gives the DB a lot of flexibility: for example instances may be held in
* a cache to minimise the number of deserializations, or a modified instance can
* be placed into a queue and asynchronously written on a background thread.
*
* There is a {@link Store} subinterface for raw persistence.
* Most of MapDB's features come from {@link EngineWrapper}s, which are stacked on
* top of each other to provide asynchronous writes, instance caching, encryption etc.
* The `Engine` stack is a very elegant and uniform way to handle additional functionality.
* Other DBs need an ORM framework to achieve similar features.
* In the default configuration MapDB runs with this `Engine` stack:
*
* * **DISK** - raw file or memory
* * {@link StoreWAL} - permanent record store with transactions
* * {@link AsyncWriteEngine} - asynchronous writes to storage
* * {@link EngineWrapper.ByteTransformEngine} - compression or encryption (optional)
* * {@link CacheHashTable} - instance cache
* * {@link SnapshotEngine} - support for snapshots
* * **USER** - {@link DB} and collections
*
* Engine uses a `recid` to identify records. There is zero error handling in case a recid is invalid
* (a random number or an already deleted record). Passing an illegal recid may result in anything
* (returning null, throwing EOF or even corrupting the store). Engine is considered a low-level component
* and it is the responsibility of upper layers (collections) to ensure recids are consistent.
* The lack of error handling is a trade-off for speed (much like manual memory management in C++).
* <p/>
* Engine must support `null` record values. You may insert, update and fetch null records.
* Nulls play an important role in recid preallocation and asynchronous writes.
* <p/>
* A recid can be reused after it was deleted. If your application relies on recids being unique,
* you should update the record with a null value instead of deleting it.
* A null record consumes only 8 bytes in the store and is preserved during defragmentation.
*
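* A minimal usage sketch (the {@code Serializer.STRING_NOSIZE} constant is an
* assumption, shown for illustration only):
* <pre>{@code
* long recid = engine.put("hello", Serializer.STRING_NOSIZE);
* String s = engine.get(recid, Serializer.STRING_NOSIZE);
* engine.update(recid, "world", Serializer.STRING_NOSIZE);
* engine.delete(recid, Serializer.STRING_NOSIZE);
* engine.commit();
* }</pre>
*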
* @author Jan Kotek
*/
public interface Engine {
long CATALOG_RECID = 1;
long CLASS_INFO_RECID = 2;
long CHECK_RECORD = 3;
long LAST_RESERVED_RECID = 7;
/**
* Insert new record.
*
* @param value records to be added
* @param serializer used to convert record into/from binary form
* @param <A> type of record
* @return recid (record identifier) under which record is stored.
*/
<A> long put(A value, Serializer<A> serializer);
/**
* Get existing record.
* <p/>
* Recid must be a number returned by 'put' method.
* Behaviour for invalid recid (random number or already deleted record)
* is not defined, typically it returns null or throws 'EndOfFileException'
*
* @param recid (record identifier) under which record was persisted
* @param serializer used to deserialize record from binary form
* @param <A> record type
* @return record matching given recid, or null if record is not found under given recid.
*/
<A> A get(long recid, Serializer<A> serializer);
/**
* Update existing record with new value.
* <p/>
     * The recid must be a number returned by the 'put' method.
     * Behaviour for an invalid recid (a random number or an already deleted record)
     * is undefined; typically it throws 'EndOfFileException',
     * but it may also corrupt the store.
*
* @param recid (record identifier) under which record was persisted.
* @param value new record value to be stored
* @param serializer used to serialize record into binary form
* @param <A> record type
*/
<A> void update(long recid, A value, Serializer<A> serializer);
/**
* Updates existing record in atomic <a href="http://en.wikipedia.org/wiki/Compare-and-swap">(Compare And Swap)</a> manner.
* Value is modified only if old value matches expected value. There are three ways to match values, MapDB may use any of them:
* <ol>
* <li>Equality check <code>oldValue==expectedOldValue</code> when old value is found in instance cache</li>
* <li>Deserializing <code>oldValue</code> using <code>serializer</code> and checking <code>oldValue.equals(expectedOldValue)</code></li>
     * <li>Serializing <code>expectedOldValue</code> using <code>serializer</code> and comparing the binary array with the already serialized <code>oldValue</code></li>
* </ol>
* <p/>
     * The recid must be a number returned by the 'put' method.
     * Behaviour for an invalid recid (a random number or an already deleted record)
     * is undefined; typically it throws 'EndOfFileException',
     * but it may also corrupt the store.
*
* @param recid (record identifier) under which record was persisted.
* @param expectedOldValue old value to be compared with existing record
* @param newValue to be written if values are matching
* @param serializer used to serialize record into binary form
     * @param <A> record type
* @return true if values matched and newValue was written
*/
//TODO perhaps use Future<Boolean> here?
<A> boolean compareAndSwap(long recid, A expectedOldValue, A newValue, Serializer<A> serializer);
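    // Hedged usage sketch (not part of the original interface): a typical optimistic
    // update loop built on compareAndSwap. Serializer.LONG is assumed to exist as in
    // stock MapDB; substitute whatever serializer this fork provides.
    //
    //   long recid = engine.put(0L, Serializer.LONG);
    //   for (;;) {
    //       Long old = engine.get(recid, Serializer.LONG);
    //       if (engine.compareAndSwap(recid, old, old + 1, Serializer.LONG)) break;
    //   }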
/**
* Remove existing record from store/cache
*
* <p/>
     * The recid must be a number returned by the 'put' method.
     * Behaviour for an invalid recid (a random number or an already deleted record)
     * is undefined; typically it throws 'EndOfFileException',
     * but it may also corrupt the store.
     *
     * @param recid (record identifier) under which the record was persisted
     * @param serializer which may be used in some circumstances to deserialize and store the old object
*/
<A> void delete(long recid, Serializer<A> serializer);
/**
     * Closes the store/cache. This method must be called before the JVM exits, to flush all caches and prevent store corruption.
     * It also releases resources used by MapDB (disk, memory, ...).
     * <p/>
     * The Engine can no longer be used after this method was called. If the Engine is used after closing, it may
     * throw any exception, including <code>NullPointerException</code>.
     * <p/>
     * There is a configuration option {@link DBMaker#closeOnJvmShutdown()} which uses a shutdown hook to automatically
     * close the Engine when the JVM shuts down.
*/
void close();
/**
* Checks whether Engine was closed.
*
* @return true if engine was closed
*/
public boolean isClosed();
/**
     * Makes all changes made since the previous commit/rollback permanent.
     * In transactional mode (on by default) this means creating a journal file and replaying it to storage.
     * In other modes it may flush disk caches or do nothing at all (check your configuration options).
*/
void commit();
/**
* Undoes all changes made in the current transaction.
* If transactions are disabled it throws {@link UnsupportedOperationException}.
*
* @throws UnsupportedOperationException if transactions are disabled
*/
void rollback() throws UnsupportedOperationException;
/**
     * Checks whether you can write into this Engine. It may be read-only in some cases (snapshots, read-only files).
*
* @return true if engine is read-only
*/
boolean isReadOnly();
/** @return true if engine supports rollback*/
boolean canRollback();
/** clears any underlying cache */
void clearCache();
void compact();
}
|
apache-2.0
|
nischalsheth/contrail-controller
|
src/vnsw/agent/cmn/agent_signal.cc
|
528
|
//
// Copyright (c) 2017 Juniper Networks, Inc. All rights reserved.
//
#include "cmn/agent_signal.h"
AgentSignal::AgentSignal(EventManager *evm) :
process_signal_(evm, process::Signal::SignalCallbackMap(),
std::vector<process::Signal::SignalChildHandler>(), true) {
}
AgentSignal::~AgentSignal() {
}
void AgentSignal::Terminate() {
process_signal_.Terminate();
}
void AgentSignal::RegisterSigHupHandler(
process::Signal::SignalHandler handler) {
process_signal_.RegisterHandler(SIGHUP, handler);
}
|
apache-2.0
|
NCI-GDC/portal-ui
|
src/packages/@ncigdc/components/Pagination/index.js
|
4026
|
/* @flow */
import React from 'react';
import _ from 'lodash';
import { Row } from '@ncigdc/uikit/Flex';
import { withTheme } from '@ncigdc/theme';
import PaginationButton from './PaginationButton';
import PaginationLink from './PaginationLink';
import Sizes from './Sizes';
export type TProps = {
params: Object,
prefix?: string,
total: number,
  theme: Object,
  sizes?: Array<number>,
};
const styles = {
topRow: theme => ({
alignItems: 'center',
padding: '1rem',
borderTop: `1px solid ${theme.greyScale5}`,
backgroundColor: 'white',
}),
leftBtn: theme => ({
border: `1px solid ${theme.greyScale5}`,
borderRadius: '4px 0 0 4px',
}),
middleBtn: theme => ({
borderTop: `1px solid ${theme.greyScale5}`,
borderBottom: `1px solid ${theme.greyScale5}`,
borderRight: `1px solid ${theme.greyScale5}`,
borderLeft: 'none',
}),
rightBtn: theme => ({
borderTop: `1px solid ${theme.greyScale5}`,
borderBottom: `1px solid ${theme.greyScale5}`,
borderRight: `1px solid ${theme.greyScale5}`,
borderLeft: 'none',
borderRadius: '0 4px 4px 0',
}),
};
export const calculatePages = (props: TProps): {} => {
const prfOff = [props.prefix, 'offset'].filter(Boolean).join('_');
const prfSize = [props.prefix, 'size'].filter(Boolean).join('_');
const offset = props.params[prfOff];
const size = props.params[prfSize];
const totalPages = Math.ceil(props.total / size);
const prev = Math.max(offset - size, 0);
const last = (totalPages - 1) * size;
const next = Math.min(offset + size, last);
const prevPred = offset !== 0;
const nextPred = offset < last;
const currentPage = Math.ceil(offset / size) + 1;
const pageOffset = 10 * Math.floor((currentPage - 1) / 10);
return {
prfOff,
prfSize,
offset,
size,
prev,
last,
next,
prevPred,
nextPred,
currentPage,
totalPages,
pageOffset,
};
};
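// Hedged illustration (not part of the original file): with params = { offset: 20, size: 10 },
// no prefix and total = 95, calculatePages yields totalPages = 10, currentPage = 3,
// prev = 10, next = 30, last = 90 and pageOffset = 0.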
export const getPaginationRange = (pageOffset, totalPages) => {
const numPagesToShow = 10;
return _.range(
1 + pageOffset,
Math.min(numPagesToShow + pageOffset, totalPages) + 1,
);
};
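// Hedged illustration (not part of the original file): getPaginationRange(0, 3)
// returns [1, 2, 3], while getPaginationRange(10, 25) returns [11, 12, ..., 20];
// at most ten page numbers are shown per window.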
const Pagination = (props: TProps) => {
const {
prfOff,
prfSize,
size,
prev,
last,
next,
prevPred,
nextPred,
currentPage,
totalPages,
pageOffset,
} = calculatePages(props);
return (
<Row style={styles.topRow(props.theme)}>
<Row style={{ alignItems: 'center' }}>
<span style={{ marginRight: '1rem' }}>Show</span>
<Sizes
prfSize={prfSize}
prfOff={prfOff}
size={size}
sizes={props.sizes}
/>
<span style={{ marginLeft: '1rem' }}>entries</span>
</Row>
<Row style={{ marginLeft: 'auto' }}>
<PaginationLink pred={prevPred} prfOff={prfOff} offset={0}>
<PaginationButton style={styles.leftBtn(props.theme)}>
{'«'}
</PaginationButton>
</PaginationLink>
<PaginationLink pred={prevPred} prfOff={prfOff} offset={prev}>
<PaginationButton style={styles.middleBtn(props.theme)}>
{'‹'}
</PaginationButton>
</PaginationLink>
{getPaginationRange(pageOffset, totalPages).map(x => (
<PaginationLink key={x} prfOff={prfOff} offset={(x - 1) * size} pred>
<PaginationButton
active={currentPage === x}
style={styles.middleBtn(props.theme)}
>
{x}
</PaginationButton>
</PaginationLink>
))}
<PaginationLink pred={nextPred} prfOff={prfOff} offset={next}>
<PaginationButton style={styles.middleBtn(props.theme)}>
{'›'}
</PaginationButton>
</PaginationLink>
<PaginationLink pred={nextPred} prfOff={prfOff} offset={last}>
<PaginationButton style={styles.rightBtn(props.theme)}>
{'»'}
</PaginationButton>
</PaginationLink>
</Row>
</Row>
);
};
export default withTheme(Pagination);
|
apache-2.0
|
qiao4/SharpLearn
|
MainForm.Designer.cs
|
6913
|
/*
 * Created by SharpDevelop.
 * User: qiao4
 * Date: 2016/11/2
 * Time: 22:27
 *
 * To change this template use Tools | Options | Coding | Edit Standard Headers.
*/
namespace SharpLearn
{
partial class MainForm
{
/// <summary>
/// Designer variable used to keep track of non-visual components.
/// </summary>
private System.ComponentModel.IContainer components = null;
private System.Windows.Forms.Button button1;
private System.Windows.Forms.Button buttonRomove;
private System.Windows.Forms.Button buttonThreadTest;
private System.Windows.Forms.Button buttonFormatStr;
private System.Windows.Forms.Button openFile;
private System.Windows.Forms.Button tempTest;
/// <summary>
/// Disposes resources used by the form.
/// </summary>
/// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
protected override void Dispose(bool disposing)
{
if (disposing) {
if (components != null) {
components.Dispose();
}
}
base.Dispose(disposing);
}
/// <summary>
/// This method is required for Windows Forms designer support.
/// Do not change the method contents inside the source code editor. The Forms designer might
/// not be able to load this method if it was changed manually.
/// </summary>
private void InitializeComponent()
{
this.button1 = new System.Windows.Forms.Button();
this.buttonRomove = new System.Windows.Forms.Button();
this.buttonThreadTest = new System.Windows.Forms.Button();
this.buttonFormatStr = new System.Windows.Forms.Button();
this.openFile = new System.Windows.Forms.Button();
this.tempTest = new System.Windows.Forms.Button();
this.geneQRCode = new System.Windows.Forms.Button();
this.SuspendLayout();
//
// button1
//
this.button1.Location = new System.Drawing.Point(16, 15);
this.button1.Margin = new System.Windows.Forms.Padding(4, 4, 4, 4);
this.button1.Name = "button1";
this.button1.Size = new System.Drawing.Size(140, 29);
this.button1.TabIndex = 0;
this.button1.Text = "send message";
this.button1.UseVisualStyleBackColor = true;
this.button1.Click += new System.EventHandler(this.Button1Click);
//
// buttonRomove
//
this.buttonRomove.Location = new System.Drawing.Point(16, 51);
this.buttonRomove.Margin = new System.Windows.Forms.Padding(4, 4, 4, 4);
this.buttonRomove.Name = "buttonRomove";
this.buttonRomove.Size = new System.Drawing.Size(139, 29);
this.buttonRomove.TabIndex = 1;
this.buttonRomove.Text = "remove space";
this.buttonRomove.UseVisualStyleBackColor = true;
this.buttonRomove.Click += new System.EventHandler(this.ButtonRomoveClick);
//
// buttonThreadTest
//
this.buttonThreadTest.Location = new System.Drawing.Point(16, 125);
this.buttonThreadTest.Margin = new System.Windows.Forms.Padding(4, 4, 4, 4);
this.buttonThreadTest.Name = "buttonThreadTest";
this.buttonThreadTest.Size = new System.Drawing.Size(139, 29);
this.buttonThreadTest.TabIndex = 2;
this.buttonThreadTest.Text = "thread test";
this.buttonThreadTest.UseVisualStyleBackColor = true;
this.buttonThreadTest.Click += new System.EventHandler(this.ButtonThreadTestClick);
//
// buttonFormatStr
//
this.buttonFormatStr.Location = new System.Drawing.Point(17, 174);
this.buttonFormatStr.Margin = new System.Windows.Forms.Padding(4, 4, 4, 4);
this.buttonFormatStr.Name = "buttonFormatStr";
this.buttonFormatStr.Size = new System.Drawing.Size(139, 29);
this.buttonFormatStr.TabIndex = 3;
this.buttonFormatStr.Text = "format string";
this.buttonFormatStr.UseVisualStyleBackColor = true;
this.buttonFormatStr.Click += new System.EventHandler(this.ButtonFormatStrClick);
//
// openFile
//
this.openFile.Location = new System.Drawing.Point(17, 238);
this.openFile.Margin = new System.Windows.Forms.Padding(4, 4, 4, 4);
this.openFile.Name = "openFile";
this.openFile.Size = new System.Drawing.Size(137, 39);
this.openFile.TabIndex = 4;
this.openFile.Text = "open file";
this.openFile.UseVisualStyleBackColor = true;
this.openFile.Click += new System.EventHandler(this.OpenFileClick);
//
// tempTest
//
this.tempTest.Location = new System.Drawing.Point(17, 285);
this.tempTest.Margin = new System.Windows.Forms.Padding(4, 4, 4, 4);
this.tempTest.Name = "tempTest";
this.tempTest.Size = new System.Drawing.Size(137, 38);
this.tempTest.TabIndex = 5;
this.tempTest.Text = "Temp Test";
this.tempTest.UseVisualStyleBackColor = true;
this.tempTest.Click += new System.EventHandler(this.TempTestClick);
//
// geneQRCode
//
this.geneQRCode.Location = new System.Drawing.Point(189, 10);
this.geneQRCode.Margin = new System.Windows.Forms.Padding(4);
this.geneQRCode.Name = "geneQRCode";
this.geneQRCode.Size = new System.Drawing.Size(137, 38);
this.geneQRCode.TabIndex = 6;
this.geneQRCode.Text = "Gene QRCode";
this.geneQRCode.UseVisualStyleBackColor = true;
this.geneQRCode.Click += new System.EventHandler(this.geneQRCode_Click);
//
// MainForm
//
this.AutoScaleDimensions = new System.Drawing.SizeF(8F, 15F);
this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font;
this.ClientSize = new System.Drawing.Size(796, 448);
this.Controls.Add(this.geneQRCode);
this.Controls.Add(this.tempTest);
this.Controls.Add(this.openFile);
this.Controls.Add(this.buttonFormatStr);
this.Controls.Add(this.buttonThreadTest);
this.Controls.Add(this.buttonRomove);
this.Controls.Add(this.button1);
this.Margin = new System.Windows.Forms.Padding(4, 4, 4, 4);
this.Name = "MainForm";
this.Text = "SharpLearn";
this.ResumeLayout(false);
}
private System.Windows.Forms.Button geneQRCode;
}
}
|
apache-2.0
|
mediocre/mehdown
|
test/plugins/youtube.js
|
4913
|
const assert = require('assert');
const mehdown = require('../../lib');
describe('youtube', function() {
it('http://www.youtube.com/watch?v=kU9MuM4lP18', function(done) {
mehdown.render('http://www.youtube.com/watch?v=kU9MuM4lP18', function(err, html) {
assert.equal(html, '<p><iframe allowfullscreen class="youtube" frameborder="0" src="https://www.youtube.com/embed/kU9MuM4lP18?autohide=1&color=white&showinfo=0&theme=light"></iframe></p>');
done();
});
});
it('http://www.youtube.com/watch?v=kU9MuM4lP18 http://www.youtube.com/watch?v=eGDBR2L5kzI', function(done) {
mehdown.render('http://www.youtube.com/watch?v=kU9MuM4lP18 http://www.youtube.com/watch?v=eGDBR2L5kzI', function(err, html) {
assert.equal(html, '<p><iframe allowfullscreen class="youtube" frameborder="0" src="https://www.youtube.com/embed/kU9MuM4lP18?autohide=1&color=white&showinfo=0&theme=light"></iframe> <iframe allowfullscreen class="youtube" frameborder="0" src="https://www.youtube.com/embed/eGDBR2L5kzI?autohide=1&color=white&showinfo=0&theme=light"></iframe></p>');
done();
});
});
it('http://www.youtube.com/watch?feature=player_embedded&v=zIEIvi2MuEk', function(done) {
mehdown.render('http://www.youtube.com/watch?feature=player_embedded&v=zIEIvi2MuEk', function(err, html) {
assert.equal(html, '<p><iframe allowfullscreen class="youtube" frameborder="0" src="https://www.youtube.com/embed/zIEIvi2MuEk?autohide=1&color=white&showinfo=0&theme=light"></iframe></p>');
done();
});
});
    it('`&amp;` instead of `&` in URL', function(done) {
        mehdown.render('http://www.youtube.com/watch?feature=player_embedded&amp;v=zIEIvi2MuEk', function(err, html) {
assert.equal(html, '<p><iframe allowfullscreen class="youtube" frameborder="0" src="https://www.youtube.com/embed/zIEIvi2MuEk?autohide=1&color=white&showinfo=0&theme=light"></iframe></p>');
done();
});
});
it('http://www.youtube.com/watch?v=kU9MuM4lP18&start=10', function(done) {
mehdown.render('http://www.youtube.com/watch?v=kU9MuM4lP18&start=10', function(err, html) {
assert.equal(html, '<p><iframe allowfullscreen class="youtube" frameborder="0" src="https://www.youtube.com/embed/kU9MuM4lP18?autohide=1&color=white&showinfo=0&theme=light&start=10"></iframe></p>');
done();
});
});
    it('http://www.youtube.com/watch?v=kU9MuM4lP18&amp;start=10', function(done) {
        mehdown.render('http://www.youtube.com/watch?v=kU9MuM4lP18&amp;start=10', function(err, html) {
assert.equal(html, '<p><iframe allowfullscreen class="youtube" frameborder="0" src="https://www.youtube.com/embed/kU9MuM4lP18?autohide=1&color=white&showinfo=0&theme=light&start=10"></iframe></p>');
done();
});
});
it('http://www.youtube.com/watch?v=kU9MuM4lP18&start=10&end=20', function(done) {
mehdown.render('http://www.youtube.com/watch?v=kU9MuM4lP18&start=10&end=20', function(err, html) {
assert.equal(html, '<p><iframe allowfullscreen class="youtube" frameborder="0" src="https://www.youtube.com/embed/kU9MuM4lP18?autohide=1&color=white&showinfo=0&theme=light&end=20&start=10"></iframe></p>');
done();
});
});
it('[text](http://www.youtube.com/watch?v=kU9MuM4lP18&start=10&end=20)', function(done) {
mehdown.render('[text](http://www.youtube.com/watch?v=kU9MuM4lP18&start=10&end=20)', function(err, html) {
assert.equal(html, '<p><a href="http://www.youtube.com/watch?v=kU9MuM4lP18&start=10&end=20">text</a></p>');
done();
});
});
it('youtube.com/watch?v=kU9MuM4lP18', function(done) {
mehdown.render('youtube.com/watch?v=kU9MuM4lP18', function(err, html) {
assert.equal(html, '<p><iframe allowfullscreen class="youtube" frameborder="0" src="https://www.youtube.com/embed/kU9MuM4lP18?autohide=1&color=white&showinfo=0&theme=light"></iframe></p>');
done();
});
});
it('www.youtube.com/watch?v=kU9MuM4lP18', function(done) {
mehdown.render('www.youtube.com/watch?v=kU9MuM4lP18', function(err, html) {
assert.equal(html, '<p><iframe allowfullscreen class="youtube" frameborder="0" src="https://www.youtube.com/embed/kU9MuM4lP18?autohide=1&color=white&showinfo=0&theme=light"></iframe></p>');
done();
});
});
it('https://www.youtube.com/watch?v=ex--O-cJcZA', function(done) {
mehdown.render('https://www.youtube.com/watch?v=ex--O-cJcZA', function(err, html) {
assert.equal(html, '<p><iframe allowfullscreen class="youtube" frameborder="0" src="https://www.youtube.com/embed/ex--O-cJcZA?autohide=1&color=white&showinfo=0&theme=light"></iframe></p>');
done();
});
});
});
|
apache-2.0
|
ankuradhey/dealtrip
|
library/Zend/View/Helper/PartialLoop.php
|
3268
|
<?php
/**
* Zend Framework
*
* LICENSE
*
* This source file is subject to the new BSD license that is bundled
* with this package in the file LICENSE.txt.
* It is also available through the world-wide-web at this URL:
* http://framework.zend.com/license/new-bsd
* If you did not receive a copy of the license and are unable to
* obtain it through the world-wide-web, please send an email
* to license@zend.com so we can send you a copy immediately.
*
* @category Zend
* @package Zend_View
* @subpackage Helper
* @copyright Copyright (c) 2005-2008 Zend Technologies USA Inc. (http://www.zend.com)
* @version $Id: PartialLoop.php 13032 2008-12-05 02:43:17Z sidhighwind $
* @license http://framework.zend.com/license/new-bsd New BSD License
*/
/** Zend_View_Helper_Partial */
require_once 'Zend/View/Helper/Partial.php';
/**
* Helper for rendering a template fragment in its own variable scope; iterates
* over data provided and renders for each iteration.
*
* @package Zend_View
* @subpackage Helper
* @copyright Copyright (c) 2005-2008 Zend Technologies USA Inc. (http://www.zend.com)
* @license http://framework.zend.com/license/new-bsd New BSD License
*/
class Zend_View_Helper_PartialLoop extends Zend_View_Helper_Partial
{
/**
     * Marker for the current position of the pointer in the loop
* @var integer
*/
protected $partialCounter = 0;
/**
* Renders a template fragment within a variable scope distinct from the
* calling View object.
*
* If no arguments are provided, returns object instance.
*
* @param string $name Name of view script
* @param string|array $module If $model is empty, and $module is an array,
* these are the variables to populate in the
* view. Otherwise, the module in which the
* partial resides
* @param array $model Variables to populate in the view
* @return string
*/
public function partialLoop($name = null, $module = null, $model = null)
{
if (0 == func_num_args()) {
return $this;
}
if ((null === $model) && (null !== $module)) {
$model = $module;
$module = null;
}
if (!is_array($model)
&& (!$model instanceof Traversable)
&& (is_object($model) && !method_exists($model, 'toArray'))
) {
require_once 'Zend/View/Helper/Partial/Exception.php';
throw new Zend_View_Helper_Partial_Exception('PartialLoop helper requires iterable data');
}
if (is_object($model)
&& (!$model instanceof Traversable)
&& method_exists($model, 'toArray')
) {
$model = $model->toArray();
}
$content = '';
        // reset the counter if it's called again
$this->partialCounter = 0;
foreach ($model as $item) {
// increment the counter variable
$this->partialCounter++;
$content .= $this->partial($name, $module, $item);
}
return $content;
}
}
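
// Hedged usage sketch (view-script context, not part of the original file):
//   echo $this->partialLoop('item.phtml', array(
//       array('name' => 'foo'),
//       array('name' => 'bar'),
//   ));
// 'item.phtml' is a placeholder partial; it is rendered once per entry, each in
// its own variable scope.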
|
apache-2.0
|
razie/diesel-rx
|
diesel/src/main/scala/razie/diesel/expr/ExprParser.scala
|
18545
|
/* ____ __ ____ ____ ____,,___ ____ __ __ ____
* ( _ \ /__\ (_ )(_ _)( ___)/ __) ( _ \( )( )( _ \ Read
* ) / /(__)\ / /_ _)(_ )__) \__ \ )___/ )(__)( ) _ < README.txt
* (_)\_)(__)(__)(____)(____)(____)(___/ (__) (______)(____/ LICENSE.txt
*/
package razie.diesel.expr
import razie.diesel.dom.RDOM.P
import razie.diesel.dom.{RDOM, WType, WTypes, XPathIdent}
import razie.diesel.engine.nodes.PAS
import scala.util.parsing.combinator.RegexParsers
/**
* expressions parser. this is a trait you can mix in your other DSL parsers,
* see SimpleExprParser for a concrete implementation
*
* See http://specs.razie.com/wiki/Story:expr_story for possible expressions and examples
*/
trait ExprParser extends RegexParsers {
// mandatory whiteSpace
def ws = whiteSpace
// optional whiteSpace
def ows = opt(whiteSpace)
// def pComment: Parser[String] = "//.*".r | "(?m)/\\*(\\*(?!/)|[^*])*\\*/)".r ^^ {
//def pComment: Parser[String] = """(\s|//.*|(?m)/\*(\*(?!/)|[^*])*\*/)+""".r ^^ {
def pComment: Parser[String] = """(//[^\n]*|(?m)/\*(\*(?!/)|[^*])*\*/)+""".r ^^ {
case s => s
}
def optComment: Parser[String] = opt(whiteSpace ~> pComment) ^^ {
case s => s.mkString
}
def optComment2: Parser[String] = opt(pComment) ^^ {
case s => s.mkString
}
def optComment3: Parser[String] = opt(" *//.*".r) ^^ {
case s => s.mkString
}
//
//======================= MAIN: operator expressions and conditions ========================
//
private def opsAS: Parser[String] = "as"
  private def opsMAP: Parser[String] =
    "map" <~ ws | "fold" <~ ws | "flatMap" <~ ws | "flatten" <~ ws |
        "filter" <~ ws | "exists" | "mkString" <~ ws | ">>"
private def opsOR: Parser[String] = "or" | "xor"
private def opsAND: Parser[String] = "and"
private def opsCMP: Parser[String] =
">" | "<" | ">=" | "<=" | "==" | "!=" |
"~=" | "~path" <~ ws |
"?=" | "is" <~ ws | "xNot" <~ ws | "in" <~ ws | "not in" <~ ws | "notIn" <~ ws | "not" <~ ws |
"contains" <~ ws | "containsNot" <~ ws
private def opsPLUS: Parser[String] = "+" | "-" | "||" | "|"
  private def opsMULT: Parser[String] = "*" | "/(?!/)".r // negative lookahead to not match a comment - it acts funny with multiple lines of comment
//--------------------------- expressions
/** main entry point for an expression */
def expr: Parser[Expr] = exprAS | pterm
// a reduced expr, from boolean down, useful for lambdas for conditions
def expr2: Parser[Expr] = exprOR | pterm
private def faexpr2: (Expr, String, Expr) => Expr = { (a, b, c) =>
if (b == ">>") AExprFunc(
c.asInstanceOf[AExprIdent].start,
List(P("", "", WTypes.wt.UNKNOWN, Some(a.asInstanceOf[AExprIdent]))))
else AExpr2(a, b, c)
}
// "1" as number
def exprAS: Parser[Expr] = exprMAP ~ opt(ows ~> opsAS ~ ows ~ pterm) ^^ {
case a ~ None => a
case a ~ Some(op ~ _ ~ p) => AExpr2(a, op, p)
}
// x map (x => x+1)
def exprMAP: Parser[Expr] = exprOR ~ rep(ows ~> opsMAP ~ ows ~ exprOR) ^^ {
case a ~ l => foldAssocAexpr2(a, l, faexpr2)
}
// x > y or ...
def exprOR: Parser[Expr] = exprAND ~ rep(ows ~> (opsOR <~ ws) ~ ows ~ exprAND) ^^ {
case a ~ l => foldAssocAexpr2(a, l, bMkAndOr)
}
// x > y and ...
def exprAND: Parser[Expr] = (exprNOTCMP | exprCMP) ~ rep(ows ~> (opsAND <~ ws) ~ ows ~ (exprNOTCMP | exprCMP)) ^^ {
case a ~ l => foldAssocAexpr2(a, l, bMkAndOr)
}
def exprNOTCMP: Parser[Expr] = (("not" | "NOT") <~ ws) ~> ows ~> exprCMP ^^ {
case e if e.isInstanceOf[BoolExpr] => BCMPNot(e.asInstanceOf[BoolExpr])
case x => BCMPNot(BCMPSingle(x))
}
// x > y
def exprCMP: Parser[Expr] = exprPLUS ~ opt(ows ~> opsCMP ~ ows ~ exprPLUS) ^^ {
case a ~ None => a
case a ~ Some(op ~ _ ~ b) => BCMP2(a, op, b)
}
// x + y
def exprPLUS: Parser[Expr] = exprMULT ~ rep(ows ~> opsPLUS ~ ows ~ exprMULT) ^^ {
case a ~ l => foldAssocAexpr2(a, l, AExpr2)
}
// x * y
def exprMULT: Parser[Expr] = pterm ~ rep(ows ~> opsMULT ~ ows ~ pterm) ^^ {
case a ~ l => foldAssocAexpr2(a, l, AExpr2)
}
// foldLeft associative expressions
private def foldAssocAexpr2[EXP](a:EXP, l:List[String ~ Option[String] ~ EXP], f:(EXP, String, EXP) => EXP) : EXP = {
l.foldLeft(a)((x, y) =>
y match {
case op ~ _ ~ p => f(x, op, p)
}
)
}
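  // Hedged illustration (not in the original): with f = AExpr2, an input like
  // "1 + 2 - 3" folds left-associatively into AExpr2(AExpr2(1, "+", 2), "-", 3).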
//
//================== main expression rules
//
// a term in an expression
def pterm: Parser[Expr] =
numConst | boolConst | multilineStrConst | strConst | jnull |
xpident |
lambda | jsexpr2 | jsexpr1 |
exregex | eblock | jarray | jobj |
scalaexpr2 | scalaexpr1 |
callFunc | aidentaccess | aident | jsexpr4
// exregex | eblock | jarray | jobj
//
//============================== idents
//
/** a regular ident but also something in single quotes 'a@habibi.34 and - is a good ident eh' */
def ident: Parser[String] = """[a-zA-Z_][\w]*""".r | """'[\w@. -]+'""".r ^^ {
case s =>
if(s.startsWith("'") && s.endsWith("'"))
s.substring(1, s.length-1)
else
s
}
/** allow JSON ids with double quotes, single quotes or no quotes */
// def jsonIdent: Parser[String] = """[a-zA-Z_][\w]*""".r | """'[\w@. -]+'""".r | """"[\w@. -]+"""".r ^^ {
def jsonIdent: Parser[String] = """[a-zA-Z_][\w]*""".r | """'[^']+'""".r | """"[^"]+"""".r ^^ {
case s => unquote(s)
}
/** qualified idents, . notation, parsed as a single string */
def qident: Parser[String] = ident ~ rep("." ~> ident) ^^ {
case i ~ l => (i :: l).mkString(".")
}
/** generic qualified ident including diesel exprs */
def qlident: Parser[List[String]] = qidentDiesel | qualifiedIdent
// fix this somehow - these need to be accessed as this - they should be part of a "diesel" object with callbacks
def qidentDiesel: Parser[List[String]] = "diesel" ~ "." ~ qualifiedIdent ^^ {
case d ~ dot ~ i => d :: i
}
/** qualified idents, . notation, parsed as a list */
def qualifiedIdent: Parser[List[String]] = ident ~ rep("." ~> ident) ^^ {
case i ~ l => i :: l
}
def xpath: Parser[String] = ident ~ rep("[/@]+".r ~ ident) ^^ {
case i ~ l => (i :: l.map { x => x._1 + x._2 }).mkString("")
}
def jpath: Parser[String] = ident ~ rep("[/@.]+".r ~ ident) ^^ {
case i ~ l => (i :: l.map { x => x._1.replace(".", "/") + x._2 }).mkString("")
}
//
//==================== lambdas
//
// x => x + 4
def lambda: Parser[Expr] = ident ~ ows ~ "=>" ~ ows ~ (expr2 | "(" ~> expr <~ ")") ^^ {
case id ~ _ ~ a ~ _ ~ ex => LambdaFuncExpr(id, ex)
}
//
//================================== constants - CExpr
//
// a number
def numConst: Parser[Expr] = (afloat | aint) ^^ { case i => new CExpr(i, WTypes.wt.NUMBER) }
def aint: Parser[String] = """-?\d+""".r
def afloat: Parser[String] = """-?\d+[.]\d+""".r
/** prepare a parsed string const */
private def prepStrConst(e: String): CExpr[String] = {
// string const with escaped chars
var s = e
// replace standard escapes like java does
s = s
.replaceAll("(?<!\\\\)\\\\b", "\b")
.replaceAll("(?<!\\\\)\\\\n", "\n")
.replaceAll("(?<!\\\\)\\\\t", "\t")
.replaceAll("(?<!\\\\)\\\\r", "\r")
.replaceAll("(?<!\\\\)\\\\f", "\f")
// kind of like java, now replace anything escaped
// note that Java only replaces a few things, others generate errors
// we replace anything
s = s.replaceAll("\\\\(.)", "$1")
new CExpr(s, WTypes.wt.STRING)
}
// string const with escaped chars
def strConst: Parser[Expr] = "\"" ~> """(\\.|[^\"])*""".r <~ "\"" ^^ {
e => prepStrConst(e)
}
// string const with escaped chars
def strConstSingleQuote: Parser[Expr] = "\'" ~> """(\\.|[^\'])*""".r <~ "\'" ^^ {
e => prepStrConst(e)
}
// escaped multiline string const with escaped chars
// we're removing the first \n
def multilineStrConst: Parser[Expr] = "\"\"\"" ~ opt("\n") ~> """(?s)((?!\"\"\").)*""".r <~ "\"\"\"" ^^ {
e => prepStrConst(e)
}
// xp or jp
def xpident: Parser[Expr] = xpp //| jpp
// XP identifier (either json or xml)
def xpp: Parser[Expr] = ("xp:" ~ xpath) ^^ { case x ~ i => new XPathIdent(i) }
// XP identifier (either json or xml)
// def jpp: Parser[Expr] = ("jp:" ~ jpath) ^^ { case x ~ i => new XPathIdent(i) }
// regular expression, JS style
def exregex: Parser[Expr] = """/[^/]+/""".r ^^ { case x => new CExpr(x, WTypes.wt.REGEX) }
//
//==================================== ACCESSORS
//
// qualified identifier
def aident: Parser[AExprIdent] = qlident ^^ { case i => new AExprIdent(i.head, i.tail.map(P("", _))) }
// simple qident or complex one
def aidentExpr: Parser[AExprIdent] = aidentaccess | aident
// full accessor to value: a.b[4].c.r["field1"]["subfield2"][4].g
// note this kicks in at the first use of [] and continues... so that aident above catches all other
def aidentaccess: Parser[AExprIdent] = qlident ~ (sqbraccess | sqbraccessRange | accessorNum) ~ accessors ^^ {
case i ~ sa ~ a => new AExprIdent(i.head, i.tail.map(P("", _)) ::: sa :: a)
}
def accessors: Parser[List[RDOM.P]] = rep(sqbraccess | sqbraccessRange | accessorIdent | accessorNum) ^^ {
case p => p//.flatMap(prepAccessor)
}
/** if a single accessor is a path, then flatten it */
// def prepAccessor(p: P): List[RDOM.P] = {
// if (p.expr.exists(_.isInstanceOf[CExpr[String]])) {
// p.expr.get.asInstanceOf[CExpr[String]].ee
// .split("\\.")
// .map(e => P("", "").copy(expr = Some(new CExpr(e))))
// .toList
// } else
// List(p)
// }
private def accessorIdent: Parser[RDOM.P] = "." ~> ident ^^ { case id => P("", id, WTypes.wt.STRING) }
private def accessorNum: Parser[RDOM.P] = "." ~> "[0-9]+".r ^^ { case id => P("", id, WTypes.wt.NUMBER) }
// need to check single quotes first to force them strings - otherwise they end up IDs
private def sqbraccess: Parser[RDOM.P] = "\\[".r ~> ows ~> (strConstSingleQuote | expr) <~ ows <~ "]" ^^ {
case e => P("", "").copy(expr = Some(e))
}
// for now the range is only numeric
private def sqbraccessRange: Parser[RDOM.P] = "\\[".r ~> ows ~> (numConst) ~ ows ~ ".." ~ ows ~ opt(
numConst | aidentExpr) <~ ows <~ "]" ^^ {
case e1 ~ _ ~ _ ~ _ ~ e2 => P("", "", WTypes.wt.RANGE).copy(
expr = Some(ExprRange(e1, e2))
)
}
//
//==================================== F U N C T I O N S
//
// calling a function, this is not defining it, so no type annotations etc
// named parameters need to be mentioned, unless it's just one
// a.b.c(
def callFunc: Parser[Expr] = qident ~ pcallattrs ^^ { case i ~ a => AExprFunc(i, a) }
// todo have a pcallattrsSimple which allows an expression and use that in callFunc
/**
   * simple ident = expr assignment when calling
*/
def pcallattrs: Parser[List[RDOM.P]] = " *\\(".r ~> ows ~> repsep(pcallattrIdent, ows ~ "," ~ ows) <~ opt(",") ~ ows <~ ")"
def pcallattrIdent: Parser[P] = " *".r ~> qident ~ opt(" *= *".r ~> expr) ^^ {
case ident ~ ex => {
P(ident, "", ex.map(_.getType).getOrElse(WTypes.wt.EMPTY), ex)
}
}
def pcallattrExpr: Parser[P] = " *".r ~> expr ^^ {
case ex => {
P("lambda", "", ex.getType, Some(ex))
}
}
// param assignment (x = expr, ...)
// allows comma after last
def pasattrs: Parser[List[PAS]] = " *\\(".r ~> ows ~> repsep(pasattr, ows ~ "," ~ ows) <~ opt(",") ~ ows <~ ")"
/**
* parm assignment, left side can be a[5].name, useful in a $val
*/
def pasattr: Parser[PAS] = " *".r ~> (aidentaccess | aident) ~ opt(" *= *".r ~> expr) ^^ {
case ident ~ e => {
e match {
case Some(ex) => PAS(ident, ex)
        case None => PAS(ident, ident) // shorthand: a plain "a" is treated as "a=a"
}
}
}
/**
* :<>type[kind]*
* <> means it's a ref, not ownership
* * means it's a list
*/
def optType: Parser[WType] = opt((" *: *<> *".r | " *: *".r) ~
// todo make it work better with the opt below - it stopped working at some point
// opt(" *<> *") ~
ident ~
optKinds ~
opt(" *\\* *".r)) ^^ {
case Some(ref ~ tt ~ k ~ None) => {
WType(tt, "", k).withRef(ref.contains("<>"))
}
case Some(ref ~ tt ~ _ ~ Some(_)) => {
WType(WTypes.ARRAY, "", Some(tt)).withRef(ref.contains("<>"))
}
case None => WTypes.wt.EMPTY
}
// A [ KIND, KIND ]
def optKinds: Parser[Option[String]] = opt(ows ~> "[" ~> ows ~> repsep(ident, ",") <~ "]") ^^ {
case Some(tParm) => Some(tParm.mkString)
case None => None
}
val msfDefOperators = "~=|\\?=|=".r
def OPSM1: Parser[String] = msfDefOperators
/**
* parm definition / assignment
*
* name:<>type[kind]*~=default
*
* <> means it's a ref, not ownership
* * means it's a list
*/
def pattr: Parser[RDOM.P] = " *".r ~>
qident ~
optType ~
opt(" *\\?(?!=) *".r) ~ // negative lookahead to not match optional with value
opt(ows ~> OPSM1 ~ ows ~ expr) <~
optComment ^^ {
case name ~ ttype ~ oper ~ e => {
var optional = oper.mkString.trim
val (dflt, ex) = e match {
// we don't use dflt at all now, some parms are interpolated etc
case Some(op ~ _ ~ expr) => {
optional = if (op.contains("?=")) "?" else ""
("", Some(expr))
}
case None => ("", None)
}
ttype match {
// k - kind is [String] etc
case WTypes.wt.EMPTY => // infer type from expr
P(name, dflt, ex.map(_.getType).getOrElse(WTypes.wt.EMPTY), ex, optional)
case tt => // ref or no archetype
P(name, dflt, tt, ex, optional)
}
}
}
/**
* optional attributes
*/
def optAttrs: Parser[List[RDOM.P]] = opt(attrs) ^^ {
case Some(a) => a
case None => List.empty
}
/**
* optional attributes
*/
def attrs: Parser[List[RDOM.P]] = " *\\(".r ~> ows ~> repsep(pattr, "\\s*,\\s*".r ~ optComment) <~ ows <~ ")"
//
//=================== js and JSON
//
def jsexpr1: Parser[Expr] = "js:" ~> ".*(?=[,)])".r ^^ (li => JSSExpr(li))
def jsexpr2: Parser[Expr] = "js:{" ~> ".*(?=})".r <~ "}" ^^ (li => JSSExpr(li))
// def jsexpr3: Parser[Expr] = "js:{{ " ~> ".*(?=})".r <~ "}}" ^^ { case li => JSSExpr(li) }
def scalaexpr1: Parser[Expr] = "sc:" ~> ".*(?=[,)])".r ^^ (li => SCExpr(li))
def scalaexpr2: Parser[Expr] = "sc:{" ~> ".*(?=})".r <~ "}" ^^ (li => SCExpr(li))
def eblock: Parser[Expr] = "(" ~ ows ~> expr <~ ows ~ ")" ^^ (ex =>
if (ex.isInstanceOf[BoolExpr]) BExprBlock(ex.asInstanceOf[BoolExpr]) else BlockExpr(ex)
)
// inline js expr: //1+2//
def jsexpr4: Parser[Expr] = "//" ~> ".*(?=//)".r <~ "//" ^^ (li => JSSExpr(li))
// remove single or double quotes if any, from ID matched with them
def unquote(s: String) = {
if (s.startsWith("'") && s.endsWith("\'") || s.startsWith("\"") && s
.endsWith("\""))
s.substring(1, s.length - 1)
else
s
}
def jnull: Parser[Expr] = "null" ^^ {
_ => new CExprNull
}
  // json object - sequence of nvp assignments separated with commas
def jobj: Parser[Expr] = opt("new" ~ whiteSpace ~ qident) ~ ows ~
"{" ~ ows ~ repsep(jnvp <~ ows, ",\\s*".r) <~ ows ~ "}" ^^ {
case None ~ _ ~ _ ~ _ ~ li => JBlockExpr(li)
case Some(a ~ _ ~ b) ~ _ ~ _ ~ _ ~ li => JBlockExpr(li, Some(b))
}
// one json block nvp pair
def jnvp: Parser[(String, Expr)] = ows ~> jsonIdent ~ " *[:=] *".r ~ jexpr ^^ {
case name ~ _ ~ ex => (unquote(name), ex)
}
// array [...] - elements are expressions
def jarray: Parser[Expr] = "[" ~ ows ~> repsep(ows ~> jexpr <~ ows, ",") <~ ows ~ "]" ^^ {
li => JArrExpr(li) //CExpr("[ " + li.mkString(",") + " ]")
}
def jexpr: Parser[Expr] = jobj | jarray | boolConst | jother ^^ (ex => ex) //ex.toString }
// def jother: Parser[String] = "[^{}\\[\\],]+".r ^^ { case ex => ex }
def jother: Parser[Expr] = expr ^^ (ex => ex)
//
//==================================== C O N D I T I O N S
//
  // todo why is a space needed after "not"? this does not parse: => $if(not(false))
private def opsBool: Parser[String] = "==" | "xNot" | "is" | "in" | "not in" | "notIn" |
"!=" | "not" <~ ws | "~=" | "matches" <~ ws | "<=" | ">=" | "<" | ">" |
"containsNot" <~ ws | "contains" <~ ws
def cond: Parser[BoolExpr] = orexpr
def orexpr: Parser[BoolExpr] = bterm1 ~ rep(ows ~> ("or" <~ ws) ~ ows ~ bterm1) ^^ {
case a ~ l => foldAssocAexpr2(a, l, bcmp)
}
def bterm1: Parser[BoolExpr] = bfactor1 ~ rep(ows ~> ("and" <~ ws) ~ ows ~ bfactor1) ^^ {
case a ~ l => foldAssocAexpr2(a, l, bcmp)
}
def bfactor1: Parser[BoolExpr] = notbfactor2 | bfactor2
def notbfactor2: Parser[BoolExpr] = ows ~> (("not" | "NOT") <~ ws) ~> ows ~> bfactor2 ^^ {BCMPNot}
def bfactor2: Parser[BoolExpr] = boolConst | ibex(opsBool) | bvalue | condBlock
private def condBlock: Parser[BoolExpr] = ows ~> "(" ~> ows ~> cond <~ ows <~ ")" ^^ {BExprBlock}
private def ibex(op: => Parser[String]): Parser[BoolExpr] = expr ~ (ows ~> op <~ ows) ~ expr ^^ {
case a ~ s ~ b => BCMP2(a, s.trim, b)
}
/** true or false constants */
def boolConst: Parser[BoolExpr] = ("true" | "false") ^^ {BCMPConst}
/** single value expressions, where != 0 is true and != null is true */
def bvalue: Parser[BoolExpr] = expr ^^ {
a => BCMPSingle(a)
}
private def bcmp(a: BoolExpr, s: String, b: BoolExpr) = BCMPAndOr(a, s, b)
/** we can only combine: 2 bools with and/or ||| 2 non-bool exprs with non-bool operator ||| blow up */
private def bMkAndOr(a: Expr, s: String, b: Expr): Expr = (a, b) match {
case (a: BoolExpr, b: BoolExpr) => BCMPAndOr(a, s, b)
// todo build this into the parser - see exprMAP comments
case (a: Expr, b: Expr) if !a.isInstanceOf[BoolExpr] && !b.isInstanceOf[BoolExpr]
=> AExpr2(a, s, b)
// need to allow simple idents - they mean "exists"
case (a: Expr, b: Expr) if a.isInstanceOf[AExprIdent] && b.isInstanceOf[BoolExpr]
=> BCMPAndOr(BCMPSingle(a), s, b.asInstanceOf[BoolExpr])
case (a: Expr, b: Expr) if a.isInstanceOf[BoolExpr] || b.isInstanceOf[AExprIdent]
=> BCMPAndOr(a.asInstanceOf[BoolExpr], s, BCMPSingle(b))
case (a, b) => throw new DieselExprException(
"bMkAndOr - can't combine non-logical expressions with or/and: " + a + " WITH " + b)
}
}
|
apache-2.0
|
googleapis/java-tasks
|
proto-google-cloud-tasks-v2/src/main/java/com/google/cloud/tasks/v2/TaskName.java
|
6774
|
/*
* Copyright 2021 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.tasks.v2;
import com.google.api.pathtemplate.PathTemplate;
import com.google.api.resourcenames.ResourceName;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableMap;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
@Generated("by gapic-generator-java")
public class TaskName implements ResourceName {
private static final PathTemplate PROJECT_LOCATION_QUEUE_TASK =
PathTemplate.createWithoutUrlEncoding(
"projects/{project}/locations/{location}/queues/{queue}/tasks/{task}");
private volatile Map<String, String> fieldValuesMap;
private final String project;
private final String location;
private final String queue;
private final String task;
@Deprecated
protected TaskName() {
project = null;
location = null;
queue = null;
task = null;
}
private TaskName(Builder builder) {
project = Preconditions.checkNotNull(builder.getProject());
location = Preconditions.checkNotNull(builder.getLocation());
queue = Preconditions.checkNotNull(builder.getQueue());
task = Preconditions.checkNotNull(builder.getTask());
}
public String getProject() {
return project;
}
public String getLocation() {
return location;
}
public String getQueue() {
return queue;
}
public String getTask() {
return task;
}
public static Builder newBuilder() {
return new Builder();
}
public Builder toBuilder() {
return new Builder(this);
}
public static TaskName of(String project, String location, String queue, String task) {
return newBuilder()
.setProject(project)
.setLocation(location)
.setQueue(queue)
.setTask(task)
.build();
}
public static String format(String project, String location, String queue, String task) {
return newBuilder()
.setProject(project)
.setLocation(location)
.setQueue(queue)
.setTask(task)
.build()
.toString();
}
public static TaskName parse(String formattedString) {
if (formattedString.isEmpty()) {
return null;
}
Map<String, String> matchMap =
PROJECT_LOCATION_QUEUE_TASK.validatedMatch(
formattedString, "TaskName.parse: formattedString not in valid format");
return of(
matchMap.get("project"),
matchMap.get("location"),
matchMap.get("queue"),
matchMap.get("task"));
}
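  // Hedged usage sketch (not part of the generated file):
  //   TaskName name = TaskName.of("my-project", "us-central1", "my-queue", "task-1");
  //   TaskName parsed = TaskName.parse(name.toString());
  //   // parsed.getQueue() returns "my-queue"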
public static List<TaskName> parseList(List<String> formattedStrings) {
List<TaskName> list = new ArrayList<>(formattedStrings.size());
for (String formattedString : formattedStrings) {
list.add(parse(formattedString));
}
return list;
}
public static List<String> toStringList(List<TaskName> values) {
List<String> list = new ArrayList<>(values.size());
for (TaskName value : values) {
if (value == null) {
list.add("");
} else {
list.add(value.toString());
}
}
return list;
}
public static boolean isParsableFrom(String formattedString) {
return PROJECT_LOCATION_QUEUE_TASK.matches(formattedString);
}
@Override
public Map<String, String> getFieldValuesMap() {
if (fieldValuesMap == null) {
synchronized (this) {
if (fieldValuesMap == null) {
ImmutableMap.Builder<String, String> fieldMapBuilder = ImmutableMap.builder();
if (project != null) {
fieldMapBuilder.put("project", project);
}
if (location != null) {
fieldMapBuilder.put("location", location);
}
if (queue != null) {
fieldMapBuilder.put("queue", queue);
}
if (task != null) {
fieldMapBuilder.put("task", task);
}
fieldValuesMap = fieldMapBuilder.build();
}
}
}
return fieldValuesMap;
}
public String getFieldValue(String fieldName) {
return getFieldValuesMap().get(fieldName);
}
@Override
public String toString() {
return PROJECT_LOCATION_QUEUE_TASK.instantiate(
"project", project, "location", location, "queue", queue, "task", task);
}
@Override
public boolean equals(Object o) {
if (o == this) {
return true;
}
    if (o != null && getClass() == o.getClass()) {
TaskName that = ((TaskName) o);
return Objects.equals(this.project, that.project)
&& Objects.equals(this.location, that.location)
&& Objects.equals(this.queue, that.queue)
&& Objects.equals(this.task, that.task);
}
return false;
}
@Override
public int hashCode() {
int h = 1;
h *= 1000003;
h ^= Objects.hashCode(project);
h *= 1000003;
h ^= Objects.hashCode(location);
h *= 1000003;
h ^= Objects.hashCode(queue);
h *= 1000003;
h ^= Objects.hashCode(task);
return h;
}
/** Builder for projects/{project}/locations/{location}/queues/{queue}/tasks/{task}. */
public static class Builder {
private String project;
private String location;
private String queue;
private String task;
protected Builder() {}
public String getProject() {
return project;
}
public String getLocation() {
return location;
}
public String getQueue() {
return queue;
}
public String getTask() {
return task;
}
public Builder setProject(String project) {
this.project = project;
return this;
}
public Builder setLocation(String location) {
this.location = location;
return this;
}
public Builder setQueue(String queue) {
this.queue = queue;
return this;
}
public Builder setTask(String task) {
this.task = task;
return this;
}
private Builder(TaskName taskName) {
this.project = taskName.project;
this.location = taskName.location;
this.queue = taskName.queue;
this.task = taskName.task;
}
public TaskName build() {
return new TaskName(this);
}
}
}
|
apache-2.0
|
codereligion/beast
|
src/main/java/com/codereligion/beast/internal/test/ToStringFormatTest.java
|
5386
|
/**
* Copyright 2013 www.codereligion.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.codereligion.beast.internal.test;
import com.codereligion.beast.internal.creation.ObjectFactory;
import com.codereligion.beast.internal.creation.ObjectMethodNames;
import java.beans.PropertyDescriptor;
import java.lang.reflect.InvocationTargetException;
import java.util.Collections;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import static com.codereligion.beast.internal.util.Assert.assertTrue;
import static com.codereligion.beast.internal.util.Assert.fail;
/**
* Tests the toString implementation of the class under test for the following criteria:
* <p/>
 * <ul> <li> the toString method must be implemented <li> the result must comply with the specified {@link Pattern} </ul>
*
* @author Sebastian Gröbler
* @since 11.08.2012
*/
public final class ToStringFormatTest extends AbstractTest {
/**
* The names of the properties excluded from the test.
*/
private final Set<String> excludedPropertyNames;
/**
* The compiled regular expression the toString result should match.
*/
private Pattern toStringPattern;
/**
* Constructs a new instance of this test for the given {@code beanClass} using the given {@code objectFactory}, {@code pattern} and {@code
* excludedPropertyNames}.
*
* @param beanClass the {@link Class} to test
* @param objectFactory the {@link ObjectFactory} to use
     * @param pattern the pattern with which the toString result must comply
* @param excludedPropertyNames the names of the properties to exclude from the test
* @throws IllegalArgumentException when any of the given parameters are {@code null} or when the given {@code beanClass} cannot be tested
*/
public ToStringFormatTest(final Class<?> beanClass, final ObjectFactory objectFactory, final Pattern pattern, final Set<String> excludedPropertyNames) {
super(beanClass, objectFactory);
if (pattern == null) {
throw new IllegalArgumentException("pattern must not be null.");
}
if (excludedPropertyNames == null) {
throw new IllegalArgumentException("excludedPropertyNames must not be null.");
}
this.excludedPropertyNames = Collections.unmodifiableSet(excludedPropertyNames);
if (!isMethodImplemented(ObjectMethodNames.TO_STRING)) {
throw new IllegalArgumentException("The given class: " + this.beanClassCanonicalName + " does not implement toString.");
}
this.toStringPattern = pattern;
}
@Override
public void run() {
if (!isMethodImplemented(ObjectMethodNames.TO_STRING)) {
fail("The given class %s does not implement toString.", this.beanClassCanonicalName);
}
final Object defaultObject = newBeanObject();
final String defaultToStringResult = defaultObject.toString();
final Matcher matcher = this.toStringPattern.matcher(defaultToStringResult);
final boolean toStringMatchesPattern = matcher.matches();
assertTrue(toStringMatchesPattern,
"The required pattern '%s' was not matched by the toString result: '%s'.",
this.toStringPattern.pattern(),
defaultToStringResult);
}
@Override
public void handleInvocationTargetException(final PropertyDescriptor property, final InvocationTargetException exception) {
final String propertyName = property.getName();
if (!this.excludedPropertyNames.contains(propertyName)) {
final String message = String.format("Calling the setter of the property '%s' threw an exception. " +
"The setter call can be avoided by excluding the property from the test.", propertyName);
throw new IllegalArgumentException(message, exception);
}
}
@Override
public int hashCode() {
final int prime = 31;
int result = super.hashCode();
result = prime * result + this.toStringPattern.hashCode();
return result;
}
@Override
public boolean equals(final Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
return super.equals(obj);
}
@Override
public String toString() {
final StringBuilder builder = new StringBuilder();
builder.append("ToStringFormatTest [");
builder.append(super.toString());
builder.append(", toStringPattern=");
builder.append(this.toStringPattern);
builder.append("]");
return builder.toString();
}
}
|
apache-2.0
|
LQJJ/demo
|
126-go-common-master/app/service/main/up/service/log_test.go
|
887
|
package service
import (
"context"
"testing"
"go-common/app/service/main/up/model"
"github.com/smartystreets/goconvey/convey"
)
func TestServicesendUpSpecialLog(t *testing.T) {
convey.Convey("sendUpSpecialLog", t, func(ctx convey.C) {
var (
c = context.Background()
opInfo = &UpSpecialLogInfo{}
)
ctx.Convey("When everything gose positive", func(ctx convey.C) {
err := s.sendUpSpecialLog(c, opInfo)
ctx.Convey("Then err should be nil.", func(ctx convey.C) {
ctx.So(err, convey.ShouldBeNil)
})
})
})
}
func TestServicefillGroupInfo(t *testing.T) {
convey.Convey("fillGroupInfo", t, func(ctx convey.C) {
var (
c = context.Background()
up = &model.UpSpecialWithName{}
)
ctx.Convey("When everything gose positive", func(ctx convey.C) {
s.fillGroupInfo(c, up)
ctx.Convey("No return values", func(ctx convey.C) {
})
})
})
}
|
apache-2.0
|
VirtualGamer/SnowEngine
|
Dependencies/opengl/src/org/lwjgl/opengl/EXTBlendEquationSeparate.java
|
2520
|
/*
* Copyright LWJGL. All rights reserved.
* License terms: https://www.lwjgl.org/license
* MACHINE GENERATED FILE, DO NOT EDIT
*/
package org.lwjgl.opengl;
import static org.lwjgl.system.Checks.*;
import static org.lwjgl.system.JNI.*;
/**
* Native bindings to the <a href="http://www.opengl.org/registry/specs/EXT/blend_equation_separate.txt">EXT_blend_equation_separate</a> extension.
*
* <p><a href="http://www.opengl.org/registry/specs/EXT/blend_func_separate.txt">EXT_blend_func_separate</a> introduced separate RGB and alpha blend factors. <a href="http://www.opengl.org/registry/specs/EXT/blend_minmax.txt">EXT_blend_minmax</a> introduced a
 * distinct blend equation for combining source and destination blend terms. (<a href="http://www.opengl.org/registry/specs/EXT_blend_subtract/blend_subtract.txt">EXT_blend_subtract</a> &
* <a href="http://www.opengl.org/registry/specs/EXT/blend_logic_op.txt">EXT_blend_logic_op</a> added other blend equation modes.) OpenGL 1.4 integrated both functionalities into the core standard.</p>
*
* <p>While there are separate blend functions for the RGB and alpha blend factors, OpenGL 1.4 provides a single blend equation that applies to both RGB and
* alpha portions of blending.</p>
*
* <p>This extension provides a separate blend equation for RGB and alpha to match the generality available for blend factors.</p>
*
* <p>Requires {@link GL14 OpenGL 1.4} or {@link ARBImaging ARB_imaging} or <a href="http://www.opengl.org/registry/specs/EXT/blend_minmax.txt">EXT_blend_minmax</a> and/or
* <a href="http://www.opengl.org/registry/specs/EXT_blend_subtract/blend_subtract.txt">EXT_blend_subtract_blend_subtract</a>. Promoted to core in {@link GL20 OpenGL 2.0}.</p>
*/
public class EXTBlendEquationSeparate {
/** Accepted by the {@code pname} parameter of GetBooleanv, GetIntegerv, GetFloatv, and GetDoublev. */
public static final int
GL_BLEND_EQUATION_RGB_EXT = 0x8009,
GL_BLEND_EQUATION_ALPHA_EXT = 0x883D;
protected EXTBlendEquationSeparate() {
throw new UnsupportedOperationException();
}
static boolean isAvailable(GLCapabilities caps) {
return checkFunctions(
caps.glBlendEquationSeparateEXT
);
}
// --- [ glBlendEquationSeparateEXT ] ---
public static void glBlendEquationSeparateEXT(int modeRGB, int modeAlpha) {
long __functionAddress = GL.getCapabilities().glBlendEquationSeparateEXT;
if ( CHECKS )
checkFunctionAddress(__functionAddress);
callV(__functionAddress, modeRGB, modeAlpha);
}
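	// Hedged usage sketch (not part of the generated file): additive blending for RGB
	// while keeping the maximum alpha. GL14.GL_FUNC_ADD and EXTBlendMinmax.GL_MAX_EXT
	// are assumed to be available in this binding.
	//   glBlendEquationSeparateEXT(GL14.GL_FUNC_ADD, EXTBlendMinmax.GL_MAX_EXT);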
}
|
apache-2.0
|
carbon-design-system/carbon-components
|
packages/react/src/components/FluidForm/FluidForm.js
|
1016
|
/**
* Copyright IBM Corp. 2016, 2018
*
* This source code is licensed under the Apache-2.0 license found in the
* LICENSE file in the root directory of this source tree.
*/
import PropTypes from 'prop-types';
import React from 'react';
import classnames from 'classnames';
import { settings } from 'carbon-components';
import Form from '../Form';
import { FormContext } from './FormContext';
const { prefix } = settings;
function FluidForm({ className, children, ...other }) {
const classNames = classnames(`${prefix}--form--fluid`, className);
return (
<FormContext.Provider value={{ isFluid: true }}>
<Form className={classNames} {...other}>
{children}
</Form>
</FormContext.Provider>
);
}
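// Hedged usage sketch (not part of the original file):
//   <FluidForm aria-label="sample form">
//     <TextInput id="name" labelText="Name" />
//   </FluidForm>
// TextInput is a placeholder; any Carbon form control that reads FormContext works here.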
FluidForm.propTypes = {
/**
* Provide children to be rendered inside of the <form> element
*/
children: PropTypes.node,
/**
* Provide a custom className to be applied on the containing <form> node
*/
className: PropTypes.string,
};
export default FluidForm;
|
apache-2.0
|
nshikalkov/timesheet
|
vendor/autoload.php
|
178
|
<?php
// autoload.php @generated by Composer
require_once __DIR__ . '/composer/autoload_real.php';
return ComposerAutoloaderInit6f55a4a23f6a5f9ca92be0da2c18c823::getLoader();
|
apache-2.0
|
dimone-kun/cuba
|
modules/gui/src/com/haulmont/cuba/gui/components/mainwindow/FoldersPane.java
|
1105
|
/*
* Copyright (c) 2008-2016 Haulmont.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.haulmont.cuba.gui.components.mainwindow;
import com.haulmont.cuba.gui.components.Component;
/**
* A component displaying a panel with application and search folders.
*/
public interface FoldersPane extends Component.BelongToFrame {
String NAME = "foldersPane";
/**
* Initially show the panel and load folders. Invoked by the component loader.
*/
void loadFolders();
/**
* Refresh folders if the panel is visible.
*/
void refreshFolders();
}
|
apache-2.0
|
amoudi87/asterixdb
|
asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedLifecycleEventSubscriber.java
|
2348
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.asterix.metadata.feeds;
import java.util.Iterator;
import java.util.concurrent.LinkedBlockingQueue;
import org.apache.asterix.common.exceptions.AsterixException;
import org.apache.asterix.common.feeds.api.IFeedLifecycleEventSubscriber;
public class FeedLifecycleEventSubscriber implements IFeedLifecycleEventSubscriber {
private LinkedBlockingQueue<FeedLifecycleEvent> inbox;
public FeedLifecycleEventSubscriber() {
this.inbox = new LinkedBlockingQueue<FeedLifecycleEvent>();
}
@Override
public void handleFeedEvent(FeedLifecycleEvent event) {
inbox.add(event);
}
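    /**
     * Blocks until the given event has been observed. Events already present in the
     * inbox are scanned first; any intake or collect failure encountered along the
     * way raises an AsterixException.
     */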
@Override
public void assertEvent(FeedLifecycleEvent event) throws AsterixException, InterruptedException {
boolean eventOccurred = false;
FeedLifecycleEvent e = null;
Iterator<FeedLifecycleEvent> eventsSoFar = inbox.iterator();
while (eventsSoFar.hasNext()) {
e = eventsSoFar.next();
assertNoFailure(e);
eventOccurred = e.equals(event);
}
while (!eventOccurred) {
e = inbox.take();
eventOccurred = e.equals(event);
if (!eventOccurred) {
assertNoFailure(e);
}
}
}
private void assertNoFailure(FeedLifecycleEvent e) throws AsterixException {
if (e.equals(FeedLifecycleEvent.FEED_INTAKE_FAILURE) || e.equals(FeedLifecycleEvent.FEED_COLLECT_FAILURE)) {
throw new AsterixException("Failure in feed");
}
}
}
|
apache-2.0
|
huangshanqi/algorithom-tutorial
|
src/cn/evilcoder/algorithm/Gcd.java
|
925
|
package cn.evilcoder.algorithm;
/**
 * Greatest common divisor (GCD).
* Created by huangshanqi on 2016/1/1.
*/
public class Gcd {
/**
     * Euclidean algorithm (repeated division).
     * x = ky + b.
     * f(x, y) = f(y, b) = f(y, x%y)
     * @param x first value
     * @param y second value
     * @return the greatest common divisor of x and y
*/
public static int gcdByMod(int x, int y) {
return y == 0 ? x : gcdByMod(y, x % y);
}
    /**
     * Binary GCD (Stein's algorithm): strips shared factors of two, halves a
     * single even argument, and reduces the odd/odd case by subtraction.
     */
    public static int gcdByPrimeOf2(int x, int y) {
if (y > x) {
return gcdByPrimeOf2(y, x);
}
if (y == 0) {
return x;
}
boolean xEven = (x & 1) == 0;
boolean yEven = (y & 1) == 0;
if (xEven && yEven) {
return 2 * gcdByPrimeOf2(x >> 1, y >> 1);
} else if (xEven) {
return gcdByPrimeOf2(x >> 1, y);
} else if (yEven) {
return gcdByPrimeOf2(x, y >> 1);
} else {
            return gcdByPrimeOf2(y, x - y);
}
}
}
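// Hedged demo (illustrative, not part of the original file): exercises both
// implementations on a few pairs and prints the results side by side.
class GcdDemo {
    public static void main(String[] args) {
        int[][] pairs = { {12, 18}, {100, 75}, {17, 5}, {0, 9}, {48, 36} };
        for (int[] p : pairs) {
            int byMod = Gcd.gcdByMod(p[0], p[1]);
            int byTwo = Gcd.gcdByPrimeOf2(p[0], p[1]);
            // Both algorithms agree; e.g. gcd(12, 18) == 6.
            System.out.printf("gcd(%d, %d) = %d / %d%n", p[0], p[1], byMod, byTwo);
        }
    }
}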
|
apache-2.0
|
xxs/es-shop
|
src/main/java/com/sishuok/es/showcase/excel/entity/ExcelData.java
|
756
|
/**
* Copyright (c) 2005-2012 https://github.com/zhangkaitao
*
* Licensed under the Apache License, Version 2.0 (the "License");
*/
package com.sishuok.es.showcase.excel.entity;
import com.sishuok.es.common.entity.BaseEntity;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Table;
/**
* <p>User: Zhang Kaitao
 * <p>Date: 13-2-4 9:38 AM
* <p>Version: 1.0
*/
@Entity
@Table(name = "showcase_excel_data")
public class ExcelData extends BaseEntity<Long> {
@Column(name = "content")
private String content;
public String getContent() {
return content;
}
public void setContent(final String content) {
this.content = content;
}
}
|
apache-2.0
|
zhuzengpeng/app-console
|
src/main/java/com/thinkgem/jeesite/modules/act/utils/ActUtils.java
|
6509
|
/**
* Copyright © 2012-2016 <a href="https://github.com/thinkgem/jeesite">JeeSite</a> All rights reserved.
*/
package com.thinkgem.jeesite.modules.act.utils;
import java.lang.annotation.Annotation;
import java.lang.reflect.Method;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.activiti.engine.impl.persistence.entity.GroupEntity;
import org.activiti.engine.impl.persistence.entity.UserEntity;
import com.fasterxml.jackson.annotation.JsonBackReference;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.thinkgem.jeesite.common.annotation.FieldName;
import com.thinkgem.jeesite.common.config.Global;
import com.thinkgem.jeesite.common.utils.Encodes;
import com.thinkgem.jeesite.common.utils.StringUtils;
import com.thinkgem.jeesite.modules.act.entity.Act;
import com.thinkgem.jeesite.modules.sys.entity.Role;
import com.thinkgem.jeesite.modules.sys.entity.User;
import org.springframework.util.ObjectUtils;
/**
 * Process (workflow) utilities.
* @author ThinkGem
* @version 2013-11-03
*/
public class ActUtils {
// private static Logger logger = LoggerFactory.getLogger(ActUtils.class);
/**
	 * Process-definition KEYs; each must start with "PD_".
	 * Structure: string[]{"process key", "business main-table name"}
*/
public static final String[] PD_LEAVE = new String[]{"leave", "oa_leave"};
public static final String[] PD_TEST_AUDIT = new String[]{"test_audit", "oa_test_audit"};
// /**
//	 * Process-definition map (auto-initialized)
// */
// private static Map<String, String> procDefMap = new HashMap<String, String>() {
// private static final long serialVersionUID = 1L;
// {
// for (Field field : ActUtils.class.getFields()){
// if(StringUtils.startsWith(field.getName(), "PD_")){
// try{
// String[] ss = (String[])field.get(null);
// put(ss[0], ss[1]);
// }catch (Exception e) {
// logger.debug("load pd error: {}", field.getName());
// }
// }
// }
// }
// };
//
// /**
//	 * Get the process execution (handling) URL
// * @param procId
// * @return
// */
// public static String getProcExeUrl(String procId) {
// String url = procDefMap.get(StringUtils.split(procId, ":")[0]);
// if (StringUtils.isBlank(url)){
// return "404";
// }
// return url;
// }
	@SuppressWarnings({ "unused" })
	public static Map<String, Object> getMobileEntity(Object entity, String splitType){
		if (splitType == null){
			splitType = "@";
		}
		Map<String, Object> map = Maps.newHashMap();
		List<String> field = Lists.newArrayList();
		List<String> value = Lists.newArrayList();
		List<String> chineseName = Lists.newArrayList();
		try{
			for (Method m : entity.getClass().getMethods()){
				if (m.getAnnotation(JsonIgnore.class) == null && m.getAnnotation(JsonBackReference.class) == null && m.getName().startsWith("get")){
					// Record the display name declared via @FieldName, or "" if absent.
					if (m.isAnnotationPresent(FieldName.class)) {
						Annotation p = m.getAnnotation(FieldName.class);
						FieldName fieldName = (FieldName) p;
						chineseName.add(fieldName.value());
					}else{
						chineseName.add("");
					}
					if (m.getName().equals("getAct")){
						// Pull the task id off the nested Act object.
						Object act = m.invoke(entity, new Object[]{});
						Method actMet = act.getClass().getMethod("getTaskId");
						map.put("taskId", ObjectUtils.getDisplayString(actMet.invoke(act, new Object[]{})));
					}else{
						field.add(StringUtils.uncapitalize(m.getName().substring(3)));
						value.add(ObjectUtils.getDisplayString(m.invoke(entity, new Object[]{})));
					}
				}
			}
		}catch (Exception e) {
			e.printStackTrace();
		}
		map.put("beanTitles", StringUtils.join(field, splitType));
		map.put("beanInfos", StringUtils.join(value, splitType));
		map.put("chineseNames", StringUtils.join(chineseName, splitType));
		return map;
	}
/**
	 * Build the process form URL.
	 * @param formKey the form key
	 * @param act parameters passed along to the form
	 * @return the assembled form URL
*/
public static String getFormUrl(String formKey, Act act){
StringBuilder formUrl = new StringBuilder();
String formServerUrl = Global.getConfig("activiti.form.server.url");
if (StringUtils.isBlank(formServerUrl)){
formUrl.append(Global.getAdminPath());
}else{
formUrl.append(formServerUrl);
}
formUrl.append(formKey).append(formUrl.indexOf("?") == -1 ? "?" : "&");
formUrl.append("act.taskId=").append(act.getTaskId() != null ? act.getTaskId() : "");
formUrl.append("&act.taskName=").append(act.getTaskName() != null ? Encodes.urlEncode(act.getTaskName()) : "");
formUrl.append("&act.taskDefKey=").append(act.getTaskDefKey() != null ? act.getTaskDefKey() : "");
formUrl.append("&act.procInsId=").append(act.getProcInsId() != null ? act.getProcInsId() : "");
formUrl.append("&act.procDefId=").append(act.getProcDefId() != null ? act.getProcDefId() : "");
formUrl.append("&act.status=").append(act.getStatus() != null ? act.getStatus() : "");
formUrl.append("&id=").append(act.getBusinessId() != null ? act.getBusinessId() : "");
return formUrl.toString();
}
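	// Illustrative shape of the result (values are assumptions, not from the
	// original source): with "activiti.form.server.url" unset, the admin path
	// is used as prefix, so a call like getFormUrl("oa/leaveForm", act)
	// yields roughly
	//   /a/oa/leaveForm?act.taskId=123&act.taskName=...&act.taskDefKey=...&act.procInsId=...&act.procDefId=...&act.status=...&id=456
	// Null Act fields are appended as empty values rather than omitted.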
/**
	 * Convert a process node type into its Chinese description.
	 * @param type the English type name
	 * @return the translated Chinese name, or the original type if unknown
*/
public static String parseToZhType(String type) {
Map<String, String> types = new HashMap<String, String>();
types.put("userTask", "用户任务");
types.put("serviceTask", "系统任务");
types.put("startEvent", "开始节点");
types.put("endEvent", "结束节点");
types.put("exclusiveGateway", "条件判断节点(系统自动根据条件处理)");
types.put("inclusiveGateway", "并行处理任务");
types.put("callActivity", "子流程");
return types.get(type) == null ? type : types.get(type);
}
public static UserEntity toActivitiUser(User user){
if (user == null){
return null;
}
UserEntity userEntity = new UserEntity();
userEntity.setId(user.getLoginName());
userEntity.setFirstName(user.getName());
userEntity.setLastName(StringUtils.EMPTY);
userEntity.setPassword(user.getPassword());
userEntity.setEmail(user.getEmail());
userEntity.setRevision(1);
return userEntity;
}
public static GroupEntity toActivitiGroup(Role role){
if (role == null){
return null;
}
GroupEntity groupEntity = new GroupEntity();
groupEntity.setId(role.getEnname());
groupEntity.setName(role.getName());
groupEntity.setType(role.getRoleType());
groupEntity.setRevision(1);
return groupEntity;
}
public static void main(String[] args) {
User user = new User();
System.out.println(getMobileEntity(user, "@"));
}
}
|
apache-2.0
|
flashreport-io/flashreport-cf-spring
|
src/main/java/io/flashreport/cfspring/controller/ReportController.java
|
1968
|
package io.flashreport.cfspring.controller;
import io.flashreport.cfspring.integration.ReportStatus;
import io.flashreport.cfspring.service.ReportManager;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.servlet.ModelAndView;
import org.springframework.web.servlet.mvc.method.annotation.MvcUriComponentsBuilder;
import org.springframework.web.util.UriComponents;
/**
* Created by Nicolas Lejeune on 16/06/15.
*/
@Controller
@SuppressWarnings("unused")
public class ReportController {
@Autowired
ReportManager reportManager;
@RequestMapping(value = "/report/new", method = {RequestMethod.GET, RequestMethod.POST})
public String requestReportGeneration(Model model) {
String uuid = reportManager.generateReport();
UriComponents uriComponents = MvcUriComponentsBuilder
.fromMethodName(ReportController.class, "viewReportStatus", uuid).build();
model.addAttribute("REPORT_UUID", uuid);
model.addAttribute("REPORT_STATUS_URL", uriComponents.encode().toUri().toString());
model.addAttribute("REPORT_DOWNLOAD_URL", uriComponents.encode().toUri().toString() + "/download");
return "created";
}
@RequestMapping("/report/{uuid}")
@ResponseBody
public ReportStatus viewReportStatus(@PathVariable String uuid) {
return reportManager.getReportStatus(uuid);
}
@RequestMapping("/report/{uuid}/download")
@ResponseBody
public ModelAndView downloadReport(@PathVariable String uuid) {
return new ModelAndView("redirect:" + reportManager.getStorageUrl(uuid));
}
}
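// Hedged request-flow sketch (illustrative, not part of the original file).
// Paths come from the @RequestMapping values above; response details beyond
// what the handlers declare are assumptions:
//
//   GET /report/new             -> renders the "created" view, exposing
//                                  REPORT_UUID, REPORT_STATUS_URL and
//                                  REPORT_DOWNLOAD_URL to the template
//   GET /report/{uuid}          -> the ReportStatus object, written to the
//                                  response body (typically as JSON)
//   GET /report/{uuid}/download -> redirect to the storage URL returned by
//                                  ReportManager.getStorageUrl(uuid)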
|
apache-2.0