repo_name (stringlengths 4–116) | path (stringlengths 4–379) | size (stringlengths 1–7) | content (stringlengths 3–1.05M) | license (stringclasses, 15 values)
---|---|---|---|---|
diennea/herddb
|
herddb-core/src/main/java/herddb/sql/expressions/CompiledMinorThanExpression.java
|
2096
|
/*
Licensed to Diennea S.r.l. under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. Diennea S.r.l. licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package herddb.sql.expressions;
import herddb.model.StatementEvaluationContext;
import herddb.model.StatementExecutionException;
import herddb.utils.SQLRecordPredicateFunctions;
public class CompiledMinorThanExpression extends CompiledBinarySQLExpression {
public CompiledMinorThanExpression(CompiledSQLExpression left, CompiledSQLExpression right) {
super(left, right);
}
@Override
public Object evaluate(herddb.utils.DataAccessor bean, StatementEvaluationContext context) throws StatementExecutionException {
SQLRecordPredicateFunctions.CompareResult res = left.opCompareTo(bean, context, right);
return res == SQLRecordPredicateFunctions.CompareResult.MINOR;
}
@Override
public String getOperator() {
return "<";
}
@Override
public CompiledSQLExpression remapPositionalAccessToToPrimaryKeyAccessor(int[] projection) {
return new CompiledMinorThanExpression(
left.remapPositionalAccessToToPrimaryKeyAccessor(projection),
right.remapPositionalAccessToToPrimaryKeyAccessor(projection));
}
@Override
public CompiledBinarySQLExpression negate() {
return new CompiledGreaterThanEqualsExpression(left, right);
}
@Override
public boolean isNegateSupported() {
return true;
}
}
|
apache-2.0
|
citygml4j/citygml4j
|
src/main/java/org/citygml4j/model/xal/XAL.java
|
868
|
/*
* citygml4j - The Open Source Java API for CityGML
* https://github.com/citygml4j
*
* Copyright 2013-2022 Claus Nagel <claus.nagel@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.citygml4j.model.xal;
import org.citygml4j.model.common.base.ModelObject;
public interface XAL extends ModelObject {
XALClass getXALClass();
}
|
apache-2.0
|
fagongzi/fastim
|
pkg/model/support.go
|
1598
|
package model
import (
"encoding/json"
p "github.com/fagongzi/fastim/pkg/protocol"
)
type Support struct {
Addr string `json:"addr,omitempty"`
Product int `json:"product,omitempty"`
CmdList []int `json:"cmdList,omitempty"`
BizList []int `json:"bizList,omitempty"`
MinProtocol int `json:"minProtocol,omitempty"`
MaxProtocol int `json:"maxProtocol,omitempty"`
}
// UnMarshalSupport decodes a Support from its JSON representation.
// Note: unmarshal errors are ignored; a zero-value Support is returned on failure.
func UnMarshalSupport(data []byte) *Support {
	v := &Support{}
	json.Unmarshal(data, v)
	return v
}
// Marshal encodes the Support as JSON, ignoring marshal errors.
func (self *Support) Marshal() []byte {
	v, _ := json.Marshal(self)
	return v
}
// Matches reports whether this Support can handle the given message,
// checking product, protocol range, biz list and cmd list.
func (self *Support) Matches(msg *p.Message) bool {
	return self.productMatches(msg) &&
		self.protocolMatches(msg) &&
		self.bizMatches(msg) &&
		self.cmdMatches(msg)
}
func (self *Support) productMatches(msg *p.Message) bool {
return int(msg.GetProduct()) == self.Product
}
func (self *Support) bizMatches(msg *p.Message) bool {
biz := int(msg.GetBiz())
return self.BizList == nil || inArray(self.BizList, biz)
}
func (self *Support) cmdMatches(msg *p.Message) bool {
cmd := int(msg.GetCmd())
return self.CmdList == nil || inArray(self.CmdList, cmd)
}
func (self *Support) protocolMatches(msg *p.Message) bool {
pv := int(msg.GetProduct())
return pv >= self.MinProtocol && pv <= self.MaxProtocol
}
func (self *Support) SupportBiz(biz int) bool {
return self.BizList == nil || inArray(self.BizList, biz)
}
func inArray(arr []int, target int) bool {
for _, v := range arr {
if v == target {
return true
}
}
return false
}
|
apache-2.0
|
jhaux/tensorflow
|
tensorflow/tensorboard/components/tf_backend/router.ts
|
2641
|
/* Copyright 2015 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the 'License');
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an 'AS IS' BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
import {demoify, queryEncoder} from './urlPathHelpers'
export type RunTagUrlFn = (tag: string, run: string) => string;
export interface Router {
logdir: () => string;
runs: () => string;
isDemoMode: () => boolean;
textRuns: () => string;
text: RunTagUrlFn;
healthPills: () => string;
pluginRoute: (pluginName: string, route: string) => string;
pluginRunTagRoute: (pluginName: string, route: string) => RunTagUrlFn;
}
/**
* The standard router for communicating with the TensorBoard backend
* @param dataDir {string} The base prefix for finding data on server.
* @param demoMode {boolean} Whether to modify urls for filesystem demo usage.
*/
export function router(dataDir = 'data', demoMode = false): Router {
var clean = demoMode ? demoify : (x) => x;
if (dataDir[dataDir.length - 1] === '/') {
dataDir = dataDir.slice(0, dataDir.length - 1);
}
function standardRoute(route: string, demoExtension = '.json'):
((tag: string, run: string) => string) {
return function(tag: string, run: string): string {
var url =
dataDir + '/' + route + clean(queryEncoder({tag: tag, run: run}));
if (demoMode) {
url += demoExtension;
}
return url;
};
}
function pluginRoute(pluginName: string, route: string): string {
return `${dataDir}/plugin/${pluginName}${route}`;
}
function pluginRunTagRoute(pluginName: string, route: string):
((tag: string, run: string) => string) {
const base = pluginRoute(pluginName, route);
return (tag, run) => base + clean(queryEncoder({tag, run}));
}
return {
logdir: () => dataDir + '/logdir',
runs: () => dataDir + '/runs' + (demoMode ? '.json' : ''),
isDemoMode: () => demoMode,
healthPills: () => dataDir + '/plugin/debugger/health_pills',
textRuns: () => dataDir + '/plugin/text/runs' + (demoMode ? '.json' : ''),
text: standardRoute('plugin/text/text'),
pluginRoute,
pluginRunTagRoute,
};
};
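// Illustrative usage (not part of the original module) — a minimal sketch of the URL
// shapes this router produces, assuming the default dataDir of 'data' and that
// queryEncoder renders its argument as a '?tag=...&run=...' query string:
//
//   const r = router();
//   r.runs();                           // 'data/runs'
//   r.logdir();                         // 'data/logdir'
//   r.pluginRoute('scalars', '/tags');  // 'data/plugin/scalars/tags'
//   r.text('accuracy', 'train');        // 'data/plugin/text/text?tag=accuracy&run=train'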
|
apache-2.0
|
jeffreyolchovy/sbt-fmpp-resolver
|
plugin/src/main/scala/sbtfmpptemplate/FmppTemplatePlugin.scala
|
1028
|
package sbtfmpptemplate
import sbt._
import sbt.Def.Setting
import sbt.Keys._
import sbt.complete.DefaultParsers._
import sbt.plugins.CorePlugin
import sbtfmppresolver.FmppTemplateResolver
object FmppTemplatePlugin extends AutoPlugin {
override def requires = CorePlugin
override def trigger = allRequirements
object autoImport {
val FmppTemplateKeys = sbtfmpptemplate.FmppTemplateKeys
val fmppResolver = FmppTemplateKeys.fmppResolver
val fmpp = FmppTemplateKeys.fmpp
}
import autoImport._
override lazy val globalSettings: Seq[Setting[_]] = Seq(
fmppResolver := new FmppTemplateResolver,
fmpp := {
val args = spaceDelimited("<args>").parsed.toArray
val resolver = fmppResolver.value
if (resolver.isDefined(args)) {
resolver.run(args)
}
},
templateResolverInfos += TemplateResolverInfo(
"com.github.jeffreyolchovy" %% "sbt-fmpp-resolver" % BuildInfo.version cross(CrossVersion.binary),
"sbtfmppresolver.FmppTemplateResolver"
)
)
}
|
apache-2.0
|
ramesh2k1/Test
|
CognitoSyncDemo/src/com/amazonaws/cognito/sync/demo/MainActivity.java
|
13841
|
/**
* Copyright 2010-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.cognito.sync.demo;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.Dialog;
import android.content.DialogInterface;
import android.content.Intent;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.util.Log;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
import android.widget.TextView;
import android.widget.Toast;
import com.amazon.identity.auth.device.AuthError;
import com.amazon.identity.auth.device.authorization.api.AmazonAuthorizationManager;
import com.amazon.identity.auth.device.authorization.api.AuthorizationListener;
import com.amazon.identity.auth.device.authorization.api.AuthzConstants;
import com.amazon.identity.auth.device.shared.APIListener;
import com.amazonaws.cognito.sync.devauth.client.AmazonSharedPreferencesWrapper;
import com.facebook.Request;
import com.facebook.Response;
import com.facebook.Session;
import com.facebook.SessionState;
import com.facebook.model.GraphUser;
public class MainActivity extends Activity implements Session.StatusCallback {
private static final String TAG = "MainActivity";
private static final String[] APP_SCOPES = {
"profile"
};
private Button btnLoginFacebook;
private Button btnLoginLWA;
private Button btnLoginDevAuth;
private Button btnWipedata;
private AmazonAuthorizationManager mAuthManager;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main_activity);
/**
* Initializes the sync client. This must be called before you can use it.
*/
CognitoSyncClientManager.init(this);
btnLoginFacebook = (Button) findViewById(R.id.btnLoginFacebook);
btnLoginFacebook.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
// start Facebook Login
Session.openActiveSession(MainActivity.this, true,
MainActivity.this);
}
});
final Session session = Session
.openActiveSessionFromCache(MainActivity.this);
if (session != null) {
setFacebookSession(session);
}
try {
mAuthManager = new AmazonAuthorizationManager(this, Bundle.EMPTY);
} catch (IllegalArgumentException e) {
Toast.makeText(this, "Login with Amazon is disabled.",
Toast.LENGTH_LONG).show();
Log.w(TAG, "Login with Amazon isn't configured correctly. "
+ "Thus it's disabled in this demo.", e);
}
btnLoginLWA = (Button) findViewById(R.id.btnLoginLWA);
btnLoginLWA.setVisibility(View.VISIBLE);
btnLoginLWA.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
mAuthManager.authorize(APP_SCOPES, Bundle.EMPTY,
new AuthorizeListener());
}
});
btnLoginLWA.setEnabled(mAuthManager != null);
btnWipedata = (Button) findViewById(R.id.btnWipedata);
btnWipedata.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
new AlertDialog.Builder(MainActivity.this)
.setTitle("Wipe data?")
.setMessage(
"This will log off your current session and wipe all user data. "
+ "Any data not synchronized will be lost.")
.setPositiveButton("Yes",
new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog,
int which) {
// clear login status
if (session != null) {
session.closeAndClearTokenInformation();
}
btnLoginFacebook
.setVisibility(View.VISIBLE);
if (mAuthManager != null) {
mAuthManager
.clearAuthorizationState(null);
}
btnLoginLWA.setVisibility(View.VISIBLE);
// wipe data
CognitoSyncClientManager.getInstance()
.wipeData();
// Wipe shared preferences
AmazonSharedPreferencesWrapper.wipe(PreferenceManager
.getDefaultSharedPreferences(MainActivity.this));
}
})
.setNegativeButton("No",
new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog,
int which) {
dialog.cancel();
}
}).show();
}
});
findViewById(R.id.btnListDatasets).setOnClickListener(
new OnClickListener() {
@Override
public void onClick(View v) {
Intent intent = new Intent(MainActivity.this,
ListDatasetsActivity.class);
startActivity(intent);
}
});
btnLoginDevAuth = (Button) findViewById(R.id.btnLoginDevAuth);
if ((CognitoSyncClientManager.credentialsProvider.getIdentityProvider()) instanceof DeveloperAuthenticationProvider) {
btnLoginDevAuth.setEnabled(true);
Log.w(TAG, "Developer authentication feature configured correctly. ");
} else {
btnLoginDevAuth.setEnabled(false);
Toast.makeText(this, "Developer authentication feature is disabled.",
Toast.LENGTH_LONG).show();
Log.w(TAG, "Developer authentication feature configured incorrectly. "
+ "Thus it's disabled in this demo.");
}
btnLoginDevAuth.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
// username and password dialog
final Dialog login = new Dialog(MainActivity.this);
login.setContentView(R.layout.login_dialog);
login.setTitle("Sample developer login");
final TextView txtUsername = (TextView) login
.findViewById(R.id.txtUsername);
txtUsername.setHint("Username");
final TextView txtPassword = (TextView) login
.findViewById(R.id.txtPassword);
txtPassword.setHint("Password");
Button btnLogin = (Button) login.findViewById(R.id.btnLogin);
Button btnCancel = (Button) login.findViewById(R.id.btnCancel);
btnCancel.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
login.dismiss();
}
});
btnLogin.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
// Validate the username and password
if (txtUsername.getText().toString().isEmpty()
|| txtPassword.getText().toString().isEmpty()) {
new AlertDialog.Builder(MainActivity.this)
.setTitle("Login error")
.setMessage(
"Username or password cannot be empty!!")
.show();
} else {
// Clear the existing credentials
CognitoSyncClientManager.credentialsProvider
.clearCredentials();
// Initiate user authentication against the
// developer backend in this case the sample Cognito
// developer authentication application.
((DeveloperAuthenticationProvider) CognitoSyncClientManager.credentialsProvider
.getIdentityProvider()).login(
txtUsername.getText().toString(),
txtPassword.getText().toString(),
MainActivity.this);
}
login.dismiss();
}
});
login.show();
}
});
}
@Override
public void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
Session.getActiveSession().onActivityResult(this, requestCode,
resultCode, data);
}
@Override
public void call(Session session, SessionState state, Exception exception) {
if (session.isOpened()) {
setFacebookSession(session);
// make request to the /me API
Request.newMeRequest(session, new Request.GraphUserCallback() {
// callback after Graph API response with user object
@Override
public void onCompleted(GraphUser user, Response response) {
if (user != null) {
Toast.makeText(MainActivity.this,
"Hello " + user.getName(), Toast.LENGTH_LONG)
.show();
}
}
}).executeAsync();
}
}
private void setFacebookSession(Session session) {
Log.i(TAG, "facebook token: " + session.getAccessToken());
CognitoSyncClientManager.addLogins("graph.facebook.com",
session.getAccessToken());
btnLoginFacebook.setVisibility(View.GONE);
}
private class AuthorizeListener implements AuthorizationListener {
/* Authorization was completed successfully. */
@Override
public void onSuccess(Bundle response) {
Log.i(TAG, "Auth successful. Start to getToken");
mAuthManager.getToken(APP_SCOPES, new AuthTokenListener());
mAuthManager.getProfile(new APIListener() {
@Override
public void onSuccess(Bundle response) {
Bundle profileBundle = response
.getBundle(AuthzConstants.BUNDLE_KEY.PROFILE.val);
final String name = profileBundle
.getString(AuthzConstants.PROFILE_KEY.NAME.val);
runOnUiThread(new Runnable() {
@Override
public void run() {
Toast.makeText(MainActivity.this, "Hello " + name,
Toast.LENGTH_LONG).show();
}
});
}
@Override
public void onError(AuthError ae) {
Log.e(TAG, "AuthError during getProfile", ae);
}
});
runOnUiThread(new Runnable() {
@Override
public void run() {
btnLoginLWA.setVisibility(View.GONE);
}
});
}
/* There was an error during the attempt to authorize the application. */
@Override
public void onError(AuthError ae) {
Log.e(TAG, "AuthError during authorization", ae);
}
/* Authorization was cancelled before it could be completed. */
@Override
public void onCancel(Bundle cause) {
Log.e(TAG, "User cancelled authorization");
}
}
private class AuthTokenListener implements APIListener {
@Override
public void onSuccess(Bundle response) {
final String token = response
.getString(AuthzConstants.BUNDLE_KEY.TOKEN.val);
Log.i(TAG, "amazon token: " + token);
CognitoSyncClientManager.addLogins("www.amazon.com", token);
}
@Override
public void onError(AuthError ae) {
Log.e(TAG, "Failed to get token", ae);
}
}
}
|
apache-2.0
|
felixarpa/Shameless-Party
|
ShamelessParty/app/src/main/java/felixarpa/shamelessapp/utils/DateUtil.java
|
1232
|
package felixarpa.shamelessapp.utils;
import android.annotation.SuppressLint;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
@SuppressLint("SimpleDateFormat")
public class DateUtil {
public static String getDate(Date date) {
SimpleDateFormat sdf = new SimpleDateFormat("dd/MM/yyyy HH:mm:ss");
return sdf.format(date);
}
public static String getHour(Date date) {
SimpleDateFormat sdf = new SimpleDateFormat("HH:mm:ss");
return sdf.format(date);
}
public static String getDay(Date date) {
SimpleDateFormat sdf = new SimpleDateFormat("dd/MM/yyyy");
return sdf.format(date);
}
public static String getDate(long date) {
return getDate(new Date(date));
}
public static String getHour(long date) {
return getHour(new Date(date));
}
public static String getDay(long date) {
return getDay(new Date(date));
}
public static Date getDate(String date) {
SimpleDateFormat sdf = new SimpleDateFormat("dd/MM/yyyy HH:mm:ss");
try {
return sdf.parse(date);
} catch (ParseException e) {
return new Date();
}
}
}
|
apache-2.0
|
afilimonov/jackrabbit-oak
|
oak-core/src/main/java/org/apache/jackrabbit/oak/query/ast/AstElementFactory.java
|
7586
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with this
* work for additional information regarding copyright ownership. The ASF
* licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law
* or agreed to in writing, software distributed under the License is
* distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the specific language
* governing permissions and limitations under the License.
*/
package org.apache.jackrabbit.oak.query.ast;
import static com.google.common.base.Preconditions.checkNotNull;
import java.util.ArrayList;
import javax.annotation.Nonnull;
import org.apache.jackrabbit.oak.api.PropertyValue;
import org.apache.jackrabbit.oak.spi.state.NodeState;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* A factory for syntax tree elements.
*/
public class AstElementFactory {
private static final Logger LOG = LoggerFactory.getLogger(AstElementFactory.class);
public AndImpl and(ConstraintImpl constraint1, ConstraintImpl constraint2) {
return new AndImpl(constraint1, constraint2);
}
public OrderingImpl ascending(DynamicOperandImpl operand) {
return new OrderingImpl(operand, Order.ASCENDING);
}
public BindVariableValueImpl bindVariable(String bindVariableName) {
return new BindVariableValueImpl(bindVariableName);
}
public ChildNodeImpl childNode(String selectorName, String path) {
return new ChildNodeImpl(selectorName, path);
}
public ChildNodeJoinConditionImpl childNodeJoinCondition(String childSelectorName, String parentSelectorName)
{
return new ChildNodeJoinConditionImpl(childSelectorName, parentSelectorName);
}
public ColumnImpl column(String selectorName, String propertyName, String columnName) {
return new ColumnImpl(selectorName, propertyName, columnName);
}
public ComparisonImpl comparison(DynamicOperandImpl operand1, Operator operator, StaticOperandImpl operand2) {
return new ComparisonImpl(operand1, operator, operand2);
}
public DescendantNodeImpl descendantNode(String selectorName, String path) {
return new DescendantNodeImpl(selectorName, path);
}
public DescendantNodeJoinConditionImpl descendantNodeJoinCondition(String descendantSelectorName,
String ancestorSelectorName) {
return new DescendantNodeJoinConditionImpl(descendantSelectorName, ancestorSelectorName);
}
public OrderingImpl descending(DynamicOperandImpl operand) {
return new OrderingImpl(operand, Order.DESCENDING);
}
public EquiJoinConditionImpl equiJoinCondition(String selector1Name, String property1Name, String selector2Name,
String property2Name) {
return new EquiJoinConditionImpl(selector1Name, property1Name, selector2Name, property2Name);
}
public FullTextSearchImpl fullTextSearch(String selectorName, String propertyName,
StaticOperandImpl fullTextSearchExpression) {
return new FullTextSearchImpl(selectorName, propertyName, fullTextSearchExpression);
}
public FullTextSearchScoreImpl fullTextSearchScore(String selectorName) {
return new FullTextSearchScoreImpl(selectorName);
}
public JoinImpl join(SourceImpl left, SourceImpl right, JoinType joinType, JoinConditionImpl joinCondition) {
return new JoinImpl(left, right, joinType, joinCondition);
}
public LengthImpl length(PropertyValueImpl propertyValue) {
return new LengthImpl(propertyValue);
}
public LiteralImpl literal(PropertyValue literalValue) {
return new LiteralImpl(literalValue);
}
public LowerCaseImpl lowerCase(DynamicOperandImpl operand) {
return new LowerCaseImpl(operand);
}
public NodeLocalNameImpl nodeLocalName(String selectorName) {
return new NodeLocalNameImpl(selectorName);
}
public NodeNameImpl nodeName(String selectorName) {
return new NodeNameImpl(selectorName);
}
public NotImpl not(ConstraintImpl constraint) {
return new NotImpl(constraint);
}
public OrImpl or(ConstraintImpl constraint1, ConstraintImpl constraint2) {
return new OrImpl(constraint1, constraint2);
}
public PropertyExistenceImpl propertyExistence(String selectorName, String propertyName) {
return new PropertyExistenceImpl(selectorName, propertyName);
}
public PropertyInexistenceImpl propertyInexistence(String selectorName, String propertyName) {
return new PropertyInexistenceImpl(selectorName, propertyName);
}
public PropertyValueImpl propertyValue(String selectorName, String propertyName) {
return new PropertyValueImpl(selectorName, propertyName);
}
public PropertyValueImpl propertyValue(String selectorName, String propertyName, String propertyType) {
return new PropertyValueImpl(selectorName, propertyName, propertyType);
}
public SameNodeImpl sameNode(String selectorName, String path) {
return new SameNodeImpl(selectorName, path);
}
public SameNodeJoinConditionImpl sameNodeJoinCondition(String selector1Name, String selector2Name, String selector2Path) {
return new SameNodeJoinConditionImpl(selector1Name, selector2Name, selector2Path);
}
public SelectorImpl selector(NodeState type, String selectorName) {
return new SelectorImpl(type, selectorName);
}
public UpperCaseImpl upperCase(DynamicOperandImpl operand) {
return new UpperCaseImpl(operand);
}
public ConstraintImpl in(DynamicOperandImpl left,
ArrayList<StaticOperandImpl> list) {
return new InImpl(left, list);
}
public NativeFunctionImpl nativeFunction(String selectorName, String language, StaticOperandImpl expression) {
return new NativeFunctionImpl(selectorName, language, expression);
}
public SimilarImpl similar(String selectorName, String propertyName,
StaticOperandImpl path) {
return new SimilarImpl(selectorName, propertyName, path);
}
public ConstraintImpl spellcheck(String selectorName, StaticOperandImpl expression) {
return new SpellcheckImpl(selectorName, expression);
}
public ConstraintImpl suggest(String selectorName, StaticOperandImpl expression) {
return new SuggestImpl(selectorName, expression);
}
/**
* <p>
* As {@link AstElement#copyOf()} can return {@code this} if the cloning is not implemented
* by the subclass, this method adds some spice around it by checking for this case and logging
* a DEBUG message when it happens.
* </p>
*
* @param e the element to be cloned. Cannot be null.
* @return same as {@link AstElement#copyOf()}
*/
@Nonnull
public static AstElement copyElementAndCheckReference(@Nonnull final AstElement e) {
AstElement clone = checkNotNull(e).copyOf();
if (clone == e && LOG.isDebugEnabled()) {
LOG.debug(
"Failed to clone the AstElement. Returning same reference; the client may fail. {} - {}",
e.getClass().getName(), e);
}
return clone;
}
}
|
apache-2.0
|
Alachisoft/NCache
|
Src/NCSocketServer/SocketServer/Pooling/Protobuf/InsertResponseInstantiator.cs
|
937
|
// Copyright (c) 2021 Alachisoft
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License
using Alachisoft.NCache.Common.Protobuf;
using Alachisoft.NCache.Common.Pooling.Options;
namespace Alachisoft.NCache.SocketServer.Pooling.Protobuf
{
public sealed class InsertResponseInstantiator : IPooledObjectInstantiator<InsertResponse>
{
public InsertResponse Instantiate() => new InsertResponse();
}
}
|
apache-2.0
|
VirtualGamer/SnowEngine
|
Dependencies/opengl/src/org/lwjgl/opengl/EXTVertexAttrib64bit.java
|
10383
|
/*
* Copyright LWJGL. All rights reserved.
* License terms: https://www.lwjgl.org/license
* MACHINE GENERATED FILE, DO NOT EDIT
*/
package org.lwjgl.opengl;
import java.nio.*;
import static org.lwjgl.system.Checks.*;
import static org.lwjgl.system.JNI.*;
import static org.lwjgl.system.MemoryUtil.*;
/**
* Native bindings to the <a href="http://www.opengl.org/registry/specs/EXT/vertex_attrib_64bit.txt">EXT_vertex_attrib_64bit</a> extension.
*
* <p>This extension provides OpenGL shading language support for vertex shader inputs with 64-bit floating-point components and OpenGL API support for
* specifying the value of those inputs using vertex array or immediate mode entry points. This builds on the support for general-purpose support for
* 64-bit floating-point values in the ARB_gpu_shader_fp64 extension.</p>
*
* <p>This extension provides a new class of vertex attribute functions, beginning with "VertexAttribL" ("L" for "long"), that can be used to specify
* attributes with 64-bit floating-point components. This extension provides no automatic type conversion between attribute and shader variables;
* single-precision attributes are not automatically converted to double-precision or vice versa. For shader variables with 64-bit component types, the
* "VertexAttribL" functions must be used to specify attribute values. For other shader variables, the "VertexAttribL" functions must not be used. If a
* vertex attribute is specified using the wrong attribute function, the values of the corresponding shader input are undefined. This approach requiring
* matching types is identical to that used for the "VertexAttribI" functions provided by OpenGL 3.0 and the EXT_gpu_shader4 extension.</p>
*
* <p>Additionally, some vertex shader inputs using the wider 64-bit components may count double against the implementation-dependent limit on the number of
* vertex shader attribute vectors. A 64-bit scalar or a two-component vector consumes only a single generic vertex attribute; three- and four-component
* "long" may count as two. This approach is similar to the one used in the current GL where matrix attributes consume multiple attributes.</p>
*
* <p>Note that 64-bit generic vertex attributes were nominally supported beginning with the introduction of vertex shaders in OpenGL 2.0. However, the
* OpenGL Shading Language at the time had no support for 64-bit data types, so any such values were automatically converted to 32-bit.</p>
*
* <p>Support for 64-bit floating-point vertex attributes in this extension can be combined with other extensions. In particular, this extension provides an
* entry point that can be used with EXT_direct_state_access to directly set state for any vertex array object. Also, the related
* NV_vertex_attrib_integer_64bit extension provides an entry point to specify bindless vertex attribute arrays with 64-bit components, integer or
* floating-point.</p>
*
* <p>Requires {@link GL30 OpenGL 3.0} and {@link ARBGPUShaderFP64 ARB_gpu_shader_fp64} (or equivalent functionality).</p>
*/
public class EXTVertexAttrib64bit {
/** Returned in the {@code type} parameter of GetActiveAttrib. */
public static final int
GL_DOUBLE_VEC2_EXT = 0x8FFC,
GL_DOUBLE_VEC3_EXT = 0x8FFD,
GL_DOUBLE_VEC4_EXT = 0x8FFE,
GL_DOUBLE_MAT2_EXT = 0x8F46,
GL_DOUBLE_MAT3_EXT = 0x8F47,
GL_DOUBLE_MAT4_EXT = 0x8F48,
GL_DOUBLE_MAT2x3_EXT = 0x8F49,
GL_DOUBLE_MAT2x4_EXT = 0x8F4A,
GL_DOUBLE_MAT3x2_EXT = 0x8F4B,
GL_DOUBLE_MAT3x4_EXT = 0x8F4C,
GL_DOUBLE_MAT4x2_EXT = 0x8F4D,
GL_DOUBLE_MAT4x3_EXT = 0x8F4E;
protected EXTVertexAttrib64bit() {
throw new UnsupportedOperationException();
}
static boolean isAvailable(GLCapabilities caps, java.util.Set<String> ext) {
return checkFunctions(
caps.glVertexAttribL1dEXT, caps.glVertexAttribL2dEXT, caps.glVertexAttribL3dEXT, caps.glVertexAttribL4dEXT, caps.glVertexAttribL1dvEXT,
caps.glVertexAttribL2dvEXT, caps.glVertexAttribL3dvEXT, caps.glVertexAttribL4dvEXT, caps.glVertexAttribLPointerEXT, caps.glGetVertexAttribLdvEXT,
ext.contains("GL_EXT_direct_state_access") ? caps.glVertexArrayVertexAttribLOffsetEXT : -1L
);
}
// --- [ glVertexAttribL1dEXT ] ---
public static void glVertexAttribL1dEXT(int index, double x) {
long __functionAddress = GL.getCapabilities().glVertexAttribL1dEXT;
if ( CHECKS )
checkFunctionAddress(__functionAddress);
callV(__functionAddress, index, x);
}
// --- [ glVertexAttribL2dEXT ] ---
public static void glVertexAttribL2dEXT(int index, double x, double y) {
long __functionAddress = GL.getCapabilities().glVertexAttribL2dEXT;
if ( CHECKS )
checkFunctionAddress(__functionAddress);
callV(__functionAddress, index, x, y);
}
// --- [ glVertexAttribL3dEXT ] ---
public static void glVertexAttribL3dEXT(int index, double x, double y, double z) {
long __functionAddress = GL.getCapabilities().glVertexAttribL3dEXT;
if ( CHECKS )
checkFunctionAddress(__functionAddress);
callV(__functionAddress, index, x, y, z);
}
// --- [ glVertexAttribL4dEXT ] ---
public static void glVertexAttribL4dEXT(int index, double x, double y, double z, double w) {
long __functionAddress = GL.getCapabilities().glVertexAttribL4dEXT;
if ( CHECKS )
checkFunctionAddress(__functionAddress);
callV(__functionAddress, index, x, y, z, w);
}
// --- [ glVertexAttribL1dvEXT ] ---
public static void nglVertexAttribL1dvEXT(int index, long v) {
long __functionAddress = GL.getCapabilities().glVertexAttribL1dvEXT;
if ( CHECKS )
checkFunctionAddress(__functionAddress);
callPV(__functionAddress, index, v);
}
public static void glVertexAttribL1dvEXT(int index, DoubleBuffer v) {
nglVertexAttribL1dvEXT(index, memAddress(v));
}
// --- [ glVertexAttribL2dvEXT ] ---
public static void nglVertexAttribL2dvEXT(int index, long v) {
long __functionAddress = GL.getCapabilities().glVertexAttribL2dvEXT;
if ( CHECKS )
checkFunctionAddress(__functionAddress);
callPV(__functionAddress, index, v);
}
public static void glVertexAttribL2dvEXT(int index, DoubleBuffer v) {
nglVertexAttribL2dvEXT(index, memAddress(v));
}
// --- [ glVertexAttribL3dvEXT ] ---
public static void nglVertexAttribL3dvEXT(int index, long v) {
long __functionAddress = GL.getCapabilities().glVertexAttribL3dvEXT;
if ( CHECKS )
checkFunctionAddress(__functionAddress);
callPV(__functionAddress, index, v);
}
public static void glVertexAttribL3dvEXT(int index, DoubleBuffer v) {
nglVertexAttribL3dvEXT(index, memAddress(v));
}
// --- [ glVertexAttribL4dvEXT ] ---
public static void nglVertexAttribL4dvEXT(int index, long v) {
long __functionAddress = GL.getCapabilities().glVertexAttribL4dvEXT;
if ( CHECKS )
checkFunctionAddress(__functionAddress);
callPV(__functionAddress, index, v);
}
public static void glVertexAttribL4dvEXT(int index, DoubleBuffer v) {
nglVertexAttribL4dvEXT(index, memAddress(v));
}
// --- [ glVertexAttribLPointerEXT ] ---
public static void nglVertexAttribLPointerEXT(int index, int size, int type, int stride, long pointer) {
long __functionAddress = GL.getCapabilities().glVertexAttribLPointerEXT;
if ( CHECKS )
checkFunctionAddress(__functionAddress);
callPV(__functionAddress, index, size, type, stride, pointer);
}
public static void glVertexAttribLPointerEXT(int index, int size, int type, int stride, ByteBuffer pointer) {
nglVertexAttribLPointerEXT(index, size, type, stride, memAddress(pointer));
}
public static void glVertexAttribLPointerEXT(int index, int size, int type, int stride, long pointer) {
nglVertexAttribLPointerEXT(index, size, type, stride, pointer);
}
public static void glVertexAttribLPointerEXT(int index, int size, int stride, DoubleBuffer pointer) {
nglVertexAttribLPointerEXT(index, size, GL11.GL_DOUBLE, stride, memAddress(pointer));
}
// --- [ glGetVertexAttribLdvEXT ] ---
public static void nglGetVertexAttribLdvEXT(int index, int pname, long params) {
long __functionAddress = GL.getCapabilities().glGetVertexAttribLdvEXT;
if ( CHECKS )
checkFunctionAddress(__functionAddress);
callPV(__functionAddress, index, pname, params);
}
public static void glGetVertexAttribLdvEXT(int index, int pname, DoubleBuffer params) {
nglGetVertexAttribLdvEXT(index, pname, memAddress(params));
}
// --- [ glVertexArrayVertexAttribLOffsetEXT ] ---
public static void glVertexArrayVertexAttribLOffsetEXT(int vaobj, int buffer, int index, int size, int type, int stride, long offset) {
long __functionAddress = GL.getCapabilities().glVertexArrayVertexAttribLOffsetEXT;
if ( CHECKS )
checkFunctionAddress(__functionAddress);
callPV(__functionAddress, vaobj, buffer, index, size, type, stride, offset);
}
/** Array version of: {@link #glVertexAttribL1dvEXT VertexAttribL1dvEXT} */
public static void glVertexAttribL1dvEXT(int index, double[] v) {
long __functionAddress = GL.getCapabilities().glVertexAttribL1dvEXT;
if ( CHECKS )
checkFunctionAddress(__functionAddress);
callPV(__functionAddress, index, v);
}
/** Array version of: {@link #glVertexAttribL2dvEXT VertexAttribL2dvEXT} */
public static void glVertexAttribL2dvEXT(int index, double[] v) {
long __functionAddress = GL.getCapabilities().glVertexAttribL2dvEXT;
if ( CHECKS )
checkFunctionAddress(__functionAddress);
callPV(__functionAddress, index, v);
}
/** Array version of: {@link #glVertexAttribL3dvEXT VertexAttribL3dvEXT} */
public static void glVertexAttribL3dvEXT(int index, double[] v) {
long __functionAddress = GL.getCapabilities().glVertexAttribL3dvEXT;
if ( CHECKS )
checkFunctionAddress(__functionAddress);
callPV(__functionAddress, index, v);
}
/** Array version of: {@link #glVertexAttribL4dvEXT VertexAttribL4dvEXT} */
public static void glVertexAttribL4dvEXT(int index, double[] v) {
long __functionAddress = GL.getCapabilities().glVertexAttribL4dvEXT;
if ( CHECKS )
checkFunctionAddress(__functionAddress);
callPV(__functionAddress, index, v);
}
/** Array version of: {@link #glGetVertexAttribLdvEXT GetVertexAttribLdvEXT} */
public static void glGetVertexAttribLdvEXT(int index, int pname, double[] params) {
long __functionAddress = GL.getCapabilities().glGetVertexAttribLdvEXT;
if ( CHECKS )
checkFunctionAddress(__functionAddress);
callPV(__functionAddress, index, pname, params);
}
}
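// Illustrative usage (not part of the generated binding) — a rough sketch, assuming a
// current OpenGL context whose capabilities report EXT_vertex_attrib_64bit:
//
//   // specify a dvec4 attribute directly
//   EXTVertexAttrib64bit.glVertexAttribL4dEXT(attribIndex, 1.0, 2.0, 3.0, 4.0);
//   // or point attribute `attribIndex` at a tightly packed buffer of doubles
//   EXTVertexAttrib64bit.glVertexAttribLPointerEXT(attribIndex, 4, 0, doubleBuffer);
//
// Here attribIndex and doubleBuffer are placeholders for an attribute location and a
// java.nio.DoubleBuffer supplied by the caller.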
|
apache-2.0
|
ONLYOFFICE/CommunityServer
|
common/ASC.Data.Backup/Tasks/KeyHelper.cs
|
2148
|
/*
*
* (c) Copyright Ascensio System Limited 2010-2021
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
using System.IO;
using ASC.Data.Backup.Tasks.Modules;
namespace ASC.Data.Backup.Tasks
{
internal static class KeyHelper
{
private const string Databases = "databases";
public static string GetDumpKey()
{
return "dump";
}
public static string GetDatabaseSchema()
{
return string.Format("{0}/{1}", Databases, "scheme");
}
public static string GetDatabaseData()
{
return string.Format("{0}/{1}", Databases, "data");
}
public static string GetDatabaseSchema(string table)
{
return string.Format("{0}/{1}", GetDatabaseSchema(), table);
}
public static string GetDatabaseData(string table)
{
return string.Format("{0}/{1}", GetDatabaseData(), table);
}
public static string GetTableZipKey(IModuleSpecifics module, string tableName)
{
return string.Format("{0}/{1}/{2}", Databases, module.ConnectionStringName, tableName);
}
public static string GetZipKey(this BackupFileInfo file)
{
return Path.Combine(file.Module, file.Domain, file.Path);
}
public static string GetStorage()
{
return "storage";
}
public static string GetStorageRestoreInfoZipKey()
{
return string.Format("{0}/restore_info", GetStorage());
}
}
}
|
apache-2.0
|
jxdong1013/archivems
|
pc/ArchiveMS/UILibrary/CheckBoxComboBox/CheckBoxComboBox.Designer.cs
|
1050
|
namespace UILibrary.CheckBoxComboBox
{
partial class CheckBoxComboBox
{
/// <summary>
/// Required designer variable.
/// </summary>
private System.ComponentModel.IContainer components = null;
/// <summary>
/// Clean up any resources being used.
/// </summary>
/// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
protected override void Dispose(bool disposing)
{
if (disposing && (components != null))
{
components.Dispose();
}
base.Dispose(disposing);
}
#region Component Designer generated code
/// <summary>
/// Required method for Designer support - do not modify
/// the contents of this method with the code editor.
/// </summary>
private void InitializeComponent()
{
components = new System.ComponentModel.Container();
}
#endregion
}
}
|
apache-2.0
|
Mangesh21/Popular-Movies
|
app/src/main/java/movies/udacity/com/popularmovies/network/MovieDetail.java
|
9460
|
package movies.udacity.com.popularmovies.network;
import android.os.Parcel;
import android.os.Parcelable;
/**
* Created by mangesh on 21/2/16.
*/
public class MovieDetail implements Parcelable {
public Long _id;
private String poster_path;
private Boolean adult;
private String overview;
private String release_date;
// private List<Integer> genreIds = new ArrayList<Integer>();
private Integer id;
private String originalTitle;
private String originalLanguage;
private String title;
private String backdrop_path;
private Double popularity;
private Integer vote_count;
private Boolean video;
private Double vote_average;
public String getReviewsJSON() {
return reviewsJSON;
}
public void setReviewsJSON(String reviewsJSON) {
this.reviewsJSON = reviewsJSON;
}
private String reviewsJSON;
private String movieTrailerOneID = null;
public String getMovieTrailerOneID() {
return movieTrailerOneID;
}
public void setMovieTrailerOneID(String movieTrailerOneID) {
this.movieTrailerOneID = movieTrailerOneID;
}
public String getMovieTrailerTwoID() {
return movieTrailerTwoID;
}
public void setMovieTrailerTwoID(String movieTrailerTwoID) {
this.movieTrailerTwoID = movieTrailerTwoID;
}
private String movieTrailerTwoID = null;
public boolean isOfflineData() {
return isOfflineData != 0;
}
public void setOfflineData(int offlineData) {
isOfflineData = offlineData;
}
private int isOfflineData = 0;
public boolean isSavedInstanceData() {
return isSavedInstanceData != 0;
}
public void setSavedInstanceData(int savedInstanceData) {
isSavedInstanceData = savedInstanceData;
}
private int isSavedInstanceData = 0;
public MovieDetail(int id, String title, String poster_path, Double vote_Average, String release_date, String overview, String reviewsJSON,
String movieTrailerOneID, String movieTrailerTwoID, int isOfflineData) {
this.id = id;
this.title = title;
this.poster_path = poster_path;
this.vote_average = vote_Average;
this.release_date = release_date;
this.overview = overview;
this.reviewsJSON = reviewsJSON;
this.movieTrailerOneID = movieTrailerOneID;
this.movieTrailerTwoID = movieTrailerTwoID;
this.isOfflineData = isOfflineData;
}
/**
* @return The posterPath
*/
public String getPosterPath() {
return poster_path;
}
/**
* @param posterPath The poster_path
*/
public void setPosterPath(String posterPath) {
this.poster_path = posterPath;
}
/**
* @return The adult
*/
public Boolean getAdult() {
return adult;
}
/**
* @param adult The adult
*/
public void setAdult(Boolean adult) {
this.adult = adult;
}
/**
* @return The overview
*/
public String getOverview() {
return overview;
}
/**
* @param overview The overview
*/
public void setOverview(String overview) {
this.overview = overview;
}
/**
* @return The releaseDate
*/
public String getReleaseDate() {
return release_date;
}
/**
* @param releaseDate The release_date
*/
public void setReleaseDate(String releaseDate) {
this.release_date = releaseDate;
}
/* *//**
* @return The genreIds
*//*
public List<Integer> getGenreIds() {
return genreIds;
}
*//**
* @param genreIds The genre_ids
*//*
public void setGenreIds(List<Integer> genreIds) {
this.genreIds = genreIds;
}*/
/**
* @return The id
*/
public Integer getId() {
return id;
}
/**
* @param id The id
*/
public void setId(Integer id) {
this.id = id;
}
/**
* @return The originalTitle
*/
public String getOriginalTitle() {
return originalTitle;
}
/**
* @param originalTitle The original_title
*/
public void setOriginalTitle(String originalTitle) {
this.originalTitle = originalTitle;
}
/**
* @return The originalLanguage
*/
public String getOriginalLanguage() {
return originalLanguage;
}
/**
* @param originalLanguage The original_language
*/
public void setOriginalLanguage(String originalLanguage) {
this.originalLanguage = originalLanguage;
}
/**
* @return The title
*/
public String getTitle() {
return title;
}
/**
* @param title The title
*/
public void setTitle(String title) {
this.title = title;
}
/**
* @return The backdropPath
*/
public String getBackdropPath() {
return backdrop_path;
}
/**
* @param backdropPath The backdrop_path
*/
public void setBackdropPath(String backdropPath) {
this.backdrop_path = backdropPath;
}
/**
* @return The popularity
*/
public Double getPopularity() {
return popularity;
}
/**
* @param popularity The popularity
*/
public void setPopularity(Double popularity) {
this.popularity = popularity;
}
/**
* @return The voteCount
*/
public Integer getVoteCount() {
return vote_count;
}
/**
* @param voteCount The vote_count
*/
public void setVoteCount(Integer voteCount) {
this.vote_count = voteCount;
}
/**
* @return The video
*/
public Boolean getVideo() {
return video;
}
/**
* @param video The video
*/
public void setVideo(Boolean video) {
this.video = video;
}
/**
* @return The voteAverage
*/
public Double getVoteAverage() {
return vote_average;
}
/**
* @param voteAverage The vote_average
*/
public void setVoteAverage(Double voteAverage) {
this.vote_average = voteAverage;
}
@Override
public String toString() {
return "MovieDetail{" +
"adult=" + adult +
", posterPath='" + poster_path + '\'' +
", overview='" + overview + '\'' +
", releaseDate='" + release_date + '\'' +
// ", genreIds=" + genreIds +
", id=" + id +
", originalTitle='" + originalTitle + '\'' +
", originalLanguage='" + originalLanguage + '\'' +
", title='" + title + '\'' +
", backdropPath='" + backdrop_path + '\'' +
", popularity=" + popularity +
", voteCount=" + vote_count +
", video=" + video +
", voteAverage=" + vote_average +
'}';
}
public MovieDetail() {
}
@Override
public int describeContents() {
return 0;
}
@Override
public void writeToParcel(Parcel dest, int flags) {
dest.writeValue(this._id);
dest.writeString(this.poster_path);
dest.writeValue(this.adult);
dest.writeString(this.overview);
dest.writeString(this.release_date);
dest.writeValue(this.id);
dest.writeString(this.originalTitle);
dest.writeString(this.originalLanguage);
dest.writeString(this.title);
dest.writeString(this.backdrop_path);
dest.writeValue(this.popularity);
dest.writeValue(this.vote_count);
dest.writeValue(this.video);
dest.writeValue(this.vote_average);
dest.writeString(this.reviewsJSON);
dest.writeString(this.movieTrailerOneID);
dest.writeString(this.movieTrailerTwoID);
dest.writeInt(this.isOfflineData);
dest.writeInt(this.isSavedInstanceData);
}
protected MovieDetail(Parcel in) {
this._id = (Long) in.readValue(Long.class.getClassLoader());
this.poster_path = in.readString();
this.adult = (Boolean) in.readValue(Boolean.class.getClassLoader());
this.overview = in.readString();
this.release_date = in.readString();
this.id = (Integer) in.readValue(Integer.class.getClassLoader());
this.originalTitle = in.readString();
this.originalLanguage = in.readString();
this.title = in.readString();
this.backdrop_path = in.readString();
this.popularity = (Double) in.readValue(Double.class.getClassLoader());
this.vote_count = (Integer) in.readValue(Integer.class.getClassLoader());
this.video = (Boolean) in.readValue(Boolean.class.getClassLoader());
this.vote_average = (Double) in.readValue(Double.class.getClassLoader());
this.reviewsJSON = in.readString();
this.movieTrailerOneID = in.readString();
this.movieTrailerTwoID = in.readString();
this.isOfflineData = in.readInt();
this.isSavedInstanceData = in.readInt();
}
public static final Creator<MovieDetail> CREATOR = new Creator<MovieDetail>() {
@Override
public MovieDetail createFromParcel(Parcel source) {
return new MovieDetail(source);
}
@Override
public MovieDetail[] newArray(int size) {
return new MovieDetail[size];
}
};
}
|
apache-2.0
|
hmrc/ct-calculations
|
src/main/scala/uk/gov/hmrc/ct/accounts/frs102/boxes/AC127C.scala
|
1568
|
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.accounts.frs102.boxes
import uk.gov.hmrc.ct.accounts.frs102.calculations.BalanceSheetTangibleAssetsCalculator
import uk.gov.hmrc.ct.accounts.frs102.retriever.{Frs102AccountsBoxRetriever, FullAccountsBoxRetriever}
import uk.gov.hmrc.ct.box._
case class AC127C(value: Option[Int]) extends CtBoxIdentifier(name = "Tangible assets - Motor vehicles - cost - at POA END")
with CtOptionalInteger
with ValidatableBox[Frs102AccountsBoxRetriever]
with Validators {
override def validate(boxRetriever: Frs102AccountsBoxRetriever): Set[CtValidation] = {
collectErrors(
validateMoney(value)
)
}
}
object AC127C extends Calculated[AC127C, FullAccountsBoxRetriever] with BalanceSheetTangibleAssetsCalculator {
override def calculate(boxRetriever: FullAccountsBoxRetriever): AC127C = {
import boxRetriever._
calculateAC127C(
ac124C(),
ac125C(),
ac126C(),
ac212C(),
ac213C()
)
}
}
|
apache-2.0
|
CharlesGarrocho/MCC
|
computacao_movel/DTN/server.py
|
2035
|
import os
import json
import time
import socket
from threading import Thread
IPS = []
def trata_cliente(conexao, endereco):
requisicao = conexao.recv(1024)
print requisicao
if requisicao == 'LIST':
arqs = os.listdir('/etc/black/garagem/arquivos/')
conexao.send(json.dumps(arqs))
elif requisicao == 'GET':
arqs = os.listdir('/etc/black/garagem/arquivos/')
arquivo = conexao.recv(1024)
if arquivo in arqs:
fp = open('/etc/black/garagem/arquivos/{0}'.format(arquivo), 'r')
strng = fp.read(1024)
while strng:
conexao.send(strng)
strng = fp.read(1024)
elif requisicao == 'PUT':
conexao.send('OK')
arqs = os.listdir('/etc/black/garagem/arquivos/')
arquivo = conexao.recv(1024)
print arquivo
print arqs
if arquivo not in arqs:
conexao.send('TRUE')
arq = open('/etc/black/garagem/arquivos/{0}'.format(arquivo), 'w')
while 1:
dados = conexao.recv(1024)
if not dados:
break
arq.write(dados)
arq.close()
else:
conexao.send('FALSE')
conexao.close()
def loop_servidor():
soquete = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
soquete.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
soquete.bind(('127.0.0.1', 5555))
soquete.listen(10)
global IPS
# Wait here for new connections.
while True:
# For each new connection, a new thread is started to handle its requests.
conexao = soquete.accept()
novaConexao = []
novaConexao.append(conexao[0])
novaConexao.append(conexao[1])
if conexao[1] not in IPS:
IPS.append(conexao[1])
Thread(target=trata_cliente, args=(novaConexao)).start()
if __name__ == '__main__':
print 'Servidor de Arquivos Iniciou na Porta 5555...'
Thread(target=loop_servidor).start()
|
apache-2.0
|
aparod/jonix
|
jonix-common/src/main/java/com/tectonica/jonix/codelist/RightsTypes.java
|
1627
|
/*
* Copyright (C) 2012 Zach Melamed
*
* Latest version available online at https://github.com/zach-m/jonix
* Contact me at zach@tectonica.co.il
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.tectonica.jonix.codelist;
/*
* NOTE: THIS IS AN AUTO-GENERATED FILE, DON'T EDIT MANUALLY
*/
/**
* <code>Enum</code> that corresponds to ONIX <b>Codelist 219</b>
* <p>
* Description: Rights type code
*
* @see <a href="http://www.editeur.org/14/code-lists">ONIX Codelists</a>
*/
public enum RightsTypes
{
/**
* Text or image copyright (normally indicated by the © symbol).
*/
Copyright("C"), //
/**
* Phonogram copyright or neighbouring right (normally indicated by the ℗ symbol).
*/
Phonogram_right("P"), //
/**
* Sui generis database right.
*/
Database_right("D");
public final String value;
private RightsTypes(String value)
{
this.value = value;
}
public static RightsTypes byValue(String value)
{
if (value == null || value.isEmpty())
return null;
for (RightsTypes e : values())
if (e.value.equals(value))
return e;
return null;
}
}
|
apache-2.0
|
OADA/oada-srvc-docker
|
oada/services/auth/domains/localhost/config.js
|
1033
|
module.exports = {
domain: 'localhost', // just here for informational purposes
baseuri: 'https://localhost/', // just here for informational purposes
logo: 'logo.png',
name: 'Open Ag Data Alliance',
tagline: '',
color: '#FFFFFF',
hint: {
username: 'frank',
password: 'test',
},
idService: {
shortname: 'OADA',
longname: 'Open Ag Data Alliance',
},
// To generate keys:
// 1: create key pair: openssl genrsa -out private_key.pem 2048
// 2: extract public key: openssl rsa -pubout -in private_key.pem -out public_key.pem
keys: {
public: 'public_key.pem',
private: {
// Use the first (and only) key in software statement:
kid: require('./unsigned_software_statement').jwks.keys[0].kid,
// Read the private key from the private key file:
pem: require('fs').readFileSync(__dirname + '/private_key.pem'),
},
},
unsigned_software_statement: require('./unsigned_software_statement.js'),
software_statement: require('./signed_software_statement.js'),
};
|
apache-2.0
|
google/pyaedj
|
platform/common/rest.py
|
2760
|
#
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""REST."""
__author__ = 'Pavel Simakov (psimakov@google.com)'
import datetime
from common import dao
import config
from django import http
from django import template
from django.views import generic
from django.views.decorators import csrf
ALLOWED_REST_APP_IDS = set([config.ENV.project_id])
# XSSI protection JSON prefix.
_JSON_XSSI_PREFIX = ")]}'\n"
def _add_xssi_prefix(s):
"""Adds the XSSI prefix to the given string."""
return '%s%s' % (_JSON_XSSI_PREFIX, s)
def _strip_xssi_prefix(s):
"""Strips the XSSI prefix (if any) from the given string."""
return s.lstrip(_JSON_XSSI_PREFIX)
class BaseRestHandler(generic.View):
"""Base REST handler class."""
@classmethod
def render_response(cls, status_code, status_message, payload):
"""Generic method for returning a JSON response."""
data = dao.entity_to_json({
'payload': payload if payload is not None else {},
'status_code': status_code,
'status_message': status_message,
})
content = template.Template(
'{{ xssi_prefix|safe }}{{ json|safe }}'
).render(template.Context({
'json': data,
'xssi_prefix': _JSON_XSSI_PREFIX,
}))
response = http.HttpResponse()
response.status_code = status_code
response['Content-Type'] = 'text/plain; charset=utf-8'
response.write(content)
return response
class TrustedCallerRestHandler(BaseRestHandler):
"""Handler that allows requests only from specific App Engine projects."""
@csrf.csrf_exempt
def dispatch(self, request, *args, **kwargs):
inbound_appid = request.META.get('HTTP_X_APPENGINE_INBOUND_APPID', None)
if inbound_appid and inbound_appid not in ALLOWED_REST_APP_IDS:
code = 403
payload = {'error_code': code}
error_message = 'Access denied.'
return self.render_response(code, error_message, payload)
return super(TrustedCallerRestHandler, self).dispatch(
request, *args, **kwargs)
class PingHandler(TrustedCallerRestHandler):
"""Simple REST ping."""
def get(self, request):
return self.render_response(200, 'Success', {
'server_time': datetime.datetime.now()
})
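# Illustrative only (not part of the original module): a minimal sketch of wiring
# PingHandler into a Django urlconf, assuming a conventional urls.py and that this
# module is importable as common.rest:
#
#   from django.urls import path
#   from common import rest
#
#   urlpatterns = [path('rest/ping', rest.PingHandler.as_view())]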
|
apache-2.0
|
googleapis/java-compute
|
proto-google-cloud-compute-v1/src/main/java/com/google/cloud/compute/v1/RegionInstanceGroupManagerUpdateInstanceConfigReq.java
|
36206
|
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/compute/v1/compute.proto
package com.google.cloud.compute.v1;
/**
*
*
* <pre>
* RegionInstanceGroupManagers.updatePerInstanceConfigs
* </pre>
*
* Protobuf type {@code google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq}
*/
public final class RegionInstanceGroupManagerUpdateInstanceConfigReq
extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq)
RegionInstanceGroupManagerUpdateInstanceConfigReqOrBuilder {
private static final long serialVersionUID = 0L;
// Use RegionInstanceGroupManagerUpdateInstanceConfigReq.newBuilder() to construct.
private RegionInstanceGroupManagerUpdateInstanceConfigReq(
com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private RegionInstanceGroupManagerUpdateInstanceConfigReq() {
perInstanceConfigs_ = java.util.Collections.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new RegionInstanceGroupManagerUpdateInstanceConfigReq();
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
return this.unknownFields;
}
private RegionInstanceGroupManagerUpdateInstanceConfigReq(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case -84847286:
{
if (!((mutable_bitField0_ & 0x00000001) != 0)) {
perInstanceConfigs_ =
new java.util.ArrayList<com.google.cloud.compute.v1.PerInstanceConfig>();
mutable_bitField0_ |= 0x00000001;
}
perInstanceConfigs_.add(
input.readMessage(
com.google.cloud.compute.v1.PerInstanceConfig.parser(), extensionRegistry));
break;
}
default:
{
if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
} finally {
if (((mutable_bitField0_ & 0x00000001) != 0)) {
perInstanceConfigs_ = java.util.Collections.unmodifiableList(perInstanceConfigs_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_RegionInstanceGroupManagerUpdateInstanceConfigReq_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_RegionInstanceGroupManagerUpdateInstanceConfigReq_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq.class,
com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq.Builder
.class);
}
public static final int PER_INSTANCE_CONFIGS_FIELD_NUMBER = 526265001;
private java.util.List<com.google.cloud.compute.v1.PerInstanceConfig> perInstanceConfigs_;
/**
*
*
* <pre>
* The list of per-instance configs to insert or patch on this managed instance group.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001;
* </code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.compute.v1.PerInstanceConfig> getPerInstanceConfigsList() {
return perInstanceConfigs_;
}
/**
*
*
* <pre>
* The list of per-instance configs to insert or patch on this managed instance group.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001;
* </code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.compute.v1.PerInstanceConfigOrBuilder>
getPerInstanceConfigsOrBuilderList() {
return perInstanceConfigs_;
}
/**
*
*
* <pre>
* The list of per-instance configs to insert or patch on this managed instance group.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001;
* </code>
*/
@java.lang.Override
public int getPerInstanceConfigsCount() {
return perInstanceConfigs_.size();
}
/**
*
*
* <pre>
* The list of per-instance configs to insert or patch on this managed instance group.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001;
* </code>
*/
@java.lang.Override
public com.google.cloud.compute.v1.PerInstanceConfig getPerInstanceConfigs(int index) {
return perInstanceConfigs_.get(index);
}
/**
*
*
* <pre>
* The list of per-instance configs to insert or patch on this managed instance group.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001;
* </code>
*/
@java.lang.Override
public com.google.cloud.compute.v1.PerInstanceConfigOrBuilder getPerInstanceConfigsOrBuilder(
int index) {
return perInstanceConfigs_.get(index);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < perInstanceConfigs_.size(); i++) {
output.writeMessage(526265001, perInstanceConfigs_.get(i));
}
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < perInstanceConfigs_.size(); i++) {
size +=
com.google.protobuf.CodedOutputStream.computeMessageSize(
526265001, perInstanceConfigs_.get(i));
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj
instanceof com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq)) {
return super.equals(obj);
}
com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq other =
(com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq) obj;
if (!getPerInstanceConfigsList().equals(other.getPerInstanceConfigsList())) return false;
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getPerInstanceConfigsCount() > 0) {
hash = (37 * hash) + PER_INSTANCE_CONFIGS_FIELD_NUMBER;
hash = (53 * hash) + getPerInstanceConfigsList().hashCode();
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq
parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq
parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq
parseFrom(com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq
parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq
parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq
parseFrom(byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq
parseFrom(java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq
parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq
parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq
parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq
parseFrom(com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq
parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
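  // Usage sketch (illustrative, not part of the generated file): building a request
  // that carries a single per-instance config. A real config would be populated via
  // PerInstanceConfig.newBuilder(); the default instance keeps the sketch minimal.
  //
  //   RegionInstanceGroupManagerUpdateInstanceConfigReq req =
  //       RegionInstanceGroupManagerUpdateInstanceConfigReq.newBuilder()
  //           .addPerInstanceConfigs(PerInstanceConfig.getDefaultInstance())
  //           .build();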
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* RegionInstanceGroupManagers.updatePerInstanceConfigs
* </pre>
*
* Protobuf type {@code google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq)
com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReqOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_RegionInstanceGroupManagerUpdateInstanceConfigReq_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_RegionInstanceGroupManagerUpdateInstanceConfigReq_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq.class,
com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq.Builder
.class);
}
// Construct using
// com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getPerInstanceConfigsFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
if (perInstanceConfigsBuilder_ == null) {
perInstanceConfigs_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
} else {
perInstanceConfigsBuilder_.clear();
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_RegionInstanceGroupManagerUpdateInstanceConfigReq_descriptor;
}
@java.lang.Override
public com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq
getDefaultInstanceForType() {
return com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq
.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq build() {
com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq result =
buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq
buildPartial() {
com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq result =
new com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq(this);
int from_bitField0_ = bitField0_;
if (perInstanceConfigsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
perInstanceConfigs_ = java.util.Collections.unmodifiableList(perInstanceConfigs_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.perInstanceConfigs_ = perInstanceConfigs_;
} else {
result.perInstanceConfigs_ = perInstanceConfigsBuilder_.build();
}
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other
instanceof
com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq) {
return mergeFrom(
(com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(
com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq other) {
if (other
== com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq
.getDefaultInstance()) return this;
if (perInstanceConfigsBuilder_ == null) {
if (!other.perInstanceConfigs_.isEmpty()) {
if (perInstanceConfigs_.isEmpty()) {
perInstanceConfigs_ = other.perInstanceConfigs_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensurePerInstanceConfigsIsMutable();
perInstanceConfigs_.addAll(other.perInstanceConfigs_);
}
onChanged();
}
} else {
if (!other.perInstanceConfigs_.isEmpty()) {
if (perInstanceConfigsBuilder_.isEmpty()) {
perInstanceConfigsBuilder_.dispose();
perInstanceConfigsBuilder_ = null;
perInstanceConfigs_ = other.perInstanceConfigs_;
bitField0_ = (bitField0_ & ~0x00000001);
perInstanceConfigsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getPerInstanceConfigsFieldBuilder()
: null;
} else {
perInstanceConfigsBuilder_.addAllMessages(other.perInstanceConfigs_);
}
}
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq parsedMessage =
null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage =
(com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq)
e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
private java.util.List<com.google.cloud.compute.v1.PerInstanceConfig> perInstanceConfigs_ =
java.util.Collections.emptyList();
private void ensurePerInstanceConfigsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
perInstanceConfigs_ =
new java.util.ArrayList<com.google.cloud.compute.v1.PerInstanceConfig>(
perInstanceConfigs_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.compute.v1.PerInstanceConfig,
com.google.cloud.compute.v1.PerInstanceConfig.Builder,
com.google.cloud.compute.v1.PerInstanceConfigOrBuilder>
perInstanceConfigsBuilder_;
/**
*
*
* <pre>
* The list of per-instance configs to insert or patch on this managed instance group.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001;
* </code>
*/
public java.util.List<com.google.cloud.compute.v1.PerInstanceConfig>
getPerInstanceConfigsList() {
if (perInstanceConfigsBuilder_ == null) {
return java.util.Collections.unmodifiableList(perInstanceConfigs_);
} else {
return perInstanceConfigsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* The list of per-instance configs to insert or patch on this managed instance group.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001;
* </code>
*/
public int getPerInstanceConfigsCount() {
if (perInstanceConfigsBuilder_ == null) {
return perInstanceConfigs_.size();
} else {
return perInstanceConfigsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* The list of per-instance configs to insert or patch on this managed instance group.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001;
* </code>
*/
public com.google.cloud.compute.v1.PerInstanceConfig getPerInstanceConfigs(int index) {
if (perInstanceConfigsBuilder_ == null) {
return perInstanceConfigs_.get(index);
} else {
return perInstanceConfigsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* The list of per-instance configs to insert or patch on this managed instance group.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001;
* </code>
*/
public Builder setPerInstanceConfigs(
int index, com.google.cloud.compute.v1.PerInstanceConfig value) {
if (perInstanceConfigsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensurePerInstanceConfigsIsMutable();
perInstanceConfigs_.set(index, value);
onChanged();
} else {
perInstanceConfigsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The list of per-instance configs to insert or patch on this managed instance group.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001;
* </code>
*/
public Builder setPerInstanceConfigs(
int index, com.google.cloud.compute.v1.PerInstanceConfig.Builder builderForValue) {
if (perInstanceConfigsBuilder_ == null) {
ensurePerInstanceConfigsIsMutable();
perInstanceConfigs_.set(index, builderForValue.build());
onChanged();
} else {
perInstanceConfigsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of per-instance configs to insert or patch on this managed instance group.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001;
* </code>
*/
public Builder addPerInstanceConfigs(com.google.cloud.compute.v1.PerInstanceConfig value) {
if (perInstanceConfigsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensurePerInstanceConfigsIsMutable();
perInstanceConfigs_.add(value);
onChanged();
} else {
perInstanceConfigsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* The list of per-instance configs to insert or patch on this managed instance group.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001;
* </code>
*/
public Builder addPerInstanceConfigs(
int index, com.google.cloud.compute.v1.PerInstanceConfig value) {
if (perInstanceConfigsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensurePerInstanceConfigsIsMutable();
perInstanceConfigs_.add(index, value);
onChanged();
} else {
perInstanceConfigsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The list of per-instance configs to insert or patch on this managed instance group.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001;
* </code>
*/
public Builder addPerInstanceConfigs(
com.google.cloud.compute.v1.PerInstanceConfig.Builder builderForValue) {
if (perInstanceConfigsBuilder_ == null) {
ensurePerInstanceConfigsIsMutable();
perInstanceConfigs_.add(builderForValue.build());
onChanged();
} else {
perInstanceConfigsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of per-instance configs to insert or patch on this managed instance group.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001;
* </code>
*/
public Builder addPerInstanceConfigs(
int index, com.google.cloud.compute.v1.PerInstanceConfig.Builder builderForValue) {
if (perInstanceConfigsBuilder_ == null) {
ensurePerInstanceConfigsIsMutable();
perInstanceConfigs_.add(index, builderForValue.build());
onChanged();
} else {
perInstanceConfigsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of per-instance configs to insert or patch on this managed instance group.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001;
* </code>
*/
public Builder addAllPerInstanceConfigs(
java.lang.Iterable<? extends com.google.cloud.compute.v1.PerInstanceConfig> values) {
if (perInstanceConfigsBuilder_ == null) {
ensurePerInstanceConfigsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, perInstanceConfigs_);
onChanged();
} else {
perInstanceConfigsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* The list of per-instance configs to insert or patch on this managed instance group.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001;
* </code>
*/
public Builder clearPerInstanceConfigs() {
if (perInstanceConfigsBuilder_ == null) {
perInstanceConfigs_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
perInstanceConfigsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* The list of per-instance configs to insert or patch on this managed instance group.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001;
* </code>
*/
public Builder removePerInstanceConfigs(int index) {
if (perInstanceConfigsBuilder_ == null) {
ensurePerInstanceConfigsIsMutable();
perInstanceConfigs_.remove(index);
onChanged();
} else {
perInstanceConfigsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* The list of per-instance configs to insert or patch on this managed instance group.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001;
* </code>
*/
public com.google.cloud.compute.v1.PerInstanceConfig.Builder getPerInstanceConfigsBuilder(
int index) {
return getPerInstanceConfigsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* The list of per-instance configs to insert or patch on this managed instance group.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001;
* </code>
*/
public com.google.cloud.compute.v1.PerInstanceConfigOrBuilder getPerInstanceConfigsOrBuilder(
int index) {
if (perInstanceConfigsBuilder_ == null) {
return perInstanceConfigs_.get(index);
} else {
return perInstanceConfigsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* The list of per-instance configs to insert or patch on this managed instance group.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001;
* </code>
*/
public java.util.List<? extends com.google.cloud.compute.v1.PerInstanceConfigOrBuilder>
getPerInstanceConfigsOrBuilderList() {
if (perInstanceConfigsBuilder_ != null) {
return perInstanceConfigsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(perInstanceConfigs_);
}
}
/**
*
*
* <pre>
* The list of per-instance configs to insert or patch on this managed instance group.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001;
* </code>
*/
public com.google.cloud.compute.v1.PerInstanceConfig.Builder addPerInstanceConfigsBuilder() {
return getPerInstanceConfigsFieldBuilder()
.addBuilder(com.google.cloud.compute.v1.PerInstanceConfig.getDefaultInstance());
}
/**
*
*
* <pre>
* The list of per-instance configs to insert or patch on this managed instance group.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001;
* </code>
*/
public com.google.cloud.compute.v1.PerInstanceConfig.Builder addPerInstanceConfigsBuilder(
int index) {
return getPerInstanceConfigsFieldBuilder()
.addBuilder(index, com.google.cloud.compute.v1.PerInstanceConfig.getDefaultInstance());
}
/**
*
*
* <pre>
* The list of per-instance configs to insert or patch on this managed instance group.
* </pre>
*
* <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001;
* </code>
*/
public java.util.List<com.google.cloud.compute.v1.PerInstanceConfig.Builder>
getPerInstanceConfigsBuilderList() {
return getPerInstanceConfigsFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.compute.v1.PerInstanceConfig,
com.google.cloud.compute.v1.PerInstanceConfig.Builder,
com.google.cloud.compute.v1.PerInstanceConfigOrBuilder>
getPerInstanceConfigsFieldBuilder() {
if (perInstanceConfigsBuilder_ == null) {
perInstanceConfigsBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.compute.v1.PerInstanceConfig,
com.google.cloud.compute.v1.PerInstanceConfig.Builder,
com.google.cloud.compute.v1.PerInstanceConfigOrBuilder>(
perInstanceConfigs_,
((bitField0_ & 0x00000001) != 0),
getParentForChildren(),
isClean());
perInstanceConfigs_ = null;
}
return perInstanceConfigsBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq)
}
// @@protoc_insertion_point(class_scope:google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq)
private static final com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE =
new com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq();
}
public static com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq
getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<RegionInstanceGroupManagerUpdateInstanceConfigReq>
PARSER =
new com.google.protobuf.AbstractParser<
RegionInstanceGroupManagerUpdateInstanceConfigReq>() {
@java.lang.Override
public RegionInstanceGroupManagerUpdateInstanceConfigReq parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new RegionInstanceGroupManagerUpdateInstanceConfigReq(
input, extensionRegistry);
}
};
public static com.google.protobuf.Parser<RegionInstanceGroupManagerUpdateInstanceConfigReq>
parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<RegionInstanceGroupManagerUpdateInstanceConfigReq>
getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.compute.v1.RegionInstanceGroupManagerUpdateInstanceConfigReq
getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
apache-2.0
|
linkedin/parseq
|
subprojects/parseq/src/test/java/com/linkedin/parseq/TestEngine.java
|
9352
|
/*
* Copyright 2012 LinkedIn, Inc
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.linkedin.parseq;
import com.linkedin.parseq.promise.Promise;
import com.linkedin.parseq.promise.Promises;
import com.linkedin.parseq.promise.SettablePromise;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import static com.linkedin.parseq.TestUtil.withDisabledLogging;
import static org.testng.AssertJUnit.assertEquals;
import static org.testng.AssertJUnit.assertFalse;
import static org.testng.AssertJUnit.assertTrue;
/**
* @author Chris Pettitt
*/
public class TestEngine {
private Engine _engine;
private ScheduledExecutorService _scheduler;
@BeforeMethod
public void setUp() throws Exception {
final int numCores = Runtime.getRuntime().availableProcessors();
_scheduler = Executors.newScheduledThreadPool(numCores + 1);
_engine = new EngineBuilder().setTaskExecutor(_scheduler).setTimerScheduler(_scheduler).build();
}
@AfterMethod
public void tearDown() throws Exception {
_engine.shutdown();
_engine.awaitTermination(50, TimeUnit.MILLISECONDS);
_engine = null;
_scheduler.shutdownNow();
_scheduler = null;
}
@Test
public void testShutdownWithNoTasks() throws InterruptedException {
_engine.shutdown();
assertTrue(_engine.isShutdown());
assertTrue(_engine.awaitTermination(50, TimeUnit.MILLISECONDS));
assertTrue(_engine.isTerminated());
assertTrue(_engine.isShutdown());
}
@Test
public void testShutdownThenRunTask() throws InterruptedException {
_engine.shutdown();
final Task<String> task = Task.value("task executed");
_engine.run(task);
// Task should be cancelled immediately
assertTrue(task.await(50, TimeUnit.MILLISECONDS));
assertTrue(task.isFailed());
}
@Test
public void testShutdownWithRunningTask() throws InterruptedException {
final CountDownLatch finishLatch = new CountDownLatch(1);
final String taskValue = "task executed";
final Task<String> task = new BaseTask<String>() {
@Override
protected Promise<? extends String> run(final Context context) throws Exception {
finishLatch.await();
return Promises.value(taskValue);
}
};
_engine.run(task);
_engine.shutdown();
// shutdown should not complete until after our task is done
assertFalse(_engine.awaitTermination(50, TimeUnit.MILLISECONDS));
assertTrue(_engine.isShutdown());
assertFalse(_engine.isTerminated());
finishLatch.countDown();
assertTrue(_engine.awaitTermination(50, TimeUnit.MILLISECONDS));
assertTrue(_engine.isShutdown());
assertTrue(_engine.isTerminated());
// Task should finish shortly
assertTrue(task.await(50, TimeUnit.MILLISECONDS));
assertEquals(taskValue, task.get());
}
@Test
public void testShutdownWithRunningAndSuccessorTask() throws InterruptedException {
final CountDownLatch finishLatch = new CountDownLatch(1);
final String predValue = "task executed";
final String sucValue = "task executed";
final Task<String> predTask = new BaseTask<String>() {
@Override
protected Promise<? extends String> run(final Context context) throws Exception {
finishLatch.await();
return Promises.value(predValue);
}
};
final Task<String> sucTask = Task.value(sucValue);
final Task<String> seq = predTask.andThen(sucTask);
_engine.run(seq);
_engine.shutdown();
// shutdown should not complete until after our task is done
assertFalse(_engine.awaitTermination(50, TimeUnit.MILLISECONDS));
assertTrue(_engine.isShutdown());
assertFalse(_engine.isTerminated());
finishLatch.countDown();
assertTrue(_engine.awaitTermination(50, TimeUnit.MILLISECONDS));
assertTrue(_engine.isShutdown());
assertTrue(_engine.isTerminated());
// Tasks should finish shortly
assertTrue(predTask.await(50, TimeUnit.MILLISECONDS));
assertEquals(predValue, predTask.get());
assertTrue(sucTask.await(50, TimeUnit.MILLISECONDS));
assertEquals(sucValue, sucTask.get());
}
@Test
public void testShutdownWithSideEffectTask() throws InterruptedException {
final CountDownLatch finishLatch = new CountDownLatch(1);
final String mainValue = "main task executed";
final String sideEffectValue = "side-effect task executed";
Task<String> sideEffect = Task.async(context -> {
finishLatch.await();
return Promises.value(sideEffectValue);
});
Task<String> task = Task.value(mainValue).withSideEffect(v -> sideEffect);
_engine.run(task);
_engine.shutdown();
assertFalse(_engine.awaitTermination(50, TimeUnit.MILLISECONDS));
assertTrue(_engine.isShutdown());
assertFalse(_engine.isTerminated());
finishLatch.countDown();
assertTrue(_engine.awaitTermination(50, TimeUnit.MILLISECONDS));
assertTrue(_engine.isShutdown());
assertTrue(_engine.isTerminated());
assertEquals(mainValue, task.get());
assertEquals(sideEffectValue, sideEffect.get());
}
@Test
public void testShutdownWithSideEffectTask2() throws InterruptedException {
final SettablePromise<String> sideEffectPromise = Promises.settable();
final String mainValue = "main task executed";
final String sideEffectValue = "side-effect task executed";
Task<String> sideEffect = Task.async(context -> sideEffectPromise);
Task<String> task = Task.value(mainValue).withSideEffect(v -> sideEffect);
_engine.run(task);
_engine.shutdown();
assertFalse(_engine.awaitTermination(50, TimeUnit.MILLISECONDS));
assertTrue(_engine.isShutdown());
assertFalse(_engine.isTerminated());
sideEffectPromise.done(sideEffectValue);
assertTrue(_engine.awaitTermination(50, TimeUnit.MILLISECONDS));
assertTrue(_engine.isShutdown());
assertTrue(_engine.isTerminated());
assertEquals(mainValue, task.get());
assertEquals(sideEffectValue, sideEffect.get());
}
@Test
public void testFailPlanExecution() throws InterruptedException {
    // This test ensures that if execution of a plan's serial executor loop
    // fails, e.g. in the case that the underlying executor is saturated, then
    // we fail the plan. To simplify this test, we construct our own executor
    // instead of using the default executor set up for the test.
final ExecutorService executorService = new ThreadPoolExecutor(1, 1, 0, TimeUnit.SECONDS,
new ArrayBlockingQueue<Runnable>(1), new ThreadPoolExecutor.AbortPolicy());
final ScheduledExecutorService scheduledExecutorService = Executors.newSingleThreadScheduledExecutor();
try {
final Engine engine =
new EngineBuilder().setTaskExecutor(executorService).setTimerScheduler(scheduledExecutorService).build();
// First we submit two tasks that will never finish. This saturates the
// underlying executor by using its only thread and saturating its
// single slot queue.
engine.run(neverEndingBlockingTask());
engine.run(neverEndingBlockingTask());
// Now we submit another task. The execution loop for this task will fail
// during submit to the underlying executor. We expect that it will be
// cancelled.
final Task<?> task = neverEndingBlockingTask();
withDisabledLogging(new Runnable() {
@Override
public void run() {
engine.run(task);
try {
assertTrue(task.await(5, TimeUnit.SECONDS));
} catch (InterruptedException e) {
// Ignore.
}
}
});
assertTrue(task.isFailed());
assertTrue(
"Expected underlying exception to be instance of RejectedExecutionException, but was: "
+ task.getError().getCause().getCause(),
task.getError().getCause().getCause() instanceof RejectedExecutionException);
engine.shutdown();
} finally {
scheduledExecutorService.shutdownNow();
executorService.shutdownNow();
}
}
/**
* A task that blocks forever when it is executed, tying up whatever thread
* executes it.
*/
private Task<?> neverEndingBlockingTask() {
return new BaseTask<Object>() {
@Override
protected Promise<?> run(Context context) throws Throwable {
new CountDownLatch(1).await();
return Promises.value("A value that should never be seen!");
}
};
}
}
|
apache-2.0
|
bjorndm/prebake
|
code/third_party/bdb/src/com/sleepycat/je/recovery/CheckpointStatDefinition.java
|
1905
|
/*-
* See the file LICENSE for redistribution information.
*
* Copyright (c) 2002-2010 Oracle. All rights reserved.
*
* $Id: CheckpointStatDefinition.java,v 1.5 2010/01/04 15:50:44 cwl Exp $
*/
package com.sleepycat.je.recovery;
import com.sleepycat.je.utilint.StatDefinition;
/**
* Per-stat Metadata for JE checkpointer statistics.
*/
public class CheckpointStatDefinition {
public static final String GROUP_NAME = "Checkpoints";
public static final String GROUP_DESC =
"Frequency and extent of checkpointing activity.";
public static final StatDefinition CKPT_CHECKPOINTS =
new StatDefinition("nCheckpoints",
"Total number of checkpints run so far.");
public static final StatDefinition CKPT_LAST_CKPTID =
new StatDefinition("lastCheckpointId", "Id of the last checkpoint.");
public static final StatDefinition CKPT_FULL_IN_FLUSH =
new StatDefinition("nFullINFlush",
"Accumulated number of full INs flushed to the "+
"log.");
public static final StatDefinition CKPT_FULL_BIN_FLUSH =
new StatDefinition("nFullBINFlush",
"Accumulated number of full BINs flushed to the " +
"log.");
public static final StatDefinition CKPT_DELTA_IN_FLUSH =
new StatDefinition("nDeltaINFlush",
"Accumulated number of Delta INs flushed to the " +
"log.");
public static final StatDefinition CKPT_LAST_CKPT_START =
new StatDefinition("lastCheckpointStart",
"Location in the log of the last checkpont start.");
public static final StatDefinition CKPT_LAST_CKPT_END =
new StatDefinition("lastCheckpointEnd",
"Location in the log of the last checkpoint end.");
}
|
apache-2.0
|
zhouronglv/myapp
|
myapp-demo/src/main/java/com/myapp/demo/algorithm/AlgorithmDemo.java
|
562
|
package com.myapp.demo.algorithm;
/**
* Created by zrl on 2017/3/6.
*/
public final class AlgorithmDemo {
    /**
     * Insertion sort: splits the array into an unsorted region and a sorted region, then
     * repeatedly takes the first element of the unsorted region and inserts it at its
     * ordered position in the sorted region, until all elements are in the sorted region.
     * @param a the array to sort in place
     */
    public static void insertSort(int[] a)
    {
        for (int i = 1; i < a.length; i++)
        {
            int key = a[i];  // first element of the unsorted region
            int j = i - 1;
            // Shift larger sorted elements one slot to the right, then drop the key in.
            while (j >= 0 && a[j] > key)
            {
                a[j + 1] = a[j];
                j--;
            }
            a[j + 1] = key;
        }
    }
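    // Illustrative call (not in the original file):
    //   int[] data = {5, 2, 4, 6, 1, 3};
    //   AlgorithmDemo.insertSort(data);   // data becomes {1, 2, 3, 4, 5, 6}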
public static void dd()
{
}
}
|
apache-2.0
|
fperignon/siconos
|
kernel/src/simulationTools/NewMarkAlphaOSI.hpp
|
8423
|
/* Siconos is a program dedicated to modeling, simulation and control
* of non smooth dynamical systems.
*
* Copyright 2018 INRIA.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*! \file
NewMark Alpha Scheme Time-Integrator for Dynamical Systems
*/
#ifndef NEWMARKALPHAOSI_H
#define NEWMARKALPHAOSI_H
#include "OneStepIntegrator.hpp"
/** NewMarkAlpha Scheme Time-Integrator for Dynamical Systems
*
*
 * NewMarkAlphaOSI is used to solve constrained dynamical systems represented by an index-3 DAE
*
* NewMarkAlphaOSI is instantiated with values of beta, gamma, alpha_m, alpha_f and the list of concerned
* dynamical systems. Each DynamicalSystem is associated to a SiconosMatrix named "W"
*
* W matrices are initialized and computed in initializeIterationMatrixW and computeW.
*/
class NewMarkAlphaOSI : public OneStepIntegrator
{
protected:
/** serialization hooks
*/
ACCEPT_SERIALIZATION(NewMarkAlphaOSI);
/** Parameters of the numerical scheme: beta, gamma, alpha_m, alpha_f */
double _beta, _gamma, _alpha_m, _alpha_f;
/** Order of the polynomial for dense output*/
unsigned int _orderDenseOutput;
/** Indicator whether or not constraints at the velocity level are handled
* _IsVelocityLevel = true: constraints at the velocity level are handled
* _IsVelocityLevel = false: constraints at the position are handled
*/
bool _IsVelocityLevel;
/**
* Default constructor
*/
NewMarkAlphaOSI() {};
public:
enum NewMarkAlphaOSI_ds_workVector_id {RESIDU_FREE, FREE, ACCE_LIKE,
ACCE_MEMORY, WORK_LENGTH};
enum NewMarkAlphaOSI_interaction_workVector_id{OSNSP_RHS, WORK_INTERACTION_LENGTH};
enum NewMarkAlphaOSI_workBlockVector_id{xfree, BLOCK_WORK_LENGTH};
enum NewMarkAlphaOSI_interaction_workMat_id{DENSE_OUTPUT_COEFFICIENTS, MAT_WORK_LENGTH};
/** constructor with only parameters beta, gamma, alpha_m, alpha_f
* \param beta double
* \param gamma double
* \param alpha_m double
* \param alpha_f double
   * \param flag true if working at velocity level
*/
NewMarkAlphaOSI(double beta, double gamma, double alpha_m, double alpha_f, bool flag);
/** constructor with only the parameter rho_infty
* \param rho_infty double
   * \param flag true if working at velocity level
*/
NewMarkAlphaOSI(double rho_infty, bool flag);
/** destructor
*/
virtual ~NewMarkAlphaOSI() {};
// --- GETTERS/SETTERS ---
/** set value to the parameter beta
* \param beta value of beta
*/
inline void setBeta(double beta)
{
_beta = beta;
};
/** set value to the parameter gamma
* \param value_gamma double : value of gamma
*/
inline void setGamma(double value_gamma)
{
_gamma = value_gamma;
};
/** set value to the parameter alpha_m
* \param value_alpha_m double : value of alpha_m
*/
inline void setAlpha_m(double value_alpha_m)
{
_alpha_m = value_alpha_m;
};
/** set value to the parameter alpha_f
* \param value_alpha_f double : value of alpha_f
*/
inline void setAlpha_f(double value_alpha_f)
{
_alpha_f = value_alpha_f;
};
/** set values to the parameters beta, gamma, alpha_f, alpha_m from the value of rho_infty
* \param rho_infty double : value of rho_infty
*/
inline void setParametersFromRho_infty(double rho_infty)
{
_alpha_m = (2 * rho_infty - 1) / (rho_infty + 1);
_alpha_f = rho_infty / (rho_infty + 1);
_gamma = 0.5 + _alpha_f - _alpha_m;
_beta = 0.25 * std::pow((_gamma + 0.5), 2);
};
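  // Worked example (illustrative, not part of the original header): for rho_infty = 0.9
  // the formulas above give, approximately,
  //   alpha_m = (2*0.9 - 1)/(0.9 + 1)  ~ 0.4211
  //   alpha_f =  0.9/(0.9 + 1)         ~ 0.4737
  //   gamma   =  0.5 + 0.4737 - 0.4211 ~ 0.5526
  //   beta    =  0.25*(0.5526 + 0.5)^2 ~ 0.2770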
/** get value of beta
* \return double
*/
inline double getBeta()
{
return _beta;
};
/** get value of gamma
* \return double
*/
inline double getGamma()
{
return _gamma;
};
/** get value of alpha_m
* \return double
*/
inline double getAlpha_m()
{
return _alpha_m;
};
/** get value of alpha_f
* \return double
*/
inline double getAlpha_f()
{
return _alpha_f;
};
/** get the order of the polynomial for dense output
* \return unsigned int
*/
inline unsigned int getOrderDenseOutput()
{
return _orderDenseOutput;
}
/** set the flag _IsVelocityLevel
* \param flag bool
*/
inline void setFlagVelocityLevel(bool flag)
{
_IsVelocityLevel = flag;
}
/** get the flag _IsVelocityLevel
* \return bool
*/
inline bool getFlagVelocityLevel()
{
return _IsVelocityLevel;
}
/** get matrix W
* \param ds SP::DynamicalSystem DynamicalSystem concerned
* \return SimpleMatrix
*/
const SimpleMatrix getW(SP::DynamicalSystem ds);
  /** get pointer to the matrix W
* \param ds SP::DynamicalSystem DynamicalSystem concerned
* \return SP::SimpleMatrix
*/
SP::SimpleMatrix W(SP::DynamicalSystem ds);
/** initialize W matrix
* \param ds a pointer to DynamicalSystem
*/
void initializeIterationMatrixW(SP::DynamicalSystem ds);
/** compute W matrix
* \param ds a pointer to DynamicalSystem
* \param W the result in W
*/
void computeW(SP::DynamicalSystem ds, SiconosMatrix& W);
  /** compute the residual of the dynamical equation
   * \return double: maximum residual over all DSs
   */
double computeResidu();
/** compute the free state of the discretized dynamical system */
void computeFreeState();
/** integrates the Interaction linked to this integrator, without taking non-smooth effects into account
* \param vertex_inter of the interaction graph
* \param osnsp pointer to OneStepNSProblem
*/
virtual void computeFreeOutput(InteractionsGraph::VDescriptor& vertex_inter,
OneStepNSProblem* osnsp);
/** initialize */
// void initialize(Model& m);
/** initialization of the work vectors and matrices (properties) related to
* one dynamical system on the graph and needed by the osi
* \param t time of initialization
* \param ds the dynamical system
*/
void initializeWorkVectorsForDS( double t, SP::DynamicalSystem ds);
/** initialization of the work vectors and matrices (properties) related to
* one interaction on the graph and needed by the osi
* \param inter the interaction
* \param interProp the properties on the graph
* \param DSG the dynamical systems graph
*/
void initializeWorkVectorsForInteraction(Interaction &inter,
InteractionProperties& interProp,
DynamicalSystemsGraph & DSG);
/** get the number of index sets required for the simulation
* \return unsigned int
*/
unsigned int numberOfIndexSets() const {return 3;};
/** prepare for Newton Iteration
* \param time
*/
void prepareNewtonIteration(double time);
/** predict first values for the Newton iteration */
void prediction();
/** correct state of all levels of Dynamical Systems after each Newton iteration
*/
void correction();
/** integrate the system, between tinit and tend with possible stop at tout
* \param tinit double: tinit, initial time
* \param tend double: tend, end time
* \param tout double: tout, real end time
* \param flag useless for NewMarkAlphaOSI
*/
void integrate(double& tinit, double& tend, double& tout, int& flag);
/** updates the state of the Dynamical Systems
* \param level the level of interest for the dynamics: not used at the time
*/
void updateState(const unsigned int level);
/** Compute coefficients of the polynomial of the dense output for a given DS
* \param ds SP::DynamicalSystem, ds concerned
*/
void computeCoefsDenseOutput(SP::DynamicalSystem ds);
/** prepare for Event localization*/
void prepareEventLocalization();
/** Generate dense output for all Dynamical Systems belonging to OSI
* \param time at which we want to generate the dense output
*/
void DenseOutputallDSs(double time);
/** Displays the data of the NewMarkAlpha's integrator
*/
void display();
ACCEPT_STD_VISITORS();
};
#endif // NEWMARKALPHAOSI_H
|
apache-2.0
|
aws/aws-sdk-java
|
aws-java-sdk-codedeploy/src/main/java/com/amazonaws/services/codedeploy/model/transform/InvalidAutoRollbackConfigExceptionUnmarshaller.java
|
3019
|
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.codedeploy.model.transform;
import java.math.*;
import javax.annotation.Generated;
import com.amazonaws.services.codedeploy.model.*;
import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*;
import com.fasterxml.jackson.core.JsonToken;
import static com.fasterxml.jackson.core.JsonToken.*;
/**
* InvalidAutoRollbackConfigException JSON Unmarshaller
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class InvalidAutoRollbackConfigExceptionUnmarshaller extends EnhancedJsonErrorUnmarshaller {
private InvalidAutoRollbackConfigExceptionUnmarshaller() {
super(com.amazonaws.services.codedeploy.model.InvalidAutoRollbackConfigException.class, "InvalidAutoRollbackConfigException");
}
@Override
public com.amazonaws.services.codedeploy.model.InvalidAutoRollbackConfigException unmarshallFromContext(JsonUnmarshallerContext context) throws Exception {
com.amazonaws.services.codedeploy.model.InvalidAutoRollbackConfigException invalidAutoRollbackConfigException = new com.amazonaws.services.codedeploy.model.InvalidAutoRollbackConfigException(
null);
int originalDepth = context.getCurrentDepth();
String currentParentElement = context.getCurrentParentElement();
int targetDepth = originalDepth + 1;
JsonToken token = context.getCurrentToken();
if (token == null)
token = context.nextToken();
if (token == VALUE_NULL) {
return null;
}
while (true) {
if (token == null)
break;
if (token == FIELD_NAME || token == START_OBJECT) {
} else if (token == END_ARRAY || token == END_OBJECT) {
if (context.getLastParsedParentElement() == null || context.getLastParsedParentElement().equals(currentParentElement)) {
if (context.getCurrentDepth() <= originalDepth)
break;
}
}
token = context.nextToken();
}
return invalidAutoRollbackConfigException;
}
private static InvalidAutoRollbackConfigExceptionUnmarshaller instance;
public static InvalidAutoRollbackConfigExceptionUnmarshaller getInstance() {
if (instance == null)
instance = new InvalidAutoRollbackConfigExceptionUnmarshaller();
return instance;
}
}
|
apache-2.0
|
vzrus/VZF
|
vzfsrc/VZF.Types/Interfaces/IServiceLocator.cs
|
3651
|
#region copyright
/* Yet Another Forum.NET
* Copyright (C) 2003-2005 Bjørnar Henden
* Copyright (C) 2006-2013 Jaben Cargman
*
* http://www.yetanotherforum.net/
*
* This file can contain some changes in 2014-2016 by Vladimir Zakharov(vzrus)
* for VZF forum
*
* http://www.code.coolhobby.ru/
*
* File ActiveLocation.cs created on 2.6.2015 in 6:29 AM.
* Last changed on 5.21.2016 in 1:07 PM.
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
#endregion
namespace YAF.Types.Interfaces
{
#region Using
using System;
using System.Collections.Generic;
#endregion
/// <summary>
    /// The service locator interface, used to resolve registered service instances.
/// </summary>
public interface IServiceLocator : IServiceProvider
{
#region Public Methods
/// <summary>
/// The get.
/// </summary>
/// <param name="serviceType">
/// The service type.
/// </param>
/// <returns>
/// The get.
/// </returns>
object Get([NotNull] Type serviceType);
/// <summary>
/// The get.
/// </summary>
/// <param name="serviceType">
/// The service type.
/// </param>
/// <param name="parameters">
/// The parameters.
/// </param>
/// <returns>
/// The get.
/// </returns>
object Get([NotNull] Type serviceType, [NotNull] IEnumerable<IServiceLocationParameter> parameters);
/// <summary>
/// The get.
/// </summary>
/// <param name="serviceType">
/// The service type.
/// </param>
/// <param name="named">
/// The named.
/// </param>
/// <returns>
/// The get.
/// </returns>
object Get([NotNull] Type serviceType, [NotNull] string named);
/// <summary>
/// The get.
/// </summary>
/// <param name="serviceType">
/// The service type.
/// </param>
/// <param name="named">
/// The named.
/// </param>
/// <param name="parameters">
/// The parameters.
/// </param>
/// <returns>
/// The get.
/// </returns>
object Get([NotNull] Type serviceType, [NotNull] string named, [NotNull] IEnumerable<IServiceLocationParameter> parameters);
/// <summary>
/// The try get.
/// </summary>
/// <param name="serviceType">
/// The service type.
/// </param>
/// <param name="instance">
/// The instance.
/// </param>
/// <returns>
/// The try get.
/// </returns>
bool TryGet([NotNull] Type serviceType, [NotNull] out object instance);
/// <summary>
/// The try get.
/// </summary>
/// <param name="serviceType">
/// The service type.
/// </param>
/// <param name="named">
/// The named.
/// </param>
/// <param name="instance">
/// The instance.
/// </param>
/// <returns>
/// The try get.
/// </returns>
bool TryGet([NotNull] Type serviceType, [NotNull] string named, [NotNull] out object instance);
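        // Usage sketch (illustrative, not part of the original interface); ISendMail is a
        // hypothetical service type used only for this example:
        //   object mailer;
        //   if (locator.TryGet(typeof(ISendMail), out mailer)) { /* use mailer */ }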
#endregion
}
}
|
apache-2.0
|
cloudfoundry-incubator/garden-linux
|
containerizer/shell_runner_step.go
|
598
|
package containerizer
import (
"fmt"
"os"
"os/exec"
"github.com/cloudfoundry/gunk/command_runner"
)
type ShellRunnerStep struct {
Runner command_runner.CommandRunner
Path string
}
func (step *ShellRunnerStep) Run() error {
if _, err := os.Stat(step.Path); os.IsNotExist(err) {
return nil
}
command := exec.Command("sh", step.Path)
if err := step.Runner.Start(command); err != nil {
return fmt.Errorf("starting command %s: %s", step.Path, err)
}
if err := step.Runner.Wait(command); err != nil {
return fmt.Errorf("runnng command %s: %s", step.Path, err)
}
return nil
}
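// Usage sketch (illustrative, not part of the original file); `runner` is assumed to be
// any command_runner.CommandRunner implementation supplied by the caller:
//
//	step := &ShellRunnerStep{Runner: runner, Path: "/etc/config/pre-start.sh"}
//	if err := step.Run(); err != nil {
//		// handle the failure
//	}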
|
apache-2.0
|
Leejjon/libgdx-chat-example
|
chat/core/src/main/java/org/stofkat/chat/http/dispatch/AsyncCallbackHandler.java
|
584
|
package org.stofkat.chat.http.dispatch;
import org.stofkat.chat.common.ClientInterface;
import org.stofkat.chat.common.results.Result;
public class AsyncCallbackHandler<R extends Result> implements AsyncCallback<R> {
private ClientInterface client;
public AsyncCallbackHandler(ClientInterface client) {
this.client = client;
}
@Override
public void onFailure(Throwable caught) {
caught.printStackTrace();
// TODO Log the exception and show an error to the user.
client.close();
}
@Override
public void onSuccess(R result) {
result.processResult(client);
}
}
|
apache-2.0
|
rbuffat/pyidf
|
tests/test_zoneinfiltrationeffectiveleakagearea.py
|
2138
|
import os
import tempfile
import unittest
import logging
from pyidf import ValidationLevel
import pyidf
from pyidf.idf import IDF
from pyidf.zone_airflow import ZoneInfiltrationEffectiveLeakageArea
log = logging.getLogger(__name__)
class TestZoneInfiltrationEffectiveLeakageArea(unittest.TestCase):
def setUp(self):
self.fd, self.path = tempfile.mkstemp()
def tearDown(self):
os.remove(self.path)
def test_create_zoneinfiltrationeffectiveleakagearea(self):
pyidf.validation_level = ValidationLevel.error
obj = ZoneInfiltrationEffectiveLeakageArea()
# alpha
var_name = "Name"
obj.name = var_name
# object-list
var_zone_name = "object-list|Zone Name"
obj.zone_name = var_zone_name
# object-list
var_schedule_name = "object-list|Schedule Name"
obj.schedule_name = var_schedule_name
# real
var_effective_air_leakage_area = 0.0001
obj.effective_air_leakage_area = var_effective_air_leakage_area
# real
var_stack_coefficient = 0.0001
obj.stack_coefficient = var_stack_coefficient
# real
var_wind_coefficient = 0.0001
obj.wind_coefficient = var_wind_coefficient
idf = IDF()
idf.add(obj)
idf.save(self.path, check=False)
with open(self.path, mode='r') as f:
for line in f:
log.debug(line.strip())
idf2 = IDF(self.path)
self.assertEqual(idf2.zoneinfiltrationeffectiveleakageareas[0].name, var_name)
self.assertEqual(idf2.zoneinfiltrationeffectiveleakageareas[0].zone_name, var_zone_name)
self.assertEqual(idf2.zoneinfiltrationeffectiveleakageareas[0].schedule_name, var_schedule_name)
self.assertAlmostEqual(idf2.zoneinfiltrationeffectiveleakageareas[0].effective_air_leakage_area, var_effective_air_leakage_area)
self.assertAlmostEqual(idf2.zoneinfiltrationeffectiveleakageareas[0].stack_coefficient, var_stack_coefficient)
self.assertAlmostEqual(idf2.zoneinfiltrationeffectiveleakageareas[0].wind_coefficient, var_wind_coefficient)
|
apache-2.0
|
ndis1/StudySnake
|
.apt_generated/com/studySnake/snake/model/UserManager$$InjectAdapter.java
|
2422
|
// Code generated by dagger-compiler. Do not edit.
package com.studySnake.snake.model;
import dagger.MembersInjector;
import dagger.internal.Binding;
import dagger.internal.Linker;
import java.util.Set;
import javax.inject.Provider;
/**
* A {@code Binder<UserManager>} implementation which satisfies
* Dagger's infrastructure requirements including:
*
* Owning the dependency links between {@code UserManager} and its
* dependencies.
*
* Being a {@code Provider<UserManager>} and handling creation and
* preparation of object instances.
*
* Being a {@code MembersInjector<UserManager>} and handling injection
* of annotated fields.
*/
public final class UserManager$$InjectAdapter extends Binding<UserManager>
implements Provider<UserManager>, MembersInjector<UserManager> {
private Binding<com.squareup.otto.Bus> parameter_bus;
private Binding<com.squareup.otto.Bus> field_bus;
public UserManager$$InjectAdapter() {
super("com.studySnake.snake.model.UserManager", "members/com.studySnake.snake.model.UserManager", NOT_SINGLETON, UserManager.class);
}
/**
* Used internally to link bindings/providers together at run time
* according to their dependency graph.
*/
@Override
@SuppressWarnings("unchecked")
public void attach(Linker linker) {
parameter_bus = (Binding<com.squareup.otto.Bus>) linker.requestBinding("com.squareup.otto.Bus", UserManager.class, getClass().getClassLoader());
field_bus = (Binding<com.squareup.otto.Bus>) linker.requestBinding("com.squareup.otto.Bus", UserManager.class, getClass().getClassLoader());
}
/**
* Used internally to obtain dependency information, such as for cyclical
* graph detection.
*/
@Override
public void getDependencies(Set<Binding<?>> getBindings, Set<Binding<?>> injectMembersBindings) {
getBindings.add(parameter_bus);
injectMembersBindings.add(field_bus);
}
/**
* Returns the fully provisioned instance satisfying the contract for
* {@code Provider<UserManager>}.
*/
@Override
public UserManager get() {
UserManager result = new UserManager(parameter_bus.get());
injectMembers(result);
return result;
}
/**
* Injects any {@code @Inject} annotated fields in the given instance,
* satisfying the contract for {@code Provider<UserManager>}.
*/
@Override
public void injectMembers(UserManager object) {
object.bus = field_bus.get();
}
}
|
apache-2.0
|
iamaprin/java-example
|
src/main/java/io/vilya/example/dispatcher/ExceptionListener.java
|
433
|
package io.vilya.example.dispatcher;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author zhukuanxin
* @time April 8, 2017, 8:14:23 AM
*/
public class ExceptionListener implements IListener {
private static final Logger LOGGER = LoggerFactory.getLogger(ExceptionListener.class);
@Override
public void handle(Object data) {
LOGGER.info("in exceptionListener");
throw new RuntimeException();
}
}
|
apache-2.0
|
lesaint/experimenting-annotation-processing
|
experimenting-rounds/massive-count-of-annotated-classes/src/main/java/fr/javatronic/blog/massive/annotation1/sub1/Class_6132.java
|
151
|
package fr.javatronic.blog.massive.annotation1.sub1;
import fr.javatronic.blog.processor.Annotation_001;
@Annotation_001
public class Class_6132 {
}
|
apache-2.0
|
fpompermaier/onvif
|
onvif-ws-client/src/main/java/org/xmlsoap/schemas/ws/_2005/_04/discovery/SecurityType.java
|
2716
|
package org.xmlsoap.schemas.ws._2005._04.discovery;
import java.util.HashMap;
import java.util.Map;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAnyAttribute;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlType;
import javax.xml.namespace.QName;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.cxf.xjc.runtime.JAXBToStringStyle;
/**
* <p>Java class for SecurityType complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="SecurityType">
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element ref="{http://schemas.xmlsoap.org/ws/2005/04/discovery}Sig" minOccurs="0"/>
* </sequence>
* <anyAttribute processContents='lax' namespace='##other'/>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "SecurityType", propOrder = {
"sig"
})
public class SecurityType {
@XmlElement(name = "Sig")
protected SigType sig;
@XmlAnyAttribute
private Map<QName, String> otherAttributes = new HashMap<QName, String>();
/**
* Gets the value of the sig property.
*
* @return
* possible object is
* {@link SigType }
*
*/
public SigType getSig() {
return sig;
}
/**
* Sets the value of the sig property.
*
* @param value
* allowed object is
* {@link SigType }
*
*/
public void setSig(SigType value) {
this.sig = value;
}
/**
* Gets a map that contains attributes that aren't bound to any typed property on this class.
*
* <p>
* the map is keyed by the name of the attribute and
* the value is the string value of the attribute.
*
* the map returned by this method is live, and you can add new attribute
* by updating the map directly. Because of this design, there's no setter.
*
*
* @return
* always non-null
*/
public Map<QName, String> getOtherAttributes() {
return otherAttributes;
}
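// Illustrative use of the live attribute map described above (the attribute name and
// value here are hypothetical, not part of the generated schema):
//   securityType.getOtherAttributes().put(new QName("urn:example", "id"), "123");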
/**
* Generates a String representation of the contents of this type.
* This is an extension method, produced by the 'ts' xjc plugin
*
*/
@Override
public String toString() {
return ToStringBuilder.reflectionToString(this, JAXBToStringStyle.DEFAULT_STYLE);
}
}
|
apache-2.0
|
jescarri/prometheus-operator
|
vendor/github.com/improbable-eng/thanos/pkg/compact/retention_test.go
|
7308
|
package compact_test
import (
"bytes"
"context"
"encoding/json"
"strings"
"testing"
"time"
"github.com/go-kit/kit/log"
"github.com/improbable-eng/thanos/pkg/block/metadata"
"github.com/improbable-eng/thanos/pkg/compact"
"github.com/improbable-eng/thanos/pkg/objstore"
"github.com/improbable-eng/thanos/pkg/objstore/inmem"
"github.com/improbable-eng/thanos/pkg/testutil"
"github.com/oklog/ulid"
"github.com/prometheus/tsdb"
)
func TestApplyRetentionPolicyByResolution(t *testing.T) {
type testBlock struct {
id string
minTime time.Time
maxTime time.Time
resolution compact.ResolutionLevel
}
logger := log.NewNopLogger()
ctx := context.TODO()
for _, tt := range []struct {
name string
blocks []testBlock
retentionByResolution map[compact.ResolutionLevel]time.Duration
want []string
wantErr bool
}{
{
"empty bucket",
[]testBlock{},
map[compact.ResolutionLevel]time.Duration{
compact.ResolutionLevelRaw: 24 * time.Hour,
compact.ResolutionLevel5m: 7 * 24 * time.Hour,
compact.ResolutionLevel1h: 14 * 24 * time.Hour,
},
[]string{},
false,
},
{
"only raw retention",
[]testBlock{
{
"01CPHBEX20729MJQZXE3W0BW48",
time.Now().Add(-3 * 24 * time.Hour),
time.Now().Add(-2 * 24 * time.Hour),
compact.ResolutionLevelRaw,
},
{
"01CPHBEX20729MJQZXE3W0BW49",
time.Now().Add(-2 * 24 * time.Hour),
time.Now().Add(-24 * time.Hour),
compact.ResolutionLevel5m,
},
{
"01CPHBEX20729MJQZXE3W0BW50",
time.Now().Add(-24 * time.Hour),
time.Now().Add(-23 * time.Hour),
compact.ResolutionLevel1h,
},
{
"01CPHBEX20729MJQZXE3W0BW51",
time.Now().Add(-23 * time.Hour),
time.Now().Add(-6 * time.Hour),
compact.ResolutionLevelRaw,
},
},
map[compact.ResolutionLevel]time.Duration{
compact.ResolutionLevelRaw: 24 * time.Hour,
compact.ResolutionLevel5m: 0,
compact.ResolutionLevel1h: 0,
},
[]string{
"01CPHBEX20729MJQZXE3W0BW49/",
"01CPHBEX20729MJQZXE3W0BW50/",
"01CPHBEX20729MJQZXE3W0BW51/",
},
false,
},
{
"no retention",
[]testBlock{
{
"01CPHBEX20729MJQZXE3W0BW48",
time.Now().Add(-3 * 24 * time.Hour),
time.Now().Add(-2 * 24 * time.Hour),
compact.ResolutionLevelRaw,
},
{
"01CPHBEX20729MJQZXE3W0BW49",
time.Now().Add(-2 * 24 * time.Hour),
time.Now().Add(-24 * time.Hour),
compact.ResolutionLevel5m,
},
{
"01CPHBEX20729MJQZXE3W0BW50",
time.Now().Add(-24 * time.Hour),
time.Now().Add(-23 * time.Hour),
compact.ResolutionLevel1h,
},
{
"01CPHBEX20729MJQZXE3W0BW51",
time.Now().Add(-23 * time.Hour),
time.Now().Add(-6 * time.Hour),
compact.ResolutionLevelRaw,
},
},
map[compact.ResolutionLevel]time.Duration{
compact.ResolutionLevelRaw: 0,
compact.ResolutionLevel5m: 0,
compact.ResolutionLevel1h: 0,
},
[]string{
"01CPHBEX20729MJQZXE3W0BW48/",
"01CPHBEX20729MJQZXE3W0BW49/",
"01CPHBEX20729MJQZXE3W0BW50/",
"01CPHBEX20729MJQZXE3W0BW51/",
},
false,
},
{
"no retention 1900",
[]testBlock{
{
"01CPHBEX20729MJQZXE3W0BW48",
time.Date(1900, 1, 1, 1, 0, 0, 0, time.Local),
time.Date(1900, 1, 1, 2, 0, 0, 0, time.Local),
compact.ResolutionLevelRaw,
},
{
"01CPHBEX20729MJQZXE3W0BW49",
time.Date(1900, 1, 1, 1, 0, 0, 0, time.Local),
time.Date(1900, 1, 1, 2, 0, 0, 0, time.Local),
compact.ResolutionLevel5m,
},
{
"01CPHBEX20729MJQZXE3W0BW50",
time.Date(1900, 1, 1, 1, 0, 0, 0, time.Local),
time.Date(1900, 1, 1, 2, 0, 0, 0, time.Local),
compact.ResolutionLevel1h,
},
},
map[compact.ResolutionLevel]time.Duration{
compact.ResolutionLevelRaw: 0,
compact.ResolutionLevel5m: 0,
compact.ResolutionLevel1h: 0,
},
[]string{
"01CPHBEX20729MJQZXE3W0BW48/",
"01CPHBEX20729MJQZXE3W0BW49/",
"01CPHBEX20729MJQZXE3W0BW50/",
},
false,
},
{
"unknown resolution",
[]testBlock{
{
"01CPHBEX20729MJQZXE3W0BW48",
time.Now().Add(-3 * 24 * time.Hour),
time.Now().Add(-2 * 24 * time.Hour),
compact.ResolutionLevel(1),
},
},
map[compact.ResolutionLevel]time.Duration{},
[]string{
"01CPHBEX20729MJQZXE3W0BW48/",
},
false,
},
{
"every retention deletes",
[]testBlock{
{
"01CPHBEX20729MJQZXE3W0BW40",
time.Now().Add(-1 * 24 * time.Hour),
time.Now().Add(-0 * 24 * time.Hour),
compact.ResolutionLevelRaw,
},
{
"01CPHBEX20729MJQZXE3W0BW41",
time.Now().Add(-2 * 24 * time.Hour),
time.Now().Add(-1 * 24 * time.Hour),
compact.ResolutionLevelRaw,
},
{
"01CPHBEX20729MJQZXE3W0BW42",
time.Now().Add(-7 * 24 * time.Hour),
time.Now().Add(-6 * 24 * time.Hour),
compact.ResolutionLevel5m,
},
{
"01CPHBEX20729MJQZXE3W0BW43",
time.Now().Add(-8 * 24 * time.Hour),
time.Now().Add(-7 * 24 * time.Hour),
compact.ResolutionLevel5m,
},
{
"01CPHBEX20729MJQZXE3W0BW44",
time.Now().Add(-14 * 24 * time.Hour),
time.Now().Add(-13 * 24 * time.Hour),
compact.ResolutionLevel1h,
},
{
"01CPHBEX20729MJQZXE3W0BW45",
time.Now().Add(-15 * 24 * time.Hour),
time.Now().Add(-14 * 24 * time.Hour),
compact.ResolutionLevel1h,
},
},
map[compact.ResolutionLevel]time.Duration{
compact.ResolutionLevelRaw: 24 * time.Hour,
compact.ResolutionLevel5m: 7 * 24 * time.Hour,
compact.ResolutionLevel1h: 14 * 24 * time.Hour,
},
[]string{
"01CPHBEX20729MJQZXE3W0BW40/",
"01CPHBEX20729MJQZXE3W0BW42/",
"01CPHBEX20729MJQZXE3W0BW44/",
},
false,
},
} {
t.Run(tt.name, func(t *testing.T) {
bkt := inmem.NewBucket()
for _, b := range tt.blocks {
uploadMockBlock(t, bkt, b.id, b.minTime, b.maxTime, int64(b.resolution))
}
if err := compact.ApplyRetentionPolicyByResolution(ctx, logger, bkt, tt.retentionByResolution); (err != nil) != tt.wantErr {
t.Errorf("ApplyRetentionPolicyByResolution() error = %v, wantErr %v", err, tt.wantErr)
}
got := []string{}
testutil.Ok(t, bkt.Iter(context.TODO(), "", func(name string) error {
got = append(got, name)
return nil
}))
testutil.Equals(t, got, tt.want)
})
}
}
func uploadMockBlock(t *testing.T, bkt objstore.Bucket, id string, minTime, maxTime time.Time, resolutionLevel int64) {
t.Helper()
meta1 := metadata.Meta{
Version: 1,
BlockMeta: tsdb.BlockMeta{
ULID: ulid.MustParse(id),
MinTime: minTime.Unix() * 1000,
MaxTime: maxTime.Unix() * 1000,
},
Thanos: metadata.Thanos{
Downsample: metadata.ThanosDownsample{
Resolution: resolutionLevel,
},
},
}
b, err := json.Marshal(meta1)
testutil.Ok(t, err)
testutil.Ok(t, bkt.Upload(context.Background(), id+"/meta.json", bytes.NewReader(b)))
testutil.Ok(t, bkt.Upload(context.Background(), id+"/chunks/000001", strings.NewReader("@test-data@")))
testutil.Ok(t, bkt.Upload(context.Background(), id+"/chunks/000002", strings.NewReader("@test-data@")))
testutil.Ok(t, bkt.Upload(context.Background(), id+"/chunks/000003", strings.NewReader("@test-data@")))
}
|
apache-2.0
|
dlvenable/dynamic-dynamo
|
src/main/java/io/venable/amazonaws/dynamo/table/builder/LocalSecondaryIndexBuilderImpl.java
|
3280
|
/*
* Copyright (c) 2015 David Venable.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package io.venable.amazonaws.dynamo.table.builder;
import com.amazonaws.services.dynamodbv2.model.AttributeDefinition;
import com.amazonaws.services.dynamodbv2.model.KeySchemaElement;
import com.amazonaws.services.dynamodbv2.model.KeyType;
import com.amazonaws.services.dynamodbv2.model.LocalSecondaryIndex;
import io.venable.amazonaws.dynamo.table.RangeRequiredException;
import java.util.ArrayList;
import java.util.Collection;
/**
* @author David Venable
*/
class LocalSecondaryIndexBuilderImpl implements LocalSecondaryIndexBuilder
{
private final TableBuilder tableBuilder;
private final ProjectionBuilderImpl<LocalSecondaryIndexBuilder> projection;
private String indexName;
private KeyElementBuilderImpl<LocalSecondaryIndexBuilder> rangeBuilder;
public LocalSecondaryIndexBuilderImpl(TableBuilder tableBuilder) {
this.tableBuilder = tableBuilder;
this.projection = new ProjectionBuilderImpl<LocalSecondaryIndexBuilder>(this);
}
@Override
public LocalSecondaryIndexBuilder name(String indexName) {
this.indexName = indexName;
return this;
}
@Override
public KeyElementBuilder<LocalSecondaryIndexBuilder> range() {
if(rangeBuilder == null)
rangeBuilder = new KeyElementBuilderImpl<LocalSecondaryIndexBuilder>(this, KeyType.RANGE);
return rangeBuilder;
}
@Override
public ProjectionBuilder<LocalSecondaryIndexBuilder> projection() {
return projection;
}
@Override
public TableBuilder and() {
return tableBuilder;
}
void buildSecondaryIndexes(KeySchemaElement primaryHashKeySchemaElement, Collection<LocalSecondaryIndex> localSecondaryIndexCollection, Collection<AttributeDefinition> attributeDefinitionCollection)
{
LocalSecondaryIndex localSecondaryIndex = new LocalSecondaryIndex();
localSecondaryIndex.setIndexName(indexName);
Collection<KeySchemaElement> keySchemaElementCollection = new ArrayList<>();
keySchemaElementCollection.add(primaryHashKeySchemaElement);
buildRangeKey(keySchemaElementCollection, attributeDefinitionCollection);
localSecondaryIndex.setKeySchema(keySchemaElementCollection);
projection.build(localSecondaryIndex);
localSecondaryIndexCollection.add(localSecondaryIndex);
}
private void buildRangeKey(Collection<KeySchemaElement> keySchemaElementCollection, Collection<AttributeDefinition> attributeDefinitionCollection)
{
if(rangeBuilder == null)
throw new RangeRequiredException();
rangeBuilder.build(keySchemaElementCollection, attributeDefinitionCollection);
}
}
|
apache-2.0
|
yesil/jackrabbit-oak
|
oak-jcr/src/main/java/org/apache/jackrabbit/oak/jcr/observation/filter/package-info.java
|
916
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
@Version("1.0.0")
package org.apache.jackrabbit.oak.jcr.observation.filter;
import aQute.bnd.annotation.Version;
|
apache-2.0
|
OmniKryptec/OmniKryptec-Engine
|
src/main/java/de/omnikryptec/util/math/Mathd.java
|
4994
|
/*
* Copyright 2017 - 2019 Roman Borris (pcfreak9000), Paul Hagedorn (Panzer1119)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.omnikryptec.util.math;
import org.joml.Math;
import org.joml.Matrix3x2d;
import org.joml.Matrix3x2dc;
import org.joml.Matrix4d;
import org.joml.Matrix4dc;
public strictfp class Mathd {
public static final Matrix3x2dc IDENTITY3x2d = new Matrix3x2d();
public static final Matrix4dc IDENTITY4d = new Matrix4d();
public static final double PI = java.lang.Math.PI;
public static final double E = java.lang.Math.E;
/**
* All double values above or equal to this value are integer numbers, all
* double values below or equal to (-1) * this value are integer numbers.
*/
private static final double TWO_POW_52 = 4503599627370496.0d;
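// Worked illustration of the comment above: doubles at or beyond 2^52 carry no
// fractional bits, so (TWO_POW_52 + 2.3) - TWO_POW_52 == 2.0; this is exactly the
// rounding trick that rint(double) below relies on.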
public static double pow(final double in, final double e) {
return java.lang.Math.pow(in, e);
}
public static double square(final double x) {
return x * x;
}
public static double clamp(final double in, final double min, final double max) {
return in < min ? min : (in > max ? max : in);
}
public static double clamp01(final double in) {
return in < 0.0 ? 0.0 : (in > 1.0 ? 1.0 : in);
}
public static double interpolate(final double a, final double b, final double ratio, final Interpolator interpol) {
return lerp(a, b, interpol.interpolate(ratio));
}
public static double lerp(final double a, final double b, final double ratio) {
return a * (1 - ratio) + b * ratio;
}
public static double pingpong(double in, final double length) {
in %= length * 2;
if (in < length) {
return in;
} else {
return 2 * length - in;
}
}
public static double sin(final double rad) {
return Math.sin(rad);
}
public static double cos(final double rad) {
return Math.cos(rad);
}
public static double tan(final double rad) {
return Math.tan(rad);
}
public static double arcsin(final double x) {
return Math.asin(x);
}
public static double arccos(final double x) {
return Math.acos(x);
}
public static double arctan(final double x) {
return java.lang.Math.atan(x);
}
public static double arctan2(final double y, final double x) {
return Math.atan2(y, x);
}
public static double sqrt(final double value) {
return Math.sqrt(value);
}
public static double abs(final double value) {
return value < 0.0 ? 0.0 - value : value;
}
public static double min(final double v0, final double v1) {
return v0 < v1 ? v0 : v1;
}
public static double max(final double v0, final double v1) {
return v0 > v1 ? v0 : v1;
}
public static double floor(final double value) {
if (value != value) {
// NaN
return value;
}
if (value >= TWO_POW_52 || value <= -TWO_POW_52) {
return value;
}
long intvalue = (long) value;
if (value < 0 && intvalue != value) {
intvalue--;
}
return intvalue;
}
public static double ceil(final double value) {
if (value != value) {
// NaN
return value;
}
if (value >= TWO_POW_52 || value <= -TWO_POW_52) {
return value;
}
long intvalue = (long) value;
if (value > 0 && intvalue != value) {
intvalue++;
}
return intvalue;
}
public static double rint(final double value) {
if (value != value) {
// NaN
return value;
}
if (value > 0 && value < TWO_POW_52) {
return (TWO_POW_52 + value) - TWO_POW_52;
} else if (value < 0 && value > -TWO_POW_52) {
return (-TWO_POW_52 + value) + TWO_POW_52;
}
return value;
}
public static long round(final double value) {
return (long) rint(value);
}
public static int roundi(final double value) {
return (int) rint(value);
}
public static double roundM(final double value) {
return java.lang.Math.round(value);
}
public static double round(final double value, final int d) {
final long mult = (long) pow(10, d);
return roundM(value * mult) / mult;
}
}
|
apache-2.0
|
inbloom/secure-data-service
|
sli/ingestion/ingestion-core/src/main/java/org/slc/sli/ingestion/processors/DeltaHashPurgeProcessor.java
|
2477
|
/*
* Copyright 2012-2013 inBloom, Inc. and its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.slc.sli.ingestion.processors;
import org.apache.camel.Exchange;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slc.sli.common.util.tenantdb.TenantContext;
import org.slc.sli.ingestion.BatchJobStageType;
import org.slc.sli.ingestion.Resource;
import org.slc.sli.ingestion.WorkNote;
import org.slc.sli.ingestion.landingzone.AttributeType;
import org.slc.sli.ingestion.model.NewBatchJob;
/**
* Processor to remove delta hash from the datastore
*
* @author npandey
*
*/
public class DeltaHashPurgeProcessor extends IngestionProcessor<WorkNote, Resource> {
public static final BatchJobStageType BATCH_JOB_STAGE = BatchJobStageType.DELTA_PROPERTY_PROCESSOR;
private static final String BATCH_JOB_STAGE_DESC = "Process the duplicate detection property";
private static final Logger LOG = LoggerFactory.getLogger(DeltaHashPurgeProcessor.class);
/**
* Camel Exchange process callback method
*
* @param exchange Camel exchange.
*/
@Override
public void process(Exchange exchange, ProcessorArgs<WorkNote> args) {
String tenantId = TenantContext.getTenantId();
removeRecordHash(args.job, tenantId);
}
/**
* Clear out delta hash for this tenant from datastore
*
* @param job Batch Job
* @param tenantId Tenant Id
*/
private void removeRecordHash(NewBatchJob job, String tenantId) {
String rhMode = job.getProperty(AttributeType.DUPLICATE_DETECTION.getName());
LOG.info("@duplicate-detection mode '" + rhMode + "' given: resetting recordHash");
batchJobDAO.removeRecordHashByTenant(tenantId);
}
@Override
protected BatchJobStageType getStage() {
return BATCH_JOB_STAGE;
}
@Override
protected String getStageDescription() {
return BATCH_JOB_STAGE_DESC;
}
}
|
apache-2.0
|
trade-nexus/tradesharp-core
|
Order Execution Providers/Integral/TradeHub.OrderExecutionProvider.Integral.Tests/Properties/AssemblyInfo.cs
|
3008
|
/*****************************************************************************
* Copyright 2016 Aurora Solutions
*
* http://www.aurorasolutions.io
*
* Aurora Solutions is an innovative services and product company at
* the forefront of the software industry, with processes and practices
* involving Domain Driven Design(DDD), Agile methodologies to build
* scalable, secure, reliable and high performance products.
*
* TradeSharp is a C# based data feed and broker neutral Algorithmic
* Trading Platform that lets trading firms or individuals automate
* any rules based trading strategies in stocks, forex and ETFs.
* TradeSharp allows users to connect to providers like Tradier Brokerage,
* IQFeed, FXCM, Blackwood, Forexware, Integral, HotSpot, Currenex,
* Interactive Brokers and more.
* Key features: Place and Manage Orders, Risk Management,
* Generate Customized Reports etc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*****************************************************************************/
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("TradeHub.OrderExecutionProvider.Integral.Tests")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("")]
[assembly: AssemblyProduct("TradeHub.OrderExecutionProvider.Integral.Tests")]
[assembly: AssemblyCopyright("Copyright © 2015")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]
// Setting ComVisible to false makes the types in this assembly not visible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]
// The following GUID is for the ID of the typelib if this project is exposed to COM
[assembly: Guid("03263fd2-14d8-4055-b81e-48fd6061e798")]
// Version information for an assembly consists of the following four values:
//
// Major Version
// Minor Version
// Build Number
// Revision
//
// You can specify all the values or you can default the Build and Revision Numbers
// by using the '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
[assembly: AssemblyVersion("1.0.0.0")]
[assembly: AssemblyFileVersion("1.0.0.0")]
|
apache-2.0
|
popo1379/NetHardNEWSV0.01
|
app/src/main/java/com/palmintelligence/administrator/nethardnewsv001/network/NetWork.java
|
2135
|
package com.palmintelligence.administrator.nethardnewsv001.network;
import android.util.Log;
import java.io.IOException;
import okhttp3.Interceptor;
import okhttp3.OkHttpClient;
import okhttp3.Request;
import okhttp3.ResponseBody;
import retrofit2.CallAdapter;
import retrofit2.Converter;
import retrofit2.Retrofit;
import retrofit2.adapter.rxjava.RxJavaCallAdapterFactory;
import retrofit2.converter.gson.GsonConverterFactory;
/**
* Created by Administrator on 2017/2/10.
*/
public class NetWork {
private static final Converter.Factory gsonConverterFactory = GsonConverterFactory.create();
private static final CallAdapter.Factory rxJavaCallAdapterFactory = RxJavaCallAdapterFactory.create();
private static JUHEAPI juheapi;
public static JUHEAPI getJUHEAPI(){
if (juheapi == null) {
juheapi = getRetrofit(Api.BASE_API_JUHE).create(JUHEAPI.class);
}
return juheapi;
}
private static Retrofit getRetrofit(String baseUrl) {
return new Retrofit.Builder()
.client(new OkHttpClient.Builder().addInterceptor(new LogInterceptor()).build())
.baseUrl(baseUrl)
.addConverterFactory(gsonConverterFactory)
.addCallAdapterFactory(rxJavaCallAdapterFactory)
.build();
}
private static class LogInterceptor implements Interceptor {
@Override
public okhttp3.Response intercept(Chain chain) throws IOException {
Request request = chain.request();
Log.i("LogUtils--> ", "request:" + request.toString());
okhttp3.Response response = chain.proceed(chain.request());
okhttp3.MediaType mediaType = response.body().contentType();
String content = response.body().string();
Log.i("LogUtils--> ", "response body:" + content);
if (response.body() != null) {
ResponseBody body = ResponseBody.create(mediaType, content);
return response.newBuilder().body(body).build();
} else {
return response;
}
}
}
}
|
apache-2.0
|
facug91/OJ-Solutions
|
coj.uci.cu/EasyChange.cpp
|
1270
|
/*
By: facug91
From: http://coj.uci.cu/24h/problem.xhtml?abb=2616
Name: Easy Change
Number: 2616
Date: 02/08/2014
*/
#include <iostream>
#include <cstdio>
#include <cstdlib>
#include <string>
#include <cstring>
#include <cmath>
#include <algorithm>
#include <vector>
#include <queue>
#include <deque>
#include <set>
#include <map>
#include <iterator>
#include <utility>
#include <list>
#include <stack>
#include <iomanip>
#include <bitset>
#define MAX_INT 2147483647
#define MAX_LONG 9223372036854775807ll
#define MAX_ULONG 18446744073709551615ull
#define MAX_DBL 1.7976931348623158e+308
#define EPS 1e-9
const double PI = 2.0*acos(0.0);
#define INF 1000000000
using namespace std;
typedef long long ll;
typedef pair<int, int> ii;
typedef pair<int, pair<int, int> > iii;
int n, coin[] = {6, 5, 3, 1}, DP[100005];
int dp (int n) {
if (n < 0) return INF;
if (n == 0) return 0;
if (DP[n] != -1) return DP[n];
int ans = INF;
for (int i=0; i<4; i++)
ans = min(ans, dp(n-coin[i])+1);
return DP[n] = ans;
}
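// Note: dp(n) is the classic memoized coin-change recurrence over coins {6, 5, 3, 1},
// with dp(0) = 0 and dp(n) = 1 + min(dp(n - c)) over the four coins; for example
// dp(7) = 2 (6 + 1). Memoization keeps the recursion to O(n * 4) subproblem evaluations.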
int main () {
int t, i, j;
memset(DP, -1, sizeof DP);
scanf("%d", &t);
for (int it=1; it<=t; it++) {
scanf("%d", &n);
printf("Case %d: %d\n", it, dp(n));
}
return 0;
}
|
apache-2.0
|
gandulf/DsaTab
|
DsaTab/src/main/java/com/dsatab/data/enums/TalentType.java
|
31451
|
package com.dsatab.data.enums;
public enum TalentType {
Anderthalbhänder(
"Anderthalbhänder",
TalentGroupType.Nahkampf
, -2),
Armbrust(
"Armbrust",
TalentGroupType.Fernkampf
, -5),
Bastardstäbe(
"Bastardstäbe",
TalentGroupType.Nahkampf
, -2),
Belagerungswaffen(
"Belagerungswaffen",
TalentGroupType.Fernkampf
),
Blasrohr(
"Blasrohr",
TalentGroupType.Fernkampf
, -5),
Bogen(
"Bogen",
TalentGroupType.Fernkampf
, -3),
Diskus(
"Diskus",
TalentGroupType.Fernkampf
, -3),
Dolche(
"Dolche",
TalentGroupType.Nahkampf
, -1),
Fechtwaffen(
"Fechtwaffen",
TalentGroupType.Nahkampf
, -1),
Hiebwaffen(
"Hiebwaffen",
TalentGroupType.Nahkampf
, -4),
Infanteriewaffen(
"Infanteriewaffen",
TalentGroupType.Nahkampf
, -3),
Kettenstäbe(
"Kettenstäbe",
TalentGroupType.Nahkampf
, -1),
Kettenwaffen(
"Kettenwaffen",
TalentGroupType.Nahkampf
, -3),
Lanzenreiten(
"Lanzenreiten",
TalentGroupType.Fernkampf
),
Peitsche(
"Peitsche",
TalentGroupType.Nahkampf
, -1),
Raufen(
"Raufen",
TalentGroupType.Nahkampf
, 0),
Ringen(
"Ringen",
TalentGroupType.Nahkampf
, 0),
Säbel(
"Säbel",
TalentGroupType.Nahkampf
, -2),
Schleuder(
"Schleuder",
TalentGroupType.Fernkampf
, -2),
Schwerter(
"Schwerter",
TalentGroupType.Nahkampf
, -2),
Speere(
"Speere",
TalentGroupType.Nahkampf
, -3),
Stäbe(
"Stäbe",
TalentGroupType.Nahkampf
, -2),
Wurfbeile(
"Wurfbeile",
TalentGroupType.Fernkampf
, -2),
Wurfmesser(
"Wurfmesser",
TalentGroupType.Fernkampf
, -3),
Wurfspeere(
"Wurfspeere",
TalentGroupType.Fernkampf
, -2),
Zweihandflegel(
"Zweihandflegel",
TalentGroupType.Nahkampf
, -3),
Zweihandhiebwaffen(
"Zweihandhiebwaffen",
TalentGroupType.Nahkampf
, -3),
Zweihandschwertersäbel(
"Zweihandschwerter/-säbel",
TalentGroupType.Nahkampf
, -2),
Akrobatik(
"Akrobatik",
TalentGroupType.Körperlich
),
Athletik(
"Athletik",
TalentGroupType.Körperlich
),
Fliegen(
"Fliegen",
TalentGroupType.Körperlich
),
Gaukeleien(
"Gaukeleien",
TalentGroupType.Körperlich
),
Klettern(
"Klettern",
TalentGroupType.Körperlich
),
Körperbeherrschung(
"Körperbeherrschung",
TalentGroupType.Körperlich
),
Reiten(
"Reiten",
TalentGroupType.Körperlich
),
Schleichen(
"Schleichen",
TalentGroupType.Körperlich
),
Schwimmen(
"Schwimmen",
TalentGroupType.Körperlich
),
Selbstbeherrschung(
"Selbstbeherrschung",
TalentGroupType.Körperlich
),
SichVerstecken(
"Sich verstecken",
TalentGroupType.Körperlich
),
Singen(
"Singen",
TalentGroupType.Körperlich
),
Sinnenschärfe(
"Sinnenschärfe",
TalentGroupType.Körperlich
),
Skifahren(
"Skifahren",
TalentGroupType.Körperlich
),
StimmenImitieren(
"Stimmen imitieren",
TalentGroupType.Körperlich
),
Tanzen(
"Tanzen",
TalentGroupType.Körperlich
),
Taschendiebstahl(
"Taschendiebstahl",
TalentGroupType.Körperlich
),
Zechen(
"Zechen",
TalentGroupType.Körperlich
),
Betören(
"Betören",
TalentGroupType.Gesellschaft
),
Etikette(
"Etikette",
TalentGroupType.Gesellschaft
),
Gassenwissen(
"Gassenwissen",
TalentGroupType.Gesellschaft
),
Lehren(
"Lehren",
TalentGroupType.Gesellschaft
),
Menschenkenntnis(
"Menschenkenntnis",
TalentGroupType.Gesellschaft
),
Schauspielerei(
"Schauspielerei",
TalentGroupType.Gesellschaft
),
SchriftlicherAusdruck(
"Schriftlicher Ausdruck",
TalentGroupType.Gesellschaft
),
SichVerkleiden(
"Sich verkleiden",
TalentGroupType.Gesellschaft
),
Überreden(
"Überreden",
TalentGroupType.Gesellschaft
),
Überzeugen(
"Überzeugen",
TalentGroupType.Gesellschaft
),
Galanterie(
"Galanterie",
TalentGroupType.Gesellschaft
),
Fährtensuchen(
"Fährtensuchen",
TalentGroupType.Natur
),
FallenStellen(
"Fallen stellen",
TalentGroupType.Natur
),
FesselnEntfesseln(
"Fesseln/Entfesseln",
TalentGroupType.Natur
),
FischenAngeln(
"Fischen/Angeln",
TalentGroupType.Natur
),
Orientierung(
"Orientierung",
TalentGroupType.Natur
),
Wettervorhersage(
"Wettervorhersage",
TalentGroupType.Natur
),
Seefischerei(
"Seefischerei",
TalentGroupType.Natur
),
Wildnisleben(
"Wildnisleben",
TalentGroupType.Natur
),
LesenSchreiben(
"Lesen/Schreiben",
TalentGroupType.Schriften
),
LesenSchreibenAltesAlaani(
"Lesen/Schreiben Altes Alaani",
TalentGroupType.Schriften
),
LesenSchreibenAltesAmulashtra(
"Lesen/Schreiben Altes Amulashtra",
TalentGroupType.Schriften
),
LesenSchreibenAmulashtra(
"Lesen/Schreiben Amulashtra",
TalentGroupType.Schriften
),
LesenSchreibenAngram(
"Lesen/Schreiben Angram",
TalentGroupType.Schriften
),
LesenSchreibenArkanil(
"Lesen/Schreiben Arkanil",
TalentGroupType.Schriften
),
LesenSchreibenAsdharia(
"Lesen/Schreiben Asdharia",
TalentGroupType.Schriften
),
LesenSchreibenChrmk(
"Lesen/Schreiben Chrmk",
TalentGroupType.Schriften
),
LesenSchreibenChuchas(
"Lesen/Schreiben Chuchas",
TalentGroupType.Schriften
),
LesenSchreibenDrakhardZinken(
"Lesen/Schreiben Drakhard-Zinken",
TalentGroupType.Schriften
),
LesenSchreibenDraknedGlyphen(
"Lesen/Schreiben Drakned-Glyphen",
TalentGroupType.Schriften
),
LesenSchreibenGeheiligteGlyphenVonUnau(
"Lesen/Schreiben Geheiligte Glyphen von Unau",
TalentGroupType.Schriften
),
LesenSchreibenGimarilGlyphen(
"Lesen/Schreiben Gimaril-Glyphen",
TalentGroupType.Schriften
),
LesenSchreibenGjalskisch(
"Lesen/Schreiben Gjalskisch",
TalentGroupType.Schriften
),
LesenSchreibenHjaldingscheRunen(
"Lesen/Schreiben Hjaldingsche Runen",
TalentGroupType.Schriften
),
LesenSchreibenAltImperialeZeichen(
"Lesen/Schreiben (Alt-)Imperiale Zeichen",
TalentGroupType.Schriften
),
LesenSchreibenIsdira(
"Lesen/Schreiben Isdira",
TalentGroupType.Schriften
),
LesenSchreibenIsdiraAsdharia(
"Lesen/Schreiben Isdira/Asdharia",
TalentGroupType.Schriften
),
LesenSchreibenAltesKemi(
"Lesen/Schreiben Altes Kemi",
TalentGroupType.Schriften
),
LesenSchreibenKuslikerZeichen(
"Lesen/Schreiben Kusliker Zeichen",
TalentGroupType.Schriften
),
LesenSchreibenNanduria(
"Lesen/Schreiben Nanduria",
TalentGroupType.Schriften
),
LesenSchreibenRogolan(
"Lesen/Schreiben Rogolan",
TalentGroupType.Schriften
),
LesenSchreibenTrollischeRaumbilderschrift(
"Lesen/Schreiben Trollische Raumbilderschrift",
TalentGroupType.Schriften
),
LesenSchreibenTulamidya(
"Lesen/Schreiben Tulamidya",
TalentGroupType.Schriften
),
LesenSchreibenUrtulamidya(
"Lesen/Schreiben Urtulamidya",
TalentGroupType.Schriften
),
LesenSchreibenZhayad(
"Lesen/Schreiben Zhayad",
TalentGroupType.Schriften
),
LesenSchreibenMahrischeGlyphen(
"Lesen/Schreiben Mahrische Glyphen",
TalentGroupType.Schriften
),
LesenSchreibenWudu(
"Lesen/Schreiben Wudu",
TalentGroupType.Schriften
),
SprachenKennen(
"Sprachen kennen",
TalentGroupType.Sprachen
),
SprachenKennenGarethi(
"Sprachen kennen Garethi",
TalentGroupType.Sprachen
),
SprachenKennenBosparano(
"Sprachen kennen Bosparano",
TalentGroupType.Sprachen
),
SprachenKennenAltImperialAureliani(
"Sprachen kennen Alt-Imperial/Aureliani",
TalentGroupType.Sprachen
),
SprachenKennenZyklopäisch(
"Sprachen kennen Zyklopäisch",
TalentGroupType.Sprachen
),
SprachenKennenTulamidya(
"Sprachen kennen Tulamidya",
TalentGroupType.Sprachen
),
SprachenKennenUrtulamidya(
"Sprachen kennen Urtulamidya",
TalentGroupType.Sprachen
),
SprachenKennenZelemja(
"Sprachen kennen Zelemja",
TalentGroupType.Sprachen
),
SprachenKennenAltesKemi(
"Sprachen kennen Altes Kemi",
TalentGroupType.Sprachen
),
SprachenKennenAlaani(
"Sprachen kennen Alaani",
TalentGroupType.Sprachen
),
SprachenKennenZhulchammaqra(
"Sprachen kennen Zhulchammaqra",
TalentGroupType.Sprachen
),
SprachenKennenFerkina(
"Sprachen kennen Ferkina",
TalentGroupType.Sprachen
),
SprachenKennenRuuz(
"Sprachen kennen Ruuz",
TalentGroupType.Sprachen
),
SprachenKennenRabensprache(
"Sprachen kennen Rabensprache",
TalentGroupType.Sprachen
),
SprachenKennenNujuka(
"Sprachen kennen Nujuka",
TalentGroupType.Sprachen
),
SprachenKennenMohisch(
"Sprachen kennen Mohisch",
TalentGroupType.Sprachen
),
SprachenKennenThorwalsch(
"Sprachen kennen Thorwalsch",
TalentGroupType.Sprachen
),
SprachenKennenHjaldingsch(
"Sprachen kennen Hjaldingsch",
TalentGroupType.Sprachen
),
SprachenKennenIsdira(
"Sprachen kennen Isdira",
TalentGroupType.Sprachen
),
SprachenKennenAsdharia(
"Sprachen kennen Asdharia",
TalentGroupType.Sprachen
),
SprachenKennenRogolan(
"Sprachen kennen Rogolan",
TalentGroupType.Sprachen
),
SprachenKennenAngram(
"Sprachen kennen Angram",
TalentGroupType.Sprachen
),
SprachenKennenOloghaijan(
"Sprachen kennen Ologhaijan",
TalentGroupType.Sprachen
),
SprachenKennenOloarkh(
"Sprachen kennen Oloarkh",
TalentGroupType.Sprachen
),
SprachenKennenGoblinisch(
"Sprachen kennen Goblinisch",
TalentGroupType.Sprachen
),
SprachenKennenTrollisch(
"Sprachen kennen Trollisch",
TalentGroupType.Sprachen
),
SprachenKennenRssahh(
"Sprachen kennen Rssahh",
TalentGroupType.Sprachen
),
SprachenKennenGrolmisch(
"Sprachen kennen Grolmisch",
TalentGroupType.Sprachen
),
SprachenKennenKoboldisch(
"Sprachen kennen Koboldisch",
TalentGroupType.Sprachen
),
SprachenKennenDrachisch(
"Sprachen kennen Drachisch",
TalentGroupType.Sprachen
),
SprachenKennenZhayad(
"Sprachen kennen Zhayad",
TalentGroupType.Sprachen
),
SprachenKennenAtak(
"Sprachen kennen Atak",
TalentGroupType.Sprachen
),
SprachenKennenFüchsisch(
"Sprachen kennen Füchsisch",
TalentGroupType.Sprachen
),
SprachenKennenMahrisch(
"Sprachen kennen Mahrisch",
TalentGroupType.Sprachen
),
SprachenKennenRissoal(
"Sprachen kennen Rissoal",
TalentGroupType.Sprachen
),
SprachenKennenMolochisch(
"Sprachen kennen Molochisch",
TalentGroupType.Sprachen
),
SprachenKennenNeckergesang(
"Sprachen kennen Neckergesang",
TalentGroupType.Sprachen
),
SprachenKennenZLit(
"Sprachen kennen Z'Lit",
TalentGroupType.Sprachen
),
SprachenKennenWudu(
"Sprachen kennen Wudu",
TalentGroupType.Sprachen
),
Anatomie(
"Anatomie",
TalentGroupType.Wissen
),
Baukunst(
"Baukunst",
TalentGroupType.Wissen
),
BrettKartenspiel(
"Brett-/Kartenspiel",
TalentGroupType.Wissen
),
Geografie(
"Geografie",
TalentGroupType.Wissen
),
Geschichtswissen(
"Geschichtswissen",
TalentGroupType.Wissen
),
Gesteinskunde(
"Gesteinskunde",
TalentGroupType.Wissen
),
GötterUndKulte(
"Götter und Kulte",
TalentGroupType.Wissen
),
Heraldik(
"Heraldik",
TalentGroupType.Wissen
),
Hüttenkunde(
"Hüttenkunde",
TalentGroupType.Wissen
),
Schiffbau(
"Schiffbau",
TalentGroupType.Wissen
),
Kriegskunst(
"Kriegskunst",
TalentGroupType.Wissen
),
Kryptographie(
"Kryptographie",
TalentGroupType.Wissen
),
Magiekunde(
"Magiekunde",
TalentGroupType.Wissen
),
Mechanik(
"Mechanik",
TalentGroupType.Wissen
),
Pflanzenkunde(
"Pflanzenkunde",
TalentGroupType.Wissen
),
Philosophie(
"Philosophie",
TalentGroupType.Wissen
),
Rechnen(
"Rechnen",
TalentGroupType.Wissen
),
Rechtskunde(
"Rechtskunde",
TalentGroupType.Wissen
),
SagenUndLegenden(
"Sagen und Legenden",
TalentGroupType.Wissen
),
Schätzen(
"Schätzen",
TalentGroupType.Wissen
),
Sprachenkunde(
"Sprachenkunde",
TalentGroupType.Wissen
),
Staatskunst(
"Staatskunst",
TalentGroupType.Wissen
),
Sternkunde(
"Sternkunde",
TalentGroupType.Wissen
),
Tierkunde(
"Tierkunde",
TalentGroupType.Wissen
),
Abrichten(
"Abrichten",
TalentGroupType.Handwerk
),
Ackerbau(
"Ackerbau",
TalentGroupType.Handwerk
),
Alchimie(
"Alchimie",
TalentGroupType.Handwerk
),
Bergbau(
"Bergbau",
TalentGroupType.Handwerk
),
Bogenbau(
"Bogenbau",
TalentGroupType.Handwerk
),
BooteFahren(
"Boote fahren",
TalentGroupType.Handwerk
),
Brauer(
"Brauer",
TalentGroupType.Handwerk
),
Drucker(
"Drucker",
TalentGroupType.Handwerk
),
FahrzeugLenken(
"Fahrzeug lenken",
TalentGroupType.Handwerk
),
Falschspiel(
"Falschspiel",
TalentGroupType.Handwerk
),
Feinmechanik(
"Feinmechanik",
TalentGroupType.Handwerk
),
Feuersteinbearbeitung(
"Feuersteinbearbeitung",
TalentGroupType.Handwerk
),
Fleischer(
"Fleischer",
TalentGroupType.Handwerk
),
GerberKürschner(
"Gerber/Kürschner",
TalentGroupType.Handwerk
),
Glaskunst(
"Glaskunst",
TalentGroupType.Handwerk
),
Grobschmied(
"Grobschmied",
TalentGroupType.Handwerk
),
Handel(
"Handel",
TalentGroupType.Handwerk
),
Hauswirtschaft(
"Hauswirtschaft",
TalentGroupType.Handwerk
),
HeilkundeGift(
"Heilkunde: Gift",
TalentGroupType.Handwerk
),
HeilkundeKrankheiten(
"Heilkunde: Krankheiten",
TalentGroupType.Handwerk
),
HeilkundeSeele(
"Heilkunde: Seele",
TalentGroupType.Handwerk
),
HeilkundeWunden(
"Heilkunde: Wunden",
TalentGroupType.Handwerk
),
Kartographie(
"Kartographie",
TalentGroupType.Handwerk
),
HundeschlittenFahren(
"Hundeschlitten fahren",
TalentGroupType.Handwerk
),
EisseglerFahren(
"Eissegler fahren",
TalentGroupType.Handwerk
),
Kapellmeister(
"Kapellmeister",
TalentGroupType.Handwerk
),
Steuermann(
"Steuermann",
TalentGroupType.Handwerk
),
Holzbearbeitung(
"Holzbearbeitung",
TalentGroupType.Handwerk
),
Instrumentenbauer(
"Instrumentenbauer",
TalentGroupType.Handwerk
),
Kartografie(
"Kartografie",
TalentGroupType.Handwerk
),
Kochen(
"Kochen",
TalentGroupType.Handwerk
),
Kristallzucht(
"Kristallzucht",
TalentGroupType.Handwerk
),
Lederarbeiten(
"Lederarbeiten",
TalentGroupType.Handwerk
),
MalenZeichnen(
"Malen/Zeichnen",
TalentGroupType.Handwerk
),
Maurer(
"Maurer",
TalentGroupType.Handwerk
),
Metallguss(
"Metallguss",
TalentGroupType.Handwerk
),
Musizieren(
"Musizieren",
TalentGroupType.Handwerk
),
SchlösserKnacken(
"Schlösser knacken",
TalentGroupType.Handwerk
),
SchnapsBrennen(
"Schnaps brennen",
TalentGroupType.Handwerk
),
Schneidern(
"Schneidern",
TalentGroupType.Handwerk
),
Seefahrt(
"Seefahrt",
TalentGroupType.Handwerk
),
Seiler(
"Seiler",
TalentGroupType.Handwerk
),
Steinmetz(
"Steinmetz",
TalentGroupType.Handwerk
),
SteinschneiderJuwelier(
"Steinschneider/Juwelier",
TalentGroupType.Handwerk
),
Stellmacher(
"Stellmacher",
TalentGroupType.Handwerk
),
StoffeFärben(
"Stoffe färben",
TalentGroupType.Handwerk
),
Tätowieren(
"Tätowieren",
TalentGroupType.Handwerk
),
Töpfern(
"Töpfern",
TalentGroupType.Handwerk
),
Viehzucht(
"Viehzucht",
TalentGroupType.Handwerk
),
Webkunst(
"Webkunst",
TalentGroupType.Handwerk
),
Winzer(
"Winzer",
TalentGroupType.Handwerk
),
Zimmermann(
"Zimmermann",
TalentGroupType.Handwerk
),
Gefahreninstinkt(
"Gefahreninstinkt",
TalentGroupType.Gaben
),
Zwergennase(
"Zwergennase",
TalentGroupType.Gaben
),
GeisterRufen(
"Geister rufen",
TalentGroupType.Gaben
),
GeisterBannen(
"Geister bannen",
TalentGroupType.Gaben
),
GeisterBinden(
"Geister binden",
TalentGroupType.Gaben
),
GeisterAufnehmen(
"Geister aufnehmen",
TalentGroupType.Gaben
),
PirschUndAnsitzjagd(
"Pirsch- und Ansitzjagd",
TalentGroupType.Meta
),
NahrungSammeln(
"Nahrung sammeln",
TalentGroupType.Meta
),
Kräutersuchen(
"Kräutersuchen",
TalentGroupType.Meta
),
WacheHalten(
"Wache halten",
TalentGroupType.Meta
),
Ritualkenntnis(
"Ritualkenntnis",
TalentGroupType.Gaben
),
RitualkenntnisAchazSchamane(
"Ritualkenntnis: Achaz-Schamane",
TalentGroupType.Gaben
),
RitualkenntnisAlchimist(
"Ritualkenntnis: Alchimist",
TalentGroupType.Gaben
),
RitualkenntnisAlhanisch(
"Ritualkenntnis: Alhanisch",
TalentGroupType.Gaben
),
RitualkenntnisDerwisch(
"Ritualkenntnis: Derwisch",
TalentGroupType.Gaben
),
RitualkenntnisDruide(
"Ritualkenntnis: Druide",
TalentGroupType.Gaben
),
RitualkenntnisDruidischGeodisch(
"Ritualkenntnis: Druidisch-Geodisch",
TalentGroupType.Gaben
),
RitualkenntnisDurroDûn(
"Ritualkenntnis: Durro-Dûn",
TalentGroupType.Gaben
),
RitualkenntnisFerkinaSchamane(
"Ritualkenntnis: Ferkina-Schamane",
TalentGroupType.Gaben
),
RitualkenntnisGjalskerSchamane(
"Ritualkenntnis: Gjalsker-Schamane",
TalentGroupType.Gaben
),
RitualkenntnisGoblinSchamanin(
"Ritualkenntnis: Goblin-Schamanin",
TalentGroupType.Gaben
),
RitualkenntnisGeode(
"Ritualkenntnis: Geode",
TalentGroupType.Gaben
),
RitualkenntnisGildenmagie(
"Ritualkenntnis: Gildenmagie",
TalentGroupType.Gaben
),
RitualkenntnisGüldenländisch(
"Ritualkenntnis: Güldenländisch",
TalentGroupType.Gaben
),
RitualkenntnisGrolmisch(
"Ritualkenntnis: Grolmisch",
TalentGroupType.Gaben
),
RitualkenntnisHexe(
"Ritualkenntnis: Hexe",
TalentGroupType.Gaben
),
RitualkenntnisKophtanisch(
"Ritualkenntnis: Kophtanisch",
TalentGroupType.Gaben
),
RitualkenntnisKristallomantie(
"Ritualkenntnis: Kristallomantie",
TalentGroupType.Gaben
),
RitualkenntnisMudramulisch(
"Ritualkenntnis: Mudramulisch",
TalentGroupType.Gaben
),
RitualkenntnisNivesenSchamane(
"Ritualkenntnis: Nivesen-Schamane",
TalentGroupType.Gaben
),
RitualkenntnisOrkSchamane(
"Ritualkenntnis: Ork-Schamane",
TalentGroupType.Gaben
),
RitualkenntnisRunenzauberei(
"Ritualkenntnis: Runenzauberei",
TalentGroupType.Gaben
),
RitualkenntnisSatuarisch(
"Ritualkenntnis: Satuarisch",
TalentGroupType.Gaben
),
RitualkenntnisScharlatan(
"Ritualkenntnis: Scharlatan",
TalentGroupType.Gaben
),
RitualkenntnisTapasuul(
"Ritualkenntnis: Tapasuul",
TalentGroupType.Gaben
),
RitualkenntnisTrollzackerSchamane(
"Ritualkenntnis: Trollzacker-Schamane",
TalentGroupType.Gaben
),
RitualkenntnisWaldmenschenSchamane(
"Ritualkenntnis: Waldmenschen-Schamane",
TalentGroupType.Gaben
),
RitualkenntnisWaldmenschenSchamaneUtulus(
"Ritualkenntnis: Waldmenschen-Schamane (Utulus)",
TalentGroupType.Gaben
),
RitualkenntnisWaldmenschenSchamaneTocamuyac(
"Ritualkenntnis: Waldmenschen-Schamane (Tocamuyac)",
TalentGroupType.Gaben
),
RitualkenntnisZaubertänzer(
"Ritualkenntnis: Zaubertänzer",
TalentGroupType.Gaben
),
RitualkenntnisZaubertänzerHazaqi(
"Ritualkenntnis: Zaubertänzer (Hazaqi)",
TalentGroupType.Gaben
),
RitualkenntnisZaubertänzerMajuna(
"Ritualkenntnis: Zaubertänzer (Majuna)",
TalentGroupType.Gaben
),
RitualkenntnisZaubertänzernovadischeSharisad(
"Ritualkenntnis: Zaubertänzer (novadische Sharisad)",
TalentGroupType.Gaben
),
RitualkenntnisZaubertänzertulamidischeSharisad(
"Ritualkenntnis: Zaubertänzer (tulamidische Sharisad)",
TalentGroupType.Gaben
),
RitualkenntnisZibilja(
"Ritualkenntnis: Zibilja",
TalentGroupType.Gaben
),
RitualkenntnisLeonir(
"Ritualkenntnis: Leonir",
TalentGroupType.Gaben
),
Liturgiekenntnis(
"Liturgiekenntnis",
TalentGroupType.Gaben
),
LiturgiekenntnisAngrosch(
"Liturgiekenntnis (Angrosch)",
TalentGroupType.Gaben
),
LiturgiekenntnisAves(
"Liturgiekenntnis (Aves)",
TalentGroupType.Gaben
),
LiturgiekenntnisBoron(
"Liturgiekenntnis (Boron)",
TalentGroupType.Gaben
),
LiturgiekenntnisEfferd(
"Liturgiekenntnis (Efferd)",
TalentGroupType.Gaben
),
LiturgiekenntnisFirun(
"Liturgiekenntnis (Firun)",
TalentGroupType.Gaben
),
LiturgiekenntnisGravesh(
"Liturgiekenntnis (Gravesh)",
TalentGroupType.Gaben
),
LiturgiekenntnisHRanga(
"Liturgiekenntnis (H'Ranga)",
TalentGroupType.Gaben
),
LiturgiekenntnisHSzint(
"Liturgiekenntnis (H'Szint)",
TalentGroupType.Gaben
),
LiturgiekenntnisHesinde(
"Liturgiekenntnis (Hesinde)",
TalentGroupType.Gaben
),
LiturgiekenntnisHimmelswölfe(
"Liturgiekenntnis (Himmelswölfe)",
TalentGroupType.Gaben
),
LiturgiekenntnisIfirn(
"Liturgiekenntnis (Ifirn)",
TalentGroupType.Gaben
),
LiturgiekenntnisIngerimm(
"Liturgiekenntnis (Ingerimm)",
TalentGroupType.Gaben
),
LiturgiekenntnisKamaluq(
"Liturgiekenntnis (Kamaluq)",
TalentGroupType.Gaben
),
LiturgiekenntnisKor(
"Liturgiekenntnis (Kor)",
TalentGroupType.Gaben
),
LiturgiekenntnisNandus(
"Liturgiekenntnis (Nandus)",
TalentGroupType.Gaben
),
LiturgiekenntnisNamenloser(
"Liturgiekenntnis (Namenloser)",
TalentGroupType.Gaben
),
LiturgiekenntnisPeraine(
"Liturgiekenntnis (Peraine)",
TalentGroupType.Gaben
),
LiturgiekenntnisPhex(
"Liturgiekenntnis (Phex)",
TalentGroupType.Gaben
),
LiturgiekenntnisPraios(
"Liturgiekenntnis (Praios)",
TalentGroupType.Gaben
),
LiturgiekenntnisRahja(
"Liturgiekenntnis (Rahja)",
TalentGroupType.Gaben
),
LiturgiekenntnisRondra(
"Liturgiekenntnis (Rondra)",
TalentGroupType.Gaben
),
LiturgiekenntnisSwafnir(
"Liturgiekenntnis (Swafnir)",
TalentGroupType.Gaben
),
LiturgiekenntnisTairach(
"Liturgiekenntnis (Tairach)",
TalentGroupType.Gaben
),
LiturgiekenntnisTravia(
"Liturgiekenntnis (Travia)",
TalentGroupType.Gaben
),
LiturgiekenntnisTsa(
"Liturgiekenntnis (Tsa)",
TalentGroupType.Gaben
),
LiturgiekenntnisZsahh(
"Liturgiekenntnis (Zsahh)",
TalentGroupType.Gaben
),
Prophezeien(
"Prophezeien",
TalentGroupType.Gaben
),
Geräuschhexerei(
"Geräuschhexerei",
TalentGroupType.Gaben
),
Magiegespür(
"Magiegespür",
TalentGroupType.Gaben
),
Tierempathiespeziell(
"Tierempathie (speziell)",
TalentGroupType.Gaben
),
Tierempathiealle(
"Tierempathie (alle)",
TalentGroupType.Gaben
),
Empathie(
"Empathie",
TalentGroupType.Gaben
),
Immanspiel(
"Immanspiel",
TalentGroupType.Körperlich
);
private static final String DEPRECATED_WACHE_NAME = "Wache";
private static final String DEPRECATED_KRÄUTERSUCHE_NAME1 = "Kräutersuchen";
private static final String DEPRECATED_KRÄUTERSUCHE_NAME2 = "Kräuter Suchen";
private static final String DEPRECATED_KRÄUTERSUCHE_NAME3 = "Kräutersuche";
private static final String DEPRECATED_PIRSCH_ANSITZ_JAGD = "PirschAnsitzJagd ";
private TalentGroupType groupType;
private Integer be;
private String xmlName;
TalentType(String name, TalentGroupType type) {
this(name, type, null);
}
TalentType(String name, TalentGroupType type, Integer be) {
this.be = be;
this.xmlName = name;
this.groupType = type;
}
public String xmlName() {
if (xmlName != null)
return xmlName;
else
return name();
}
public TalentGroupType type() {
return groupType;
}
public Integer getBe() {
return be;
}
public static TalentType byValue(String type) {
if (DEPRECATED_KRÄUTERSUCHE_NAME1.equalsIgnoreCase(type)
|| DEPRECATED_KRÄUTERSUCHE_NAME2.equalsIgnoreCase(type)
|| DEPRECATED_KRÄUTERSUCHE_NAME3.equalsIgnoreCase(type)) {
return TalentType.Kräutersuchen;
} else if (DEPRECATED_WACHE_NAME.equalsIgnoreCase(type)) {
return TalentType.WacheHalten;
} else if (DEPRECATED_PIRSCH_ANSITZ_JAGD.equalsIgnoreCase(type)) {
return TalentType.PirschUndAnsitzjagd;
} else {
return TalentType.valueOf(type);
}
}
public static TalentType byXmlName(String code) {
if (code == null)
return null;
for (TalentType attr : TalentType.values()) {
if (attr.xmlName().equals(code)) {
return attr;
}
}
return null;
}
}
|
apache-2.0
|
google-research/google-research
|
uq_benchmark_2019/imagenet/hparams_lib.py
|
3427
|
# coding=utf-8
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python2, python3
"""Library of tuned hparams and functions for converting to ModelOptions."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
from uq_benchmark_2019.imagenet import data_lib
from uq_benchmark_2019.imagenet import learning_rate_lib
from uq_benchmark_2019.imagenet import models_lib
HParams = collections.namedtuple(
'ImagenetHparams', ['batch_size', 'init_learning_rate', 'dropout_rate',
'init_prior_scale_mean', 'init_prior_scale_std',
'std_prior_scale'])
_HPS_VANILLA = HParams(
1024, learning_rate_lib.BASE_LEARNING_RATE, 0, None, None, None)
_HPS_DROPOUT = HParams(
1024, learning_rate_lib.BASE_LEARNING_RATE, 0.1, None, None, None)
_HPS_LL_SVI = HParams(
1024,
learning_rate_lib.BASE_LEARNING_RATE,
0,
init_prior_scale_mean=-2.73995,
init_prior_scale_std=-3.61795,
std_prior_scale=4.85503)
_HPS_SVI = HParams(
1024,
learning_rate_lib.BASE_LEARNING_RATE,
0,
init_prior_scale_mean=-1.9994,
init_prior_scale_std=-0.30840,
std_prior_scale=3.4210)
_HPS_LL_DROPOUT = HParams(
1024, learning_rate_lib.BASE_LEARNING_RATE, 0.25, None, None, None)
HPS_DICT = dict(
vanilla=_HPS_VANILLA,
dropout=_HPS_DROPOUT,
dropout_nofirst=_HPS_DROPOUT,
svi=_HPS_SVI,
ll_dropout=_HPS_LL_DROPOUT,
ll_svi=_HPS_LL_SVI,
)
def model_opts_from_hparams(hps, method, use_tpu, tpu, fake_training=False):
"""Returns a ModelOptions instance using given hyperparameters."""
dropout_rate = hps.dropout_rate if hasattr(hps, 'dropout_rate') else 0
variational = method in ('svi', 'll_svi')
model_opts = models_lib.ModelOptions(
# Modeling params
method=method,
# Data params.
image_shape=data_lib.IMAGENET_SHAPE,
num_classes=data_lib.IMAGENET_NUM_CLASSES,
examples_per_epoch=data_lib.APPROX_IMAGENET_TRAINING_IMAGES,
validation_size=data_lib.IMAGENET_VALIDATION_IMAGES,
use_bfloat16=True,
# SGD params
train_epochs=90,
batch_size=hps.batch_size,
dropout_rate=dropout_rate,
init_learning_rate=hps.init_learning_rate,
# Variational params
std_prior_scale=hps.std_prior_scale if variational else None,
init_prior_scale_mean=hps.init_prior_scale_mean if variational else None,
init_prior_scale_std=hps.init_prior_scale_std if variational else None,
num_updates=data_lib.APPROX_IMAGENET_TRAINING_IMAGES,
# TPU params
use_tpu=use_tpu,
tpu=tpu,
num_cores=8,
# GPU params
num_gpus=8,
num_replicas=1,
)
if fake_training:
model_opts.batch_size = 32
model_opts.examples_per_epoch = 256
model_opts.train_epochs = 1
return model_opts
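# Illustrative usage (hypothetical call, argument values are examples only):
#   opts = model_opts_from_hparams(HPS_DICT['dropout'], 'dropout', use_tpu=False, tpu=None)
#   (HPS_DICT['dropout'] carries dropout_rate=0.1, which flows into ModelOptions above.)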
|
apache-2.0
|
kasundezoysa/senze
|
utils/RSAEncryption.java
|
3644
|
/*
Kasun De Zoysa @ UCSC
Compile:
javac -cp ../BC/bcprov-jdk16-146.jar RSAEncryption.java
Execute:
java -cp ../BC/bcprov-jdk16-146.jar:. RSAEncryption
*/
import java.security.Security;
import java.security.Key;
import java.security.KeyPair;
import java.security.KeyPairGenerator;
import java.security.SecureRandom;
import javax.crypto.Cipher;
import java.util.Formatter;
import java.util.Base64;
import java.math.BigInteger;
import java.security.KeyFactory;
import java.security.interfaces.RSAPrivateKey;
import java.security.interfaces.RSAPublicKey;
import java.security.spec.RSAPrivateKeySpec;
import java.security.spec.RSAPublicKeySpec;
//import org.bouncycastle.jce.provider.BouncyCastleProvider;
public class RSAEncryption {
public static void main(String[] args) throws Exception {
//Security.addProvider(new BouncyCastleProvider());
String input = "Hello Kasun ..";
Cipher cipher = Cipher.getInstance("RSA/None/OAEPWithSHA1AndMGF1Padding");
/*
// create the keys
KeyPairGenerator generator = KeyPairGenerator.getInstance("RSA");
generator.initialize(1024,new SecureRandom());
KeyPair pair = generator.generateKeyPair();
Key pubKey = pair.getPublic();
Key privKey = pair.getPrivate();
*/
String pubStr=new String("MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQCyFwg3JilTgwvXQvdqidarrN7X"
+"llgLU132sMA6QXTalFonXNLB2tmJf9LRALpdEaXB368REEKCsxKGm9zt0ayhmGjg"
+"x5DI8s1bCRb7J5FCySoWJ9uYss06/2dFvfsc0oQ3FAVAJej/gCLbBamGwaO0md1Y"
+"2gQk75c0Mv8oS+OeiQIDAQAB");
byte[] decoded1 = Base64.getDecoder().decode(pubStr);
String privkeyS= new String("MIICXAIBAAKBgQCyFwg3JilTgwvXQvdqidarrN7XllgLU132sMA6QXTalFonXNLB"
+"2tmJf9LRALpdEaXB368REEKCsxKGm9zt0ayhmGjgx5DI8s1bCRb7J5FCySoWJ9uY"
+"ss06/2dFvfsc0oQ3FAVAJej/gCLbBamGwaO0md1Y2gQk75c0Mv8oS+OeiQIDAQAB"
+"AoGBAKTL5WFLIfDSklF2+YaP2KNyS5/J0t1gHtJZyvfdfGmL4EUOg0S55JV1QDsB"
+"ZnMbEnzuJY0vs6xIUvtXHcDARvKSUP/s2Rt3b1Ex6wjCrm3vAT57sOiWQs8z8WM6"
+"LJ2NckF1vCt+i6HoK4jQaWOx7NH6t0LehsFCKLfrDiIVaSNRAkEAyeOx97UghedC"
+"SmSv+dm26Cy4uzGv/YAILy4Jvlq/Uk/r/v1kHumof8S6Ca6WqlUv3Ood/b4EGYAg"
+"lhzPMi0J7QJBAOHSYGRAyCESnsHT41YKyaBDkkM51BP7vhiHC24lBkr+6UmhJ5Kc"
+"nEcfgHuThA7666ew0XKFBJZ5tf1mRFVo440CQBCTjng9Ofdkno/HJp/IHXmAuoY8"
+"NSwGSCW/jPNBNjZG86STH5ZeLwSWnYPP/vTrW6uy2VWNNX72gzZwFR8UjZ0CQG0S"
+"93oVDFVlMAeBN/JsXX0qhjAwc25/jw8701qNSZ/ZxobI71tSh+2WmrGVzBiMPF0P"
+"++qrs06XVT8jMWhrtFUCQCq6F8Ex5IF8EFqSOaa9FjBGYRer5564qmGlDba2qHVf"
+"TPaKn5GfJtjNK3DK7iKn4DZ2Ltn3dc2D2CShVoi/nCU=");
byte[] decoded2 = Base64.getDecoder().decode(privkeyS);
String publicExponent = "65537";
// BigInteger(int signum, byte[] magnitude) requires signum in {-1, 0, 1}; the decoded key
// bytes are used here as a positive magnitude and the public exponent is parsed from its
// decimal form. Note that the Base64 strings above are DER-encoded key structures, so for a
// fully consistent key pair the commented-out KeyPairGenerator block is the safer route.
RSAPublicKeySpec publicKeySpec = new RSAPublicKeySpec(new BigInteger(1, decoded1), new BigInteger(publicExponent));
RSAPrivateKeySpec privKeySpec = new RSAPrivateKeySpec(new BigInteger(1, decoded2), new BigInteger(publicExponent));
KeyFactory publicKeyFactory = KeyFactory.getInstance("RSA");
KeyFactory privateKeyFactory = KeyFactory.getInstance("RSA");
Key pubKey = publicKeyFactory.generatePublic(publicKeySpec);
Key privKey = privateKeyFactory.generatePrivate(privKeySpec);
// encryption step
cipher.init(Cipher.ENCRYPT_MODE,pubKey);
byte[] cipherText = cipher.doFinal(input.getBytes());
System.out.println("Cipher : " +byteArray2Hex(cipherText));
//decryption step
cipher.init(Cipher.DECRYPT_MODE,privKey);
byte[] plainText = cipher.doFinal(cipherText);
System.out.println("Plain : " +new String(plainText));
}
private static String byteArray2Hex(byte[] hash) {
Formatter formatter = new Formatter();
for (byte b : hash) formatter.format("%02x", b);
return formatter.toString();
}
}
|
apache-2.0
|
Maccimo/commons-bcel
|
src/examples/Mini/Variable.java
|
2309
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package Mini;
import org.apache.bcel.generic.LocalVariableGen;
/**
* Represents a variable declared in a LET expression or a FUN declaration.
*
* @version $Id$
* @author <A HREF="mailto:m.dahm@gmx.de">M. Dahm</A>
*/
public class Variable implements EnvEntry {
private ASTIdent name; // Reference to the original declaration
private boolean reserved; // Is a key word?
private int line, column; // Extracted from name.getToken()
private String var_name; // Short for name.getName()
private LocalVariableGen local_var; // local var associated with this variable
public Variable(ASTIdent name) {
this(name, false);
}
public Variable(ASTIdent name, boolean reserved) {
this.name = name;
this.reserved = reserved;
var_name = name.getName();
line = name.getLine();
column = name.getColumn();
}
@Override
public String toString() {
if(!reserved) {
return var_name + " declared at line " + line + ", column " + column;
} else {
return var_name + " <reserved key word>";
}
}
public ASTIdent getName() { return name; }
public String getHashKey() { return var_name; }
public int getLine() { return line; }
public int getColumn() { return column; }
public int getType() { return name.getType(); }
void setLocalVariable(LocalVariableGen local_var) {
this.local_var = local_var;
}
LocalVariableGen getLocalVariable() { return local_var; }
}
|
apache-2.0
|
sebrandon1/envoy
|
test/test_common/utility.cc
|
7399
|
#include "utility.h"
#include <dirent.h>
#include <sys/stat.h>
#include <unistd.h>
#include <cstdint>
#include <iostream>
#include <list>
#include <stdexcept>
#include <string>
#include <vector>
#include "envoy/buffer/buffer.h"
#include "envoy/http/codec.h"
#include "common/common/empty_string.h"
#include "common/common/fmt.h"
#include "common/common/lock_guard.h"
#include "common/common/utility.h"
#include "common/config/bootstrap_json.h"
#include "common/json/json_loader.h"
#include "common/network/address_impl.h"
#include "common/network/utility.h"
#include "test/test_common/printers.h"
#include "absl/strings/string_view.h"
#include "gtest/gtest.h"
using testing::GTEST_FLAG(random_seed);
namespace Envoy {
static const int32_t SEED = std::chrono::duration_cast<std::chrono::nanoseconds>(
std::chrono::system_clock::now().time_since_epoch())
.count();
TestRandomGenerator::TestRandomGenerator()
: seed_(GTEST_FLAG(random_seed) == 0 ? SEED : GTEST_FLAG(random_seed)), generator_(seed_) {
std::cerr << "TestRandomGenerator running with seed " << seed_ << "\n";
}
uint64_t TestRandomGenerator::random() { return generator_(); }
bool TestUtility::buffersEqual(const Buffer::Instance& lhs, const Buffer::Instance& rhs) {
if (lhs.length() != rhs.length()) {
return false;
}
uint64_t lhs_num_slices = lhs.getRawSlices(nullptr, 0);
uint64_t rhs_num_slices = rhs.getRawSlices(nullptr, 0);
if (lhs_num_slices != rhs_num_slices) {
return false;
}
Buffer::RawSlice lhs_slices[lhs_num_slices];
lhs.getRawSlices(lhs_slices, lhs_num_slices);
Buffer::RawSlice rhs_slices[rhs_num_slices];
rhs.getRawSlices(rhs_slices, rhs_num_slices);
for (size_t i = 0; i < lhs_num_slices; i++) {
if (lhs_slices[i].len_ != rhs_slices[i].len_) {
return false;
}
if (0 != memcmp(lhs_slices[i].mem_, rhs_slices[i].mem_, lhs_slices[i].len_)) {
return false;
}
}
return true;
}
std::string TestUtility::bufferToString(const Buffer::Instance& buffer) {
std::string output;
uint64_t num_slices = buffer.getRawSlices(nullptr, 0);
Buffer::RawSlice slices[num_slices];
buffer.getRawSlices(slices, num_slices);
for (Buffer::RawSlice& slice : slices) {
output.append(static_cast<const char*>(slice.mem_), slice.len_);
}
return output;
}
void TestUtility::feedBufferWithRandomCharacters(Buffer::Instance& buffer, uint64_t n_char,
uint64_t seed) {
const std::string sample = "Neque porro quisquam est qui dolorem ipsum..";
std::mt19937 generate(seed);
std::uniform_int_distribution<> distribute(1, sample.length() - 1);
std::string str{};
for (uint64_t n = 0; n < n_char; ++n) {
str += sample.at(distribute(generate));
}
buffer.add(str);
}
Stats::CounterSharedPtr TestUtility::findCounter(Stats::Store& store, const std::string& name) {
for (auto counter : store.counters()) {
if (counter->name() == name) {
return counter;
}
}
return nullptr;
}
Stats::GaugeSharedPtr TestUtility::findGauge(Stats::Store& store, const std::string& name) {
for (auto gauge : store.gauges()) {
if (gauge->name() == name) {
return gauge;
}
}
return nullptr;
}
std::list<Network::Address::InstanceConstSharedPtr>
TestUtility::makeDnsResponse(const std::list<std::string>& addresses) {
std::list<Network::Address::InstanceConstSharedPtr> ret;
for (const auto& address : addresses) {
ret.emplace_back(Network::Utility::parseInternetAddress(address));
}
return ret;
}
std::vector<std::string> TestUtility::listFiles(const std::string& path, bool recursive) {
DIR* dir = opendir(path.c_str());
if (!dir) {
throw std::runtime_error(fmt::format("Directory not found '{}'", path));
}
std::vector<std::string> file_names;
dirent* entry;
while ((entry = readdir(dir)) != nullptr) {
std::string file_name = fmt::format("{}/{}", path, std::string(entry->d_name));
struct stat stat_result;
int rc = ::stat(file_name.c_str(), &stat_result);
EXPECT_EQ(rc, 0);
if (recursive && S_ISDIR(stat_result.st_mode) && std::string(entry->d_name) != "." &&
std::string(entry->d_name) != "..") {
std::vector<std::string> more_file_names = listFiles(file_name, recursive);
file_names.insert(file_names.end(), more_file_names.begin(), more_file_names.end());
continue;
} else if (S_ISDIR(stat_result.st_mode)) {
continue;
}
file_names.push_back(file_name);
}
closedir(dir);
return file_names;
}
envoy::config::bootstrap::v2::Bootstrap
TestUtility::parseBootstrapFromJson(const std::string& json_string) {
envoy::config::bootstrap::v2::Bootstrap bootstrap;
auto json_object_ptr = Json::Factory::loadFromString(json_string);
Config::BootstrapJson::translateBootstrap(*json_object_ptr, bootstrap);
return bootstrap;
}
std::vector<std::string> TestUtility::split(const std::string& source, char split) {
return TestUtility::split(source, std::string{split});
}
std::vector<std::string> TestUtility::split(const std::string& source, const std::string& split,
bool keep_empty_string) {
std::vector<std::string> ret;
const auto tokens_sv = StringUtil::splitToken(source, split, keep_empty_string);
std::transform(tokens_sv.begin(), tokens_sv.end(), std::back_inserter(ret),
[](absl::string_view sv) { return std::string(sv); });
return ret;
}
void ConditionalInitializer::setReady() {
Thread::LockGuard lock(mutex_);
EXPECT_FALSE(ready_);
ready_ = true;
cv_.notifyAll();
}
void ConditionalInitializer::waitReady() {
Thread::LockGuard lock(mutex_);
if (ready_) {
ready_ = false;
return;
}
cv_.wait(mutex_);
EXPECT_TRUE(ready_);
ready_ = false;
}
ScopedFdCloser::ScopedFdCloser(int fd) : fd_(fd) {}
ScopedFdCloser::~ScopedFdCloser() { ::close(fd_); }
namespace Http {
// Satisfy linker
const uint32_t Http2Settings::DEFAULT_HPACK_TABLE_SIZE;
const uint32_t Http2Settings::DEFAULT_MAX_CONCURRENT_STREAMS;
const uint32_t Http2Settings::DEFAULT_INITIAL_STREAM_WINDOW_SIZE;
const uint32_t Http2Settings::DEFAULT_INITIAL_CONNECTION_WINDOW_SIZE;
const uint32_t Http2Settings::MIN_INITIAL_STREAM_WINDOW_SIZE;
TestHeaderMapImpl::TestHeaderMapImpl() : HeaderMapImpl() {}
TestHeaderMapImpl::TestHeaderMapImpl(
const std::initializer_list<std::pair<std::string, std::string>>& values)
: HeaderMapImpl() {
for (auto& value : values) {
addCopy(value.first, value.second);
}
}
TestHeaderMapImpl::TestHeaderMapImpl(const HeaderMap& rhs) : HeaderMapImpl(rhs) {}
void TestHeaderMapImpl::addCopy(const std::string& key, const std::string& value) {
addCopy(LowerCaseString(key), value);
}
void TestHeaderMapImpl::remove(const std::string& key) { remove(LowerCaseString(key)); }
std::string TestHeaderMapImpl::get_(const std::string& key) { return get_(LowerCaseString(key)); }
std::string TestHeaderMapImpl::get_(const LowerCaseString& key) {
const HeaderEntry* header = get(key);
if (!header) {
return EMPTY_STRING;
} else {
return header->value().c_str();
}
}
bool TestHeaderMapImpl::has(const std::string& key) { return get(LowerCaseString(key)) != nullptr; }
bool TestHeaderMapImpl::has(const LowerCaseString& key) { return get(key) != nullptr; }
} // namespace Http
} // namespace Envoy
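// Illustrative usage sketch (assumes Buffer::OwnedImpl from common/buffer/buffer_impl.h,
// which this file does not include itself):
//   Envoy::Buffer::OwnedImpl buffer("hello");
//   EXPECT_EQ("hello", Envoy::TestUtility::bufferToString(buffer));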
|
apache-2.0
|
dperaltac/bigdata-fingerprint
|
src/main/java/sci2s/mrfingerprint/PartialScoreLSS.java
|
8030
|
package sci2s.mrfingerprint;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.Arrays;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Map;
import org.apache.commons.lang.ArrayUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.ArrayPrimitiveWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.MapFile;
import org.apache.hadoop.io.Text;
import org.apache.zookeeper.common.IOUtils;
public class PartialScoreLSS implements PartialScore {
protected float [] bestsimilarities;
protected int templatesize;
public static final float MUP = 20; //!< Sigmoid parameter 1 in the computation of n_P
public static final float TAUP = 0.4f; //!< Sigmoid parameter 2 in the computation of n_P
public static final int MINNP = 4; //!< Minimum number of minutiae in the computation of n_P
public static final int MAXNP = 12; //!< Maximum number of minutiae in the computation of n_P
public PartialScoreLSS() {
bestsimilarities = new float[0];
templatesize = 0;
}
public PartialScoreLSS(PartialScoreLSS o) {
bestsimilarities = Arrays.copyOf(o.bestsimilarities, o.bestsimilarities.length);
templatesize = o.templatesize;
}
public PartialScoreLSS(float [] bs, int ts) {
bestsimilarities = Arrays.copyOf(bs, bs.length);
templatesize = ts;
}
public PartialScoreLSS(LocalStructure ls, LocalStructure [] als) {
computePartialScore(ls, als);
}
@Override
public PartialScoreLSS clone() {
PartialScoreLSS ps = new PartialScoreLSS(this);
return ps;
}
@Override
public String toString() {
return Arrays.toString(bestsimilarities);
}
public void readFields(DataInput in) throws IOException {
templatesize = in.readInt();
ArrayPrimitiveWritable auxaw = new ArrayPrimitiveWritable(bestsimilarities);
auxaw.readFields(in);
bestsimilarities = (float[]) auxaw.get();
}
public void write(DataOutput out) throws IOException {
out.writeInt(templatesize);
ArrayPrimitiveWritable auxaw = new ArrayPrimitiveWritable(bestsimilarities);
auxaw.write(out);
}
public static int computeNP(int n_A, int n_B)
{
return MINNP + (int)Math.round(Util.psi(Math.min(n_A,n_B), MUP, TAUP*(MAXNP-MINNP)));
}
public static int computeNP(int n_A)
{
return MINNP + (int)Math.round(Util.psi(n_A, MUP, TAUP*(MAXNP-MINNP)));
}
public float aggregateG(PartialScoreKey key, Iterable<GenericPSWrapper> values, Map<?,?> infomap) {
int tam = 0;
float sum = 0.0f;
Integer inputsize = (Integer) infomap.get(key.getFpidInput().toString());
if(inputsize == null) {
System.err.println("No infomap value found for key " + key.getFpidInput());
inputsize = 50;
}
TopN<Float> best = new TopN<Float>(computeNP(inputsize));
// Concatenate all similarity values
for(GenericPSWrapper ps : values) {
PartialScoreLSS psc = (PartialScoreLSS) ps.get();
for(float sl : psc.bestsimilarities)
best.add(sl);
tam += psc.templatesize;
}
int np = computeNP(inputsize, tam);
for(int i = 0; i < np; i++)
sum += best.poll();
return sum/np;
}
public void partialAggregateG(PartialScoreKey key, Iterable<GenericPSWrapper> values, Map<?,?> infomap) {
Integer inputsize = (Integer) infomap.get(key.getFpidInput().toString());
if(inputsize == null) {
System.err.println("No infomap value found for key " + key.getFpidInput());
inputsize = 50;
}
partialAggregateG(values, computeNP(inputsize));
}
public void partialAggregateG(Iterable<GenericPSWrapper> values) {
partialAggregateG(values, computeNP(50));
}
public void partialAggregateG(Iterable<GenericPSWrapper> values, int np) {
TopN<Float> best = new TopN<Float>(np);
PartialScoreLSS psc;
templatesize = 0;
// Aggregate all similarity values
for(GenericPSWrapper ps : values) {
psc = (PartialScoreLSS) ps.get();
for(float sl : psc.bestsimilarities)
if(sl > 0.0)
best.add(sl);
templatesize += psc.templatesize;
}
bestsimilarities = new float[best.size()];
for(int i = 0; i < bestsimilarities.length; ++i)
bestsimilarities[i] = best.poll();
}
public void saveInfoFile(LocalStructure[][] inputls, Configuration conf) {
String name = conf.get(Util.INFOFILENAMEPROPERTY, Util.INFOFILEDEFAULTNAME);
MapFile.Writer infofile = Util.createMapFileWriter(conf, name, Text.class, IntWritable.class);
Arrays.sort(inputls, new Comparator<LocalStructure[]>() {
public int compare(LocalStructure [] als1, LocalStructure [] als2) {
return als1[0].fpid.compareTo(als2[0].fpid);
}
});
for(LocalStructure [] ails : inputls) {
String fpid = ails[0].fpid;
try {
infofile.append(new Text(fpid), new IntWritable(ails.length));
} catch (IOException e) {
System.err.println("PartialScoreCylinder.saveInfoFile: unable to save fingerprint "
+ fpid + " in MapFile " + name + ": " + e.getMessage());
e.printStackTrace();
}
}
IOUtils.closeStream(infofile);
}
public Map<String, Integer> loadInfoFile(Configuration conf) {
String name = conf.get(Util.INFOFILENAMEPROPERTY, Util.INFOFILEDEFAULTNAME);
MapFile.Reader infofile = Util.createMapFileReader(conf, name);
Map<String, Integer> infomap = new HashMap<String,Integer>();
Text key = new Text();
IntWritable value = new IntWritable();
try {
while(infofile.next(key, value)) {
infomap.put(key.toString(), value.get());
}
} catch (Exception e) {
System.err.println("PartialScoreCylinder.loadInfoFile: unable to read fingerprint "
+ key + " in MapFile " + name + ": " + e.getMessage());
e.printStackTrace();
}
IOUtils.closeStream(infofile);
return infomap;
}
public <T extends LocalStructure> boolean isCompatibleLS(Class<T> lsclass) {
return (lsclass == LocalStructureCylinder.class);
}
public void computePartialScore(LocalStructure ls, LocalStructure[] als) {
TopN<Float> gamma = new TopN<Float>(computeNP(als.length));
float sl;
for(LocalStructure ils : als) {
try {
sl = ls.similarity(ils);
if(sl > 0.0)
gamma.add(sl);
} catch (LSException e) {
System.err.println(e.getMessage());
e.printStackTrace();
}
}
bestsimilarities = new float[gamma.size()];
for(int i = 0; i < bestsimilarities.length; ++i)
bestsimilarities[i] = gamma.poll();
templatesize = 1;
}
public Map<?, ?> loadCombinerInfoFile(Configuration conf) {
return loadInfoFile(conf);
}
public Map<?, ?> loadReducerInfoFile(Configuration conf) {
return loadInfoFile(conf);
}
public boolean isEmpty() {
return (bestsimilarities == null || bestsimilarities.length == 0 || bestsimilarities[0] <= 0);
}
public PartialScore aggregateSinglePS(PartialScore ps) {
PartialScoreLSS psc = (PartialScoreLSS) ps;
final int MAX_SIMS = computeNP(250);
if(psc.bestsimilarities.length + bestsimilarities.length > MAX_SIMS) {
TopN<Float> topn = new TopN<Float>(ArrayUtils.toObject(bestsimilarities), MAX_SIMS);
topn.addAll(ArrayUtils.toObject(psc.bestsimilarities));
bestsimilarities = ArrayUtils.toPrimitive(topn.toArray(new Float[0]));
}
else if(psc.bestsimilarities.length + bestsimilarities.length > 0) {
bestsimilarities = ArrayUtils.addAll(bestsimilarities, psc.bestsimilarities);
}
else {
bestsimilarities = new float[0];
}
templatesize = psc.templatesize + templatesize;
return this;
}
public void aggregateSingleValue(float value) {
int minpos = Util.minPosition(bestsimilarities);
if(bestsimilarities[minpos] < value)
bestsimilarities[minpos] = value;
}
public float computeScore(int inputsize) {
int np = computeNP(inputsize, templatesize);
float sum = 0.0f;
int np2 = Math.min(np, bestsimilarities.length);
if(np2 == 0)
return 0;
for(int i = 0; i < np2; i++)
sum += bestsimilarities[i];
return sum/np;
}
public float computeScore(String input_fpid, Map<?, ?> infomap) {
Integer inputsize = (Integer) infomap.get(input_fpid);
return computeScore(inputsize);
}
}
|
apache-2.0
|
tkunovsky/TXML
|
src/main/java/txml/interpreter/command/CommandGoBackToStartPaths.java
|
2309
|
/*
* Copyright 2016 Tomas Kunovsky.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package txml.interpreter.command;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import txml.interpreter.model.SymbolTableItem;
import txml.interpreter.model.SymbolTableType;
import txml.TXmlException;
import txml.interpreter.InstructionsInterpreter;
import txml.xpath.model.NodeGlobalSettings;
import txml.xpath.model.TNode;
import txml.xpath.model.TNodeList;
public class CommandGoBackToStartPaths implements Command {
@Override
public void execute(InstructionsInterpreter interpreter, String operand1, String operand2, String operand3, String result) throws SQLException, TXmlException {
if (interpreter.getConnection() == null) {
throw new TXmlException("No database connection");
}
NodeGlobalSettings settings = interpreter.getLastSettings();
SymbolTableItem tableItem = interpreter.getSymbolTable().getTable().get(result);
if (tableItem.getType() != SymbolTableType.TNODE_LIST) {
throw new TXmlException("Variable " + result + " has bad type.");
}
TNodeList tNodeList = (TNodeList) tableItem.getAttribute();
List<TNode> startTNodes = new ArrayList<>();
for (int i = 0; i < tNodeList.getLength(); i++) {
startTNodes.add(tNodeList.item(i).getLocalSettings().getSavedNodes().pop());
startTNodes.get(i).getLocalSettings().setSavedNodes(tNodeList.item(i).getLocalSettings().getSavedNodes());
}
TNodeList tNodeListNew = new TNodeList(settings, startTNodes);
interpreter.getSymbolTable().getTable().put(result, new SymbolTableItem(SymbolTableType.TNODE_LIST, tNodeListNew));
}
}
|
apache-2.0
|
RippleOSI/NE-HSCIE-Core
|
webapp/app/scripts/services/medication.js
|
481
|
'use strict';
angular.module('rippleDemonstrator')
.factory('MedicationService', function ($http) {
var all = function (patientId) {
return $http.get('/api/hscie/patients/' + patientId + '/medications');
};
var get = function (patientId, medicationId, subSource) {
return $http.get('/api/hscie/patients/' + patientId + '/medications/' + medicationId + '?subSource=' + subSource);
};
return {
get: get,
all: all
};
});
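// Illustrative usage sketch: a controller consuming this factory (the controller and
// scope names are hypothetical):
//   MedicationService.all(patientId).then(function (response) {
//     $scope.medications = response.data;
//   });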
|
apache-2.0
|
hustman/design-pattern
|
src/main/java/com/zeus/visitor/Monitor.java
|
249
|
package com.zeus.visitor;
/**
* @author xuxingbo
* @Date 2017/7/4
*/
public class Monitor implements ComputerPart {
@Override
public void accept(ComputerPartVisitor computerPartVisitor) {
computerPartVisitor.visit(this);
}
}
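// Illustrative usage sketch of the double dispatch performed by accept()
// (DisplayVisitor is a hypothetical ComputerPartVisitor implementation):
//   ComputerPart part = new Monitor();
//   part.accept(new DisplayVisitor()); // ends up calling DisplayVisitor.visit(Monitor)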
|
apache-2.0
|
mayuranjan/the-hawker-front-end
|
src/app/angular-bootstrap-md/free/modals/index.js
|
270
|
export { ModalBackdropComponent, ModalBackdropOptions } from './modalBackdropComponent';
export { ModalOptions } from './modalOptionsClass';
export { ModalDirective } from './modalDirective';
export { ModalModule } from './modalModule';
//# sourceMappingURL=index.js.map
|
apache-2.0
|
xerial/td-client-java
|
src/main/java/com/treasure_data/model/SetTableSchemaRequest.java
|
1662
|
//
// Java Client Library for Treasure Data Cloud
//
// Copyright (C) 2011 - 2013 Muga Nishizawa
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
package com.treasure_data.model;
import java.util.ArrayList;
import java.util.List;
import org.json.simple.JSONValue;
public class SetTableSchemaRequest extends AbstractRequest<TableSchema> {
public SetTableSchemaRequest(TableSchema schema) {
super(schema);
}
public TableSchema getTableSchema() {
return get();
}
public String getDatabaseName() {
return get().getDatabase().getName();
}
public String getTableName() {
return get().getTable().getName();
}
public String getJSONString() {
List<List<String>> ret = new ArrayList<List<String>>();
List<TableSchema.Pair> pairs = get().getPairsOfColsAndTypes();
for (TableSchema.Pair p : pairs) {
List<String> pair = new ArrayList<String>();
pair.add(p.getColumnName());
pair.add(p.getType().toString());
ret.add(pair);
}
return JSONValue.toJSONString(ret);
}
}
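// Illustrative output sketch: for a schema with columns (name:string, age:int),
// getJSONString() serializes the column/type pairs as a nested JSON array, e.g.
// [["name","string"],["age","int"]] (assuming the type's toString() renders the
// plain type name).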
|
apache-2.0
|
jonvestal/open-kilda
|
src-java/flowhs-topology/flowhs-storm-topology/src/main/java/org/openkilda/wfm/topology/flowhs/fsm/update/actions/EmitIngressRulesVerifyRequestsAction.java
|
2213
|
/* Copyright 2019 Telstra Open Source
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.openkilda.wfm.topology.flowhs.fsm.update.actions;
import org.openkilda.floodlight.api.request.factory.FlowSegmentRequestFactory;
import org.openkilda.wfm.topology.flowhs.fsm.common.actions.HistoryRecordingAction;
import org.openkilda.wfm.topology.flowhs.fsm.update.FlowUpdateContext;
import org.openkilda.wfm.topology.flowhs.fsm.update.FlowUpdateFsm;
import org.openkilda.wfm.topology.flowhs.fsm.update.FlowUpdateFsm.Event;
import org.openkilda.wfm.topology.flowhs.fsm.update.FlowUpdateFsm.State;
import org.openkilda.wfm.topology.flowhs.utils.SpeakerVerifySegmentEmitter;
import lombok.extern.slf4j.Slf4j;
import java.util.ArrayList;
import java.util.Map;
import java.util.UUID;
@Slf4j
public class EmitIngressRulesVerifyRequestsAction
extends HistoryRecordingAction<FlowUpdateFsm, State, Event, FlowUpdateContext> {
@Override
public void perform(State from, State to, Event event, FlowUpdateContext context, FlowUpdateFsm stateMachine) {
Map<UUID, FlowSegmentRequestFactory> requestsStorage = stateMachine.getIngressCommands();
ArrayList<FlowSegmentRequestFactory> requestFactories = new ArrayList<>(requestsStorage.values());
requestsStorage.clear();
SpeakerVerifySegmentEmitter.INSTANCE.emitBatch(stateMachine.getCarrier(), requestFactories, requestsStorage);
requestsStorage.forEach((key, value) -> stateMachine.getPendingCommands().put(key, value.getSwitchId()));
stateMachine.getRetriedCommands().clear();
stateMachine.saveActionToHistory("Started validation of installed ingress rules");
}
}
|
apache-2.0
|
zhoffice/redisson
|
redisson/src/main/java/org/redisson/api/map/event/EntryCreatedListener.java
|
851
|
/**
* Copyright 2016 Nikita Koksharov
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.redisson.api.map.event;
/**
*
* @author Nikita Koksharov
*
* @param <K> key type
* @param <V> value type
*/
public interface EntryCreatedListener<K, V> extends MapEntryListener {
void onCreated(EntryEvent<K, V> event);
}
|
apache-2.0
|
googleapis/java-compute
|
proto-google-cloud-compute-v1/src/main/java/com/google/cloud/compute/v1/SetTagsInstanceRequestOrBuilder.java
|
6223
|
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/compute/v1/compute.proto
package com.google.cloud.compute.v1;
public interface SetTagsInstanceRequestOrBuilder
extends
// @@protoc_insertion_point(interface_extends:google.cloud.compute.v1.SetTagsInstanceRequest)
com.google.protobuf.MessageOrBuilder {
/**
*
*
* <pre>
* Name of the instance scoping this request.
* </pre>
*
* <code>string instance = 18257045 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The instance.
*/
java.lang.String getInstance();
/**
*
*
* <pre>
* Name of the instance scoping this request.
* </pre>
*
* <code>string instance = 18257045 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for instance.
*/
com.google.protobuf.ByteString getInstanceBytes();
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>
* string project = 227560217 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "project"];
* </code>
*
* @return The project.
*/
java.lang.String getProject();
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>
* string project = 227560217 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "project"];
* </code>
*
* @return The bytes for project.
*/
com.google.protobuf.ByteString getProjectBytes();
/**
*
*
* <pre>
* An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000).
* </pre>
*
* <code>optional string request_id = 37109963;</code>
*
* @return Whether the requestId field is set.
*/
boolean hasRequestId();
/**
*
*
* <pre>
* An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000).
* </pre>
*
* <code>optional string request_id = 37109963;</code>
*
* @return The requestId.
*/
java.lang.String getRequestId();
/**
*
*
* <pre>
* An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed. For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments. The request ID must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000).
* </pre>
*
* <code>optional string request_id = 37109963;</code>
*
* @return The bytes for requestId.
*/
com.google.protobuf.ByteString getRequestIdBytes();
/**
*
*
* <pre>
* The body resource for this request
* </pre>
*
* <code>
* .google.cloud.compute.v1.Tags tags_resource = 331435380 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the tagsResource field is set.
*/
boolean hasTagsResource();
/**
*
*
* <pre>
* The body resource for this request
* </pre>
*
* <code>
* .google.cloud.compute.v1.Tags tags_resource = 331435380 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The tagsResource.
*/
com.google.cloud.compute.v1.Tags getTagsResource();
/**
*
*
* <pre>
* The body resource for this request
* </pre>
*
* <code>
* .google.cloud.compute.v1.Tags tags_resource = 331435380 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
com.google.cloud.compute.v1.TagsOrBuilder getTagsResourceOrBuilder();
/**
*
*
* <pre>
* The name of the zone for this request.
* </pre>
*
* <code>
* string zone = 3744684 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "zone"];
* </code>
*
* @return The zone.
*/
java.lang.String getZone();
/**
*
*
* <pre>
* The name of the zone for this request.
* </pre>
*
* <code>
* string zone = 3744684 [(.google.api.field_behavior) = REQUIRED, (.google.cloud.operation_request_field) = "zone"];
* </code>
*
* @return The bytes for zone.
*/
com.google.protobuf.ByteString getZoneBytes();
}
|
apache-2.0
|
alaeddine10/ggrc-core
|
src/ggrc/assets/javascripts/mapping/mapping_controller.js
|
18973
|
/*
* Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file>
* Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
* Created By:
* Maintained By:
*/
//= require can.jquery-all
//= require sections/section
//= require controls/control
//= require controls/controls_controller
(function(namespace, $) {
function mapunmap(unmap) {
return function(section, rcontrol, ccontrol) {
var params = {
ccontrol : (ccontrol ? ccontrol.id : "")
};
if(unmap)
params.u = "1";
if(rcontrol) params.rcontrol = rcontrol.id;
if(rcontrol === null) params.rcontrol = ccontrol.id;
if(section) params.section = section.id;
var dfd = section ?
section["map_" + (rcontrol === null ? "control" : "rcontrol")](params)
: rcontrol.map_ccontrol(params);
dfd.done(can.proxy(this.updateButtons, this));
return dfd;
}
}
can.Control("CMS.Controllers.Mapping", {
//static
cache : {}
, defaults : {
section_model : namespace.CMS.Models.SectionSlug
}
}, {
init : function() {
this.link_lists();
this.updateButtons();
}
, link_lists : function() {
var that = this;
$.when(
this.options.company_list_controller.find_all_deferred
, this.options.reg_list_controller.find_all_deferred
, this.options.section_list_controller.find_all_deferred
).done(function() {
can.each(CMS.Models.RegControl.cache, function(rcontrol, id) {
rcontrol.attr("implementing_controls", new can.Model.List(
can.$(rcontrol.implementing_controls).map(function(index, ictl){
return CMS.Models.Control.findInCacheById(ictl.id);
})));
});
can.each(that.options.section_model.cache, function(section, id) {
section.update_linked_controls();
});
});
}
, "#rmap:not([disabled]), #cmap:not([disabled]) click" : function(el, ev) {
var that = this;
var section = can.getObject("options.instance", $("#selected_sections").control(namespace.CMS.Controllers.Sections));
var rcontrol = can.getObject("options.instance", $("#selected_rcontrol").control(namespace.CMS.Controllers.Controls));
var ccontrol = can.getObject("options.instance", $("#selected_ccontrol").control(namespace.CMS.Controllers.Controls));
if(el.is("#cmap")) {
section = null;
}
var dfd = this[el.is(".unmapbtn") ? "unmap" : "map"](section, rcontrol, ccontrol);
this.bindXHRToButton(dfd, el);
dfd.always(this.proxy("updateButtons")); //bindXHR will remove the disabled attr, so re-check afterwards.
var that = this;
dfd.then(function() {
that.options.section_list_controller.draw_list(); //manual update because section model doesn't contain "real" rcontrol model
});
if(!rcontrol && el.is("#rmap")) {
var notice, reg_slug;
dfd.then(function(resp, status, xhr) {
notice = /.*Created regulation control (.+)\. Mapped regulation control\. */.exec(xhr.getResponseHeader("X-Flash-Notice"));
if(notice)
reg_slug = notice[1];
})
dfd.then($.proxy(this.options.reg_list_controller, "fetch_list"))
.then(function() {
that.options.reg_list_controller.find_all_deferred.then(function(list) {
var ccontrol = section.linked_controls[section.linked_controls.length - 1];
section.removeElementFromChildList("linked_controls", ccontrol);
section.addElementToChildList("linked_controls", can.filter(can.makeArray(list), function(item) { return item.slug === reg_slug })[0]);
section.addElementToChildList("linked_controls", ccontrol); //adding the reg control in before the ccontrol is necessary because we
// are assuming order when updating linkages
});
});
}
}
, unmap : function() { return mapunmap(true).apply(this, arguments); }
, map : function() { return mapunmap(false).apply(this, arguments); }
, "#rcontrol_list .regulationslot click" : function(el, ev) {
CMS.Controllers.Controls.Instances.SelectedRegControl.update({ instance : el.closest("[data-model]").data("model") });
this.updateButtons();
ev.preventDefault();
}
, "#ccontrol_list .regulationslot click" : function(el, ev) {
CMS.Controllers.Controls.Instances.SelectedCompanyControl.update({ instance : el.closest("[data-model]").data("model") });
this.updateButtons();
ev.preventDefault();
}
, "#section_list .regulationslot click" : function(el, ev) {
CMS.Controllers.Sections.Instances.SelectedSection.update({ instance : el.closest("[data-model]").data("model") });
this.updateButtons();
ev.preventDefault();
}
, updateButtons : function(ev, oldVal, newVal) {
var section = can.getObject("options.instance", $("#selected_sections").control(namespace.CMS.Controllers.Sections));
var rcontrol = can.getObject("options.instance", $("#selected_rcontrol").control(namespace.CMS.Controllers.Controls));
var ccontrol = can.getObject("options.instance", $("#selected_ccontrol").control(namespace.CMS.Controllers.Controls));
var rmap = $('#rmap');
var cmap = $('#cmap');
rmap.attr('disabled', !(section && (rcontrol || ccontrol)));
if (!(section && (rcontrol || ccontrol))) {
rmap.children(':first').text('Map section to control');
}
cmap.attr('disabled', !(rcontrol && ccontrol));
if (!(rcontrol && ccontrol)) {
cmap.children(':first').text('Map control to control');
}
if ((section && (rcontrol || ccontrol)) || (rcontrol && ccontrol)) {
var rmap_text = $(rmap.children()[0]);
var cmap_text = $(cmap.children()[0]);
var runmap = section && rcontrol ? $(section.linked_controls).filter(function() { return this.id === rcontrol.id}).length : false;
runmap || (runmap = section && !rcontrol && ccontrol ? $(section.linked_controls).filter(function() { return this.id === ccontrol.id}).length : false);
var cunmap = rcontrol && ccontrol ? $(rcontrol.implementing_controls).filter(function() { return this.id === ccontrol.id}).length : false;
// We don't know how we'd unmap a ccontrol directly from a section, because there's an auto-generated
// rcontrol associated with it. So don't allow it.
if(section && !rcontrol && runmap) {
rmap.attr("disabled", true);
}
rmap_text.text(runmap ? 'Unmap' : 'Map section to control')
rmap[runmap ? 'addClass' : "removeClass"]("unmapbtn");
cmap_text.text(cunmap ? 'Unmap' : 'Map control to control')
cmap[cunmap ? 'addClass' : "removeClass"]("unmapbtn");
}
}
, ".clearselection click" : function(el, ev) {
this.updateButtons();
}
// Post-submit handler for new control dialog
, "a[href^='/controls/new'] modal:success" : function(el, ev, data) {
var item;
if($(el).closest("#mapping_rcontrols_widget").length) {
// add this control to the reg controls.
// This isn't the best way to go about it, but CanJS/Mustache is currently ornery about accepting new observable list elements
// added with "push" --BM 12/11/2012
var rctl = this.options.reg_list_controller;
item = namespace.CMS.Models.RegControl.model(data);
rctl.options.observer.list.splice(this.slug_sort_position(item, rctl.options.observer.list), 0, item);
} else {
var cctl = this.options.company_list_controller;
item = namespace.CMS.Models.Control.model(data);
cctl.options.observer.list.splice(this.slug_sort_position(item, cctl.options.observer.list), 0, item);
}
var $item = $("[content_id=" + item.content_id + "]");
var $content = $item.closest(".content");
$item.find("a").click();
$content.scrollTop($item.offset().top - $content.offset().top - ($content.height() - $item.height()) / 2)
this.element.find(".search-results-count").html(+(this.element.find(".search-results-count").html()) + 1);
}
, slug_sort_position : function(data, list) {
var pos = list.length;
can.each(list, function(item, i) {
if(window.natural_comparator(data, item) < 1) {
pos = i;
return false;
}
});
return pos;
}
, "a.controllist, a.controllistRM click" : function(el, ev) {
var $dialog = $("#mapping_dialog");
if(!$dialog.length) {
$dialog = $('<div id="mapping_dialog" class="modal hide"></div>')
.appendTo(this.element)
.draggable({ handle: '.modal-header' });
}
ev.preventDefault();
// Not putting in the real model because live binding is having a problem with how we do things.
$dialog.html(can.view("/static/mustache/sections/controls_mapping.mustache", el.closest("[data-model]").data("model").serialize()));
$dialog.modal_form({ backdrop: true }).modal_form('show');
}
, "#mapping_dialog .closebtn click" : function(el) {
el.closest("#mapping_dialog").modal_form('hide');
}
, "#mapping_dialog .unmapbtn click" : function(el, ev) {
var thiscontrol = el.data("id")
, _section = this.options.section_model.findInCacheById(el.closest("[data-section-id]").data("section-id"))
, that = this
, $rc, rcontrol, ccontrol, section;
if(($rc = el.closest("[data-rcontrol-id]")).length > 0) {
rcontrol = namespace.CMS.Models.RegControl.findInCacheById($rc.data("rcontrol-id"));
ccontrol = namespace.CMS.Models.Control.findInCacheById(thiscontrol);
} else {
rcontrol = namespace.CMS.Models.RegControl.findInCacheById(thiscontrol);
section = _section;
}
this.bindXHRToButton(
this.unmap(section, rcontrol, ccontrol)
.then(function() {
_section.update_linked_controls();
var $dialog = $("#mapping_dialog");
$dialog.html(can.view("/static/mustache/sections/controls_mapping.mustache", _section.serialize()));
that.options.section_list_controller.draw_list();
}),
el);
}
, "#section_na click" : function(el, ev) {
var section = this.options.section_model.findInCacheById(el.closest("[data-section-id]").data("section-id"));
section.attr("na", el.attr("checked") ? 1 : 0);
this.bindXHRToButton(section.save(), el);
}
, "#section_notes change" : function(el, ev) {
var section = this.options.section_model.findInCacheById(el.closest("[data-section-id]").data("section-id"));
section.attr("notes", el.val());
this.bindXHRToButton(section.save(), el);
}
});
can.Control("CMS.Controllers.MappingWidgets", {}, {
".clearselection click" : function(el, ev) {
var controllers = this.element.find(".cms_controllers_controls, .cms_controllers_sections").controls(namespace.CMS.Controllers.Controls);
$(controllers).each(function() {
if(this.options.arity === 1) {
this.update({instance : null});
} else {
this.setSelected(null);
}
});
}
, ".widgetsearch-tocontent keydown" : function(el, ev) {
var controllers = this.element.find(".cms_controllers_controls, .cms_controllers_sections").controls(namespace.CMS.Controllers.Controls);
$(controllers).each(function() {
var that = this;
if(that.search_timeout) clearTimeout(that.search_timeout);
that.search_timeout = setTimeout(function() {
if(that.options.arity > 1) {
that.filter(el.val());
}
}, 300);
});
ev.stopPropagation();
}
});
//---------------------------------------------------------------
// Below this line is new development for killing the reg mapper
//---------------------------------------------------------------
CMS.Controllers.Mapping("CMS.Controllers.ControlMappingPopup", {
defaults : {
section_model : namespace.CMS.Models.SectionSlug
, parent_model : namespace.CMS.Models.Program
, parent_id : null
, observer : undefined
, section : null
}
//static
}, {
init : function() {
var that = this;
if(this.element.find(".spinner").length < 1)
this.element.append($(new Spinner().spin().el).css({"position" : "relative", "left" : 50, "top" : 50, "height": 150, "width": 150}));
this.options.observer = new can.Observe({
section : this.options.section
, parent_type : window.cms_singularize(this.options.parent_model.root_object)
, parent_subtype : can.underscore(this.options.parent_model.shortName).replace("_", " ")
, parent_id : this.options.parent_id
});
can.view("/static/mustache/sections/control_selector.mustache", that.options.observer, function(frag) {
that.options.company_list_controller = that.element
.html(frag).trigger("shown")
.find(".controls-list")
.cms_controllers_controls({
list : "/static/mustache/controls/list_selector.mustache"
, show : "/static/mustache/controls/show_selector.mustache"
, arity : 2})
.control();
that.options.selected_control_controller = that.element
.find(".selector-info.control")
.append($(new Spinner().spin().el).css({"position" : "relative", "left" : 50, "top" : 50, "height": 150, "width": 150}))
.cms_controllers_controls({show : "/static/mustache/controls/show_selected_sidebar.mustache", arity : 1})
.control();
that.search_filter(that.options.company_list_controller.find_all_deferred).done(function(d) {
that.list = d;
that.options.section.update_linked_controls_ccontrol_only();
//that.options.observer.attr("controls", d);
that.update();
that.element.trigger("shown").trigger("kill-all-popoevers");
});
});
this.on();
}
, update : function() {
var section = this.options.section;
this.options.observer.attr("section", section);
this.element.find(".controls-list ul > [data-model]").each(this.proxy("style_item"));
}
, style_item : function(el) {
if(arguments.length === 2 && typeof arguments[0] === "number") { //jQuery "each" case
el = arguments[1];
}
if(~can.inArray($(el).data("model"), this.options.section.linked_controls)) {
$(el).find("input[type=checkbox]").prop("checked", true);
} else {
$(el).find("input[type=checkbox]").prop("checked", false);
}
}
, " hidden" : function() {
this.element.remove();
}
, "input.map-control change" : function(el, ev) {
var that = this
, control = el.closest("[data-model]").data("model")
, is_mapped = !!~can.inArray(control, this.options.section.linked_controls);
if(is_mapped ^ el.prop("checked")) {
this[is_mapped ? "unmap" : "map"](this.options.section, null, control)
.done(function() {
setTimeout(function() {
that.style_item(that.element.find("[content_id=" + control.content_id + "]").parent());
}, 10)
});
}
}
// , "{section} updated" : function(obj, ev) {
// // if(!/(^|\.)linked_controls(\.|$)/.test(attr))
// // return;
// var $count = $("#content_" + obj.slug).find("> .item-main .controls-count")
// , html;
// if (obj.linked_controls.length > 0) {
// html = "<i class='grcicon-control-color'></i> " + obj.linked_controls.length;
// } else if (obj.na) {
// html = "<i class='grcicon-control-color'></i> <small class='warning'>N/A</small>";
// } else {
// html = "<i class='grcicon-control-danger'></i> <strong class='error'>0</strong>";
// }
// $count.html(html);
// var data = obj.linked_controls.length ? obj.linked_controls.serialize() : {na : obj.na};
// var render_str = can.view.render("/static/mustache/controls/list_popover.mustache", data);
// $count.attr("data-content", render_str).data("content", render_str)
// this.update();
// }
, ".edit-control modal:success" : function(el, ev, data) {
el.closest("[data-model]").data("model").attr(data).updated();
}
, ".widgetsearch-tocontent keydown" : function(el, ev) {
if(ev.which === 13) {
this.search_filter();
}
}
, ".control-type-filter change" : "search_filter"
, search_filter : function(dfd) {
var that = this;
var check = { ids_only: true };
if(this.element.find(".control-type-filter").prop("checked")) {
check[window.cms_singularize(this.options.parent_model.root_object) + "_id"] = this.options.parent_id;
}
var search = this.element.find(".widgetsearch-tocontent").val();
return this.options.company_list_controller
.filter(search, check, dfd)
.done(function(d) {
that.element.find(".search-results-count").html(d.length);
that.update_map_all();
});
}
, redo_last_search : function(id_to_add) {
var that = this;
this.options.company_list_controller.redo_last_filter(id_to_add).done(function(d){
that.element.find(".search-results-count").html(d.length);
that.update_map_all();
});
}
, "a[href^='/controls/new'] modal:success" : function(el, ev, data) {
var that = this
, model;
this._super(el, ev, data);
model = CMS.Models.Control.model(data);
this.redo_last_search(model.id);
this.map(this.options.section, null, model).done(function() {
that.update();
});
}
, ".search-reset click" : function(el, ev) {
this.element.find(".widgetsearch-tocontent").val("");
this.search_filter();
}
, ".item-main click" : function(el, ev) {
this.options.selected_control_controller.update({"instance" : el.closest("[data-model]").data("model")});
this.element.find(".control").removeClass("selected");
el.closest(".control").addClass("selected");
}
, update_map_all : function() {
this.element.find(".map-all").prop("checked", !this.element.find(".item-main:visible input:not(:checked)").length);
}
, ".map-all click" : function(el, ev) {
var that = this;
var dfds = [];
if(el.prop("checked")) {
//map
this.element.find(".control:visible:has(input:not(:checked))").each(function(i, val) {
dfds.push(that.map(that.options.section, null, $(val).data("model")).then(function(d) {
that.style_item(val);
return d;
}));
});
} else {
//unmap
this.element.find(".control:visible:has(input:checked)").each(function(i, val) {
dfds.push(that.unmap(that.options.section, null, $(val).data("model")).then(function(d) {
that.style_item(val);
return d;
}));
});
}
$.when.apply($, dfds).done(this.proxy("update_map_all"));
}
, ".jump-to-control click" : function(el, ev) {
var $item = this.element.find(".controls-list [content_id=" + el.data("content-id") + "]");
var $content = $item.closest(".content");
$item.find("a").click();
$content.scrollTop(0).scrollTop($item.offset().top - $content.offset().top - ($content.height() - $item.height()) / 2);
}
});
})(this, can.$);
|
apache-2.0
|
leopardoooo/cambodia
|
ycsoft-lib/src/main/java/com/ycsoft/beans/core/job/JCustWriteoff.java
|
1113
|
/**
* JCustWriteOff.java 2010/06/08
*/
package com.ycsoft.beans.core.job;
import java.io.Serializable;
import com.ycsoft.beans.base.BusiBase;
import com.ycsoft.daos.config.POJO;
/**
* JCustWriteoff -> J_CUST_WRITEOFF mapping
*/
@POJO(tn = "J_CUST_WRITEOFF", sn = "", pk = "JOB_ID")
public class JCustWriteoff extends BusiBase implements Serializable {
// JCustWriteOff all properties
/**
*
*/
private static final long serialVersionUID = -2353264118600012866L;
private Integer job_id;
private String cust_id;
private String writeoff;
/**
* default empty constructor
*/
public JCustWriteoff() {
}
// job_id getter and setter
public int getJob_id() {
return job_id;
}
public void setJob_id(int job_id) {
this.job_id = job_id;
}
// cust_id getter and setter
public String getCust_id() {
return cust_id;
}
public void setCust_id(String cust_id) {
this.cust_id = cust_id;
}
public String getWriteoff() {
return writeoff;
}
public void setWriteoff(String writeoff) {
this.writeoff = writeoff;
}
}
|
apache-2.0
|
bric3/assertj-core
|
src/main/java/org/assertj/core/error/ShouldContainKeys.java
|
1587
|
/**
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* Copyright 2012-2017 the original author or authors.
*/
package org.assertj.core.error;
import java.util.Set;
/**
* Creates an error message indicating that an assertion verifying that a map contains one or more keys failed.
*
* @author Nicolas François
* @author Joel Costigliola
*/
public class ShouldContainKeys extends BasicErrorMessageFactory {
/**
* Creates a new <code>{@link ShouldContainKeys}</code>.
*
* @param actual the actual value in the failed assertion.
* @return the created {@code ErrorMessageFactory}.
*/
public static <K> ErrorMessageFactory shouldContainKeys(Object actual, Set<K> keys) {
if (keys.size() == 1) return new ShouldContainKeys(actual, keys.iterator().next());
return new ShouldContainKeys(actual, keys);
}
private <K> ShouldContainKeys(Object actual, Set<K> key) {
super("%nExpecting:%n <%s>%nto contain keys:%n <%s>", actual, key);
}
private <K> ShouldContainKeys(Object actual, K key) {
super("%nExpecting:%n <%s>%nto contain key:%n <%s>", actual, key);
}
}
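// Illustrative usage sketch: inside an assertion implementation the factory is typically
// combined with a Failures helper (assumed here, not shown in this file):
//   throw failures.failure(info, shouldContainKeys(actualMap, missingKeys));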
|
apache-2.0
|
akarnokd/rxjava2-backport
|
src/main/java/hu/akarnokd/rxjava2/internal/operators/nbp/NbpOnSubscribeZipIterable.java
|
5058
|
/**
* Copyright 2015 David Karnok and Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is
* distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See
* the License for the specific language governing permissions and limitations under the License.
*/
package hu.akarnokd.rxjava2.internal.operators.nbp;
import java.util.Iterator;
import hu.akarnokd.rxjava2.NbpObservable;
import hu.akarnokd.rxjava2.NbpObservable.*;
import hu.akarnokd.rxjava2.disposables.Disposable;
import hu.akarnokd.rxjava2.functions.BiFunction;
import hu.akarnokd.rxjava2.internal.disposables.EmptyDisposable;
import hu.akarnokd.rxjava2.internal.subscriptions.SubscriptionHelper;
import hu.akarnokd.rxjava2.plugins.RxJavaPlugins;
public final class NbpOnSubscribeZipIterable<T, U, V> implements NbpOnSubscribe<V> {
final NbpObservable<? extends T> source;
final Iterable<U> other;
final BiFunction<? super T, ? super U, ? extends V> zipper;
public NbpOnSubscribeZipIterable(
NbpObservable<? extends T> source,
Iterable<U> other, BiFunction<? super T, ? super U, ? extends V> zipper) {
this.source = source;
this.other = other;
this.zipper = zipper;
}
@Override
public void accept(NbpSubscriber<? super V> t) {
Iterator<U> it;
try {
it = other.iterator();
} catch (Throwable e) {
EmptyDisposable.error(e, t);
return;
}
if (it == null) {
EmptyDisposable.error(new NullPointerException("The iterator returned by other is null"), t);
return;
}
boolean b;
try {
b = it.hasNext();
} catch (Throwable e) {
EmptyDisposable.error(e, t);
return;
}
if (!b) {
EmptyDisposable.complete(t);
return;
}
source.subscribe(new ZipIterableSubscriber<T, U, V>(t, it, zipper));
}
static final class ZipIterableSubscriber<T, U, V> implements NbpSubscriber<T> {
final NbpSubscriber<? super V> actual;
final Iterator<U> iterator;
final BiFunction<? super T, ? super U, ? extends V> zipper;
Disposable s;
boolean done;
public ZipIterableSubscriber(NbpSubscriber<? super V> actual, Iterator<U> iterator,
BiFunction<? super T, ? super U, ? extends V> zipper) {
this.actual = actual;
this.iterator = iterator;
this.zipper = zipper;
}
@Override
public void onSubscribe(Disposable s) {
if (SubscriptionHelper.validateDisposable(this.s, s)) {
return;
}
this.s = s;
actual.onSubscribe(s);
}
@Override
public void onNext(T t) {
if (done) {
return;
}
U u;
try {
u = iterator.next();
} catch (Throwable e) {
error(e);
return;
}
if (u == null) {
error(new NullPointerException("The iterator returned a null value"));
return;
}
V v;
try {
v = zipper.apply(t, u);
} catch (Throwable e) {
error(new NullPointerException("The iterator returned a null value"));
return;
}
if (v == null) {
error(new NullPointerException("The zipper function returned a null value"));
return;
}
actual.onNext(v);
boolean b;
try {
b = iterator.hasNext();
} catch (Throwable e) {
error(e);
return;
}
if (!b) {
done = true;
s.dispose();
actual.onComplete();
}
}
void error(Throwable e) {
done = true;
s.dispose();
actual.onError(e);
}
@Override
public void onError(Throwable t) {
if (done) {
RxJavaPlugins.onError(t);
return;
}
done = true;
actual.onError(t);
}
@Override
public void onComplete() {
if (done) {
return;
}
done = true;
actual.onComplete();
}
}
}
|
apache-2.0
|
sunli2015/Quark
|
src/main/webapp/js/jquery-ui-1.9.2.custom/development-bundle/ui/minified/i18n/jquery.ui.datepicker-hi.min.js
|
1234
|
/*! jQuery UI - v1.9.2 - 2014-01-26
* http://jqueryui.com
* Copyright 2014 jQuery Foundation and other contributors; Licensed MIT */
jQuery(function(e){e.datepicker.regional.hi={closeText:"बंद",prevText:"पिछला",nextText:"अगला",currentText:"आज",monthNames:["जनवरी ","फरवरी","मार्च","अप्रेल","मई","जून","जूलाई","अगस्त ","सितम्बर","अक्टूबर","नवम्बर","दिसम्बर"],monthNamesShort:["जन","फर","मार्च","अप्रेल","मई","जून","जूलाई","अग","सित","अक्ट","नव","दि"],dayNames:["रविवार","सोमवार","मंगलवार","बुधवार","गुरुवार","शुक्रवार","शनिवार"],dayNamesShort:["रवि","सोम","मंगल","बुध","गुरु","शुक्र","शनि"],dayNamesMin:["रवि","सोम","मंगल","बुध","गुरु","शुक्र","शनि"],weekHeader:"हफ्ता",dateFormat:"dd/mm/yy",firstDay:1,isRTL:!1,showMonthAfterYear:!1,yearSuffix:""},e.datepicker.setDefaults(e.datepicker.regional.hi)});
|
apache-2.0
|
googlearchive/tango-examples-c
|
cpp_example_util/app/src/main/java/com/projecttango/examples/cpp/util/TangoInitializationHelper.java
|
4832
|
/*
* Copyright 2016 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.projecttango.examples.cpp.util;
import android.content.Context;
import android.content.Intent;
import android.content.ServiceConnection;
import android.os.Build;
import android.os.IBinder;
import android.util.Log;
import java.io.File;
/**
 * Helper functions that simplify initializing TangoService and that handle
 * loading the correct libtango_client_api.so for the device architecture.
*/
public class TangoInitializationHelper {
public static final int ARCH_ERROR = -2;
public static final int ARCH_FALLBACK = -1;
public static final int ARCH_DEFAULT = 0;
public static final int ARCH_ARM64 = 1;
public static final int ARCH_ARM32 = 2;
public static final int ARCH_X86_64 = 3;
public static final int ARCH_X86 = 4;
/**
* Only for apps using the C API:
* Initializes the underlying TangoService for native apps.
*
   * @return false if the device doesn't have Tango running as an Android service,
   *         true otherwise.
*/
public static final boolean bindTangoService(final Context context,
ServiceConnection connection) {
Intent intent = new Intent();
intent.setClassName("com.google.tango", "com.google.atap.tango.TangoService");
boolean hasJavaService = (context.getPackageManager().resolveService(intent, 0) != null);
// User doesn't have the latest packagename for TangoCore, fallback to the previous name.
if (!hasJavaService) {
intent = new Intent();
intent.setClassName("com.projecttango.tango", "com.google.atap.tango.TangoService");
hasJavaService = (context.getPackageManager().resolveService(intent, 0) != null);
}
// User doesn't have a Java-fied TangoCore at all; fallback to the deprecated approach
// of doing nothing and letting the native side auto-init to the system-service version
// of Tango.
if (!hasJavaService) {
return false;
}
return context.bindService(intent, connection, Context.BIND_AUTO_CREATE);
}
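  // Illustrative usage only: a minimal sketch of how an Activity might wire these
  // helpers together. "MyTangoActivity" and the "mTangoServiceConnection" field are
  // assumptions for this example and are not part of this class.
  //
  //   if (TangoInitializationHelper.loadTangoSharedLibrary() == TangoInitializationHelper.ARCH_ERROR) {
  //       Log.e("MyTangoActivity", "Unable to load libtango_client_api.so");
  //   }
  //   TangoInitializationHelper.bindTangoService(this, mTangoServiceConnection);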
/**
* Load the libtango_client_api.so library based on different Tango device setup.
*
* @return returns the loaded architecture id.
*/
public static final int loadTangoSharedLibrary() {
int loadedSoId = ARCH_ERROR;
String basePath = "/data/data/com.google.tango/libfiles/";
if (!(new File(basePath).exists())) {
basePath = "/data/data/com.projecttango.tango/libfiles/";
}
Log.i("TangoInitializationHelper", "basePath: " + basePath);
try {
System.load(basePath + "arm64-v8a/libtango_client_api.so");
loadedSoId = ARCH_ARM64;
Log.i("TangoInitializationHelper", "Success! Using arm64-v8a/libtango_client_api.");
} catch (UnsatisfiedLinkError e) {
}
if (loadedSoId < ARCH_DEFAULT) {
try {
System.load(basePath + "armeabi-v7a/libtango_client_api.so");
loadedSoId = ARCH_ARM32;
Log.i("TangoInitializationHelper", "Success! Using armeabi-v7a/libtango_client_api.");
} catch (UnsatisfiedLinkError e) {
}
}
if (loadedSoId < ARCH_DEFAULT) {
try {
System.load(basePath + "x86_64/libtango_client_api.so");
loadedSoId = ARCH_X86_64;
Log.i("TangoInitializationHelper", "Success! Using x86_64/libtango_client_api.");
} catch (UnsatisfiedLinkError e) {
}
}
if (loadedSoId < ARCH_DEFAULT) {
try {
System.load(basePath + "x86/libtango_client_api.so");
loadedSoId = ARCH_X86;
Log.i("TangoInitializationHelper", "Success! Using x86/libtango_client_api.");
} catch (UnsatisfiedLinkError e) {
}
}
if (loadedSoId < ARCH_DEFAULT) {
try {
System.load(basePath + "default/libtango_client_api.so");
loadedSoId = ARCH_DEFAULT;
Log.i("TangoInitializationHelper", "Success! Using default/libtango_client_api.");
} catch (UnsatisfiedLinkError e) {
}
}
if (loadedSoId < ARCH_DEFAULT) {
try {
System.loadLibrary("tango_client_api");
loadedSoId = ARCH_FALLBACK;
Log.i("TangoInitializationHelper", "Falling back to libtango_client_api.so symlink.");
} catch (UnsatisfiedLinkError e) {
}
}
return loadedSoId;
}
}
|
apache-2.0
|
ravjotsingh9/DBLike
|
DBLike/ClientUI/Program.cs
|
511
|
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using System.Windows.Forms;
namespace ClientUI
{
static class Program
{
/// <summary>
/// The main entry point for the application.
/// </summary>
[STAThread]
static void Main()
{
Application.EnableVisualStyles();
Application.SetCompatibleTextRenderingDefault(false);
Application.Run(new Form1());
}
}
}
|
apache-2.0
|
daniil-timofeev/gridsplines
|
approximation/src/test/scala/approximation/TwoDGridUnit.scala
|
5290
|
package approximation
import approximation.TwoDGrid.Bounds._
import approximation.TwoDGrid._
import org.specs2._
import piecewise._
/**
*
*/
class TwoDGridUnit extends Specification{override def is = s2"""
correctly update X ${updateX}
correctly update Y ${updateY}
correct horizontal iteration step ${horIter}
correct vertical iteration step ${vertIter}
correct full iteration step ${makeFullStep}
no heat flow bound update at upper side ${noHeatFlowUpp}
no heat flow bound update at lower side ${noHeatFlowLow}
no heat flow bound update at right side ${noHeatFlowRight}
no heat flow bound update at left side ${noHeatFlowLeft}
"write grid ${writeGridTest}
"x coefficient array size ${getXCoefs}"
"y coefficient array size ${getYCoefs}"
"Orthogonal direction will heated faster than radial ${radialAndOrthoHeating}"
"""
def updateX = {
val xD = new XDim[Radial](1.0, x => x + 1.0, 10.0)
val yD = new YDim[Ortho](1.0, y => y + 1.0, 10.0)
val coef = new ConstantCoef(Spline.const(1.0), Spline.const(1.0), Spline.const(1.0))
val grid = TwoDGrid(xD, yD)(One, Temp)(One, Temp)(One, Temp)(One, Temp)(coef)
grid.updateX(x => 1.0)
grid.avValue must_== 1.0
}
def updateY = {
val xD = new XDim[Radial](1.0, x => x + 1.0, 10.0)
val yD = new YDim[Ortho](1.0, y => y + 1.0, 10.0)
val coef = new ConstantCoef(Spline.const(1.0), Spline.const(1.0), Spline.const(1.0))
val grid = TwoDGrid(xD, yD)(One, Temp)(One, Temp)(One, Temp)(One, Temp)(coef)
grid.updateY(y => 1.0)
grid.avValue must_== 1.0
}
def makeGrid4Iter(): TwoDGrid[Radial, Ortho, Const] = {
val xD = new XDim[Radial](1.0, x => x + 1.0, 10.0)
val yD = new YDim[Ortho](1.0, y => y + 1.0, 12.0)
val coef = new ConstantCoef(Spline.const(1.0), Spline.const(1.0), Spline.const(1.0))
TwoDGrid(xD, yD)(One, Temp)(One, Temp)(One, Temp)(One, Temp)(coef)
}
def horIter = {
val grid = makeGrid4Iter()
grid *= 1.0
grid.bounds *= 1.0
grid.bounds.left *= 4.0
grid.xIter(900)
grid.grid.update()
grid.noHeatFlow(Left)
grid.noHeatFlow(Right)
grid.bounds.left.get(0) must be_>(grid.bounds.right.get(0))
}
def vertIter = {
val grid = makeGrid4Iter()
grid *= 1.0
grid.bounds *= 1.0
grid.bounds.upp *= 4.0
grid.yIter(900)
grid.grid.update()
grid.noHeatFlow(Upper)
grid.noHeatFlow(Lower)
grid.bounds.upp.get(0) must be_>(grid.bounds.low.get(0))
}
def makeFullStep = {
val grid = makeGrid4Iter()
grid.updateY(y => 1.0)
grid.bounds *= 1.0
grid.bounds.left *= 4.0
grid.iteration(900.0)
grid.avValue must be_>(1.0)
}
def makeGrid(): TwoDGrid[Radial, Ortho, Const] = {
val xD = new XDim[Radial](1.0, x => x + 1.0, 10.0)
val yD = new YDim[Ortho](1.0, y => y + 1.0, 12.0)
val coef = new ConstantCoef(Spline.const(1.0), Spline.const(1.0), Spline.const(1.0))
TwoDGrid(xD, yD)(One, Temp)(One, Temp)(One, Temp)(One, Temp)(coef)
}
def noHeatFlowLeft = {
val grid = makeGrid()
grid *= 1.0
grid.noHeatFlow(Left)
grid.bounds.left.get(0) must_== 1
}
def noHeatFlowRight = {
val grid = makeGrid()
grid *= 1.0
grid.noHeatFlow(Right)
grid.bounds.right.get(0) must_== 1
}
def noHeatFlowUpp = {
val grid = makeGrid()
grid *= 1.0
grid.noHeatFlow(Upper)
grid.bounds.upp.get(0) must_== 1
}
def noHeatFlowLow = {
val grid = makeGrid()
grid *= 1.0
grid.noHeatFlow(Lower)
grid.bounds.low.get(0) must_== 1
}
def writeGridTest = {
import java.nio.file._
val dest = Files.createTempFile(Paths.get("."), "WriteGrid", "temp")
val grid = makeGrid()
val writer = Files.newBufferedWriter(dest)
try{
grid.write(writer)
} finally writer.close()
val colsNum = grid.x.colsNum
val read = Files.newBufferedReader(dest)
val line =
try {
read.readLine()
}
finally {
read.close()
Files.deleteIfExists(dest)
}
line.split(" ").size must_== {colsNum + 1}
}
def getXCoefs = {
val grid = makeGrid()
val xCoefs = grid.colCoefs(5)
xCoefs.size must_== grid.y.rowsNum
}
def getYCoefs = {
val grid = makeGrid()
val yCoefs = grid.rowCoefs(5)
yCoefs.size must_== grid.x.colsNum
}
def radialAndOrthoHeating = {
def buildGrid = {
val xD = new XDim[Radial](1.0, x => x + 1.0, 10.0)
val yD = new YDim[Ortho](1.0, y => y + 1.0, 10.0)
val coef = new ConstantCoef(Spline.const(1.5), Spline.const(1E6))
val grid = TwoDGrid(xD, yD)(One, Flow)(One, Flow)(One, Flow)(One, Flow)(coef)
grid.updateX(x => 6.0)
grid.bounds *= 0.0
grid
}
val timeStep = 15.0 * 60.0
val grid = buildGrid
for (i <- 1 to 100){
grid.bounds.left *= 15.0
grid.iteration(timeStep)
}
val grid0 = buildGrid
for (i <- 1 to 100){
grid0.bounds.upp *= 15.0
grid0.iteration(timeStep)
}
val verticalSum = grid0.col(5).reduce(_ + _) / grid0.col(5).length
val horizontalSum = grid.row(5).reduce(_ + _) / grid.row(5).length
verticalSum must be_>(horizontalSum) //because vertical is ortho, and horizontal is radial
}
}
|
apache-2.0
|
sapanywhereai/anywhere-api-sample
|
IntegrationDemoApp/src/main/java/com/sap/integration/anywhere/AccessTokenGetter.java
|
814
|
package com.sap.integration.anywhere;
import org.apache.log4j.Logger;
import com.sap.integration.anywhere.oauth.AccessTokenLoader;
public class AccessTokenGetter {
/**
* Logger for logging purposes. Similar definition is used in all classes of this application. You may configure it
* in log4j.properties configuration file.
*/
private static final Logger LOG = Logger.getLogger(AccessTokenGetter.class);
/**
     * Calls the classes and methods responsible for retrieving the Access Token.
*/
public static void runAccessToken() {
LOG.info("Retrieving Access Token");
try {
AccessTokenLoader.load();
} catch (Exception e) {
LOG.error("Exception " + e.getMessage(), e);
}
LOG.info("Access Token retrieved");
}
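    // Illustrative usage only: from an integration flow (for example a demo
    // application's main method), the token retrieval step can be invoked as:
    //
    //   AccessTokenGetter.runAccessToken();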
}
|
apache-2.0
|
fishercoder1534/Leetcode
|
src/test/java/com/fishercoder/_1561Test.java
|
692
|
package com.fishercoder;
import com.fishercoder.solutions._1561;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
public class _1561Test {
private static _1561.Solution1 solution1;
@BeforeClass
public static void setup() {
solution1 = new _1561.Solution1();
}
@Test
public void test1() {
assertEquals(9, solution1.maxCoins(new int[]{2, 4, 1, 2, 7, 8}));
}
@Test
public void test2() {
assertEquals(4, solution1.maxCoins(new int[]{2, 4, 5}));
}
@Test
public void test3() {
assertEquals(18, solution1.maxCoins(new int[]{9, 8, 7, 6, 5, 1, 2, 3, 4}));
}
}
|
apache-2.0
|
MReichenbach/visitmeta
|
visualization/src/main/java/de/hshannover/f4/trust/visitmeta/graphDrawer/GraphPanelFactory.java
|
2443
|
/*
* #%L
* =====================================================
* _____ _ ____ _ _ _ _
* |_ _|_ __ _ _ ___| |_ / __ \| | | | ___ | | | |
* | | | '__| | | / __| __|/ / _` | |_| |/ __|| |_| |
* | | | | | |_| \__ \ |_| | (_| | _ |\__ \| _ |
* |_| |_| \__,_|___/\__|\ \__,_|_| |_||___/|_| |_|
* \____/
*
* =====================================================
*
* Hochschule Hannover
* (University of Applied Sciences and Arts, Hannover)
* Faculty IV, Dept. of Computer Science
* Ricklinger Stadtweg 118, 30459 Hannover, Germany
*
* Email: trust@f4-i.fh-hannover.de
* Website: http://trust.f4.hs-hannover.de/
*
* This file is part of visitmeta-visualization, version 0.5.0,
* implemented by the Trust@HsH research group at the Hochschule Hannover.
* %%
* Copyright (C) 2012 - 2015 Trust@HsH
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package de.hshannover.f4.trust.visitmeta.graphDrawer;
import org.apache.log4j.Logger;
import de.hshannover.f4.trust.visitmeta.gui.GraphConnection;
public final class GraphPanelFactory {
/**
     * Private constructor to prevent instantiation of this utility class.
*/
private GraphPanelFactory() { }
private static final Logger LOGGER = Logger.getLogger(GraphPanelFactory.class);
/**
* Return a Panel that shows the graph.
*
* @param type
     *            defines which Panel to return. "Piccolo2D" returns a Panel that uses
     *            Piccolo2D to draw the graph. TODO "OpenGL" would return a Panel that
     *            uses OpenGL to draw the graph.
* @return a Panel that shows the graph.
*/
public static GraphPanel getGraphPanel(String type, GraphConnection connection) {
LOGGER.trace("Method getGraphPanel(" + type + ") called.");
switch (type) {
case "Piccolo2D":
return new Piccolo2DPanel(connection);
// case "OpenGL" : return new OpenGLPanel(pController);
default:
return new Piccolo2DPanel(connection);
}
}
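    // Illustrative usage only, assuming a GraphConnection instance named "connection"
    // is already available (not created here):
    //
    //   GraphPanel panel = GraphPanelFactory.getGraphPanel("Piccolo2D", connection);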
}
|
apache-2.0
|
the-ntf/XPagesExtensionLibrary
|
extlib-des/lwp/product/design/eclipse/plugins/com.ibm.xsp.extlib.designer.bluemix/src/com/ibm/xsp/extlib/designer/bluemix/manifest/editor/ManifestMultiPageEditor.java
|
8618
|
/*
* © Copyright IBM Corp. 2015
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.ibm.xsp.extlib.designer.bluemix.manifest.editor;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.jface.dialogs.MessageDialog;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.DisposeEvent;
import org.eclipse.swt.events.DisposeListener;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.ui.IEditorInput;
import org.eclipse.ui.IEditorPart;
import org.eclipse.ui.IEditorSite;
import org.eclipse.ui.IWindowListener;
import org.eclipse.ui.IWorkbenchWindow;
import org.eclipse.ui.PartInitException;
import org.eclipse.ui.PlatformUI;
import org.eclipse.ui.editors.text.TextEditor;
import org.eclipse.ui.forms.widgets.FormToolkit;
import org.eclipse.ui.ide.FileStoreEditorInput;
import org.eclipse.ui.part.MultiPageEditorPart;
import com.ibm.commons.iloader.node.collections.SingleCollection;
import com.ibm.commons.util.StringUtil;
import com.ibm.designer.domino.ide.resources.project.IDominoDesignerProject;
import com.ibm.xsp.extlib.designer.bluemix.BluemixLogger;
import com.ibm.xsp.extlib.designer.bluemix.manifest.editor.ManifestMetaModel.BluemixManifestEditorInput;
import com.ibm.xsp.extlib.designer.bluemix.util.BluemixUtil;
/**
* @author Gary Marjoram
*
*/
public class ManifestMultiPageEditor extends MultiPageEditorPart implements IWindowListener {
private IEditorInput _editorInput;
private ManifestTextEditor _srcEditor;
private ManifestEditorPage _visualEditor;
private FormToolkit _toolkit;
private ManifestBeanLoader _beanLoader;
private ManifestBean _bean;
private IDominoDesignerProject _designerProject;
public ManifestMultiPageEditor() {
}
@Override
protected void createPages() {
String label = BluemixUtil.productizeString(StringUtil.format("%BM_PRODUCT% Manifest - {0}", _designerProject.getDatabaseTitle())); // $NLX-ManifestMultiPageEditor.IBMBluemixManifest0-1$
setPartName(label);
Composite ourContainer = this.getContainer();
if ( _toolkit == null){
_toolkit = new FormToolkit(ourContainer.getDisplay() );
_toolkit.setBackground(ourContainer.getDisplay().getSystemColor(SWT.COLOR_LIST_BACKGROUND));
_toolkit.setBorderStyle(ourContainer.getBorderWidth());
}
ourContainer.addDisposeListener(new DisposeListener() {
public void widgetDisposed(DisposeEvent e) {
if (_toolkit != null) {
_toolkit.dispose();
_toolkit = null;
}
PlatformUI.getWorkbench().removeWindowListener(ManifestMultiPageEditor.this);
}
});
try {
// Add the Application page
_visualEditor = new ManifestEditorPage(this.getContainer(), _toolkit, this);
_visualEditor.getDataNode().setClassDef(_beanLoader.getClassOf(_bean));
_visualEditor.getDataNode().setDataProvider(new SingleCollection(_bean));
_visualEditor.getDataNode().setModelModified(false);
addPage(_visualEditor);
setPageText(0, "Application"); // $NLX-ManifestMultiPageEditor.Application-1$
_visualEditor.refreshTables();
// Add the Source page
_srcEditor = new ManifestTextEditor();
addPage(_srcEditor, _editorInput);
setPageText(1, "Source"); // $NLX-ManifestMultiPageEditor.Source-1$
} catch (Exception e) {
if (BluemixLogger.BLUEMIX_LOGGER.isErrorEnabled()) {
BluemixLogger.BLUEMIX_LOGGER.errorp(this, "createPages", e, "Failed to create visual editor"); // $NON-NLS-1$ $NLE-ManifestMultiPageEditor.Failedtocreatevisualeditor-2$
}
}
PlatformUI.getWorkbench().addWindowListener(this);
}
@Override
protected void pageChange(int newPageIndex) {
if (newPageIndex != 1) {
// Moving from the Source Editor to visual - update the bean from the src
String contents = getSrcEditor().getDocumentProvider().getDocument(_editorInput).get();
_bean.loadFromString(contents);
if (_bean.isManifestValid()) {
_visualEditor.getDataNode().notifyInvalidate(null);
_visualEditor.refreshTables();
_visualEditor.hideError();
} else {
_visualEditor.displayError();
}
}
super.pageChange(newPageIndex);
}
@Override
public void init(IEditorSite editorSite, IEditorInput editorInput) throws PartInitException {
_editorInput = editorInput;
_designerProject = ((BluemixManifestEditorInput)_editorInput).getDesignerProject();
// Setup the bean and the bean loader
_beanLoader = new ManifestBeanLoader("bluemix.manifest", (FileStoreEditorInput) editorInput, this); // $NON-NLS-1$
_bean = new ManifestBean((FileStoreEditorInput)editorInput);
super.init(editorSite, editorInput);
}
@Override
public void doSave(IProgressMonitor progress) {
if (_srcEditor.isDirty()) {
// Save
_srcEditor.doSave(progress);
// Update the bean file modified time so we're
// not prompting the user to reload
_bean.resetModifiedTime();
// Have to do this to update the dirty state of the editor
firePropertyChange(IEditorPart.PROP_DIRTY);
}
}
@Override
public void doSaveAs() {
}
@Override
public boolean isSaveAsAllowed() {
return true;
}
public TextEditor getSrcEditor() {
return _srcEditor;
}
public ManifestBean getBean() {
return _bean;
}
public IEditorInput getEditorInput() {
return _editorInput;
}
// Function to update the source editor with the contents of the bean
public void writeContentsFromBean() {
getSrcEditor().getDocumentProvider().getDocument(getEditorInput()).set(getBean().getContents());
}
public IDominoDesignerProject getDesignerProject() {
return _designerProject;
}
@Override
public void setFocus() {
super.setFocus();
// Every time the editor gets focus check if it has been modified externally
checkForExternalChange();
}
@Override
public void windowActivated(IWorkbenchWindow arg0) {
// Every time the editor gets focus check if it has been modified externally
checkForExternalChange();
}
@Override
public void windowClosed(IWorkbenchWindow arg0) {
}
@Override
public void windowDeactivated(IWorkbenchWindow arg0) {
}
@Override
public void windowOpened(IWorkbenchWindow arg0) {
}
private void checkForExternalChange() {
// Check has the file been externally modified
if (_bean.externallyModified()) {
// Only do this once per change
_bean.resetModifiedTime();
// Ask the reload question
String msg = "The file '{0}' has been changed on the file system. Do you wish to replace the editor contents with these changes?"; // $NLX-ManifestMultiPageEditor.Thefile0hasbeenchangedonthefilesy-1$
if (MessageDialog.openQuestion(null, "File changed", StringUtil.format(msg, _bean.getFileName()))) { // $NLX-ManifestMultiPageEditor.Filechanged-1$
// Reload the file
_srcEditor.doRevertToSaved();
// Make sure the contents are reflected in the editor
pageChange(getActivePage());
} else {
// User has chosen not to reload - show editor as "dirty"
_srcEditor.setExternallyModified(true);
firePropertyChange(IEditorPart.PROP_DIRTY);
}
}
}
}
|
apache-2.0
|
chamindias/carbon-apimgt
|
components/apimgt/org.wso2.carbon.apimgt.rest.api.publisher.v1/src/main/java/org/wso2/carbon/apimgt/rest/api/publisher/v1/impl/ApisApiServiceImpl.java
|
267938
|
/*
* Copyright (c) 2019 WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.apimgt.rest.api.publisher.v1.impl;
import com.amazonaws.SdkClientException;
import com.amazonaws.auth.AWSCredentialsProvider;
import com.amazonaws.auth.AWSStaticCredentialsProvider;
import com.amazonaws.auth.BasicAWSCredentials;
import com.amazonaws.auth.InstanceProfileCredentialsProvider;
import com.amazonaws.services.lambda.AWSLambda;
import com.amazonaws.services.lambda.AWSLambdaClientBuilder;
import com.amazonaws.services.lambda.model.FunctionConfiguration;
import com.amazonaws.services.lambda.model.ListFunctionsResult;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.dataformat.yaml.YAMLFactory;
import com.google.gson.Gson;
import org.apache.axiom.om.OMElement;
import org.apache.axiom.util.base64.Base64Utils;
import org.apache.commons.collections.MapUtils;
import org.apache.commons.httpclient.HostConfiguration;
import org.apache.commons.httpclient.HttpMethod;
import org.apache.commons.httpclient.HttpStatus;
import org.apache.commons.httpclient.methods.HeadMethod;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.cxf.jaxrs.ext.MessageContext;
import org.apache.cxf.jaxrs.ext.multipart.Attachment;
import org.apache.cxf.jaxrs.ext.multipart.ContentDisposition;
import org.apache.cxf.phase.PhaseInterceptorChain;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpHead;
import org.apache.http.client.methods.HttpPut;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.CloseableHttpClient;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONTokener;
import org.json.XML;
import org.json.simple.JSONObject;
import org.json.simple.parser.JSONParser;
import org.json.simple.parser.ParseException;
import org.wso2.carbon.apimgt.api.APIDefinition;
import org.wso2.carbon.apimgt.api.APIDefinitionValidationResponse;
import org.wso2.carbon.apimgt.api.APIManagementException;
import org.wso2.carbon.apimgt.api.APIMgtResourceAlreadyExistsException;
import org.wso2.carbon.apimgt.api.APIMgtResourceNotFoundException;
import org.wso2.carbon.apimgt.api.APIProvider;
import org.wso2.carbon.apimgt.api.ExceptionCodes;
import org.wso2.carbon.apimgt.api.FaultGatewaysException;
import org.wso2.carbon.apimgt.api.MonetizationException;
import org.wso2.carbon.apimgt.api.ServiceCatalog;
import org.wso2.carbon.apimgt.api.doc.model.APIResource;
import org.wso2.carbon.apimgt.api.dto.CertificateInformationDTO;
import org.wso2.carbon.apimgt.api.dto.ClientCertificateDTO;
import org.wso2.carbon.apimgt.api.model.API;
import org.wso2.carbon.apimgt.api.model.APIIdentifier;
import org.wso2.carbon.apimgt.api.model.APIProduct;
import org.wso2.carbon.apimgt.api.model.APIProductIdentifier;
import org.wso2.carbon.apimgt.api.model.APIResourceMediationPolicy;
import org.wso2.carbon.apimgt.api.model.APIRevision;
import org.wso2.carbon.apimgt.api.model.APIRevisionDeployment;
import org.wso2.carbon.apimgt.api.model.APIStateChangeResponse;
import org.wso2.carbon.apimgt.api.model.APIStore;
import org.wso2.carbon.apimgt.api.model.ApiTypeWrapper;
import org.wso2.carbon.apimgt.api.model.Comment;
import org.wso2.carbon.apimgt.api.model.Documentation;
import org.wso2.carbon.apimgt.api.model.DocumentationContent;
import org.wso2.carbon.apimgt.api.model.DocumentationContent.ContentSourceType;
import org.wso2.carbon.apimgt.api.model.DuplicateAPIException;
import org.wso2.carbon.apimgt.api.model.Identifier;
import org.wso2.carbon.apimgt.api.model.LifeCycleEvent;
import org.wso2.carbon.apimgt.api.model.Mediation;
import org.wso2.carbon.apimgt.api.model.Monetization;
import org.wso2.carbon.apimgt.api.model.ResourceFile;
import org.wso2.carbon.apimgt.api.model.ResourcePath;
import org.wso2.carbon.apimgt.api.model.SOAPToRestSequence;
import org.wso2.carbon.apimgt.api.model.Scope;
import org.wso2.carbon.apimgt.api.model.ServiceEntry;
import org.wso2.carbon.apimgt.api.model.SubscribedAPI;
import org.wso2.carbon.apimgt.api.model.SwaggerData;
import org.wso2.carbon.apimgt.api.model.Tier;
import org.wso2.carbon.apimgt.api.model.URITemplate;
import org.wso2.carbon.apimgt.api.model.graphql.queryanalysis.GraphqlComplexityInfo;
import org.wso2.carbon.apimgt.api.model.graphql.queryanalysis.GraphqlSchemaType;
import org.wso2.carbon.apimgt.impl.APIConstants;
import org.wso2.carbon.apimgt.impl.GZIPUtils;
import org.wso2.carbon.apimgt.impl.ServiceCatalogImpl;
import org.wso2.carbon.apimgt.impl.certificatemgt.ResponseCode;
import org.wso2.carbon.apimgt.impl.dao.ApiMgtDAO;
import org.wso2.carbon.apimgt.impl.definitions.AsyncApiParserUtil;
import org.wso2.carbon.apimgt.impl.definitions.GraphQLSchemaDefinition;
import org.wso2.carbon.apimgt.impl.definitions.OAS2Parser;
import org.wso2.carbon.apimgt.impl.definitions.OAS3Parser;
import org.wso2.carbon.apimgt.impl.definitions.OASParserUtil;
import org.wso2.carbon.apimgt.impl.importexport.APIImportExportException;
import org.wso2.carbon.apimgt.impl.importexport.ExportFormat;
import org.wso2.carbon.apimgt.impl.importexport.ImportExportAPI;
import org.wso2.carbon.apimgt.impl.importexport.utils.APIImportExportUtil;
import org.wso2.carbon.apimgt.impl.utils.APIMWSDLReader;
import org.wso2.carbon.apimgt.impl.utils.APIUtil;
import org.wso2.carbon.apimgt.impl.utils.APIVersionStringComparator;
import org.wso2.carbon.apimgt.impl.utils.CertificateMgtUtils;
import org.wso2.carbon.apimgt.impl.wsdl.SequenceGenerator;
import org.wso2.carbon.apimgt.impl.wsdl.model.WSDLValidationResponse;
import org.wso2.carbon.apimgt.impl.wsdl.util.SOAPOperationBindingUtils;
import org.wso2.carbon.apimgt.impl.wsdl.util.SequenceUtils;
import org.wso2.carbon.apimgt.rest.api.common.RestApiCommonUtil;
import org.wso2.carbon.apimgt.rest.api.common.RestApiConstants;
import org.wso2.carbon.apimgt.rest.api.common.dto.ErrorDTO;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.ApisApiService;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.common.mappings.APIMappingUtil;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.common.mappings.CertificateMappingUtil;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.common.mappings.CertificateRestApiUtils;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.common.mappings.DocumentationMappingUtil;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.common.mappings.ExternalStoreMappingUtil;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.common.mappings.GraphqlQueryAnalysisMappingUtil;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.common.mappings.MediationMappingUtil;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.common.mappings.PublisherCommonUtils;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.dto.APIDTO;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.dto.APIExternalStoreListDTO;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.dto.APIListDTO;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.dto.APIMonetizationInfoDTO;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.dto.APIRevenueDTO;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.dto.ApiEndpointValidationResponseDTO;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.dto.AsyncAPISpecificationValidationResponseDTO;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.dto.AuditReportDTO;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.dto.CertificateInfoDTO;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.dto.ClientCertMetadataDTO;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.dto.ClientCertificatesDTO;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.dto.CommentDTO;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.dto.CommentListDTO;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.dto.DeploymentStatusListDTO;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.dto.DocumentDTO;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.dto.DocumentListDTO;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.dto.FileInfoDTO;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.dto.GraphQLQueryComplexityInfoDTO;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.dto.GraphQLSchemaDTO;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.dto.GraphQLSchemaTypeListDTO;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.dto.GraphQLValidationResponseDTO;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.dto.LifecycleHistoryDTO;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.dto.LifecycleStateDTO;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.dto.MediationDTO;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.dto.MediationListDTO;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.dto.OpenAPIDefinitionValidationResponseDTO;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.dto.PaginationDTO;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.dto.PatchRequestBodyDTO;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.dto.PostRequestBodyDTO;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.dto.APIRevisionDTO;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.dto.APIRevisionListDTO;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.dto.APIRevisionDeploymentDTO;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.dto.ResourcePathListDTO;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.dto.ResourcePolicyInfoDTO;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.dto.ResourcePolicyListDTO;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.dto.ThrottlingPolicyDTO;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.dto.TopicDTO;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.dto.TopicListDTO;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.common.mappings.*;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.dto.WSDLInfoDTO;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.dto.WSDLValidationResponseDTO;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.dto.WorkflowResponseDTO;
import org.wso2.carbon.apimgt.rest.api.publisher.v1.utils.RestApiPublisherUtils;
import org.wso2.carbon.apimgt.rest.api.util.exception.BadRequestException;
import org.wso2.carbon.apimgt.rest.api.util.utils.RestApiUtil;
import org.wso2.carbon.base.ServerConfiguration;
import org.wso2.carbon.core.util.CryptoException;
import org.wso2.carbon.core.util.CryptoUtil;
import org.wso2.carbon.utils.CarbonUtils;
import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.io.UnsupportedEncodingException;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.net.URLConnection;
import java.net.UnknownHostException;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.xml.namespace.QName;
import static org.wso2.carbon.apimgt.api.ExceptionCodes.API_ALREADY_EXISTS;
public class ApisApiServiceImpl implements ApisApiService {
private static final Log log = LogFactory.getLog(ApisApiServiceImpl.class);
private static final String API_PRODUCT_TYPE = "APIPRODUCT";
@Override
public Response getAllAPIs(Integer limit, Integer offset, String xWSO2Tenant, String query,
String ifNoneMatch, Boolean expand, String accept, MessageContext messageContext) {
List<API> allMatchedApis = new ArrayList<>();
Object apiListDTO;
//pre-processing
//setting default limit and offset values if they are not set
limit = limit != null ? limit : RestApiConstants.PAGINATION_LIMIT_DEFAULT;
offset = offset != null ? offset : RestApiConstants.PAGINATION_OFFSET_DEFAULT;
query = query == null ? "" : query;
expand = expand != null && expand;
try {
//revert content search back to normal search by name to avoid doc result complexity and to comply with REST api practices
if (query.startsWith(APIConstants.CONTENT_SEARCH_TYPE_PREFIX + ":")) {
query = query
.replace(APIConstants.CONTENT_SEARCH_TYPE_PREFIX + ":", APIConstants.NAME_TYPE_PREFIX + ":");
}
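            // Illustrative only: with the rewrite above, a query such as
            // "content:pizza" is handled as "name:pizza" before it reaches
            // apiProvider.searchPaginatedAPIs(...) below.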
APIProvider apiProvider = RestApiCommonUtil.getLoggedInUserProvider();
String tenantDomain = RestApiCommonUtil.getLoggedInUserTenantDomain();
boolean migrationMode = Boolean.getBoolean(RestApiConstants.MIGRATION_MODE);
/*if (migrationMode) { // migration flow
if (!StringUtils.isEmpty(targetTenantDomain)) {
tenantDomain = targetTenantDomain;
}
RestApiUtil.handleMigrationSpecificPermissionViolations(tenantDomain, username);
}*/
Map<String, Object> result;
result = apiProvider.searchPaginatedAPIs(query, tenantDomain, offset, limit);
Set<API> apis = (Set<API>) result.get("apis");
allMatchedApis.addAll(apis);
apiListDTO = APIMappingUtil.fromAPIListToDTO(allMatchedApis, expand);
//Add pagination section in the response
Object totalLength = result.get("length");
Integer length = 0;
if (totalLength != null) {
length = (Integer) totalLength;
}
APIMappingUtil.setPaginationParams(apiListDTO, query, offset, limit, length);
if (APIConstants.APPLICATION_GZIP.equals(accept)) {
try {
File zippedResponse = GZIPUtils.constructZippedResponse(apiListDTO);
return Response.ok().entity(zippedResponse)
.header("Content-Disposition", "attachment").
header("Content-Encoding", "gzip").build();
} catch (APIManagementException e) {
RestApiUtil.handleInternalServerError(e.getMessage(), e, log);
}
} else {
return Response.ok().entity(apiListDTO).build();
}
} catch (APIManagementException e) {
String errorMessage = "Error while retrieving APIs";
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
return null;
}
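    // Illustrative request only (the context path is an assumption, not defined in this class):
    //
    //   GET /api/am/publisher/v1/apis?limit=25&offset=0&query=name:PizzaShack
    //
    // responds with an APIListDTO whose pagination section is filled in by
    // APIMappingUtil.setPaginationParams(...) above.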
@Override
public Response createAPI(APIDTO body, String oasVersion, MessageContext messageContext) {
URI createdApiUri;
APIDTO createdApiDTO;
try {
APIProvider apiProvider = RestApiCommonUtil.getLoggedInUserProvider();
API createdApi = PublisherCommonUtils
.addAPIWithGeneratedSwaggerDefinition(body, oasVersion, RestApiCommonUtil.getLoggedInUsername());
createdApiDTO = APIMappingUtil.fromAPItoDTO(createdApi);
//This URI used to set the location header of the POST response
createdApiUri = new URI(RestApiConstants.RESOURCE_PATH_APIS + "/" + createdApiDTO.getId());
return Response.created(createdApiUri).entity(createdApiDTO).build();
} catch (APIManagementException e) {
String errorMessage = "Error while adding new API : " + body.getProvider() + "-" +
body.getName() + "-" + body.getVersion() + " - " + e.getMessage();
RestApiUtil.handleInternalServerError(errorMessage, e, log);
} catch (URISyntaxException e) {
String errorMessage = "Error while retrieving API location : " + body.getProvider() + "-" +
body.getName() + "-" + body.getVersion();
RestApiUtil.handleInternalServerError(errorMessage, e, log);
} catch (CryptoException e) {
String errorMessage = "Error while encrypting the secret key of API : " + body.getProvider() + "-" +
body.getName() + "-" + body.getVersion() + " - " + e.getMessage();
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
return null;
}
@Override
public Response getAPI(String apiId, String xWSO2Tenant, String ifNoneMatch, MessageContext messageContext) throws APIManagementException {
APIProvider apiProvider = RestApiCommonUtil.getLoggedInUserProvider();
APIDTO apiToReturn = getAPIByID(apiId, apiProvider);
return Response.ok().entity(apiToReturn).build();
}
@Override
public Response addCommentToAPI(String apiId, PostRequestBodyDTO postRequestBodyDTO, String replyTo, MessageContext
messageContext) throws APIManagementException{
String username = RestApiCommonUtil.getLoggedInUsername();
String requestedTenantDomain = RestApiCommonUtil.getLoggedInUserTenantDomain();
try {
APIProvider apiProvider = RestApiCommonUtil.getLoggedInUserProvider();
ApiTypeWrapper apiTypeWrapper = apiProvider.getAPIorAPIProductByUUID(apiId, requestedTenantDomain);
Identifier identifier;
if (apiTypeWrapper.isAPIProduct()) {
identifier = apiTypeWrapper.getApiProduct().getId();
} else {
identifier = apiTypeWrapper.getApi().getId();
}
Comment comment = new Comment();
comment.setText(postRequestBodyDTO.getContent());
comment.setCategory(postRequestBodyDTO.getCategory());
comment.setParentCommentID(replyTo);
comment.setEntryPoint("publisher");
comment.setUser(username);
comment.setApiId(apiId);
String createdCommentId = apiProvider.addComment(identifier, comment, username);
Comment createdComment = apiProvider.getComment(apiTypeWrapper, createdCommentId, 0, 0);
CommentDTO commentDTO = CommentMappingUtil.fromCommentToDTO(createdComment);
String uriString = RestApiConstants.RESOURCE_PATH_APIS + "/" + apiId +
RestApiConstants.RESOURCE_PATH_COMMENTS + "/" + createdCommentId;
URI uri = new URI(uriString);
return Response.created(uri).entity(commentDTO).build();
} catch (APIManagementException e) {
if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
} else {
RestApiUtil.handleInternalServerError("Failed to add comment to the API " + apiId, e, log);
}
} catch (URISyntaxException e) {
throw new APIManagementException("Error while retrieving comment content location for API " + apiId);
}
return null;
}
@Override
public Response getAllCommentsOfAPI(String apiId, String xWSO2Tenant, Integer limit, Integer offset, Boolean
includeCommenterInfo, MessageContext messageContext) throws APIManagementException{
String requestedTenantDomain = RestApiUtil.getRequestedTenantDomain(xWSO2Tenant);
try {
APIProvider apiProvider = RestApiCommonUtil.getLoggedInUserProvider();
ApiTypeWrapper apiTypeWrapper = apiProvider.getAPIorAPIProductByUUID(apiId, requestedTenantDomain);
String parentCommentID = null;
Comment[] comments = apiProvider.getComments(apiTypeWrapper, parentCommentID);
CommentListDTO commentDTO = CommentMappingUtil.fromCommentListToDTO(comments, limit, offset,
includeCommenterInfo);
String uriString = RestApiConstants.RESOURCE_PATH_APIS + "/" + apiId +
RestApiConstants.RESOURCE_PATH_COMMENTS;
URI uri = new URI(uriString);
return Response.ok(uri).entity(commentDTO).build();
} catch (APIManagementException e) {
if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
} else {
RestApiUtil.handleInternalServerError("Failed to get comments of API " + apiId, e, log);
}
} catch (URISyntaxException e) {
String errorMessage = "Error while retrieving comments content location for API " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
return null;
}
@Override
public Response getCommentOfAPI(String commentId, String apiId, String xWSO2Tenant, String ifNoneMatch, Boolean
includeCommenterInfo, Integer replyLimit, Integer replyOffset, MessageContext messageContext) throws
APIManagementException{
String requestedTenantDomain = RestApiUtil.getRequestedTenantDomain(xWSO2Tenant);
try {
APIProvider apiProvider = RestApiCommonUtil.getLoggedInUserProvider();
ApiTypeWrapper apiTypeWrapper = apiProvider.getAPIorAPIProductByUUID(apiId, requestedTenantDomain);
Comment comment = apiProvider.getComment(apiTypeWrapper, commentId, replyLimit, replyOffset);
if (comment != null) {
CommentDTO commentDTO;
if (includeCommenterInfo) {
Map<String, Map<String, String>> userClaimsMap = CommentMappingUtil
.retrieveUserClaims(comment.getUser(), new HashMap<>());
commentDTO = CommentMappingUtil.fromCommentToDTOWithUserInfo(comment, userClaimsMap);
} else {
commentDTO = CommentMappingUtil.fromCommentToDTO(comment);
}
String uriString = RestApiConstants.RESOURCE_PATH_APIS + "/" + apiId +
RestApiConstants.RESOURCE_PATH_COMMENTS + "/" + commentId;
URI uri = new URI(uriString);
return Response.ok(uri).entity(commentDTO).build();
} else {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_COMMENTS,
String.valueOf(commentId), log);
}
} catch (APIManagementException e) {
if (RestApiUtil.isDueToAuthorizationFailure(e)) {
RestApiUtil.handleAuthorizationFailure(RestApiConstants.RESOURCE_API, apiId, e, log);
} else if (RestApiUtil.isDueToResourceNotFound(e)) {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
} else {
String errorMessage = "Error while retrieving comment for API : " + apiId + "with comment ID " + commentId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
} catch (URISyntaxException e) {
String errorMessage = "Error while retrieving comment content location : " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
return null;
}
@Override
public Response getRepliesOfComment(String commentId, String apiId, String xWSO2Tenant, Integer limit, Integer
offset, String ifNoneMatch, Boolean includeCommenterInfo, MessageContext messageContext) throws
APIManagementException{
String requestedTenantDomain = RestApiUtil.getRequestedTenantDomain(xWSO2Tenant);
try {
APIProvider apiProvider = RestApiCommonUtil.getLoggedInUserProvider();
ApiTypeWrapper apiTypeWrapper = apiProvider.getAPIorAPIProductByUUID(apiId, requestedTenantDomain);
Comment[] comments = apiProvider.getComments(apiTypeWrapper, commentId);
CommentListDTO commentDTO = CommentMappingUtil.fromCommentListToDTO(comments, limit, offset,
includeCommenterInfo);
String uriString = RestApiConstants.RESOURCE_PATH_APIS + "/" + apiId +
RestApiConstants.RESOURCE_PATH_COMMENTS;
URI uri = new URI(uriString);
return Response.ok(uri).entity(commentDTO).build();
} catch (APIManagementException e) {
if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
} else {
RestApiUtil.handleInternalServerError("Failed to get comments of API " + apiId, e, log);
}
} catch (URISyntaxException e) {
String errorMessage = "Error while retrieving comments content location for API " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
return null;
}
@Override
public Response editCommentOfAPI(String commentId, String apiId, PatchRequestBodyDTO patchRequestBodyDTO,
MessageContext messageContext) throws APIManagementException{
String username = RestApiCommonUtil.getLoggedInUsername();
String requestedTenantDomain = RestApiCommonUtil.getLoggedInUserTenantDomain();
try {
APIProvider apiProvider = RestApiCommonUtil.getLoggedInUserProvider();
ApiTypeWrapper apiTypeWrapper = apiProvider.getAPIorAPIProductByUUID(apiId, requestedTenantDomain);
Comment comment = apiProvider.getComment(apiTypeWrapper, commentId, 0, 0);
if (comment != null) {
if ( comment.getUser().equals(username)) {
boolean commentEdited = false;
if (patchRequestBodyDTO.getCategory() != null && !(patchRequestBodyDTO.getCategory().equals(comment.getCategory()))){
comment.setCategory(patchRequestBodyDTO.getCategory());
commentEdited = true;
}
if (patchRequestBodyDTO.getContent() != null && !(patchRequestBodyDTO.getContent().equals(comment.getText()))){
comment.setText(patchRequestBodyDTO.getContent());
commentEdited = true;
}
if (commentEdited){
if (apiProvider.editComment(apiTypeWrapper, commentId, comment)){
Comment editedComment = apiProvider.getComment(apiTypeWrapper, commentId, 0, 0);
CommentDTO commentDTO = CommentMappingUtil.fromCommentToDTO(editedComment);
String uriString = RestApiConstants.RESOURCE_PATH_APIS + "/" + apiId +
RestApiConstants.RESOURCE_PATH_COMMENTS + "/" + commentId;
URI uri = new URI(uriString);
return Response.ok(uri).entity(commentDTO).build();
}
} else {
return Response.notModified("Not Modified").type(MediaType.APPLICATION_JSON).build();
}
} else {
return Response.status(403, "Forbidden").type(MediaType.APPLICATION_JSON).build();
}
} else {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_COMMENTS,
String.valueOf(commentId), log);
}
} catch (APIManagementException e) {
if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
} else {
RestApiUtil.handleInternalServerError("Failed to add comment to the API " + apiId, e, log);
}
} catch (URISyntaxException e) {
String errorMessage = "Error while retrieving comment content location for API " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
return null;
}
@Override
public Response deleteComment(String commentId, String apiId, String ifMatch, MessageContext messageContext) throws
APIManagementException{
String requestedTenantDomain = RestApiCommonUtil.getLoggedInUserTenantDomain();
String username = RestApiCommonUtil.getLoggedInUsername();
try {
APIProvider apiProvider = RestApiCommonUtil.getLoggedInUserProvider();
ApiTypeWrapper apiTypeWrapper = apiProvider.getAPIorAPIProductByUUID(apiId, requestedTenantDomain);
Comment comment = apiProvider.getComment(apiTypeWrapper, commentId, 0, 0);
if (comment != null) {
String[] tokenScopes = (String[]) PhaseInterceptorChain.getCurrentMessage().getExchange().get(RestApiConstants.USER_REST_API_SCOPES);
if ( Arrays.asList(tokenScopes).contains("apim:app_import_export")|| comment.getUser().equals(username)) {
if (apiProvider.deleteComment(apiTypeWrapper, commentId)) {
JSONObject obj = new JSONObject();
obj.put("id", commentId);
obj.put("message", "The comment has been deleted");
return Response.ok(obj).type(MediaType.APPLICATION_JSON).build();
} else {
return Response.status(405, "Method Not Allowed").type(MediaType.APPLICATION_JSON).build();
}
} else {
return Response.status(403, "Forbidden").type(MediaType.APPLICATION_JSON).build();
}
} else {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_COMMENTS,
String.valueOf(commentId), log);
}
} catch (APIManagementException e) {
if (RestApiUtil.isDueToAuthorizationFailure(e)) {
RestApiUtil.handleAuthorizationFailure(RestApiConstants.RESOURCE_API, apiId, e, log);
} else if (RestApiUtil.isDueToResourceNotFound(e)) {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
} else {
String errorMessage = "Error while deleting comment " + commentId + "for API " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
}
return null;
}
/**
* Get complexity details of a given API
*
* @param apiId apiId
* @param messageContext message context
* @return Response with complexity details of the GraphQL API
*/
@Override
public Response getGraphQLPolicyComplexityOfAPI(String apiId, MessageContext messageContext)
throws APIManagementException {
try {
APIProvider apiProvider = RestApiCommonUtil.getLoggedInUserProvider();
String tenantDomain = RestApiCommonUtil.getLoggedInUserTenantDomain();
APIIdentifier apiIdentifier;
if (ApiMgtDAO.getInstance().checkAPIUUIDIsARevisionUUID(apiId).getApiUUID() != null) {
apiIdentifier = APIMappingUtil.getAPIInfoFromUUID(apiId,tenantDomain).getId();
} else {
apiIdentifier = APIMappingUtil.getAPIIdentifierFromUUID(apiId);
}
API api = apiProvider.getAPIbyUUID(apiId, tenantDomain);
if (APIConstants.GRAPHQL_API.equals(api.getType())) {
GraphqlComplexityInfo graphqlComplexityInfo = apiProvider.getComplexityDetails(apiIdentifier);
GraphQLQueryComplexityInfoDTO graphQLQueryComplexityInfoDTO =
GraphqlQueryAnalysisMappingUtil.fromGraphqlComplexityInfotoDTO(graphqlComplexityInfo);
return Response.ok().entity(graphQLQueryComplexityInfoDTO).build();
} else {
throw new APIManagementException(ExceptionCodes.API_NOT_GRAPHQL);
}
} catch (APIManagementException e) {
            // Auth failure occurs when accessing APIs across tenants. Send 404, since we don't need
            // to expose the existence of the resource.
if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
} else if (isAuthorizationFailure(e)) {
RestApiUtil.handleAuthorizationFailure(
"Authorization failure while retrieving complexity details of API : " + apiId, e, log);
} else {
String msg = "Error while retrieving complexity details of API " + apiId;
RestApiUtil.handleInternalServerError(msg, e, log);
}
}
return null;
}
/**
* Update complexity details of a given API
*
* @param apiId apiId
* @param body GraphQLQueryComplexityInfo DTO as request body
* @param messageContext message context
* @return Response
*/
@Override
public Response updateGraphQLPolicyComplexityOfAPI(String apiId, GraphQLQueryComplexityInfoDTO body,
MessageContext messageContext) throws APIManagementException {
try {
if (StringUtils.isBlank(apiId)) {
String errorMessage = "API ID cannot be empty or null.";
RestApiUtil.handleBadRequest(errorMessage, log);
}
APIIdentifier apiIdentifier = APIMappingUtil.getAPIIdentifierFromUUID(apiId);
if (apiIdentifier == null) {
throw new APIMgtResourceNotFoundException("Couldn't retrieve existing API with API UUID: "
+ apiId, ExceptionCodes.from(ExceptionCodes.API_NOT_FOUND,
apiId));
}
String tenantDomain = RestApiCommonUtil.getLoggedInUserTenantDomain();
APIProvider apiProvider = RestApiCommonUtil.getLoggedInUserProvider();
API api = apiProvider.getAPIbyUUID(apiId, tenantDomain);
String schema = apiProvider.getGraphqlSchema(apiIdentifier);
GraphqlComplexityInfo graphqlComplexityInfo =
GraphqlQueryAnalysisMappingUtil.fromDTOtoValidatedGraphqlComplexityInfo(body, schema);
if (APIConstants.GRAPHQL_API.equals(api.getType())) {
apiProvider.addOrUpdateComplexityDetails(apiIdentifier, graphqlComplexityInfo);
return Response.ok().build();
} else {
throw new APIManagementException(ExceptionCodes.API_NOT_GRAPHQL);
}
} catch (APIManagementException e) {
            // Auth failure occurs when accessing APIs across tenants. Send 404, since we don't need
            // to expose the existence of the resource.
if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
} else if (isAuthorizationFailure(e)) {
RestApiUtil.handleAuthorizationFailure(
"Authorization failure while updating complexity details of API : " + apiId, e, log);
} else {
String errorMessage = "Error while updating complexity details of API : " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
}
return null;
}
@Override
public Response updateTopics(String apiId, TopicListDTO topicListDTO, String ifMatch, MessageContext messageContext)
throws APIManagementException {
APIProvider apiProvider = RestApiCommonUtil.getLoggedInUserProvider();
String tenantDomain = RestApiCommonUtil.getLoggedInUserTenantDomain();
API existingAPI = apiProvider.getAPIbyUUID(apiId, tenantDomain);
Set<URITemplate> uriTemplates = existingAPI.getUriTemplates();
uriTemplates.clear();
for (TopicDTO topicDTO : topicListDTO.getList()) {
URITemplate uriTemplate = new URITemplate();
uriTemplate.setUriTemplate(topicDTO.getName());
uriTemplate.setHTTPVerb(topicDTO.getMode().toUpperCase());
// TODO: Get these from proper locations
uriTemplate.setAuthType(APIConstants.AUTH_APPLICATION_OR_USER_LEVEL_TOKEN);
uriTemplate.setThrottlingTier(APIConstants.UNLIMITED_TIER);
uriTemplates.add(uriTemplate);
}
existingAPI.setUriTemplates(uriTemplates);
// TODO: Add scopes
try {
apiProvider.updateAPI(existingAPI);
} catch (FaultGatewaysException e) {
            log.error("Error while updating topics of API " + apiId, e);
}
return Response.ok().build();
}
/**
* Get GraphQL Schema of given API
*
* @param apiId apiId
* @param accept
     * @param ifNoneMatch If-None-Match header value
* @param messageContext message context
* @return Response with GraphQL Schema
*/
@Override
public Response getAPIGraphQLSchema(String apiId, String accept, String ifNoneMatch,
MessageContext messageContext) {
try {
APIProvider apiProvider = RestApiCommonUtil.getLoggedInUserProvider();
String tenantDomain = RestApiCommonUtil.getLoggedInUserTenantDomain();
//this will fail if user does not have access to the API or the API does not exist
APIIdentifier apiIdentifier;
if (ApiMgtDAO.getInstance().checkAPIUUIDIsARevisionUUID(apiId).getApiUUID() != null) {
apiIdentifier = APIMappingUtil.getAPIInfoFromUUID(apiId,tenantDomain).getId();
} else {
apiIdentifier = APIMappingUtil.getAPIIdentifierFromUUID(apiId);
}
String schemaContent = apiProvider.getGraphqlSchema(apiIdentifier);
GraphQLSchemaDTO dto = new GraphQLSchemaDTO();
dto.setSchemaDefinition(schemaContent);
dto.setName(apiIdentifier.getProviderName() + APIConstants.GRAPHQL_SCHEMA_PROVIDER_SEPERATOR +
apiIdentifier.getApiName() + apiIdentifier.getVersion() + APIConstants.GRAPHQL_SCHEMA_FILE_EXTENSION);
return Response.ok().entity(dto).build();
} catch (APIManagementException e) {
            // Auth failure occurs when accessing APIs across tenants. Send 404, since we don't need
            // to expose the existence of the resource.
if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
} else if (isAuthorizationFailure(e)) {
RestApiUtil
.handleAuthorizationFailure("Authorization failure while retrieving schema of API: " + apiId, e,
log);
} else {
String errorMessage = "Error while retrieving schema of API: " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
}
return null;
}
/**
* Update GraphQL Schema
*
* @param apiId api Id
* @param schemaDefinition graphQL schema definition
* @param ifMatch
* @param messageContext
* @return
*/
@Override
public Response updateAPIGraphQLSchema(String apiId, String schemaDefinition, String ifMatch,
MessageContext messageContext) {
try {
APIProvider apiProvider = RestApiCommonUtil.getLoggedInUserProvider();
String tenantDomain = RestApiCommonUtil.getLoggedInUserTenantDomain();
APIIdentifier apiIdentifier = APIMappingUtil.getAPIIdentifierFromUUID(apiId);
if (apiIdentifier == null) {
throw new APIMgtResourceNotFoundException("Couldn't retrieve existing API with API UUID: "
+ apiId, ExceptionCodes.from(ExceptionCodes.API_NOT_FOUND,
apiId));
}
API originalAPI = apiProvider.getAPIbyUUID(apiId, tenantDomain);
originalAPI = PublisherCommonUtils.addGraphQLSchema(originalAPI, schemaDefinition, apiProvider);
APIDTO modifiedAPI = APIMappingUtil.fromAPItoDTO(originalAPI);
return Response.ok().entity(modifiedAPI.getOperations()).build();
} catch (APIManagementException | FaultGatewaysException e) {
            // Auth failure occurs when accessing APIs across tenants. Send 404, since we don't need
            // to expose the existence of the resource.
if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
} else if (isAuthorizationFailure(e)) {
RestApiUtil
.handleAuthorizationFailure("Authorization failure while retrieving schema of API: " + apiId, e,
log);
} else {
String errorMessage = "Error while uploading schema of the API: " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
}
return null;
}
@Override
public Response updateAPI(String apiId, APIDTO body, String ifMatch, MessageContext messageContext) {
String[] tokenScopes =
(String[]) PhaseInterceptorChain.getCurrentMessage().getExchange().get(RestApiConstants.USER_REST_API_SCOPES);
String username = RestApiCommonUtil.getLoggedInUsername();
String tenantDomain = RestApiCommonUtil.getLoggedInUserTenantDomain();
try {
APIIdentifier apiIdentifier = APIUtil.getAPIIdentifierFromUUID(apiId);
if (apiIdentifier == null) {
throw new APIMgtResourceNotFoundException("Couldn't retrieve existing API with API UUID: "
+ apiId, ExceptionCodes.from(ExceptionCodes.API_NOT_FOUND,
apiId));
}
APIProvider apiProvider = RestApiCommonUtil.getProvider(username);
API originalAPI = apiProvider.getAPIbyUUID(apiId, tenantDomain);
API updatedApi = PublisherCommonUtils.updateApi(originalAPI, body, apiProvider, tokenScopes);
return Response.ok().entity(APIMappingUtil.fromAPItoDTO(updatedApi)).build();
} catch (APIManagementException e) {
//Auth failure occurs when cross tenant accessing APIs. Sends 404, since we don't need
// to expose the existence of the resource
if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
} else if (isAuthorizationFailure(e)) {
RestApiUtil.handleAuthorizationFailure("Authorization failure while updating API : " + apiId, e, log);
} else {
String errorMessage = "Error while updating the API : " + apiId + " - " + e.getMessage();
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
} catch (FaultGatewaysException e) {
String errorMessage = "Error while updating API : " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
} catch (CryptoException e) {
String errorMessage = "Error while encrypting the secret key of API : " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
} catch (ParseException e) {
String errorMessage = "Error while parsing endpoint config of API : " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
return null;
}
/**
* Get all types and fields of the GraphQL Schema of a given API
*
* @param apiId apiId
* @param messageContext message context
* @return Response with all the types and fields found within the schema definition
*/
    @Override
    public Response getGraphQLPolicyComplexityTypesOfAPI(String apiId, MessageContext messageContext) {
GraphQLSchemaDefinition graphql = new GraphQLSchemaDefinition();
try {
APIProvider apiProvider = RestApiCommonUtil.getLoggedInUserProvider();
String tenantDomain = RestApiCommonUtil.getLoggedInUserTenantDomain();
APIIdentifier apiIdentifier;
if (ApiMgtDAO.getInstance().checkAPIUUIDIsARevisionUUID(apiId).getApiUUID() != null) {
apiIdentifier = APIMappingUtil.getAPIInfoFromUUID(apiId,tenantDomain).getId();
} else {
apiIdentifier = APIMappingUtil.getAPIIdentifierFromUUID(apiId);
}
API api = apiProvider.getAPIbyUUID(apiId, tenantDomain);
if (APIConstants.GRAPHQL_API.equals(api.getType())) {
String schemaContent = apiProvider.getGraphqlSchema(apiIdentifier);
List<GraphqlSchemaType> typeList = graphql.extractGraphQLTypeList(schemaContent);
GraphQLSchemaTypeListDTO graphQLSchemaTypeListDTO =
GraphqlQueryAnalysisMappingUtil.fromGraphqlSchemaTypeListtoDTO(typeList);
return Response.ok().entity(graphQLSchemaTypeListDTO).build();
} else {
throw new APIManagementException(ExceptionCodes.API_NOT_GRAPHQL);
}
} catch (APIManagementException e) {
//Auth failure occurs when cross tenant accessing APIs. Sends 404, since we don't need
// to expose the existence of the resource
if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
} else if (isAuthorizationFailure(e)) {
RestApiUtil.handleAuthorizationFailure(
"Authorization failure while retrieving types and fields of API : " + apiId, e, log);
} else {
String msg = "Error while retrieving types and fields of the schema of API " + apiId;
RestApiUtil.handleInternalServerError(msg, e, log);
}
}
return null;
}
    // AWS Lambda: REST API operation to retrieve the ARNs of the available Lambda functions
@Override
public Response getAmazonResourceNamesOfAPI(String apiId, MessageContext messageContext) {
JSONObject arns = new JSONObject();
try {
String tenantDomain = RestApiCommonUtil.getLoggedInUserTenantDomain();
APIProvider apiProvider = RestApiCommonUtil.getLoggedInUserProvider();
API api = apiProvider.getAPIbyUUID(apiId, tenantDomain);
String endpointConfigString = api.getEndpointConfig();
if (!StringUtils.isEmpty(endpointConfigString)) {
JSONParser jsonParser = new JSONParser();
JSONObject endpointConfig = (JSONObject) jsonParser.parse(endpointConfigString);
if (endpointConfig != null) {
if (endpointConfig.containsKey(APIConstants.AMZN_ACCESS_KEY) &&
endpointConfig.containsKey(APIConstants.AMZN_SECRET_KEY) &&
endpointConfig.containsKey(APIConstants.AMZN_REGION)) {
String accessKey = (String) endpointConfig.get(APIConstants.AMZN_ACCESS_KEY);
String secretKey = (String) endpointConfig.get(APIConstants.AMZN_SECRET_KEY);
String region = (String) endpointConfig.get(APIConstants.AMZN_REGION);
AWSCredentialsProvider credentialsProvider;
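                        // If no credentials are configured, fall back to the instance profile credentials; otherwise use the stored static credentials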
if (StringUtils.isEmpty(accessKey) && StringUtils.isEmpty(secretKey) &&
StringUtils.isEmpty(region)) {
credentialsProvider = InstanceProfileCredentialsProvider.getInstance();
} else if (!StringUtils.isEmpty(accessKey) && !StringUtils.isEmpty(secretKey) &&
!StringUtils.isEmpty(region)) {
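                            // A secret key of the expected encrypted length is stored encrypted; decode and decrypt it before use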
if (secretKey.length() == APIConstants.AWS_ENCRYPTED_SECRET_KEY_LENGTH) {
CryptoUtil cryptoUtil = CryptoUtil.getDefaultCryptoUtil();
secretKey = new String(cryptoUtil.base64DecodeAndDecrypt(secretKey),
APIConstants.DigestAuthConstants.CHARSET);
}
BasicAWSCredentials awsCredentials = new BasicAWSCredentials(accessKey, secretKey);
credentialsProvider = new AWSStaticCredentialsProvider(awsCredentials);
} else {
log.error("Missing AWS Credentials");
return null;
}
AWSLambda awsLambda = AWSLambdaClientBuilder.standard()
.withCredentials(credentialsProvider)
.withRegion(region)
.build();
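                        // List the Lambda functions visible to the resolved credentials and collect their ARNs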
ListFunctionsResult listFunctionsResult = awsLambda.listFunctions();
List<FunctionConfiguration> functionConfigurations = listFunctionsResult.getFunctions();
arns.put("count", functionConfigurations.size());
JSONArray list = new JSONArray();
for (FunctionConfiguration functionConfiguration : functionConfigurations) {
list.put(functionConfiguration.getFunctionArn());
}
arns.put("list", list);
return Response.ok().entity(arns.toString()).build();
}
}
}
} catch (SdkClientException e) {
if (e.getCause() instanceof UnknownHostException) {
arns.put("error", "No internet connection to connect the given access method.");
log.error("No internet connection to connect the given access method of API : " + apiId, e);
return Response.serverError().entity(arns.toString()).build();
} else {
arns.put("error", "Unable to access Lambda functions under the given access method.");
log.error("Unable to access Lambda functions under the given access method of API : " + apiId, e);
return Response.serverError().entity(arns.toString()).build();
}
} catch (ParseException e) {
String errorMessage = "Error while parsing endpoint config of the API: " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
} catch (CryptoException | UnsupportedEncodingException e) {
String errorMessage = "Error while decrypting the secret key of the API: " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
} catch (APIManagementException e) {
String errorMessage = "Error while retrieving the API: " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
return null;
}
/**
* Method to retrieve Security Audit Report
     * @param apiId API ID of the API
     * @param accept Accept header value
     * @param messageContext CXF Message Context
     * @return Response object containing the Security Audit Report
*/
@Override
public Response getAuditReportOfAPI(String apiId, String accept, MessageContext messageContext) {
boolean isDebugEnabled = log.isDebugEnabled();
try {
String username = RestApiCommonUtil.getLoggedInUsername();
String tenantDomain = RestApiCommonUtil.getLoggedInUserTenantDomain();
APIProvider apiProvider = RestApiCommonUtil.getProvider(username);
API api = apiProvider.getAPIbyUUID(apiId, tenantDomain);
APIIdentifier apiIdentifier = api.getId();
String apiDefinition = apiProvider.getOpenAPIDefinition(apiIdentifier, tenantDomain);
// Get configuration file, retrieve API token and collection id
JSONObject securityAuditPropertyObject = apiProvider.getSecurityAuditAttributesFromConfig(username);
String apiToken = (String) securityAuditPropertyObject.get("apiToken");
String collectionId = (String) securityAuditPropertyObject.get("collectionId");
String baseUrl = (String) securityAuditPropertyObject.get("baseUrl");
if (baseUrl == null) {
baseUrl = APIConstants.BASE_AUDIT_URL;
}
// Retrieve the uuid from the database
String auditUuid = ApiMgtDAO.getInstance().getAuditApiId(apiIdentifier);
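            // If the API has been audited before, update the existing audit entry; otherwise register it and store the returned audit UUID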
if (auditUuid != null) {
updateAuditApi(apiDefinition, apiToken, auditUuid, baseUrl, isDebugEnabled);
} else {
auditUuid = createAuditApi(collectionId, apiToken, apiIdentifier, apiDefinition, baseUrl,
isDebugEnabled);
}
// Logic for the HTTP request
String getUrl = baseUrl + "/" + auditUuid + APIConstants.ASSESSMENT_REPORT;
URL getReportUrl = new URL(getUrl);
try (CloseableHttpClient getHttpClient = (CloseableHttpClient) APIUtil
.getHttpClient(getReportUrl.getPort(), getReportUrl.getProtocol())) {
HttpGet httpGet = new HttpGet(getUrl);
// Set the header properties of the request
httpGet.setHeader(APIConstants.HEADER_ACCEPT, APIConstants.APPLICATION_JSON_MEDIA_TYPE);
httpGet.setHeader(APIConstants.HEADER_API_TOKEN, apiToken);
httpGet.setHeader(APIConstants.HEADER_USER_AGENT, APIConstants.USER_AGENT_APIM);
// Code block for the processing of the response
try (CloseableHttpResponse response = getHttpClient.execute(httpGet)) {
if (isDebugEnabled) {
log.debug("HTTP status " + response.getStatusLine().getStatusCode());
}
if (response.getStatusLine().getStatusCode() == HttpStatus.SC_OK) {
BufferedReader reader = new BufferedReader(
new InputStreamReader(response.getEntity().getContent(), StandardCharsets.UTF_8));
String inputLine;
StringBuilder responseString = new StringBuilder();
while ((inputLine = reader.readLine()) != null) {
responseString.append(inputLine);
}
reader.close();
JSONObject responseJson = (JSONObject) new JSONParser().parse(responseString.toString());
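                        // The report is returned Base64-encoded under the data field; the grade and error count are read from the nested attr/data object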
String report = responseJson.get(APIConstants.DATA).toString();
String grade = (String) ((JSONObject) ((JSONObject) responseJson.get(APIConstants.ATTR))
.get(APIConstants.DATA)).get(APIConstants.GRADE);
Integer numErrors = Integer.valueOf(
(String) ((JSONObject) ((JSONObject) responseJson.get(APIConstants.ATTR))
.get(APIConstants.DATA)).get(APIConstants.NUM_ERRORS));
String decodedReport = new String(Base64Utils.decode(report), StandardCharsets.UTF_8);
AuditReportDTO auditReportDTO = new AuditReportDTO();
auditReportDTO.setReport(decodedReport);
auditReportDTO.setGrade(grade);
auditReportDTO.setNumErrors(numErrors);
auditReportDTO.setExternalApiId(auditUuid);
return Response.ok().entity(auditReportDTO).build();
}
}
}
} catch (IOException e) {
RestApiUtil.handleInternalServerError("Error occurred while getting "
+ "HttpClient instance", e, log);
} catch (ParseException e) {
RestApiUtil.handleInternalServerError("API Definition String "
+ "could not be parsed into JSONObject.", e, log);
} catch (APIManagementException e) {
String errorMessage = "Error while Auditing API : " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
return null;
}
/**
* Update API Definition before retrieving Security Audit Report
* @param apiDefinition API Definition of API
* @param apiToken API Token to access Security Audit
* @param auditUuid Respective UUID of API in Security Audit
* @param baseUrl Base URL to communicate with Security Audit
* @param isDebugEnabled Boolean whether debug is enabled
* @throws IOException In the event of any problems with the request
* @throws APIManagementException In the event of unexpected response
*/
private void updateAuditApi(String apiDefinition, String apiToken, String auditUuid, String baseUrl,
boolean isDebugEnabled)
throws IOException, APIManagementException {
// Set the property to be attached in the body of the request
// Attach API Definition to property called specfile to be sent in the request
JSONObject jsonBody = new JSONObject();
jsonBody.put("specfile", Base64Utils.encode(apiDefinition.getBytes(StandardCharsets.UTF_8)));
// Logic for HTTP Request
String putUrl = baseUrl + "/" + auditUuid;
URL updateApiUrl = new URL(putUrl);
try (CloseableHttpClient httpClient = (CloseableHttpClient) APIUtil
.getHttpClient(updateApiUrl.getPort(), updateApiUrl.getProtocol())) {
HttpPut httpPut = new HttpPut(putUrl);
// Set the header properties of the request
httpPut.setHeader(APIConstants.HEADER_ACCEPT, APIConstants.APPLICATION_JSON_MEDIA_TYPE);
httpPut.setHeader(APIConstants.HEADER_CONTENT_TYPE, APIConstants.APPLICATION_JSON_MEDIA_TYPE);
httpPut.setHeader(APIConstants.HEADER_API_TOKEN, apiToken);
httpPut.setHeader(APIConstants.HEADER_USER_AGENT, APIConstants.USER_AGENT_APIM);
httpPut.setEntity(new StringEntity(jsonBody.toJSONString()));
// Code block for processing the response
try (CloseableHttpResponse response = httpClient.execute(httpPut)) {
if (isDebugEnabled) {
log.debug("HTTP status " + response.getStatusLine().getStatusCode());
}
if (!(response.getStatusLine().getStatusCode() == HttpStatus.SC_OK)) {
throw new APIManagementException(
"Error while sending data to the API Security Audit Feature. Found http status " +
response.getStatusLine());
}
} finally {
httpPut.releaseConnection();
}
}
}
/**
* Send API Definition to Security Audit for the first time
* @param collectionId Collection ID in which the Definition should be sent to
* @param apiToken API Token to access Security Audit
* @param apiIdentifier API Identifier object
* @param apiDefinition API Definition of API
* @param baseUrl Base URL to communicate with Security Audit
* @param isDebugEnabled Boolean whether debug is enabled
* @return String UUID of API in Security Audit
* @throws IOException In the event of any problems in the request
* @throws APIManagementException In the event of unexpected response
* @throws ParseException In the event of any parse errors from the response
*/
private String createAuditApi(String collectionId, String apiToken, APIIdentifier apiIdentifier,
String apiDefinition, String baseUrl, boolean isDebugEnabled)
throws IOException, APIManagementException, ParseException {
HttpURLConnection httpConn;
OutputStream outputStream;
PrintWriter writer;
String auditUuid = null;
URL url = new URL(baseUrl);
httpConn = (HttpURLConnection) url.openConnection();
httpConn.setUseCaches(false);
httpConn.setDoOutput(true); // indicates POST method
httpConn.setDoInput(true);
httpConn.setRequestProperty(APIConstants.HEADER_CONTENT_TYPE,
APIConstants.MULTIPART_CONTENT_TYPE + APIConstants.MULTIPART_FORM_BOUNDARY);
httpConn.setRequestProperty(APIConstants.HEADER_ACCEPT, APIConstants.APPLICATION_JSON_MEDIA_TYPE);
httpConn.setRequestProperty(APIConstants.HEADER_API_TOKEN, apiToken);
httpConn.setRequestProperty(APIConstants.HEADER_USER_AGENT, APIConstants.USER_AGENT_APIM);
outputStream = httpConn.getOutputStream();
writer = new PrintWriter(new OutputStreamWriter(outputStream, StandardCharsets.UTF_8), true);
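        // Build the multipart/form-data body manually: a name part, a specfile part carrying the API definition and a cid part for the collection id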
// Name property
writer.append("--" + APIConstants.MULTIPART_FORM_BOUNDARY).append(APIConstants.MULTIPART_LINE_FEED)
.append("Content-Disposition: form-data; name=\"name\"")
.append(APIConstants.MULTIPART_LINE_FEED).append(APIConstants.MULTIPART_LINE_FEED)
.append(apiIdentifier.getApiName()).append(APIConstants.MULTIPART_LINE_FEED);
writer.flush();
// Specfile property
writer.append("--" + APIConstants.MULTIPART_FORM_BOUNDARY).append(APIConstants.MULTIPART_LINE_FEED)
.append("Content-Disposition: form-data; name=\"specfile\"; filename=\"swagger.json\"")
.append(APIConstants.MULTIPART_LINE_FEED)
.append(APIConstants.HEADER_CONTENT_TYPE + ": " + APIConstants.APPLICATION_JSON_MEDIA_TYPE)
.append(APIConstants.MULTIPART_LINE_FEED).append(APIConstants.MULTIPART_LINE_FEED)
.append(apiDefinition).append(APIConstants.MULTIPART_LINE_FEED);
writer.flush();
// CollectionID property
writer.append("--" + APIConstants.MULTIPART_FORM_BOUNDARY).append(APIConstants.MULTIPART_LINE_FEED)
.append("Content-Disposition: form-data; name=\"cid\"").append(APIConstants.MULTIPART_LINE_FEED)
.append(APIConstants.MULTIPART_LINE_FEED).append(collectionId)
.append(APIConstants.MULTIPART_LINE_FEED);
writer.flush();
writer.append("--" + APIConstants.MULTIPART_FORM_BOUNDARY + "--")
.append(APIConstants.MULTIPART_LINE_FEED);
writer.close();
// Checks server's status code first
int status = httpConn.getResponseCode();
if (status == HttpURLConnection.HTTP_OK) {
if (isDebugEnabled) {
log.debug("HTTP status " + status);
}
BufferedReader reader = new BufferedReader(
new InputStreamReader(httpConn.getInputStream(), StandardCharsets.UTF_8));
String inputLine;
StringBuilder responseString = new StringBuilder();
while ((inputLine = reader.readLine()) != null) {
responseString.append(inputLine);
}
reader.close();
httpConn.disconnect();
JSONObject responseJson = (JSONObject) new JSONParser().parse(responseString.toString());
auditUuid = (String) ((JSONObject) responseJson.get(APIConstants.DESC)).get(APIConstants.ID);
ApiMgtDAO.getInstance().addAuditApiMapping(apiIdentifier, auditUuid);
} else {
throw new APIManagementException(
"Error while retrieving data for the API Security Audit Report. Found http status: " +
httpConn.getResponseCode() + " - " + httpConn.getResponseMessage());
}
return auditUuid;
}
@Override
public Response getAPIClientCertificateContentByAlias(String apiId, String alias,
MessageContext messageContext) {
String tenantDomain = RestApiCommonUtil.getLoggedInUserTenantDomain();
String certFileName = alias + ".crt";
try {
APIProvider apiProvider = RestApiCommonUtil.getLoggedInUserProvider();
API api = apiProvider.getAPIbyUUID(apiId, tenantDomain);
ClientCertificateDTO clientCertificateDTO = CertificateRestApiUtils.preValidateClientCertificate(alias,
api.getId());
if (clientCertificateDTO != null) {
Object certificate = CertificateRestApiUtils
.getDecodedCertificate(clientCertificateDTO.getCertificate());
Response.ResponseBuilder responseBuilder = Response.ok().entity(certificate);
responseBuilder.header(RestApiConstants.HEADER_CONTENT_DISPOSITION,
"attachment; filename=\"" + certFileName + "\"");
responseBuilder.header(RestApiConstants.HEADER_CONTENT_TYPE, MediaType.APPLICATION_OCTET_STREAM);
return responseBuilder.build();
}
} catch (APIManagementException e) {
RestApiUtil.handleInternalServerError(
"Error while retrieving the client certificate with alias " + alias + " for the tenant "
+ tenantDomain, e, log);
}
return null;
}
@Override
public Response deleteAPIClientCertificateByAlias(String alias, String apiId,
MessageContext messageContext) {
String tenantDomain = RestApiCommonUtil.getLoggedInUserTenantDomain();
try {
APIIdentifier apiIdentifierFromTable = APIUtil.getAPIIdentifierFromUUID(apiId);
if (apiIdentifierFromTable == null) {
throw new APIMgtResourceNotFoundException("Couldn't retrieve existing API with API UUID: "
+ apiId, ExceptionCodes.from(ExceptionCodes.API_NOT_FOUND,
apiId));
}
APIProvider apiProvider = RestApiCommonUtil.getLoggedInUserProvider();
API api = apiProvider.getAPIbyUUID(apiId, RestApiCommonUtil.getLoggedInUserTenantDomain());
ClientCertificateDTO clientCertificateDTO = CertificateRestApiUtils.preValidateClientCertificate(alias,
api.getId());
int responseCode = apiProvider
.deleteClientCertificate(
RestApiCommonUtil.getLoggedInUsername(), clientCertificateDTO.getApiIdentifier(),
alias);
if (responseCode == ResponseCode.SUCCESS.getResponseCode()) {
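                // Re-save the owning API (or API Product) so the certificate removal is propagated to the gateways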
//Handle api product case.
if (API_PRODUCT_TYPE.equals(api.getType())) {
APIIdentifier apiIdentifier = api.getId();
APIProductIdentifier apiProductIdentifier =
new APIProductIdentifier(apiIdentifier.getProviderName(), apiIdentifier.getApiName(),
apiIdentifier.getVersion());
APIProduct apiProduct = apiProvider.getAPIProduct(apiProductIdentifier);
apiProvider.updateAPIProduct(apiProduct);
} else {
apiProvider.updateAPI(api);
}
if (log.isDebugEnabled()) {
log.debug(String.format("The client certificate which belongs to tenant : %s represented by the "
+ "alias : %s is deleted successfully", tenantDomain, alias));
}
return Response.ok().entity("The certificate for alias '" + alias + "' deleted successfully.").build();
} else {
if (log.isDebugEnabled()) {
log.debug(String.format("Failed to delete the client certificate which belongs to tenant : %s "
+ "represented by the alias : %s.", tenantDomain, alias));
}
RestApiUtil.handleInternalServerError(
"Error while deleting the client certificate for alias '" + alias + "'.", log);
}
} catch (APIManagementException e) {
RestApiUtil.handleInternalServerError(
"Error while deleting the client certificate with alias " + alias + " for the tenant "
+ tenantDomain, e, log);
} catch (FaultGatewaysException e) {
RestApiUtil.handleInternalServerError(
"Error while publishing the certificate change to gateways for the alias " + alias, e, log);
}
return null;
}
@Override
public Response getAPIClientCertificateByAlias(String alias, String apiId,
MessageContext messageContext) {
String tenantDomain = RestApiCommonUtil.getLoggedInUserTenantDomain();
CertificateMgtUtils certificateMgtUtils = CertificateMgtUtils.getInstance();
try {
APIProvider apiProvider = RestApiCommonUtil.getLoggedInUserProvider();
API api = apiProvider.getAPIbyUUID(apiId, tenantDomain);
ClientCertificateDTO clientCertificateDTO = CertificateRestApiUtils.preValidateClientCertificate(alias,
api.getId());
CertificateInformationDTO certificateInformationDTO = certificateMgtUtils
.getCertificateInfo(clientCertificateDTO.getCertificate());
if (certificateInformationDTO != null) {
CertificateInfoDTO certificateInfoDTO = CertificateMappingUtil
.fromCertificateInformationToDTO(certificateInformationDTO);
return Response.ok().entity(certificateInfoDTO).build();
} else {
RestApiUtil.handleResourceNotFoundError("Certificate is empty for alias " + alias, log);
}
} catch (APIManagementException e) {
RestApiUtil.handleInternalServerError(
"Error while retrieving the client certificate with alias " + alias + " for the tenant "
+ tenantDomain, e, log);
}
return null;
}
@Override
public Response updateAPIClientCertificateByAlias(String alias, String apiId,
InputStream certificateInputStream,
Attachment certificateDetail, String tier,
MessageContext messageContext) {
try {
APIIdentifier apiIdentifierFromTable = APIUtil.getAPIIdentifierFromUUID(apiId);
if (apiIdentifierFromTable == null) {
throw new APIMgtResourceNotFoundException("Couldn't retrieve existing API with API UUID: "
+ apiId, ExceptionCodes.from(ExceptionCodes.API_NOT_FOUND,
apiId));
}
ContentDisposition contentDisposition;
String fileName;
String base64EncodedCert = null;
APIProvider apiProvider = RestApiCommonUtil.getLoggedInUserProvider();
API api = apiProvider.getAPIbyUUID(apiId, RestApiCommonUtil.getLoggedInUserTenantDomain());
String userName = RestApiCommonUtil.getLoggedInUsername();
int tenantId = APIUtil.getTenantId(userName);
ClientCertificateDTO clientCertificateDTO = CertificateRestApiUtils.preValidateClientCertificate(alias,
api.getId());
if (certificateDetail != null) {
contentDisposition = certificateDetail.getContentDisposition();
fileName = contentDisposition.getParameter(RestApiConstants.CONTENT_DISPOSITION_FILENAME);
if (StringUtils.isNotBlank(fileName)) {
base64EncodedCert = CertificateRestApiUtils.generateEncodedCertificate(certificateInputStream);
}
}
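            // Nothing to update when neither a new certificate file nor a tier change is provided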
if (StringUtils.isEmpty(base64EncodedCert) && StringUtils.isEmpty(tier)) {
return Response.ok().entity("Client Certificate is not updated for alias " + alias).build();
}
int responseCode = apiProvider
.updateClientCertificate(base64EncodedCert, alias, clientCertificateDTO.getApiIdentifier(), tier,
tenantId);
if (ResponseCode.SUCCESS.getResponseCode() == responseCode) {
//Handle api product case.
if (API_PRODUCT_TYPE.equals(api.getType())) {
APIIdentifier apiIdentifier = api.getId();
APIProductIdentifier apiProductIdentifier =
new APIProductIdentifier(apiIdentifier.getProviderName(), apiIdentifier.getApiName(),
apiIdentifier.getVersion());
APIProduct apiProduct = apiProvider.getAPIProduct(apiProductIdentifier);
apiProvider.updateAPIProduct(apiProduct);
} else {
apiProvider.updateAPI(api);
}
ClientCertMetadataDTO clientCertMetadataDTO = new ClientCertMetadataDTO();
clientCertMetadataDTO.setAlias(alias);
clientCertMetadataDTO.setApiId(api.getUUID());
clientCertMetadataDTO.setTier(clientCertificateDTO.getTierName());
URI updatedCertUri = new URI(RestApiConstants.CLIENT_CERTS_BASE_PATH + "?alias=" + alias);
return Response.ok(updatedCertUri).entity(clientCertMetadataDTO).build();
} else if (ResponseCode.INTERNAL_SERVER_ERROR.getResponseCode() == responseCode) {
RestApiUtil.handleInternalServerError(
"Error while updating the client certificate for the alias " + alias + " due to an internal "
+ "server error", log);
} else if (ResponseCode.CERTIFICATE_NOT_FOUND.getResponseCode() == responseCode) {
RestApiUtil.handleResourceNotFoundError("", log);
} else if (ResponseCode.CERTIFICATE_EXPIRED.getResponseCode() == responseCode) {
RestApiUtil.handleBadRequest(
"Error while updating the client certificate for the alias " + alias + " Certificate Expired.",
log);
}
} catch (APIManagementException e) {
RestApiUtil.handleInternalServerError(
"Error while updating the client certificate for the alias " + alias + " due to an internal "
+ "server error", e, log);
} catch (IOException e) {
RestApiUtil
.handleInternalServerError("Error while encoding client certificate for the alias " + alias, e,
log);
} catch (URISyntaxException e) {
RestApiUtil.handleInternalServerError(
"Error while generating the resource location URI for alias '" + alias + "'", e, log);
} catch (FaultGatewaysException e) {
RestApiUtil.handleInternalServerError(
"Error while publishing the certificate change to gateways for the alias " + alias, e, log);
}
return null;
}
@Override
public Response getAPIClientCertificates(String apiId, Integer limit, Integer offset, String alias,
MessageContext messageContext) {
limit = limit != null ? limit : RestApiConstants.PAGINATION_LIMIT_DEFAULT;
offset = offset != null ? offset : RestApiConstants.PAGINATION_OFFSET_DEFAULT;
List<ClientCertificateDTO> certificates = new ArrayList<>();
String userName = RestApiCommonUtil.getLoggedInUsername();
int tenantId = APIUtil.getTenantId(userName);
String query = CertificateRestApiUtils.buildQueryString("alias", alias, "apiId", apiId);
try {
APIProvider apiProvider = RestApiCommonUtil.getLoggedInUserProvider();
int totalCount = apiProvider.getClientCertificateCount(tenantId);
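            // Fetch certificates only when at least one client certificate exists for the tenant, filtering by alias and API when given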
if (totalCount > 0) {
APIIdentifier apiIdentifier = null;
if (StringUtils.isNotEmpty(apiId)) {
API api = apiProvider.getAPIbyUUID(apiId, RestApiCommonUtil.getLoggedInUserTenantDomain());
apiIdentifier = api.getId();
}
certificates = apiProvider.searchClientCertificates(tenantId, alias, apiIdentifier);
}
ClientCertificatesDTO certificatesDTO = CertificateRestApiUtils
.getPaginatedClientCertificates(certificates, limit, offset, query);
PaginationDTO paginationDTO = new PaginationDTO();
paginationDTO.setLimit(limit);
paginationDTO.setOffset(offset);
paginationDTO.setTotal(totalCount);
certificatesDTO.setPagination(paginationDTO);
return Response.status(Response.Status.OK).entity(certificatesDTO).build();
} catch (APIManagementException e) {
RestApiUtil.handleInternalServerError("Error while retrieving the client certificates.", e, log);
}
return null;
}
@Override
public Response addAPIClientCertificate(String apiId, InputStream certificateInputStream,
Attachment certificateDetail, String alias, String tier,
MessageContext messageContext) {
try {
APIProvider apiProvider = RestApiCommonUtil.getLoggedInUserProvider();
APIIdentifier apiIdentifierFromTable = APIUtil.getAPIIdentifierFromUUID(apiId);
if (apiIdentifierFromTable == null) {
throw new APIMgtResourceNotFoundException("Couldn't retrieve existing API with API UUID: "
+ apiId, ExceptionCodes.from(ExceptionCodes.API_NOT_FOUND,
apiId));
}
ContentDisposition contentDisposition = certificateDetail.getContentDisposition();
String fileName = contentDisposition.getParameter(RestApiConstants.CONTENT_DISPOSITION_FILENAME);
if (StringUtils.isEmpty(alias) || StringUtils.isEmpty(apiId)) {
RestApiUtil.handleBadRequest("The alias and/ or apiId should not be empty", log);
}
if (StringUtils.isBlank(fileName)) {
                RestApiUtil.handleBadRequest(
                        "Certificate addition failed. A valid certificate file should be provided", log);
}
API api = apiProvider.getAPIbyUUID(apiId, RestApiCommonUtil.getLoggedInUserTenantDomain());
String userName = RestApiCommonUtil.getLoggedInUsername();
String base64EncodedCert = CertificateRestApiUtils.generateEncodedCertificate(certificateInputStream);
int responseCode = apiProvider.addClientCertificate(userName, api.getId(), base64EncodedCert, alias, tier);
if (log.isDebugEnabled()) {
log.debug(String.format("Add certificate operation response code : %d", responseCode));
}
if (ResponseCode.SUCCESS.getResponseCode() == responseCode) {
//Handle api product case.
if (API_PRODUCT_TYPE.equals(api.getType())) {
APIIdentifier apiIdentifier = api.getId();
APIProductIdentifier apiProductIdentifier =
new APIProductIdentifier(apiIdentifier.getProviderName(), apiIdentifier.getApiName(),
apiIdentifier.getVersion());
APIProduct apiProduct = apiProvider.getAPIProduct(apiProductIdentifier);
apiProvider.updateAPIProduct(apiProduct);
} else {
apiProvider.updateAPI(api);
}
ClientCertMetadataDTO certificateDTO = new ClientCertMetadataDTO();
certificateDTO.setAlias(alias);
certificateDTO.setApiId(apiId);
certificateDTO.setTier(tier);
URI createdCertUri = new URI(RestApiConstants.CLIENT_CERTS_BASE_PATH + "?alias=" + alias);
return Response.created(createdCertUri).entity(certificateDTO).build();
} else if (ResponseCode.INTERNAL_SERVER_ERROR.getResponseCode() == responseCode) {
RestApiUtil.handleInternalServerError(
"Internal server error while adding the client certificate to " + "API " + apiId, log);
} else if (ResponseCode.ALIAS_EXISTS_IN_TRUST_STORE.getResponseCode() == responseCode) {
RestApiUtil.handleResourceAlreadyExistsError(
"The alias '" + alias + "' already exists in the trust store.", log);
} else if (ResponseCode.CERTIFICATE_EXPIRED.getResponseCode() == responseCode) {
RestApiUtil.handleBadRequest(
"Error while adding the certificate to the API " + apiId + ". " + "Certificate Expired.", log);
}
} catch (APIManagementException e) {
RestApiUtil.handleInternalServerError(
"APIManagement exception while adding the certificate to the API " + apiId + " due to an internal "
+ "server error", e, log);
} catch (IOException e) {
RestApiUtil.handleInternalServerError(
"IOException while generating the encoded certificate for the API " + apiId, e, log);
} catch (URISyntaxException e) {
RestApiUtil.handleInternalServerError(
"Error while generating the resource location URI for alias '" + alias + "'", e, log);
} catch (FaultGatewaysException e) {
RestApiUtil.handleInternalServerError(
"Error while publishing the certificate change to gateways for the alias " + alias, e, log);
}
return null;
}
/**
* Delete API
*
* @param apiId API Id
* @param ifMatch If-Match header value
* @return Status of API Deletion
*/
@Override
public Response deleteAPI(String apiId, String ifMatch, MessageContext messageContext) {
try {
String username = RestApiCommonUtil.getLoggedInUsername();
String tenantDomain = RestApiCommonUtil.getLoggedInUserTenantDomain();
APIProvider apiProvider = RestApiCommonUtil.getProvider(username);
APIIdentifier apiIdentifier = APIMappingUtil.getAPIIdentifierFromUUID(apiId);
if (apiIdentifier == null) {
throw new APIMgtResourceNotFoundException("Couldn't retrieve existing API with API UUID: "
+ apiId, ExceptionCodes.from(ExceptionCodes.API_NOT_FOUND,
apiId));
}
API api = apiProvider.getAPIbyUUID(apiId, tenantDomain);
//check if the API has subscriptions
//Todo : need to optimize this check. This method seems too costly to check if subscription exists
List<SubscribedAPI> apiUsages = apiProvider.getAPIUsageByAPIId(api.getId());
if (apiUsages != null && apiUsages.size() > 0) {
RestApiUtil.handleConflict("Cannot remove the API " + apiId + " as active subscriptions exist", log);
}
List<APIResource> usedProductResources = apiProvider.getUsedProductResources(api.getId());
if (!usedProductResources.isEmpty()) {
RestApiUtil.handleConflict("Cannot remove the API because following resource paths " +
usedProductResources.toString() + " are used by one or more API Products", log);
}
//deletes the API
apiProvider.deleteAPI(api);
return Response.ok().build();
} catch (APIManagementException e) {
//Auth failure occurs when cross tenant accessing APIs. Sends 404, since we don't need to expose the existence of the resource
if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
} else if (isAuthorizationFailure(e)) {
RestApiUtil.handleAuthorizationFailure("Authorization failure while deleting API : " + apiId, e, log);
} else {
String errorMessage = "Error while deleting API : " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
}
return null;
}
/**
* Get resources of an API that are reused by API Products
*
* @param api API
* @return List of resources reused by API Products
*/
private List<APIResource> getUsedProductResources(API api) {
List<APIResource> usedProductResources = new ArrayList<>();
Set<URITemplate> uriTemplates = api.getUriTemplates();
for (URITemplate uriTemplate : uriTemplates) {
// If existing URITemplate is used by any API Products
if (!uriTemplate.retrieveUsedByProducts().isEmpty()) {
APIResource apiResource = new APIResource(uriTemplate.getHTTPVerb(), uriTemplate.getUriTemplate());
usedProductResources.add(apiResource);
}
}
return usedProductResources;
}
/**
* Retrieves the content of a document
*
* @param apiId API identifier
* @param documentId document identifier
* @param ifNoneMatch If-None-Match header value
     * @return Content of the document: either the inline/file content or a redirection to the source URL
*/
@Override
public Response getAPIDocumentContentByDocumentId(String apiId, String documentId,
String ifNoneMatch, MessageContext messageContext) {
try {
APIProvider apiProvider = RestApiCommonUtil.getLoggedInUserProvider();
String tenantDomain = RestApiCommonUtil.getLoggedInUserTenantDomain();
DocumentationContent docContent = apiProvider.getDocumentationContent(apiId, documentId, tenantDomain);
if (docContent == null) {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_DOCUMENTATION, documentId, log);
return null;
}
// gets the content depending on the type of the document
if (docContent.getSourceType().equals(DocumentationContent.ContentSourceType.FILE)) {
String contentType = docContent.getResourceFile().getContentType();
contentType = contentType == null ? RestApiConstants.APPLICATION_OCTET_STREAM : contentType;
String name = docContent.getResourceFile().getName();
return Response.ok(docContent.getResourceFile().getContent())
.header(RestApiConstants.HEADER_CONTENT_TYPE, contentType)
.header(RestApiConstants.HEADER_CONTENT_DISPOSITION, "attachment; filename=\"" + name + "\"")
.build();
} else if (docContent.getSourceType().equals(DocumentationContent.ContentSourceType.INLINE)
|| docContent.getSourceType().equals(DocumentationContent.ContentSourceType.MARKDOWN)) {
String content = docContent.getTextContent();
return Response.ok(content)
.header(RestApiConstants.HEADER_CONTENT_TYPE, APIConstants.DOCUMENTATION_INLINE_CONTENT_TYPE)
.build();
} else if (docContent.getSourceType().equals(DocumentationContent.ContentSourceType.URL)) {
String sourceUrl = docContent.getTextContent();
return Response.seeOther(new URI(sourceUrl)).build();
}
} catch (APIManagementException e) {
// Auth failure occurs when cross tenant accessing APIs. Sends 404, since we don't need to expose the
// existence of the resource
if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
} else if (isAuthorizationFailure(e)) {
RestApiUtil.handleAuthorizationFailure(
"Authorization failure while retrieving document : " + documentId + " of API " + apiId, e, log);
} else {
String errorMessage = "Error while retrieving document " + documentId + " of the API " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
} catch (URISyntaxException e) {
String errorMessage = "Error while retrieving source URI location of " + documentId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
return null;
}
/**
* Add content to a document. Content can be inline or File
*
* @param apiId API identifier
* @param documentId document identifier
* @param inputStream file input stream
* @param fileDetail file details as Attachment
* @param inlineContent inline content for the document
* @param ifMatch If-match header value
* @return updated document as DTO
*/
@Override
public Response addAPIDocumentContent(String apiId, String documentId, String ifMatch,
InputStream inputStream, Attachment fileDetail, String inlineContent,
MessageContext messageContext) {
try {
APIIdentifier apiIdentifier = APIMappingUtil.getAPIIdentifierFromUUID(apiId);
if (apiIdentifier == null) {
throw new APIMgtResourceNotFoundException("Couldn't retrieve existing API with API UUID: "
+ apiId, ExceptionCodes.from(ExceptionCodes.API_NOT_FOUND,
apiId));
}
String tenantDomain = RestApiCommonUtil.getLoggedInUserTenantDomain();
APIProvider apiProvider = RestApiCommonUtil.getLoggedInUserProvider();
//API api = APIMappingUtil.getAPIInfoFromUUID(apiId, tenantDomain);
if (inputStream != null && inlineContent != null) {
RestApiUtil.handleBadRequest("Only one of 'file' and 'inlineContent' should be specified", log);
}
//retrieves the document and send 404 if not found
Documentation documentation = apiProvider.getDocumentation(apiId, documentId, tenantDomain);
if (documentation == null) {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_DOCUMENTATION, documentId, log);
return null;
}
//add content depending on the availability of either input stream or inline content
if (inputStream != null) {
if (!documentation.getSourceType().equals(Documentation.DocumentSourceType.FILE)) {
RestApiUtil.handleBadRequest("Source type of document " + documentId + " is not FILE", log);
}
RestApiPublisherUtils.attachFileToDocument(apiId, documentation, inputStream, fileDetail);
} else if (inlineContent != null) {
if (!documentation.getSourceType().equals(Documentation.DocumentSourceType.INLINE) &&
!documentation.getSourceType().equals(Documentation.DocumentSourceType.MARKDOWN)) {
RestApiUtil.handleBadRequest("Source type of document " + documentId + " is not INLINE " +
"or MARKDOWN", log);
}
DocumentationContent content = new DocumentationContent();
content.setSourceType(ContentSourceType.valueOf(documentation.getSourceType().toString()));
content.setTextContent(inlineContent);
// apiProvider.addDocumentationContent(api, documentation.getName(), inlineContent);
apiProvider.addDocumentationContent(apiId, documentId, tenantDomain, content);
} else {
RestApiUtil.handleBadRequest("Either 'file' or 'inlineContent' should be specified", log);
}
//retrieving the updated doc and the URI
Documentation updatedDoc = apiProvider.getDocumentation(apiId, documentId, tenantDomain);
DocumentDTO documentDTO = DocumentationMappingUtil.fromDocumentationToDTO(updatedDoc);
String uriString = RestApiConstants.RESOURCE_PATH_DOCUMENT_CONTENT
.replace(RestApiConstants.APIID_PARAM, apiId)
.replace(RestApiConstants.DOCUMENTID_PARAM, documentId);
URI uri = new URI(uriString);
return Response.created(uri).entity(documentDTO).build();
} catch (APIManagementException e) {
//Auth failure occurs when cross tenant accessing APIs. Sends 404, since we don't need to expose the existence of the resource
if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
} else if (isAuthorizationFailure(e)) {
RestApiUtil.handleAuthorizationFailure(
"Authorization failure while adding content to the document: " + documentId + " of API "
+ apiId, e, log);
} else {
RestApiUtil.handleInternalServerError("Failed to add content to the document " + documentId, e, log);
}
} catch (URISyntaxException e) {
String errorMessage = "Error while retrieving document content location : " + documentId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
} finally {
IOUtils.closeQuietly(inputStream);
}
return null;
}
/**
* Deletes an existing document of an API
*
* @param apiId API identifier
* @param documentId document identifier
* @param ifMatch If-match header value
* @return 200 response if deleted successfully
*/
@Override
public Response deleteAPIDocument(String apiId, String documentId, String ifMatch,
MessageContext messageContext) {
Documentation documentation;
try {
APIProvider apiProvider = RestApiCommonUtil.getLoggedInUserProvider();
String tenantDomain = RestApiCommonUtil.getLoggedInUserTenantDomain();
APIIdentifier apiIdentifier = APIMappingUtil.getAPIIdentifierFromUUID(apiId);
if (apiIdentifier == null) {
throw new APIMgtResourceNotFoundException("Couldn't retrieve existing API with API UUID: "
+ apiId, ExceptionCodes.from(ExceptionCodes.API_NOT_FOUND,
apiId));
}
//this will fail if user does not have access to the API or the API does not exist
//APIIdentifier apiIdentifier = APIMappingUtil.getAPIIdentifierFromUUID(apiId, tenantDomain);
documentation = apiProvider.getDocumentation(apiId, documentId, tenantDomain);
if (documentation == null) {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_DOCUMENTATION, documentId, log);
}
apiProvider.removeDocumentation(apiId, documentId, tenantDomain);
return Response.ok().build();
} catch (APIManagementException e) {
//Auth failure occurs when cross tenant accessing APIs. Sends 404, since we don't need to expose the existence of the resource
if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
} else if (isAuthorizationFailure(e)) {
RestApiUtil.handleAuthorizationFailure(
"Authorization failure while deleting : " + documentId + " of API " + apiId, e, log);
} else {
String errorMessage = "Error while retrieving API : " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
}
return null;
}
@Override
public Response getAPIDocumentByDocumentId(String apiId, String documentId, String ifNoneMatch,
MessageContext messageContext) {
Documentation documentation;
try {
APIProvider apiProvider = RestApiCommonUtil.getLoggedInUserProvider();
String tenantDomain = RestApiCommonUtil.getLoggedInUserTenantDomain();
documentation = apiProvider.getDocumentation(apiId, documentId, tenantDomain);
if (documentation == null) {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_DOCUMENTATION, documentId, log);
}
DocumentDTO documentDTO = DocumentationMappingUtil.fromDocumentationToDTO(documentation);
return Response.ok().entity(documentDTO).build();
} catch (APIManagementException e) {
//Auth failure occurs when cross tenant accessing APIs. Sends 404, since we don't need to expose the existence of the resource
if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
} else if (isAuthorizationFailure(e)) {
RestApiUtil.handleAuthorizationFailure(
"Authorization failure while retrieving document : " + documentId + " of API " + apiId, e, log);
} else {
String errorMessage = "Error while retrieving document : " + documentId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
}
return null;
}
/**
* Updates an existing document of an API
*
* @param apiId API identifier
* @param documentId document identifier
* @param body updated document DTO
* @param ifMatch If-match header value
* @return updated document DTO as response
*/
@Override
public Response updateAPIDocument(String apiId, String documentId, DocumentDTO body,
String ifMatch, MessageContext messageContext) {
try {
APIProvider apiProvider = RestApiCommonUtil.getLoggedInUserProvider();
String tenantDomain = RestApiCommonUtil.getLoggedInUserTenantDomain();
//this will fail if user does not have access to the API or the API does not exist
APIIdentifier apiIdentifier = APIMappingUtil.getAPIIdentifierFromUUID(apiId);
if (apiIdentifier == null) {
throw new APIMgtResourceNotFoundException("Couldn't retrieve existing API with API UUID: "
+ apiId, ExceptionCodes.from(ExceptionCodes.API_NOT_FOUND,
apiId));
}
String sourceUrl = body.getSourceUrl();
Documentation oldDocument = apiProvider.getDocumentation(apiId, documentId, tenantDomain);
//validation checks for existence of the document
if (body.getType() == null) {
throw new BadRequestException();
}
if (oldDocument == null) {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_DOCUMENTATION, documentId, log);
return null;
}
if (body.getType() == DocumentDTO.TypeEnum.OTHER && org.apache.commons.lang3.StringUtils.isBlank(body.getOtherTypeName())) {
//check otherTypeName for not null if doc type is OTHER
RestApiUtil.handleBadRequest("otherTypeName cannot be empty if type is OTHER.", log);
return null;
}
if (body.getSourceType() == DocumentDTO.SourceTypeEnum.URL &&
(org.apache.commons.lang3.StringUtils.isBlank(sourceUrl) || !RestApiCommonUtil.isURL(sourceUrl))) {
RestApiUtil.handleBadRequest("Invalid document sourceUrl Format", log);
return null;
}
//overriding some properties
body.setName(oldDocument.getName());
Documentation newDocumentation = DocumentationMappingUtil.fromDTOtoDocumentation(body);
newDocumentation.setFilePath(oldDocument.getFilePath());
newDocumentation.setId(documentId);
newDocumentation = apiProvider.updateDocumentation(apiId, newDocumentation, tenantDomain);
return Response.ok().entity(DocumentationMappingUtil.fromDocumentationToDTO(newDocumentation)).build();
} catch (APIManagementException e) {
//Auth failure occurs when cross tenant accessing APIs. Sends 404, since we don't need to expose the existence of the resource
if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
} else if (isAuthorizationFailure(e)) {
RestApiUtil.handleAuthorizationFailure(
"Authorization failure while updating document : " + documentId + " of API " + apiId, e, log);
} else {
String errorMessage = "Error while updating the document " + documentId + " for API : " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
}
return null;
}
/**
* Returns all the documents of the given API identifier that matches to the search condition
*
* @param apiId API identifier
* @param limit max number of records returned
* @param offset starting index
* @param ifNoneMatch If-None-Match header value
     * @return matched documents as a list of DocumentDTOs
*/
@Override
public Response getAPIDocuments(String apiId, Integer limit, Integer offset, String ifNoneMatch,
MessageContext messageContext) {
//pre-processing
//setting default limit and offset values if they are not set
limit = limit != null ? limit : RestApiConstants.PAGINATION_LIMIT_DEFAULT;
offset = offset != null ? offset : RestApiConstants.PAGINATION_OFFSET_DEFAULT;
try {
APIProvider apiProvider = RestApiCommonUtil.getLoggedInUserProvider();
String tenantDomain = RestApiCommonUtil.getLoggedInUserTenantDomain();
//this will fail if user does not have access to the API or the API does not exist
//APIIdentifier apiIdentifier = APIMappingUtil.getAPIIdentifierFromUUID(apiId, tenantDomain);
//List<Documentation> allDocumentation = apiProvider.getAllDocumentation(apiIdentifier);
List<Documentation> allDocumentation = apiProvider.getAllDocumentation(apiId, tenantDomain);
DocumentListDTO documentListDTO = DocumentationMappingUtil.fromDocumentationListToDTO(allDocumentation,
offset, limit);
DocumentationMappingUtil
.setPaginationParams(documentListDTO, apiId, offset, limit, allDocumentation.size());
return Response.ok().entity(documentListDTO).build();
} catch (APIManagementException e) {
//Auth failure occurs when cross tenant accessing APIs. Sends 404, since we don't need to expose the existence of the resource
if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
} else if (isAuthorizationFailure(e)) {
RestApiUtil.handleAuthorizationFailure(
"Authorization failure while retrieving documents of API : " + apiId, e, log);
} else {
String msg = "Error while retrieving documents of API " + apiId;
RestApiUtil.handleInternalServerError(msg, e, log);
}
}
return null;
}
/**
     * Add documentation to an API
     *
     * @param apiId   API identifier
     * @param body    Documentation DTO as request body
     * @param ifMatch If-Match header value
     * @return created document DTO as response
*/
@Override
public Response addAPIDocument(String apiId, DocumentDTO body, String ifMatch, MessageContext messageContext) {
try {
APIIdentifier apiIdentifier = APIMappingUtil.getAPIIdentifierFromUUID(apiId);
if (apiIdentifier == null) {
throw new APIMgtResourceNotFoundException("Couldn't retrieve existing API with API UUID: "
+ apiId, ExceptionCodes.from(ExceptionCodes.API_NOT_FOUND,
apiId));
}
Documentation documentation = PublisherCommonUtils.addDocumentationToAPI(body, apiId);
DocumentDTO newDocumentDTO = DocumentationMappingUtil.fromDocumentationToDTO(documentation);
String uriString = RestApiConstants.RESOURCE_PATH_DOCUMENTS_DOCUMENT_ID
.replace(RestApiConstants.APIID_PARAM, apiId)
.replace(RestApiConstants.DOCUMENTID_PARAM, documentation.getId());
URI uri = new URI(uriString);
return Response.created(uri).entity(newDocumentDTO).build();
} catch (APIManagementException e) {
//Auth failure occurs when cross tenant accessing APIs. Sends 404, since we don't need to expose the existence of the resource
if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
} else if (isAuthorizationFailure(e)) {
RestApiUtil
.handleAuthorizationFailure("Authorization failure while adding documents of API : " + apiId, e,
log);
} else {
String errorMessage = "Error while adding the document for API : " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
} catch (URISyntaxException e) {
String errorMessage = "Error while retrieving location for document " + body.getName() + " of API " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
return null;
}
/**
     * Get the list of external stores to which the given API has already been published.
* @param apiId API Identifier
* @param ifNoneMatch If-None-Match header value
* @param messageContext CXF Message Context
* @return External Store list of published API
*/
@Override
public Response getAllPublishedExternalStoresByAPI(String apiId, String ifNoneMatch, MessageContext messageContext)
throws APIManagementException {
APIIdentifier apiIdentifier = null;
String tenantDomain = RestApiCommonUtil.getLoggedInUserTenantDomain();
APIProvider apiProvider = RestApiCommonUtil.getLoggedInUserProvider();
try {
apiIdentifier = APIMappingUtil.getAPIIdentifierFromUUID(apiId);
} catch (APIManagementException e) {
if (RestApiUtil.isDueToResourceNotFound(e)) {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
} else {
String errorMessage = "Error while getting API: " + apiId;
log.error(errorMessage, e);
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
}
Set<APIStore> publishedStores = apiProvider.getPublishedExternalAPIStores(apiIdentifier);
APIExternalStoreListDTO apiExternalStoreListDTO =
ExternalStoreMappingUtil.fromAPIExternalStoreCollectionToDTO(publishedStores);
return Response.ok().entity(apiExternalStoreListDTO).build();
}
/**
     * Gets generated mock scripts of an API
     *
     * @param apiId  API Id
     * @param ifNoneMatch  If-None-Match header value
     * @param messageContext message context
     * @return list of API resource mediation policies with generated mock payloads
* @throws APIManagementException
*/
@Override
public Response getGeneratedMockScriptsOfAPI(String apiId, String ifNoneMatch, MessageContext messageContext) throws APIManagementException {
String tenantDomain = RestApiCommonUtil.getLoggedInUserTenantDomain();
APIProvider apiProvider = RestApiCommonUtil.getLoggedInUserProvider();
API originalAPI = apiProvider.getAPIbyUUID(apiId, tenantDomain);
APIIdentifier apiIdentifier = originalAPI.getId();
String apiDefinition = apiProvider.getOpenAPIDefinition(apiIdentifier, tenantDomain);
Map<String, Object> examples = OASParserUtil.generateExamples(apiDefinition);
List<APIResourceMediationPolicy> policies = (List<APIResourceMediationPolicy>) examples.get(APIConstants.MOCK_GEN_POLICY_LIST);
return Response.ok().entity(APIMappingUtil.fromMockPayloadsToListDTO(policies)).build();
}
/**
* Retrieves the WSDL meta information of the given API. The API must be a SOAP API.
*
* @param apiId Id of the API
* @param messageContext CXF Message Context
* @return WSDL meta information of the API
* @throws APIManagementException when error occurred while retrieving API WSDL meta info.
* eg: when API doesn't exist, API exists but it is not a SOAP API.
*/
@Override
public Response getWSDLInfoOfAPI(String apiId, MessageContext messageContext)
throws APIManagementException {
String tenantDomain = RestApiCommonUtil.getLoggedInUserTenantDomain();
APIProvider apiProvider = RestApiCommonUtil.getLoggedInUserProvider();
API api = apiProvider.getLightweightAPIByUUID(apiId, tenantDomain);
WSDLInfoDTO wsdlInfoDTO = APIMappingUtil.getWsdlInfoDTO(api);
if (wsdlInfoDTO == null) {
throw new APIManagementException(
ExceptionCodes.from(ExceptionCodes.NO_WSDL_AVAILABLE_FOR_API,
api.getId().getApiName(), api.getId().getVersion()));
} else {
return Response.ok().entity(wsdlInfoDTO).build();
}
}
/**
* Retrieves API Lifecycle history information
*
* @param apiId API Id
* @param ifNoneMatch If-None-Match header value
* @return API Lifecycle history information
*/
@Override
public Response getAPILifecycleHistory(String apiId, String ifNoneMatch, MessageContext messageContext) {
try {
String tenantDomain = RestApiCommonUtil.getLoggedInUserTenantDomain();
APIProvider apiProvider = RestApiCommonUtil.getLoggedInUserProvider();
APIIdentifier apiIdentifier;
APIRevision apiRevision = ApiMgtDAO.getInstance().checkAPIUUIDIsARevisionUUID(apiId);
if (apiRevision != null && apiRevision.getApiUUID() != null) {
apiIdentifier = APIMappingUtil.getAPIIdentifierFromUUID(apiRevision.getApiUUID());
} else {
apiIdentifier = APIMappingUtil.getAPIIdentifierFromUUID(apiId);
}
List<LifeCycleEvent> lifeCycleEvents = apiProvider.getLifeCycleEvents(apiIdentifier);
LifecycleHistoryDTO historyDTO = APIMappingUtil.fromLifecycleHistoryModelToDTO(lifeCycleEvents);
return Response.ok().entity(historyDTO).build();
} catch (APIManagementException e) {
//Auth failure occurs when cross tenant accessing APIs. Sends 404, since we don't need to expose the existence of the resource
if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
} else if (isAuthorizationFailure(e)) {
RestApiUtil.handleAuthorizationFailure("Authorization failure while deleting API : " + apiId, e, log);
} else {
String errorMessage = "Error while deleting API : " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
}
return null;
}
/**
* Retrieves API Lifecycle state information
*
* @param apiId API Id
* @param ifNoneMatch If-None-Match header value
* @return API Lifecycle state information
*/
@Override
public Response getAPILifecycleState(String apiId, String ifNoneMatch, MessageContext messageContext) {
LifecycleStateDTO lifecycleStateDTO = getLifecycleState(apiId);
return Response.ok().entity(lifecycleStateDTO).build();
}
/**
* Retrieves API Lifecycle state information
*
* @param apiId API Id
* @return API Lifecycle state information
*/
private LifecycleStateDTO getLifecycleState(String apiId) {
return getLifecycleState(null, apiId);
}
private LifecycleStateDTO getLifecycleState(APIIdentifier identifier, String apiId) {
try {
String tenantDomain = RestApiCommonUtil.getLoggedInUserTenantDomain();
APIProvider apiProvider = RestApiCommonUtil.getLoggedInUserProvider();
APIIdentifier apiIdentifier;
if (identifier == null) {
                APIRevision apiRevision = ApiMgtDAO.getInstance().checkAPIUUIDIsARevisionUUID(apiId);
                if (apiRevision != null && apiRevision.getApiUUID() != null) {
                    apiIdentifier = APIMappingUtil.getAPIInfoFromUUID(apiId, tenantDomain).getId();
} else {
apiIdentifier = APIMappingUtil.getAPIIdentifierFromUUID(apiId);
}
} else {
apiIdentifier = identifier;
}
Map<String, Object> apiLCData = apiProvider.getAPILifeCycleData(apiId, tenantDomain);
if (apiLCData == null) {
String errorMessage = "Error while getting lifecycle state for API : " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, log);
}
boolean apiOlderVersionExist = false;
            // check whether older versions of the current API exist
APIVersionStringComparator comparator = new APIVersionStringComparator();
Set<String> versions = apiProvider.getAPIVersions(
APIUtil.replaceEmailDomain(apiIdentifier.getProviderName()), apiIdentifier.getName());
for (String tempVersion : versions) {
if (comparator.compare(tempVersion, apiIdentifier.getVersion()) < 0) {
apiOlderVersionExist = true;
break;
}
}
return APIMappingUtil.fromLifecycleModelToDTO(apiLCData, apiOlderVersionExist);
} catch (APIManagementException e) {
//Auth failure occurs when cross tenant accessing APIs. Sends 404, since we don't need to expose the existence of the resource
if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
} else if (isAuthorizationFailure(e)) {
RestApiUtil.handleAuthorizationFailure("Authorization failure while deleting API : " + apiId, e, log);
} else {
String errorMessage = "Error while deleting API : " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
}
return null;
}
@Override
public Response deleteAPILifecycleStatePendingTasks(String apiId, MessageContext messageContext) {
try {
APIProvider apiProvider = RestApiCommonUtil.getLoggedInUserProvider();
String tenantDomain = RestApiCommonUtil.getLoggedInUserTenantDomain();
APIIdentifier apiIdentifierFromTable = APIMappingUtil.getAPIIdentifierFromUUID(apiId);
if (apiIdentifierFromTable == null) {
throw new APIMgtResourceNotFoundException("Couldn't retrieve existing API with API UUID: "
+ apiId, ExceptionCodes.from(ExceptionCodes.API_NOT_FOUND,
apiId));
}
APIIdentifier apiIdentifier = APIMappingUtil.getAPIIdentifierFromApiIdOrUUID(apiId, tenantDomain);
apiProvider.deleteWorkflowTask(apiIdentifier);
return Response.ok().build();
} catch (APIManagementException e) {
String errorMessage = "Error while deleting task ";
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
return null;
}
@Override
public Response getAllAPIMediationPolicies(String apiId, Integer limit, Integer offset, String query,
String ifNoneMatch, MessageContext messageContext) {
//pre-processing
//setting default limit and offset values if they are not set
limit = limit != null ? limit : RestApiConstants.PAGINATION_LIMIT_DEFAULT;
offset = offset != null ? offset : RestApiConstants.PAGINATION_OFFSET_DEFAULT;
try {
APIProvider apiProvider = RestApiCommonUtil.getLoggedInUserProvider();
String tenantDomain = RestApiCommonUtil.getLoggedInUserTenantDomain();
//Getting list of API specific mediation policies
List<Mediation> mediationList =
apiProvider.getAllApiSpecificMediationPolicies(apiId, tenantDomain);
//Converting list of mediation policies to DTO
MediationListDTO mediationListDTO =
MediationMappingUtil.fromMediationListToDTO(mediationList, offset, limit);
return Response.ok().entity(mediationListDTO).build();
} catch (APIManagementException e) {
//Auth failure occurs when cross tenant accessing APIs. Sends 404, since we don't need
// to expose the existence of the resource
if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
} else if (isAuthorizationFailure(e)) {
RestApiUtil.handleAuthorizationFailure(
"Authorization failure while retrieving mediation policies of API " + apiId, e, log);
} else {
String errorMessage = "Error while retrieving all api specific mediation policies" +
" of API : " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
}
return null;
}
@Override
public Response deleteAPIMediationPolicyByPolicyId(String apiId, String mediationPolicyId,
String ifMatch, MessageContext messageContext) {
APIIdentifier apiIdentifier;
try {
APIIdentifier apiIdentifierFromTable = APIMappingUtil.getAPIIdentifierFromUUID(apiId);
if (apiIdentifierFromTable == null) {
throw new APIMgtResourceNotFoundException("Couldn't retrieve existing API with API UUID: "
+ apiId, ExceptionCodes.from(ExceptionCodes.API_NOT_FOUND,
apiId));
}
String tenantDomain = RestApiCommonUtil.getLoggedInUserTenantDomain();
apiIdentifier = APIMappingUtil.getAPIIdentifierFromApiIdOrUUID(apiId,
tenantDomain);
API api = APIMappingUtil.getAPIFromApiIdOrUUID(apiId, tenantDomain);
APIProvider apiProvider = RestApiCommonUtil.getLoggedInUserProvider();
            //Getting the specified mediation policy
Mediation mediation =
apiProvider.getApiSpecificMediationPolicyByPolicyId(apiId, mediationPolicyId, tenantDomain);
if (mediation != null) {
if (isAPIModified(api, mediation)) {
API oldAPI = APIMappingUtil.getAPIFromApiIdOrUUID(apiId, tenantDomain); // TODO do a deep copy
apiProvider.updateAPI(oldAPI, api);
}
} else {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_POLICY, mediationPolicyId, log);
}
apiProvider.deleteApiSpecificMediationPolicy(apiId, mediationPolicyId, tenantDomain);
} catch (APIManagementException e) {
//Auth failure occurs when cross tenant accessing APIs. Sends 404, since we don't need
// to expose the existence of the resource
if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
} else if (isAuthorizationFailure(e)) {
RestApiUtil.handleAuthorizationFailure(
"Authorization failure while deleting mediation policies of API " + apiId, e, log);
} else {
String errorMessage = "Error while deleting API specific mediation policy : " +
mediationPolicyId + "of API " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
} catch (FaultGatewaysException e) {
String errorMessage = "Error while updating API : " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
return null;
}
/**
     * Returns a specific mediation policy, identified by its uuid, that belongs to the given API
     *
     * @param apiId             API uuid
     * @param mediationPolicyId mediation policy uuid
     * @param ifNoneMatch       If-None-Match header value
     * @return the matched mediation policy
*/
@Override
public Response getAPIMediationPolicyByPolicyId(String apiId, String mediationPolicyId,
String ifNoneMatch, MessageContext messageContext) {
try {
String tenantDomain = RestApiCommonUtil.getLoggedInUserTenantDomain();
APIProvider apiProvider = RestApiCommonUtil.getLoggedInUserProvider();
//Getting specified mediation policy
Mediation mediation =
apiProvider.getApiSpecificMediationPolicyByPolicyId(apiId, mediationPolicyId, tenantDomain);
if (mediation != null) {
MediationDTO mediationDTO =
MediationMappingUtil.fromMediationToDTO(mediation);
return Response.ok().entity(mediationDTO).build();
} else {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_POLICY, mediationPolicyId, log);
}
} catch (APIManagementException e) {
//Auth failure occurs when cross tenant accessing APIs. Sends 404, since we don't need
// to expose the existence of the resource
if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
} else if (isAuthorizationFailure(e)) {
RestApiUtil.handleAuthorizationFailure(
"Authorization failure while getting mediation policy with uuid " + mediationPolicyId
+ " of API " + apiId, e, log);
} else {
String errorMessage = "Error while getting mediation policy with uuid "
+ mediationPolicyId + " of API " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
}
return null;
}
/**
     * Updates the content of an existing API specific mediation policy
     *
     * @param apiId             API identifier
     * @param mediationPolicyId uuid of the mediation policy
     * @param type              type of the mediation policy (in/out/fault)
     * @param ifMatch           If-Match header value
     * @param fileInputStream   input stream of the mediation policy
     * @param fileDetail        mediation policy file
     * @param inlineContent     mediation policy content
* @return updated mediation DTO as response
*/
@Override
public Response updateAPIMediationPolicyContentByPolicyId(String apiId, String mediationPolicyId,
String type, String ifMatch, InputStream fileInputStream, Attachment fileDetail,
String inlineContent, MessageContext messageContext) {
try {
APIIdentifier apiIdentifierFromTable = APIMappingUtil.getAPIIdentifierFromUUID(apiId);
if (apiIdentifierFromTable == null) {
throw new APIMgtResourceNotFoundException("Couldn't retrieve existing API with API UUID: "
+ apiId, ExceptionCodes.from(ExceptionCodes.API_NOT_FOUND,
apiId));
}
String tenantDomain = RestApiCommonUtil.getLoggedInUserTenantDomain();
APIProvider apiProvider = RestApiCommonUtil.getLoggedInUserProvider();
Mediation mediationResource = apiProvider
.getApiSpecificMediationPolicyByPolicyId(apiId, mediationPolicyId, tenantDomain);
if (mediationResource != null) {
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
IOUtils.copy(fileInputStream, outputStream);
byte[] sequenceBytes = outputStream.toByteArray();
InputStream inSequenceStream = new ByteArrayInputStream(sequenceBytes);
String content = IOUtils.toString(inSequenceStream, StandardCharsets.UTF_8.name());
OMElement seqElement = APIUtil.buildOMElement(new ByteArrayInputStream(sequenceBytes));
String localName = seqElement.getLocalName();
Mediation returnedPolicy;
if (APIConstants.MEDIATION_SEQUENCE_ELEM.equals(localName)) {
Mediation mediationPolicy = new Mediation();
mediationPolicy.setConfig(content);
mediationPolicy.setName(localName);
mediationPolicy.setType(type);
mediationPolicy.setUuid(mediationPolicyId);
//Adding api specific mediation policy
returnedPolicy = apiProvider.updateApiSpecificMediationPolicyContent(apiId, mediationPolicy, tenantDomain);
} else {
throw new APIManagementException("Sequence is malformed");
}
if (returnedPolicy != null) {
String uuid = returnedPolicy.getUuid();
String uriString = RestApiConstants.RESOURCE_PATH_API_MEDIATION
.replace(RestApiConstants.APIID_PARAM, apiId) + "/" + uuid;
URI uri = new URI(uriString);
MediationDTO updatedMediationDTO =
MediationMappingUtil.fromMediationToDTO(returnedPolicy);
return Response.ok(uri).entity(updatedMediationDTO).build();
}
} else {
                //If the registry resource does not exist
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_POLICY, mediationPolicyId, log);
}
} catch (APIManagementException e) {
//Auth failure occurs when cross tenant accessing APIs. Sends 404, since we don't need
// to expose the existence of the resource
if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
} else if (isAuthorizationFailure(e)) {
RestApiUtil.handleAuthorizationFailure(
"Authorization failure while updating the mediation policy with uuid " + mediationPolicyId
+ " of API " + apiId, e, log);
} else {
String errorMessage = "Error occurred while updating the mediation policy with uuid " +
mediationPolicyId + " of API " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
} catch (URISyntaxException e) {
String errorMessage = "Error while getting location header for uploaded " +
"mediation policy " + mediationPolicyId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
} catch (Exception e) {
RestApiUtil.handleInternalServerError("An Error has occurred while adding mediation policy", e, log);
} finally {
IOUtils.closeQuietly(fileInputStream);
}
return null;
}
/**
     * Retrieve the content of an API specific mediation policy
     *
     * @param apiId             API identifier
     * @param mediationPolicyId uuid of the mediation policy
     * @param ifNoneMatch       If-None-Match header value
     * @return the mediation policy content as an attachment
*/
@Override
public Response getAPIMediationPolicyContentByPolicyId(String apiId, String mediationPolicyId, String ifNoneMatch,
MessageContext messageContext) {
try {
APIProvider apiProvider = RestApiCommonUtil.getLoggedInUserProvider();
String tenantDomain = RestApiCommonUtil.getLoggedInUserTenantDomain();
// Getting resource correspond to the given uuid
Mediation mediationResource = apiProvider.getApiSpecificMediationPolicyByPolicyId(apiId, mediationPolicyId,
tenantDomain);
if (mediationResource == null) {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_MEDIATION_POLICY, mediationPolicyId,
log);
return null;
}
            InputStream fileDataStream =
                    new ByteArrayInputStream(mediationResource.getConfig().getBytes(StandardCharsets.UTF_8));
String name = mediationResource.getName();
return Response.ok(fileDataStream)
.header(RestApiConstants.HEADER_CONTENT_TYPE, RestApiConstants.APPLICATION_XML)
.header(RestApiConstants.HEADER_CONTENT_DISPOSITION, "attachment; filename=\"" + name + "\"")
.build();
} catch (APIManagementException e) {
// Auth failure occurs when cross tenant accessing APIs. Sends 404, since we don't need to expose the
// existence of the resource
if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
} else if (isAuthorizationFailure(e)) {
                RestApiUtil.handleAuthorizationFailure(
                        "Authorization failure while retrieving mediation policy : " + mediationPolicyId + " of API "
                                + apiId, e, log);
            } else {
                String errorMessage = "Error while retrieving mediation policy " + mediationPolicyId + " of the API "
                        + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
}
return null;
}
/**
     * Add an API specific mediation policy
     *
     * @param apiId           API identifier
     * @param type            Type of the mediation policy
     * @param ifMatch         If-Match header value
     * @param fileInputStream input stream of the mediation policy
     * @param fileDetail      mediation policy file
     * @param inlineContent   mediation policy content
     * @return created mediation DTO as response
*/
@Override
public Response addAPIMediationPolicy(String apiId, String type, String ifMatch, InputStream
fileInputStream, Attachment fileDetail, String inlineContent, MessageContext messageContext)
throws APIManagementException {
String fileName = "";
try {
APIIdentifier apiIdentifierFromTable = APIMappingUtil.getAPIIdentifierFromUUID(apiId);
if (apiIdentifierFromTable == null) {
throw new APIMgtResourceNotFoundException("Couldn't retrieve existing API with API UUID: "
+ apiId, ExceptionCodes.from(ExceptionCodes.API_NOT_FOUND,
apiId));
}
String tenantDomain = RestApiCommonUtil.getLoggedInUserTenantDomain();
APIProvider apiProvider = RestApiCommonUtil.getLoggedInUserProvider();
if (fileInputStream != null && inlineContent != null) {
RestApiUtil.handleBadRequest("Only one of 'file' and 'inlineContent' should be specified", log);
}
            if (!StringUtils.isEmpty(type)) {
                type = type.toLowerCase();
            } else {
                type = "in";
            }
Mediation returnedPolicy = null;
if (fileInputStream != null) {
fileName = fileDetail.getDataHandler().getName();
String fileContentType = URLConnection.guessContentTypeFromName(fileName);
if (org.apache.commons.lang3.StringUtils.isBlank(fileContentType)) {
fileContentType = fileDetail.getContentType().toString();
}
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
IOUtils.copy(fileInputStream, outputStream);
byte[] sequenceBytes = outputStream.toByteArray();
InputStream inSequenceStream = new ByteArrayInputStream(sequenceBytes);
String content = IOUtils.toString(inSequenceStream, StandardCharsets.UTF_8.name());
OMElement seqElement = APIUtil.buildOMElement(new ByteArrayInputStream(sequenceBytes));
String localName = seqElement.getLocalName();
fileName = seqElement.getAttributeValue(new QName("name"));
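                // The uploaded payload is expected to be a Synapse mediation sequence whose root element name
                // matches APIConstants.MEDIATION_SEQUENCE_ELEM. A minimal, purely illustrative example
                // (the sequence name and mediators below are hypothetical):
                //   <sequence xmlns="http://ws.apache.org/ns/synapse" name="custom-in-sequence">
                //       <log level="full"/>
                //   </sequence>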
if (APIConstants.MEDIATION_SEQUENCE_ELEM.equals(localName)) {
Mediation mediationPolicy = new Mediation();
mediationPolicy.setConfig(content);
mediationPolicy.setName(fileName);
mediationPolicy.setType(type);
//Adding api specific mediation policy
returnedPolicy = apiProvider.addApiSpecificMediationPolicy(apiId, mediationPolicy, tenantDomain);
} else {
throw new APIManagementException("Sequence is malformed");
}
}
if (inlineContent != null) {
//Extracting the file name specified in the config
fileName = this.getMediationNameFromConfig(inlineContent);
Mediation mediationPolicy = new Mediation();
mediationPolicy.setConfig(inlineContent);
mediationPolicy.setName(fileName.replace(APIConstants.MEDIATION_CONFIG_EXT, ""));
mediationPolicy.setType(type);
returnedPolicy = apiProvider.addApiSpecificMediationPolicy(apiId, mediationPolicy, tenantDomain);
}
if (returnedPolicy != null) {
String uriString = RestApiConstants.RESOURCE_PATH_API_MEDIATION
.replace(RestApiConstants.APIID_PARAM, apiId) + "/" + returnedPolicy.getUuid();
URI uri = new URI(uriString);
MediationDTO createdPolicy =
MediationMappingUtil.fromMediationToDTO(returnedPolicy);
return Response.created(uri).entity(createdPolicy).build();
}
} catch (APIManagementException e) {
//Auth failure occurs when cross tenant accessing APIs. Sends 404, since we don't need
// to expose the existence of the resource
if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
} else if (isAuthorizationFailure(e)) { //this is due to access control restriction.
RestApiUtil.handleAuthorizationFailure(
"Authorization failure while adding mediation policy for the API " + apiId, e, log);
} else {
throw e;
}
} catch (URISyntaxException e) {
String errorMessage = "Error while getting location header for created " +
"mediation policy " + fileName;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
} catch (Exception e) {
RestApiUtil.handleInternalServerError("An Error has occurred while adding mediation policy", e, log);
} finally {
IOUtils.closeQuietly(fileInputStream);
}
return null;
}
/**
* Get API monetization status and monetized tier to billing plan mapping
*
* @param apiId API ID
* @param messageContext message context
* @return API monetization status and monetized tier to billing plan mapping
*/
@Override
public Response getAPIMonetization(String apiId, MessageContext messageContext) {
try {
if (StringUtils.isBlank(apiId)) {
String errorMessage = "API ID cannot be empty or null when retrieving monetized plans.";
RestApiUtil.handleBadRequest(errorMessage, log);
}
APIProvider apiProvider = RestApiCommonUtil.getLoggedInUserProvider();
String tenantDomain = RestApiCommonUtil.getLoggedInUserTenantDomain();
APIIdentifier apiIdentifier;
APIRevision apiRevision = ApiMgtDAO.getInstance().checkAPIUUIDIsARevisionUUID(apiId);
if (apiRevision != null && apiRevision.getApiUUID() != null) {
apiIdentifier = APIMappingUtil.getAPIIdentifierFromUUID(apiRevision.getApiUUID());
} else {
apiIdentifier = APIMappingUtil.getAPIIdentifierFromUUID(apiId);
}
API api = apiProvider.getAPI(apiIdentifier);
Monetization monetizationImplementation = apiProvider.getMonetizationImplClass();
Map<String, String> monetizedPoliciesToPlanMapping = monetizationImplementation.
getMonetizedPoliciesToPlanMapping(api);
APIMonetizationInfoDTO monetizationInfoDTO = APIMappingUtil.getMonetizedTiersDTO
(apiIdentifier, monetizedPoliciesToPlanMapping);
return Response.ok().entity(monetizationInfoDTO).build();
} catch (APIManagementException e) {
String errorMessage = "Failed to retrieve monetized plans for API : " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
} catch (MonetizationException e) {
String errorMessage = "Failed to fetch monetized plans of API : " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
return Response.serverError().build();
}
/**
* Monetize (enable or disable) for a given API
*
* @param apiId API ID
* @param body request body
* @param messageContext message context
* @return monetizationDTO
*/
@Override
public Response addAPIMonetization(String apiId, APIMonetizationInfoDTO body, MessageContext messageContext) {
try {
if (StringUtils.isBlank(apiId)) {
String errorMessage = "API ID cannot be empty or null when configuring monetization.";
RestApiUtil.handleBadRequest(errorMessage, log);
}
APIProvider apiProvider = RestApiCommonUtil.getLoggedInUserProvider();
String tenantDomain = RestApiCommonUtil.getLoggedInUserTenantDomain();
APIIdentifier apiIdentifier = APIMappingUtil.getAPIIdentifierFromUUID(apiId);
if (apiIdentifier == null) {
throw new APIMgtResourceNotFoundException("Couldn't retrieve existing API with API UUID: "
+ apiId, ExceptionCodes.from(ExceptionCodes.API_NOT_FOUND,
apiId));
}
API api = apiProvider.getAPI(apiIdentifier);
if (!APIConstants.PUBLISHED.equalsIgnoreCase(api.getStatus())) {
String errorMessage = "API " + apiIdentifier.getApiName() +
" should be in published state to configure monetization.";
RestApiUtil.handleBadRequest(errorMessage, log);
}
//set the monetization status
boolean monetizationEnabled = body.isEnabled();
api.setMonetizationStatus(monetizationEnabled);
//clear the existing properties related to monetization
api.getMonetizationProperties().clear();
Map<String, String> monetizationProperties = body.getProperties();
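            // Illustrative (hypothetical) request payload; the accepted property keys depend entirely on the
            // configured Monetization implementation:
            //   { "enabled": true, "properties": { "billingPlan": "...", "connectApiKey": "..." } }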
if (MapUtils.isNotEmpty(monetizationProperties)) {
String errorMessage = RestApiPublisherUtils.validateMonetizationProperties(monetizationProperties);
if (!errorMessage.isEmpty()) {
RestApiUtil.handleBadRequest(errorMessage, log);
}
for (Map.Entry<String, String> currentEntry : monetizationProperties.entrySet()) {
api.addMonetizationProperty(currentEntry.getKey(), currentEntry.getValue());
}
}
Monetization monetizationImplementation = apiProvider.getMonetizationImplClass();
HashMap monetizationDataMap = new Gson().fromJson(api.getMonetizationProperties().toString(), HashMap.class);
boolean isMonetizationStateChangeSuccessful = false;
if (MapUtils.isEmpty(monetizationDataMap)) {
String errorMessage = "Monetization is not configured. Monetization data is empty for "
+ apiIdentifier.getApiName();
RestApiUtil.handleBadRequest(errorMessage, log);
}
try {
if (monetizationEnabled) {
isMonetizationStateChangeSuccessful = monetizationImplementation.enableMonetization
(tenantDomain, api, monetizationDataMap);
} else {
isMonetizationStateChangeSuccessful = monetizationImplementation.disableMonetization
(tenantDomain, api, monetizationDataMap);
}
} catch (MonetizationException e) {
String errorMessage = "Error while changing monetization status for API ID : " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
if (isMonetizationStateChangeSuccessful) {
apiProvider.configureMonetizationInAPIArtifact(api);
APIMonetizationInfoDTO monetizationInfoDTO = APIMappingUtil.getMonetizationInfoDTO(apiIdentifier);
return Response.ok().entity(monetizationInfoDTO).build();
} else {
String errorMessage = "Unable to change monetization status for API : " + apiId;
RestApiUtil.handleBadRequest(errorMessage, log);
}
} catch (APIManagementException e) {
String errorMessage = "Error while configuring monetization for API ID : " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
return Response.serverError().build();
}
/**
* Publish API to given external stores.
*
* @param apiId API Id
* @param externalStoreIds External Store Ids
* @param ifMatch If-match header value
* @param messageContext CXF Message Context
* @return Response of published external store list
*/
@Override
public Response publishAPIToExternalStores(String apiId, String externalStoreIds, String ifMatch,
MessageContext messageContext) throws APIManagementException {
String tenantDomain = RestApiCommonUtil.getLoggedInUserTenantDomain();
APIProvider apiProvider = RestApiCommonUtil.getLoggedInUserProvider();
API api = null;
List<String> externalStoreIdList = Arrays.asList(externalStoreIds.split("\\s*,\\s*"));
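        // 'externalStoreIds' is expected as a comma separated list of store identifiers,
        // e.g. "Store1, Store2" (identifiers here are illustrative only).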
try {
APIIdentifier apiIdentifier = APIMappingUtil.getAPIIdentifierFromUUID(apiId);
if (apiIdentifier == null) {
throw new APIMgtResourceNotFoundException("Couldn't retrieve existing API with API UUID: "
+ apiId, ExceptionCodes.from(ExceptionCodes.API_NOT_FOUND,
apiId));
}
api = apiProvider.getAPIbyUUID(apiId, tenantDomain);
} catch (APIManagementException e) {
if (RestApiUtil.isDueToResourceNotFound(e)) {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
} else {
String errorMessage = "Error while getting API: " + apiId;
log.error(errorMessage, e);
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
}
if (apiProvider.publishToExternalAPIStores(api, externalStoreIdList)) {
Set<APIStore> publishedStores = apiProvider.getPublishedExternalAPIStores(api.getId());
APIExternalStoreListDTO apiExternalStoreListDTO =
ExternalStoreMappingUtil.fromAPIExternalStoreCollectionToDTO(publishedStores);
return Response.ok().entity(apiExternalStoreListDTO).build();
}
return Response.serverError().build();
}
/**
* Get the resource policies(inflow/outflow).
*
* @param apiId API ID
* @param sequenceType sequence type('in' or 'out')
* @param resourcePath api resource path
* @param verb http verb
* @param ifNoneMatch If-None-Match header value
* @return json response of the resource policies according to the resource path
*/
@Override
public Response getAPIResourcePolicies(String apiId, String sequenceType, String resourcePath,
String verb, String ifNoneMatch, MessageContext messageContext) {
try {
String tenantDomain = RestApiCommonUtil.getLoggedInUserTenantDomain();
APIProvider provider = RestApiCommonUtil.getLoggedInUserProvider();
API api = provider.getLightweightAPIByUUID(apiId, tenantDomain);
if (APIConstants.API_TYPE_SOAPTOREST.equals(api.getType())) {
if (StringUtils.isEmpty(sequenceType) || !(RestApiConstants.IN_SEQUENCE.equals(sequenceType)
|| RestApiConstants.OUT_SEQUENCE.equals(sequenceType))) {
String errorMessage = "Sequence type should be either of the values from 'in' or 'out'";
RestApiUtil.handleBadRequest(errorMessage, log);
}
String resourcePolicy = SequenceUtils.getRestToSoapConvertedSequence(api, sequenceType);
if (StringUtils.isEmpty(resourcePath) && StringUtils.isEmpty(verb)) {
ResourcePolicyListDTO resourcePolicyListDTO = APIMappingUtil
.fromResourcePolicyStrToDTO(resourcePolicy);
return Response.ok().entity(resourcePolicyListDTO).build();
}
if (StringUtils.isNotEmpty(resourcePath) && StringUtils.isNotEmpty(verb)) {
JSONObject sequenceObj = (JSONObject) new JSONParser().parse(resourcePolicy);
JSONObject resultJson = new JSONObject();
String key = resourcePath + "_" + verb;
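                    // Resource policies are keyed by "<resourcePath>_<httpVerb>",
                    // e.g. "/checkPhoneNumber_post" (path and verb shown here are hypothetical).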
JSONObject sequenceContent = (JSONObject) sequenceObj.get(key);
if (sequenceContent == null) {
String errorMessage = "Cannot find any resource policy for Resource path : " + resourcePath +
" with type: " + verb;
RestApiUtil.handleResourceNotFoundError(errorMessage, log);
}
resultJson.put(key, sequenceObj.get(key));
ResourcePolicyListDTO resourcePolicyListDTO = APIMappingUtil
.fromResourcePolicyStrToDTO(resultJson.toJSONString());
return Response.ok().entity(resourcePolicyListDTO).build();
} else if (StringUtils.isEmpty(resourcePath)) {
String errorMessage = "Resource path cannot be empty for the defined verb: " + verb;
RestApiUtil.handleBadRequest(errorMessage, log);
} else if (StringUtils.isEmpty(verb)) {
String errorMessage = "HTTP verb cannot be empty for the defined resource path: " + resourcePath;
RestApiUtil.handleBadRequest(errorMessage, log);
}
} else {
String errorMessage = "The provided api with id: " + apiId + " is not a soap to rest converted api.";
RestApiUtil.handleBadRequest(errorMessage, log);
}
} catch (APIManagementException e) {
String errorMessage = "Error while retrieving the API : " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
} catch (ParseException e) {
String errorMessage = "Error while retrieving the resource policies for the API : " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
return null;
}
/**
* Get the resource policy given the resource id.
*
* @param apiId API ID
* @param resourcePolicyId resource policy id
* @param ifNoneMatch If-None-Match header value
* @return json response of the resource policy for the resource id given
*/
@Override
public Response getAPIResourcePoliciesByPolicyId(String apiId, String resourcePolicyId,
String ifNoneMatch, MessageContext messageContext) {
try {
String tenantDomain = RestApiCommonUtil.getLoggedInUserTenantDomain();
APIProvider provider = RestApiCommonUtil.getLoggedInUserProvider();
API api = provider.getLightweightAPIByUUID(apiId, tenantDomain);
if (APIConstants.API_TYPE_SOAPTOREST.equals(api.getType())) {
if (StringUtils.isEmpty(resourcePolicyId)) {
String errorMessage = "Resource id should not be empty to update a resource policy.";
RestApiUtil.handleBadRequest(errorMessage, log);
}
String policyContent = SequenceUtils.getResourcePolicyFromRegistryResourceId(api, resourcePolicyId);
ResourcePolicyInfoDTO resourcePolicyInfoDTO = APIMappingUtil
.fromResourcePolicyStrToInfoDTO(policyContent);
return Response.ok().entity(resourcePolicyInfoDTO).build();
} else {
String errorMessage = "The provided api with id: " + apiId + " is not a soap to rest converted api.";
RestApiUtil.handleBadRequest(errorMessage, log);
}
} catch (APIManagementException e) {
String errorMessage = "Error while retrieving the API : " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
return null;
}
/**
* Update the resource policies(inflow/outflow) given the resource id.
*
* @param apiId API ID
* @param resourcePolicyId resource policy id
* @param body resource policy content
* @param ifMatch If-Match header value
* @return json response of the updated sequence content
*/
@Override
public Response updateAPIResourcePoliciesByPolicyId(String apiId, String resourcePolicyId,
ResourcePolicyInfoDTO body, String ifMatch, MessageContext messageContext) {
try {
String tenantDomain = RestApiCommonUtil.getLoggedInUserTenantDomain();
APIProvider provider = RestApiCommonUtil.getLoggedInUserProvider();
API api = provider.getLightweightAPIByUUID(apiId, tenantDomain);
if (api == null) {
throw new APIMgtResourceNotFoundException("Couldn't retrieve existing API with API UUID: "
+ apiId, ExceptionCodes.from(ExceptionCodes.API_NOT_FOUND,
apiId));
}
if (APIConstants.API_TYPE_SOAPTOREST.equals(api.getType())) {
if (StringUtils.isEmpty(resourcePolicyId)) {
String errorMessage = "Resource id should not be empty to update a resource policy.";
RestApiUtil.handleBadRequest(errorMessage, log);
}
boolean isValidSchema = RestApiPublisherUtils.validateXMLSchema(body.getContent());
if (isValidSchema) {
List<SOAPToRestSequence> sequence = api.getSoapToRestSequences();
for (SOAPToRestSequence soapToRestSequence : sequence) {
if (soapToRestSequence.getUuid().equals(resourcePolicyId)) {
soapToRestSequence.setContent(body.getContent());
break;
}
}
API originalAPI = provider.getAPIbyUUID(apiId, tenantDomain);
provider.updateAPI(api, originalAPI);
String updatedPolicyContent = SequenceUtils
.getResourcePolicyFromRegistryResourceId(api, resourcePolicyId);
ResourcePolicyInfoDTO resourcePolicyInfoDTO = APIMappingUtil
.fromResourcePolicyStrToInfoDTO(updatedPolicyContent);
return Response.ok().entity(resourcePolicyInfoDTO).build();
} else {
String errorMessage =
"Error while validating the resource policy xml content for the API : " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, log);
}
} else {
String errorMessage = "The provided api with id: " + apiId + " is not a soap to rest converted api.";
RestApiUtil.handleBadRequest(errorMessage, log);
}
} catch (APIManagementException | FaultGatewaysException e) {
String errorMessage = "Error while retrieving the API : " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
return null;
}
/**
     * Get total revenue for a given API from all its subscriptions
*
* @param apiId API ID
* @param messageContext message context
* @return revenue data for a given API
*/
@Override
public Response getAPIRevenue(String apiId, MessageContext messageContext) {
if (StringUtils.isBlank(apiId)) {
String errorMessage = "API ID cannot be empty or null when getting revenue details.";
RestApiUtil.handleBadRequest(errorMessage, log);
}
try {
APIProvider apiProvider = RestApiCommonUtil.getLoggedInUserProvider();
Monetization monetizationImplementation = apiProvider.getMonetizationImplClass();
String tenantDomain = RestApiCommonUtil.getLoggedInUserTenantDomain();
APIIdentifier apiIdentifier = APIMappingUtil.getAPIIdentifierFromUUID(apiId);
API api = apiProvider.getAPI(apiIdentifier);
if (!APIConstants.PUBLISHED.equalsIgnoreCase(api.getStatus())) {
String errorMessage = "API " + apiIdentifier.getApiName() +
" should be in published state to get total revenue.";
RestApiUtil.handleBadRequest(errorMessage, log);
}
Map<String, String> revenueUsageData = monetizationImplementation.getTotalRevenue(api, apiProvider);
APIRevenueDTO apiRevenueDTO = new APIRevenueDTO();
apiRevenueDTO.setProperties(revenueUsageData);
return Response.ok().entity(apiRevenueDTO).build();
} catch (APIManagementException e) {
String errorMessage = "Failed to retrieve revenue data for API ID : " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
} catch (MonetizationException e) {
String errorMessage = "Failed to get current revenue data for API ID : " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
return null;
}
/**
* Retrieves the swagger document of an API
*
* @param apiId API identifier
* @param ifNoneMatch If-None-Match header value
* @return Swagger document of the API
*/
@Override
public Response getAPISwagger(String apiId, String ifNoneMatch, MessageContext messageContext) {
try {
APIProvider apiProvider = RestApiCommonUtil.getLoggedInUserProvider();
String tenantDomain = RestApiCommonUtil.getLoggedInUserTenantDomain();
//this will fail if user does not have access to the API or the API does not exist
API api = apiProvider.getAPIbyUUID(apiId, tenantDomain);
String updatedDefinition = RestApiCommonUtil.retrieveSwaggerDefinition(api, apiProvider);
return Response.ok().entity(updatedDefinition).header("Content-Disposition",
"attachment; filename=\"" + "swagger.json" + "\"" ).build();
} catch (APIManagementException e) {
//Auth failure occurs when cross tenant accessing APIs. Sends 404, since we don't need to expose the existence of the resource
if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
} else if (isAuthorizationFailure(e)) {
RestApiUtil
.handleAuthorizationFailure("Authorization failure while retrieving swagger of API : " + apiId,
e, log);
} else {
String errorMessage = "Error while retrieving swagger of API : " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
}
return null;
}
/**
* Updates the swagger definition of an existing API
*
     * @param apiId           API identifier
     * @param ifMatch         If-Match header value
     * @param apiDefinition   Swagger definition
     * @param url             Swagger definition URL
     * @param fileInputStream Swagger definition input file content
     * @param fileDetail      file meta information as Attachment
* @return updated swagger document of the API
*/
@Override
public Response updateAPISwagger(String apiId, String ifMatch, String apiDefinition, String url,
InputStream fileInputStream, Attachment fileDetail, MessageContext messageContext) {
try {
String updatedSwagger;
String tenantDomain = RestApiCommonUtil.getLoggedInUserTenantDomain();
APIIdentifier apiIdentifier = APIMappingUtil.getAPIIdentifierFromUUID(apiId);
if (apiIdentifier == null) {
throw new APIMgtResourceNotFoundException("Couldn't retrieve existing API with API UUID: "
+ apiId, ExceptionCodes.from(ExceptionCodes.API_NOT_FOUND,
apiId));
}
boolean isSoapToRestConvertedAPI = SOAPOperationBindingUtils.isSOAPToRESTApi(apiIdentifier.getApiName(),
apiIdentifier.getVersion(), apiIdentifier.getProviderName());
//Handle URL and file based definition imports
            if (url != null || fileInputStream != null) {
// Validate and retrieve the OpenAPI definition
Map validationResponseMap = validateOpenAPIDefinition(url, fileInputStream, fileDetail,
true, false);
APIDefinitionValidationResponse validationResponse =
                        (APIDefinitionValidationResponse) validationResponseMap.get(RestApiConstants.RETURN_MODEL);
if (!validationResponse.isValid()) {
RestApiUtil.handleBadRequest(validationResponse.getErrorItems(), log);
}
updatedSwagger = PublisherCommonUtils.updateSwagger(apiId, validationResponse, false);
} else {
updatedSwagger = updateSwagger(apiId, apiDefinition);
}
return Response.ok().entity(updatedSwagger).build();
} catch (APIManagementException e) {
//Auth failure occurs when cross tenant accessing APIs. Sends 404, since we don't need
// to expose the existence of the resource
if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
} else if (isAuthorizationFailure(e)) {
RestApiUtil.handleAuthorizationFailure(
"Authorization failure while updating swagger definition of API: " + apiId, e, log);
} else {
String errorMessage = "Error while updating the swagger definition of the API: " + apiId + " - "
+ e.getMessage();
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
} catch (FaultGatewaysException e) {
String errorMessage = "Error while updating API : " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
return null;
}
/**
     * Update the swagger definition of the given API. The swagger will be validated before updating.
*
* @param apiId API Id
* @param apiDefinition swagger definition
* @return updated swagger definition
* @throws APIManagementException when error occurred updating swagger
* @throws FaultGatewaysException when error occurred publishing API to the gateway
*/
private String updateSwagger(String apiId, String apiDefinition)
throws APIManagementException, FaultGatewaysException {
APIDefinitionValidationResponse response = OASParserUtil
.validateAPIDefinition(apiDefinition, true);
if (!response.isValid()) {
RestApiUtil.handleBadRequest(response.getErrorItems(), log);
}
return PublisherCommonUtils.updateSwagger(apiId, response, false);
}
/**
* Retrieves the thumbnail image of an API specified by API identifier
*
* @param apiId API Id
* @param ifNoneMatch If-None-Match header value
     * @param messageContext CXF Message Context
* @return Thumbnail image of the API
*/
@Override
public Response getAPIThumbnail(String apiId, String ifNoneMatch, MessageContext messageContext) {
try {
APIProvider apiProvider = RestApiCommonUtil.getLoggedInUserProvider();
String tenantDomain = RestApiCommonUtil.getLoggedInUserTenantDomain();
//this will fail if user does not have access to the API or the API does not exist
ResourceFile thumbnailResource = apiProvider.getIcon(apiId, tenantDomain);
if (thumbnailResource != null) {
return Response
.ok(thumbnailResource.getContent(), MediaType.valueOf(thumbnailResource.getContentType()))
.build();
} else {
return Response.noContent().build();
}
} catch (APIManagementException e) {
//Auth failure occurs when cross tenant accessing APIs. Sends 404, since we don't need to expose the
// existence of the resource
if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
} else if (isAuthorizationFailure(e)) {
RestApiUtil.handleAuthorizationFailure(
"Authorization failure while retrieving thumbnail of API : " + apiId, e, log);
} else {
String errorMessage = "Error while retrieving thumbnail of API : " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
}
return null;
}
@Override
public Response updateAPIThumbnail(String apiId, InputStream fileInputStream, Attachment fileDetail,
String ifMatch, MessageContext messageContext) {
try {
APIIdentifier apiIdentifier = APIMappingUtil.getAPIIdentifierFromUUID(apiId);
if (apiIdentifier == null) {
throw new APIMgtResourceNotFoundException("Couldn't retrieve existing API with API UUID: "
+ apiId, ExceptionCodes.from(ExceptionCodes.API_NOT_FOUND,
apiId));
}
APIProvider apiProvider = RestApiCommonUtil.getLoggedInUserProvider();
String tenantDomain = RestApiCommonUtil.getLoggedInUserTenantDomain();
String fileName = fileDetail.getDataHandler().getName();
String fileContentType = URLConnection.guessContentTypeFromName(fileName);
if (org.apache.commons.lang3.StringUtils.isBlank(fileContentType)) {
fileContentType = fileDetail.getContentType().toString();
}
ResourceFile apiImage = new ResourceFile(fileInputStream, fileContentType);
apiProvider.setThumbnailToAPI(apiId, apiImage, tenantDomain);
String uriString = RestApiConstants.RESOURCE_PATH_THUMBNAIL
.replace(RestApiConstants.APIID_PARAM, apiId);
URI uri = new URI(uriString);
FileInfoDTO infoDTO = new FileInfoDTO();
infoDTO.setRelativePath(uriString);
infoDTO.setMediaType(fileContentType);
return Response.created(uri).entity(infoDTO).build();
} catch (APIManagementException e) {
//Auth failure occurs when cross tenant accessing APIs. Sends 404, since we don't need to expose the
// existence of the resource
if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
} else if (isAuthorizationFailure(e)) {
RestApiUtil
.handleAuthorizationFailure("Authorization failure while adding thumbnail for API : " + apiId,
e, log);
} else {
String errorMessage = "Error while retrieving thumbnail of API : " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
} catch (URISyntaxException e) {
String errorMessage = "Error while updating thumbnail of API: " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
} finally {
IOUtils.closeQuietly(fileInputStream);
}
return null;
}
@Override
public Response validateAPI(String query, String ifNoneMatch, MessageContext messageContext) {
boolean isSearchArtifactExists = false;
if (StringUtils.isEmpty(query)) {
RestApiUtil.handleBadRequest("The query should not be empty", log);
}
try {
APIProvider apiProvider = RestApiCommonUtil.getLoggedInUserProvider();
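            // The query is expected in "<attribute>:<value>" form, e.g. "name:PizzaShackAPI" or
            // "context:/pizzashack" (values are illustrative); a bare value is treated as an API name.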
if (query.contains(":")) {
String[] queryTokens = query.split(":");
switch (queryTokens[0]) {
case "name":
isSearchArtifactExists = apiProvider.isApiNameExist(queryTokens[1]) ||
apiProvider.isApiNameWithDifferentCaseExist(queryTokens[1]);
break;
case "context":
default: // API version validation.
isSearchArtifactExists = apiProvider.isContextExist(queryTokens[1]);
break;
}
} else { // consider the query as api name
isSearchArtifactExists =
apiProvider.isApiNameExist(query) || apiProvider.isApiNameWithDifferentCaseExist(query);
}
} catch(APIManagementException e){
RestApiUtil.handleInternalServerError("Error while checking the api existence", e, log);
}
return isSearchArtifactExists ? Response.status(Response.Status.OK).build() :
Response.status(Response.Status.NOT_FOUND).build();
}
@Override
public Response validateDocument(String apiId, String name, String ifMatch, MessageContext messageContext) {
if (StringUtils.isEmpty(name) || StringUtils.isEmpty(apiId)) {
RestApiUtil.handleBadRequest("API Id and/ or document name should not be empty", log);
}
try {
String tenantDomain = RestApiCommonUtil.getLoggedInUserTenantDomain();
APIProvider apiProvider = RestApiCommonUtil.getLoggedInUserProvider();
APIIdentifier apiIdentifier = APIMappingUtil.getAPIIdentifierFromUUID(apiId);
if (apiIdentifier == null) {
throw new APIMgtResourceNotFoundException("Couldn't retrieve existing API with API UUID: "
+ apiId, ExceptionCodes.from(ExceptionCodes.API_NOT_FOUND,
apiId));
}
return apiProvider.isDocumentationExist(apiId, name, tenantDomain) ? Response.status(Response.Status.OK).build() :
Response.status(Response.Status.NOT_FOUND).build();
} catch(APIManagementException e){
RestApiUtil.handleInternalServerError("Error while checking the api existence", e, log);
}
return Response.status(Response.Status.NOT_FOUND).build();
}
@Override
public Response validateEndpoint(String endpointUrl, String apiId, MessageContext messageContext) {
ApiEndpointValidationResponseDTO apiEndpointValidationResponseDTO = new ApiEndpointValidationResponseDTO();
apiEndpointValidationResponseDTO.setError("");
try {
URL url = new URL(endpointUrl);
if (url.getProtocol().matches("https")) {
ServerConfiguration serverConfig = CarbonUtils.getServerConfiguration();
String trustStorePath = serverConfig.getFirstProperty("Security.TrustStore.Location");
String trustStorePassword = serverConfig.getFirstProperty("Security.TrustStore.Password");
System.setProperty("javax.net.ssl.trustStore", trustStorePath);
System.setProperty("javax.net.ssl.trustStorePassword", trustStorePassword);
String keyStore = serverConfig.getFirstProperty("Security.KeyStore.Location");
String keyStoreType = serverConfig.getFirstProperty("Security.KeyStore.Type");
String keyStorePassword = serverConfig.getFirstProperty("Security.KeyStore.Password");
System.setProperty("javax.net.ssl.keyStoreType", keyStoreType);
System.setProperty("javax.net.ssl.keyStore", keyStore);
System.setProperty("javax.net.ssl.keyStorePassword", keyStorePassword);
                /* apiId can be used to get the related API's uriTemplates. These uriTemplates can be used to extract
                the API operations and append those operations separately to the API endpoint url. This edited url can
                be used to test the endpoint, in case there is no valid url for the sole endpoint url provided. */
apiEndpointValidationResponseDTO = sendHttpHEADRequest(endpointUrl);
return Response.status(Response.Status.OK).entity(apiEndpointValidationResponseDTO).build();
} else if (url.getProtocol().matches("http")) {
apiEndpointValidationResponseDTO = sendHttpHEADRequest(endpointUrl);
return Response.status(Response.Status.OK).entity(apiEndpointValidationResponseDTO).build();
}
} catch (MalformedURLException e) {
log.error("Malformed Url error occurred while sending the HEAD request to the given endpoint url:", e);
apiEndpointValidationResponseDTO.setError(e.getMessage());
} catch (Exception e) {
RestApiUtil.handleInternalServerError("Error while testing the validity of API endpoint url " +
"existence", e, log);
}
return Response.status(Response.Status.OK).entity(apiEndpointValidationResponseDTO).build();
}
@Override
public Response getAPIResourcePaths(String apiId, Integer limit, Integer offset, String ifNoneMatch,
MessageContext messageContext) {
try {
String tenantDomain = RestApiCommonUtil.getLoggedInUserTenantDomain();
APIProvider apiProvider = RestApiCommonUtil.getLoggedInUserProvider();
APIIdentifier apiIdentifier = APIMappingUtil.getAPIIdentifierFromUUID(apiId);
if (apiIdentifier == null) {
throw new APIMgtResourceNotFoundException("Couldn't retrieve existing API with API UUID: "
+ apiId, ExceptionCodes.from(ExceptionCodes.API_NOT_FOUND,
apiId));
}
List<ResourcePath> apiResourcePaths = apiProvider.getResourcePathsOfAPI(apiIdentifier);
ResourcePathListDTO dto = APIMappingUtil.fromResourcePathListToDTO(apiResourcePaths, limit, offset);
APIMappingUtil.setPaginationParamsForAPIResourcePathList(dto, offset, limit, apiResourcePaths.size());
return Response.ok().entity(dto).build();
} catch (APIManagementException e) {
if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
} else if (isAuthorizationFailure(e)) {
RestApiUtil.handleAuthorizationFailure(
"Authorization failure while retrieving resource paths of API : " + apiId, e, log);
} else {
String errorMessage = "Error while retrieving resource paths of API : " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
}
return null;
}
/**
* Validate API Definition and retrieve as the response
*
     * @param returnContent Whether to return the definition content
     * @param url URL of the OpenAPI definition
     * @param fileInputStream InputStream for the provided file
     * @param fileDetail File meta-data
     * @param messageContext CXF message context
* @return API Definition validation response
*/
@Override
public Response validateOpenAPIDefinition(Boolean returnContent, String url, InputStream fileInputStream,
Attachment fileDetail, MessageContext messageContext) {
// Validate and retrieve the OpenAPI definition
Map validationResponseMap = null;
try {
validationResponseMap = validateOpenAPIDefinition(url, fileInputStream, fileDetail, returnContent, false);
} catch (APIManagementException e) {
RestApiUtil.handleInternalServerError("Error occurred while validating API Definition", e, log);
}
OpenAPIDefinitionValidationResponseDTO validationResponseDTO =
(OpenAPIDefinitionValidationResponseDTO)validationResponseMap.get(RestApiConstants.RETURN_DTO);
return Response.ok().entity(validationResponseDTO).build();
}
/**
* Importing an OpenAPI definition and create an API
*
* @param fileInputStream InputStream for the provided file
* @param fileDetail File meta-data
* @param url URL of the OpenAPI definition
* @param additionalProperties API object (json) including additional properties like name, version, context
* @param messageContext CXF message context
* @return API Import using OpenAPI definition response
*/
@Override
public Response importOpenAPIDefinition(InputStream fileInputStream, Attachment fileDetail, String url,
String additionalProperties, MessageContext messageContext) {
// validate 'additionalProperties' json
if (StringUtils.isBlank(additionalProperties)) {
RestApiUtil.handleBadRequest("'additionalProperties' is required and should not be null", log);
}
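        // 'additionalProperties' is expected to be an APIDTO JSON payload, e.g. (illustrative values only):
        //   {"name":"PetStore", "version":"1.0.0", "context":"/petstore"}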
// Convert the 'additionalProperties' json into an APIDTO object
ObjectMapper objectMapper = new ObjectMapper();
APIDTO apiDTOFromProperties;
try {
apiDTOFromProperties = objectMapper.readValue(additionalProperties, APIDTO.class);
} catch (IOException e) {
throw RestApiUtil.buildBadRequestException("Error while parsing 'additionalProperties'", e);
}
// Import the API and Definition
try {
APIDTO createdApiDTO = importOpenAPIDefinition(fileInputStream, url, apiDTOFromProperties, fileDetail, null);
if (createdApiDTO != null) {
// This URI used to set the location header of the POST response
URI createdApiUri = new URI(RestApiConstants.RESOURCE_PATH_APIS + "/" + createdApiDTO.getId());
return Response.created(createdApiUri).entity(createdApiDTO).build();
}
} catch (URISyntaxException e) {
String errorMessage = "Error while retrieving API location : " + apiDTOFromProperties.getProvider() + "-" +
apiDTOFromProperties.getName() + "-" + apiDTOFromProperties.getVersion();
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
return null;
}
/**
* Validate a provided WSDL definition via a URL or a file/zip
*
* @param url WSDL URL
* @param fileInputStream file/zip input stream
* @param fileDetail file/zip details
* @param messageContext messageContext object
* @return WSDL validation response
* @throws APIManagementException when error occurred during validation
*/
@Override
public Response validateWSDLDefinition(String url, InputStream fileInputStream, Attachment fileDetail,
MessageContext messageContext) throws APIManagementException {
Map validationResponseMap = validateWSDL(url, fileInputStream, fileDetail);
WSDLValidationResponseDTO validationResponseDTO =
(WSDLValidationResponseDTO)validationResponseMap.get(RestApiConstants.RETURN_DTO);
return Response.ok().entity(validationResponseDTO).build();
}
/**
* Validate the provided input parameters and returns the validation response DTO (for REST API)
* and the intermediate model as a Map
*
* @param url WSDL url
* @param fileInputStream file data stream
* @param fileDetail file details
* @return the validation response DTO (for REST API) and the intermediate model as a Map
* @throws APIManagementException if error occurred during validation of the WSDL
*/
private Map validateWSDL(String url, InputStream fileInputStream, Attachment fileDetail) throws APIManagementException {
WSDLValidationResponseDTO responseDTO;
WSDLValidationResponse validationResponse = new WSDLValidationResponse();
if (url != null) {
try {
URL wsdlUrl = new URL(url);
validationResponse = APIMWSDLReader.validateWSDLUrl(wsdlUrl);
} catch (MalformedURLException e) {
RestApiUtil.handleBadRequest("Invalid/Malformed URL : " + url, log);
}
} else if (fileInputStream != null) {
String filename = fileDetail.getContentDisposition().getFilename();
try {
if (filename.endsWith(".zip")) {
validationResponse =
APIMWSDLReader.extractAndValidateWSDLArchive(fileInputStream);
} else if (filename.endsWith(".wsdl")) {
validationResponse = APIMWSDLReader.validateWSDLFile(fileInputStream);
} else {
RestApiUtil.handleBadRequest("Unsupported extension type of file: " + filename, log);
}
} catch (APIManagementException e) {
String errorMessage = "Internal error while validating the WSDL from file:" + filename;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
}
responseDTO =
APIMappingUtil.fromWSDLValidationResponseToDTO(validationResponse);
Map response = new HashMap();
response.put(RestApiConstants.RETURN_MODEL, validationResponse);
response.put(RestApiConstants.RETURN_DTO, responseDTO);
return response;
}
/**
     * Import a WSDL file/url or an archive and create an API. The created API can be either a SOAP API or a
     * SOAP-to-REST converted API depending on the provided implementationType.
*
* @param fileInputStream file input stream
* @param fileDetail file details
* @param url WSDL url
* @param additionalProperties API object (json) including additional properties like name, version, context
* @param implementationType SOAP or SOAPTOREST
* @return Created API's payload
* @throws APIManagementException when error occurred during the operation
*/
@Override
public Response importWSDLDefinition(InputStream fileInputStream, Attachment fileDetail, String url,
String additionalProperties, String implementationType, MessageContext messageContext)
throws APIManagementException {
try {
WSDLValidationResponse validationResponse = validateWSDLAndReset(fileInputStream, fileDetail, url);
if (StringUtils.isEmpty(implementationType)) {
implementationType = APIDTO.TypeEnum.SOAP.toString();
}
boolean isSoapToRestConvertedAPI = APIDTO.TypeEnum.SOAPTOREST.toString().equals(implementationType);
boolean isSoapAPI = APIDTO.TypeEnum.SOAP.toString().equals(implementationType);
APIDTO additionalPropertiesAPI = null;
APIDTO createdApiDTO;
URI createdApiUri;
// Minimum requirement name, version, context and endpointConfig.
additionalPropertiesAPI = new ObjectMapper().readValue(additionalProperties, APIDTO.class);
String username = RestApiCommonUtil.getLoggedInUsername();
additionalPropertiesAPI.setProvider(username);
additionalPropertiesAPI.setType(APIDTO.TypeEnum.fromValue(implementationType));
API apiToAdd = PublisherCommonUtils
.prepareToCreateAPIByDTO(additionalPropertiesAPI, RestApiCommonUtil.getLoggedInUserProvider(),
username);
apiToAdd.setWsdlUrl(url);
API createdApi = null;
if (isSoapAPI) {
createdApi = importSOAPAPI(fileInputStream, fileDetail, url, apiToAdd);
} else if (isSoapToRestConvertedAPI) {
String wsdlArchiveExtractedPath = null;
if (validationResponse.getWsdlArchiveInfo() != null) {
wsdlArchiveExtractedPath = validationResponse.getWsdlArchiveInfo().getLocation()
+ File.separator + APIConstants.API_WSDL_EXTRACTED_DIRECTORY;
}
createdApi = importSOAPToRESTAPI(fileInputStream, fileDetail, url, wsdlArchiveExtractedPath, apiToAdd);
} else {
RestApiUtil.handleBadRequest("Invalid implementationType parameter", log);
}
createdApiDTO = APIMappingUtil.fromAPItoDTO(createdApi);
//This URI used to set the location header of the POST response
createdApiUri = new URI(RestApiConstants.RESOURCE_PATH_APIS + "/" + createdApiDTO.getId());
return Response.created(createdApiUri).entity(createdApiDTO).build();
} catch (IOException | URISyntaxException e) {
RestApiUtil.handleInternalServerError("Error occurred while importing WSDL", e, log);
}
return null;
}
/**
     * Validates the provided WSDL and resets the input stream as required
     *
     * @param fileInputStream file input stream
     * @param fileDetail file details
     * @param url WSDL url
     * @return WSDL validation response
     * @throws APIManagementException when error occurred during the operation
*/
private WSDLValidationResponse validateWSDLAndReset(InputStream fileInputStream, Attachment fileDetail, String url)
throws APIManagementException {
Map validationResponseMap = validateWSDL(url, fileInputStream, fileDetail);
WSDLValidationResponse validationResponse =
(WSDLValidationResponse)validationResponseMap.get(RestApiConstants.RETURN_MODEL);
if (validationResponse.getWsdlInfo() == null) {
// Validation failure
RestApiUtil.handleBadRequest(validationResponse.getError(), log);
}
if (fileInputStream != null) {
if (fileInputStream.markSupported()) {
// For uploading the WSDL below will require re-reading from the input stream hence resetting
try {
fileInputStream.reset();
} catch (IOException e) {
throw new APIManagementException("Error occurred while trying to reset the content stream of the " +
"WSDL", e);
}
} else {
log.warn("Marking is not supported in 'fileInputStream' InputStream type: "
+ fileInputStream.getClass() + ". Skipping validating WSDL to avoid re-reading from the " +
"input stream.");
}
}
return validationResponse;
}
/**
* Import an API from WSDL as a SOAP API
*
* @param fileInputStream file data as input stream
* @param fileDetail file details
* @param url URL of the WSDL
* @param apiToAdd API object to be added to the system (which is not added yet)
     * @return the added API
*/
private API importSOAPAPI(InputStream fileInputStream, Attachment fileDetail, String url, API apiToAdd) {
try {
APIProvider apiProvider = RestApiCommonUtil.getLoggedInUserProvider();
//adding the api
apiProvider.addAPI(apiToAdd);
String tenantDomain = RestApiCommonUtil.getLoggedInUserTenantDomain();
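            // The WSDL resource can come either from a remote URL or from the uploaded file content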
if (StringUtils.isNotBlank(url)) {
apiToAdd.setWsdlUrl(url);
apiProvider.addWSDLResource(apiToAdd.getUuid(), null, url, tenantDomain);
} else if (fileDetail != null && fileInputStream != null) {
                ResourceFile wsdlResource;
if (APIConstants.APPLICATION_ZIP.equals(fileDetail.getContentType().toString()) ||
APIConstants.APPLICATION_X_ZIP_COMPRESSED.equals(fileDetail.getContentType().toString())) {
wsdlResource = new ResourceFile(fileInputStream, APIConstants.APPLICATION_ZIP);
} else {
wsdlResource = new ResourceFile(fileInputStream, fileDetail.getContentType().toString());
}
apiToAdd.setWsdlResource(wsdlResource);
apiProvider.addWSDLResource(apiToAdd.getUuid(), wsdlResource, null, tenantDomain);
}
//add the generated swagger definition to SOAP
APIDefinition oasParser = new OAS2Parser();
SwaggerData swaggerData = new SwaggerData(apiToAdd);
String apiDefinition = generateSOAPAPIDefinition(oasParser.generateAPIDefinition(swaggerData));
apiProvider.saveSwaggerDefinition(apiToAdd, apiDefinition, tenantDomain);
APIIdentifier createdApiId = apiToAdd.getId();
//Retrieve the newly added API to send in the response payload
API createdApi = apiProvider.getAPI(createdApiId);
return createdApi;
} catch (APIManagementException e) {
RestApiUtil.handleInternalServerError("Error while importing WSDL to create a SOAP API", e, log);
}
return null;
}
/**
* Add soap parameters to the default soap api resource.
*
* @param apiDefinition The API definition string.
* @return Modified api definition.
* */
private String generateSOAPAPIDefinition(String apiDefinition) throws APIManagementException {
JSONParser jsonParser = new JSONParser();
JSONObject apiJson;
JSONObject paths;
try {
apiJson = (JSONObject) jsonParser.parse(apiDefinition);
paths = (JSONObject) jsonParser.parse(RestApiPublisherUtils.getSOAPOperation());
apiJson.replace("paths", paths);
return apiJson.toJSONString();
} catch (ParseException e) {
throw new APIManagementException("Error while parsing the api definition.", e);
}
}
/**
* Import an API from WSDL as a SOAP-to-REST API
*
* @param fileInputStream file data as input stream
* @param fileDetail file details
* @param url URL of the WSDL
* @param apiToAdd API object to be added to the system (which is not added yet)
     * @return the added API
*/
private API importSOAPToRESTAPI(InputStream fileInputStream, Attachment fileDetail, String url,
String wsdlArchiveExtractedPath, API apiToAdd) throws APIManagementException {
try {
APIProvider apiProvider = RestApiCommonUtil.getLoggedInUserProvider();
String tenantDomain = RestApiCommonUtil.getLoggedInUserTenantDomain();
//adding the api
API createdApi = apiProvider.addAPI(apiToAdd);
String swaggerStr = "";
if (StringUtils.isNotBlank(url)) {
swaggerStr = SOAPOperationBindingUtils.getSoapOperationMappingForUrl(url);
} else if (fileInputStream != null) {
String filename = fileDetail.getContentDisposition().getFilename();
if (filename.endsWith(".zip")) {
                    swaggerStr = SOAPOperationBindingUtils.getSoapOperationMapping(wsdlArchiveExtractedPath);
} else if (filename.endsWith(".wsdl")) {
byte[] wsdlContent = APIUtil.toByteArray(fileInputStream);
swaggerStr = SOAPOperationBindingUtils.getSoapOperationMapping(wsdlContent);
} else {
throw new APIManagementException(ExceptionCodes.UNSUPPORTED_WSDL_FILE_EXTENSION);
}
}
String updatedSwagger = updateSwagger(createdApi.getUUID(), swaggerStr);
List<SOAPToRestSequence> list = SequenceGenerator.generateSequencesFromSwagger(updatedSwagger,
apiToAdd.getId());
API updatedAPI = apiProvider.getAPIbyUUID(createdApi.getUuid(), tenantDomain);
updatedAPI.setSoapToRestSequences(list);
apiProvider.updateAPI(updatedAPI, createdApi);
return updatedAPI;
} catch (FaultGatewaysException | IOException e) {
throw new APIManagementException("Error while importing WSDL to create a SOAP-to-REST API", e);
}
}
/**
* Retrieve the WSDL of an API
*
* @param apiId UUID of the API
* @param ifNoneMatch If-None-Match header value
* @return the WSDL of the API (can be a file or zip archive)
* @throws APIManagementException when error occurred while trying to retrieve the WSDL
*/
@Override
public Response getWSDLOfAPI(String apiId, String ifNoneMatch, MessageContext messageContext)
throws APIManagementException {
try {
APIProvider apiProvider = RestApiCommonUtil.getLoggedInUserProvider();
String tenantDomain = RestApiCommonUtil.getLoggedInUserTenantDomain();
//this will fail if user does not have access to the API or the API does not exist
//APIIdentifier apiIdentifier = APIMappingUtil.getAPIIdentifierFromUUID(apiId, tenantDomain);
ResourceFile resource = apiProvider.getWSDL(apiId, tenantDomain);
return RestApiUtil.getResponseFromResourceFile(resource.getName(), resource);
} catch (APIManagementException e) {
//Auth failure occurs when cross tenant accessing APIs. Sends 404, since we don't need
// to expose the existence of the resource
if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
} else if (isAuthorizationFailure(e)) {
RestApiUtil
.handleAuthorizationFailure("Authorization failure while retrieving wsdl of API: "
+ apiId, e, log);
} else {
throw e;
}
}
return null;
}
/**
* Update the WSDL of an API
*
* @param apiId UUID of the API
* @param fileInputStream file data as input stream
* @param fileDetail file details
* @param url URL of the WSDL
* @return 200 OK response if the operation is successful. 400 if the provided inputs are invalid. 500 if a server
* error occurred.
     * @throws APIManagementException when error occurred while trying to update the WSDL
*/
@Override
public Response updateWSDLOfAPI(String apiId, String ifMatch, InputStream fileInputStream, Attachment fileDetail,
String url, MessageContext messageContext) throws APIManagementException {
validateWSDLAndReset(fileInputStream, fileDetail, url);
APIProvider apiProvider = RestApiCommonUtil.getLoggedInUserProvider();
String tenantDomain = RestApiCommonUtil.getLoggedInUserTenantDomain();
APIIdentifier apiIdentifier = APIMappingUtil.getAPIIdentifierFromUUID(apiId);
if (apiIdentifier == null) {
throw new APIMgtResourceNotFoundException("Couldn't retrieve existing API with API UUID: "
+ apiId, ExceptionCodes.from(ExceptionCodes.API_NOT_FOUND,
apiId));
}
API api = apiProvider.getAPIbyUUID(apiId, tenantDomain);
if (api == null) {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, log);
}
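        // Update the stored WSDL either from the provided URL or from the uploaded file content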
if (StringUtils.isNotBlank(url)) {
apiProvider.addWSDLResource(apiId, null, url, tenantDomain);
} else {
ResourceFile wsdlResource;
if (APIConstants.APPLICATION_ZIP.equals(fileDetail.getContentType().toString()) ||
APIConstants.APPLICATION_X_ZIP_COMPRESSED.equals(fileDetail.getContentType().toString())) {
wsdlResource = new ResourceFile(fileInputStream, APIConstants.APPLICATION_ZIP);
} else {
wsdlResource = new ResourceFile(fileInputStream, fileDetail.getContentType().toString());
}
apiProvider.addWSDLResource(apiId, wsdlResource, null, tenantDomain);
}
return Response.ok().build();
}
@Override
public Response changeAPILifecycle(String action, String apiId, String lifecycleChecklist,
String ifMatch, MessageContext messageContext) {
//pre-processing
String[] checkListItems = lifecycleChecklist != null ? lifecycleChecklist.split(",") : new String[0];
try {
APIProvider apiProvider = RestApiCommonUtil.getLoggedInUserProvider();
String tenantDomain = RestApiCommonUtil.getLoggedInUserTenantDomain();
APIIdentifier apiIdentifier = APIMappingUtil.getAPIIdentifierFromUUID(apiId);
if (apiIdentifier == null) {
throw new APIMgtResourceNotFoundException("Couldn't retrieve existing API with API UUID: "
+ apiId, ExceptionCodes.from(ExceptionCodes.API_NOT_FOUND,
apiId));
}
Map<String, Object> apiLCData = apiProvider.getAPILifeCycleData(apiId, tenantDomain);
String[] nextAllowedStates = (String[]) apiLCData.get(APIConstants.LC_NEXT_STATES);
if (!ArrayUtils.contains(nextAllowedStates, action)) {
RestApiUtil.handleBadRequest(
"Action '" + action + "' is not allowed. Allowed actions are " + Arrays
.toString(nextAllowedStates), log);
}
//check and set lifecycle check list items including "Deprecate Old Versions" and "Require Re-Subscription".
Map<String, Boolean> lcMap = new HashMap<String, Boolean>();
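            // Each checklist entry is expected in the form 'itemName:true|false'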
for (String checkListItem : checkListItems) {
String[] attributeValPair = checkListItem.split(":");
if (attributeValPair.length == 2) {
String checkListItemName = attributeValPair[0].trim();
boolean checkListItemValue = Boolean.valueOf(attributeValPair[1].trim());
lcMap.put(checkListItemName, checkListItemValue);
//apiProvider.checkAndChangeAPILCCheckListItem(apiIdentifier, checkListItemName, checkListItemValue);
}
}
//todo: check if API's tiers are properly set before Publishing
//APIStateChangeResponse stateChangeResponse = apiProvider.changeLifeCycleStatus(apiIdentifier, action.toString());
            APIStateChangeResponse stateChangeResponse = apiProvider.changeLifeCycleStatus(tenantDomain, apiId, action,
                    lcMap);
//returns the current lifecycle state
LifecycleStateDTO stateDTO = getLifecycleState(apiIdentifier, apiId); // todo try to prevent this call
WorkflowResponseDTO workflowResponseDTO = APIMappingUtil
.toWorkflowResponseDTO(stateDTO, stateChangeResponse);
return Response.ok().entity(workflowResponseDTO).build();
} catch (APIManagementException e) {
//Auth failure occurs when cross tenant accessing APIs. Sends 404, since we don't need to expose the existence of the resource
if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
} else if (isAuthorizationFailure(e)) {
RestApiUtil.handleAuthorizationFailure(
"Authorization failure while updating the lifecycle of API " + apiId, e, log);
} else {
RestApiUtil.handleInternalServerError("Error while updating lifecycle of API " + apiId, e, log);
}
} catch (FaultGatewaysException e) {
String errorMessage = "Error while updating the API in Gateway " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
return null;
}
@Override
public Response createNewAPIVersion(String newVersion, String apiId, Boolean defaultVersion,
String serviceVersion, MessageContext messageContext) {
URI newVersionedApiUri;
APIDTO newVersionedApi;
try {
APIIdentifier apiIdentifierFromTable = APIMappingUtil.getAPIIdentifierFromUUID(apiId);
if (apiIdentifierFromTable == null) {
throw new APIMgtResourceNotFoundException("Couldn't retrieve existing API with API UUID: "
+ apiId, ExceptionCodes.from(ExceptionCodes.API_NOT_FOUND,
apiId));
}
APIProvider apiProvider = RestApiCommonUtil.getLoggedInUserProvider();
String tenantDomain = RestApiCommonUtil.getLoggedInUserTenantDomain();
String username = RestApiCommonUtil.getLoggedInUsername();
int tenantId = APIUtil.getTenantId(username);
if (StringUtils.isNotEmpty(serviceVersion)) {
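                // A service version was given: create the new API version by re-importing the linked service
                // definition from the Service Catalog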
ServiceCatalogImpl serviceCatalog = new ServiceCatalogImpl();
                API existingAPI = apiProvider.getAPIbyUUID(apiId, tenantDomain);
                if (existingAPI == null) {
                    throw new APIMgtResourceNotFoundException("API not found for id " + apiId,
                            ExceptionCodes.from(ExceptionCodes.API_NOT_FOUND, apiId));
                }
                String serviceKey = apiProvider.retrieveServiceKeyByApiId(existingAPI.getId().getId(), tenantId);
                ServiceEntry service = serviceCatalog.getServiceByKey(serviceKey, tenantId);
if (newVersion.equals(existingAPI.getId().getVersion())) {
throw new APIMgtResourceAlreadyExistsException("Version " + newVersion + " exists for api "
+ existingAPI.getId().getApiName(), ExceptionCodes.from(API_ALREADY_EXISTS, apiId));
}
APIDTO apidto = createAPIDTO(existingAPI, newVersion);
newVersionedApi = importOpenAPIDefinition(service.getEndpointDef(), null, apidto, null, service);
} else {
API versionedAPI = apiProvider.createNewAPIVersion(apiId, newVersion, defaultVersion, tenantDomain);
newVersionedApi = APIMappingUtil.fromAPItoDTO(versionedAPI);
}
//This URI used to set the location header of the POST response
newVersionedApiUri =
new URI(RestApiConstants.RESOURCE_PATH_APIS + "/" + newVersionedApi.getId());
return Response.created(newVersionedApiUri).entity(newVersionedApi).build();
} catch (APIManagementException | DuplicateAPIException e) {
if (RestApiUtil.isDueToResourceAlreadyExists(e)) {
String errorMessage = "Requested new version " + newVersion + " of API " + apiId + " already exists";
RestApiUtil.handleResourceAlreadyExistsError(errorMessage, e, log);
} else if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
//Auth failure occurs when cross tenant accessing APIs. Sends 404, since we don't need to expose the existence of the resource
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
} else if (isAuthorizationFailure(e)) {
RestApiUtil.handleAuthorizationFailure("Authorization failure while copying API : " + apiId, e, log);
} else {
String errorMessage = "Error while copying API : " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
} catch (URISyntaxException e) {
String errorMessage = "Error while retrieving API location of " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
return null;
}
/**
     * Exports an API from API Manager for a given API using the ApiId. Meta information, API icon, documentation,
* WSDL and sequences are exported. This service generates a zipped archive which contains all the above mentioned
* resources for a given API.
*
* @param apiId UUID of an API
* @param name Name of the API that needs to be exported
* @param version Version of the API that needs to be exported
* @param providerName Provider name of the API that needs to be exported
* @param format Format of output documents. Can be YAML or JSON
* @param preserveStatus Preserve API status on export
     * @return Zipped archive containing the exported API
*/
@Override public Response exportAPI(String apiId, String name, String version, String revisionNum,
String providerName, String format, Boolean preserveStatus,
Boolean exportLatestRevision, MessageContext messageContext) {
//If not specified status is preserved by default
preserveStatus = preserveStatus == null || preserveStatus;
// Default export format is YAML
ExportFormat exportFormat = StringUtils.isNotEmpty(format) ?
ExportFormat.valueOf(format.toUpperCase()) :
ExportFormat.YAML;
try {
ImportExportAPI importExportAPI = APIImportExportUtil.getImportExportAPI();
File file = importExportAPI.exportAPI(apiId, name, version, revisionNum, providerName, preserveStatus,
exportFormat, true, true, exportLatestRevision);
return Response.ok(file).header(RestApiConstants.HEADER_CONTENT_DISPOSITION,
"attachment; filename=\"" + file.getName() + "\"").build();
} catch (APIManagementException | APIImportExportException e) {
RestApiUtil.handleInternalServerError("Error while exporting " + RestApiConstants.RESOURCE_API, e, log);
}
return null;
}
/**
* Import a GraphQL Schema
* @param type APIType
* @param fileInputStream input file
* @param fileDetail file Detail
* @param additionalProperties api object as string format
     * @param ifMatch If-Match header value
* @param messageContext messageContext
* @return Response with GraphQL API
*/
@Override
public Response importGraphQLSchema(String ifMatch, String type, InputStream fileInputStream,
Attachment fileDetail, String additionalProperties, MessageContext messageContext) {
APIDTO additionalPropertiesAPI = null;
String schema = "";
try {
if (fileInputStream == null || StringUtils.isBlank(additionalProperties)) {
String errorMessage = "GraphQL schema and api details cannot be empty.";
RestApiUtil.handleBadRequest(errorMessage, log);
} else {
schema = IOUtils.toString(fileInputStream, RestApiConstants.CHARSET);
}
if (!StringUtils.isBlank(additionalProperties) && !StringUtils.isBlank(schema)) {
if (log.isDebugEnabled()) {
log.debug("Deseriallizing additionalProperties: " + additionalProperties + "/n"
+ "importing schema: " + schema);
}
}
additionalPropertiesAPI = new ObjectMapper().readValue(additionalProperties, APIDTO.class);
additionalPropertiesAPI.setType(APIDTO.TypeEnum.GRAPHQL);
APIProvider apiProvider = RestApiCommonUtil.getLoggedInUserProvider();
API apiToAdd = PublisherCommonUtils.prepareToCreateAPIByDTO(additionalPropertiesAPI, apiProvider,
RestApiCommonUtil.getLoggedInUsername());
//Save swagger definition of graphQL
APIDefinition parser = new OAS3Parser();
SwaggerData swaggerData = new SwaggerData(apiToAdd);
String apiDefinition = parser.generateAPIDefinition(swaggerData);
apiToAdd.setSwaggerDefinition(apiDefinition);
//adding the api
API createdApi = apiProvider.addAPI(apiToAdd);
String tenantDomain = RestApiCommonUtil.getLoggedInUserTenantDomain();
apiProvider.saveGraphqlSchemaDefinition(createdApi.getUuid(), schema, tenantDomain);
APIDTO createdApiDTO = APIMappingUtil.fromAPItoDTO(createdApi);
//This URI used to set the location header of the POST response
URI createdApiUri = new URI(RestApiConstants.RESOURCE_PATH_APIS + "/" + createdApiDTO.getId());
return Response.created(createdApiUri).entity(createdApiDTO).build();
} catch (APIManagementException e) {
String errorMessage = "Error while adding new API : " + additionalPropertiesAPI.getProvider() + "-" +
additionalPropertiesAPI.getName() + "-" + additionalPropertiesAPI.getVersion() + " - " + e.getMessage();
RestApiUtil.handleInternalServerError(errorMessage, e, log);
} catch (URISyntaxException e) {
String errorMessage = "Error while retrieving API location : " + additionalPropertiesAPI.getProvider() + "-"
+ additionalPropertiesAPI.getName() + "-" + additionalPropertiesAPI.getVersion();
RestApiUtil.handleInternalServerError(errorMessage, e, log);
} catch (IOException e) {
String errorMessage = "Error while retrieving content from file : " + additionalPropertiesAPI.getProvider()
+ "-" + additionalPropertiesAPI.getName() + "-" + additionalPropertiesAPI.getVersion();
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
return null;
}
/**
* Import an API by uploading an archive file. All relevant API data will be included upon the creation of
* the API. Depending on the choice of the user, provider of the imported API will be preserved or modified.
*
* @param fileInputStream Input stream from the REST request
* @param fileDetail File details as Attachment
* @param preserveProvider User choice to keep or replace the API provider
* @param overwrite Whether to update the API or not. This is used when updating already existing APIs.
* @return API import response
* @throws APIManagementException when error occurred while trying to import the API
*/
@Override public Response importAPI(InputStream fileInputStream, Attachment fileDetail,
Boolean preserveProvider, Boolean rotateRevision, Boolean overwrite, MessageContext messageContext) throws APIManagementException {
// Check whether to update. If not specified, default value is false.
overwrite = overwrite == null ? false : overwrite;
// Check if the URL parameter value is specified, otherwise the default value is true.
preserveProvider = preserveProvider == null || preserveProvider;
String[] tokenScopes = (String[]) PhaseInterceptorChain.getCurrentMessage().getExchange()
.get(RestApiConstants.USER_REST_API_SCOPES);
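        // The invoking user's REST API token scopes are handed over to the import implementation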
ImportExportAPI importExportAPI = APIImportExportUtil.getImportExportAPI();
importExportAPI.importAPI(fileInputStream, preserveProvider, rotateRevision, overwrite, tokenScopes);
return Response.status(Response.Status.OK).entity("API imported successfully.").build();
}
/**
* Validate graphQL Schema
* @param fileInputStream input file
* @param fileDetail file Detail
* @param messageContext messageContext
* @return Validation response
*/
@Override
public Response validateGraphQLSchema(InputStream fileInputStream, Attachment fileDetail,
MessageContext messageContext) {
GraphQLValidationResponseDTO validationResponse = new GraphQLValidationResponseDTO();
String filename = fileDetail.getContentDisposition().getFilename();
try {
String schema = IOUtils.toString(fileInputStream, RestApiConstants.CHARSET);
validationResponse = PublisherCommonUtils.validateGraphQLSchema(filename, schema);
} catch (IOException | APIManagementException e) {
validationResponse.setIsValid(false);
validationResponse.setErrorMessage(e.getMessage());
}
return Response.ok().entity(validationResponse).build();
}
/**
     * Generates mock response examples for inline prototyping
     * of a swagger definition
*
* @param apiId API Id
* @param ifNoneMatch If-None-Match header value
* @param messageContext message context
* @return apiDefinition
* @throws APIManagementException
*/
@Override
public Response generateMockScripts(String apiId, String ifNoneMatch, MessageContext messageContext) throws APIManagementException {
APIIdentifier apiIdentifierFromTable = APIMappingUtil.getAPIIdentifierFromUUID(apiId);
if (apiIdentifierFromTable == null) {
throw new APIMgtResourceNotFoundException("Couldn't retrieve existing API with API UUID: "
+ apiId, ExceptionCodes.from(ExceptionCodes.API_NOT_FOUND,
apiId));
}
String tenantDomain = RestApiCommonUtil.getLoggedInUserTenantDomain();
APIProvider apiProvider = RestApiCommonUtil.getLoggedInUserProvider();
API originalAPI = apiProvider.getAPIbyUUID(apiId, tenantDomain);
String apiDefinition = apiProvider.getOpenAPIDefinition(apiId, tenantDomain);
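        // Generate example (mock) responses into the swagger definition and persist it back to the API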
apiDefinition = String.valueOf(OASParserUtil.generateExamples(apiDefinition).get(APIConstants.SWAGGER));
apiProvider.saveSwaggerDefinition(originalAPI, apiDefinition, tenantDomain);
return Response.ok().entity(apiDefinition).build();
}
@Override
public Response getAPISubscriptionPolicies(String apiId, String ifNoneMatch, String xWSO2Tenant,
MessageContext messageContext) throws APIManagementException {
APIProvider apiProvider = RestApiCommonUtil.getLoggedInUserProvider();
APIDTO apiInfo = getAPIByID(apiId, apiProvider);
List<Tier> availableThrottlingPolicyList = new ThrottlingPoliciesApiServiceImpl()
.getThrottlingPolicyList(ThrottlingPolicyDTO.PolicyLevelEnum.SUBSCRIPTION.toString());
if (apiInfo != null ) {
List<String> apiPolicies = apiInfo.getPolicies();
if (apiPolicies != null && !apiPolicies.isEmpty()) {
List<Tier> apiThrottlingPolicies = new ArrayList<>();
for (Tier tier : availableThrottlingPolicyList) {
if (apiPolicies.contains(tier.getName())) {
apiThrottlingPolicies.add(tier);
}
}
return Response.ok().entity(apiThrottlingPolicies).build();
}
}
return null;
}
/**
* Retrieve deployment status of APIs in cloud clusters
     * @param apiId UUID of the API
     * @return Deployment status response
*/
@Override
    public Response deploymentsGetStatus(String apiId, MessageContext messageContext) throws APIManagementException {
String tenantDomain = RestApiCommonUtil.getLoggedInUserTenantDomain();
APIIdentifier apiIdentifier = APIMappingUtil.getAPIIdentifierFromUUID(apiId);
//APIProvider apiProvider = RestApiUtil.getLoggedInUserProvider();
DeploymentStatusListDTO deploymentStatusListDTO = APIMappingUtil.fromDeploymentStatustoDTO(apiIdentifier);
return Response.ok().entity(deploymentStatusListDTO).build();
}
private APIDTO getAPIByID(String apiId, APIProvider apiProvider) {
try {
String tenantDomain = RestApiCommonUtil.getLoggedInUserTenantDomain();
API api = apiProvider.getAPIbyUUID(apiId, tenantDomain);
APIRevision apiRevision = ApiMgtDAO.getInstance().checkAPIUUIDIsARevisionUUID(apiId);
if (apiRevision != null && !StringUtils.isEmpty(apiRevision.getApiUUID())) {
api.setRevision(true);
api.setRevisionedApiId(apiRevision.getApiUUID());
api.setRevisionId(apiRevision.getId());
}
return APIMappingUtil.fromAPItoDTO(api, apiProvider);
} catch (APIManagementException e) {
//Auth failure occurs when cross tenant accessing APIs. Sends 404, since we don't need
// to expose the existence of the resource
if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
} else if (isAuthorizationFailure(e)) {
RestApiUtil.handleAuthorizationFailure("User is not authorized to access the API", e, log);
} else {
String errorMessage = "Error while retrieving API : " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
}
return null;
}
private APIDTO createAPIDTO(API existingAPI, String newVersion) {
APIDTO apidto = new APIDTO();
apidto.setName(existingAPI.getId().getApiName());
apidto.setContext(existingAPI.getContext());
apidto.setVersion(newVersion);
return apidto;
}
/**
* Validate the provided OpenAPI definition (via file or url) and return a Map with the validation response
* information.
*
* @param url OpenAPI definition url
* @param fileInputStream file as input stream
* @param returnContent whether to return the content of the definition in the response DTO
* @return Map with the validation response information. A value with key 'dto' will have the response DTO
* of type OpenAPIDefinitionValidationResponseDTO for the REST API. A value with key 'model' will have the
* validation response of type APIDefinitionValidationResponse coming from the impl level.
*/
private Map validateOpenAPIDefinition(String url, InputStream fileInputStream, Attachment fileDetail,
Boolean returnContent, Boolean isServiceAPI) throws APIManagementException {
//validate inputs
handleInvalidParams(fileInputStream, fileDetail, url, isServiceAPI);
OpenAPIDefinitionValidationResponseDTO responseDTO;
APIDefinitionValidationResponse validationResponse = new APIDefinitionValidationResponse();
if (url != null) {
validationResponse = OASParserUtil.validateAPIDefinitionByURL(url, returnContent);
} else if (fileInputStream != null) {
try {
if (fileDetail != null) {
String filename = fileDetail.getContentDisposition().getFilename();
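                    // Zip archives are extracted and validated as multi-file definitions; any other file is
                    // treated as a single OpenAPI document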
if (filename.endsWith(".zip")) {
validationResponse =
OASParserUtil.extractAndValidateOpenAPIArchive(fileInputStream, returnContent);
} else {
String openAPIContent = IOUtils.toString(fileInputStream, RestApiConstants.CHARSET);
validationResponse = OASParserUtil.validateAPIDefinition(openAPIContent, returnContent);
}
} else {
String openAPIContent = IOUtils.toString(fileInputStream, RestApiConstants.CHARSET);
validationResponse = OASParserUtil.validateAPIDefinition(openAPIContent, returnContent);
}
} catch (IOException e) {
RestApiUtil.handleInternalServerError("Error while reading file content", e, log);
}
}
responseDTO = APIMappingUtil.getOpenAPIDefinitionValidationResponseFromModel(validationResponse,
returnContent);
Map response = new HashMap();
response.put(RestApiConstants.RETURN_MODEL, validationResponse);
response.put(RestApiConstants.RETURN_DTO, responseDTO);
return response;
}
/**
* Validate API import definition/validate definition parameters
*
* @param fileInputStream file content stream
* @param url URL of the definition
*/
private void handleInvalidParams(InputStream fileInputStream, Attachment fileDetail, String url,
Boolean isServiceAPI) {
String msg = "";
boolean isFileSpecified = (fileInputStream != null && fileDetail != null &&
fileDetail.getContentDisposition() != null && fileDetail.getContentDisposition().getFilename() != null)
|| (fileInputStream != null && isServiceAPI);
if (url == null && !isFileSpecified) {
msg = "Either 'file' or 'url' should be specified";
}
if (isFileSpecified && url != null) {
msg = "Only one of 'file' and 'url' should be specified";
}
if (StringUtils.isNotBlank(msg)) {
RestApiUtil.handleBadRequest(msg, log);
}
}
/**
* To check whether a particular exception is due to access control restriction.
*
* @param e Exception object.
     * @return true if the exception is caused by an authorization failure.
*/
private boolean isAuthorizationFailure(Exception e) {
String errorMessage = e.getMessage();
return errorMessage != null && errorMessage.contains(APIConstants.UN_AUTHORIZED_ERROR_MESSAGE);
}
    /**
     * To check whether the API is modified when the given sequence is in the API.
     *
     * @param api       API to be checked
     * @param mediation mediation policy
     * @return true if the API was modified, false otherwise
     */
private boolean isAPIModified(API api, Mediation mediation) {
if (mediation != null) {
String sequenceName;
if (APIConstants.API_CUSTOM_SEQUENCE_TYPE_IN.equalsIgnoreCase(mediation.getType())) {
sequenceName = api.getInSequence();
if (isSequenceExistsInAPI(sequenceName, mediation)) {
api.setInSequence(null);
return true;
}
} else if (APIConstants.API_CUSTOM_SEQUENCE_TYPE_OUT.equalsIgnoreCase(mediation.getType())) {
sequenceName = api.getOutSequence();
if (isSequenceExistsInAPI(sequenceName, mediation)) {
api.setOutSequence(null);
return true;
}
} else {
sequenceName = api.getFaultSequence();
if (isSequenceExistsInAPI(sequenceName, mediation)) {
api.setFaultSequence(null);
return true;
}
}
}
return false;
}
private boolean isSequenceExistsInAPI(String sequenceName, Mediation mediation) {
return StringUtils.isNotEmpty(sequenceName) && mediation.getName().equals(sequenceName);
}
/**
     * Returns the mediation policy name specified inside the mediation config
*
* @param config mediation config content
* @return name of the mediation policy or null
*/
public String getMediationNameFromConfig(String config) {
try {
//convert xml content in to json
String configInJson = XML.toJSONObject(config).toString();
JSONParser parser = new JSONParser();
//Extracting mediation policy name from the json string
JSONObject jsonObject = (JSONObject) parser.parse(configInJson);
JSONObject rootObject = (JSONObject) jsonObject.get(APIConstants.MEDIATION_SEQUENCE_ELEM);
String name = rootObject.get(APIConstants.POLICY_NAME_ELEM).toString();
return name + APIConstants.MEDIATION_CONFIG_EXT;
} catch (JSONException e) {
log.error("JSON Error occurred while converting the mediation config string to json", e);
} catch (ParseException e) {
log.error("Parser Error occurred while parsing config json string in to json object", e);
}
return null;
}
/**
* Check the existence of the mediation policy
*
     * @param apiProvider           API provider instance
     * @param mediationResourcePath registry path of the mediation policy
     * @param name                  name of the mediation policy
*/
public void checkMediationPolicy(APIProvider apiProvider, String mediationResourcePath, String name) throws APIManagementException {
if (apiProvider.checkIfResourceExists(mediationResourcePath)) {
throw new APIManagementException(ExceptionCodes.MEDIATION_POLICY_API_ALREADY_EXISTS);
}
if (StringUtils.isNotBlank(name) && name.length() > APIConstants.MAX_LENGTH_MEDIATION_POLICY_NAME) {
throw new APIManagementException(ExceptionCodes.from(ExceptionCodes.MEDIATION_POLICY_NAME_TOO_LONG,
APIConstants.MAX_LENGTH_MEDIATION_POLICY_NAME + ""));
}
}
/**
* Send HTTP HEAD request to test the endpoint url
*
* @param urlVal url for which the HEAD request is sent
* @return ApiEndpointValidationResponseDTO Response DTO containing validity information of the HEAD request made
* to test the endpoint url
*/
public static ApiEndpointValidationResponseDTO sendHttpHEADRequest(String urlVal) {
ApiEndpointValidationResponseDTO apiEndpointValidationResponseDTO = new ApiEndpointValidationResponseDTO();
HttpHead head = new HttpHead(urlVal);
org.apache.commons.httpclient.HttpClient client = new org.apache.commons.httpclient.HttpClient();
// extract the host name and add the Host http header for sanity
head.addHeader("Host", urlVal.replaceAll("https?://", "").
replaceAll("(/.*)?", ""));
client.getParams().setParameter("http.socket.timeout", 4000);
client.getParams().setParameter("http.connection.timeout", 4000);
HttpMethod method = new HeadMethod(urlVal);
if (System.getProperty(APIConstants.HTTP_PROXY_HOST) != null &&
System.getProperty(APIConstants.HTTP_PROXY_PORT) != null) {
log.debug("Proxy configured, hence routing through configured proxy");
String proxyHost = System.getProperty(APIConstants.HTTP_PROXY_HOST);
String proxyPort = System.getProperty(APIConstants.HTTP_PROXY_PORT);
HostConfiguration hostConfiguration = client.getHostConfiguration();
hostConfiguration.setProxy(proxyHost, Integer.parseInt(proxyPort));
client.setHostConfiguration(hostConfiguration);
}
try {
int statusCode = client.executeMethod(method);
apiEndpointValidationResponseDTO.setStatusCode(statusCode);
apiEndpointValidationResponseDTO.setStatusMessage(HttpStatus.getStatusText(statusCode));
} catch (UnknownHostException e) {
log.error("UnknownHostException occurred while sending the HEAD request to the given endpoint url:", e);
apiEndpointValidationResponseDTO.setError("Unknown Host");
} catch (IOException e) {
log.error("Error occurred while sending the HEAD request to the given endpoint url:", e);
apiEndpointValidationResponseDTO.setError("Connection error");
} finally {
method.releaseConnection();
}
return apiEndpointValidationResponseDTO;
}
/**
* Retrieve available revisions of an API
*
* @param apiId UUID of the API
* @param query Search query string
* @param messageContext message context object
* @return response containing list of API revisions
*/
@Override
public Response getAPIRevisions(String apiId, String query, MessageContext messageContext) {
try {
APIProvider apiProvider = RestApiCommonUtil.getLoggedInUserProvider();
APIRevisionListDTO apiRevisionListDTO;
List<APIRevision> apiRevisions = apiProvider.getAPIRevisions(apiId);
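            // The query 'deployed:true' narrows the result to revisions that have at least one deployment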
if (StringUtils.equalsIgnoreCase(query, "deployed:true")) {
List<APIRevision> apiDeployedRevisions = new ArrayList<>();
for (APIRevision apiRevision : apiRevisions) {
if (apiRevision.getApiRevisionDeploymentList().size() != 0) {
apiDeployedRevisions.add(apiRevision);
}
}
apiRevisionListDTO = APIMappingUtil.fromListAPIRevisiontoDTO(apiDeployedRevisions);
} else {
apiRevisionListDTO = APIMappingUtil.fromListAPIRevisiontoDTO(apiRevisions);
}
return Response.ok().entity(apiRevisionListDTO).build();
} catch (APIManagementException e) {
String errorMessage = "Error while adding retrieving API Revision for api id : " + apiId + " - " + e.getMessage();
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
return null;
}
/**
* Create a new API revision
*
* @param apiId UUID of the API
     * @param apIRevisionDTO API revision object that needs to be added
* @param messageContext message context object
* @return response containing newly created APIRevision object
*/
@Override
public Response createAPIRevision(String apiId, APIRevisionDTO apIRevisionDTO, MessageContext messageContext) {
try {
APIProvider apiProvider = RestApiCommonUtil.getLoggedInUserProvider();
APIRevision apiRevision = new APIRevision();
apiRevision.setApiUUID(apiId);
apiRevision.setDescription(apIRevisionDTO.getDescription());
String tenantDomain = RestApiCommonUtil.getLoggedInUserTenantDomain();
//adding the api revision
String revisionId = apiProvider.addAPIRevision(apiRevision, tenantDomain);
//Retrieve the newly added APIRevision to send in the response payload
APIRevision createdApiRevision = apiProvider.getAPIRevision(revisionId);
APIRevisionDTO createdApiRevisionDTO = APIMappingUtil.fromAPIRevisiontoDTO(createdApiRevision);
//This URI used to set the location header of the POST response
URI createdApiUri = new URI(RestApiConstants.RESOURCE_PATH_APIS
+ "/" + createdApiRevisionDTO.getApiInfo().getId() + "/"
+ RestApiConstants.RESOURCE_PATH_REVISIONS + "/" + createdApiRevisionDTO.getId());
return Response.created(createdApiUri).entity(createdApiRevisionDTO).build();
} catch (APIManagementException e) {
String errorMessage = "Error while adding new API Revision for API : " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
} catch (URISyntaxException e) {
String errorMessage = "Error while retrieving created revision API location for API : "
+ apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
return null;
}
/**
* Retrieve a revision of an API
*
* @param apiId UUID of the API
* @param revisionId Revision ID of the API
* @param messageContext message context object
* @return response containing APIRevision object
*/
@Override
public Response getAPIRevision(String apiId, String revisionId, MessageContext messageContext) {
// remove errorObject and add implementation code!
ErrorDTO errorObject = new ErrorDTO();
Response.Status status = Response.Status.NOT_IMPLEMENTED;
errorObject.setCode((long) status.getStatusCode());
errorObject.setMessage(status.toString());
errorObject.setDescription("The requested resource has not been implemented");
return Response.status(status).entity(errorObject).build();
}
/**
* Delete a revision of an API
*
* @param apiId UUID of the API
* @param revisionId Revision ID of the API
* @param messageContext message context object
     * @return response containing the remaining API revisions
*/
@Override
public Response deleteAPIRevision(String apiId, String revisionId, MessageContext messageContext)
throws APIManagementException {
APIProvider apiProvider = RestApiCommonUtil.getLoggedInUserProvider();
String tenantDomain = RestApiCommonUtil.getLoggedInUserTenantDomain();
apiProvider.deleteAPIRevision(apiId, revisionId, tenantDomain);
List<APIRevision> apiRevisions = apiProvider.getAPIRevisions(apiId);
APIRevisionListDTO apiRevisionListDTO = APIMappingUtil.fromListAPIRevisiontoDTO(apiRevisions);
return Response.ok().entity(apiRevisionListDTO).build();
}
/**
* Deploy a revision
*
* @param apiId UUID of the API
* @param revisionId Revision ID of the API
* @param messageContext message context object
     * @return response with 201 status code
*/
@Override
public Response deployAPIRevision(String apiId, String revisionId,
List<APIRevisionDeploymentDTO> apIRevisionDeploymentDTOList,
MessageContext messageContext) throws APIManagementException {
APIProvider apiProvider = RestApiCommonUtil.getLoggedInUserProvider();
List<APIRevisionDeployment> apiRevisionDeployments = new ArrayList<>();
for (APIRevisionDeploymentDTO apiRevisionDeploymentDTO : apIRevisionDeploymentDTOList) {
APIRevisionDeployment apiRevisionDeployment = new APIRevisionDeployment();
apiRevisionDeployment.setRevisionUUID(revisionId);
apiRevisionDeployment.setDeployment(apiRevisionDeploymentDTO.getName());
apiRevisionDeployment.setVhost(apiRevisionDeploymentDTO.getVhost());
apiRevisionDeployment.setDisplayOnDevportal(apiRevisionDeploymentDTO.isDisplayOnDevportal());
apiRevisionDeployments.add(apiRevisionDeployment);
}
apiProvider.deployAPIRevision(apiId, revisionId, apiRevisionDeployments);
List<APIRevisionDeployment> apiRevisionDeploymentsResponse = apiProvider.getAPIRevisionDeploymentList(revisionId);
List<APIRevisionDeploymentDTO> apiRevisionDeploymentDTOS = new ArrayList<>();
for (APIRevisionDeployment apiRevisionDeployment : apiRevisionDeploymentsResponse) {
apiRevisionDeploymentDTOS.add(APIMappingUtil.fromAPIRevisionDeploymenttoDTO(apiRevisionDeployment));
}
Response.Status status = Response.Status.CREATED;
return Response.status(status).entity(apiRevisionDeploymentDTOS).build();
}
/**
* Get revision deployment list
*
* @param apiId UUID of the API
* @param messageContext message context object
* @return response with 200 status code
*/
@Override
public Response getAPIRevisionDeployments(String apiId, MessageContext messageContext) throws APIManagementException {
APIProvider apiProvider = RestApiCommonUtil.getLoggedInUserProvider();
List<APIRevisionDeployment> apiRevisionDeploymentsList = new ArrayList<>();
List<APIRevision> apiRevisions = apiProvider.getAPIRevisions(apiId);
for (APIRevision apiRevision : apiRevisions) {
List<APIRevisionDeployment> apiRevisionDeploymentsResponse =
apiProvider.getAPIRevisionDeploymentList(apiRevision.getRevisionUUID());
            apiRevisionDeploymentsList.addAll(apiRevisionDeploymentsResponse);
}
List<APIRevisionDeploymentDTO> apiRevisionDeploymentDTOS = new ArrayList<>();
for (APIRevisionDeployment apiRevisionDeployment : apiRevisionDeploymentsList) {
apiRevisionDeploymentDTOS.add(APIMappingUtil.fromAPIRevisionDeploymenttoDTO(apiRevisionDeployment));
}
return Response.ok().entity(apiRevisionDeploymentDTOS).build();
}
@Override
public Response undeployAPIRevision(String apiId, String revisionId, String revisionNum, Boolean allEnvironments,
List<APIRevisionDeploymentDTO> apIRevisionDeploymentDTOList,
MessageContext messageContext) throws APIManagementException {
APIProvider apiProvider = RestApiCommonUtil.getLoggedInUserProvider();
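        // The revision to undeploy can be addressed either by its UUID or by its revision number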
if (revisionId == null && revisionNum != null) {
revisionId = apiProvider.getAPIRevisionUUID(revisionNum, apiId);
if (revisionId == null) {
return Response.status(Response.Status.BAD_REQUEST).entity(null).build();
}
}
List<APIRevisionDeployment> apiRevisionDeployments = new ArrayList<>();
if (allEnvironments) {
apiRevisionDeployments = apiProvider.getAPIRevisionDeploymentList(revisionId);
} else {
for (APIRevisionDeploymentDTO apiRevisionDeploymentDTO : apIRevisionDeploymentDTOList) {
APIRevisionDeployment apiRevisionDeployment = new APIRevisionDeployment();
apiRevisionDeployment.setRevisionUUID(revisionId);
apiRevisionDeployment.setDeployment(apiRevisionDeploymentDTO.getName());
apiRevisionDeployment.setVhost(apiRevisionDeploymentDTO.getVhost());
apiRevisionDeployment.setDisplayOnDevportal(apiRevisionDeploymentDTO.isDisplayOnDevportal());
apiRevisionDeployments.add(apiRevisionDeployment);
}
}
apiProvider.undeployAPIRevisionDeployment(apiId, revisionId, apiRevisionDeployments);
List<APIRevisionDeployment> apiRevisionDeploymentsResponse = apiProvider.getAPIRevisionDeploymentList(revisionId);
List<APIRevisionDeploymentDTO> apiRevisionDeploymentDTOS = new ArrayList<>();
for (APIRevisionDeployment apiRevisionDeployment : apiRevisionDeploymentsResponse) {
apiRevisionDeploymentDTOS.add(APIMappingUtil.fromAPIRevisionDeploymenttoDTO(apiRevisionDeployment));
}
Response.Status status = Response.Status.CREATED;
return Response.status(status).entity(apiRevisionDeploymentDTOS).build();
}
/**
* Restore a revision to the working copy of the API
*
* @param apiId UUID of the API
* @param revisionId Revision ID of the API
* @param messageContext message context object
     * @return response with 201 status code
*/
@Override
public Response restoreAPIRevision(String apiId, String revisionId, MessageContext messageContext)
throws APIManagementException {
APIProvider apiProvider = RestApiCommonUtil.getLoggedInUserProvider();
String tenantDomain = RestApiCommonUtil.getLoggedInUserTenantDomain();
apiProvider.restoreAPIRevision(apiId, revisionId, tenantDomain);
APIDTO apiToReturn = getAPIByID(apiId, apiProvider);
Response.Status status = Response.Status.CREATED;
return Response.status(status).entity(apiToReturn).build();
}
/**
* Validate AsyncAPI Specification and retrieve as the response
*
* @param url URL of the AsyncAPI Specification
* @param fileInputStream InputStream for the provided file
* @param fileDetail File meta-data
* @param returnContent Whether to return the definition content
* @param messageContext CXF message context
* @return AsyncAPI Specification Validation response
*/
@Override
public Response validateAsyncAPISpecification(Boolean returnContent, String url, InputStream fileInputStream, Attachment fileDetail, MessageContext messageContext) throws APIManagementException {
//validate and retrieve the AsyncAPI specification
Map validationResponseMap = null;
try {
validationResponseMap = validateAsyncAPISpecification(url, fileInputStream, fileDetail, returnContent);
} catch (APIManagementException e) {
RestApiUtil.handleInternalServerError("Error occurred while validating API Definition", e, log);
}
AsyncAPISpecificationValidationResponseDTO validationResponseDTO =
(AsyncAPISpecificationValidationResponseDTO)validationResponseMap.get(RestApiConstants.RETURN_DTO);
return Response.ok().entity(validationResponseDTO).build();
}
/**
* Validate the provided AsyncAPI specification (via file or url) and return a Map with the validation response
* information
*
* @param url AsyncAPI specification url
* @param fileInputStream file as input stream
* @param returnContent whether to return the content of the definition in the response DTO
* @return Map with the validation response information. A value with key 'dto' will have the response DTO
* of type AsyncAPISpecificationValidationResponseDTO for the REST API. A value with the key 'model' will have the
* validation response of type APIDefinitionValidationResponse coming from the impl level
*/
private Map validateAsyncAPISpecification(String url, InputStream fileInputStream, Attachment fileDetail,
Boolean returnContent) throws APIManagementException {
//validate inputs
handleInvalidParams(fileInputStream, fileDetail, url, false);
AsyncAPISpecificationValidationResponseDTO responseDTO;
APIDefinitionValidationResponse validationResponse = new APIDefinitionValidationResponse();
String schemaToBeValidated = null;
if (url != null) {
//validate URL
validationResponse = AsyncApiParserUtil.validateAsyncAPISpecificationByURL(url, returnContent);
} else if (fileInputStream != null){
//validate file
String fileName = fileDetail.getContentDisposition().getFilename();
try {
if (fileName.endsWith(APIConstants.YAML_FILE_EXTENSION) || fileName.endsWith(APIConstants.YML_FILE_EXTENSION)){
//convert .yml or .yaml to JSON for validation
ObjectMapper yamlReader = new ObjectMapper(new YAMLFactory());
Object obj = yamlReader.readValue(fileInputStream, Object.class);
ObjectMapper jsonWriter = new ObjectMapper();
schemaToBeValidated = jsonWriter.writeValueAsString(obj);
} else if (fileName.endsWith(APIConstants.JSON_FILE_EXTENSION)){
//continue with .json
JSONTokener jsonDataFile = new JSONTokener(fileInputStream);
schemaToBeValidated = new org.json.JSONObject(jsonDataFile).toString();
}
validationResponse = AsyncApiParserUtil.validateAsyncAPISpecification(schemaToBeValidated, returnContent);
} catch (IOException e){
//error while reading the schemas
RestApiUtil.handleInternalServerError("Error while reading file content", e, log);
}
}
responseDTO = APIMappingUtil.getAsyncAPISpecificationValidationResponseFromModel(validationResponse, returnContent);
Map response = new HashMap();
response.put(RestApiConstants.RETURN_MODEL, validationResponse);
response.put(RestApiConstants.RETURN_DTO, responseDTO);
return response;
}
/**
     * Import an AsyncAPI Specification and create an API
*
* @param fileInputStream InputStream for the provided file
* @param fileDetail File meta-data
* @param url URL of the AsyncAPI Specification
* @param additionalProperties API object (json) including additional properties like name, version, context
* @param messageContext CXF message context
     * @return Response of the API import using the AsyncAPI specification
*/
@Override
public Response importAsyncAPISpecification(InputStream fileInputStream, Attachment fileDetail, String url, String additionalProperties, MessageContext messageContext) throws APIManagementException {
// validate 'additionalProperties' json
if (StringUtils.isBlank(additionalProperties)) {
RestApiUtil.handleBadRequest("'additionalProperties' is required and should not be null", log);
}
//validate and retrieve the AsyncAPI specification
Map validationResponseMap = null;
try {
validationResponseMap = validateAsyncAPISpecification(url, fileInputStream, fileDetail, true);
} catch (APIManagementException e) {
RestApiUtil.handleInternalServerError("Error occurred while validating API Definition", e, log);
}
AsyncAPISpecificationValidationResponseDTO validationResponseDTO =
(AsyncAPISpecificationValidationResponseDTO) validationResponseMap.get(RestApiConstants.RETURN_DTO);
APIDefinitionValidationResponse validationResponse =
(APIDefinitionValidationResponse) validationResponseMap.get(RestApiConstants.RETURN_MODEL);
if (!validationResponseDTO.isIsValid()) {
ErrorDTO errorDTO = APIMappingUtil.getErrorDTOFromErrorListItems(validationResponseDTO.getErrors());
throw RestApiUtil.buildBadRequestException(errorDTO);
}
// Convert the 'additionalProperties' json into an APIDTO object
ObjectMapper objectMapper = new ObjectMapper();
APIDTO apiDTOFromProperties;
try {
apiDTOFromProperties = objectMapper.readValue(additionalProperties, APIDTO.class);
} catch (IOException e) {
throw RestApiUtil.buildBadRequestException("Error while parsing 'additionalProperties'", e);
}
//validate websocket url and change type of the API in APIDTO
/*if (PublisherCommonUtils.isValidWSAPI(apiDTOFromProperties)){
apiDTOFromProperties.setType(APIDTO.TypeEnum.WS);
ArrayList<String> websocketTransports = new ArrayList<>();
websocketTransports.add(APIConstants.WS_PROTOCOL);
websocketTransports.add(APIConstants.WSS_PROTOCOL);
apiDTOFromProperties.setTransport(websocketTransports);
}*/
//Only WS type APIs should be allowed
/*if (!APIDTO.TypeEnum.WS.equals(apiDTOFromProperties.getType())){
throw RestApiUtil.buildBadRequestException("The API's type should only be WebSocket when "+
"importing an AsyncAPI specification");
}*/
//Import the API and Definition
try {
APIProvider apiProvider = RestApiCommonUtil.getLoggedInUserProvider();
API apiToAdd = PublisherCommonUtils.prepareToCreateAPIByDTO(apiDTOFromProperties, apiProvider,
RestApiCommonUtil.getLoggedInUsername());
String definitionToAdd = validationResponse.getJsonContent();
apiProvider.addAPI(apiToAdd);
apiProvider.saveAsyncApiDefinition(apiToAdd, definitionToAdd);
//load topics from AsyncAPI
if (APIDTO.TypeEnum.WEBSUB.equals(apiDTOFromProperties.getType())){
try {
apiProvider.updateAPI(AsyncApiParserUtil.loadTopicsFromAsyncAPIDefinition(apiToAdd, definitionToAdd));
} catch (FaultGatewaysException e) {
                    log.error("Error while updating topics of the WebSub API from the AsyncAPI definition", e);
}
}
APIDTO createdAPIDTO = APIMappingUtil.fromAPItoDTO(apiProvider.getAPI(apiToAdd.getId()));
URI createdApiUri = new URI(RestApiConstants.RESOURCE_PATH_APIS + "/" + createdAPIDTO.getId());
return Response.created(createdApiUri).entity(createdAPIDTO).build();
} catch (APIManagementException e) {
String errorMessage = "Error while adding new API : " + apiDTOFromProperties.getProvider() + "-" +
apiDTOFromProperties.getName() + "-" + apiDTOFromProperties.getVersion() + " - " + e.getMessage();
RestApiUtil.handleInternalServerError(errorMessage, e, log);
} catch (URISyntaxException e) {
String errorMessage = "Error while retrieving API location : " + apiDTOFromProperties.getProvider() + "-" +
apiDTOFromProperties.getName() + "-" + apiDTOFromProperties.getVersion();
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
return null;
}
@Override
public Response apisApiIdAsyncapiGet(String apiId, String ifNoneMatch, MessageContext messageContext) throws APIManagementException {
try {
APIProvider apiProvider = RestApiCommonUtil.getLoggedInUserProvider();
String tenantDomain = RestApiCommonUtil.getLoggedInUserTenantDomain();
//this will fail if user does not have access to the API or the API does not exist
//APIIdentifier apiIdentifier = APIMappingUtil.getAPIIdentifierFromUUID(apiId, tenantDomain);
//String asyncAPIString = apiProvider.getAsyncAPIDefinition(apiIdentifier);
API api = apiProvider.getAPIbyUUID(apiId, tenantDomain);
String updatedDefinition = RestApiCommonUtil.retrieveAsyncAPIDefinition(api, apiProvider);
return Response.ok().entity(updatedDefinition).header("Content-Disposition",
"attachment; fileNme=\"" + "asyncapi.json" + "\"").build();
} catch (APIManagementException e) {
            //Auth failure occurs when cross tenant accessing APIs. Sends 404, since we don't need to expose the existence of the resource
if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
} else if (isAuthorizationFailure(e)) {
RestApiUtil.
handleAuthorizationFailure("Authorization failre while retrieving AsyncAPI of API : " + apiId,
e, log);
} else {
String errorMessage = "Error while retrieving AsyncAPI for API : " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
}
return null;
}
@Override
public Response apisApiIdAsyncapiPut(String apiId, String ifMatch, String apiDefinition, String url, InputStream fileInputStream, Attachment fileDetail, MessageContext messageContext) throws APIManagementException {
try {
String updatedAsyncAPIDefinition;
String tenantDomain = RestApiCommonUtil.getLoggedInUserTenantDomain();
//Handle URL and file based definition imports
if (url != null || fileInputStream != null){
//Validate and retrieve the AsyncAPI definition
Map validationResponseMap = validateAsyncAPISpecification(url, fileInputStream,
fileDetail, true);
APIDefinitionValidationResponse validationResponse =
(APIDefinitionValidationResponse) validationResponseMap.get(RestApiConstants.RETURN_MODEL);
if (!validationResponse.isValid()) {
RestApiUtil.handleBadRequest(validationResponse.getErrorItems(), log);
}
updatedAsyncAPIDefinition = updateAsyncAPIDefinition(apiId, validationResponse);
} else {
updatedAsyncAPIDefinition = updateAsyncAPIDefinition(apiId, apiDefinition);
}
return Response.ok().entity(updatedAsyncAPIDefinition).build();
} catch (APIManagementException e) {
//Auth failure occurs when cross tenant accessing APIs. Sends 404, since we don't need
// to expose the existence of the resource
if (RestApiUtil.isDueToResourceNotFound(e) || RestApiUtil.isDueToAuthorizationFailure(e)) {
RestApiUtil.handleResourceNotFoundError(RestApiConstants.RESOURCE_API, apiId, e, log);
} else if (isAuthorizationFailure(e)) {
RestApiUtil.handleAuthorizationFailure(
"Authorization failure while updating AsyncAPI definition of API: " + apiId, e, log);
} else {
String errorMessage = "Error while updating the AsyncAPI definition of the API: " + apiId + " - "
+ e.getMessage();
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
} catch (FaultGatewaysException e) {
String errorMessage = "Error while updating API : " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
return null;
}
/**
     * Update the AsyncAPI definition of the given API. The AsyncAPI definition will be validated before updating.
*
* @param apiId API Id
* @param apiDefinition AsyncAPI definition
* @return updated AsyncAPI definition
* @throws APIManagementException when error occurred updating AsyncAPI
* @throws FaultGatewaysException when error occurred publishing API to the gateway
*/
private String updateAsyncAPIDefinition(String apiId, String apiDefinition)
throws APIManagementException, FaultGatewaysException {
APIDefinitionValidationResponse response = AsyncApiParserUtil
.validateAsyncAPISpecification(apiDefinition, true);
if (!response.isValid()) {
RestApiUtil.handleBadRequest(response.getErrorItems(), log);
}
return updateAsyncAPIDefinition(apiId, response);
}
/**
     * Update the AsyncAPI definition of the given API
*
* @param apiId API Id
* @param response response of the AsyncAPI definition validation call
* @return updated AsyncAPI definition
* @throws APIManagementException when error occurred updating AsyncAPI definition
* @throws FaultGatewaysException when error occurred publishing API to the gateway
*/
private String updateAsyncAPIDefinition(String apiId, APIDefinitionValidationResponse response)
throws APIManagementException, FaultGatewaysException {
APIProvider apiProvider = RestApiCommonUtil.getLoggedInUserProvider();
String tenantDomain = RestApiCommonUtil.getLoggedInUserTenantDomain();
        //this will fail if user does not have access to the API or the API does not exist
API existingAPI = apiProvider.getAPIbyUUID(apiId, tenantDomain);
String apiDefinition = response.getJsonContent();
        //updating API with the new AsyncAPI definition
apiProvider.saveAsyncApiDefinition(existingAPI, apiDefinition);
apiProvider.updateAPI(existingAPI);
//retrieves the updated AsyncAPI definition
return apiProvider.getAsyncAPIDefinition(existingAPI.getId());
}
@Override
public Response importServiceFromCatalog(String serviceKey, APIDTO apiDto, MessageContext messageContext) {
if (StringUtils.isEmpty(serviceKey)) {
RestApiUtil.handleBadRequest("Required parameter serviceKey is missing", log);
}
try {
ServiceCatalogImpl serviceCatalog = new ServiceCatalogImpl();
String username = RestApiCommonUtil.getLoggedInUsername();
int tenantId = APIUtil.getTenantId(username);
ServiceEntry service = serviceCatalog.getServiceByKey(serviceKey, tenantId);
if (service == null) {
RestApiUtil.handleResourceNotFoundError("Service", serviceKey, log);
}
APIDTO createdApiDTO = null;
if (ServiceEntry.DefinitionType.OAS2.equals(service.getDefinitionType()) ||
ServiceEntry.DefinitionType.OAS3.equals(service.getDefinitionType())) {
createdApiDTO = importOpenAPIDefinition(service.getEndpointDef(), null, apiDto, null, service);
}
if (createdApiDTO != null) {
URI createdApiUri = new URI(RestApiConstants.RESOURCE_PATH_APIS + "/" + createdApiDTO.getId());
return Response.created(createdApiUri).entity(createdApiDTO).build();
} else {
RestApiUtil.handleBadRequest("Unsupported definition type provided. Cannot create API " +
"using the service type " + service.getDefinitionType().name(), log);
}
} catch (APIManagementException e) {
if (RestApiUtil.isDueToResourceNotFound(e)) {
RestApiUtil.handleResourceNotFoundError("Service", serviceKey, e, log);
} else {
String errorMessage = "Error while creating API using Service with Id : " + serviceKey
+ " from Service Catalog";
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
} catch (URISyntaxException e) {
String errorMessage = "Error while retrieving API location : " + apiDto.getName() + "-"
+ apiDto.getVersion();
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
return null;
}
@Override
public Response reimportServiceFromCatalog(String apiId, MessageContext messageContext)
throws APIManagementException {
APIProvider apiProvider = RestApiCommonUtil.getLoggedInUserProvider();
String username = RestApiCommonUtil.getLoggedInUsername();
String tenantDomain = RestApiCommonUtil.getLoggedInUserTenantDomain();
int tenantId = APIUtil.getTenantId(username);
try {
API api = apiProvider.getLightweightAPIByUUID(apiId, tenantDomain);
API originalAPI = apiProvider.getAPIbyUUID(apiId, tenantDomain);
String serviceKey = apiProvider.retrieveServiceKeyByApiId(originalAPI.getId().getId(), tenantId);
ServiceCatalogImpl serviceCatalog = new ServiceCatalogImpl();
ServiceEntry service = serviceCatalog.getServiceByKey(serviceKey, tenantId);
String endpointConfig = PublisherCommonUtils.constructEndpointConfigForService(service);
api.setEndpointConfig(endpointConfig);
JSONObject serviceInfo = new JSONObject();
serviceInfo.put("name", service.getName());
serviceInfo.put("version", service.getVersion());
serviceInfo.put("key", service.getKey());
serviceInfo.put("md5", service.getMd5());
api.setServiceInfo(serviceInfo);
API updatedApi = apiProvider.updateAPI(api, originalAPI);
if (ServiceEntry.DefinitionType.OAS2.equals(service.getDefinitionType()) ||
ServiceEntry.DefinitionType.OAS3.equals(service.getDefinitionType())) {
Map validationResponseMap = validateOpenAPIDefinition(null, service.getEndpointDef(), null,
true, true);
APIDefinitionValidationResponse validationResponse =
(APIDefinitionValidationResponse) validationResponseMap.get(RestApiConstants.RETURN_MODEL);
if (!validationResponse.isValid()) {
RestApiUtil.handleBadRequest(validationResponse.getErrorItems(), log);
}
PublisherCommonUtils.updateSwagger(apiId, validationResponse, true);
} else {
RestApiUtil.handleBadRequest("Unsupported definition type provided. Cannot re-import service to " +
"API using the service type " + service.getDefinitionType(), log);
}
return Response.ok().entity(APIMappingUtil.fromAPItoDTO(updatedApi)).build();
} catch (APIManagementException e) {
RestApiUtil.handleInternalServerError("Error while retrieving the service key of the service " +
"associated with API with id " + apiId, log);
} catch (FaultGatewaysException e) {
String errorMessage = "Error while updating API : " + apiId;
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
return null;
}
private APIDTO importOpenAPIDefinition(InputStream definition, String definitionUrl, APIDTO apiDTOFromProperties,
Attachment fileDetail, ServiceEntry service) {
// Validate and retrieve the OpenAPI definition
Map validationResponseMap = null;
try {
boolean isServiceAPI = false;
if (service != null) {
isServiceAPI = true;
}
validationResponseMap = validateOpenAPIDefinition(definitionUrl, definition, fileDetail, true,
isServiceAPI);
} catch (APIManagementException e) {
RestApiUtil.handleInternalServerError("Error occurred while validating API Definition", e, log);
}
OpenAPIDefinitionValidationResponseDTO validationResponseDTO =
(OpenAPIDefinitionValidationResponseDTO) validationResponseMap.get(RestApiConstants.RETURN_DTO);
APIDefinitionValidationResponse validationResponse =
(APIDefinitionValidationResponse) validationResponseMap.get(RestApiConstants.RETURN_MODEL);
if (!validationResponseDTO.isIsValid()) {
ErrorDTO errorDTO = APIMappingUtil.getErrorDTOFromErrorListItems(validationResponseDTO.getErrors());
throw RestApiUtil.buildBadRequestException(errorDTO);
}
// Only HTTP type APIs should be allowed
if (!APIDTO.TypeEnum.HTTP.equals(apiDTOFromProperties.getType())) {
throw RestApiUtil.buildBadRequestException("The API's type should only be HTTP when " +
"importing an OpenAPI definition");
}
// Import the API and Definition
try {
APIProvider apiProvider = RestApiCommonUtil.getLoggedInUserProvider();
API apiToAdd = PublisherCommonUtils.prepareToCreateAPIByDTO(apiDTOFromProperties, apiProvider,
RestApiCommonUtil.getLoggedInUsername());
if (service != null) {
apiToAdd.setServiceInfo("key", service.getKey());
apiToAdd.setServiceInfo("md5", service.getMd5());
apiToAdd.setEndpointConfig(PublisherCommonUtils.constructEndpointConfigForService(service));
}
boolean syncOperations = apiDTOFromProperties.getOperations().size() > 0;
// Rearrange paths according to the API payload and save the OpenAPI definition
APIDefinition apiDefinition = validationResponse.getParser();
SwaggerData swaggerData;
String definitionToAdd = validationResponse.getJsonContent();
if (syncOperations) {
PublisherCommonUtils.validateScopes(apiToAdd);
swaggerData = new SwaggerData(apiToAdd);
definitionToAdd = apiDefinition.populateCustomManagementInfo(definitionToAdd, swaggerData);
}
definitionToAdd = OASParserUtil.preProcess(definitionToAdd);
Set<URITemplate> uriTemplates = apiDefinition.getURITemplates(definitionToAdd);
Set<Scope> scopes = apiDefinition.getScopes(definitionToAdd);
apiToAdd.setUriTemplates(uriTemplates);
apiToAdd.setScopes(scopes);
//Set extensions from API definition to API object
apiToAdd = OASParserUtil.setExtensionsToAPI(definitionToAdd, apiToAdd);
if (!syncOperations) {
PublisherCommonUtils.validateScopes(apiToAdd);
swaggerData = new SwaggerData(apiToAdd);
definitionToAdd = apiDefinition
.populateCustomManagementInfo(validationResponse.getJsonContent(), swaggerData);
}
// adding the API and definition
apiToAdd.setSwaggerDefinition(definitionToAdd);
API addedAPI = apiProvider.addAPI(apiToAdd);
//apiProvider.saveSwaggerDefinition(apiToAdd, definitionToAdd);
// retrieving the added API for returning as the response
// this would provide the updated templates
addedAPI = apiProvider.getAPIbyUUID(addedAPI.getUuid(), RestApiCommonUtil.getLoggedInUserTenantDomain());
return APIMappingUtil.fromAPItoDTO(addedAPI);
} catch (APIManagementException e) {
String errorMessage = "Error while adding new API : " + apiDTOFromProperties.getProvider() + "-" +
apiDTOFromProperties.getName() + "-" + apiDTOFromProperties.getVersion() + " - " + e.getMessage();
RestApiUtil.handleInternalServerError(errorMessage, e, log);
}
return null;
}
}
|
apache-2.0
|
jesse108/infectionMap
|
lib/Util/Time.class.php
|
2764
|
<?php
class Util_Time{
const TIMER_PRECISION_MSEC = 1; //milliseconds
const TIMER_PRECISION_USEC = 2; //microseconds
const TIMER_PRECISION_SEC = 3; //seconds
private static $timerList = array(); //array of timer start times
//Get the current time in milliseconds
public static function GetMilliTime(){
return floatval(self::GetMicroTime()/1000);
}
//Get the current time in microseconds
public static function GetMicroTime(){
list($usec,$sec) = explode(' ', microtime());
$usec = floatval($usec) * 1000000;
$sec = floatval($sec) * 1000000;
$uTime = $usec + $sec;
return floatval($uTime);
}
//////////////Start a timer
public static function TimerStart($key = 'default'){
$key = md5($key);
self::$timerList[$key] = self::GetMicroTime();
}
///////Stop a timer and return the elapsed time
public static function TimerStop($key = 'default',$precision = self::TIMER_PRECISION_MSEC){
$key = md5($key);
$startTime = self::$timerList[$key];
if(!isset($startTime)){
return 0;
}
$endTime = self::GetMicroTime();
$duration = $endTime - $startTime;
switch ($precision){
case self::TIMER_PRECISION_SEC://seconds
$duration = $duration / 1000000;
break;
case self::TIMER_PRECISION_MSEC://milliseconds
$duration = $duration / 1000;
break;
case self::TIMER_PRECISION_USEC:
break;
}
return $duration;
}
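// Illustrative usage sketch (not part of the original class): wrap a piece of
// work between the paired start/stop calls; the key only has to match.
//   Util_Time::TimerStart('db-query');
//   /* ... run the query ... */
//   $elapsedMs = Util_Time::TimerStop('db-query', Util_Time::TIMER_PRECISION_MSEC);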
///////
public static function getManReadTime($time,$currentTime = 0){
$currentTime = $currentTime ? $currentTime : time();
if(abs($time-$currentTime) <= 60){
return "现在";
}
$currentDate = date('Y-m-d',$currentTime);
$currentDateTime = strtotime($currentDate);
$date = date('Y-m-d',$time);
$dateTime = strtotime($date);
$dateDiff = $dateTime - $currentDateTime;
if($dateDiff >=0 && $dateDiff < 86400){
$showDay = "今天";
} else if($dateDiff >= 86400 && $dateDiff < 2*86400){
$showDay = "明天";
} else if($dateDiff > -86400 && $dateDiff < 0){
$showDay = "昨天";
} else {
$showDay = $date;
}
$hour = date("H",$time);
$hour = intval($hour);
if($hour < 7){
$halfDay = "早上";
} else if($hour >=7 && $hour < 11){
$halfDay = "上午";
} else if($hour >= 11 && $hour < 14){
$halfDay = "中午";
} else if($hour >= 14 && $hour < 18){
$halfDay = "下午";
} else {
$halfDay = "晚上";
}
$miniute = date("i",$time);
if(intval($miniute) == 0){
$showMiniute = "";
} else if(intval($miniute) == 15){
$showMiniute = "一刻";
} else if(intval($miniute) == 30){
$showMiniute = "半";
} else {
$showMiniute = $miniute .'分';
}
$showTime = "{$showDay}{$halfDay}{$hour}点{$showMiniute}";
return $showTime;
}
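// Example call (illustrative only):
//   echo Util_Time::getManReadTime(strtotime('+1 day')); // e.g. "明天..." plus the time-of-day wording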
}
|
apache-2.0
|
LordAkkarin/BaseDefense2
|
src/main/java/basedefense/client/renderer/gui/OutdatedVersionGUIRenderer.java
|
2485
|
/*
* Copyright 2015 Johannes Donath <johannesd@torchmind.com>
* and other copyright owners as documented in the project's IP log.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package basedefense.client.renderer.gui;
import cpw.mods.fml.common.eventhandler.SubscribeEvent;
import lombok.AllArgsConstructor;
import net.minecraft.client.Minecraft;
import net.minecraft.client.gui.FontRenderer;
import net.minecraft.client.gui.GuiMainMenu;
import net.minecraft.util.EnumChatFormatting;
import net.minecraftforge.client.event.GuiScreenEvent;
import net.minecraftforge.common.ForgeVersion;
import net.minecraftforge.common.ForgeVersion.Status;
/**
* Provides an event listener used for rendering outdated version notices.
*
* @author Johannes Donath <a href="mailto:johannesd@torchmind.com">johannesd@torchmind.com</a>
*/
@AllArgsConstructor
public class OutdatedVersionGUIRenderer {
public static final String OUTDATED_MESSAGE = EnumChatFormatting.RED + "BaseDefense %s is available (%s installed)";
private final String installed;
private final String latest;
/**
* Handles {@link net.minecraftforge.client.event.GuiScreenEvent.DrawScreenEvent}.
*
* @param event The event.
*/
@SubscribeEvent
public void onGuiScreen (GuiScreenEvent.DrawScreenEvent.Post event) {
if (!(event.gui instanceof GuiMainMenu)) return;
GuiMainMenu mainMenu = ((GuiMainMenu) event.gui);
FontRenderer font = Minecraft.getMinecraft ().fontRenderer;
Status forgeStatus = ForgeVersion.getStatus ();
String line = String.format (OUTDATED_MESSAGE, this.latest, this.installed);
int lineWidth = font.getStringWidth (line);
mainMenu.drawString (font, line, ((mainMenu.width / 2) - (lineWidth / 2)), ((mainMenu.width - lineWidth) > 400 ? (mainMenu.height - font.FONT_HEIGHT - 1) : 1), -1);
}
}
|
apache-2.0
|
google/flax
|
tests/linen/linen_module_test.py
|
42380
|
# Copyright 2022 The Flax Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for flax.linen."""
import dataclasses
import functools
import operator
from absl.testing import absltest
from flax.linen.module import override_named_call
import jax
from jax import random
from jax import lax
from jax.nn import initializers
import jax.numpy as jnp
import numpy as np
from typing import (Any, Tuple, Iterable, Callable, Generic, TypeVar,
Mapping, NamedTuple)
from flax import linen as nn
from flax import errors
from flax import struct
from flax.linen import compact
from flax.core import Scope, freeze, tracers
# Parse absl flags test_srcdir and test_tmpdir.
jax.config.parse_flags_with_absl()
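# Test helper: reduces leaf-wise equality over two pytrees of identical structure.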
def tree_equals(x, y):
return jax.tree_util.tree_all(
jax.tree_multimap(operator.eq, x, y))
class DummyModule(nn.Module):
@compact
def __call__(self, x):
bias = self.param('bias', initializers.ones, x.shape)
return x + bias
class Dense(nn.Module):
features: int
@compact
def __call__(self, x):
kernel = self.param('kernel',
initializers.lecun_normal(),
(x.shape[-1], self.features))
y = jnp.dot(x, kernel)
return y
class ModuleTest(absltest.TestCase):
def test_init_module(self):
rngkey = jax.random.PRNGKey(0)
x = jnp.array([1.])
scope = Scope({}, {'params': rngkey}, mutable=['params'])
y = DummyModule(parent=scope)(x)
params = scope.variables()['params']
y2 = DummyModule(parent=scope.rewound())(x)
np.testing.assert_allclose(y, y2)
np.testing.assert_allclose(y, jnp.array([2.]))
self.assertEqual(params, {'bias': jnp.array([1.])})
def test_arg_module(self):
rngkey = jax.random.PRNGKey(0)
x = jnp.ones((10,))
scope = Scope({}, {'params': rngkey}, mutable=['params'])
y = Dense(3, parent=scope)(x)
params = scope.variables()['params']
y2 = Dense(3, parent=scope.rewound())(x)
np.testing.assert_allclose(y, y2)
self.assertEqual(params['kernel'].shape, (10, 3))
def test_util_fun(self):
rngkey = jax.random.PRNGKey(0)
class MLP(nn.Module):
@compact
def __call__(self, x):
x = self._mydense(x)
x = self._mydense(x)
return x
def _mydense(self, x):
return Dense(3)(x)
x = jnp.ones((10,))
scope = Scope({}, {'params': rngkey}, mutable=['params'])
y = MLP(parent=scope)(x)
params = scope.variables()['params']
y2 = MLP(parent=scope.rewound())(x)
np.testing.assert_allclose(y, y2)
param_shape = jax.tree_map(jnp.shape, params)
self.assertEqual(param_shape,
{'Dense_0': {'kernel': (10, 3)},
'Dense_1': {'kernel': (3, 3)}})
def test_nested_module_reuse(self):
rngkey = jax.random.PRNGKey(0)
class MLP(nn.Module):
@compact
def __call__(self, x):
x = self._mydense(x)
x = self._mydense(x)
return x
def _mydense(self, x):
return Dense(3)(x)
class Top(nn.Module):
@compact
def __call__(self, x):
mlp = MLP()
y = mlp(x)
z = mlp(x)
return y + z
x = jnp.ones((10,))
scope = Scope({}, {'params': rngkey}, mutable=['params'])
y = Top(parent=scope)(x)
params = scope.variables()['params']
y2 = Top(parent=scope.rewound())(x)
np.testing.assert_allclose(y, y2)
param_shape = jax.tree_map(jnp.shape, params)
self.assertEqual(param_shape,
{'MLP_0':
{'Dense_0': {'kernel': (10, 3)},
'Dense_1': {'kernel': (3, 3)}}})
def test_setup_dict_assignment(self):
rngkey = jax.random.PRNGKey(0)
class MLP(nn.Module):
def setup(self):
self.lyrs1 = {'a': Dense(3), 'b': Dense(3),}
self.lyrs2 = [Dense(3), Dense(3)]
def __call__(self, x):
y = self.lyrs1['a'](x)
z = self.lyrs1['b'](y)
#w = self.lyrs2[0](x)
return z
x = jnp.ones((10,))
scope = Scope({}, {'params': rngkey}, mutable=['params'])
y = MLP(parent=scope)(x)
params = scope.variables()['params']
y2 = MLP(parent=scope.rewound())(x)
np.testing.assert_allclose(y, y2)
param_shape = jax.tree_map(jnp.shape, params)
self.assertEqual(param_shape,
{'lyrs1_a': {'kernel': (10, 3)},
'lyrs1_b': {'kernel': (3, 3)}})
def test_setup_cloning(self):
class MLP(nn.Module):
def setup(self):
self.dense = Dense(3)
scope = Scope({})
MLPclone = MLP(parent=scope).clone()
def test_submodule_attr(self):
rngkey = jax.random.PRNGKey(0)
class Inner(nn.Module):
@compact
def __call__(self):
self.param('x', lambda rng: 40)
class Outer(nn.Module):
inner: nn.Module
def __call__(self):
return self.inner()
class Wrapper(nn.Module):
def setup(self):
self.inner = Inner()
self.outer = Outer(self.inner)
def __call__(self):
return self.outer()
scope = Scope({'params': {}}, rngs={'params': rngkey}, mutable=['params'])
# Make sure this doesn't raise "Can't attach to remote parent"
wrapper = Wrapper(parent=scope)
wrapper()
# Make sure that variables are registered at the level of the
# Wrapper submodule, not the Outer submodule.
self.assertEqual(40, scope.variables()['params']['inner']['x'])
def test_param_in_setup(self):
rngkey = jax.random.PRNGKey(0)
class DummyModule(nn.Module):
xshape: Tuple[int]
def setup(self):
self.bias = self.param('bias', initializers.ones, self.xshape)
def __call__(self, x):
return x + self.bias
x = jnp.array([1.])
scope = Scope({}, {'params': rngkey}, mutable=['params'])
y = DummyModule(x.shape, parent=scope)(x)
params = scope.variables()['params']
y2 = DummyModule(x.shape, parent=scope.rewound())(x)
np.testing.assert_allclose(y, y2)
np.testing.assert_allclose(y, jnp.array([2.]))
self.assertEqual(params, {'bias': jnp.array([1.])})
def test_init_outside_setup_without_compact(self):
rngkey = jax.random.PRNGKey(0)
class DummyModule(nn.Module):
def __call__(self, x):
bias = self.param('bias', initializers.ones, x.shape)
return x + bias
x = jnp.array([1.])
scope = Scope({}, {'params': rngkey}, mutable=['params'])
with self.assertRaisesRegex(ValueError, 'must be initialized.*setup'):
y = DummyModule(parent=scope)(x)
def test_init_outside_call(self):
rngkey = jax.random.PRNGKey(0)
class Dummy(nn.Module):
@compact
def __call__(self, x):
bias = self.param('bias', initializers.ones, x.shape)
return x + bias
def foo(self, x):
bias = self.param('bias', initializers.ones, x.shape)
return x + bias
x = jnp.array([1.])
scope = Scope({}, {'params': rngkey}, mutable=['params'])
with self.assertRaisesRegex(ValueError, 'must be initialized.*setup'):
y = Dummy(parent=scope).foo(x)
def test_setup_call_var_collision(self):
rngkey = jax.random.PRNGKey(0)
class Dummy(nn.Module):
xshape: Tuple[int]
def setup(self):
self.bias = self.param('bias', initializers.ones, self.xshape)
@compact
def __call__(self, x):
bias = self.param('bias', initializers.ones, x.shape)
return x + self.bias
x = jnp.array([1.])
scope = Scope({}, {'params': rngkey}, mutable=['params'])
msg = 'Could not create param "bias" in Module Dummy: Name in use'
with self.assertRaisesRegex(errors.NameInUseError, msg):
y = Dummy(x.shape, parent=scope)(x)
def test_call_var_collision(self):
rngkey = jax.random.PRNGKey(0)
class Dummy(nn.Module):
xshape: Tuple[int]
@compact
def __call__(self, x):
bias = self.param('bias', initializers.ones, self.xshape)
bias = self.param('bias', initializers.ones, self.xshape)
return x + bias
x = jnp.array([1.])
scope = Scope({}, {'params': rngkey}, mutable=['params'])
msg = 'Could not create param "bias" in Module Dummy: Name in use'
with self.assertRaisesRegex(errors.NameInUseError, msg):
y = Dummy(x.shape, parent=scope)(x)
def test_setup_var_collision(self):
rngkey = jax.random.PRNGKey(0)
class Dummy(nn.Module):
xshape: Tuple[int]
def setup(self):
self.bias = self.param('bias', initializers.ones, self.xshape)
self.bias = self.param('bias', initializers.ones, self.xshape)
def __call__(self, x):
return x + self.bias
x = jnp.array([1.])
scope = Scope({}, {'params': rngkey}, mutable=['params'])
msg = 'Could not create param "bias" in Module Dummy: Name in use'
with self.assertRaisesRegex(errors.NameInUseError, msg):
y = Dummy(x.shape, parent=scope)(x)
def test_setattr_name_var_disagreement_allowed_in_lists(self):
rngkey = jax.random.PRNGKey(0)
class Dummy(nn.Module):
xshape: Tuple[int]
def setup(self):
self.biases = [
self.param(f'bias_{i}', initializers.ones, self.xshape)
for i in range(4)]
def __call__(self, x):
return x + self.biases[0]
x = jnp.array([1.])
scope = Scope({}, {'params': rngkey}, mutable=['params'])
y = Dummy(x.shape, parent=scope)(x)
self.assertEqual(y, jnp.array([2.]))
def test_setattr_name_var_disagreement_allowed_in_dicts(self):
rngkey = jax.random.PRNGKey(0)
class Dummy(nn.Module):
xshape: Tuple[int]
def setup(self):
self.biases = {
# NOTE that keys still must be strings. This is to make a possible
# future transition to automatically derived parameter names when assigned
# as a dict easier (like we currently have with submodules).
# See a bit of discussion here: https://github.com/google/flax/issues/705#issuecomment-738761853
str(i): self.param(f'bias_{i}', initializers.ones, self.xshape)
for i in range(4)}
def __call__(self, x):
return x + self.biases['0']
x = jnp.array([1.])
scope = Scope({}, {'params': rngkey}, mutable=['params'])
y = Dummy(x.shape, parent=scope)(x)
self.assertEqual(y, jnp.array([2.]))
def test_submodule_var_collision_with_scope(self):
rngkey = jax.random.PRNGKey(0)
class Dummy(nn.Module):
xshape: Tuple[int]
def setup(self):
self.bias = self.param('bias', initializers.ones, self.xshape)
self.bias = DummyModule()
def __call__(self, x):
return x + self.bias
x = jnp.array([1.])
scope = Scope({}, {'params': rngkey}, mutable=['params'])
msg = 'Duplicate use of scope name: "bias"'
with self.assertRaisesWithLiteralMatch(ValueError, msg):
y = Dummy(x.shape, parent=scope)(x)
def test_submodule_var_collision_with_submodule(self):
rngkey = jax.random.PRNGKey(0)
class Dummy(nn.Module):
xshape: Tuple[int]
def setup(self):
self.bias = self.param('bias', initializers.ones, self.xshape)
@compact
def __call__(self, x):
bias = DummyModule(name='bias')
return x + self.bias
x = jnp.array([1.])
scope = Scope({}, {'params': rngkey}, mutable=['params'])
msg = 'Could not create submodule "bias" in Module Dummy: Name in use'
with self.assertRaisesRegex(errors.NameInUseError, msg):
y = Dummy(x.shape, parent=scope)(x)
def test_submodule_var_collision_with_params(self):
rngkey = jax.random.PRNGKey(0)
class Dummy(nn.Module):
xshape: Tuple[int]
def setup(self):
self.bias = DummyModule()
@compact
def __call__(self, x):
bias = self.param('bias', initializers.ones, self.xshape)
return x + self.bias
x = jnp.array([1.])
scope = Scope({}, {'params': rngkey}, mutable=['params'])
msg = 'Could not create param "bias" in Module Dummy: Name in use'
with self.assertRaisesRegex(errors.NameInUseError, msg):
y = Dummy(x.shape, parent=scope)(x)
def test_attr_empty_container(self):
class Foo(nn.Module):
bar: Mapping[str, Any]
@compact
def __call__(self):
pass
Foo({"a": ()}).apply({})
def test_attr_param_name_collision(self):
rngkey = jax.random.PRNGKey(0)
class Dummy(nn.Module):
bias: bool
def setup(self):
self.bias = self.param('bias', initializers.ones, (3, 3))
def __call__(self, x):
return x + self.bias
x = jnp.array([1.])
scope = Scope({}, {'params': rngkey}, mutable=['params'])
msg = 'Could not create param "bias" in Module Dummy: Name in use'
with self.assertRaisesRegex(errors.NameInUseError, msg):
y = Dummy(x.shape, parent=scope)(x)
def test_attr_submodule_name_collision(self):
rngkey = jax.random.PRNGKey(0)
class Dummy(nn.Module):
bias: bool
def setup(self):
self.bias = DummyModule(name='bias')
def __call__(self, x):
return self.bias(x)
x = jnp.array([1.])
scope = Scope({}, {'params': rngkey}, mutable=['params'])
msg = 'Could not create submodule "bias" in Module Dummy: Name in use'
with self.assertRaisesRegex(errors.NameInUseError, msg):
y = Dummy(x.shape, parent=scope)(x)
def test_only_one_compact_method(self):
msg = 'Only one method per class can be @compact'
with self.assertRaisesRegex(errors.MultipleMethodsCompactError, msg):
class Dummy(nn.Module):
@compact
def call1(self):
pass
@compact
def call2(self):
pass
def test_only_one_compact_method_subclass(self):
class Dummy(nn.Module):
@nn.compact
def __call__(self):
pass
class SubDummy(Dummy):
@nn.compact
def __call__(self):
super().__call__()
scope = Scope(variables={})
subdummy = SubDummy(parent=scope)
# Make sure the @compact annotation is valid on both base class and subclass, as long
# as it's on the same method.
subdummy()
def test_forgotten_compact_annotation(self):
class Bar(nn.Module):
# user forgot to add @compact
def __call__(self, x):
return nn.Dense(1)(x)
class Foo(nn.Module):
@nn.compact
def __call__(self, x):
bar = Bar()
x = bar(x)
x = bar(x)
return x
msg = (r'Submodule Dense must be defined in `setup\(\)` or in a method '
'wrapped in `@compact`')
with self.assertRaisesRegex(errors.AssignSubModuleError, msg):
Foo().init(random.PRNGKey(0), jnp.ones((1, 3)))
def test_forgotten_compact_annotation_with_explicit_parent(self):
class Bar(nn.Module):
def __call__(self, x):
return nn.Dense(1, parent=self)(x)
class Foo(nn.Module):
@nn.compact
def __call__(self, x):
bar = Bar()
x = bar(x)
x = bar(x)
return x
msg = (r'Submodule Dense must be defined in `setup\(\)` or in a method '
'wrapped in `@compact`')
with self.assertRaisesRegex(errors.AssignSubModuleError, msg):
Foo().init(random.PRNGKey(0), jnp.ones((1, 3)))
def test_numpy_array_shape_class_args(self):
class MLP(nn.Module):
widths: Iterable
@nn.compact
def __call__(self, x):
for width in self.widths[:-1]:
x = nn.relu(nn.Dense(width)(x))
return nn.Dense(self.widths[-1])(x)
test = MLP(np.array([3, 3], np.int32))
params = test.init({'params': random.PRNGKey(42)}, jnp.ones((3, 3)))
_ = test.apply(params, jnp.ones((3, 3)))
def test_get_local_methods(self):
class Base:
@staticmethod
def bar(x):
return x
@classmethod
def baz(cls, x):
return x
def bleep(self, x):
return x
class Derived1(Base):
@staticmethod
def bar2(x):
return x
@classmethod
def baz2(cls, x):
return x
def bloop(self, x):
return x
class Derived2(Derived1):
pass
self.assertEqual(nn.module._get_local_method_names(Base), ('bleep',))
self.assertEqual(nn.module._get_local_method_names(Derived1), ('bloop',))
self.assertEqual(
nn.module._get_local_method_names(Derived1, exclude=('bloop',)), ())
self.assertEqual(nn.module._get_local_method_names(Derived2), ())
def test_inheritance_dataclass_attribs(self):
class Test(nn.Module):
bar: int
def __call__(self, x):
return x
class Test2(Test):
baz: int
def __call__(self, x):
return x
class Test3(Test):
baz: int
def __call__(self, x):
return x
class Test4(Test2):
def __call__(self, x):
return x
key = random.PRNGKey(0)
x = jnp.ones((5,))
test1 = Test(bar=4)
test2 = Test2(bar=4, baz=2)
test3 = Test3(bar=4, baz=2)
test4 = Test4(bar=5, baz=3)
self.assertEqual(test1.init_with_output(key, x), (x, freeze({})))
self.assertEqual(test2.init_with_output(key, x), (x, freeze({})))
self.assertEqual(test3.init_with_output(key, x), (x, freeze({})))
self.assertEqual(test4.init_with_output(key, x), (x, freeze({})))
self.assertTrue(hasattr(test1, 'bar'))
self.assertTrue(hasattr(test1, 'name'))
self.assertTrue(hasattr(test1, 'parent'))
self.assertTrue(hasattr(test2, 'bar'))
self.assertTrue(hasattr(test2, 'baz'))
self.assertTrue(hasattr(test2, 'name'))
self.assertTrue(hasattr(test2, 'parent'))
self.assertTrue(hasattr(test3, 'bar'))
self.assertTrue(hasattr(test3, 'baz'))
self.assertTrue(hasattr(test3, 'name'))
self.assertTrue(hasattr(test3, 'parent'))
self.assertTrue(hasattr(test4, 'bar'))
self.assertTrue(hasattr(test4, 'baz'))
self.assertTrue(hasattr(test4, 'name'))
self.assertTrue(hasattr(test4, 'parent'))
self.assertEqual(
list(Test.__dataclass_fields__.keys()),
['bar', 'parent', 'name'])
self.assertEqual(
list(Test2.__dataclass_fields__.keys()),
['bar', 'baz', 'parent', 'name'])
self.assertEqual(
list(Test3.__dataclass_fields__.keys()),
['bar', 'baz', 'parent', 'name'])
self.assertEqual(
list(Test4.__dataclass_fields__.keys()),
['bar', 'baz', 'parent', 'name'])
def test_get_suffix_value_pairs(self):
for x in [(), [], {}, None, 0, set()]:
self.assertEqual(
nn.module._get_suffix_value_pairs(x), [('', x)])
self.assertEqual(
nn.module._get_suffix_value_pairs(
{'a': 1, 'b': 2}), [('_a', 1), ('_b', 2)])
self.assertEqual(
nn.module._get_suffix_value_pairs(
[1, 2, 3]), [('_0', 1), ('_1', 2), ('_2', 3)])
x1 = [nn.Dense(10), nn.relu, nn.Dense(10)]
y1 = nn.module._get_suffix_value_pairs(x1)
self.assertEqual(y1, [('_0', x1[0]), ('_1', x1[1]), ('_2', x1[2])])
x2 = {'a': 1, 'b': {'c': nn.Dense(10), 'd': nn.relu}}
y2 = nn.module._get_suffix_value_pairs(x2)
self.assertEqual(y2,
[('_a', 1), ('_b_c', x2['b']['c']), ('_b_d', x2['b']['d'])])
def test_mixed_list_assignment_in_setup(self):
class Test(nn.Module):
def setup(self):
self.layers = [nn.Dense(10), nn.relu, nn.Dense(10)]
def __call__(self, x):
for lyr in self.layers:
x = lyr(x)
return x
x = random.uniform(random.PRNGKey(0), (5,5))
variables = Test().init(random.PRNGKey(0), jnp.ones((5,5)))
y = Test().apply(variables, x)
m0 = variables['params']['layers_0']['kernel']
m1 = variables['params']['layers_2']['kernel']
self.assertTrue(jnp.all(y == jnp.dot(nn.relu(jnp.dot(x, m0)), m1)))
def test_module_is_hashable(self):
module_a = nn.Dense(10)
module_a_2 = nn.Dense(10)
module_b = nn.Dense(5)
self.assertEqual(hash(module_a), hash(module_a_2))
self.assertNotEqual(hash(module_a), hash(module_b))
def test_module_custom_hash(self):
class Test(nn.Module):
x: int = 3
y: int = 5
def __hash__(self):
return 42 + self.x
module_a = Test(1, 2)
module_a_2 = Test(1, 5)
module_b = Test(2, 2)
self.assertEqual(hash(module_a), hash(module_a_2))
self.assertNotEqual(hash(module_a), hash(module_b))
def test_module_with_scope_is_not_hashable(self):
module_a = nn.Dense(10, parent=Scope({}))
msg = 'Can\'t call __hash__ on modules that hold variables.'
with self.assertRaisesWithLiteralMatch(TypeError, msg):
hash(module_a)
def test_module_trace(self):
class MLP(nn.Module):
act: Callable = nn.relu
sizes: Iterable[int] = (3, 2)
@nn.compact
def __call__(self, x):
for size in self.sizes:
x = nn.Dense(size)(x)
x = self.act(x)
return repr(self)
mlp = MLP()
expected_trace = (
"""MLP(
# attributes
act = relu
sizes = (3, 2)
# children
Dense_0 = Dense(
# attributes
features = 3
use_bias = True
dtype = float32
param_dtype = float32
precision = None
kernel_init = init
bias_init = zeros
)
Dense_1 = Dense(
# attributes
features = 2
use_bias = True
dtype = float32
param_dtype = float32
precision = None
kernel_init = init
bias_init = zeros
)
)""")
x = jnp.ones((1, 2))
trace, variables = mlp.init_with_output(random.PRNGKey(0), x)
self.assertEqual(trace, expected_trace)
trace = mlp.apply(variables, x)
self.assertEqual(trace, expected_trace)
def test_module_apply_method(self):
class Foo(nn.Module):
@nn.compact
def __call__(self):
pass
def test(self):
pass
# We can use both instance and class methods in apply.
Foo().apply({}, method=Foo.test)
Foo().apply({}, method=Foo().test)
# We can also use a function that is not defined on the provided Module, as
# long as its first argument represents an instance of the Module (Foo
# in this case).
x = Foo().apply({}, method=lambda foo_instance: foo_instance)
self.assertEqual(type(x), type(Foo()))
# This is not allowed.
msg = 'Cannot call apply()'
with self.assertRaisesRegex(errors.ApplyModuleInvalidMethodError, msg):
Foo().apply({}, method=lambda: True)
with self.assertRaisesRegex(errors.ApplyModuleInvalidMethodError, msg):
Foo().apply({}, method='allowed_apply_fn')
def test_call_unbound_compact_module_methods(self):
dense = Dense(3)
msg = r'Can\'t call compact methods on unbound modules'
with self.assertRaisesRegex(errors.CallCompactUnboundModuleError, msg):
dense(jnp.ones((1, )))
def test_call_unbound_has_variable(self):
class EmptyModule(nn.Module):
def foo(self):
self.has_variable('bar', 'baz')
empty = EmptyModule()
with self.assertRaisesRegex(ValueError, "variable.*unbound module"):
empty.foo()
def test_call_unbound_make_rng(self):
class EmptyModule(nn.Module):
def foo(self):
self.make_rng('bar')
empty = EmptyModule()
with self.assertRaisesRegex(ValueError, "RNGs.*unbound module"):
empty.foo()
def test_call_unbound_variables(self):
class EmptyModule(nn.Module):
def foo(self):
self.variables
empty = EmptyModule()
with self.assertRaisesRegex(ValueError, "variables.*unbound module"):
empty.foo()
def test_call_unbound_noncompact_module_methods(self):
class EmptyModule(nn.Module):
foo: int = 3
def bar(self):
return self.foo
empty = EmptyModule()
# It's fine to call methods of unbound modules that don't depend on
# attributes defined during `setup`.
self.assertEqual(empty.bar(), 3)
def test_call_unbound_noncompact_module_methods_depending_on_setup(self):
class EmptyModule(nn.Module):
def setup(self):
self.foo = 2
def bar(self):
return self.foo
empty = EmptyModule()
msg = r'"EmptyModule" object has no attribute "foo"'
with self.assertRaisesRegex(AttributeError, msg):
empty.bar()
def test_module_with_attrs(self):
class Foo(nn.Module):
bar: nn.Dense = dataclasses.field(init=False)
def setup(self):
self.bar = nn.Dense(3)
def __call__(self, x):
return self.bar(x)
foo = Foo()
x = jnp.ones((2,))
variables = foo.init(random.PRNGKey(0), x)
self.assertEqual(variables['params']['bar']['kernel'].shape, (2, 3))
def test_noncompact_module_frozen(self):
class Foo(nn.Module):
def setup(self):
self.i = 1 # This is allowed (for assigning submodules).
def __call__(self):
self.i = 2 # This is not allowed.
msg = ('Can\'t set i=2 for Module of type Foo: Module instance is frozen '
'outside of setup method.')
with self.assertRaisesRegex(errors.SetAttributeFrozenModuleError, msg):
Foo().init(random.PRNGKey(0))
def test_compact_module_frozen(self):
class Foo(nn.Module):
@nn.compact
def __call__(self):
self.i = 2
msg = ('Can\'t set i=2 for Module of type Foo: Module instance is frozen '
'outside of setup method.')
with self.assertRaisesRegex(errors.SetAttributeFrozenModuleError, msg):
Foo().init(random.PRNGKey(0))
def test_submodule_frozen(self):
class Foo(nn.Module):
@nn.compact
def __call__(self):
dense = nn.Dense(10)
dense.features = 20 # <--- This is not allowed
msg = ('Can\'t set features=20 for Module of type Dense: Module instance '
'is frozen outside of setup method.')
with self.assertRaisesRegex(errors.SetAttributeFrozenModuleError, msg):
Foo().init(random.PRNGKey(0))
def test_module_call_not_implemented(self):
class Foo(nn.Module):
pass
msg = '"Foo" object has no attribute "__call__"'
with self.assertRaisesRegex(AttributeError, msg):
Foo().init(random.PRNGKey(0))
def test_is_mutable_collection(self):
class EmptyModule(nn.Module):
def __call__(self):
return self.is_mutable_collection('test')
empty = EmptyModule()
self.assertTrue(empty.apply({}, mutable=['test'])[0])
self.assertFalse(empty.apply({}, mutable=False))
def test_module_lazy_getattr_setup(self):
class A(nn.Module):
def setup(self):
self.d = nn.Dense(2)
def __call__(self, x):
return self.d(x)
class B(nn.Module):
def setup(self):
self.a = A()
def __call__(self, x):
y1 = self.a.d(x)
y2 = self.a(x)
return y1, y2
key = random.PRNGKey(0)
x = jnp.ones((2,))
(y1, y2), p = B().init_with_output(key, x)
np.testing.assert_array_equal(y1, y2)
def test_module_lazy_dir_setup(self):
class A(nn.Module):
def setup(self):
self.d = nn.Dense(2)
def __call__(self, x):
return self.d(x)
class B(nn.Module):
def setup(self):
self.a = A()
def __call__(self, x):
assert 'd' in dir(self.a)
y1 = self.a.d(x)
y2 = self.a(x)
return y1, y2
key = random.PRNGKey(0)
x = jnp.ones((2,))
_ = B().init_with_output(key, x)
def test_module_unbound_getattr(self):
class A(nn.Module):
def setup(self):
b = B()
b.c # B is unbound because it is not yet assigned to an attribute.
self.b = b
def __call__(self):
pass
class B(nn.Module):
def setup(self):
self.c = nn.Dense(2)
msg = '"B" object has no attribute "c"'
with self.assertRaisesRegex(AttributeError, msg):
A().init(random.PRNGKey(0))
def test_unbound_setup_call(self):
setup_called = False
class A(nn.Module):
def setup(self):
nonlocal setup_called
setup_called = True
def test(self):
pass
A().test()
self.assertFalse(setup_called)
def test_module_pass_as_attr(self):
class A(nn.Module):
def setup(self):
self.b = B(nn.Dense(2))
def __call__(self, x):
return self.b(x)
class B(nn.Module):
foo: Any
def __call__(self, x):
return self.foo(x)
variables = A().init(random.PRNGKey(0), jnp.ones((1,)))
var_shapes = jax.tree_map(jnp.shape, variables)
ref_var_shapes = freeze({
'params': {
'b': {
'foo': {
'bias': (2,),
'kernel': (1, 2),
}
},
},
})
self.assertTrue(tree_equals(var_shapes, ref_var_shapes))
def test_module_pass_in_closure(self):
a = nn.Dense(2)
class B(nn.Module):
def setup(self):
self.foo = a
def __call__(self, x):
return self.foo(x)
variables = B().init(random.PRNGKey(0), jnp.ones((1,)))
var_shapes = jax.tree_map(jnp.shape, variables)
ref_var_shapes = freeze({
'params': {
'foo': {
'bias': (2,),
'kernel': (1, 2),
}
},
})
self.assertTrue(tree_equals(var_shapes, ref_var_shapes))
self.assertEqual(a.name, None)
def test_toplevel_submodule_adoption(self):
class Encoder(nn.Module):
n_layers: int
ch: int
def setup(self):
self.layers = [nn.Dense(self.ch) for _ in range(self.n_layers)]
def __call__(self, x):
for layer in self.layers:
x = layer(x)
x = nn.relu(x)
return x
class Model(nn.Module):
encoder: nn.Module
n_out: int
def setup(self):
self.dense_out = nn.Dense(self.n_out)
def __call__(self, x):
x = self.encoder(x)
return self.dense_out(x)
# Define model.
encoder = Encoder(n_layers=1, ch=8)
model = Model(encoder=encoder, n_out=5)
# Initialize.
key = jax.random.PRNGKey(0)
x = random.uniform(key, (4, 4))
variables = model.init(key, x)
y = model.apply(variables, x)
self.assertEqual(y.shape, (4, 5))
var_shapes = jax.tree_map(jnp.shape, variables)
ref_var_shapes = freeze({
'params': {
'dense_out': {
'bias': (5,),
'kernel': (8, 5),
},
'encoder': {
'layers_0': {
'bias': (8,),
'kernel': (4, 8),
},
},
},
})
self.assertTrue(tree_equals(var_shapes, ref_var_shapes))
def test_toplevel_submodule_adoption_pytree(self):
class A(nn.Module):
@nn.compact
def __call__(self, c, x):
counter = self.variable('counter', 'i', jnp.zeros, ())
counter.value += 1
x = nn.Dense(1)(x)
return c, x
class B(nn.Module):
A: Any
@nn.compact
def __call__(self, c, x):
return self.A['foo'](*self.A['bar'](c, x))
a = A()
As = {'foo': A(), 'bar': A()}
b = B(As)
key = random.PRNGKey(0)
x = jnp.ones((2, 2))
p = B(As).init(key, x, x)
print('apply', x.shape)
y, cntrs = b.apply(p, x, x, mutable='counter')
ref_cntrs = freeze({
'counter': {
'A_bar': {
'i': jnp.array(2.0),
},
'A_foo': {
'i': jnp.array(2.0),
},
},
})
self.assertTrue(jax.tree_util.tree_all(
jax.tree_multimap(
lambda x, y: np.testing.assert_allclose(x, y, atol=1e-7),
cntrs, ref_cntrs)
))
def test_toplevel_submodule_adoption_sharing(self):
dense = functools.partial(nn.Dense, use_bias=False)
class A(nn.Module):
@nn.compact
def __call__(self, x):
return dense(2)(x)
class B(nn.Module):
a: nn.Module
@nn.compact
def __call__(self, x):
return dense(2)(x) + self.a(x)
class C(nn.Module):
a: nn.Module
b: nn.Module
@nn.compact
def __call__(self, x):
return dense(2)(x) + self.b(x) + self.a(x)
key = random.PRNGKey(0)
x = jnp.ones((2, 2))
a = A()
b = B(a)
c = C(a, b)
p = c.init(key, x)
var_shapes = jax.tree_map(jnp.shape, p)
ref_var_shapes = freeze({
'params': {
'Dense_0': {
'kernel': (2, 2),
},
'a': {
'Dense_0': {
'kernel': (2, 2),
},
},
'b': {
'Dense_0': {
'kernel': (2, 2),
},
},
},
})
self.assertTrue(tree_equals(var_shapes, ref_var_shapes))
def test_toplevel_named_submodule_adoption(self):
dense = functools.partial(nn.Dense, use_bias=False)
class A(nn.Module):
def setup(self):
self.dense = dense(4)
def __call__(self, x):
return self.dense(x)
class B(nn.Module):
a: A
def setup(self):
self.proj = dense(6)
def __call__(self, x):
return self.proj(self.a(x))
a = A(name='foo')
b = B(a=a)
k = jax.random.PRNGKey(0)
x = jnp.zeros((5,5))
init_vars = b.init(k, x)
var_shapes = jax.tree_map(jnp.shape, init_vars)
ref_var_shapes = freeze({
'params': {
'a': {
'dense': {
'kernel': (5, 4),
},
},
'proj': {
'kernel': (4, 6),
},
},
})
self.assertTrue(tree_equals(var_shapes, ref_var_shapes))
def test_toplevel_submodule_pytree_adoption_sharing(self):
class A(nn.Module):
@nn.compact
def __call__(self, x):
counter = self.variable('counter', 'i', jnp.zeros, ())
counter.value += 1
x = nn.Dense(1)(x)
return x
class B(nn.Module):
A: Any
@nn.compact
def __call__(self, x):
return self.A['foo'](x) + self.A['bar'](x) + self.A['baz'](x)
key = random.PRNGKey(0)
x = jnp.ones((2, 2))
a = A()
As = {'foo': a, 'bar': a, 'baz': a}
b = B(As)
p = b.init(key, x)
_, cntrs = b.apply(p, x, mutable='counter')
ref_cntrs = freeze({
'counter': {
'A_bar': {
'i': jnp.array(6.0),
},
},
})
self.assertTrue(tree_equals(cntrs, ref_cntrs))
def test_inner_class_def(self):
class X(nn.Module):
class Hyper(struct.PyTreeNode):
a: int
hyper: Hyper
@nn.compact
def __call__(self, x):
return x+1
self.assertTrue(isinstance(X.Hyper(a=1), X.Hyper))
def test_sow(self):
class Foo(nn.Module):
@nn.compact
def __call__(self, x, **sow_args):
self.sow('intermediates', 'h', x, **sow_args)
self.sow('intermediates', 'h', 2 * x, **sow_args)
return 3 * x
variables = Foo().init(random.PRNGKey(0), 1)
# during init we should not collect intermediates by default...
self.assertTrue('intermediates' not in variables)
# ...unless we override mutable
variables = Foo().init(random.PRNGKey(0), 1, mutable=True)
self.assertEqual(variables, {
'intermediates': {'h': (1, 2)}
})
_, state = Foo().apply({}, 1, mutable=['intermediates'])
self.assertEqual(state, {
'intermediates': {'h': (1, 2)}
})
_, state = Foo().apply(
{}, 1,
init_fn=lambda: 0,
reduce_fn=lambda a, b: a + b,
mutable=['intermediates'])
self.assertEqual(state, {
'intermediates': {'h': 3}
})
self.assertEqual(Foo().apply({}, 1), 3)
def test_capture_intermediates(self):
class Bar(nn.Module):
def test(self, x):
return x + 1
class Foo(nn.Module):
@nn.compact
def __call__(self, x):
return Bar().test(x) + 1
_, state = Foo().apply({}, 1, capture_intermediates=True)
self.assertEqual(state, {
'intermediates': {'__call__': (3,)}
})
fn = lambda mdl, _: isinstance(mdl, Bar)
_, state = Foo().apply({}, 1, capture_intermediates=fn)
self.assertEqual(state, {
'intermediates': {'Bar_0': {'test': (2,)}}
})
def test_functional_apply(self):
class Foo(nn.Module):
def setup(self):
self.a = nn.Dense(3)
self.b = nn.Dense(1)
def f(foo, x):
x = foo.a(x)
return foo.b(x)
foo = Foo()
x = jnp.ones((4,))
f_init = nn.init_with_output(f, foo)
f_apply = nn.apply(f, foo)
y1, variables = f_init(random.PRNGKey(0), x)
y2 = f_apply(variables, x)
self.assertEqual(y1, y2)
def test_bind(self):
class Foo(nn.Module):
def setup(self):
self.a = nn.Dense(3)
self.b = nn.Dense(1)
def f(foo, x):
x = foo.a(x)
return foo.b(x)
foo = Foo()
x = jnp.ones((4,))
f_init = nn.init_with_output(f, foo)
y1, variables = f_init(random.PRNGKey(0), x)
y2 = f(foo.bind(variables), x)
self.assertEqual(y1, y2)
def test_bind_stateful(self):
class Foo(nn.Module):
def setup(self):
self.a = nn.Dense(3)
self.bn = nn.BatchNorm()
self.b = nn.Dense(1)
def f(foo, x):
x = foo.a(x)
x = foo.bn(x, use_running_average=False)
return foo.b(x)
foo = Foo()
x = jnp.ones((4,))
f_init = nn.init_with_output(f, foo)
y1, variables = f_init(random.PRNGKey(0), x)
foo_b = foo.bind(variables, mutable='batch_stats')
y2 = f(foo_b, x)
y3, new_state = nn.apply(f, foo, mutable='batch_stats')(variables, x)
self.assertEqual(y1, y2)
self.assertEqual(y2, y3)
bs_1 = new_state['batch_stats']
bs_2 = foo_b.variables['batch_stats']
for x, y in zip(jax.tree_leaves(bs_1), jax.tree_leaves(bs_2)):
np.testing.assert_allclose(x, y)
def test_passing_mutable_variables(self):
class Foo(nn.Module):
@nn.compact
def __call__(self, x):
return nn.Dense(2)(x)
x = jnp.ones((3,))
variables = Foo().init(random.PRNGKey(0), x)
variables = variables.unfreeze()
y = Foo().apply(variables, x)
self.assertEqual(y.shape, (2,))
def test_super_compact(self):
class Foo(nn.Module):
@nn.compact
def __call__(self, x):
return nn.Dense(4)(x)
class Bar(Foo):
@nn.compact
def __call__(self, x):
y = super().__call__(x)
return nn.Dense(3)(y)
k = random.PRNGKey(0)
x = jnp.ones((4, 7))
variables = Bar().init(k, x)
shapes = jax.tree_map(np.shape, variables['params'])
self.assertEqual(shapes, {
'Dense_0': {'kernel': (7, 4), 'bias': (4,)},
'Dense_1': {'kernel': (4, 3), 'bias': (3,)},
})
y = Bar().apply(variables, x)
self.assertEqual(y.shape, (4, 3))
def test_super_setup(self):
class Foo(nn.Module):
def setup(self):
self.a = nn.Dense(4)
class Bar(Foo):
def setup(self):
super().setup()
self.b = nn.Dense(3)
def __call__(self, x):
y = self.a(x)
return self.b(y)
k = random.PRNGKey(0)
x = jnp.ones((4, 7))
variables = Bar().init(k, x)
y = Bar().apply(variables, x)
self.assertEqual(y.shape, (4, 3))
def test_freeze_attr(self):
class Foo(NamedTuple):
a: int
b: int
self.assertEqual(nn.module._freeze_attr([1, 2]), (1, 2))
xs = nn.module._freeze_attr(Foo(1, 2))
self.assertEqual(xs, (1, 2))
self.assertEqual(type(xs), Foo) # equality test for NamedTuple doesn't check class!
def test_generic_multiple_inheritance(self):
T = TypeVar('T')
class MyComponent(nn.Module, Generic[T]):
pass
class MyModule(nn.Module):
submodule: MyComponent[jnp.ndarray]
class MyComponent2(Generic[T], nn.Module):
pass
class MyModule2(nn.Module):
submodule: MyComponent2[jnp.ndarray]
def test_named_call_rng_equivalance(self):
model = nn.Dense(1, use_bias=False)
with override_named_call(False):
param = model.init(random.PRNGKey(0), np.ones((1, 1)))["params"]["kernel"]
with override_named_call(True):
param_2 = model.init(random.PRNGKey(0), np.ones((1, 1)))["params"]["kernel"]
self.assertEqual(param, param_2)
def test_rng_reuse_after_rewind(self):
class C(nn.Module):
@nn.compact
def __call__(self):
# Some module that has dropouts in it, in general,
# it does more than just dropout!
return self.make_rng('dropout')
class A(nn.Module):
@nn.compact
def __call__(self):
# Some module that has dropouts in it, in general,
# it does more than just dropout!
return C()()
class B(nn.Module):
@nn.compact
def __call__(self):
a = A()
x0 = a()
x1 = a()
return jnp.alltrue(x0 == x1)
k = random.PRNGKey(0)
rng_equals = B().apply({}, rngs={'dropout': k})
self.assertFalse(rng_equals)
def test_module_get_put_has_variable(self):
class A(nn.Module):
@nn.compact
def __call__(self, x):
self.put_variable('test_col', 'a', x)
assert self.has_variable('test_col', 'a')
return self.get_variable('test_col', 'a')
class B(nn.Module):
def __call__(self, x):
self.put_variable('test_col', 'a', x)
assert self.has_variable('test_col', 'a')
return self.get_variable('test_col', 'a')
class C(nn.Module):
def setup(self):
self.put_variable('test_col', 'a', jnp.ones(2,))
assert self.has_variable('test_col', 'a')
def __call__(self):
return self.get_variable('test_col', 'a')
key = random.PRNGKey(0)
x = jnp.ones((2,))
y, vs = A().apply({}, x, mutable=['test_col'])
np.testing.assert_array_equal(x, y)
np.testing.assert_array_equal(x, vs['test_col']['a'])
y, vs = B().apply({}, x, mutable=['test_col'])
np.testing.assert_array_equal(x, y)
np.testing.assert_array_equal(x, vs['test_col']['a'])
y, vs = C().apply({}, mutable=['test_col'])
np.testing.assert_array_equal(y, jnp.ones((2,)))
np.testing.assert_array_equal(y, vs['test_col']['a'])
if __name__ == '__main__':
absltest.main()
|
apache-2.0
|
jxt1234/Simple3D
|
src/vertex/GL_Normal.cpp
|
722
|
#include "vertex/GL_Normal.h"
#include "math/GLVector.h"
#include <assert.h>
#define VECTOR_NUM 3
using namespace GLVector;
GL_Normal::GL_Normal(IVarying& pos):GLAbstractVarying(3)
{
assert(pos.unit()>=VECTOR_NUM);
assert(pos.size()%VECTOR_NUM == 0);
int size = pos.size();
vec3 v1;
vec3 v2;
vec3 out;
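// Each consecutive vertex triple forms a triangle: take two edge vectors from
// the first vertex, cross them to get the face normal, normalize it, and emit
// that normal once per vertex of the triangle.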
for (int i=0; i<size; i+=VECTOR_NUM)
{
float* p1 = pos.load(i);
float* p2 = pos.load(i+1);
float* p3 = pos.load(i+2);
minus(v1, p1, p2, VECTOR_NUM);
minus(v2, p1, p3, VECTOR_NUM);
cross(out, v1, v2);
normalize(out);
for (int j=0;j<VECTOR_NUM;++j)
{
addPoint(out[0], out[1], out[2]);
}
}
}
|
apache-2.0
|
GitHamburg/agent-mac
|
funcs/funcs.go
|
1082
|
package funcs
import (
"../g"
"github.com/open-falcon/common/model"
"log"
)
type FuncsAndInterval struct {
Fs []func() []*model.MetricValue
Interval int
}
var Mappers []FuncsAndInterval
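// BuildMappers wires up the metric collector functions, grouped by the shared
// reporting interval read from the transfer section of the agent config.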
func BuildMappers() {
interval := g.Config().Transfer.Interval
log.Print("BuildMappers,interval:")
log.Print(interval)
Mappers = []FuncsAndInterval{
FuncsAndInterval{
Fs: []func() []*model.MetricValue{
AgentMetrics,
CpuMetrics,
NetMetrics,
LoadAvgMetrics,
MemMetrics,
DiskIOMetrics,
//NetstatMetrics,
//ProcMetrics,
//UdpMetrics,
},
Interval: interval,
},
FuncsAndInterval{
Fs: []func() []*model.MetricValue{
DeviceMetrics,
},
Interval: interval,
},
FuncsAndInterval{
Fs: []func() []*model.MetricValue{
PortMetrics,
//SocketStatSummaryMetrics,
},
Interval: interval,
},
FuncsAndInterval{
Fs: []func() []*model.MetricValue{
DuMetrics,
},
Interval: interval,
},
FuncsAndInterval{
Fs: []func() []*model.MetricValue{
UrlMetrics,
},
Interval: interval,
},
}
}
|
apache-2.0
|
Zakeshu/Brain_Academy_Java
|
src/com/brainacad/module_2/LabWork_2_2/Main.java
|
1294
|
package com.brainacad.module_2.LabWork_2_2;
import java.io.IOException;
/**
* Created by a.zemlyanskiy on 30.09.2016.
* Create class Person with fields firstName(String), lastName(String), age(int), gender(String),
* phoneNumber(int), and five overloaded methods that set these fields with different argument lists.
* And create a class that will use an instance of class Person and its methods.
*/
public class Main {
public static void main(String[] args) {
Person person = new Person();
person.overloadedMethodsPerson("Alex", "Z", 29, "male", 76514525);
}
public static class Person {
private String firstName;
private String lastName;
private int age;
private String gender;
private int phoneNumber;
public void overloadedMethodsPerson() {
}
public void overloadedMethodsPerson(String firstName, String lastName) {
}
public void overloadedMethodsPerson(String firstName, String lastName, int age) {
}
public void overloadedMethodsPerson(String firstName, String lastName, int age, String gender) {
}
public void overloadedMethodsPerson(String firstName, String lastName, int age, String gender, int phoneNumber) {
}
}
}
|
apache-2.0
|
jamesnetherton/fuse-patch
|
core/src/main/java/org/wildfly/extras/patch/Server.java
|
2381
|
/*
* #%L
* Fuse Patch :: Core
* %%
* Copyright (C) 2015 Private
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package org.wildfly.extras.patch;
import java.io.IOException;
import java.net.URL;
import java.nio.file.Path;
import java.util.List;
/**
* A server instance.
*
* @author thomas.diesler@jboss.com
* @since 10-Jun-2015
*/
public interface Server {
PatchId SERVER_ID = PatchId.fromString("server");
/**
* Get the server home path
* @return The path to the server home
*/
Path getServerHome();
/**
* Get the default repository URL
* @return The URL for the default repository
*/
URL getDefaultRepositoryURL();
/**
* Get the audit log
* @return A list containing the audit log content
*/
List<String> getAuditLog();
/**
* Query the list of applied packages
* @return A list of applied patches
*/
List<PatchId> queryAppliedPatches();
/**
* Query managed server paths
* @param pathsPattern The path pattern to query for
* @return A list of managed server paths
*/
List<ManagedPath> queryManagedPaths(String pathsPattern);
/**
* Get the applied package for a given prefix
* @param prefix The patch prefix
* @return package or null
*/
Patch getPatch(String prefix);
/**
* Get the applied package for the given id
* @param patchId The patch id
* @return The patch
*/
Patch getPatch(PatchId patchId);
/**
* Apply a smart patch and return the result
* @param smartPatch The patch to apply
* @param force Whether to force application of the patch
* @return The patched that was applied
* @throws java.io.IOException If an IO exception occurred
*/
Patch applySmartPatch(SmartPatch smartPatch, boolean force) throws IOException;
/**
* Apply cleanup tasks to a server
*/
void cleanUp();
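// Illustrative usage sketch (how a caller might combine the queries above;
// obtaining the Server instance is outside the scope of this interface):
//   for (PatchId id : server.queryAppliedPatches()) {
//       Patch applied = server.getPatch(id);
//   }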
}
|
apache-2.0
|
pponec/ujorm
|
samples/generated/src/main/java/org/version1/bo/Item.java
|
144
|
/** Editing of the class is allowed. */
package org.version1.bo;
import org.version1.bo.gen._Item;
final public class Item extends _Item {
}
|
apache-2.0
|
B-Stefan/Sofia
|
service-answer/answer_service/tests/test_anwser_transformer.py
|
1097
|
from unittest import TestCase
from answer_service.answer_module import AnswerModule
test_url = 'http://osgi-api.cloud.conts.de'
import asyncio
class AnswerModuleTest (TestCase):
def test_get_bundles(self):
instance = AnswerModule(test_url, "", None)
result = instance.get_bundles()
self.assertIsNotNone(result )
self.assertIsInstance(result ,list)
self.assertGreater(len(result),10)
def test_get_classes(self):
instance = AnswerModule(test_url, "", None)
result = instance.get_classes()
self.assertIsNotNone(result)
self.assertIsInstance(result, list)
self.assertGreater(len(result), 10)
def test_more_info(self):
type = "namespace"
def askStr():
return "com.gui"
instance = AnswerModule(test_url, "", askStr)
# more_info is a coroutine; drive it to completion so the assertions below actually run
result_event = asyncio.get_event_loop().run_until_complete(instance.more_info(type))
self.assertIsNotNone(result_event)
self.assertIsInstance(result_event,object)
self.assertEqual(result_event['channel'],u'sofia.channel.{0}.messages.OutgoingSentence')
|
apache-2.0
|
skarsaune/hawtio
|
hawtio-system/src/main/java/io/hawt/HawtioContextListener.java
|
2498
|
package io.hawt;
import java.util.Objects;
import javax.servlet.ServletContextEvent;
import javax.servlet.ServletContextListener;
import io.hawt.jmx.About;
import io.hawt.jmx.JMXSecurity;
import io.hawt.jmx.JmxTreeWatcher;
import io.hawt.jmx.PluginRegistry;
import io.hawt.jmx.RBACRegistry;
import io.hawt.system.ConfigManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* A {@link javax.servlet.ServletContextListener} which initialises key hawtio services in the webapp
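 * <p>
 * A listener like this is typically registered in the webapp's {@code web.xml}
 * (illustrative sketch, not taken from this project's descriptor):
 * <pre>{@code
 * <listener>
 *   <listener-class>io.hawt.HawtioContextListener</listener-class>
 * </listener>
 * }</pre>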
*/
public class HawtioContextListener implements ServletContextListener {
private static final Logger LOGGER = LoggerFactory.getLogger(HawtioContextListener.class);
private final About about;
private final JmxTreeWatcher treeWatcher;
private final PluginRegistry registry;
private final ConfigManager configManager;
private final JMXSecurity jmxSecurity;
private final RBACRegistry rbacRegistry;
public HawtioContextListener() {
this(new ConfigManager());
}
public HawtioContextListener(final ConfigManager configManager) {
this.configManager = Objects.requireNonNull(configManager);
this.about = new About();
this.treeWatcher = new JmxTreeWatcher();
this.registry = new PluginRegistry();
this.jmxSecurity = new JMXSecurity();
this.rbacRegistry = new RBACRegistry();
}
public void contextInitialized(ServletContextEvent servletContextEvent) {
LOGGER.info("Initialising hawtio services");
try {
about.init();
configManager.init(servletContextEvent.getServletContext());
treeWatcher.init();
registry.init();
jmxSecurity.init();
rbacRegistry.init();
} catch (Exception e) {
throw createServletException(e);
}
servletContextEvent.getServletContext().setAttribute(ConfigManager.CONFIG_MANAGER, configManager);
}
public void contextDestroyed(ServletContextEvent servletContextEvent) {
LOGGER.info("Destroying hawtio services");
try {
rbacRegistry.destroy();
about.destroy();
treeWatcher.destroy();
registry.destroy();
configManager.destroy();
jmxSecurity.destroy();
} catch (Exception e) {
throw createServletException(e);
}
}
protected RuntimeException createServletException(Exception e) {
return new RuntimeException(e);
}
}
|
apache-2.0
|
eman-1111/GoogleCodelabs
|
android_testing/src/main/java/ides/link/androidtesting/notes/notes/NotesFragment.java
|
8793
|
/*
* Copyright 2015, The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ides.link.androidtesting.notes.notes;
import ides.link.androidtesting.notes.Injection;
import ides.link.androidtesting.notes.addnote.AddNoteActivity;
import ides.link.androidtesting.notes.notedetail.NoteDetailActivity;
import ides.link.androidtesting.notes.R;
import ides.link.androidtesting.notes.data.Note;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.design.widget.FloatingActionButton;
import android.support.design.widget.Snackbar;
import android.support.v4.app.Fragment;
import android.support.v4.content.ContextCompat;
import android.support.v4.widget.SwipeRefreshLayout;
import android.support.v7.widget.GridLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
import java.util.ArrayList;
import java.util.List;
import static com.google.common.base.Preconditions.checkNotNull;
/**
* Display a grid of {@link Note}s
*/
public class NotesFragment extends Fragment implements NotesContract.View {
private static final int REQUEST_ADD_NOTE = 1;
private NotesContract.UserActionsListener mActionsListener;
private NotesAdapter mListAdapter;
public NotesFragment() {
// Requires empty public constructor
}
public static NotesFragment newInstance() {
return new NotesFragment();
}
@Override
public void onCreate(@Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
mListAdapter = new NotesAdapter(new ArrayList<Note>(0), mItemListener);
//get instance of the presenter and pass NotesRepository using Injection class instance
mActionsListener = new NotesPresenter(Injection.provideNotesRepository(), this);
}
@Override
public void onResume() {
super.onResume();
mActionsListener.loadNotes(false);
}
@Override
public void onActivityCreated(Bundle savedInstanceState) {
super.onActivityCreated(savedInstanceState);
setRetainInstance(true);
}
@Override
public void onActivityResult(int requestCode, int resultCode, Intent data) {
// If a note was successfully added, show snackbar
if (REQUEST_ADD_NOTE == requestCode && Activity.RESULT_OK == resultCode) {
Snackbar.make(getView(), getString(R.string.successfully_saved_note_message),
Snackbar.LENGTH_SHORT).show();
}
}
@Nullable
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
View root = inflater.inflate(R.layout.fragment_notes, container, false);
RecyclerView recyclerView = (RecyclerView) root.findViewById(R.id.notes_list);
recyclerView.setAdapter(mListAdapter);
int numColumns = getContext().getResources().getInteger(R.integer.num_notes_columns);
recyclerView.setHasFixedSize(true);
recyclerView.setLayoutManager(new GridLayoutManager(getContext(), numColumns));
// Set up floating action button
FloatingActionButton fab =
(FloatingActionButton) getActivity().findViewById(R.id.fab_add_notes);
fab.setImageResource(R.drawable.ic_add);
fab.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
mActionsListener.addNewNote();
}
});
// Pull-to-refresh
SwipeRefreshLayout swipeRefreshLayout =
(SwipeRefreshLayout) root.findViewById(R.id.refresh_layout);
swipeRefreshLayout.setColorSchemeColors(
ContextCompat.getColor(getActivity(), R.color.colorPrimary),
ContextCompat.getColor(getActivity(), R.color.colorAccent),
ContextCompat.getColor(getActivity(), R.color.colorPrimaryDark));
swipeRefreshLayout.setOnRefreshListener(new SwipeRefreshLayout.OnRefreshListener() {
@Override
public void onRefresh() {
mActionsListener.loadNotes(true);
}
});
return root;
}
/**
* Listener for clicks on notes in the RecyclerView.
*/
NoteItemListener mItemListener = new NoteItemListener() {
@Override
public void onNoteClick(Note clickedNote) {
mActionsListener.openNoteDetails(clickedNote);
}
};
@Override
public void setProgressIndicator(final boolean active) {
if (getView() == null) {
return;
}
final SwipeRefreshLayout srl =
(SwipeRefreshLayout) getView().findViewById(R.id.refresh_layout);
// Make sure setRefreshing() is called after the layout is done with everything else.
srl.post(new Runnable() {
@Override
public void run() {
srl.setRefreshing(active);
}
});
}
@Override
public void showNotes(List<Note> notes) {
mListAdapter.replaceData(notes);
}
@Override
public void showAddNote() {
Intent intent = new Intent(getContext(),AddNoteActivity.class);
startActivityForResult(intent, REQUEST_ADD_NOTE);
}
@Override
public void showNoteDetailUi(String noteId) {
        // Shown in its own Activity, since it makes more sense that way and it gives us the flexibility
// to show some Intent stubbing.
Intent intent = new Intent(getContext(), NoteDetailActivity.class);
intent.putExtra(NoteDetailActivity.EXTRA_NOTE_ID, noteId);
startActivity(intent);
}
//adapter class
private static class NotesAdapter extends RecyclerView.Adapter<NotesAdapter.ViewHolder> {
private List<Note> mNotes;
private NoteItemListener mItemListener;
public NotesAdapter(List<Note> notes, NoteItemListener itemListener) {
setList(notes);
mItemListener = itemListener;
}
@Override
public ViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
Context context = parent.getContext();
LayoutInflater inflater = LayoutInflater.from(context);
View noteView = inflater.inflate(R.layout.item_note, parent, false);
return new ViewHolder(noteView, mItemListener);
}
@Override
public void onBindViewHolder(ViewHolder viewHolder, int position) {
Note note = mNotes.get(position);
viewHolder.title.setText(note.getTitle());
viewHolder.description.setText(note.getDescription());
}
public void replaceData(List<Note> notes) {
setList(notes);
notifyDataSetChanged();
}
private void setList(List<Note> notes) {
mNotes = checkNotNull(notes);
}
@Override
public int getItemCount() {
return mNotes.size();
}
public Note getItem(int position) {
return mNotes.get(position);
}
public class ViewHolder extends RecyclerView.ViewHolder implements View.OnClickListener {
public TextView title;
public TextView description;
private NoteItemListener mItemListener;
public ViewHolder(View itemView, NoteItemListener listener) {
super(itemView);
mItemListener = listener;
title = (TextView) itemView.findViewById(R.id.note_detail_title);
description = (TextView) itemView.findViewById(R.id.note_detail_description);
itemView.setOnClickListener(this);
}
@Override
public void onClick(View v) {
int position = getAdapterPosition();
Note note = getItem(position);
mItemListener.onNoteClick(note);
}
}
}
public interface NoteItemListener {
void onNoteClick(Note clickedNote);
}
}
|
apache-2.0
|
video-games-records/CoreBundle
|
Controller/GroupController.php
|
2319
|
<?php
namespace VideoGamesRecords\CoreBundle\Controller;
use Sensio\Bundle\FrameworkExtraBundle\Configuration\Cache;
use Symfony\Bundle\FrameworkBundle\Controller\Controller;
use Symfony\Component\HttpFoundation\Request;
use VideoGamesRecords\CoreBundle\Entity\Group;
use VideoGamesRecords\CoreBundle\Form\Type\SubmitFormFactory;
/**
* Class GroupController
*/
class GroupController extends Controller
{
/**
* @param Group $group
* @param Request $request
* @return mixed
*/
public function playerRankingPoints(Group $group, Request $request)
{
$maxRank = $request->query->get('maxRank', 5);
$idPlayer = $request->query->get('idPlayer', null);
$ranking = $this->getDoctrine()->getRepository('VideoGamesRecordsCoreBundle:PlayerGroup')->getRankingPoints($group->getId(), $maxRank, $idPlayer);
return $ranking;
}
/**
* @param Group $group
* @param Request $request
* @return mixed
*/
public function playerRankingMedals(Group $group, Request $request)
{
$maxRank = $request->query->get('maxRank', 5);
$idPlayer = $request->query->get('idPlayer', null);
$ranking = $this->getDoctrine()->getRepository('VideoGamesRecordsCoreBundle:PlayerGroup')->getRankingMedals($group->getId(), $maxRank, $idPlayer);
return $ranking;
}
/**
* @param Group $group
* @param Request $request
* @return mixed
*/
public function teamRankingPoints(Group $group, Request $request)
{
$maxRank = $request->query->get('maxRank', 5);
$idPlayer = $request->query->get('idPlayer', null);
$ranking = $this->getDoctrine()->getRepository('VideoGamesRecordsTeamBundle:TeamGroup')->getRankingPoints($group->getId(), $maxRank, $idPlayer);
return $ranking;
}
/**
* @param Group $group
* @param Request $request
* @return mixed
*/
public function teamRankingMedals(Group $group, Request $request)
{
$maxRank = $request->query->get('maxRank', 5);
$idPlayer = $request->query->get('idPlayer', null);
$ranking = $this->getDoctrine()->getRepository('VideoGamesRecordsTeamBundle:TeamGroup')->getRankingMedals($group->getId(), $maxRank, $idPlayer);
return $ranking;
}
}
|
apache-2.0
|
benjchristensen/RxJava
|
src/test/java/io/reactivex/internal/operators/observable/ObservableFlattenIterableTest.java
|
1745
|
/**
* Copyright 2016 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is
* distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See
* the License for the specific language governing permissions and limitations under the License.
*/
package io.reactivex.internal.operators.observable;
import java.util.Arrays;
import org.junit.Test;
import io.reactivex.*;
import io.reactivex.functions.Function;
import io.reactivex.subjects.PublishSubject;
public class ObservableFlattenIterableTest {
@Test
public void dispose() {
TestHelper.checkDisposed(PublishSubject.create().flatMapIterable(new Function<Object, Iterable<Integer>>() {
@Override
public Iterable<Integer> apply(Object v) throws Exception {
return Arrays.asList(10, 20);
}
}));
}
@Test
public void badSource() {
TestHelper.checkBadSourceObservable(new Function<Observable<Integer>, Object>() {
@Override
public Object apply(Observable<Integer> o) throws Exception {
return o.flatMapIterable(new Function<Object, Iterable<Integer>>() {
@Override
public Iterable<Integer> apply(Object v) throws Exception {
return Arrays.asList(10, 20);
}
});
}
}, false, 1, 1, 10, 20);
}
}
|
apache-2.0
|
eFaps/eFaps-Kernel
|
src/main/java/org/efaps/db/stmt/selection/elements/LabelElement.java
|
1672
|
/*
* Copyright 2003 - 2018 The eFaps Team
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.efaps.db.stmt.selection.elements;
import org.efaps.admin.datamodel.Status;
import org.efaps.admin.datamodel.Type;
import org.efaps.util.EFapsException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class LabelElement
extends AbstractElement<LabelElement>
implements IAuxillary
{
/** The Constant LOG. */
private static final Logger LOG = LoggerFactory.getLogger(LabelElement.class);
@Override
public LabelElement getThis()
{
return this;
}
@Override
public Object getObject(final Object[] _row)
throws EFapsException
{
Object object = _row == null ? null : _row[0];
if (object != null) {
if (object instanceof Status) {
object = ((Status) object).getLabel();
} else if (object instanceof Type) {
object = ((Type) object).getLabel();
} else {
LOG.warn("LabelElement was called with unexpected Object: {}", object);
}
}
return object;
}
}
|
apache-2.0
|
multi-os-engine/moe-core
|
moe.apple/moe.platform.ios/src/main/java/apple/carplay/protocol/CPBarButtonProviding.java
|
2403
|
package apple.carplay.protocol;
import apple.carplay.CPBarButton;
import apple.foundation.NSArray;
import org.moe.natj.general.ann.Generated;
import org.moe.natj.general.ann.Library;
import org.moe.natj.general.ann.Runtime;
import org.moe.natj.objc.ObjCRuntime;
import org.moe.natj.objc.ann.ObjCProtocolName;
import org.moe.natj.objc.ann.Selector;
@Generated
@Library("CarPlay")
@Runtime(ObjCRuntime.class)
@ObjCProtocolName("CPBarButtonProviding")
public interface CPBarButtonProviding {
@Generated
@Selector("backButton")
CPBarButton backButton();
/**
* An array of bar buttons to be displayed on the leading side of the navigation bar.
* <p>
* [@note] The navigation bar may display a maximum of 2 buttons in the leading space.
* Setting more than 2 buttons to this property will only display the first 2 buttons.
*/
@Generated
@Selector("leadingNavigationBarButtons")
NSArray<? extends CPBarButton> leadingNavigationBarButtons();
@Generated
@Selector("setBackButton:")
void setBackButton(CPBarButton value);
/**
* An array of bar buttons to be displayed on the leading side of the navigation bar.
* <p>
* [@note] The navigation bar may display a maximum of 2 buttons in the leading space.
* Setting more than 2 buttons to this property will only display the first 2 buttons.
*/
@Generated
@Selector("setLeadingNavigationBarButtons:")
void setLeadingNavigationBarButtons(NSArray<? extends CPBarButton> value);
/**
* An array of bar buttons to be displayed on the trailing side of the navigation bar.
* <p>
* [@note] The navigation bar may display a maximum of 2 buttons in the trailing space.
* Setting more than 2 buttons to this property will only display the first 2 buttons.
*/
@Generated
@Selector("setTrailingNavigationBarButtons:")
void setTrailingNavigationBarButtons(NSArray<? extends CPBarButton> value);
/**
* An array of bar buttons to be displayed on the trailing side of the navigation bar.
* <p>
* [@note] The navigation bar may display a maximum of 2 buttons in the trailing space.
* Setting more than 2 buttons to this property will only display the first 2 buttons.
*/
@Generated
@Selector("trailingNavigationBarButtons")
NSArray<? extends CPBarButton> trailingNavigationBarButtons();
}
|
apache-2.0
|
benakiva/superduo
|
Football_Scores-master/app/src/main/java/barqsoft/footballscores/service/myFetchService.java
|
10649
|
package barqsoft.footballscores.service;
import android.app.IntentService;
import android.content.ContentValues;
import android.content.Context;
import android.content.Intent;
import android.net.Uri;
import android.util.Log;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.TimeZone;
import java.util.Vector;
import barqsoft.footballscores.DatabaseContract;
import barqsoft.footballscores.R;
/**
* Created by yehya khaled on 3/2/2015.
*/
public class myFetchService extends IntentService
{
public static final String LOG_TAG = "myFetchService";
public myFetchService()
{
super("myFetchService");
}
@Override
protected void onHandleIntent(Intent intent)
{
getData("n2");
getData("p2");
return;
}
private void getData (String timeFrame)
{
//Creating fetch URL
final String BASE_URL = "http://api.football-data.org/alpha/fixtures"; //Base URL
final String QUERY_TIME_FRAME = "timeFrame"; //Time Frame parameter to determine days
//final String QUERY_MATCH_DAY = "matchday";
Uri fetch_build = Uri.parse(BASE_URL).buildUpon().
appendQueryParameter(QUERY_TIME_FRAME, timeFrame).build();
//Log.v(LOG_TAG, fetch_build.toString()); //log spam
HttpURLConnection m_connection = null;
BufferedReader reader = null;
String JSON_data = null;
//Opening Connection
try {
URL fetch = new URL(fetch_build.toString());
m_connection = (HttpURLConnection) fetch.openConnection();
m_connection.setRequestMethod("GET");
m_connection.addRequestProperty("X-Auth-Token","e136b7858d424b9da07c88f28b61989a");
m_connection.connect();
// Read the input stream into a String
InputStream inputStream = m_connection.getInputStream();
StringBuffer buffer = new StringBuffer();
if (inputStream == null) {
// Nothing to do.
return;
}
reader = new BufferedReader(new InputStreamReader(inputStream));
String line;
while ((line = reader.readLine()) != null) {
// Since it's JSON, adding a newline isn't necessary (it won't affect parsing)
// But it does make debugging a *lot* easier if you print out the completed
// buffer for debugging.
buffer.append(line + "\n");
}
if (buffer.length() == 0) {
// Stream was empty. No point in parsing.
return;
}
JSON_data = buffer.toString();
}
catch (Exception e)
{
Log.e(LOG_TAG,"Exception here" + e.getMessage());
}
finally {
if(m_connection != null)
{
m_connection.disconnect();
}
if (reader != null)
{
try {
reader.close();
}
catch (IOException e)
{
Log.e(LOG_TAG,"Error Closing Stream");
}
}
}
try {
if (JSON_data != null) {
//This bit is to check if the data contains any matches. If not, we call processJson on the dummy data
JSONArray matches = new JSONObject(JSON_data).getJSONArray("fixtures");
if (matches.length() == 0) {
//if there is no data, call the function on dummy data
//this is expected behavior during the off season.
processJSONdata(getString(R.string.dummy_data), getApplicationContext(), false);
return;
}
processJSONdata(JSON_data, getApplicationContext(), true);
} else {
//Could not Connect
Log.d(LOG_TAG, "Could not connect to server.");
}
}
catch(Exception e)
{
Log.e(LOG_TAG,e.getMessage());
}
}
private void processJSONdata (String JSONdata,Context mContext, boolean isReal)
{
//JSON data
final String SERIE_A = "357";
final String PREMIER_LEGAUE = "354";
final String CHAMPIONS_LEAGUE = "362";
final String PRIMERA_DIVISION = "358";
final String BUNDESLIGA = "351";
final String SEASON_LINK = "http://api.football-data.org/alpha/soccerseasons/";
final String MATCH_LINK = "http://api.football-data.org/alpha/fixtures/";
final String FIXTURES = "fixtures";
final String LINKS = "_links";
final String SOCCER_SEASON = "soccerseason";
final String SELF = "self";
final String MATCH_DATE = "date";
final String HOME_TEAM = "homeTeamName";
final String AWAY_TEAM = "awayTeamName";
final String RESULT = "result";
final String HOME_GOALS = "goalsHomeTeam";
final String AWAY_GOALS = "goalsAwayTeam";
final String MATCH_DAY = "matchday";
//Match data
String League = null;
String mDate = null;
String mTime = null;
String Home = null;
String Away = null;
String Home_goals = null;
String Away_goals = null;
String match_id = null;
String match_day = null;
try {
JSONArray matches = new JSONObject(JSONdata).getJSONArray(FIXTURES);
//ContentValues to be inserted
Vector<ContentValues> values = new Vector <ContentValues> (matches.length());
for(int i = 0;i < matches.length();i++)
{
JSONObject match_data = matches.getJSONObject(i);
League = match_data.getJSONObject(LINKS).getJSONObject(SOCCER_SEASON).
getString("href");
League = League.replace(SEASON_LINK,"");
if( League.equals(PREMIER_LEGAUE) ||
League.equals(SERIE_A) ||
League.equals(CHAMPIONS_LEAGUE) ||
League.equals(BUNDESLIGA) ||
League.equals(PRIMERA_DIVISION) )
{
match_id = match_data.getJSONObject(LINKS).getJSONObject(SELF).
getString("href");
match_id = match_id.replace(MATCH_LINK, "");
if(!isReal){
//This if statement changes the match ID of the dummy data so that it all goes into the database
match_id=match_id+Integer.toString(i);
}
mDate = match_data.getString(MATCH_DATE);
mTime = mDate.substring(mDate.indexOf("T") + 1, mDate.indexOf("Z"));
mDate = mDate.substring(0,mDate.indexOf("T"));
SimpleDateFormat match_date = new SimpleDateFormat("yyyy-MM-ddHH:mm:ss");
match_date.setTimeZone(TimeZone.getTimeZone("UTC"));
try {
Date parseddate = match_date.parse(mDate+mTime);
SimpleDateFormat new_date = new SimpleDateFormat("yyyy-MM-dd:HH:mm");
new_date.setTimeZone(TimeZone.getDefault());
mDate = new_date.format(parseddate);
mTime = mDate.substring(mDate.indexOf(":") + 1);
mDate = mDate.substring(0,mDate.indexOf(":"));
if(!isReal){
//This if statement changes the dummy data's date to match our current date range.
Date fragmentdate = new Date(System.currentTimeMillis()+((i-2)*86400000));
SimpleDateFormat mformat = new SimpleDateFormat("yyyy-MM-dd");
mDate=mformat.format(fragmentdate);
}
}
catch (Exception e)
{
Log.d(LOG_TAG, "error here!");
Log.e(LOG_TAG,e.getMessage());
}
Home = match_data.getString(HOME_TEAM);
Away = match_data.getString(AWAY_TEAM);
Home_goals = match_data.getJSONObject(RESULT).getString(HOME_GOALS);
Away_goals = match_data.getJSONObject(RESULT).getString(AWAY_GOALS);
match_day = match_data.getString(MATCH_DAY);
ContentValues match_values = new ContentValues();
match_values.put(DatabaseContract.scores_table.MATCH_ID,match_id);
match_values.put(DatabaseContract.scores_table.DATE_COL,mDate);
match_values.put(DatabaseContract.scores_table.TIME_COL,mTime);
match_values.put(DatabaseContract.scores_table.HOME_COL,Home);
match_values.put(DatabaseContract.scores_table.AWAY_COL,Away);
match_values.put(DatabaseContract.scores_table.HOME_GOALS_COL,Home_goals);
match_values.put(DatabaseContract.scores_table.AWAY_GOALS_COL,Away_goals);
match_values.put(DatabaseContract.scores_table.LEAGUE_COL,League);
match_values.put(DatabaseContract.scores_table.MATCH_DAY,match_day);
//log spam
//Log.v(LOG_TAG,match_id);
//Log.v(LOG_TAG,mDate);
//Log.v(LOG_TAG,mTime);
//Log.v(LOG_TAG,Home);
//Log.v(LOG_TAG,Away);
//Log.v(LOG_TAG,Home_goals);
//Log.v(LOG_TAG,Away_goals);
values.add(match_values);
}
}
int inserted_data = 0;
ContentValues[] insert_data = new ContentValues[values.size()];
values.toArray(insert_data);
inserted_data = mContext.getContentResolver().bulkInsert(
DatabaseContract.BASE_CONTENT_URI,insert_data);
//Log.v(LOG_TAG,"Succesfully Inserted : " + String.valueOf(inserted_data));
}
catch (JSONException e)
{
Log.e(LOG_TAG,e.getMessage());
}
}
}
|
apache-2.0
|
Netflix/staash
|
staash-svc/src/main/java/com/netflix/staash/service/MetaService.java
|
2022
|
/*******************************************************************************
* /*
* *
* * Copyright 2013 Netflix, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
* *
******************************************************************************/
package com.netflix.staash.service;
import java.util.Map;
import com.netflix.staash.exception.PaasException;
import com.netflix.staash.exception.StorageDoesNotExistException;
import com.netflix.staash.json.JsonObject;
import com.netflix.staash.rest.dao.DataDao;
import com.netflix.staash.rest.dao.MetaDao;
import com.netflix.staash.rest.meta.entity.Entity;
import com.netflix.staash.rest.meta.entity.EntityType;
public interface MetaService {
public String writeMetaEntity(EntityType etype, String entity) throws StorageDoesNotExistException;
// public Entity readMetaEntity(String rowKey);
// public String writeRow(String db, String table, JsonObject rowObj);
// public String listRow(String db, String table, String keycol, String key);
public String listSchemas();
public String listTablesInSchema(String schemaname);
public String listTimeseriesInSchema(String schemaname);
public String listStorage();
public Map<String,String> getStorageMap();
public String CreateDB();
public String createTable();
public JsonObject runQuery(EntityType etype, String col);
public JsonObject getStorageForTable(String table);
}
|
apache-2.0
|
iLib-js/iLib
|
js/test/phone/testphonenum_BE.js
|
13790
|
/*
* phonenum_BE.js - Test parsing phone numbers in BE
*
* Copyright © 2014-2015,2017, JEDLSoft
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*/
if (typeof(PhoneNumber) === "undefined") {
var PhoneNumber = require("../../lib/PhoneNumber.js");
}
if (typeof(ilib) === "undefined") {
var ilib = require("../../lib/ilib.js");
}
module.exports.phonenum_BE = {
setUp: function(callback) {
ilib.clearCache();
callback();
},
testParseBEFull: function(test) {
test.expect(2);
var parsed = new PhoneNumber("038234567", {locale: "nl-BE"});
test.ok(typeof(parsed) !== "undefined");
var expected = new PhoneNumber({
trunkAccess: "0",
areaCode: "3",
subscriberNumber: "8234567"
}, {locale: "nl-BE"});
test.ok(parsed.equals(expected));
test.done();
},
testParseBEIgnoreFormatting: function(test) {
test.expect(2);
var parsed = new PhoneNumber("03-823-45-67", {locale: "nl-BE"});
test.ok(typeof(parsed) !== "undefined");
var expected = new PhoneNumber({
trunkAccess: "0",
areaCode: "3",
subscriberNumber: "8234567"
}, {locale: "nl-BE"});
test.ok(parsed.equals(expected));
test.done();
},
testParseBEIgnoreCrap: function(test) {
test.expect(2);
var parsed = new PhoneNumber("0@3!8$2^34(56_7", {locale: "nl-BE"});
test.ok(typeof(parsed) !== "undefined");
var expected = new PhoneNumber({
trunkAccess: "0",
areaCode: "3",
subscriberNumber: "8234567"
}, {locale: "nl-BE"});
test.ok(parsed.equals(expected));
test.done();
},
testParseBENoAreaCode: function(test) {
test.expect(2);
var parsed = new PhoneNumber("8234567", {locale: "nl-BE"});
test.ok(typeof(parsed) !== "undefined");
var expected = new PhoneNumber({
subscriberNumber: "8234567"
}, {locale: "nl-BE"});
test.ok(parsed.equals(expected));
test.done();
},
testParseBEPlusIDDToUS: function(test) {
test.expect(2);
var parsed = new PhoneNumber("+12028675309", {locale: "nl-BE"});
test.ok(typeof(parsed) !== "undefined");
var expected = new PhoneNumber({
iddPrefix: "+",
countryCode: "1",
areaCode: "202",
subscriberNumber: "8675309"
}, {locale: "nl-BE"});
test.ok(parsed.equals(expected));
test.done();
},
testParseBEZerosIDDToUS: function(test) {
test.expect(2);
var parsed = new PhoneNumber("0012028675309", {locale: "nl-BE"});
test.ok(typeof(parsed) !== "undefined");
var expected = new PhoneNumber({
iddPrefix: "00",
countryCode: "1",
areaCode: "202",
subscriberNumber: "8675309"
}, {locale: "nl-BE"});
test.ok(parsed.equals(expected));
test.done();
},
testParseBELongAreaCodeNoTrunk: function(test) {
test.expect(2);
// this number uses an area code to start it, but without the trunk, we should
// not recognize it as an area code
var parsed = new PhoneNumber("71123456", {locale: "nl-BE"});
test.ok(typeof(parsed) !== "undefined");
var expected = new PhoneNumber({
subscriberNumber: "71123456"
}, {locale: "nl-BE"});
test.ok(parsed.equals(expected));
test.done();
},
testParseBELocalNumber: function(test) {
test.expect(2);
        // a plain local number has no trunk or area code, so the digits are
        // parsed entirely as the subscriber number
var parsed = new PhoneNumber("82 34 56", {locale: "nl-BE"});
test.ok(typeof(parsed) !== "undefined");
var expected = new PhoneNumber({
subscriberNumber: "823456"
}, {locale: "nl-BE"});
test.ok(parsed.equals(expected));
test.done();
},
testParseBEPlusIDDToGB: function(test) {
test.expect(2);
var parsed = new PhoneNumber("+442082345678", {locale: "nl-BE"});
test.ok(typeof(parsed) !== "undefined");
var expected = new PhoneNumber({
iddPrefix: "+",
countryCode: "44",
areaCode: "20",
subscriberNumber: "82345678"
}, {locale: "nl-BE"});
test.ok(parsed.equals(expected));
test.done();
},
testParseBEZerosIDDToGB: function(test) {
test.expect(2);
var parsed = new PhoneNumber("00442082345678", {locale: "nl-BE"});
test.ok(typeof(parsed) !== "undefined");
var expected = new PhoneNumber({
iddPrefix: "00",
countryCode: "44",
areaCode: "20",
subscriberNumber: "82345678"
}, {locale: "nl-BE"});
test.ok(parsed.equals(expected));
test.done();
},
testParseBEEmergencyNumber: function(test) {
test.expect(2);
var parsed = new PhoneNumber("112", {locale: "nl-BE"});
test.ok(typeof(parsed) !== "undefined");
var expected = new PhoneNumber({
emergency: "112"
}, {locale: "nl-BE"});
test.ok(parsed.equals(expected));
test.done();
},
testParseBEEmergencyNumberPlus: function(test) {
test.expect(2);
var parsed = new PhoneNumber("112115", {locale: "nl-BE"});
test.ok(typeof(parsed) !== "undefined");
var expected = new PhoneNumber({
emergency: "112",
subscriberNumber: "115"
}, {locale: "nl-BE"});
test.ok(parsed.equals(expected));
test.done();
},
testParseBEMobileNumber: function(test) {
test.expect(2);
var parsed = new PhoneNumber("0492 823456", {locale: "nl-BE"});
test.ok(typeof(parsed) !== "undefined");
var expected = new PhoneNumber({
trunkAccess: "0",
mobilePrefix: "492",
subscriberNumber: "823456"
}, {locale: "nl-BE"});
test.ok(parsed.equals(expected));
test.done();
},
testParseBEInternational: function(test) {
test.expect(2);
var parsed = new PhoneNumber("+32 3 823 45 67", {locale: "nl-BE"});
test.ok(typeof(parsed) !== "undefined");
var expected = new PhoneNumber({
iddPrefix: "+",
countryCode: "32",
areaCode: "3",
subscriberNumber: "8234567"
}, {locale: "nl-BE"});
test.ok(parsed.equals(expected));
test.done();
},
testParseBEInternationalMobile: function(test) {
test.expect(2);
var parsed = new PhoneNumber("+32 492 823 456", {locale: "nl-BE"});
test.ok(typeof(parsed) !== "undefined");
var expected = new PhoneNumber({
iddPrefix: "+",
countryCode: "32",
mobilePrefix: "492",
subscriberNumber: "823456"
}, {locale: "nl-BE"});
test.ok(parsed.equals(expected));
test.done();
},
testParseBEService: function(test) {
test.expect(2);
var parsed = new PhoneNumber("0800 82345678", {locale: "nl-BE"});
test.ok(typeof(parsed) !== "undefined");
var expected = new PhoneNumber({
trunkAccess: "0",
serviceCode: "800",
subscriberNumber: "82345678"
}, {locale: "nl-BE"});
test.ok(parsed.equals(expected));
test.done();
},
testParseBEBlock: function(test) {
test.expect(2);
var parsed = new PhoneNumber("116116", {locale: "nl-BE"});
test.ok(typeof(parsed) !== "undefined");
var expected = new PhoneNumber({
serviceCode: "116",
subscriberNumber: "116"
}, {locale: "nl-BE"});
test.ok(parsed.equals(expected));
test.done();
},
testParseBEPartial1: function(test) {
test.expect(2);
var parsed = new PhoneNumber("0", {locale: "nl-BE"});
test.ok(typeof(parsed) !== "undefined");
var expected = new PhoneNumber({
trunkAccess: "0"
}, {locale: "nl-BE"});
test.ok(parsed.equals(expected));
test.done();
},
testParseBEPartial2: function(test) {
test.expect(2);
var parsed = new PhoneNumber("05", {locale: "nl-BE"});
test.ok(typeof(parsed) !== "undefined");
var expected = new PhoneNumber({
trunkAccess: "0",
subscriberNumber: "5"
}, {locale: "nl-BE"});
test.ok(parsed.equals(expected));
test.done();
},
testParseBEPartial3: function(test) {
test.expect(2);
var parsed = new PhoneNumber("058", {locale: "nl-BE"});
test.ok(typeof(parsed) !== "undefined");
var expected = new PhoneNumber({
trunkAccess: "0",
areaCode: "58"
}, {locale: "nl-BE"});
test.ok(parsed.equals(expected));
test.done();
},
testParseBEPartial4: function(test) {
test.expect(2);
var parsed = new PhoneNumber("0584", {locale: "nl-BE"});
test.ok(typeof(parsed) !== "undefined");
var expected = new PhoneNumber({
trunkAccess: "0",
areaCode: "58",
subscriberNumber: "4"
}, {locale: "nl-BE"});
test.ok(parsed.equals(expected));
test.done();
},
testParseBEPartial5: function(test) {
test.expect(2);
var parsed = new PhoneNumber("05844", {locale: "nl-BE"});
test.ok(typeof(parsed) !== "undefined");
var expected = new PhoneNumber({
trunkAccess: "0",
areaCode: "58",
subscriberNumber: "44"
}, {locale: "nl-BE"});
test.ok(parsed.equals(expected));
test.done();
},
testParseBEPartial6: function(test) {
test.expect(2);
var parsed = new PhoneNumber("058441", {locale: "nl-BE"});
test.ok(typeof(parsed) !== "undefined");
var expected = new PhoneNumber({
trunkAccess: "0",
areaCode: "58",
subscriberNumber: "441"
}, {locale: "nl-BE"});
test.ok(parsed.equals(expected));
test.done();
},
testParseBEPartial7: function(test) {
test.expect(2);
var parsed = new PhoneNumber("0584412", {locale: "nl-BE"});
test.ok(typeof(parsed) !== "undefined");
var expected = new PhoneNumber({
trunkAccess: "0",
areaCode: "58",
subscriberNumber: "4412"
}, {locale: "nl-BE"});
test.ok(parsed.equals(expected));
test.done();
},
testParseBEPartial8: function(test) {
test.expect(2);
var parsed = new PhoneNumber("05844123", {locale: "nl-BE"});
test.ok(typeof(parsed) !== "undefined");
var expected = new PhoneNumber({
trunkAccess: "0",
areaCode: "58",
subscriberNumber: "44123"
}, {locale: "nl-BE"});
test.ok(parsed.equals(expected));
test.done();
},
testParseBEPartial9: function(test) {
test.expect(2);
var parsed = new PhoneNumber("058441234", {locale: "nl-BE"});
test.ok(typeof(parsed) !== "undefined");
var expected = new PhoneNumber({
trunkAccess: "0",
areaCode: "58",
subscriberNumber: "441234"
}, {locale: "nl-BE"});
test.ok(parsed.equals(expected));
test.done();
},
testParseBEPartial10: function(test) {
test.expect(2);
var parsed = new PhoneNumber("0584412345", {locale: "nl-BE"});
test.ok(typeof(parsed) !== "undefined");
var expected = new PhoneNumber({
trunkAccess: "0",
areaCode: "58",
subscriberNumber: "4412345"
}, {locale: "nl-BE"});
test.ok(parsed.equals(expected));
test.done();
},
testParseBEPartial11: function(test) {
test.expect(2);
var parsed = new PhoneNumber("05844123456", {locale: "nl-BE"});
test.ok(typeof(parsed) !== "undefined");
var expected = new PhoneNumber({
trunkAccess: "0",
areaCode: "58",
subscriberNumber: "44123456"
}, {locale: "nl-BE"});
test.ok(parsed.equals(expected));
test.done();
}
};
|
apache-2.0
|
felix-tien/TechLab
|
CSharpLab/TestCFX/Testing/UnitTest/Arch.CFramework.ConfigBean.Test/FileWritterTest.cs
|
2412
|
using Arch.CFramework.ConfigBean.ConfigurationAction.FileAction;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
namespace Arch.CFramework.ConfigBean.Test
{
/// <summary>
///This is a test class for FileWritterTest and is intended
///to contain all FileWritterTest Unit Tests
///</summary>
[TestClass()]
public class FileWritterTest
{
private TestContext testContextInstance;
/// <summary>
///Gets or sets the test context which provides
///information about and functionality for the current test run.
///</summary>
public TestContext TestContext
{
get
{
return testContextInstance;
}
set
{
testContextInstance = value;
}
}
#region Additional test attributes
//
//You can use the following additional attributes as you write your tests:
//
//Use ClassInitialize to run code before running the first test in the class
//[ClassInitialize()]
//public static void MyClassInitialize(TestContext testContext)
//{
//}
//
//Use ClassCleanup to run code after all tests in a class have run
//[ClassCleanup()]
//public static void MyClassCleanup()
//{
//}
//
//Use TestInitialize to run code before running each test
//[TestInitialize()]
//public void MyTestInitialize()
//{
//}
//
//Use TestCleanup to run code after each test has run
//[TestCleanup()]
//public void MyTestCleanup()
//{
//}
//
#endregion
/// <summary>
///A test for Add
///</summary>
[TestMethod()]
public void AddTest()
{
//string filename = "temp.fc.config"; // TODO: Initialize to an appropriate value
//FileWritter target = new FileWritter(filename); // TODO: Initialize to an appropriate value
//string key = string.Empty; // TODO: Initialize to an appropriate value
//string value = string.Empty; // TODO: Initialize to an appropriate value
//key = "fckey2";
//value = "3";
//target.Add(key, value);
//target.Save();
}
}
}
|
apache-2.0
|
IvanNikolaychuk/pentaho-kettle
|
plugins/repositories-plugin/src/test/java/org/pentaho/di/ui/repo/RepositoryConnectControllerTest.java
|
8428
|
/*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2016 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.ui.repo;
import org.json.simple.JSONObject;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;
import org.pentaho.di.core.KettleEnvironment;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.plugins.PluginInterface;
import org.pentaho.di.core.plugins.PluginRegistry;
import org.pentaho.di.core.plugins.RepositoryPluginType;
import org.pentaho.di.repository.AbstractRepository;
import org.pentaho.di.repository.RepositoriesMeta;
import org.pentaho.di.repository.Repository;
import org.pentaho.di.repository.RepositoryMeta;
import org.pentaho.di.repository.filerep.KettleFileRepositoryMeta;
import org.pentaho.di.ui.core.PropsUI;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.mockito.Mockito.*;
/**
* Created by bmorrise on 5/3/16.
*/
@RunWith( MockitoJUnitRunner.class )
public class RepositoryConnectControllerTest {
public static final String PLUGIN_NAME = "PLUGIN NAME";
public static final String ID = "ID";
public static final String PLUGIN_DESCRIPTION = "PLUGIN DESCRIPTION";
public static final String DATABASE_NAME = "DATABASE NAME";
public static final String REPOSITORY_NAME = "Repository Name";
public static final String REPOSITORY_ID = "Repository ID";
public static final String REPOSITORY_DESCRIPTION = "Repository Description";
@Mock
RepositoriesMeta repositoriesMeta;
@Mock
PluginRegistry pluginRegistry;
@Mock
RepositoryMeta repositoryMeta;
@Mock
PluginInterface pluginInterface;
@Mock
AbstractRepository repository;
@Mock
DatabaseMeta databaseMeta;
@Mock
PropsUI propsUI;
private RepositoryConnectController controller;
@BeforeClass
public static void setUpClass() throws Exception {
if ( !KettleEnvironment.isInitialized() ) {
KettleEnvironment.init();
}
}
@Before
public void setUp() {
controller = new RepositoryConnectController( pluginRegistry, null, repositoriesMeta, propsUI );
when( pluginInterface.getName() ).thenReturn( PLUGIN_NAME );
when( pluginInterface.getIds() ).thenReturn( new String[] { ID } );
when( pluginInterface.getDescription() ).thenReturn( PLUGIN_DESCRIPTION );
List<PluginInterface> plugins = new ArrayList<>();
plugins.add( pluginInterface );
when( pluginRegistry.getPlugins( RepositoryPluginType.class ) ).thenReturn( plugins );
when( repositoryMeta.getId() ).thenReturn( ID );
when( repositoryMeta.getName() ).thenReturn( PLUGIN_NAME );
when( repositoryMeta.getDescription() ).thenReturn( PLUGIN_DESCRIPTION );
}
@Test
public void testGetPlugins() throws Exception {
String plugins = controller.getPlugins();
assertEquals( "[{\"name\":\"PLUGIN NAME\",\"description\":\"PLUGIN DESCRIPTION\",\"id\":\"ID\"}]", plugins );
}
@Test
public void testCreateRepository() throws Exception {
String id = ID;
Map<String, Object> items = new HashMap<>();
when( pluginRegistry.loadClass( RepositoryPluginType.class, id, RepositoryMeta.class ) )
.thenReturn( repositoryMeta );
when( pluginRegistry.loadClass( RepositoryPluginType.class, repositoryMeta.getId(), Repository.class ) )
.thenReturn( repository );
when( repository.test() ).thenReturn( true );
boolean result = controller.createRepository( id, items );
assertEquals( true, result );
when( repository.test() ).thenReturn( false );
result = controller.createRepository( id, items );
assertEquals( false, result );
when( repository.test() ).thenReturn( true );
doThrow( new KettleException() ).when( repositoriesMeta ).writeData();
result = controller.createRepository( id, items );
assertEquals( false, result );
}
@Test
public void testGetRepositories() {
when( repositoriesMeta.nrRepositories() ).thenReturn( 1 );
when( repositoriesMeta.getRepository( 0 ) ).thenReturn( repositoryMeta );
JSONObject json = new JSONObject();
json.put( "displayName", REPOSITORY_NAME );
json.put( "isDefault", false );
json.put( "description", REPOSITORY_DESCRIPTION );
json.put( "id", REPOSITORY_ID );
when( repositoryMeta.toJSONObject() ).thenReturn( json );
String repositories = controller.getRepositories();
assertEquals(
"[{\"isDefault\":false,\"displayName\":\"Repository Name\",\"description\":\"Repository Description\","
+ "\"id\":\"Repository ID\"}]",
repositories );
}
@Test
public void testConnectToRepository() throws Exception {
when( pluginRegistry.loadClass( RepositoryPluginType.class, repositoryMeta.getId(), Repository.class ) )
.thenReturn( repository );
controller.setCurrentRepository( repositoryMeta );
controller.connectToRepository();
verify( repository ).init( repositoryMeta );
verify( repository ).connect( null, null );
}
@Test
public void testGetDatabases() throws Exception {
when( repositoriesMeta.nrDatabases() ).thenReturn( 1 );
when( repositoriesMeta.getDatabase( 0 ) ).thenReturn( databaseMeta );
when( databaseMeta.getName() ).thenReturn( DATABASE_NAME );
String databases = controller.getDatabases();
assertEquals( "[{\"name\":\"DATABASE NAME\"}]", databases );
}
@Test
public void testDeleteRepository() throws Exception {
int index = 1;
when( repositoriesMeta.findRepository( REPOSITORY_NAME ) ).thenReturn( repositoryMeta );
when( repositoriesMeta.indexOfRepository( repositoryMeta ) ).thenReturn( index );
when( repositoriesMeta.getRepository( index ) ).thenReturn( repositoryMeta );
boolean result = controller.deleteRepository( REPOSITORY_NAME );
assertEquals( true, result );
verify( repositoriesMeta ).removeRepository( index );
verify( repositoriesMeta ).writeData();
}
@Test
public void testSetDefaultRepository() {
int index = 1;
when( repositoriesMeta.findRepository( REPOSITORY_NAME ) ).thenReturn( repositoryMeta );
when( repositoriesMeta.indexOfRepository( repositoryMeta ) ).thenReturn( index );
boolean result = controller.setDefaultRepository( REPOSITORY_NAME );
assertEquals( true, result );
}
@Test
public void testAddDatabase() throws Exception {
controller.addDatabase( databaseMeta );
verify( repositoriesMeta ).addDatabase( databaseMeta );
verify( repositoriesMeta ).writeData();
}
@Test
public void testGetDefaultUrl() throws Exception {
String defaultUrl = controller.getDefaultUrl();
assertNotNull( defaultUrl );
}
@Test
public void testGetRepository() throws Exception {
KettleFileRepositoryMeta kettleFileRepositoryMeta = new KettleFileRepositoryMeta();
kettleFileRepositoryMeta.setId( REPOSITORY_ID );
kettleFileRepositoryMeta.setDescription( REPOSITORY_DESCRIPTION );
kettleFileRepositoryMeta.setName( REPOSITORY_NAME );
when( repositoriesMeta.findRepository( REPOSITORY_NAME ) ).thenReturn( kettleFileRepositoryMeta );
String output = controller.getRepository( REPOSITORY_NAME );
assertEquals( true, output.contains( REPOSITORY_ID ) );
assertEquals( true, output.contains( REPOSITORY_DESCRIPTION ) );
assertEquals( true, output.contains( REPOSITORY_NAME ) );
}
}
|
apache-2.0
|
codingllama/trillian
|
storage/memory/admin_storage.go
|
5285
|
// Copyright 2017 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package memory
import (
"context"
"fmt"
"sync"
"time"
"github.com/golang/glog"
"github.com/golang/protobuf/ptypes"
"github.com/google/trillian"
"github.com/google/trillian/storage"
)
// NewAdminStorage returns a storage.AdminStorage implementation backed by
// memoryTreeStorage.
func NewAdminStorage(ms storage.LogStorage) storage.AdminStorage {
return &memoryAdminStorage{ms.(*memoryLogStorage).memoryTreeStorage}
}
// memoryAdminStorage implements storage.AdminStorage
type memoryAdminStorage struct {
ms *memoryTreeStorage
}
func (s *memoryAdminStorage) Snapshot(ctx context.Context) (storage.ReadOnlyAdminTX, error) {
return s.Begin(ctx)
}
func (s *memoryAdminStorage) Begin(ctx context.Context) (storage.AdminTX, error) {
return &adminTX{ms: s.ms}, nil
}
func (s *memoryAdminStorage) CheckDatabaseAccessible(ctx context.Context) error {
return nil
}
type adminTX struct {
ms *memoryTreeStorage
// mu guards reads/writes on closed, which happen only on
// Commit/Rollback/IsClosed/Close methods.
// We don't check closed on *all* methods (apart from the ones above),
// as we trust tx to keep tabs on its state (and consequently fail to do
// queries after closed).
mu sync.RWMutex
closed bool
}
func (t *adminTX) Commit() error {
// TODO(al): The admin implementation isn't transactional
t.mu.Lock()
defer t.mu.Unlock()
t.closed = true
return nil
}
func (t *adminTX) Rollback() error {
// TODO(al): The admin implementation isn't transactional
t.mu.Lock()
defer t.mu.Unlock()
t.closed = true
return nil
}
func (t *adminTX) IsClosed() bool {
t.mu.RLock()
defer t.mu.RUnlock()
return t.closed
}
func (t *adminTX) Close() error {
// Acquire and release read lock manually, without defer, as if the txn
// is not closed Rollback() will attempt to acquire the rw lock.
t.mu.RLock()
closed := t.closed
t.mu.RUnlock()
if !closed {
err := t.Rollback()
if err != nil {
glog.Warningf("Rollback error on Close(): %v", err)
}
return err
}
return nil
}
func (t *adminTX) GetTree(ctx context.Context, treeID int64) (*trillian.Tree, error) {
	tree := t.ms.getTree(treeID)
	// Check for a missing tree before taking the lock; locking a nil tree would panic.
	if tree == nil {
		return nil, fmt.Errorf("no such treeID %d", treeID)
	}
	tree.RLock()
	defer tree.RUnlock()
	return tree.meta, nil
}
func (t *adminTX) ListTreeIDs(ctx context.Context, includeDeleted bool) ([]int64, error) {
t.ms.mu.RLock()
defer t.ms.mu.RUnlock()
var ret []int64
for _, v := range t.ms.trees {
ret = append(ret, v.meta.TreeId)
}
return ret, nil
}
func (t *adminTX) ListTrees(ctx context.Context, includeDeleted bool) ([]*trillian.Tree, error) {
t.ms.mu.RLock()
defer t.ms.mu.RUnlock()
var ret []*trillian.Tree
for _, v := range t.ms.trees {
ret = append(ret, v.meta)
}
return ret, nil
}
func (t *adminTX) CreateTree(ctx context.Context, tr *trillian.Tree) (*trillian.Tree, error) {
if err := storage.ValidateTreeForCreation(ctx, tr); err != nil {
return nil, err
}
if err := validateStorageSettings(tr); err != nil {
return nil, err
}
id, err := storage.NewTreeID()
if err != nil {
return nil, err
}
now := time.Now()
meta := *tr
meta.TreeId = id
meta.CreateTime, err = ptypes.TimestampProto(now)
if err != nil {
return nil, err
}
meta.UpdateTime, err = ptypes.TimestampProto(now)
if err != nil {
return nil, err
}
t.ms.mu.Lock()
defer t.ms.mu.Unlock()
t.ms.trees[id] = newTree(meta)
glog.Infof("trees: %v", t.ms.trees)
return &meta, nil
}
func (t *adminTX) UpdateTree(ctx context.Context, treeID int64, updateFunc func(*trillian.Tree)) (*trillian.Tree, error) {
mTree := t.ms.getTree(treeID)
mTree.mu.Lock()
defer mTree.mu.Unlock()
tree := mTree.meta
beforeUpdate := *tree
updateFunc(tree)
if err := storage.ValidateTreeForUpdate(ctx, &beforeUpdate, tree); err != nil {
return nil, err
}
if err := validateStorageSettings(tree); err != nil {
return nil, err
}
var err error
tree.UpdateTime, err = ptypes.TimestampProto(time.Now())
if err != nil {
return nil, err
}
return tree, nil
}
func (t *adminTX) SoftDeleteTree(ctx context.Context, treeID int64) (*trillian.Tree, error) {
return nil, fmt.Errorf("method not supported: SoftDeleteTree")
}
func (t *adminTX) HardDeleteTree(ctx context.Context, treeID int64) error {
return fmt.Errorf("method not supported: HardDeleteTree")
}
func (t *adminTX) UndeleteTree(ctx context.Context, treeID int64) (*trillian.Tree, error) {
return nil, fmt.Errorf("method not supported: UndeleteTree")
}
func validateStorageSettings(tree *trillian.Tree) error {
if tree.StorageSettings != nil {
return fmt.Errorf("storage_settings not supported, but got %v", tree.StorageSettings)
}
return nil
}
|
apache-2.0
|
consulo/consulo-java
|
java-debugger-impl/src/main/java/com/intellij/debugger/ui/tree/render/CachedEvaluator.java
|
4478
|
/*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.debugger.ui.tree.render;
import javax.annotation.Nullable;
import com.intellij.debugger.engine.DebuggerUtils;
import com.intellij.debugger.engine.evaluation.CodeFragmentFactory;
import com.intellij.debugger.engine.evaluation.EvaluateException;
import com.intellij.debugger.engine.evaluation.EvaluateExceptionUtil;
import com.intellij.debugger.engine.evaluation.TextWithImports;
import com.intellij.debugger.engine.evaluation.expression.ExpressionEvaluator;
import com.intellij.debugger.engine.evaluation.expression.UnsupportedExpressionException;
import com.intellij.debugger.impl.DebuggerUtilsEx;
import com.intellij.debugger.impl.DebuggerUtilsImpl;
import com.intellij.debugger.ui.impl.watch.CompilingEvaluatorImpl;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Pair;
import com.intellij.psi.JavaCodeFragment;
import com.intellij.psi.PsiDocumentManager;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiExpression;
import com.intellij.psi.PsiExpressionCodeFragment;
import com.intellij.psi.PsiType;
import com.intellij.reference.SoftReference;
public abstract class CachedEvaluator
{
private static class Cache
{
protected ExpressionEvaluator myEvaluator;
protected EvaluateException myException;
protected PsiExpression myPsiChildrenExpression;
}
SoftReference<Cache> myCache = new SoftReference<>(null);
private TextWithImports myReferenceExpression;
protected abstract String getClassName();
public TextWithImports getReferenceExpression()
{
return myReferenceExpression != null ? myReferenceExpression : DebuggerUtils.getInstance().createExpressionWithImports("");
}
public void setReferenceExpression(TextWithImports referenceExpression)
{
myReferenceExpression = referenceExpression;
clear();
}
public void clear()
{
myCache.clear();
}
protected Cache initEvaluatorAndChildrenExpression(final Project project)
{
final Cache cache = new Cache();
try
{
Pair<PsiElement, PsiType> psiClassAndType = DebuggerUtilsImpl.getPsiClassAndType(getClassName(), project);
PsiElement context = psiClassAndType.first;
if(context == null)
{
throw EvaluateExceptionUtil.CANNOT_FIND_SOURCE_CLASS;
}
CodeFragmentFactory factory = DebuggerUtilsEx.findAppropriateCodeFragmentFactory(myReferenceExpression, context);
JavaCodeFragment codeFragment = factory.createCodeFragment(myReferenceExpression, overrideContext(context), project);
codeFragment.setThisType(psiClassAndType.second);
DebuggerUtils.checkSyntax(codeFragment);
cache.myPsiChildrenExpression = codeFragment instanceof PsiExpressionCodeFragment ? ((PsiExpressionCodeFragment) codeFragment).getExpression() : null;
try
{
cache.myEvaluator = factory.getEvaluatorBuilder().build(codeFragment, null);
}
catch(UnsupportedExpressionException ex)
{
ExpressionEvaluator eval = CompilingEvaluatorImpl.create(project, context, element -> codeFragment);
if(eval != null)
{
cache.myEvaluator = eval;
}
throw ex;
}
}
catch(EvaluateException e)
{
cache.myException = e;
}
myCache = new SoftReference<>(cache);
return cache;
}
protected PsiElement overrideContext(PsiElement context)
{
return context;
}
protected ExpressionEvaluator getEvaluator(final Project project) throws EvaluateException
{
Cache cache = myCache.get();
if(cache == null)
{
cache = PsiDocumentManager.getInstance(project).commitAndRunReadAction(() -> initEvaluatorAndChildrenExpression(project));
}
if(cache.myException != null)
{
throw cache.myException;
}
return cache.myEvaluator;
}
@Nullable
protected PsiExpression getPsiExpression(final Project project)
{
Cache cache = myCache.get();
if(cache == null)
{
cache = initEvaluatorAndChildrenExpression(project);
}
return cache.myPsiChildrenExpression;
}
}
|
apache-2.0
|
weichweich/AluShare
|
app/src/main/java/edu/kit/tm/pseprak2/alushare/view/adapter/VideoRequestHandler.java
|
1492
|
package edu.kit.tm.pseprak2.alushare.view.adapter;
import android.graphics.Bitmap;
import android.media.ThumbnailUtils;
import android.provider.MediaStore;
import com.squareup.picasso.Picasso;
import com.squareup.picasso.Request;
import com.squareup.picasso.RequestHandler;
import java.io.IOException;
/**
* A class that enables getting thumbnails from video files and using Picasso to set imageViews.
*
* Created by arthuranselm on 19.08.15.
*/
public class VideoRequestHandler extends RequestHandler{
public static final String video_Scheme = "video";
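    // Illustrative wiring, assuming Picasso's standard Builder/RequestHandler API and an
    // existing ImageView; the variable names (context, videoFile, imageView) are placeholders:
    //   Picasso picasso = new Picasso.Builder(context)
    //           .addRequestHandler(new VideoRequestHandler())
    //           .build();
    //   picasso.load(VideoRequestHandler.video_Scheme + ":" + videoFile.getAbsolutePath())
    //           .into(imageView);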
/**
* Checks if the scheme of the uri in data equals video_scheme and return the boolean.
* @param data the request
* @return true if valid uri else false
*/
@Override
public boolean canHandleRequest(Request data) {
String scheme = data.uri.getScheme();
return video_Scheme.equals(scheme);
}
/**
* Creates the result with the video thumbnail of the uri in the given request.
* @param request the request that contains the video uri
* @param networkPolicy the networkPolicy
* @return the result for Picasso
* @throws IOException
*/
@Override
public Result load(Request request, int networkPolicy) throws IOException {
Bitmap bitmap = ThumbnailUtils.createVideoThumbnail(request.uri.getPath(),
MediaStore.Images.Thumbnails.MINI_KIND);
return new Result(bitmap, Picasso.LoadedFrom.DISK);
}
}
|
apache-2.0
|
ApplETS/ETSMobile-Android2
|
app/src/main/java/ca/etsmtl/applets/etsmobile/ui/calendar_decorator/TodayDecorator.java
|
1069
|
package ca.etsmtl.applets.etsmobile.ui.calendar_decorator;
import android.content.Context;
import android.graphics.Typeface;
import android.text.style.StyleSpan;
import com.prolificinteractive.materialcalendarview.CalendarDay;
import com.prolificinteractive.materialcalendarview.DayViewDecorator;
import com.prolificinteractive.materialcalendarview.DayViewFacade;
import androidx.core.content.ContextCompat;
import ca.etsmtl.applets.etsmobile2.R;
/**
* Created by komlan on 08/04/16.
*/
public class TodayDecorator implements DayViewDecorator {
private CalendarDay today;
private Context context;
public TodayDecorator(Context context) {
today = CalendarDay.today();
this.context = context;
}
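    // Illustrative wiring, assuming a MaterialCalendarView instance named calendarView:
    //   calendarView.addDecorator(new TodayDecorator(context));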
@Override
public boolean shouldDecorate(CalendarDay day) {
return day.equals(today);
}
@Override
public void decorate(DayViewFacade view) {
view.setBackgroundDrawable(ContextCompat.getDrawable(context, R.drawable.calendar_today_circle));
view.addSpan(new StyleSpan(Typeface.BOLD));
}
}
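/*
 * A minimal usage sketch (assumes a MaterialCalendarView instance named calendarView,
 * which is not part of this file):
 *
 *   calendarView.addDecorator(new TodayDecorator(context));
 */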
|
apache-2.0
|
Kodestruct/Kodestruct.Design
|
Kodestruct.Concrete/ACI/ACI318_14/C17_Anchorage/Entities/TypeOfAnchorSleeve.cs
|
899
|
#region Copyright
/*Copyright (C) 2015 Konstantin Udilovich
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
#endregion
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Kodestruct.Concrete.ACI318_14.Anchorage
{
public enum TypeOfAnchorSleeve
{
ConstantStiffness,
SeparatedSleeve
}
}
|
apache-2.0
|
Cognifide/APM
|
app/aem/api/src/main/java/com/cognifide/apm/api/actions/ActionResult.java
|
1389
|
/*-
* ========================LICENSE_START=================================
* AEM Permission Management
* %%
* Copyright (C) 2013 Wunderman Thompson Technology
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =========================LICENSE_END==================================
*/
package com.cognifide.apm.api.actions;
import com.cognifide.apm.api.status.Status;
import java.util.List;
public interface ActionResult {
void logMessage(String message);
void logWarning(String warning);
void logError(String error);
void changeStatus(Status status, String message);
List<Message> getMessages();
void setAuthorizable(String authorizable);
ActionResult merge(ActionResult... actionResult);
ActionResult merge(List<ActionResult> actionResult);
String getAuthorizable();
Status getStatus();
}
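/*
 * A hedged usage sketch from an action implementation's point of view (the result
 * instance, groupName and alreadyExists are placeholders supplied by the caller):
 *
 *   result.logMessage("Processing authorizable " + groupName);
 *   result.setAuthorizable(groupName);
 *   if (alreadyExists) {
 *       result.logWarning("Authorizable already exists: " + groupName);
 *   }
 */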
|
apache-2.0
|
topoos/topoos_sdk_android
|
src/topoos/Objects/Track.java
|
2542
|
/**
* Copyright 2014-present topoos
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package topoos.Objects;
import java.io.Serializable;
import java.util.ArrayList;
/**
* The Class Track.
*
* @author topoos
*/
public class Track implements Serializable{
/**
* Serial version UID.
*/
private static final long serialVersionUID = 1L;
/** The id. */
private Integer id=null;
/** The name. */
private String name=null;
/** The device. */
private Integer device=null;
/** The positions. */
private ArrayList<Position> positions=null;
/**
* Instantiates a new track.
*
* @param id the id
* @param name the name
* @param device the device
* @param positions the positions
*/
public Track(Integer id, String name, Integer device,
ArrayList<Position> positions) {
super();
this.id = id;
this.name = name;
this.device = device;
this.positions = positions;
}
/**
* Gets the id.
*
* @return the id
*/
public Integer getId() {
return id;
}
/**
* Sets the id.
*
* @param id the id to set
*/
public void setId(Integer id) {
this.id = id;
}
/**
* Gets the name.
*
* @return the name
*/
public String getName() {
return name;
}
/**
* Sets the name.
*
* @param name the name to set
*/
public void setName(String name) {
this.name = name;
}
/**
* Gets the device.
*
* @return the device
*/
public Integer getDevice() {
return device;
}
/**
* Sets the device.
*
* @param device the device to set
*/
public void setDevice(Integer device) {
this.device = device;
}
/**
* Gets the positions.
*
* @return the positions
*/
public ArrayList<Position> getPositions() {
return positions;
}
/**
* Sets the positions.
*
* @param positions the positions to set
*/
public void setPositions(ArrayList<Position> positions) {
this.positions = positions;
}
}
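/*
 * A minimal construction sketch (the Position values are placeholders; Position is
 * defined elsewhere in this package):
 *
 *   ArrayList<Position> positions = new ArrayList<>();
 *   Track track = new Track(1, "Morning route", 42, positions);
 *   track.getPositions().add(somePosition); // somePosition is a hypothetical Position
 */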
|
apache-2.0
|
jamesgraves/cockroach
|
roachpb/data_test.go
|
13687
|
// Copyright 2014 The Cockroach Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
// implied. See the License for the specific language governing
// permissions and limitations under the License. See the AUTHORS file
// for names of contributors.
//
// Author: Spencer Kimball (spencer.kimball@gmail.com)
package roachpb
import (
"bytes"
"fmt"
"math"
"math/rand"
"reflect"
"testing"
"time"
"github.com/cockroachdb/cockroach/util/uuid"
)
// TestKeyNext tests that the method for creating lexicographic
// successors to byte slices works as expected.
func TestKeyNext(t *testing.T) {
a := Key("a")
aNext := a.Next()
if a.Equal(aNext) {
t.Errorf("expected key not equal to next")
}
if bytes.Compare(a, aNext) >= 0 {
t.Errorf("expected next key to be greater")
}
testCases := []struct {
key Key
next Key
}{
{nil, Key("\x00")},
{Key(""), Key("\x00")},
{Key("test key"), Key("test key\x00")},
{Key("\xff"), Key("\xff\x00")},
{Key("xoxo\x00"), Key("xoxo\x00\x00")},
}
for i, c := range testCases {
if !bytes.Equal(c.key.Next(), c.next) {
t.Errorf("%d: unexpected next bytes for %q: %q", i, c.key, c.key.Next())
}
}
}
func TestKeyPrefixEnd(t *testing.T) {
a := Key("a1")
aNext := a.Next()
aEnd := a.PrefixEnd()
if bytes.Compare(a, aEnd) >= 0 {
t.Errorf("expected end key to be greater")
}
if bytes.Compare(aNext, aEnd) >= 0 {
t.Errorf("expected end key to be greater than next")
}
testCases := []struct {
key Key
end Key
}{
{Key{}, KeyMax},
{Key{0}, Key{0x01}},
{Key{0xff}, Key{0xff}},
{Key{0xff, 0xff}, Key{0xff, 0xff}},
{KeyMax, KeyMax},
{Key{0xff, 0xfe}, Key{0xff, 0xff}},
{Key{0x00, 0x00}, Key{0x00, 0x01}},
{Key{0x00, 0xff}, Key{0x01, 0x00}},
{Key{0x00, 0xff, 0xff}, Key{0x01, 0x00, 0x00}},
}
for i, c := range testCases {
if !bytes.Equal(c.key.PrefixEnd(), c.end) {
t.Errorf("%d: unexpected prefix end bytes for %q: %q", i, c.key, c.key.PrefixEnd())
}
}
}
func TestKeyEqual(t *testing.T) {
a1 := Key("a1")
a2 := Key("a2")
if !a1.Equal(a1) {
t.Errorf("expected keys equal")
}
if a1.Equal(a2) {
t.Errorf("expected different keys not equal")
}
}
func TestKeyLess(t *testing.T) {
testCases := []struct {
a, b Key
less bool
}{
{nil, Key("\x00"), true},
{Key(""), Key("\x00"), true},
{Key("a"), Key("b"), true},
{Key("a\x00"), Key("a"), false},
{Key("a\x00"), Key("a\x01"), true},
}
for i, c := range testCases {
if (bytes.Compare(c.a, c.b) < 0) != c.less {
t.Fatalf("%d: unexpected %q < %q: %t", i, c.a, c.b, c.less)
}
}
}
func TestKeyCompare(t *testing.T) {
testCases := []struct {
a, b Key
compare int
}{
{nil, nil, 0},
{nil, Key("\x00"), -1},
{Key("\x00"), Key("\x00"), 0},
{Key(""), Key("\x00"), -1},
{Key("a"), Key("b"), -1},
{Key("a\x00"), Key("a"), 1},
{Key("a\x00"), Key("a\x01"), -1},
}
for i, c := range testCases {
if c.a.Compare(c.b) != c.compare {
t.Fatalf("%d: unexpected %q.Compare(%q): %d", i, c.a, c.b, c.compare)
}
}
}
// TestNextKey tests that the method for creating successors of a Key
// works as expected.
func TestNextKey(t *testing.T) {
testCases := []struct {
key Key
next Key
}{
{nil, Key("\x00")},
{Key(""), Key("\x00")},
{Key("test key"), Key("test key\x00")},
{Key("\xff\xff"), Key("\xff\xff\x00")},
{Key("xoxo\x00"), Key("xoxo\x00\x00")},
}
for i, c := range testCases {
if !c.key.Next().Equal(c.next) {
t.Fatalf("%d: unexpected next key for %q: %s", i, c.key, c.key.Next())
}
}
}
func TestKeyString(t *testing.T) {
if Key("hello").String() != `"hello"` {
t.Errorf("expected key to display pretty version: %s", Key("hello"))
}
if RKeyMax.String() != `"\xff\xff"` {
t.Errorf("expected key max to display pretty version: %s", RKeyMax)
}
}
func makeTS(walltime int64, logical int32) Timestamp {
return Timestamp{
WallTime: walltime,
Logical: logical,
}
}
func TestLess(t *testing.T) {
a := Timestamp{}
b := Timestamp{}
if a.Less(b) || b.Less(a) {
t.Errorf("expected %+v == %+v", a, b)
}
b = makeTS(1, 0)
if !a.Less(b) {
t.Errorf("expected %+v < %+v", a, b)
}
a = makeTS(1, 1)
if !b.Less(a) {
t.Errorf("expected %+v < %+v", b, a)
}
}
func TestEqual(t *testing.T) {
a := Timestamp{}
b := Timestamp{}
if !a.Equal(b) {
t.Errorf("expected %+v == %+v", a, b)
}
b = makeTS(1, 0)
if a.Equal(b) {
t.Errorf("expected %+v < %+v", a, b)
}
a = makeTS(1, 1)
if b.Equal(a) {
t.Errorf("expected %+v < %+v", b, a)
}
}
func TestTimestampNext(t *testing.T) {
testCases := []struct {
ts, expNext Timestamp
}{
{makeTS(1, 2), makeTS(1, 3)},
{makeTS(1, math.MaxInt32-1), makeTS(1, math.MaxInt32)},
{makeTS(1, math.MaxInt32), makeTS(2, 0)},
}
for i, c := range testCases {
if next := c.ts.Next(); !next.Equal(c.expNext) {
t.Errorf("%d: expected %s; got %s", i, c.expNext, next)
}
}
}
func TestTimestampPrev(t *testing.T) {
testCases := []struct {
ts, expPrev Timestamp
}{
{makeTS(1, 2), makeTS(1, 1)},
{makeTS(1, 1), makeTS(1, 0)},
{makeTS(1, 0), makeTS(0, math.MaxInt32)},
}
for i, c := range testCases {
if prev := c.ts.Prev(); !prev.Equal(c.expPrev) {
t.Errorf("%d: expected %s; got %s", i, c.expPrev, prev)
}
}
}
func TestValueChecksumEmpty(t *testing.T) {
k := []byte("key")
v := Value{}
// Before the checksum is initialized, verification always succeeds.
if err := v.Verify(k); err != nil {
t.Error(err)
}
if err := v.Verify([]byte("key2")); err != nil {
t.Error(err)
}
v.InitChecksum(k)
if err := v.Verify(k); err != nil {
t.Error(err)
}
}
func TestValueChecksumWithBytes(t *testing.T) {
k := []byte("key")
v := MakeValueFromString("abc")
v.InitChecksum(k)
if err := v.Verify(k); err != nil {
t.Error(err)
}
// Try a different key; should fail.
if err := v.Verify([]byte("key2")); err == nil {
t.Error("expected checksum verification failure on different key")
}
// Mess with value.
v.SetBytes([]byte("abcd"))
if err := v.Verify(k); err == nil {
t.Error("expected checksum verification failure on different value")
}
}
func TestSetGetChecked(t *testing.T) {
v := Value{}
v.SetBytes(nil)
if _, err := v.GetBytes(); err != nil {
t.Fatal(err)
}
v.SetFloat(1.1)
if _, err := v.GetFloat(); err != nil {
t.Fatal(err)
}
v.SetInt(1)
if _, err := v.GetInt(); err != nil {
t.Fatal(err)
}
if err := v.SetProto(&Value{}); err != nil {
t.Fatal(err)
}
if err := v.GetProto(&Value{}); err != nil {
t.Fatal(err)
}
if _, err := v.GetBytes(); err != nil {
t.Fatal(err)
}
if err := v.SetProto(&InternalTimeSeriesData{}); err != nil {
t.Fatal(err)
}
if _, err := v.GetTimeseries(); err != nil {
t.Fatal(err)
}
v.SetTime(time.Time{})
if _, err := v.GetTime(); err != nil {
t.Fatal(err)
}
}
func TestTxnEqual(t *testing.T) {
tc := []struct {
txn1, txn2 *Transaction
eq bool
}{
{nil, nil, true},
{&Transaction{}, nil, false},
{&Transaction{ID: []byte("A")}, &Transaction{ID: []byte("B")}, false},
}
for i, c := range tc {
if c.txn1.Equal(c.txn2) != c.txn2.Equal(c.txn1) || c.txn1.Equal(c.txn2) != c.eq {
t.Errorf("%d: wanted %t", i, c.eq)
}
}
}
func TestTxnIDEqual(t *testing.T) {
txn1, txn2 := uuid.NewUUID4(), uuid.NewUUID4()
txn1Copy := append([]byte(nil), txn1...)
testCases := []struct {
a, b []byte
expEqual bool
}{
{txn1, txn1, true},
{txn1, txn2, false},
{txn1, txn1Copy, true},
}
for i, test := range testCases {
if eq := TxnIDEqual(test.a, test.b); eq != test.expEqual {
t.Errorf("%d: expected %q == %q: %t; got %t", i, test.a, test.b, test.expEqual, eq)
}
}
}
func TestTransactionString(t *testing.T) {
id := []byte("ת\x0f^\xe4-Fؽ\xf7\x16\xe4\xf9\xbe^\xbe")
ts1 := makeTS(10, 11)
txn := Transaction{
Name: "name",
Key: Key("foo"),
ID: id,
Priority: 957356782,
Isolation: SERIALIZABLE,
Status: COMMITTED,
Epoch: 2,
LastHeartbeat: &ts1,
Timestamp: makeTS(20, 21),
OrigTimestamp: makeTS(30, 31),
MaxTimestamp: makeTS(40, 41),
}
expStr := `"name" id=d7aa0f5e key="foo" rw=false pri=44.58039917 iso=SERIALIZABLE stat=COMMITTED ` +
`epo=2 ts=0.000000020,21 orig=0.000000030,31 max=0.000000040,41`
if str := txn.String(); str != expStr {
t.Errorf("expected txn %s; got %s", expStr, str)
}
}
// TestNodeList verifies that its exported methods Add() and Contain()
// operate as expected.
func TestNodeList(t *testing.T) {
sn := NodeList{}
items := append([]int{109, 104, 102, 108, 1000}, rand.Perm(100)...)
for i := range items {
n := NodeID(items[i])
if sn.Contains(n) {
t.Fatalf("%d: false positive hit for %d on slice %v",
i, n, sn.Nodes)
}
// Add this item and, for good measure, all the previous ones.
for j := i; j >= 0; j-- {
sn.Add(NodeID(items[j]))
}
if nodes := sn.Nodes; len(nodes) != i+1 {
t.Fatalf("%d: missing values or duplicates: %v",
i, nodes)
}
if !sn.Contains(n) {
t.Fatalf("%d: false negative hit for %d on slice %v",
i, n, sn.Nodes)
}
}
}
func TestTransactionUpdate(t *testing.T) {
nodes := NodeList{
Nodes: []int32{101, 103, 105},
}
ts := makeTS(10, 11)
txn := Transaction{
Name: "name",
Key: Key("foo"),
ID: uuid.NewUUID4(),
Priority: 957356782,
Isolation: SNAPSHOT,
Status: COMMITTED,
Epoch: 2,
LastHeartbeat: &ts,
Timestamp: makeTS(20, 21),
OrigTimestamp: makeTS(30, 31),
MaxTimestamp: makeTS(40, 41),
CertainNodes: nodes,
Writing: true,
}
noZeroField := func(txn Transaction) error {
ele := reflect.ValueOf(&txn).Elem()
eleT := ele.Type()
for i := 0; i < ele.NumField(); i++ {
f := ele.Field(i)
zero := reflect.Zero(f.Type())
if reflect.DeepEqual(f.Interface(), zero.Interface()) {
return fmt.Errorf("expected %s field to be non-zero", eleT.Field(i).Name)
}
}
return nil
}
if err := noZeroField(txn); err != nil {
t.Fatal(err)
}
var txn2 Transaction
txn2.Update(&txn)
if err := noZeroField(txn2); err != nil {
t.Fatal(err)
}
}
func TestIsPrev(t *testing.T) {
for i, tc := range []struct {
k, m Key
ok bool
}{
{k: Key(""), m: Key{0}, ok: true},
{k: nil, m: nil, ok: false},
{k: Key("a"), m: Key{'a', 0, 0}, ok: false},
{k: Key{'z', 'a', 0}, m: Key{'z', 'a'}, ok: false},
{k: Key("bro"), m: Key{'b', 'r', 'o', 0}, ok: true},
{k: Key("foo"), m: Key{'b', 'a', 'r', 0}, ok: false},
} {
if tc.ok != tc.k.IsPrev(tc.m) {
t.Errorf("%d: wanted %t", i, tc.ok)
}
}
}
// TestRSpanContains verifies methods to check whether a key
// or key range is contained within the span.
func TestRSpanContains(t *testing.T) {
rs := RSpan{Key: []byte("a"), EndKey: []byte("b")}
testData := []struct {
start, end []byte
contains bool
}{
// Single keys.
{[]byte("a"), []byte("a"), true},
{[]byte("a"), nil, true},
{[]byte("aa"), []byte("aa"), true},
{[]byte("`"), []byte("`"), false},
{[]byte("b"), []byte("b"), false},
{[]byte("b"), nil, false},
{[]byte("c"), []byte("c"), false},
// Key ranges.
{[]byte("a"), []byte("b"), true},
{[]byte("a"), []byte("aa"), true},
{[]byte("aa"), []byte("b"), true},
{[]byte("0"), []byte("9"), false},
{[]byte("`"), []byte("a"), false},
{[]byte("b"), []byte("bb"), false},
{[]byte("0"), []byte("bb"), false},
{[]byte("aa"), []byte("bb"), false},
{[]byte("b"), []byte("a"), false},
}
for i, test := range testData {
if bytes.Compare(test.start, test.end) == 0 {
if rs.ContainsKey(test.start) != test.contains {
t.Errorf("%d: expected key %q within range", i, test.start)
}
}
if rs.ContainsKeyRange(test.start, test.end) != test.contains {
t.Errorf("%d: expected key %q within range", i, test.start)
}
}
}
// TestRSpanIntersect verifies rSpan.intersect.
func TestRSpanIntersect(t *testing.T) {
rs := RSpan{Key: RKey("b"), EndKey: RKey("e")}
testData := []struct {
startKey, endKey RKey
expected RSpan
}{
// Partially overlapping.
{RKey("a"), RKey("c"), RSpan{Key: RKey("b"), EndKey: RKey("c")}},
{RKey("d"), RKey("f"), RSpan{Key: RKey("d"), EndKey: RKey("e")}},
// Descriptor surrounds the span.
{RKey("a"), RKey("f"), RSpan{Key: RKey("b"), EndKey: RKey("e")}},
// Span surrounds the descriptor.
{RKey("c"), RKey("d"), RSpan{Key: RKey("c"), EndKey: RKey("d")}},
// Descriptor has the same range as the span.
{RKey("b"), RKey("e"), RSpan{Key: RKey("b"), EndKey: RKey("e")}},
}
for i, test := range testData {
desc := RangeDescriptor{}
desc.StartKey = test.startKey
desc.EndKey = test.endKey
actual, err := rs.Intersect(&desc)
if err != nil {
t.Error(err)
continue
}
if bytes.Compare(actual.Key, test.expected.Key) != 0 ||
bytes.Compare(actual.EndKey, test.expected.EndKey) != 0 {
t.Errorf("%d: expected RSpan [%q,%q) but got [%q,%q)",
i, test.expected.Key, test.expected.EndKey,
actual.Key, actual.EndKey)
}
}
// Error scenarios
errorTestData := []struct {
startKey, endKey RKey
}{
{RKey("a"), RKey("b")},
{RKey("e"), RKey("f")},
{RKey("f"), RKey("g")},
}
for i, test := range errorTestData {
desc := RangeDescriptor{}
desc.StartKey = test.startKey
desc.EndKey = test.endKey
if _, err := rs.Intersect(&desc); err == nil {
t.Errorf("%d: unexpected sucess", i)
}
}
}
|
apache-2.0
|
lky5230/lky5230.github.io
|
test/resource/123/123.js
|
7094
|
'use strict';
/* __V3D_TEMPLATE__ - template-based file; delete this line to prevent this file from being updated */
window.addEventListener('load', function() {
var CONTAINER_ID = 'v3d-container';
(function() {
var params = v3d.AppUtils.getPageParams();
var PUZZLES_DIR = '/puzzles/';
var logicURL = params.logic ? params.logic : '__LOGIC__visual_logic.js'.replace('__LOGIC__', '');
var sceneURL = params.load ? params.load : '__URL__123.gltf'.replace('__URL__', '');
if (!sceneURL) {
console.log('No scene URL specified');
return;
}
// some puzzles can benefit from cache
v3d.Cache.enabled = true;
if (v3d.AppUtils.isXML(logicURL)) {
var logicURLJS = logicURL.match(/(.*)\.xml$/)[1] + '.js';
new v3d.PuzzlesLoader().loadEditorWithLogic(PUZZLES_DIR, logicURLJS,
function() {
var initOptions = v3d.PL ? v3d.PL.execInitPuzzles({
container: CONTAINER_ID }).initOptions
: { useFullscreen: true };
var appInstance = loadScene(sceneURL, initOptions);
v3d.PE.viewportUseAppInstance(appInstance);
}
);
} else if (v3d.AppUtils.isJS(logicURL)) {
new v3d.PuzzlesLoader().loadLogic(logicURL, function() {
var initOptions = v3d.PL ? v3d.PL.execInitPuzzles({
container: CONTAINER_ID }).initOptions
: { useFullscreen: true };
loadScene(sceneURL, initOptions);
});
} else {
loadScene(sceneURL, { useFullscreen: true });
}
})();
function loadScene(sceneURL, initOptions) {
initOptions = initOptions || {};
var ctxSettings = {};
if (initOptions.useBkgTransp) ctxSettings.alpha = true;
if (initOptions.preserveDrawBuf) ctxSettings.preserveDrawingBuffer = true;
var preloader = initOptions.useCustomPreloader
? createCustomPreloader(initOptions.preloaderProgressCb,
initOptions.preloaderEndCb)
: new v3d.SimplePreloader({ container: CONTAINER_ID });
if (v3d.PE) {
puzzlesEditorPreparePreloader(preloader);
}
var app = new v3d.App(CONTAINER_ID, ctxSettings, preloader);
if (initOptions.useBkgTransp) {
app.clearBkgOnLoad = true;
app.renderer.setClearColor(0x000000, 0);
}
// namespace for communicating with code generated by Puzzles
app.ExternalInterface = {};
prepareExternalInterface(app);
if (initOptions.preloaderStartCb) initOptions.preloaderStartCb();
if (initOptions.useFullscreen) {
initFullScreen();
} else {
var fsButton = document.getElementById('fullscreen_button');
if (fsButton) fsButton.style.display = 'none';
}
sceneURL = initOptions.useCompAssets ? sceneURL + '.xz' : sceneURL;
app.loadScene(sceneURL, function() {
app.enableControls();
app.run();
if (v3d.PE) v3d.PE.updateAppInstance(app);
if (v3d.PL) v3d.PL.init(app, initOptions);
runCode(app);
}, null, function() {
console.log('Can\'t load the scene ' + sceneURL);
});
return app;
}
function createCustomPreloader(updateCb, finishCb) {
function CustomPreloader() {
v3d.Preloader.call(this);
}
CustomPreloader.prototype = Object.assign(Object.create(v3d.Preloader.prototype), {
onUpdate: function(percentage) {
v3d.Preloader.prototype.onUpdate.call(this, percentage);
if (updateCb) updateCb(percentage);
},
onFinish: function() {
v3d.Preloader.prototype.onFinish.call(this);
if (finishCb) finishCb();
}
});
return new CustomPreloader();
}
/**
* Modify the app's preloader to track the loading process in the Puzzles Editor.
*/
function puzzlesEditorPreparePreloader(preloader) {
// backward compatibility for loading new projects within the old Puzzles Editor
if (v3d.PE.loadingUpdateCb !== undefined && v3d.PE.loadingFinishCb !== undefined) {
var _onUpdate = preloader.onUpdate.bind(preloader);
preloader.onUpdate = function(percentage) {
_onUpdate(percentage);
v3d.PE.loadingUpdateCb(percentage);
}
var _onFinish = preloader.onFinish.bind(preloader);
preloader.onFinish = function() {
_onFinish();
v3d.PE.loadingFinishCb();
}
}
}
function initFullScreen() {
var fsButton = document.getElementById('fullscreen_button');
if (!fsButton) return;
var container = document.getElementById(CONTAINER_ID);
if (document.fullscreenEnabled ||
document.webkitFullscreenEnabled ||
document.mozFullScreenEnabled ||
document.msFullscreenEnabled)
fsButton.style.display = 'inline';
fsButton.addEventListener('click', function(event) {
event.stopPropagation();
if (document.fullscreenElement ||
document.webkitFullscreenElement ||
document.mozFullScreenElement ||
document.msFullscreenElement) {
exitFullscreen();
} else
requestFullscreen(container);
});
function changeFullscreen() {
if (document.fullscreenElement ||
document.webkitFullscreenElement ||
document.mozFullScreenElement ||
document.msFullscreenElement) {
fsButton.classList.remove('fullscreen-open');
fsButton.classList.add('fullscreen-close');
} else {
fsButton.classList.remove('fullscreen-close');
fsButton.classList.add('fullscreen-open');
}
}
document.addEventListener('webkitfullscreenchange', changeFullscreen);
document.addEventListener('mozfullscreenchange', changeFullscreen);
document.addEventListener('msfullscreenchange', changeFullscreen);
document.addEventListener('fullscreenchange', changeFullscreen);
function requestFullscreen(elem) {
if (elem.requestFullscreen)
elem.requestFullscreen();
else if (elem.mozRequestFullScreen)
elem.mozRequestFullScreen();
else if (elem.webkitRequestFullscreen)
elem.webkitRequestFullscreen();
else if (elem.msRequestFullscreen)
elem.msRequestFullscreen();
}
function exitFullscreen() {
if (document.exitFullscreen)
document.exitFullscreen();
else if (document.mozCancelFullScreen)
document.mozCancelFullScreen();
else if (document.webkitExitFullscreen)
document.webkitExitFullscreen();
else if (document.msExitFullscreen)
document.msExitFullscreen();
}
}
function prepareExternalInterface(app) {
// register functions in the app.ExternalInterface to call them from Puzzles, e.g:
// app.ExternalInterface.myJSFunction = function() {
// console.log('Hello, World!');
// }
}
function runCode(app) {
// add your code here, e.g. console.log('Hello, World!');
}
});
|
apache-2.0
|
InstaList/instalist-android
|
app/src/main/java/org/noorganization/instalist/view/sidedrawermodelwrapper/helper/IShoppingListHelper.java
|
2229
|
/*
* Copyright 2016 Tino Siegmund, Michael Wodniok
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.noorganization.instalist.view.sidedrawermodelwrapper.helper;
import android.view.ContextMenu;
import android.view.MenuItem;
import android.view.View;
import org.noorganization.instalist.view.interfaces.ICategoryAdapter;
import org.noorganization.instalist.view.interfaces.IShoppingListAdapter;
/**
* General interface for communicating with the individual helper classes.
* Created by tinos_000 on 25.06.2015.
*/
public interface IShoppingListHelper extends IShoppingListAdapter, ICategoryAdapter{
/**
* Called to create a context menu relating to the current list.
*
* @param _Menu the context menu where the menu items should be added.
* @param _View the View given by onCreateContextMenu.
* @param _MenuInfo the MenuInfo given by onCreateContextMenu.
* @return the extended ContextMenu.
*/
ContextMenu createContextMenu(ContextMenu _Menu, View _View, ContextMenu.ContextMenuInfo _MenuInfo);
void onContextMenuItemClicked(MenuItem _Item);
/**
* Checks if the current ListRenderer is active.
*
* @return true if active, false if inactive.
*/
boolean isActive();
/**
* Sets whether this list should be rendered and updates its visibility (visible or gone) accordingly.
*
* @param _IsActive true if the ShoppingList should be rendered, false otherwise.
*/
void setActiveState(boolean _IsActive);
/**
* Updates the underlying adapter data to the current state, avoiding recreation of the helper structure.
*/
void updateAdapter();
}
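/*
 * A minimal implementation sketch (class and field names are illustrative assumptions,
 * not part of this interface; the adapter methods are omitted):
 *
 *   public class CategoryListHelper implements IShoppingListHelper {
 *       private boolean mIsActive;
 *
 *       @Override
 *       public boolean isActive() { return mIsActive; }
 *
 *       @Override
 *       public void setActiveState(boolean _IsActive) {
 *           mIsActive = _IsActive;
 *           // show or hide the backing view here
 *       }
 *       // createContextMenu, onContextMenuItemClicked, updateAdapter and the
 *       // inherited adapter methods would follow.
 *   }
 */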
|
apache-2.0
|
vincenzomazzeo/map-engine
|
src/main/java/it/alidays/mapengine/core/fetch/ForEachMethod.java
|
1717
|
/*
* Copyright 2015 Alidays S.p.A.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package it.alidays.mapengine.core.fetch;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.dom4j.Element;
public class ForEachMethod implements ToEntityMethod {
private final String path;
private final Map<String, Binder> binderMap;
protected ForEachMethod(String path) {
this.path = path;
this.binderMap = new LinkedHashMap<>();
}
@Override
public List<Map<String, Object>> run(Element baseNode) {
List<Map<String, Object>> result = new ArrayList<>();
@SuppressWarnings("unchecked")
List<Element> nodes = baseNode.selectNodes(this.path);
for (Element node : nodes) {
Map<String, Object> tupla = new HashMap<>();
result.add(tupla);
for (String attribute : this.binderMap.keySet()) {
Binder binder = this.binderMap.get(attribute);
tupla.put(attribute, binder.bind(node));
}
}
return result;
}
protected void addBinder(String attribute, Binder binder) {
this.binderMap.put(attribute, binder);
}
}
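/*
 * A hedged in-package usage sketch (the constructor and addBinder are protected, so this
 * only works from the same package; the Binder lambda assumes Binder exposes a single
 * bind(Element) method, which is inferred from its use above):
 *
 *   ForEachMethod method = new ForEachMethod("//booking/passenger");
 *   method.addBinder("name", node -> node.valueOf("@name"));
 *   List<Map<String, Object>> rows = method.run(rootElement);
 */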
|
apache-2.0
|
dropbox/bazel
|
src/test/java/com/google/devtools/build/lib/analysis/constraints/ConstraintsTest.java
|
54776
|
// Copyright 2015 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.analysis.constraints;
import static com.google.common.truth.Truth.assertThat;
import com.google.devtools.build.lib.analysis.BaseRuleClasses;
import com.google.devtools.build.lib.analysis.ConfiguredRuleClassProvider;
import com.google.devtools.build.lib.analysis.RuleDefinition;
import com.google.devtools.build.lib.analysis.RuleDefinitionEnvironment;
import com.google.devtools.build.lib.analysis.util.MockRule;
import com.google.devtools.build.lib.cmdline.Label;
import com.google.devtools.build.lib.packages.Attribute;
import com.google.devtools.build.lib.packages.BuildType;
import com.google.devtools.build.lib.packages.RuleClass;
import com.google.devtools.build.lib.testutil.TestRuleClassProvider;
import com.google.devtools.build.lib.testutil.UnknownRuleConfiguredTarget;
import com.google.devtools.build.lib.util.FileTypeSet;
import java.util.Set;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/** Tests for the constraint enforcement system. */
@RunWith(JUnit4.class)
public class ConstraintsTest extends AbstractConstraintsTest {
@Before
public final void createBuildFile() throws Exception {
// Support files for RuleClassWithImplicitAndLateBoundDefaults:
scratch.file("helpers/BUILD",
"sh_library(name = 'implicit', srcs = ['implicit.sh'])",
"sh_library(name = 'latebound', srcs = ['latebound.sh'])",
"sh_library(name = 'default', srcs = ['default.sh'])");
scratch.file("config/BUILD",
"config_setting(name = 'a', values = {'define': 'mode=a'})",
"config_setting(name = 'b', values = {'define': 'mode=b'})");
}
/**
* Dummy rule class for testing rule class defaults. This class applies valid defaults. Note
* that the specified environments must be independently created.
*/
private static final class RuleClassDefaultRule implements RuleDefinition {
@Override
public RuleClass build(RuleClass.Builder builder, RuleDefinitionEnvironment env) {
return builder
.setUndocumented()
.compatibleWith(Label.parseAbsoluteUnchecked("//buildenv/rule_class_compat:b"))
.restrictedTo(Label.parseAbsoluteUnchecked("//buildenv/rule_class_restrict:d"))
.build();
}
@Override
public Metadata getMetadata() {
return RuleDefinition.Metadata.builder()
.name("rule_class_default")
.ancestors(BaseRuleClasses.RuleBase.class)
.factoryClass(UnknownRuleConfiguredTarget.class)
.build();
}
}
/**
* Dummy rule class for testing rule class defaults. This class applies invalid defaults. Note
* that the specified environments must be independently created.
*/
private static final MockRule BAD_RULE_CLASS_DEFAULT_RULE =
() ->
MockRule.define(
"bad_rule_class_default",
(builder, env) ->
builder
.setUndocumented()
// These defaults are invalid since compatibleWith and restrictedTo can't mix
// environments from the same group.
.compatibleWith(
Label.parseAbsoluteUnchecked("//buildenv/rule_class_compat:a"))
.restrictedTo(
Label.parseAbsoluteUnchecked("//buildenv/rule_class_compat:b")));
private static final MockRule RULE_WITH_IMPLICIT_AND_LATEBOUND_DEFAULTS =
() ->
MockRule.define(
"rule_with_implicit_and_latebound_deps",
(builder, env) ->
builder
.setUndocumented()
.add(
Attribute.attr("$implicit", BuildType.LABEL)
.value(Label.parseAbsoluteUnchecked("//helpers:implicit")))
.add(
Attribute.attr(":latebound", BuildType.LABEL)
.value(
Attribute.LateBoundDefault.fromConstantForTesting(
Label.parseAbsoluteUnchecked("//helpers:latebound"))))
.add(
Attribute.attr("normal", BuildType.LABEL)
.allowedFileTypes(FileTypeSet.NO_FILE)
.value(Label.parseAbsoluteUnchecked("//helpers:default"))));
private static final MockRule RULE_WITH_ENFORCED_IMPLICIT_ATTRIBUTE = () -> MockRule.define(
"rule_with_enforced_implicit_deps",
(builder, env) ->
builder
.setUndocumented()
.add(Attribute.attr("$implicit", BuildType.LABEL)
.value(Label.parseAbsoluteUnchecked("//helpers:implicit"))
.checkConstraints()));
private static final MockRule RULE_WITH_SKIPPED_ATTRIBUTE = () -> MockRule.define(
"rule_with_skipped_attr",
(builder, env) ->
builder
.setUndocumented()
.add(Attribute.attr("some_attr", BuildType.LABEL)
.allowedFileTypes(FileTypeSet.NO_FILE)
.dontCheckConstraints()));
private static final MockRule CONSTRAINT_EXEMPT_RULE_CLASS = () -> MockRule.define(
"totally_free_rule",
(builder, env) ->
builder
.setUndocumented()
.exemptFromConstraintChecking(
"for testing removal of restricted_to / compatible_with"));
/**
* Injects the rule class default rules into the default test rule class provider.
*/
@Override
protected ConfiguredRuleClassProvider getRuleClassProvider() {
ConfiguredRuleClassProvider.Builder builder = new ConfiguredRuleClassProvider.Builder();
TestRuleClassProvider.addStandardRules(builder);
builder.addRuleDefinition(new RuleClassDefaultRule());
builder.addRuleDefinition(BAD_RULE_CLASS_DEFAULT_RULE);
builder.addRuleDefinition(RULE_WITH_IMPLICIT_AND_LATEBOUND_DEFAULTS);
builder.addRuleDefinition(RULE_WITH_ENFORCED_IMPLICIT_ATTRIBUTE);
builder.addRuleDefinition(RULE_WITH_SKIPPED_ATTRIBUTE);
builder.addRuleDefinition(CONSTRAINT_EXEMPT_RULE_CLASS);
return builder.build();
}
/**
* Writes the environments and environment groups referred to by the rule class defaults.
*/
private void writeRuleClassDefaultEnvironments() throws Exception {
new EnvironmentGroupMaker("buildenv/rule_class_compat").setEnvironments("a", "b")
.setDefaults("a").make();
new EnvironmentGroupMaker("buildenv/rule_class_restrict").setEnvironments("c", "d")
.setDefaults("c").make();
}
@Test
public void packageErrorOnEnvironmentGroupWithMissingEnvironments() throws Exception {
scratch.file("buildenv/envs/BUILD",
"environment(name = 'env1')",
"environment(name = 'env2')",
"environment_group(",
" name = 'envs',",
" environments = [':env1', ':en2'],",
" defaults = [':env1'])");
reporter.removeHandler(failFastHandler);
assertThat(scratchConfiguredTarget("foo", "g",
"genrule("
+ " name = 'g',"
+ " srcs = [],"
+ " outs = ['g.out'],"
+ " cmd = '',"
+ " restricted_to = ['//buildenv/envs:env1'])"))
.isNull();
assertContainsEvent("environment //buildenv/envs:en2 does not exist");
}
/**
* By default, a rule *implicitly* supports all defaults, meaning the explicitly known
* environment set is empty.
*/
@Test
public void defaultSupportedEnvironments() throws Exception {
new EnvironmentGroupMaker("buildenv/foo").setEnvironments("a", "b").setDefaults("a").make();
String ruleDef = getDependencyRule();
assertThat(supportedEnvironments("dep", ruleDef)).isEmpty();
}
/**
* "Constraining" a rule's environments explicitly sets them.
*/
@Test
public void constrainedSupportedEnvironments() throws Exception {
new EnvironmentGroupMaker("buildenv/foo").setEnvironments("a", "b", "c").setDefaults("a")
.make();
String ruleDef = getDependencyRule(constrainedTo("//buildenv/foo:c"));
assertThat(supportedEnvironments("dep", ruleDef))
.containsExactlyElementsIn(asLabelSet("//buildenv/foo:c"));
}
/**
* Specifying compatibility adds the specified environments to the defaults.
*/
@Test
public void compatibleSupportedEnvironments() throws Exception {
new EnvironmentGroupMaker("buildenv/foo").setEnvironments("a", "b", "c").setDefaults("a")
.make();
String ruleDef = getDependencyRule(compatibleWith("//buildenv/foo:c"));
assertThat(supportedEnvironments("dep", ruleDef))
.containsExactlyElementsIn(asLabelSet("//buildenv/foo:a", "//buildenv/foo:c"));
}
/**
* A rule can't support *no* environments.
*/
@Test
public void supportedEnvironmentsConstrainedtoNothing() throws Exception {
new EnvironmentGroupMaker("buildenv/foo").setEnvironments("a", "b").setDefaults("a").make();
reporter.removeHandler(failFastHandler);
String ruleDef = getDependencyRule(constrainedTo());
assertThat(scratchConfiguredTarget("hello", "dep", ruleDef)).isNull();
assertContainsEvent("attribute cannot be empty");
}
/**
* Restrict the environments within one group, declare compatibility for another.
*/
@Test
public void supportedEnvironmentsInMultipleGroups() throws Exception {
new EnvironmentGroupMaker("buildenv/foo").setEnvironments("a", "b").setDefaults("a").make();
new EnvironmentGroupMaker("buildenv/bar").setEnvironments("c", "d").setDefaults("c").make();
String ruleDef = getDependencyRule(
constrainedTo("//buildenv/foo:b"), compatibleWith("//buildenv/bar:d"));
assertThat(supportedEnvironments("dep", ruleDef))
.containsExactlyElementsIn(
asLabelSet("//buildenv/foo:b", "//buildenv/bar:c", "//buildenv/bar:d"));
}
/**
* The same label can't appear in both a constraint and a compatibility declaration.
*/
@Test
public void sameEnvironmentCompatibleAndRestricted() throws Exception {
new EnvironmentGroupMaker("buildenv/foo").setEnvironments("a", "b").setDefaults("a").make();
reporter.removeHandler(failFastHandler);
String ruleDef = getDependencyRule(
constrainedTo("//buildenv/foo:b"), compatibleWith("//buildenv/foo:b"));
assertThat(scratchConfiguredTarget("hello", "dep", ruleDef)).isNull();
assertContainsEvent("//buildenv/foo:b cannot appear both here and in restricted_to");
}
/**
* Two labels from the same group can't appear in different attributes.
*/
@Test
public void sameGroupCompatibleAndRestricted() throws Exception {
new EnvironmentGroupMaker("buildenv/foo").setEnvironments("a", "b").setDefaults("a").make();
reporter.removeHandler(failFastHandler);
String ruleDef = getDependencyRule(
constrainedTo("//buildenv/foo:a"), compatibleWith("//buildenv/foo:b"));
assertThat(scratchConfiguredTarget("hello", "dep", ruleDef)).isNull();
assertContainsEvent(
"//buildenv/foo:b and //buildenv/foo:a belong to the same environment group");
}
/**
* Tests that rule class defaults change a rule's default set of environments.
*/
@Test
public void supportedEnvironmentsRuleClassDefaults() throws Exception {
writeRuleClassDefaultEnvironments();
String ruleDef = "rule_class_default(name = 'a')";
Set<Label> expectedEnvironments = asLabelSet("//buildenv/rule_class_compat:a",
"//buildenv/rule_class_compat:b", "//buildenv/rule_class_restrict:d");
assertThat(supportedEnvironments("a", ruleDef)).containsExactlyElementsIn(expectedEnvironments);
}
/**
* Tests that explicit declarations override rule class defaults.
*/
@Test
public void explicitAttributesOverrideRuleClassDefaults() throws Exception {
writeRuleClassDefaultEnvironments();
String ruleDef = "rule_class_default("
+ " name = 'a',"
+ " compatible_with = ['//buildenv/rule_class_restrict:c'],"
+ " restricted_to = ['//buildenv/rule_class_compat:a'],"
+ ")";
Set<Label> expectedEnvironments = asLabelSet("//buildenv/rule_class_compat:a",
"//buildenv/rule_class_restrict:c", "//buildenv/rule_class_restrict:d");
assertThat(supportedEnvironments("a", ruleDef)).containsExactlyElementsIn(expectedEnvironments);
}
/**
* Tests that a rule's "known" supported environments includes those from groups referenced
* in rule class defaults but not in explicit rule attributes.
*/
@Test
public void knownEnvironmentsIncludesThoseFromRuleClassDefaults() throws Exception {
writeRuleClassDefaultEnvironments();
new EnvironmentGroupMaker("buildenv/foo").setEnvironments("a", "b").setDefaults("a").make();
String ruleDef = "rule_class_default("
+ " name = 'a',"
+ " restricted_to = ['//buildenv/foo:b'],"
+ ")";
Set<Label> expectedEnvironments = asLabelSet("//buildenv/rule_class_compat:a",
"//buildenv/rule_class_compat:b", "//buildenv/rule_class_restrict:d",
"//buildenv/foo:b");
assertThat(supportedEnvironments("a", ruleDef)).containsExactlyElementsIn(expectedEnvironments);
}
/**
* Tests that environments from the same group can't appear in both restriction and
* compatibility rule class defaults.
*/
@Test
public void sameEnvironmentRuleClassCompatibleAndRestricted() throws Exception {
writeRuleClassDefaultEnvironments();
reporter.removeHandler(failFastHandler);
String ruleDef = "bad_rule_class_default(name = 'a')";
assertThat(scratchConfiguredTarget("hello", "a", ruleDef)).isNull();
assertContainsEvent("//buildenv/rule_class_compat:a and //buildenv/rule_class_compat:b "
+ "belong to the same environment group");
}
/**
* Tests that a dependency is valid if both rules implicitly inherit all default environments.
*/
@Test
public void allDefaults() throws Exception {
new EnvironmentGroupMaker("buildenv/foo").setEnvironments("a", "b").setDefaults("a").make();
scratch.file("hello/BUILD",
getDependencyRule(),
getDependingRule());
assertThat(getConfiguredTarget("//hello:main")).isNotNull();
assertNoEvents();
}
/**
* Tests that a dependency is valid when both rules explicitly declare the same constraints.
*/
@Test
public void sameConstraintsDeclaredExplicitly() throws Exception {
new EnvironmentGroupMaker("buildenv/foo").setEnvironments("a", "b").setDefaults("a").make();
scratch.file("hello/BUILD",
getDependencyRule(constrainedTo("//buildenv/foo:b")),
getDependingRule(constrainedTo("//buildenv/foo:b")));
assertThat(getConfiguredTarget("//hello:main")).isNotNull();
assertNoEvents();
}
/**
* Tests that a dependency is valid when both the depender and dependency explicitly declare
* their constraints and the depender supports a subset of the dependency's environments.
*/
@Test
public void validConstraintsDeclaredExplicitly() throws Exception {
new EnvironmentGroupMaker("buildenv/foo").setEnvironments("a", "b").setDefaults("a").make();
scratch.file("hello/BUILD",
getDependencyRule(constrainedTo("//buildenv/foo:a", "//buildenv/foo:b")),
getDependingRule(constrainedTo("//buildenv/foo:b")));
assertThat(getConfiguredTarget("//hello:main")).isNotNull();
assertNoEvents();
}
/**
* Tests that a dependency is invalid when both the depender and dependency explicitly declare
* their constraints and the depender supports an environment the dependency doesn't.
*/
@Test
public void invalidConstraintsDeclaredExplicitly() throws Exception {
new EnvironmentGroupMaker("buildenv/foo").setEnvironments("a", "b").setDefaults("a").make();
scratch.file("hello/BUILD",
getDependencyRule(constrainedTo("//buildenv/foo:b")),
getDependingRule(constrainedTo("//buildenv/foo:a", "//buildenv/foo:b")));
reporter.removeHandler(failFastHandler);
assertThat(getConfiguredTarget("//hello:main")).isNull();
assertContainsEvent(
"dependency //hello:dep doesn't support expected environment: //buildenv/foo:a");
}
/**
* Tests that a dependency is valid when both rules add the same set of environments to their
* defaults.
*/
@Test
public void sameCompatibilityConstraints() throws Exception {
new EnvironmentGroupMaker("buildenv/foo").setEnvironments("a", "b", "c").setDefaults("a")
.make();
scratch.file("hello/BUILD",
getDependencyRule(compatibleWith("//buildenv/foo:b", "//buildenv/foo:c")),
getDependingRule(compatibleWith("//buildenv/foo:b", "//buildenv/foo:c")));
assertThat(getConfiguredTarget("//hello:main")).isNotNull();
assertNoEvents();
}
/**
* Tests that a dependency is valid when both rules add environments to their defaults and
* the depender only adds environments also added by the dependency.
*/
@Test
public void validCompatibilityConstraints() throws Exception {
new EnvironmentGroupMaker("buildenv/foo").setEnvironments("a", "b", "c").setDefaults("a")
.make();
scratch.file("hello/BUILD",
getDependencyRule(compatibleWith("//buildenv/foo:b", "//buildenv/foo:c")),
getDependingRule(compatibleWith("//buildenv/foo:c")));
assertThat(getConfiguredTarget("//hello:main")).isNotNull();
assertNoEvents();
}
/**
* Tests that a dependency is invalid when both rules add environments to their defaults and
* the depender adds environments not added by the dependency.
*/
@Test
public void invalidCompatibilityConstraints() throws Exception {
new EnvironmentGroupMaker("buildenv/foo").setEnvironments("a", "b", "c").setDefaults("a")
.make();
scratch.file("hello/BUILD",
getDependencyRule(compatibleWith("//buildenv/foo:c")),
getDependingRule(compatibleWith("//buildenv/foo:b", "//buildenv/foo:c")));
reporter.removeHandler(failFastHandler);
assertThat(getConfiguredTarget("//hello:main")).isNull();
assertContainsEvent(
"dependency //hello:dep doesn't support expected environment: //buildenv/foo:b");
}
/**
* Tests the error message when the dependency is missing multiple expected environments.
*/
@Test
public void multipleMissingEnvironments() throws Exception {
new EnvironmentGroupMaker("buildenv/foo").setEnvironments("a", "b", "c").setDefaults("a")
.make();
scratch.file("hello/BUILD",
getDependencyRule(),
getDependingRule(compatibleWith("//buildenv/foo:b", "//buildenv/foo:c")));
reporter.removeHandler(failFastHandler);
assertThat(getConfiguredTarget("//hello:main")).isNull();
assertContainsEvent("dependency //hello:dep doesn't support expected environments: "
+ "//buildenv/foo:b, //buildenv/foo:c");
}
/**
* Tests a valid dependency including environments from different groups.
*/
@Test
public void validMultigroupConstraints() throws Exception {
new EnvironmentGroupMaker("buildenv/foo").setEnvironments("a", "b", "c").setDefaults("a")
.make();
new EnvironmentGroupMaker("buildenv/bar").setEnvironments("d", "e", "f").setDefaults("d")
.make();
scratch.file("hello/BUILD",
getDependencyRule(constrainedTo("//buildenv/foo:b", "//buildenv/foo:c"),
compatibleWith("//buildenv/bar:e")),
getDependingRule(constrainedTo("//buildenv/foo:c"), compatibleWith("//buildenv/bar:e")));
assertThat(getConfiguredTarget("//hello:main")).isNotNull();
assertNoEvents();
}
/**
* Tests an invalid dependency including environments from different groups.
*/
@Test
public void invalidMultigroupConstraints() throws Exception {
new EnvironmentGroupMaker("buildenv/foo").setEnvironments("a", "b", "c").setDefaults("a")
.make();
new EnvironmentGroupMaker("buildenv/bar").setEnvironments("d", "e", "f").setDefaults("d")
.make();
scratch.file("hello/BUILD",
getDependencyRule(constrainedTo("//buildenv/foo:c"), compatibleWith("//buildenv/bar:e")),
getDependingRule(constrainedTo("//buildenv/foo:b", "//buildenv/foo:c"),
compatibleWith("//buildenv/bar:e")));
reporter.removeHandler(failFastHandler);
assertThat(getConfiguredTarget("//hello:main")).isNull();
assertContainsEvent(
"dependency //hello:dep doesn't support expected environment: //buildenv/foo:b");
}
/**
* Tests a valid dependency where the dependency doesn't "know" about the expected environment's
* group, but implicitly supports it because that environment is a default.
*/
@Test
public void validConstraintsUnknownEnvironmentToDependency() throws Exception {
new EnvironmentGroupMaker("buildenv/foo").setEnvironments("a", "b", "c").setDefaults("a", "b")
.make();
scratch.file("hello/BUILD",
getDependencyRule(),
getDependingRule(constrainedTo("//buildenv/foo:b")));
assertThat(getConfiguredTarget("//hello:main")).isNotNull();
assertNoEvents();
}
/**
* Tests an invalid dependency where the dependency doesn't "know" about the expected
* environment's group and doesn't support it because it isn't a default.
*/
@Test
public void invalidConstraintsUnknownEnvironmentToDependency() throws Exception {
new EnvironmentGroupMaker("buildenv/foo").setEnvironments("a", "b", "c").setDefaults("a", "b")
.make();
scratch.file("hello/BUILD",
getDependencyRule(),
getDependingRule(constrainedTo("//buildenv/foo:c")));
reporter.removeHandler(failFastHandler);
assertThat(getConfiguredTarget("//hello:main")).isNull();
assertContainsEvent(
"dependency //hello:dep doesn't support expected environment: //buildenv/foo:c");
}
/**
* Tests a valid dependency where the depender doesn't "know" about one of the dependency's
* groups, the depender implicitly supports that group's defaults, and all of those defaults
* are accounted for in the dependency.
*/
@Test
public void validConstraintsUnknownEnvironmentToDepender() throws Exception {
new EnvironmentGroupMaker("buildenv/foo").setEnvironments("a", "b", "c").setDefaults("a")
.make();
scratch.file("hello/BUILD",
getDependencyRule(constrainedTo("//buildenv/foo:a", "//buildenv/foo:b")),
getDependingRule());
assertThat(getConfiguredTarget("//hello:main")).isNotNull();
assertNoEvents();
}
/**
* Tests an invalid dependency where the depender doesn't "know" about one of the dependency's
* groups, the depender implicitly supports that group's defaults, and one of those defaults
* isn't accounted for in the dependency.
*/
@Test
public void invalidConstraintsUnknownEnvironmentToDepender() throws Exception {
new EnvironmentGroupMaker("buildenv/foo").setEnvironments("a", "b", "c").setDefaults("a")
.make();
scratch.file("hello/BUILD",
getDependencyRule(constrainedTo("//buildenv/foo:b")),
getDependingRule());
reporter.removeHandler(failFastHandler);
assertThat(getConfiguredTarget("//hello:main")).isNull();
assertContainsEvent(
"dependency //hello:dep doesn't support expected environment: //buildenv/foo:a");
}
/**
* Tests the case where one dependency is valid and another one isn't.
*/
@Test
public void oneDependencyIsInvalid() throws Exception {
new EnvironmentGroupMaker("buildenv/foo").setEnvironments("a", "b").setDefaults("a").make();
scratch.file("hello/BUILD",
getRuleDef("sh_library", "bad_dep", constrainedTo("//buildenv/foo:b")),
getRuleDef("sh_library", "good_dep", compatibleWith("//buildenv/foo:b")),
getRuleDef("sh_library", "depender",
constrainedTo("//buildenv/foo:a", "//buildenv/foo:b"),
getAttrDef("deps", "good_dep", "bad_dep")));
reporter.removeHandler(failFastHandler);
assertThat(getConfiguredTarget("//hello:depender")).isNull();
assertContainsEvent("//hello:bad_dep doesn't support expected environment: //buildenv/foo:a");
assertDoesNotContainEvent("//hello:good_dep");
}
@Test
public void constraintEnforcementDisabled() throws Exception {
useConfiguration("--enforce_constraints=0");
new EnvironmentGroupMaker("buildenv/foo").setEnvironments("a", "b", "c").setDefaults("a")
.make();
scratch.file("hello/BUILD",
getDependencyRule(),
getDependingRule(compatibleWith("//buildenv/foo:b", "//buildenv/foo:c")));
assertThat(getConfiguredTarget("//hello:main")).isNotNull();
assertNoEvents();
}
@Test
public void constraintEnforcementDisabledHostConfig() throws Exception {
useConfiguration("--enforce_constraints=0");
new EnvironmentGroupMaker("buildenv/foo").setEnvironments("a", "b", "c").setDefaults().make();
scratch.file("hello/BUILD",
"genrule(",
" name = 'gen',",
" srcs = [],",
" outs = ['gen.out'],",
" cmd = '',",
" tools = [':main'])",
getDependencyRule(),
getDependingRule(compatibleWith("//buildenv/foo:a")));
assertThat(getConfiguredTarget("//hello:gen")).isNotNull();
assertNoEvents();
}
/**
* Tests that package defaults compatibility produces a valid dependency that would otherwise
* be invalid.
*/
@Test
public void compatibilityPackageDefaults() throws Exception {
new EnvironmentGroupMaker("buildenv/foo").setEnvironments("a", "b").setDefaults("a").make();
scratch.file("hello/BUILD",
"package(default_compatible_with = ['//buildenv/foo:b'])",
getDependencyRule(),
getDependingRule(compatibleWith("//buildenv/foo:b")));
assertThat(getConfiguredTarget("//hello:main")).isNotNull();
assertNoEvents();
}
/**
* Tests that a rule's compatibility declaration overrides its package defaults compatibility.
*/
@Test
public void packageDefaultsCompatibilityOverride() throws Exception {
new EnvironmentGroupMaker("buildenv/foo").setEnvironments("a", "b").setDefaults().make();
// We intentionally create an invalid dependency structure vs. a valid one. If we tested on
// a valid one, this test wouldn't be able to distinguish between rule declarations overriding
// package defaults and package defaults overriding rule declarations.
scratch.file("hello/BUILD",
"package(default_compatible_with = ['//buildenv/foo:b'])",
getDependencyRule(compatibleWith("//buildenv/foo:a")),
getDependingRule(compatibleWith("//buildenv/foo:a", "//buildenv/foo:b")));
reporter.removeHandler(failFastHandler);
assertThat(getConfiguredTarget("//hello:main")).isNull();
assertContainsEvent(
"dependency //hello:dep doesn't support expected environment: //buildenv/foo:b");
}
/**
* Tests that package defaults restriction produces a valid dependency that would otherwise
* be invalid.
*/
@Test
public void restrictionPackageDefaults() throws Exception {
new EnvironmentGroupMaker("buildenv/foo").setEnvironments("a", "b").setDefaults("a", "b")
.make();
scratch.file("hello/BUILD",
"package(default_restricted_to = ['//buildenv/foo:b'])",
getDependencyRule(constrainedTo("//buildenv/foo:b")),
getDependingRule());
assertThat(getConfiguredTarget("//hello:main")).isNotNull();
assertNoEvents();
}
/**
* Tests that a rule's restriction declaration overrides its package defaults restriction.
*/
@Test
public void packageDefaultsRestrictionOverride() throws Exception {
new EnvironmentGroupMaker("buildenv/foo").setEnvironments("a", "b").setDefaults().make();
// We intentionally create an invalid dependency structure vs. a valid one. If we tested on
// a valid one, this test wouldn't be able to distinguish between rule declarations overriding
// package defaults and package defaults overriding rule declarations.
scratch.file("hello/BUILD",
"package(default_restricted_to = ['//buildenv/foo:b'])",
getDependencyRule(constrainedTo("//buildenv/foo:a")),
getDependingRule(constrainedTo("//buildenv/foo:a", "//buildenv/foo:b")));
reporter.removeHandler(failFastHandler);
assertThat(getConfiguredTarget("//hello:main")).isNull();
assertContainsEvent(
"dependency //hello:dep doesn't support expected environment: //buildenv/foo:b");
}
/**
* Tests that "default_compatible_with" fills in a rule's "compatible_with" when not specified
* by the rule. This is different than, e.g., the rule declaration / rule class defaults model,
* where the "compatible_with" / "restricted_to" values of rule class defaults are merged together
* before being supplied to the rule. See comments in DependencyResolver for more discussion.
*/
@Test
public void packageDefaultsDirectlyFillRuleAttributes() throws Exception {
new EnvironmentGroupMaker("buildenv/foo").setEnvironments("a", "b").setDefaults().make();
scratch.file("hello/BUILD",
"package(default_restricted_to = ['//buildenv/foo:b'])",
getDependencyRule(compatibleWith("//buildenv/foo:a")));
reporter.removeHandler(failFastHandler);
assertThat(getConfiguredTarget("//hello:dep")).isNull();
assertContainsEvent("//buildenv/foo:a and //buildenv/foo:b belong to the same "
+ "environment group. They should be declared together either here or in restricted_to");
}
@Test
public void hostDependenciesAreNotChecked() throws Exception {
new EnvironmentGroupMaker("buildenv/foo").setEnvironments("a", "b").setDefaults("a").make();
scratch.file("hello/BUILD",
"sh_binary(name = 'host_tool',",
" srcs = ['host_tool.sh'],",
" restricted_to = ['//buildenv/foo:b'])",
"genrule(",
" name = 'hello',",
" srcs = [],",
" outs = ['hello.out'],",
" cmd = '',",
" tools = [':host_tool'],",
" compatible_with = ['//buildenv/foo:a'])");
assertThat(getConfiguredTarget("//hello:hello")).isNotNull();
assertNoEvents();
}
@Test
public void hostDependenciesNotCheckedNoDistinctHostConfiguration() throws Exception {
useConfiguration("--nodistinct_host_configuration");
new EnvironmentGroupMaker("buildenv/foo").setEnvironments("a", "b").setDefaults("a").make();
scratch.file("hello/BUILD",
"sh_binary(name = 'host_tool',",
" srcs = ['host_tool.sh'],",
" restricted_to = ['//buildenv/foo:b'])",
"genrule(",
" name = 'hello',",
" srcs = [],",
" outs = ['hello.out'],",
" cmd = '',",
" tools = [':host_tool'],",
" compatible_with = ['//buildenv/foo:a'])");
assertThat(getConfiguredTarget("//hello:hello")).isNotNull();
assertNoEvents();
}
@Test
public void implicitAndLateBoundDependenciesAreNotChecked() throws Exception {
new EnvironmentGroupMaker("buildenv/foo").setEnvironments("a", "b").setDefaults("a").make();
scratch.file("hello/BUILD",
"rule_with_implicit_and_latebound_deps(",
" name = 'hi',",
" compatible_with = ['//buildenv/foo:b'])");
assertThat(getConfiguredTarget("//hello:hi")).isNotNull();
// Note that the event "cannot build rule_with_implicit_and_latebound_deps" *does* occur
// because of the implementation of UnknownRuleConfiguredTarget.
assertDoesNotContainEvent(":implicit doesn't support expected environment");
assertDoesNotContainEvent(":latebound doesn't support expected environment");
assertDoesNotContainEvent("normal doesn't support expected environment");
}
@Test
public void implicitDepsWithWhiteListedAttributeAreChecked() throws Exception {
new EnvironmentGroupMaker("buildenv/foo").setEnvironments("a", "b").setDefaults("a").make();
scratch.file("hello/BUILD",
"rule_with_enforced_implicit_deps(",
" name = 'hi',",
" compatible_with = ['//buildenv/foo:b'])");
reporter.removeHandler(failFastHandler);
assertThat(getConfiguredTarget("//hello:hi")).isNull();
assertContainsEvent(
"dependency //helpers:implicit doesn't support expected environment: //buildenv/foo:b");
}
@Test
public void explicitDepWithEnforcementSkipOverride() throws Exception {
new EnvironmentGroupMaker("buildenv/foo").setEnvironments("a", "b").setDefaults("a").make();
scratch.file("hello/BUILD",
"rule_with_skipped_attr(",
" name = 'hi',",
" some_attr = '//helpers:default',",
" compatible_with = ['//buildenv/foo:b'])");
assertThat(getConfiguredTarget("//hello:hi")).isNotNull();
assertNoEvents();
}
@Test
public void javaDataAndResourcesAttributesSkipped() throws Exception {
new EnvironmentGroupMaker("buildenv/foo").setEnvironments("a", "b").setDefaults("a").make();
scratch.file("hello/BUILD",
"java_library(",
" name = 'hi',",
" data = ['//helpers:default'],",
" resources = ['//helpers:default'],",
" compatible_with = ['//buildenv/foo:b'])");
assertThat(getConfiguredTarget("//hello:hi")).isNotNull();
assertNoEvents();
}
@Test
public void filegroupDataAttributesSkipped() throws Exception {
new EnvironmentGroupMaker("buildenv/foo").setEnvironments("a", "b").setDefaults("a").make();
scratch.file("hello/BUILD",
"filegroup(",
" name = 'hi',",
" data = ['//helpers:default'],",
" compatible_with = ['//buildenv/foo:b'])");
assertThat(getConfiguredTarget("//hello:hi")).isNotNull();
assertNoEvents();
}
@Test
public void outputFilesAreChecked() throws Exception {
new EnvironmentGroupMaker("buildenv/foo").setEnvironments("a", "b").setDefaults().make();
scratch.file("hello/BUILD",
"genrule(name = 'gen', srcs = [], outs = ['shlib.sh'], cmd = '')",
"sh_library(",
" name = 'shlib',",
" srcs = ['shlib.sh'],",
" data = ['whatever.txt'],",
" compatible_with = ['//buildenv/foo:a'])");
reporter.removeHandler(failFastHandler);
assertThat(getConfiguredTarget("//hello:shlib")).isNull();
assertContainsEvent(
"dependency //hello:gen doesn't support expected environment: //buildenv/foo:a");
}
@Test
public void configSettingRulesAreNotChecked() throws Exception {
new EnvironmentGroupMaker("buildenv/foo").setEnvironments("a", "b").setDefaults().make();
scratch.file("hello/BUILD",
"sh_library(",
" name = 'shlib',",
" srcs = select({",
" '//config:a': ['shlib.sh'],",
" }),",
" compatible_with = ['//buildenv/foo:a'])");
useConfiguration("--define", "mode=a");
assertThat(getConfiguredTarget("//hello:shlib")).isNotNull();
assertNoEvents();
}
@Test
public void fulfills() throws Exception {
new EnvironmentGroupMaker("buildenv/foo")
.setEnvironments("a", "b")
.setFulfills("a", "b")
.setDefaults()
.make();
scratch.file("hello/BUILD",
getDependencyRule(constrainedTo("//buildenv/foo:a")),
getDependingRule(constrainedTo("//buildenv/foo:b")));
assertThat(getConfiguredTarget("//hello:main")).isNotNull();
assertNoEvents();
}
@Test
public void fulfillsIsNotSymmetric() throws Exception {
new EnvironmentGroupMaker("buildenv/foo")
.setEnvironments("a", "b")
.setFulfills("a", "b")
.setDefaults()
.make();
scratch.file("hello/BUILD",
getDependencyRule(constrainedTo("//buildenv/foo:b")),
getDependingRule(constrainedTo("//buildenv/foo:a")));
reporter.removeHandler(failFastHandler);
assertThat(getConfiguredTarget("//hello:main")).isNull();
assertContainsEvent(
"dependency //hello:dep doesn't support expected environment: //buildenv/foo:a");
}
@Test
public void fulfillsIsTransitive() throws Exception {
new EnvironmentGroupMaker("buildenv/foo")
.setEnvironments("a", "b", "c")
.setFulfills("a", "b")
.setFulfills("b", "c")
.setDefaults()
.make();
scratch.file("hello/BUILD",
getDependencyRule(constrainedTo("//buildenv/foo:a")),
getDependingRule(constrainedTo("//buildenv/foo:c")));
assertThat(getConfiguredTarget("//hello:main")).isNotNull();
assertNoEvents();
}
@Test
public void defaultEnvironmentDirectlyFulfills() throws Exception {
new EnvironmentGroupMaker("buildenv/foo")
.setEnvironments("a", "b")
.setFulfills("a", "b")
.setDefaults("a")
.make();
scratch.file("hello/BUILD",
getDependencyRule(),
getDependingRule(constrainedTo("//buildenv/foo:b")));
assertThat(getConfiguredTarget("//hello:main")).isNotNull();
assertNoEvents();
}
@Test
public void defaultEnvironmentIndirectlyFulfills() throws Exception {
new EnvironmentGroupMaker("buildenv/foo")
.setEnvironments("a", "b", "c")
.setFulfills("a", "b")
.setFulfills("b", "c")
.setDefaults("a")
.make();
scratch.file("hello/BUILD",
getDependencyRule(),
getDependingRule(constrainedTo("//buildenv/foo:c")));
assertThat(getConfiguredTarget("//hello:main")).isNotNull();
assertNoEvents();
}
@Test
public void environmentFulfillsExpectedDefault() throws Exception {
new EnvironmentGroupMaker("buildenv/foo")
.setEnvironments("a", "b")
.setFulfills("a", "b")
.setDefaults("b")
.make();
scratch.file("hello/BUILD",
getDependencyRule(constrainedTo("//buildenv/foo:a")),
getDependingRule());
assertThat(getConfiguredTarget("//hello:main")).isNotNull();
assertNoEvents();
}
@Test
public void constraintExemptRulesDontHaveConstraintAttributes() throws Exception {
new EnvironmentGroupMaker("buildenv/foo")
.setEnvironments("a", "b")
.setDefaults("a")
.make();
scratch.file("ihave/BUILD",
"totally_free_rule(",
" name = 'nolimits',",
" restricted_to = ['//buildenv/foo:b']",
")");
reporter.removeHandler(failFastHandler);
assertThat(getConfiguredTarget("//ihave:nolimits")).isNull();
assertContainsEvent("no such attribute 'restricted_to' in 'totally_free_rule'");
}
@Test
public void buildingEnvironmentGroupDirectlyDoesntCrash() throws Exception {
new EnvironmentGroupMaker("buildenv/foo")
.setEnvironments("a", "b")
.setDefaults("a")
.make();
assertThat(getConfiguredTarget("//buildenv/foo:foo")).isNotNull();
}
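  /**
   * Writes the dependencies shared by the select() tests below: //deps:dep_a is restricted to
   * //buildenv/foo:a and //deps:dep_b to //buildenv/foo:b, so select() branches can point at
   * deps with non-overlapping environment support.
   */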
private void writeDepsForSelectTests() throws Exception {
scratch.file("deps/BUILD",
"cc_library(",
" name = 'dep_a',",
" srcs = [],",
" restricted_to = ['//buildenv/foo:a'])",
"cc_library(",
" name = 'dep_b',",
" srcs = [],",
" restricted_to = ['//buildenv/foo:b'])");
}
@Test
public void selectableDepsCanMissEnvironments() throws Exception {
new EnvironmentGroupMaker("buildenv/foo").setEnvironments("a", "b").setDefaults().make();
writeDepsForSelectTests();
scratch.file("hello/BUILD",
"cc_library(",
" name = 'lib',",
" srcs = [],",
" deps = select({",
" '//config:a': ['//deps:dep_a'],",
" '//config:b': ['//deps:dep_b'],",
" }),",
" compatible_with = ['//buildenv/foo:a', '//buildenv/foo:b'])");
useConfiguration("--define", "mode=a");
assertThat(getConfiguredTarget("//hello:lib")).isNotNull();
}
@Test
public void staticCheckingOnSelectsTemporarilyDisabled() throws Exception {
// TODO(bazel-team): update this test once static checking on selects is implemented. When
// that happens, the union of all deps in the select must support the environments in the
// depending rule. So the logic here is constraint-invalid because //buildenv/foo:c isn't
// fulfilled by any of the deps.
new EnvironmentGroupMaker("buildenv/foo").setEnvironments("a", "b", "c").setDefaults().make();
writeDepsForSelectTests();
scratch.file("hello/BUILD",
"cc_library(",
" name = 'lib',",
" srcs = [],",
" deps = select({",
" '//config:a': ['//deps:dep_a'],",
" '//config:b': ['//deps:dep_b'],",
" }),",
" compatible_with = ['//buildenv/foo:a', '//buildenv/foo:b', '//buildenv/foo:c'])");
useConfiguration("--define", "mode=a");
assertThat(getConfiguredTarget("//hello:lib")).isNotNull();
}
@Test
public void depInBothSelectAndUnconditionalListIsAlwaysChecked() throws Exception {
new EnvironmentGroupMaker("buildenv/foo").setEnvironments("a", "b").setDefaults().make();
writeDepsForSelectTests();
scratch.file("hello/BUILD",
"cc_library(",
" name = 'lib',",
" srcs = [],",
" deps = select({",
" '//config:a': ['//deps:dep_a'],",
" '//config:b': ['//deps:dep_b'],",
" }),",
" hdrs = ['//deps:dep_a'],",
" compatible_with = ['//buildenv/foo:a', '//buildenv/foo:b'])");
useConfiguration("--define", "mode=a");
reporter.removeHandler(failFastHandler);
assertThat(getConfiguredTarget("//hello:lib")).isNull();
assertContainsEvent(
"dependency //deps:dep_a doesn't support expected environment: //buildenv/foo:b");
}
@Test
public void unconditionalSelectsAlwaysChecked() throws Exception {
new EnvironmentGroupMaker("buildenv/foo").setEnvironments("a", "b").setDefaults().make();
writeDepsForSelectTests();
scratch.file("hello/BUILD",
"cc_library(",
" name = 'lib',",
" srcs = [],",
" deps = select({",
" '//conditions:default': ['//deps:dep_a'],",
" }),",
" compatible_with = ['//buildenv/foo:a', '//buildenv/foo:b'])");
reporter.removeHandler(failFastHandler);
assertThat(getConfiguredTarget("//hello:lib")).isNull();
assertContainsEvent(
"dependency //deps:dep_a doesn't support expected environment: //buildenv/foo:b");
}
@Test
public void refinedEnvironmentCheckingValidCaseDirect() throws Exception {
new EnvironmentGroupMaker("buildenv/foo").setEnvironments("a", "b").setDefaults().make();
writeDepsForSelectTests();
scratch.file("hello/BUILD",
"cc_library(",
" name = 'lib',",
" srcs = [],",
" deps = select({",
" '//config:a': ['//deps:dep_a'],",
" '//config:b': ['//deps:dep_b'],",
" }),",
" compatible_with = ['//buildenv/foo:a'])");
useConfiguration("--define", "mode=a");
// Valid because "--define mode=a" refines :lib to "compatible_with = ['//buildenv/foo:a']".
assertThat(getConfiguredTarget("//hello:lib")).isNotNull();
}
@Test
public void refinedEnvironmentCheckingBadCaseDirect() throws Exception {
new EnvironmentGroupMaker("buildenv/foo").setEnvironments("a", "b").setDefaults().make();
writeDepsForSelectTests();
scratch.file("hello/BUILD",
"cc_library(",
" name = 'lib',",
" srcs = [],",
" deps = select({",
" '//config:a': ['//deps:dep_a'],",
" '//config:b': ['//deps:dep_b'],",
" }),",
" compatible_with = ['//buildenv/foo:b'])");
useConfiguration("--define", "mode=a");
reporter.removeHandler(failFastHandler);
// Invalid because "--define mode=a" refines :lib to "compatible_with = []" (empty).
assertThat(getConfiguredTarget("//hello:lib")).isNull();
assertContainsEvent("//hello:lib: the current command-line flags disqualify all supported "
+ "environments because of incompatible select() paths:\n"
+ " environment: //buildenv/foo:b removed by: //hello:lib (/workspace/hello/BUILD:1:1)");
}
@Test
public void refinedEnvironmentCheckingValidCaseTransitive() throws Exception {
new EnvironmentGroupMaker("buildenv/foo").setEnvironments("a", "b").setDefaults().make();
writeDepsForSelectTests();
scratch.file("hello/BUILD",
"cc_library(",
" name = 'lib',",
" srcs = [],",
" deps = select({",
" '//config:a': ['//deps:dep_a'],",
" '//config:b': ['//deps:dep_b'],",
" }),",
" compatible_with = ['//buildenv/foo:a', '//buildenv/foo:b'])",
"cc_library(",
" name = 'depender',",
" srcs = [],",
" deps = [':lib'],",
" compatible_with = ['//buildenv/foo:a'])");
useConfiguration("--define", "mode=a");
// Valid because "--define mode=a" refines :lib to "compatible_with = ['//buildenv/foo:a']".
assertThat(getConfiguredTarget("//hello:depender")).isNotNull();
}
@Test
public void refinedEnvironmentCheckingBadCaseTransitive() throws Exception {
new EnvironmentGroupMaker("buildenv/foo").setEnvironments("a", "b").setDefaults().make();
writeDepsForSelectTests();
scratch.file("hello/BUILD",
"cc_library(",
" name = 'lib',",
" srcs = [],",
" deps = select({",
" '//config:a': ['//deps:dep_a'],",
" '//config:b': ['//deps:dep_b'],",
" }),",
" compatible_with = ['//buildenv/foo:a', '//buildenv/foo:b'])",
"cc_library(",
" name = 'depender',",
" srcs = [],",
" deps = [':lib'],",
" compatible_with = ['//buildenv/foo:b'])");
useConfiguration("--define", "mode=a");
reporter.removeHandler(failFastHandler);
// Invalid because "--define mode=a" refines :lib to "compatible_with = ['//buildenv/foo:a']".
assertThat(getConfiguredTarget("//hello:depender")).isNull();
assertContainsEvent("//hello:depender: the current command-line flags disqualify all supported "
+ "environments because of incompatible select() paths:\n"
+ " environment: //buildenv/foo:b removed by: //hello:lib (/workspace/hello/BUILD:1:1)");
}
@Test
public void refinedEnvironmentCheckingBadCaseChooseLowestLevelCulprit() throws Exception {
new EnvironmentGroupMaker("buildenv/foo").setEnvironments("a", "b").setDefaults().make();
writeDepsForSelectTests();
scratch.file("hello/BUILD",
"cc_library(",
" name = 'lib2',", // Even though both lib1 and lib2 refine away b, lib2 is the culprit.
" srcs = [],",
" deps = select({",
" '//config:a': ['//deps:dep_a'],",
" '//config:b': ['//deps:dep_b'],",
" }),",
" compatible_with = ['//buildenv/foo:a', '//buildenv/foo:b'])",
"cc_library(",
" name = 'lib1',",
" srcs = [],",
" deps = select({",
" '//config:a': [':lib2'],",
" '//config:b': ['//deps:dep_b'],",
" }),",
" compatible_with = ['//buildenv/foo:a', '//buildenv/foo:b'])",
"cc_library(",
" name = 'depender',",
" srcs = [],",
" deps = [':lib1'],",
" compatible_with = ['//buildenv/foo:b'])");
useConfiguration("--define", "mode=a");
reporter.removeHandler(failFastHandler);
// Invalid because "--define mode=a" refines :lib to "compatible_with = ['//buildenv/foo:a']".
assertThat(getConfiguredTarget("//hello:depender")).isNull();
assertContainsEvent("//hello:depender: the current command-line flags disqualify all supported "
+ "environments because of incompatible select() paths:\n"
+ " environment: //buildenv/foo:b removed by: //hello:lib2 (/workspace/hello/BUILD:1:1)");
}
@Test
public void environmentRefiningAccountsForImplicitDefaults() throws Exception {
new EnvironmentGroupMaker("buildenv/foo").setEnvironments("a", "b").setDefaults("b").make();
writeDepsForSelectTests();
scratch.file("hello/BUILD",
"cc_library(",
" name = 'lib',",
" srcs = [],",
" deps = select({",
" '//config:a': ['//deps:dep_a'],",
" '//config:b': ['//deps:dep_b'],",
" }))");
useConfiguration("--define", "mode=a");
reporter.removeHandler(failFastHandler);
// Invalid because :lib has an implicit default of ['//buildenv/foo:b'] and "--define mode=a"
// refines it to "compatible_with = []" (empty).
assertThat(getConfiguredTarget("//hello:lib")).isNull();
assertContainsEvent("//hello:lib: the current command-line flags disqualify all supported "
+ "environments because of incompatible select() paths:\n"
+ " environment: //buildenv/foo:b removed by: //hello:lib (/workspace/hello/BUILD:1:1)");
}
@Test
public void environmentRefiningChecksAllEnvironmentGroups() throws Exception {
new EnvironmentGroupMaker("buildenv/foo").setEnvironments("a", "b").setDefaults().make();
new EnvironmentGroupMaker("buildenv/bar").setEnvironments("c", "d").setDefaults().make();
scratch.file("deps/BUILD",
"cc_library(",
" name = 'dep_a',",
" srcs = [],",
" restricted_to = ['//buildenv/foo:a', '//buildenv/bar:d'])",
"cc_library(",
" name = 'dep_b',",
" srcs = [],",
" restricted_to = ['//buildenv/foo:b', '//buildenv/bar:c'])");
scratch.file("hello/BUILD",
"cc_library(",
" name = 'lib',",
" srcs = [],",
" deps = select({",
" '//config:a': ['//deps:dep_a'],",
" '//config:b': ['//deps:dep_b'],",
" }),",
" compatible_with = ['//buildenv/foo:a', '//buildenv/bar:c'])");
useConfiguration("--define", "mode=a");
reporter.removeHandler(failFastHandler);
// Invalid because while the //buildenv/foo refinement successfully refines :lib to
// ['//buildenv/foo:a'], the bar refinement refines it to [].
assertThat(getConfiguredTarget("//hello:lib")).isNull();
assertContainsEvent("//hello:lib: the current command-line flags disqualify all supported "
+ "environments because of incompatible select() paths:\n"
+ " environment: //buildenv/bar:c removed by: //hello:lib (/workspace/hello/BUILD:1:1)");
}
/**
* When multiple environment groups get cleared out by refinement, batch the missing environments
* by group membership.
*/
@Test
public void refinedEnvironmentCheckingPartitionsErrorsbyEnvironmentGroup() throws Exception {
new EnvironmentGroupMaker("buildenv/foo").setEnvironments("a", "b").setDefaults().make();
new EnvironmentGroupMaker("buildenv/bar").setEnvironments("c", "d").setDefaults().make();
scratch.file("hello/BUILD",
"cc_library(",
" name = 'all_groups_gone',",
" srcs = [],",
" restricted_to = ['//buildenv/foo:b', '//buildenv/bar:d'])",
"cc_library(",
" name = 'all_groups_there',",
" srcs = [],",
" restricted_to = ['//buildenv/foo:a', '//buildenv/bar:c'])",
"cc_library(",
" name = 'lib',",
" srcs = [],",
" deps = select({",
" '//config:a': [':all_groups_gone'],",
" '//config:b': [':all_groups_there'],",
" }),",
" compatible_with = ['//buildenv/foo:a', '//buildenv/bar:c'])");
useConfiguration("--define", "mode=a");
reporter.removeHandler(failFastHandler);
assertThat(getConfiguredTarget("//hello:lib")).isNull();
assertContainsEvent("//hello:lib: the current command-line flags disqualify all supported "
+ "environments because of incompatible select() paths:\n"
+ "\nenvironment group: //buildenv/foo:foo:\n"
+ " environment: //buildenv/foo:a removed by: //hello:lib (/workspace/hello/BUILD:9:1)\n"
+ "\nenvironment group: //buildenv/bar:bar:\n"
+ " environment: //buildenv/bar:c removed by: //hello:lib (/workspace/hello/BUILD:9:1)");
}
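  /**
   * Writes the rules shared by the refinement-subset tests below: "all" fulfills both "a" and
   * "b", the top-level lib is restricted to the given environment, and its dep declares support
   * for "all" even though every select() branch resolves to a dep that only supports "a". The
   * tests then check whether refinement can still find a valid fulfilling subset for the
   * top-level restriction.
   */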
private void writeRulesForRefiningSubsetTests(String topLevelRestrictedTo) throws Exception {
new EnvironmentGroupMaker("buildenv/foo")
.setEnvironments("a", "b", "all")
.setFulfills("all", "a")
.setFulfills("all", "b")
.setDefaults()
.make();
scratch.file("hello/BUILD",
"cc_library(",
" name = 'lib',",
" srcs = [],",
" deps = [':dep1'],",
" restricted_to = ['//buildenv/foo:" + topLevelRestrictedTo + "'])",
"cc_library(",
" name = 'dep1',",
" srcs = [],",
// This is technically illegal because "dep1" declares support for both "a" and "b" but
// no dependency under the select can provide "b". This is known as "static select
// constraint checking" and is currently an unimplemented Bazel TODO.
" deps = select({",
" '//config:a': [':dep2'],",
" '//conditions:default': [':dep2'],",
" }),",
" restricted_to = ['//buildenv/foo:all'])",
"cc_library(",
" name = 'dep2',",
" srcs = [],",
" compatible_with = ['//buildenv/foo:a'])");
}
@Test
public void refiningReplacesRemovedEnvironmentWithValidFulfillingSubset() throws Exception {
writeRulesForRefiningSubsetTests("a");
assertThat(getConfiguredTarget("//hello:lib")).isNotNull();
}
@Test
public void refiningReplacesRemovedEnvironmentWithInvalidFulfillingSubset() throws Exception {
writeRulesForRefiningSubsetTests("b");
reporter.removeHandler(failFastHandler);
assertThat(getConfiguredTarget("//hello:lib")).isNull();
assertContainsEvent("//hello:lib: the current command-line flags disqualify all supported "
+ "environments because of incompatible select() paths");
}
}
|
apache-2.0
|
fdecampredon/jsx-typescript-old-version
|
tests/baselines/reference/commaOperatorOtherInvalidOperation.js
|
302
|
//// [commaOperatorOtherInvalidOperation.js]
//Expect to have compiler errors
//Comma operator in function arguments and return
function foo(x, y) {
return x, y;
}
var resultIsString = foo(1, "123");
//TypeParameters
function foo1() {
var x;
var y;
var result = (x, y);
}
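// Note on the emitted JavaScript above (descriptive only): the comma operator evaluates both
// operands and yields the right-hand one, so foo(1, "123") evaluates to "123" and (x, y) yields
// y. The compiler errors promised by the header comment are reported against the original
// TypeScript source, not against this emitted output.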
|
apache-2.0
|