repo_name
stringlengths
4
116
path
stringlengths
4
379
size
stringlengths
1
7
content
stringlengths
3
1.05M
license
stringclasses
15 values
skyeyester/ARToolkit
bin/PNG2RAW.py
1200
from PIL import Image import StringIO class PNG2RAW: """ This class is for translating PNG image in RGBA to RAW image in RGB. Usage: PNG2RAW obj(srcBuf) dstBuf = obj.convert() """ def __init__(self, srcBuf): self.readImg(srcBuf) def readImg(self, srcBuf): self.srcBuffer = srcBuf strio = StringIO.StringIO(self.srcBuffer) self.img = Image.open(strio).convert('RGB') def convert(self): """ Returns: A string: raw data of img in RGB. """ if self.img is None: self.readImg() outBuf = '' for r, g, b in list(self.img.getdata()): #r, g, b are integer outBuf += '%c%c%c' % (r, g, b) return outBuf def getLen(self): x, y = self.img.size return x*y*3 def getSize(self): """ Returns: <x, y> """ return self.img.size def getMode(self): """ Returns: A string: ex. RGB """ return self.img.mode if __name__ == "__main__": f = open('sample.png', 'rb') buf = f.read() convertor = PNG2RAW(buf) print 'create object' print convertor.getMode() print convertor.getSize() outBuf = convertor.convert() print len(outBuf) print convertor.getLen()
apache-2.0
xevious99/google-api-php-client
src/Google/Service/Datastore/RollbackResponse.php
973
<?php namespace Google\Service\Datastore; /* * Copyright 2010 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ class RollbackResponse extends \Google\Model { protected $headerType = '\Google\Service\Datastore\ResponseHeader'; protected $headerDataType = ''; public function setHeader(\Google\Service\Datastore\ResponseHeader $header) { $this->header = $header; } public function getHeader() { return $this->header; } }
apache-2.0
orientechnologies/orientdb
core/src/test/java/com/orientechnologies/orient/core/db/FreezeAndRecordInsertAtomicityTest.java
5418
/* * * * Copyright 2010-2017 OrientDB LTD (http://orientdb.com) * * * * Licensed under the Apache License, Version 2.0 (the "License"); * * you may not use this file except in compliance with the License. * * You may obtain a copy of the License at * * * * http://www.apache.org/licenses/LICENSE-2.0 * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * * limitations under the License. * * * * For more information: http://orientdb.com * */ package com.orientechnologies.orient.core.db; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx; import com.orientechnologies.orient.core.id.ORID; import com.orientechnologies.orient.core.index.OIndex; import com.orientechnologies.orient.core.metadata.schema.OClass; import com.orientechnologies.orient.core.metadata.schema.OType; import com.orientechnologies.orient.core.record.impl.ODocument; import java.io.File; import java.util.HashSet; import java.util.Random; import java.util.Set; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; import java.util.stream.Stream; import org.junit.After; import org.junit.Before; import org.junit.Test; /** @author Sergey Sitnikov */ public class FreezeAndRecordInsertAtomicityTest { private static final String URL; private static final int THREADS = Runtime.getRuntime().availableProcessors() * 2; private static final int ITERATIONS = 100; static { String buildDirectory = System.getProperty("buildDirectory"); if (buildDirectory == null) buildDirectory = 
"./target"; URL = "plocal:" + buildDirectory + File.separator + FreezeAndRecordInsertAtomicityTest.class.getSimpleName(); } private Random random; private ODatabaseDocumentTx db; private ExecutorService executorService; private CountDownLatch countDownLatch; @Before public void before() { final long seed = System.currentTimeMillis(); System.out.println(FreezeAndRecordInsertAtomicityTest.class.getSimpleName() + " seed: " + seed); random = new Random(seed); db = new ODatabaseDocumentTx(URL); if (db.exists()) { db.open("admin", "admin"); db.drop(); } db.create(); db.getMetadata() .getSchema() .createClass("Person") .createProperty("name", OType.STRING) .createIndex(OClass.INDEX_TYPE.UNIQUE); executorService = Executors.newFixedThreadPool(THREADS); countDownLatch = new CountDownLatch(THREADS); } @After public void after() throws InterruptedException { executorService.shutdown(); assertTrue(executorService.awaitTermination(5, TimeUnit.SECONDS)); db.drop(); } @Test public void test() throws InterruptedException, ExecutionException { final Set<Future<?>> futures = new HashSet<Future<?>>(); for (int i = 0; i < THREADS; ++i) { final int thread = i; futures.add( executorService.submit( () -> { try { final ODatabaseDocumentInternal db = new ODatabaseDocumentTx(URL); db.open("admin", "admin"); final OIndex index = db.getMetadata().getIndexManagerInternal().getIndex(db, "Person.name"); for (int i1 = 0; i1 < ITERATIONS; ++i1) switch (random.nextInt(3)) { case 0: db.<ODocument>newInstance("Person") .field("name", "name-" + thread + "-" + i1) .save(); break; case 1: db.begin(); db.<ODocument>newInstance("Person") .field("name", "name-" + thread + "-" + i1) .save(); db.commit(); break; case 2: db.freeze(); try { for (ODocument document : db.browseClass("Person")) { try (Stream<ORID> rids = index.getInternal().getRids(document.field("name"))) { assertEquals(document.getIdentity(), rids.findFirst().orElse(null)); } } } finally { db.release(); } break; } } catch (RuntimeException | 
Error e) { e.printStackTrace(); throw e; } finally { countDownLatch.countDown(); } })); } countDownLatch.await(); for (Future<?> future : futures) future.get(); // propagate exceptions, if there are any } }
apache-2.0
peterhal/omakase
src/omakase/syntax/PredefinedNames.java
1028
// Copyright 2012 Peter Hallam // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package omakase.syntax; public final class PredefinedNames { private PredefinedNames() {} public static final String GET = "get"; public static final String SET = "set"; public static final String PROTOTYPE = "prototype"; public static final String BIND = "bind"; public static final String CONSTRUCTOR = "constructor"; public static final String JAVASCRIPT = "javascript"; public static final String MAIN = "main"; }
apache-2.0
xmpace/jetty-read
jetty-plus/src/main/java/org/eclipse/jetty/plus/annotation/PreDestroyCallback.java
2540
// // ======================================================================== // Copyright (c) 1995-2015 Mort Bay Consulting Pty. Ltd. // ------------------------------------------------------------------------ // All rights reserved. This program and the accompanying materials // are made available under the terms of the Eclipse Public License v1.0 // and Apache License v2.0 which accompanies this distribution. // // The Eclipse Public License is available at // http://www.eclipse.org/legal/epl-v10.html // // The Apache License v2.0 is available at // http://www.opensource.org/licenses/apache2.0.php // // You may elect to redistribute this code under either of these licenses. // ======================================================================== // package org.eclipse.jetty.plus.annotation; import java.lang.reflect.Method; import java.lang.reflect.Modifier; import org.eclipse.jetty.util.log.Log; import org.eclipse.jetty.util.log.Logger; /** * PreDestroyCallback * * */ public class PreDestroyCallback extends LifeCycleCallback { private static final Logger LOG = Log.getLogger(PreDestroyCallback.class); /** * Commons Annotations Specification section 2.6: * - no params to method * - returns void * - no checked exceptions * - not static * @see org.eclipse.jetty.plus.annotation.LifeCycleCallback#validate(java.lang.Class, java.lang.reflect.Method) */ public void validate(Class<?> clazz, Method method) { if (method.getExceptionTypes().length > 0) throw new IllegalArgumentException(clazz.getName()+"."+method.getName()+ " cannot not throw a checked exception"); if (!method.getReturnType().equals(Void.TYPE)) throw new IllegalArgumentException(clazz.getName()+"."+method.getName()+ " cannot not have a return type"); if (Modifier.isStatic(method.getModifiers())) throw new IllegalArgumentException(clazz.getName()+"."+method.getName()+ " cannot be static"); } public void callback(Object instance) { try { super.callback(instance); } catch (Exception e) { LOG.warn("Ignoring 
exception thrown on preDestroy call to "+getTargetClass()+"."+getTarget().getName(), e); } } public boolean equals(Object o) { if (super.equals(o) && (o instanceof PreDestroyCallback)) return true; return false; } }
apache-2.0
LightSun/data-mediator
Data-mediator-demo/app/src/main/java/com/heaven7/data/mediator/demo/fragment/BaseFragment.java
1040
package com.heaven7.data.mediator.demo.fragment; import android.content.Context; import android.os.Bundle; import android.support.annotation.Nullable; import android.support.v4.app.Fragment; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import butterknife.ButterKnife; /** * Created by heaven7 on 2017/10/8. */ public abstract class BaseFragment extends Fragment { @Nullable @Override public View onCreateView(LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) { return inflater.inflate(getLayoutId(), container, false); } @Override public void onViewCreated(View view, @Nullable Bundle savedInstanceState) { super.onViewCreated(view, savedInstanceState); ButterKnife.bind(this, view); onInit(getContext(), savedInstanceState); } protected abstract int getLayoutId(); protected abstract void onInit(Context context, Bundle savedInstanceState); }
apache-2.0
ktmrmshk/Dresp
dresp/static/site/shop/Customjs.js
7390
$(document).ready(function(){ /*************** Checkbox script ***************/ var inputs = document.getElementsByTagName('input'); for (a = 0; a < inputs.length; a++) { if (inputs[a].type == "checkbox") { var id = inputs[a].getAttribute("id"); if (id==null){ id= "checkbox" +a; } inputs[a].setAttribute("id",id); var container = document.createElement('div'); container.setAttribute("class", "ttr_checkbox"); var label = document.createElement('label'); label.setAttribute("for", id); $(inputs[a]).wrap(container).after(label); } } /*************** Radiobutton script ***************/ var inputs = document.getElementsByTagName('input'); for (a = 0; a < inputs.length; a++) { if (inputs[a].type == "radio") { var id = inputs[a].getAttribute("id"); if (id==null){ id= "radio" +a; } inputs[a].setAttribute("id",id); var container = document.createElement('div'); container.setAttribute("class", "ttr_radio"); var label = document.createElement('label'); label.setAttribute("for", id); $(inputs[a]).wrap(container).after(label); } } /*************** Staticfooter script ***************/ var window_height = Math.max(document.documentElement.clientHeight, window.innerHeight || 0); var body_height = $(document.body).height(); $content = $("#ttr_content_and_sidebar_container"); if(body_height < window_height){ differ = (window_height - body_height); content_height = $content.height() + differ; $("#ttr_content_and_sidebar_container").css("min-height", content_height+"px"); } /* Slideshow Function Call */ if(jQuery('#ttr_slideshow_inner').length){ jQuery('#ttr_slideshow_inner').TTSlider({ slideShowSpeed:2000, begintime:1000,cssPrefix: 'ttr_' }); } /*************** Hamburgermenu slideleft script ***************/ $('#nav-expander').on('click',function(e){ e.preventDefault(); $('body').toggleClass('nav-expanded'); }); /*************** Menu click script ***************/ $('ul.ttr_menu_items.nav li [data-toggle=dropdown]').on('click', function() { var window_width = 
Math.max(document.documentElement.clientWidth, window.innerWidth || 0) if(window_width > 1025 && $(this).attr('href')){ window.location.href = $(this).attr('href'); } else{ if($(this).parent().hasClass('open')){ location.assign($(this).attr('href')); } } }); /*************** Sidebarmenu click script ***************/ $('ul.ttr_vmenu_items.nav li [data-toggle=dropdown]').on('click', function() { var window_width = Math.max(document.documentElement.clientWidth, window.innerWidth || 0) if(window_width > 1025 && $(this).attr('href')){ window.location.href = $(this).attr('href'); } else{ if($(this).parent().hasClass('open')){ location.assign($(this).attr('href')); } } }); /*************** Tab menu click script ***************/ $('.ttr_menu_items ul.dropdown-menu [data-toggle=dropdown]').on('click', function(event) { var window_width = Math.max(document.documentElement.clientWidth, window.innerWidth || 0); if(window_width < 1025){ event.preventDefault(); event.stopPropagation(); $(this).parent().siblings().removeClass('open'); $(this).parent().toggleClass(function() { if ($(this).is(".open") ) { window.location.href = $(this).children("[data-toggle=dropdown]").attr('href'); return ""; } else { return "open"; } }); } }); /*************** Page Alignment format tab script ***************/ var page_width = $('#ttr_page').width(); var window_width = Math.max(document.documentElement.clientWidth, window.innerWidth || 0); if(window_width < 1025){ $('.ttr_page_align_left').each(function() { var left_div_width = $(this).width(); var page_align_left_value = page_width - left_div_width; left_div_width = left_div_width + 1; $(this).css({'left' : '-' + page_align_left_value + 'px', 'width': left_div_width + 'px'}); }); $('.ttr_page_align_right').each(function() { var right_div_width = $(this).width(); var page_align_left_value = page_width - right_div_width; right_div_width = right_div_width + 1; $(this).css({'right' : '-' + page_align_left_value + 'px', 'width': right_div_width + 
'px'}); }); } /*************** Tab-Sidebarmenu script ***************/ $('.ttr_vmenu_items ul.dropdown-menu [data-toggle=dropdown]').on('click', function(event) { var window_width = Math.max(document.documentElement.clientWidth, window.innerWidth || 0); if(window_width < 1025){ event.preventDefault(); event.stopPropagation(); $(this).parent().siblings().removeClass('open'); $(this).parent().toggleClass(function() { if ($(this).is(".open") ) { window.location.href = $(this).children("[data-toggle=dropdown]").attr('href'); return ""; } else { return "open"; } }); } }); /*************** Sticky menu script ***************/ var menutop = $('#ttr_menu').offset().top; $(window).scroll(function () { if ($(this).scrollTop() > menutop) { $('#ttr_menu').addClass('navbar-fixed-top'); } else { $('#ttr_menu').removeClass('navbar-fixed-top'); } }); /*************** Html video script ***************/ var objects = ['iframe[src*="youtube.com"]','iframe[src*="youtu.be"]', 'video','object']; for(var i = 0 ; i < objects.length ; i++){ if ($(objects[i]).length > 0) { $(objects[i]).wrap( "<div class='embed-responsive embed-responsive-16by9'></div>" ); $(objects[i]).addClass('embed-responsive-item'); } } /*************** Html Equal column height ***************/ $(window).bind('load', function() { tt_columns(); }); $(window).resize(tt_columns); }); /*************** Html Equal column height ***************/ function tt_equal_height(cols){ var maxHeight = 0; maxHeight = Math.max.apply(Math, cols.map(function(){return $(this).height(); }).get()); cols.each(function(){ $child_h = $(this).children().outerHeight(); $parent_h = $(this).height(); if(maxHeight != $parent_h){ $(this).children().css('height','inherit'); if($child_h == $parent_h){ $(this).css('height', maxHeight+'px'); } else{ $mrg = $parent_h - $child_h; $m = maxHeight - $mrg; $(this).css('height', $m + 'px'); } } }); } function tt_columns(){ var window_width = Math.max(document.documentElement.clientWidth, window.innerWidth || 0); 
$('#ttr_content .row').each(function() { $child = $(this).children(); var col = []; $k = 0; var params = []; for($i=0;$i<=$child.length;$i++){ $(params).css('height','auto'); if(window_width > 1199){ if($($child[$i]).hasClass('visible-lg-block')){ if( params.length > 1) { tt_equal_height($(params)); } params = []; $k = 0; } else if($($child[$i]).hasClass('post_column')){ params[$k] = $child[$i]; $k++; } else if(!($($child[$i]).hasClass('clearfix'))) { tt_equal_height($(params)); } } if(window_width > 767 && window_width < 1199){ if($($child[$i]).hasClass('visible-sm-block')){ if( params.length > 1) { tt_equal_height($(params)); } params = []; $k = 0; } else if($($child[$i]).hasClass('post_column')){ params[$k] = $child[$i]; $k++; } else if(!($($child[$i]).hasClass('clearfix'))) { tt_equal_height($(params)); } } if(window_width < 768){ if($($child[$i]).hasClass('visible-xs-block')){ if( params.length > 1) { tt_equal_height($(params)); } params = []; $k = 0; } else if($($child[$i]).hasClass('post_column')){ params[$k] = $child[$i]; $k++; } else if(!($($child[$i]).hasClass('clearfix'))) { tt_equal_height($(params)); } } } }); }
apache-2.0
shensiduanxing/devops-metadata-svc
src/test/java/org/flysnow/cloud/buildmeta/wsclient/ui/model/CreateBuildInfoRequest.java
300
package org.flysnow.cloud.buildmeta.wsclient.ui.model; import org.flysnow.cloud.buildmeta.wsclient.domain.model.Build; public class CreateBuildInfoRequest { private Build build; public Build getBuild() { return build; } public void setBuild(Build build) { this.build = build; } }
apache-2.0
woxiai/dbp
src/main/java/com/dbp/coco/base/models/BaseResult.java
625
package com.dbp.coco.base.models; /** * Created by Administrator on 2016/2/23. */ public class BaseResult { private String action; private boolean result; private String reason; public String getAction() { return action; } public void setAction(String action) { this.action = action; } public boolean isResult() { return result; } public void setResult(boolean result) { this.result = result; } public String getReason() { return reason; } public void setReason(String reason) { this.reason = reason; } }
apache-2.0
bronumski/HealthNet
src/VersionInfo.cs
111
using System.Reflection; [assembly: AssemblyMetadata("githash","5fccb4493e36b8ce227f90e968481bf1ed298830")]
apache-2.0
berinle/jawr-core
src/main/java/net/jawr/web/resource/bundle/generator/ResourceGeneratorReaderWrapper.java
4924
/** * Copyright 2009-2012 Ibrahim Chaehoi * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file * except in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the * License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, * either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package net.jawr.web.resource.bundle.generator; import java.io.Reader; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Locale; import java.util.Map; import net.jawr.web.JawrConstant; import net.jawr.web.config.JawrConfig; import net.jawr.web.resource.FileNameUtils; import net.jawr.web.resource.bundle.generator.variant.VariantResourceGenerator; import net.jawr.web.resource.bundle.variant.VariantSet; import net.jawr.web.resource.handler.reader.ResourceReaderHandler; import net.jawr.web.resource.handler.reader.TextResourceReader; import net.jawr.web.util.StringUtils; /** * This class defines the wrapper class for resource generator in text resource reader. 
* * @author Ibrahim Chaehoi */ public class ResourceGeneratorReaderWrapper implements TextResourceReader { /** The resource generator wrapped */ private TextResourceGenerator generator; /** The resource handler */ private ResourceReaderHandler rsHandler; /** The Jawr config */ private JawrConfig config; /** * Constructor * @param generator the generator */ public ResourceGeneratorReaderWrapper(TextResourceGenerator generator, ResourceReaderHandler rsHandler, JawrConfig config) { this.generator = generator; this.config = config; this.rsHandler = rsHandler; } /* (non-Javadoc) * @see net.jawr.web.resource.handler.ResourceReader#getResource(java.lang.String) */ public Reader getResource(String resourceName) { return getResource(resourceName, false); } /* (non-Javadoc) * @see net.jawr.web.resource.handler.ResourceReader#getResource(java.lang.String, boolean) */ public Reader getResource(String resourceName, boolean processingBundle) { Locale locale = null; String path = generator.getResolver().getResourcePath(resourceName); Map<String, String> contextVariants = new HashMap<String, String>(); Map<String, VariantSet> variantSets = new HashMap<String, VariantSet>(); if(generator instanceof VariantResourceGenerator || generator instanceof LocaleAwareResourceGenerator){ int variantSuffixIdx = path.indexOf("@"); if(variantSuffixIdx != -1){ String variantKey = null; variantKey = FileNameUtils.getBaseName(path.substring(path.indexOf('@')+1)); // Remove variant suffix String extension = FileNameUtils.getExtension(path); path = path.substring(0, variantSuffixIdx); if(StringUtils.isNotEmpty(extension)){ path = path+"."+extension; } String[] variants = variantKey.split("@"); if(generator instanceof VariantResourceGenerator){ variantSets = ((VariantResourceGenerator)generator).getAvailableVariants(path); }else{ // instanceof LocaleAwareResourceGenerator List<String> availableLocales = ((LocaleAwareResourceGenerator)generator).getAvailableLocales(path); if(availableLocales != null){ 
variantSets.put(JawrConstant.LOCALE_VARIANT_TYPE, new VariantSet(JawrConstant.LOCALE_VARIANT_TYPE, "",availableLocales)); } } // Sort the variant types List<String> variantTypes = new ArrayList<String>(variantSets.keySet()); Collections.sort(variantTypes); int nbVariants = variants.length; for(int i = 0; i < nbVariants; i++){ String variantType = variantTypes.get(i); String variantValue = variants[i]; contextVariants.put(variantType, variantValue); if(variantType == JawrConstant.LOCALE_VARIANT_TYPE){ // Resourcebundle should be doing this for me... String[] params = variantValue.split("_"); switch(params.length) { case 3: locale = new Locale(params[0],params[1],params[2]); break; case 2: locale = new Locale(params[0],params[1]); break; default: locale = new Locale(variantValue); } } } } } GeneratorContext context = new GeneratorContext(config, path); context.setVariantMap(contextVariants); context.setVariantSets(variantSets); context.setLocale(locale); context.setResourceReaderHandler(rsHandler); context.setProcessingBundle(processingBundle); return generator.createResource(context); } }
apache-2.0
zillachan/LibZilla
app/src/main/java/com/zilla/libraryzilla/test/zlistview/ZListViewActivity.java
3742
/* * Copyright (c) 2015. Zilla Chen * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.zilla.libraryzilla.test.zlistview; import android.view.View; import android.widget.TextView; import com.zilla.libraryzilla.R; import com.zilla.libraryzilla.common.BaseActivity; import com.zilla.libraryzilla.test.api.GitHubService; import com.zilla.libraryzilla.test.api.model.Org; import java.util.List; import butterknife.BindView; import butterknife.ButterKnife; import retrofit2.Call; import retrofit2.Callback; import retrofit2.Response; import zilla.libcore.api.RetrofitAPI; import zilla.libcore.ui.InjectLayout; import zilla.libzilla.listview.ZListViewWraper; //import zilla.libcore.api.ZillaApi; @InjectLayout(R.layout.activity_zlistviewtest) public class ZListViewActivity extends BaseActivity { private ZListViewWraper<Org> xListViewWraper; @Override protected void initViews() { final GitHubService service = RetrofitAPI.Build.NormalService.create(GitHubService.class); xListViewWraper = new ZListViewWraper<Org>(getWindow().getDecorView(), R.layout.item_zlistview, ViewHolder.class) { @Override public void loadData() { Call<List<Org>> call=service.getRepos("octokit"); call.enqueue(new Callback<List<Org>>() { @Override public void onResponse(Call<List<Org>> call, Response<List<Org>> response) { if(response.isSuccessful()){ xListViewWraper.setModelList(response.body()); } } @Override public void onFailure(Call<List<Org>> call, Throwable t) { RetrofitAPI.dealNetError(t); 
xListViewWraper.refreshFail(); } }); } @Override public void loadMore() { Call<List<Org>> call=service.getRepos("octokit"); call.enqueue(new Callback<List<Org>>() { @Override public void onResponse(Call<List<Org>> call, Response<List<Org>> response) { if(response.isSuccessful()){ xListViewWraper.setModelList(response.body()); } } @Override public void onFailure(Call<List<Org>> call, Throwable t) { xListViewWraper.refreshFail(); } }); } }; } @Override protected void initDatas() { } /** * This class contains all butterknife-injected Views & Layouts from layout file 'item_zlistview.xml' * for easy to all layout elements. * * @author ButterKnifeZelezny, plugin for Android Studio by Avast Developers (http://github.com/avast) */ static class ViewHolder { @BindView(R.id.item_org_name) TextView name; @BindView(R.id.item_org_full_name) TextView full_name; ViewHolder(View view) { ButterKnife.bind(this, view); } } }
apache-2.0
chirpradio/chirpradio-machine
chirp/library/chirpradio_scripts/do_optimize_index.py
919
import codecs import time import urllib2 from chirp.common import chirpradio from google.appengine.api import datastore_errors from djdb import search chirpradio.connect() terms_to_opt = set() for line in codecs.open("index.data", "r", "utf-8"): F = [f.strip() for f in line.split(",")] if F[2] == "1": continue terms_to_opt.add(F[0]) deleted = 0 skipping = True for term in terms_to_opt: if skipping and term: skipping = False if skipping: continue attempt = 1 while True: try: n = search.optimize_index(term) break except (datastore_errors.Timeout, urllib2.URLError): attempt += 1 print "Timeout on attempt %d for %s!" % (attempt, term.encode("utf-8")) time.sleep(2) deleted += n print term.encode("utf-8"), n, deleted
apache-2.0
eHealthAfrica/extract-source-cases-migration
test/logs.js
2607
'use strict' var it = require('tape') , chai = require('tape-chai') , fakeDoc = require('./support/fake-doc') , timekeeper = require('timekeeper') , dataModels = require('data-models') , last = require('lodash/array/last') var migrate = require('../lib') it('adds script name to change log', function (expect) { var doc = fakeDoc() var result = migrate(doc) var log = last(result[0].changeLog) expect.propertyVal(log, 'user', 'extract-source-cases-migration') expect.end() }) it('adds revision to change log', function (expect) { var doc = fakeDoc({_rev: '5-revision-hash-id'}) var result = migrate(doc) var log = last(result[0].changeLog) expect.propertyVal(log, 'rev', '5-revision-hash-id') expect.end() }) it('adds timestamp to change log', function (expect) { var doc = fakeDoc() var now = new Date(1422613414251) try { timekeeper.freeze(now) var result = migrate(doc) var log = last(result[0].changeLog) expect.propertyVal(log, 'timestamp', 1422613414251) } finally { timekeeper.reset() expect.end() } }) it('adds source entry on created docs', function (expect) { var doc = fakeDoc() var created = migrate(doc)[1] expect.deepPropertyVal(created, 'sources[0].type', 'migration') expect.end() }) it('adds migration name to source entry', function (expect) { var doc = fakeDoc() var created = migrate(doc)[1] expect.deepPropertyVal(created, 'sources[0].name' , 'extract-source-cases-migration') expect.end() }) it('adds timestamp to source entry', function (expect) { var doc = fakeDoc() var now = new Date(1422613414251) try { timekeeper.freeze(now) var created = migrate(doc)[1] expect.deepPropertyVal(created, 'sources[0].timestamp', 1422613414251) } finally { timekeeper.reset() expect.end() } }) it('refers back into original doc', function (expect) { var doc = fakeDoc({_id: '123-cdef-678'}) var created = migrate(doc)[1] expect.deepPropertyVal(created, 'sources[0].origin' , '123-cdef-678/contact/source-cases/0') expect.end() }) it('creates valid docs', function (expect) { var doc = 
fakeDoc() var result = migrate(doc) var errors = dataModels.validate(result[1]) expect.notOk(errors) expect.end() }) it('keeps original data', function (expect) { var doc = fakeDoc({contact: {sourceCases: [ { name: 'John Doe' } ]}}) var created = migrate(doc)[1] expect.deepPropertyVal(created, 'sources[0].doc.name' , 'John Doe') expect.end() })
apache-2.0
vespa-engine/vespa
searchlib/src/tests/fef/termfieldmodel/termfieldmodel_test.cpp
11395
// Copyright Yahoo. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root. #include <vespa/vespalib/testkit/test_kit.h> #include <vespa/searchlib/fef/fef.h> #include <vespa/searchlib/queryeval/searchiterator.h> #include <algorithm> using namespace search::fef; struct State { SimpleTermData term; MatchData::UP md; TermFieldMatchData *f3; TermFieldMatchData *f5; TermFieldMatchData *f7; TermFieldMatchDataArray array; State(); ~State(); void setArray(TermFieldMatchDataArray value) { array = value; } }; State::State() : term(), md(), f3(0), f5(0), f7(0), array() {} State::~State() {} void testInvalidId() { const TermFieldMatchData empty; using search::queryeval::SearchIterator; EXPECT_EQUAL(TermFieldMatchData::invalidId(), empty.getDocId()); EXPECT_TRUE(TermFieldMatchData::invalidId() < (SearchIterator::beginId() + 1 ) || TermFieldMatchData::invalidId() > (search::endDocId - 1)); } void testSetup(State &state) { MatchDataLayout layout; state.term.addField(3); // docfreq = 1 state.term.addField(7); // docfreq = 2 state.term.addField(5); // docfreq = 3 typedef search::fef::ITermFieldRangeAdapter FRA; typedef search::fef::SimpleTermFieldRangeAdapter SFR; // lookup terms { int i = 1; for (SFR iter(state.term); iter.valid(); iter.next()) { iter.get().setDocFreq(25 * i++, 100); } } // reserve handles { for (SFR iter(state.term); iter.valid(); iter.next()) { iter.get().setHandle(layout.allocTermField(iter.get().getFieldId())); } } state.md = layout.createMatchData(); // init match data { for (FRA iter(state.term); iter.valid(); iter.next()) { const ITermFieldData& tfd = iter.get(); TermFieldHandle handle = tfd.getHandle(); TermFieldMatchData *data = state.md->resolveTermField(handle); switch (tfd.getFieldId()) { case 3: state.f3 = data; break; case 5: state.f5 = data; break; case 7: state.f7 = data; break; default: EXPECT_TRUE(false); } } EXPECT_EQUAL(3u, state.f3->getFieldId()); EXPECT_EQUAL(5u, state.f5->getFieldId()); EXPECT_EQUAL(7u, 
state.f7->getFieldId()); } // test that we can setup array EXPECT_EQUAL(false, state.array.valid()); state.setArray(TermFieldMatchDataArray().add(state.f3).add(state.f5).add(state.f7)); EXPECT_EQUAL(true, state.array.valid()); } void testGenerate(State &state) { // verify array EXPECT_EQUAL(3u, state.array.size()); EXPECT_EQUAL(state.f3, state.array[0]); EXPECT_EQUAL(state.f5, state.array[1]); EXPECT_EQUAL(state.f7, state.array[2]); // stale unpacked data state.f5->reset(5); EXPECT_EQUAL(5u, state.f5->getDocId()); { TermFieldMatchDataPosition pos; pos.setPosition(3); pos.setElementId(0); pos.setElementLen(10); state.f5->appendPosition(pos); EXPECT_EQUAL(1u, state.f5->getIterator().size()); EXPECT_EQUAL(10u, state.f5->getIterator().getFieldLength()); } state.f5->reset(6); EXPECT_EQUAL(6u, state.f5->getDocId()); EXPECT_EQUAL(FieldPositionsIterator::UNKNOWN_LENGTH, state.f5->getIterator().getFieldLength()); EXPECT_EQUAL(0u, state.f5->getIterator().size()); // fresh unpacked data state.f3->reset(10); { TermFieldMatchDataPosition pos; pos.setPosition(3); pos.setElementId(0); pos.setElementLen(10); EXPECT_EQUAL(FieldPositionsIterator::UNKNOWN_LENGTH, state.f3->getIterator().getFieldLength()); state.f3->appendPosition(pos); EXPECT_EQUAL(10u, state.f3->getIterator().getFieldLength()); } { TermFieldMatchDataPosition pos; pos.setPosition(15); pos.setElementId(1); pos.setElementLen(20); state.f3->appendPosition(pos); EXPECT_EQUAL(20u, state.f3->getIterator().getFieldLength()); } { TermFieldMatchDataPosition pos; pos.setPosition(1); pos.setElementId(2); pos.setElementLen(5); state.f3->appendPosition(pos); EXPECT_EQUAL(20u, state.f3->getIterator().getFieldLength()); } // raw score state.f7->setRawScore(10, 5.0); } void testAnalyze(State &state) { EXPECT_EQUAL(10u, state.f3->getDocId()); EXPECT_NOT_EQUAL(10u, state.f5->getDocId()); EXPECT_EQUAL(10u, state.f7->getDocId()); FieldPositionsIterator it = state.f3->getIterator(); EXPECT_EQUAL(20u, it.getFieldLength()); 
EXPECT_EQUAL(3u, it.size()); EXPECT_TRUE(it.valid()); EXPECT_EQUAL(3u, it.getPosition()); EXPECT_EQUAL(0u, it.getElementId()); EXPECT_EQUAL(10u, it.getElementLen()); it.next(); EXPECT_TRUE(it.valid()); EXPECT_EQUAL(15u, it.getPosition()); EXPECT_EQUAL(1u, it.getElementId()); EXPECT_EQUAL(20u, it.getElementLen()); it.next(); EXPECT_TRUE(it.valid()); EXPECT_EQUAL(1u, it.getPosition()); EXPECT_EQUAL(2u, it.getElementId()); EXPECT_EQUAL(5u, it.getElementLen()); it.next(); EXPECT_TRUE(!it.valid()); EXPECT_EQUAL(0.0, state.f3->getRawScore()); EXPECT_EQUAL(0.0, state.f5->getRawScore()); EXPECT_EQUAL(5.0, state.f7->getRawScore()); } TEST("term field model") { State state; testSetup(state); testGenerate(state); testAnalyze(state); testInvalidId(); } TEST("append positions") { TermFieldMatchData tfmd; tfmd.setFieldId(123); EXPECT_EQUAL(0u, tfmd.size()); EXPECT_EQUAL(1u, tfmd.capacity()); tfmd.reset(7); EXPECT_EQUAL(0u, tfmd.size()); EXPECT_EQUAL(1u, tfmd.capacity()); TermFieldMatchDataPosition pos(0x01020304, 0x10203040, 0x11223344, 0x12345678); tfmd.appendPosition(pos); EXPECT_EQUAL(1u, tfmd.size()); EXPECT_EQUAL(1u, tfmd.capacity()); EXPECT_EQUAL(0x01020304u, tfmd.begin()->getElementId()); EXPECT_EQUAL(0x10203040u, tfmd.begin()->getPosition()); EXPECT_EQUAL(0x11223344, tfmd.begin()->getElementWeight()); EXPECT_EQUAL(0x12345678u, tfmd.begin()->getElementLen()); tfmd.reset(11); EXPECT_EQUAL(0u, tfmd.size()); EXPECT_EQUAL(1u, tfmd.capacity()); TermFieldMatchDataPosition pos2(0x21020304, 0x20203040, 0x21223344, 0x22345678); tfmd.appendPosition(pos); tfmd.appendPosition(pos2); EXPECT_EQUAL(2u, tfmd.size()); EXPECT_EQUAL(42u, tfmd.capacity()); TermFieldMatchDataPosition pos3(0x31020304, 0x30203040, 0x31223344, 0x32345678); tfmd.appendPosition(pos3); EXPECT_EQUAL(3u, tfmd.size()); EXPECT_EQUAL(42u, tfmd.capacity()); EXPECT_EQUAL(0x01020304u, tfmd.begin()->getElementId()); EXPECT_EQUAL(0x10203040u, tfmd.begin()->getPosition()); EXPECT_EQUAL(0x11223344, 
tfmd.begin()->getElementWeight()); EXPECT_EQUAL(0x12345678u, tfmd.begin()->getElementLen()); EXPECT_EQUAL(0x21020304u, tfmd.begin()[1].getElementId()); EXPECT_EQUAL(0x20203040u, tfmd.begin()[1].getPosition()); EXPECT_EQUAL(0x21223344, tfmd.begin()[1].getElementWeight()); EXPECT_EQUAL(0x22345678u, tfmd.begin()[1].getElementLen()); EXPECT_EQUAL(0x31020304u, tfmd.begin()[2].getElementId()); EXPECT_EQUAL(0x30203040u, tfmd.begin()[2].getPosition()); EXPECT_EQUAL(0x31223344, tfmd.begin()[2].getElementWeight()); EXPECT_EQUAL(0x32345678u, tfmd.begin()[2].getElementLen()); } TEST("Access subqueries") { State state; testSetup(state); state.f3->reset(10); state.f3->setSubqueries(10, 42); EXPECT_EQUAL(42ULL, state.f3->getSubqueries()); state.f3->enableRawScore(); EXPECT_EQUAL(0ULL, state.f3->getSubqueries()); state.f3->reset(11); state.f3->appendPosition(TermFieldMatchDataPosition()); state.f3->setSubqueries(11, 42); EXPECT_EQUAL(0ULL, state.f3->getSubqueries()); } TEST("require that TermFieldMatchData can be tagged as needed or not") { TermFieldMatchData tfmd; tfmd.setFieldId(123); EXPECT_EQUAL(tfmd.getFieldId(),123u); EXPECT_TRUE(!tfmd.isNotNeeded()); EXPECT_TRUE(tfmd.needs_normal_features()); EXPECT_TRUE(tfmd.needs_interleaved_features()); tfmd.tagAsNotNeeded(); EXPECT_EQUAL(tfmd.getFieldId(),123u); EXPECT_TRUE(tfmd.isNotNeeded()); EXPECT_TRUE(!tfmd.needs_normal_features()); EXPECT_TRUE(!tfmd.needs_interleaved_features()); tfmd.setNeedNormalFeatures(true); EXPECT_EQUAL(tfmd.getFieldId(),123u); EXPECT_TRUE(!tfmd.isNotNeeded()); EXPECT_TRUE(tfmd.needs_normal_features()); EXPECT_TRUE(!tfmd.needs_interleaved_features()); tfmd.setNeedInterleavedFeatures(true); EXPECT_EQUAL(tfmd.getFieldId(),123u); EXPECT_TRUE(!tfmd.isNotNeeded()); EXPECT_TRUE(tfmd.needs_normal_features()); EXPECT_TRUE(tfmd.needs_interleaved_features()); tfmd.setNeedNormalFeatures(false); EXPECT_EQUAL(tfmd.getFieldId(),123u); EXPECT_TRUE(!tfmd.isNotNeeded()); EXPECT_TRUE(!tfmd.needs_normal_features()); 
EXPECT_TRUE(tfmd.needs_interleaved_features()); tfmd.setNeedInterleavedFeatures(false); EXPECT_EQUAL(tfmd.getFieldId(),123u); EXPECT_TRUE(tfmd.isNotNeeded()); EXPECT_TRUE(!tfmd.needs_normal_features()); EXPECT_TRUE(!tfmd.needs_interleaved_features()); } TEST("require that MatchData soft_reset retains appropriate state") { auto md = MatchData::makeTestInstance(10, 10); md->set_termwise_limit(0.5); auto *old_term = md->resolveTermField(7); old_term->tagAsNotNeeded(); old_term->populate_fixed()->setElementWeight(21); old_term->resetOnlyDocId(42); EXPECT_EQUAL(md->get_termwise_limit(), 0.5); EXPECT_TRUE(old_term->isNotNeeded()); EXPECT_EQUAL(old_term->getFieldId(), 7u); EXPECT_EQUAL(old_term->getWeight(), 21); EXPECT_EQUAL(old_term->getDocId(), 42u); md->soft_reset(); auto *new_term = md->resolveTermField(7); EXPECT_EQUAL(new_term, old_term); EXPECT_EQUAL(md->get_termwise_limit(), 1.0); EXPECT_TRUE(new_term->isNotNeeded()); EXPECT_EQUAL(new_term->getFieldId(), 7u); EXPECT_EQUAL(new_term->getWeight(), 21); EXPECT_EQUAL(new_term->getDocId(), TermFieldMatchData::invalidId()); } TEST("require that compareWithExactness implements a strict weak ordering") { TermFieldMatchDataPosition a(0, 1, 100, 1); TermFieldMatchDataPosition b(0, 2, 100, 1); TermFieldMatchDataPosition c(0, 2, 100, 1); TermFieldMatchDataPosition d(0, 3, 100, 3); TermFieldMatchDataPosition e(0, 3, 100, 3); TermFieldMatchDataPosition f(0, 4, 100, 1); d.setMatchExactness(0.75); e.setMatchExactness(0.5); bool (*cmp)(const TermFieldMatchDataPosition &a, const TermFieldMatchDataPosition &b) = TermFieldMatchDataPosition::compareWithExactness; EXPECT_EQUAL(true, cmp(a, b)); EXPECT_EQUAL(false, cmp(b, c)); EXPECT_EQUAL(true, cmp(c, d)); EXPECT_EQUAL(true, cmp(d, e)); EXPECT_EQUAL(true, cmp(e, f)); EXPECT_EQUAL(false, cmp(b, a)); EXPECT_EQUAL(false, cmp(c, b)); EXPECT_EQUAL(false, cmp(d, c)); EXPECT_EQUAL(false, cmp(e, d)); EXPECT_EQUAL(false, cmp(f, e)); } TEST_MAIN() { TEST_RUN_ALL(); }
apache-2.0
dump247/aws-sdk-java
aws-java-sdk-dynamodb/src/main/java/com/amazonaws/services/dynamodbv2/model/transform/WriteRequestJsonMarshaller.java
2733
/* * Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights * Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ package com.amazonaws.services.dynamodbv2.model.transform; import static com.amazonaws.util.StringUtils.UTF8; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.OutputStreamWriter; import java.io.StringWriter; import java.io.Writer; import java.util.Map; import java.util.List; import com.amazonaws.AmazonClientException; import com.amazonaws.services.dynamodbv2.model.*; import com.amazonaws.transform.Marshaller; import com.amazonaws.util.BinaryUtils; import com.amazonaws.util.StringUtils; import com.amazonaws.util.IdempotentUtils; import com.amazonaws.util.StringInputStream; import com.amazonaws.util.json.*; /** * WriteRequestMarshaller */ public class WriteRequestJsonMarshaller { /** * Marshall the given parameter object, and output to a SdkJsonGenerator */ public void marshall(WriteRequest writeRequest, SdkJsonGenerator jsonGenerator) { if (writeRequest == null) { throw new AmazonClientException( "Invalid argument passed to marshall(...)"); } try { jsonGenerator.writeStartObject(); if (writeRequest.getPutRequest() != null) { jsonGenerator.writeFieldName("PutRequest"); PutRequestJsonMarshaller.getInstance().marshall( writeRequest.getPutRequest(), jsonGenerator); } if (writeRequest.getDeleteRequest() != null) { jsonGenerator.writeFieldName("DeleteRequest"); DeleteRequestJsonMarshaller.getInstance().marshall( writeRequest.getDeleteRequest(), jsonGenerator); } 
jsonGenerator.writeEndObject(); } catch (Throwable t) { throw new AmazonClientException( "Unable to marshall request to JSON: " + t.getMessage(), t); } } private static WriteRequestJsonMarshaller instance; public static WriteRequestJsonMarshaller getInstance() { if (instance == null) instance = new WriteRequestJsonMarshaller(); return instance; } }
apache-2.0
antifriz/zetcheck
src/com/zetcheck/MenuActivity.java
4553
/* Copyright 2015 Ivan Jurin Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */package com.zetcheck; import android.content.Intent; import android.os.Bundle; import android.os.Handler; import android.support.v4.app.Fragment; import android.support.v4.app.FragmentActivity; import android.support.v4.app.FragmentManager; import android.support.v4.app.FragmentPagerAdapter; import android.support.v4.view.ViewPager; import com.zetcheck.R; import com.google.android.gms.ads.AdRequest; import com.google.android.gms.ads.AdView; public class MenuActivity extends FragmentActivity { protected static final String DEBUG_TAG = "mate"; private static SearchLinesFragment searchLinesFragment; private static FavoritesFragment favoritesFragment; private static SearchStationsFragment searchStationsFragment; public static boolean STAY_LIVE = true; public final Handler handler = new Handler(); AdView adView; /** * The {@link android.support.v4.view.PagerAdapter} that will provide * fragments for each of the sections. We use a * {@link android.support.v4.app.FragmentPagerAdapter} derivative, which * will keep every loaded fragment in memory. If this becomes too memory * intensive, it may be best to switch to a * {@link android.support.v4.app.FragmentStatePagerAdapter}. */ SectionsPagerAdapter mSectionsPagerAdapter; /** * The {@link ViewPager} that will host the section contents. 
*/ public static ViewPager mViewPager; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_menu); AppData.context = this; mSectionsPagerAdapter = new SectionsPagerAdapter( getSupportFragmentManager()); mViewPager = (ViewPager) findViewById(R.id.menu_pager); mViewPager.setAdapter(mSectionsPagerAdapter); mViewPager.setOffscreenPageLimit(1); mViewPager.setCurrentItem(1); searchLinesFragment = new SearchLinesFragment(); favoritesFragment = new FavoritesFragment(); searchStationsFragment = new SearchStationsFragment(); adView = (AdView) this.findViewById(R.id.adView); AdRequest adRequest = new AdRequest.Builder().addTestDevice( "9F95B44EAECFE486860F1D1FAB856B0E").build(); adView.loadAd(adRequest); } /** * A {@link FragmentPagerAdapter} that returns a fragment corresponding to * one of the sections/tabs/pages. */ public class SectionsPagerAdapter extends FragmentPagerAdapter { public SectionsPagerAdapter(FragmentManager fm) { super(fm); } @Override public Fragment getItem(int position) { // getItem is called to instantiate the fragment for the given page. // Return a DummySectionFragment (defined as a static inner class // below) with the page number as its lone argument. 
switch (position) { case 0: return searchLinesFragment; case 1: return favoritesFragment; case 2: return searchStationsFragment; default: return null; } } @Override public int getCount() { return 3; } } @Override public void onWindowFocusChanged(boolean hasFocus) { // TODO Auto-generated method stub super.onWindowFocusChanged(hasFocus); if (hasFocus) { if (!STAY_LIVE) finish(); // if(AppData.setupData!=null&&AppData.setupData.hasFavorites) // favoritesFragment.handler.post(favoritesFragment.updateFavorites); // toggleFullscreen(false); } } @Override public void onBackPressed() { if (mViewPager.getCurrentItem() != 1) { mViewPager.setCurrentItem(1); return; } Intent intent1 = new Intent(Intent.ACTION_MAIN); intent1.addCategory(Intent.CATEGORY_HOME); intent1.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK); startActivity(intent1); } @Override public void onResume() { super.onResume(); if (adView != null) { adView.resume(); } } @Override public void onPause() { super.onPause(); if (adView != null) { adView.pause(); } } @Override public void onDestroy() { if (adView != null) { adView.destroy(); } super.onDestroy(); } }
apache-2.0
nfuller/okhttp
mockwebserver/src/test/java/okhttp3/mockwebserver/MockWebServerTest.java
14536
/* * Copyright (C) 2011 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package okhttp3.mockwebserver; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.OutputStream; import java.net.ConnectException; import java.net.HttpURLConnection; import java.net.ProtocolException; import java.net.SocketTimeoutException; import java.net.URL; import java.net.URLConnection; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import okhttp3.Headers; import okhttp3.internal.Util; import org.junit.After; import org.junit.Rule; import org.junit.Test; import org.junit.runner.Description; import org.junit.runners.model.Statement; import static java.util.concurrent.TimeUnit.NANOSECONDS; import static java.util.concurrent.TimeUnit.SECONDS; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; public final class MockWebServerTest { @Rule public final MockWebServer server = new MockWebServer(); @Test public void defaultMockResponse() { MockResponse response = new MockResponse(); assertEquals(Arrays.asList("Content-Length: 0"), headersToList(response)); assertEquals("HTTP/1.1 200 OK", response.getStatus()); } @Test public void 
setResponseMockReason() { String[] reasons = { "Mock Response", "Informational", "OK", "Redirection", "Client Error", "Server Error", "Mock Response" }; for (int i = 0; i < 600; i++) { MockResponse response = new MockResponse().setResponseCode(i); String expectedReason = reasons[i / 100]; assertEquals("HTTP/1.1 " + i + " " + expectedReason, response.getStatus()); assertEquals(Arrays.asList("Content-Length: 0"), headersToList(response)); } } @Test public void setStatusControlsWholeStatusLine() { MockResponse response = new MockResponse().setStatus("HTTP/1.1 202 That'll do pig"); assertEquals(Arrays.asList("Content-Length: 0"), headersToList(response)); assertEquals("HTTP/1.1 202 That'll do pig", response.getStatus()); } @Test public void setBodyAdjustsHeaders() throws IOException { MockResponse response = new MockResponse().setBody("ABC"); assertEquals(Arrays.asList("Content-Length: 3"), headersToList(response)); assertEquals("ABC", response.getBody().readUtf8()); } @Test public void mockResponseAddHeader() { MockResponse response = new MockResponse() .clearHeaders() .addHeader("Cookie: s=square") .addHeader("Cookie", "a=android"); assertEquals(Arrays.asList("Cookie: s=square", "Cookie: a=android"), headersToList(response)); } @Test public void mockResponseSetHeader() { MockResponse response = new MockResponse() .clearHeaders() .addHeader("Cookie: s=square") .addHeader("Cookie: a=android") .addHeader("Cookies: delicious"); response.setHeader("cookie", "r=robot"); assertEquals(Arrays.asList("Cookies: delicious", "cookie: r=robot"), headersToList(response)); } @Test public void regularResponse() throws Exception { server.enqueue(new MockResponse().setBody("hello world")); URL url = server.url("/").url(); HttpURLConnection connection = (HttpURLConnection) url.openConnection(); connection.setRequestProperty("Accept-Language", "en-US"); InputStream in = connection.getInputStream(); BufferedReader reader = new BufferedReader(new InputStreamReader(in)); 
assertEquals(HttpURLConnection.HTTP_OK, connection.getResponseCode()); assertEquals("hello world", reader.readLine()); RecordedRequest request = server.takeRequest(); assertEquals("GET / HTTP/1.1", request.getRequestLine()); assertEquals("en-US", request.getHeader("Accept-Language")); } @Test public void redirect() throws Exception { server.enqueue(new MockResponse() .setResponseCode(HttpURLConnection.HTTP_MOVED_TEMP) .addHeader("Location: " + server.url("/new-path")) .setBody("This page has moved!")); server.enqueue(new MockResponse().setBody("This is the new location!")); URLConnection connection = server.url("/").url().openConnection(); InputStream in = connection.getInputStream(); BufferedReader reader = new BufferedReader(new InputStreamReader(in)); assertEquals("This is the new location!", reader.readLine()); RecordedRequest first = server.takeRequest(); assertEquals("GET / HTTP/1.1", first.getRequestLine()); RecordedRequest redirect = server.takeRequest(); assertEquals("GET /new-path HTTP/1.1", redirect.getRequestLine()); } /** * Test that MockWebServer blocks for a call to enqueue() if a request is made before a mock * response is ready. 
*/ @Test public void dispatchBlocksWaitingForEnqueue() throws Exception { new Thread() { @Override public void run() { try { Thread.sleep(1000); } catch (InterruptedException ignored) { } server.enqueue(new MockResponse().setBody("enqueued in the background")); } }.start(); URLConnection connection = server.url("/").url().openConnection(); InputStream in = connection.getInputStream(); BufferedReader reader = new BufferedReader(new InputStreamReader(in)); assertEquals("enqueued in the background", reader.readLine()); } @Test public void nonHexadecimalChunkSize() throws Exception { server.enqueue(new MockResponse() .setBody("G\r\nxxxxxxxxxxxxxxxx\r\n0\r\n\r\n") .clearHeaders() .addHeader("Transfer-encoding: chunked")); URLConnection connection = server.url("/").url().openConnection(); InputStream in = connection.getInputStream(); try { in.read(); fail(); } catch (IOException expected) { } } @Test public void responseTimeout() throws Exception { server.enqueue(new MockResponse() .setBody("ABC") .clearHeaders() .addHeader("Content-Length: 4")); server.enqueue(new MockResponse().setBody("DEF")); URLConnection urlConnection = server.url("/").url().openConnection(); urlConnection.setReadTimeout(1000); InputStream in = urlConnection.getInputStream(); assertEquals('A', in.read()); assertEquals('B', in.read()); assertEquals('C', in.read()); try { in.read(); // if Content-Length was accurate, this would return -1 immediately fail(); } catch (SocketTimeoutException expected) { } URLConnection urlConnection2 = server.url("/").url().openConnection(); InputStream in2 = urlConnection2.getInputStream(); assertEquals('D', in2.read()); assertEquals('E', in2.read()); assertEquals('F', in2.read()); assertEquals(-1, in2.read()); assertEquals(0, server.takeRequest().getSequenceNumber()); assertEquals(0, server.takeRequest().getSequenceNumber()); } @Test public void disconnectAtStart() throws Exception { server.enqueue(new MockResponse().setSocketPolicy(SocketPolicy.DISCONNECT_AT_START)); 
server.enqueue(new MockResponse()); // The jdk's HttpUrlConnection is a bastard. server.enqueue(new MockResponse()); try { server.url("/a").url().openConnection().getInputStream(); } catch (IOException expected) { } server.url("/b").url().openConnection().getInputStream(); // Should succeed. } /** * Throttle the request body by sleeping 500ms after every 3 bytes. With a 6-byte request, this * should yield one sleep for a total delay of 500ms. */ @Test public void throttleRequest() throws Exception { server.enqueue(new MockResponse() .throttleBody(3, 500, TimeUnit.MILLISECONDS)); long startNanos = System.nanoTime(); URLConnection connection = server.url("/").url().openConnection(); connection.setDoOutput(true); connection.getOutputStream().write("ABCDEF".getBytes("UTF-8")); InputStream in = connection.getInputStream(); assertEquals(-1, in.read()); long elapsedNanos = System.nanoTime() - startNanos; long elapsedMillis = NANOSECONDS.toMillis(elapsedNanos); assertTrue(Util.format("Request + Response: %sms", elapsedMillis), elapsedMillis >= 500); assertTrue(Util.format("Request + Response: %sms", elapsedMillis), elapsedMillis < 1000); } /** * Throttle the response body by sleeping 500ms after every 3 bytes. With a 6-byte response, this * should yield one sleep for a total delay of 500ms. 
*/ @Test public void throttleResponse() throws Exception { server.enqueue(new MockResponse() .setBody("ABCDEF") .throttleBody(3, 500, TimeUnit.MILLISECONDS)); long startNanos = System.nanoTime(); URLConnection connection = server.url("/").url().openConnection(); InputStream in = connection.getInputStream(); assertEquals('A', in.read()); assertEquals('B', in.read()); assertEquals('C', in.read()); assertEquals('D', in.read()); assertEquals('E', in.read()); assertEquals('F', in.read()); assertEquals(-1, in.read()); long elapsedNanos = System.nanoTime() - startNanos; long elapsedMillis = NANOSECONDS.toMillis(elapsedNanos); assertTrue(Util.format("Request + Response: %sms", elapsedMillis), elapsedMillis >= 500); assertTrue(Util.format("Request + Response: %sms", elapsedMillis), elapsedMillis < 1000); } /** Delay the response body by sleeping 1s. */ @Test public void delayResponse() throws IOException { server.enqueue(new MockResponse() .setBody("ABCDEF") .setBodyDelay(1, SECONDS)); long startNanos = System.nanoTime(); URLConnection connection = server.url("/").url().openConnection(); InputStream in = connection.getInputStream(); assertEquals('A', in.read()); long elapsedNanos = System.nanoTime() - startNanos; long elapsedMillis = NANOSECONDS.toMillis(elapsedNanos); assertTrue(Util.format("Request + Response: %sms", elapsedMillis), elapsedMillis >= 1000); in.close(); } @Test public void disconnectRequestHalfway() throws IOException { server.enqueue(new MockResponse().setSocketPolicy(SocketPolicy.DISCONNECT_DURING_REQUEST_BODY)); HttpURLConnection connection = (HttpURLConnection) server.url("/").url().openConnection(); connection.setRequestMethod("POST"); connection.setDoOutput(true); connection.setFixedLengthStreamingMode(1024 * 1024 * 1024); // 1 GB connection.connect(); OutputStream out = connection.getOutputStream(); byte[] data = new byte[1024 * 1024]; int i; for (i = 0; i < 1024; i++) { try { out.write(data); out.flush(); } catch (IOException e) { break; } } 
assertEquals(512f, i, 10f); // Halfway +/- 1% } @Test public void disconnectResponseHalfway() throws IOException { server.enqueue(new MockResponse() .setBody("ab") .setSocketPolicy(SocketPolicy.DISCONNECT_DURING_RESPONSE_BODY)); URLConnection connection = server.url("/").url().openConnection(); assertEquals(2, connection.getContentLength()); InputStream in = connection.getInputStream(); assertEquals('a', in.read()); try { int byteRead = in.read(); // OpenJDK behavior: end of stream. assertEquals(-1, byteRead); } catch (ProtocolException e) { // On Android, HttpURLConnection is implemented by OkHttp v2. OkHttp // treats an incomplete response body as a ProtocolException. } } private List<String> headersToList(MockResponse response) { Headers headers = response.getHeaders(); int size = headers.size(); List<String> headerList = new ArrayList<>(size); for (int i = 0; i < size; i++) { headerList.add(headers.name(i) + ": " + headers.value(i)); } return headerList; } @Test public void shutdownWithoutStart() throws IOException { MockWebServer server = new MockWebServer(); server.shutdown(); } @Test public void shutdownWithoutEnqueue() throws IOException { MockWebServer server = new MockWebServer(); server.start(); server.shutdown(); } @After public void tearDown() throws IOException { server.shutdown(); } @Test public void portImplicitlyStarts() throws IOException { assertTrue(server.getPort() > 0); } @Test public void hostnameImplicitlyStarts() throws IOException { assertNotNull(server.getHostName()); } @Test public void toProxyAddressImplicitlyStarts() throws IOException { assertNotNull(server.toProxyAddress()); } @Test public void differentInstancesGetDifferentPorts() throws IOException { MockWebServer other = new MockWebServer(); assertNotEquals(server.getPort(), other.getPort()); other.shutdown(); } @Test public void statementStartsAndStops() throws Throwable { final AtomicBoolean called = new AtomicBoolean(); Statement statement = server.apply(new Statement() { 
@Override public void evaluate() throws Throwable { called.set(true); server.url("/").url().openConnection().connect(); } }, Description.EMPTY); statement.evaluate(); assertTrue(called.get()); try { server.url("/").url().openConnection().connect(); fail(); } catch (ConnectException expected) { } } @Test public void shutdownWhileBlockedDispatching() throws Exception { // Enqueue a request that'll cause MockWebServer to hang on QueueDispatcher.dispatch(). HttpURLConnection connection = (HttpURLConnection) server.url("/").url().openConnection(); connection.setReadTimeout(500); try { connection.getResponseCode(); fail(); } catch (SocketTimeoutException expected) { } // Shutting down the server should unblock the dispatcher. server.shutdown(); } }
apache-2.0
b-cuts/wire-1
wire-runtime/src/test/java/com/squareup/wire/protos/redacted/RedactedCycleA.java
1550
// Code generated by Wire protocol buffer compiler, do not edit. // Source file: ../wire-runtime/src/test/proto/redacted_test.proto at 40:1 package com.squareup.wire.protos.redacted; import com.squareup.wire.Message; import com.squareup.wire.ProtoField; import java.lang.Object; import java.lang.Override; public final class RedactedCycleA extends Message<RedactedCycleA> { private static final long serialVersionUID = 0L; @ProtoField( tag = 1, type = "squareup.protos.redacted_test.RedactedCycleB" ) public final RedactedCycleB b; public RedactedCycleA(RedactedCycleB b) { this.b = b; } private RedactedCycleA(Builder builder) { this(builder.b); setBuilder(builder); } @Override public boolean equals(Object other) { if (other == this) return true; if (!(other instanceof RedactedCycleA)) return false; return equals(b, ((RedactedCycleA) other).b); } @Override public int hashCode() { int result = hashCode; return result != 0 ? result : (hashCode = b != null ? b.hashCode() : 0); } public static final class Builder extends com.squareup.wire.Message.Builder<RedactedCycleA, Builder> { public RedactedCycleB b; public Builder() { } public Builder(RedactedCycleA message) { super(message); if (message == null) return; this.b = message.b; } public Builder b(RedactedCycleB b) { this.b = b; return this; } @Override public RedactedCycleA build() { return new RedactedCycleA(this); } } }
apache-2.0
infoplat/elasticsearch-demo
src/main/java/com/suyuening/elasticsearch/demo/documentapis/GetAPI.java
1178
package com.suyuening.elasticsearch.demo.documentapis; import java.util.Map; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.client.Client; import com.suyuening.elasticsearch.utils.ESClient; /** * <a href= * "https://www.elastic.co/guide/en/elasticsearch/client/java-api/current/java-docs-get.html" * target="_blank">查看ES Doc</a> * * @author suyuening * */ public class GetAPI { public static void main(String[] args) { try (Client client = ESClient.client()) { GetResponse response = client.prepareGet("twitter", "tweet", "2").setOperationThreaded(false) // 默认为true .get(); if (response != null && response.isExists()) { System.out.println(response.getIndex()); System.out.println(response.getType()); System.out.println(response.getId()); System.out.println(response.getVersion()); Map<String, Object> fields = response.getSource(); for (Map.Entry<String, Object> field : fields.entrySet()) { System.out.println(String.format("%s:%s", field.getKey(), field.getValue())); } System.out.println(response.getSourceAsString()); } } } }
apache-2.0
artiomchi/R4MVC
src/R4Mvc.Tools/Locators/DefaultRazorViewLocator.cs
3115
using System; using System.Collections.Generic; using System.IO; namespace R4Mvc.Tools.Locators { public class DefaultRazorViewLocator : IViewLocator { public IEnumerable<View> Find(string projectRoot) { foreach (var view in FindViews(projectRoot, string.Empty)) yield return view; var areasPath = Path.Combine(projectRoot, "Areas"); if (Directory.Exists(areasPath)) { foreach (var areaPath in Directory.GetDirectories(areasPath)) { var areaName = Path.GetFileName(areaPath); foreach (var view in FindViews(areaPath, areaName)) yield return view; } } } private IEnumerable<View> FindViews(string root, string areaName) { var viewsPath = Path.Combine(root, "Views"); if (Directory.Exists(viewsPath)) { foreach (var controllerPath in Directory.GetDirectories(viewsPath)) { var controllerName = Path.GetFileName(controllerPath); foreach (var file in Directory.GetFiles(Path.Combine(viewsPath, controllerName), "*.cshtml")) { var relativePath = !string.IsNullOrEmpty(areaName) ? $"~/Areas/{areaName}/Views/{controllerName}/{Path.GetFileName(file)}" : $"~/Views/{controllerName}/{Path.GetFileName(file)}"; yield return GetView(file, controllerName, areaName); } var templatesPath = Path.Combine(controllerPath, "DisplayTemplates"); if (Directory.Exists(templatesPath)) { foreach (var file in Directory.GetFiles(templatesPath, "*.cshtml")) { yield return GetView(file, controllerName, areaName, Path.GetFileName(templatesPath)); } } templatesPath = Path.Combine(controllerPath, "EditorTemplates"); if (Directory.Exists(templatesPath)) { foreach (var file in Directory.GetFiles(templatesPath, "*.cshtml")) { yield return GetView(file, controllerName, areaName, Path.GetFileName(templatesPath)); } } } } } private View GetView(string filePath, string controllerName, string areaName, string templateKind = null) { var relativePath = !string.IsNullOrEmpty(areaName) ? 
$"~/Areas/{areaName}/Views/{controllerName}/{Path.GetFileName(filePath)}" : $"~/Views/{controllerName}/{Path.GetFileName(filePath)}"; return new View(areaName, controllerName, Path.GetFileNameWithoutExtension(filePath), new Uri(relativePath, UriKind.Relative), templateKind); } } }
apache-2.0
hpcloud/go-openstack
network/network.go
1139
package network import ( "encoding/json" "github.com/gertd/go-openstack/identity" "github.com/parnurzeal/gorequest" ) type networkResp struct { Networks []Network `json:"networks"` } type Network struct { Id string `json:"id"` Name string `json:"name"` Status string `json:"status"` Subnets []string `json:"subnets"` TenantId string `json:"tenant_id"` RouterExternal bool `json:"router:external"` AdminStateUp bool `json:"admin_state_up"` Shared bool `json:"shared"` PortSecurityEnabled bool `json:"port_security_enabled"` } func GetNetworks(url string, token identity.Token) (networks []Network, err error) { req := gorequest.New() _, body, errs := req.Get(url+"/v2.0/networks"). Set("Content-Type", "application/json"). Set("Accept", "application/json"). Set("X-Auth-Token", token.Id). End() if errs != nil { err = errs[len(errs)-1] return } var nw = networkResp{} if err = json.Unmarshal([]byte(body), &nw); err != nil { return } networks = nw.Networks err = nil return }
apache-2.0
remibergsma/cosmic
cosmic-core/plugins/authentication/ldap/src/main/java/com/cloud/api/command/LDAPConfigCmd.java
10008
package com.cloud.api.command; import com.cloud.api.APICommand; import com.cloud.api.ApiConstants; import com.cloud.api.BaseCmd; import com.cloud.api.Parameter; import com.cloud.api.ServerApiException; import com.cloud.api.response.LDAPConfigResponse; import com.cloud.api.response.ListResponse; import com.cloud.exception.ConcurrentOperationException; import com.cloud.exception.InsufficientCapacityException; import com.cloud.exception.ResourceAllocationException; import com.cloud.exception.ResourceUnavailableException; import com.cloud.framework.config.dao.ConfigurationDao; import com.cloud.framework.config.impl.ConfigurationVO; import com.cloud.ldap.LdapConfiguration; import com.cloud.ldap.LdapConfigurationVO; import com.cloud.ldap.LdapManager; import com.cloud.user.Account; import com.cloud.utils.Pair; import com.cloud.utils.exception.InvalidParameterValueException; import javax.inject.Inject; import java.util.ArrayList; import java.util.List; import org.apache.commons.lang.StringEscapeUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * @deprecated as of 4.3 use the new api {@link LdapAddConfigurationCmd} */ @Deprecated @APICommand(name = "ldapConfig", description = "Configure the LDAP context for this site.", responseObject = LDAPConfigResponse.class, since = "3.0.0", requestHasSensitiveInfo = true, responseHasSensitiveInfo = false) public class LDAPConfigCmd extends BaseCmd { public static final Logger s_logger = LoggerFactory.getLogger(LDAPConfigCmd.class.getName()); private static final String s_name = "ldapconfigresponse"; @Inject private ConfigurationDao _configDao; @Inject private LdapManager _ldapManager; @Inject private LdapConfiguration _ldapConfiguration; ///////////////////////////////////////////////////// //////////////// API parameters ///////////////////// ///////////////////////////////////////////////////// @Parameter(name = ApiConstants.LIST_ALL, type = CommandType.BOOLEAN, description = "If true return current LDAP 
configuration") private Boolean listAll; @Parameter(name = ApiConstants.HOST_NAME, type = CommandType.STRING, description = "Hostname or ip address of the ldap server eg: my.ldap.com") private String hostname; @Parameter(name = ApiConstants.PORT, type = CommandType.INTEGER, description = "Specify the LDAP port if required, default is 389.") private Integer port = 0; @Parameter(name = ApiConstants.USE_SSL, type = CommandType.BOOLEAN, description = "Check Use SSL if the external LDAP server is configured for LDAP over SSL.") private Boolean useSSL; @Parameter(name = ApiConstants.SEARCH_BASE, type = CommandType.STRING, description = "The search base defines the starting point for the search in the directory tree Example: dc=cloud,dc=com.") private String searchBase; @Parameter(name = ApiConstants.QUERY_FILTER, type = CommandType.STRING, description = "You specify a query filter here, which narrows down the users, who can be part of this domain.") private String queryFilter; @Parameter(name = ApiConstants.BIND_DN, type = CommandType.STRING, description = "Specify the distinguished name of a user with the search permission on the directory.") private String bindDN; @Parameter(name = ApiConstants.BIND_PASSWORD, type = CommandType.STRING, description = "Enter the password.") private String bindPassword; @Parameter(name = ApiConstants.TRUST_STORE, type = CommandType.STRING, description = "Enter the path to trust certificates store.") private String trustStore; @Parameter(name = ApiConstants.TRUST_STORE_PASSWORD, type = CommandType.STRING, description = "Enter the password for trust store.") private String trustStorePassword; @Override public void execute() throws ResourceUnavailableException, InsufficientCapacityException, ServerApiException, ConcurrentOperationException, ResourceAllocationException { if (getListAll()) { // return the existing conf final LdapListConfigurationCmd listConfigurationCmd = new LdapListConfigurationCmd(_ldapManager); final Pair<List<? 
extends LdapConfigurationVO>, Integer> result = _ldapManager.listConfigurations(listConfigurationCmd); final ListResponse<LDAPConfigResponse> response = new ListResponse<>(); final List<LDAPConfigResponse> responses = new ArrayList<>(); if (result.second() > 0) { final boolean useSSlConfig = _ldapConfiguration.getSSLStatus(); final String searchBaseConfig = _ldapConfiguration.getBaseDn(); final String bindDnConfig = _ldapConfiguration.getBindPrincipal(); for (final LdapConfigurationVO ldapConfigurationVO : result.first()) { responses.add(createLDAPConfigResponse(ldapConfigurationVO.getHostname(), ldapConfigurationVO.getPort(), useSSlConfig, null, searchBaseConfig, bindDnConfig)); } } response.setResponses(responses); response.setResponseName(getCommandName()); setResponseObject(response); } else if (getHostname() == null || getPort() == null) { throw new InvalidParameterValueException("You need to provide hostname, port to configure your LDAP server"); } else { final boolean result = updateLDAP(); if (result) { final LDAPConfigResponse lr = createLDAPConfigResponse(getHostname(), getPort(), getUseSSL(), getQueryFilter(), getSearchBase(), getBindDN()); lr.setResponseName(getCommandName()); setResponseObject(lr); } } } public Boolean getListAll() { return listAll == null ? Boolean.FALSE : listAll; } private LDAPConfigResponse createLDAPConfigResponse(final String hostname, final Integer port, final Boolean useSSL, final String queryFilter, final String searchBase, final String bindDN) { final LDAPConfigResponse lr = new LDAPConfigResponse(); lr.setHostname(hostname); lr.setPort(port.toString()); lr.setUseSSL(useSSL.toString()); lr.setQueryFilter(queryFilter); lr.setBindDN(bindDN); lr.setSearchBase(searchBase); lr.setObjectName("ldapconfig"); return lr; } public Integer getPort() { return (Integer) (port.intValue() <= 0 ? 
389 : port.intValue()); } public String getHostname() { return hostname; } public void setHostname(final String hostname) { this.hostname = hostname; } private boolean updateLDAP() { _ldapManager.addConfiguration(hostname, port); /** * There is no query filter now. It is derived from ldap.user.object and ldap.search.group.principle */ // ConfigurationVO cvo = _configDao.findByName(LDAPParams.queryfilter.toString()); // _configDao.update(cvo.getName(),cvo.getCategory(),getQueryFilter()); ConfigurationVO cvo = _configDao.findByName("ldap.basedn"); _configDao.update(cvo.getName(), cvo.getCategory(), getSearchBase()); /** * There is no ssl now. it is derived from the presence of trust store and password */ // cvo = _configDao.findByName(LDAPParams.usessl.toString()); // _configDao.update(cvo.getName(),cvo.getCategory(),getUseSSL().toString()); cvo = _configDao.findByName("ldap.bind.principal"); _configDao.update(cvo.getName(), cvo.getCategory(), getBindDN()); cvo = _configDao.findByName("ldap.bind.password"); _configDao.update(cvo.getName(), cvo.getCategory(), getBindPassword()); cvo = _configDao.findByName("ldap.truststore"); _configDao.update(cvo.getName(), cvo.getCategory(), getTrustStore()); cvo = _configDao.findByName("ldap.truststore.password"); _configDao.update(cvo.getName(), cvo.getCategory(), getTrustStorePassword()); return true; } public Boolean getUseSSL() { return useSSL == null ? 
Boolean.FALSE : useSSL; } public String getQueryFilter() { return queryFilter; } public void setQueryFilter(final String queryFilter) { this.queryFilter = StringEscapeUtils.unescapeHtml(queryFilter); } public String getSearchBase() { return searchBase; } public String getBindDN() { return bindDN; } public String getBindPassword() { return bindPassword; } public String getTrustStore() { return trustStore; } public void setTrustStore(final String trustStore) { this.trustStore = trustStore; } public String getTrustStorePassword() { return trustStorePassword; } public void setBindDN(final String bdn) { bindDN = bdn; } ///////////////////////////////////////////////////// /////////////// API Implementation/////////////////// ///////////////////////////////////////////////////// public void setSearchBase(final String searchBase) { this.searchBase = searchBase; } public void setUseSSL(final Boolean useSSL) { this.useSSL = useSSL; } public void setPort(final Integer port) { this.port = port; } @Override public String getCommandName() { return s_name; } @Override public long getEntityOwnerId() { return Account.ACCOUNT_ID_SYSTEM; } private List<? extends LdapConfigurationVO> listLDAPConfig() { final LdapListConfigurationCmd listConfigurationCmd = new LdapListConfigurationCmd(_ldapManager); final Pair<List<? extends LdapConfigurationVO>, Integer> result = _ldapManager.listConfigurations(listConfigurationCmd); return result.first(); } }
apache-2.0
xbed/Mixly_Arduino
mixly_arduino/blockly/converters/microbit_python/display.js
15334
'use strict'; function show_or_scroll(blockid1, blockid2) { function converter(py2block, func, args, keywords, starargs, kwargs, node) { if (args.length === 1 && keywords.length === 4) {//Image.ALL_CLOCKS, delay=120, wait=True, loop=False, clear=False var imagesblock = null; var delayblock = null; var waitblock = null; var loopblock = null; var clearblock = null; imagesblock = py2block.convert(args[0]); for (var i = 0; i < keywords.length; i++) { var param = keywords[i]; var key = py2block.identifier(param.arg); if (key === "delay") { delayblock = py2block.convert(param.value); } else if (key === "wait" && param.value._astname == "Name") { waitblock = py2block.Name_str(param.value); } else if (key === "loop" && param.value._astname == "Name") { loopblock = py2block.Name_str(param.value); } else if (key === "clear" && param.value._astname == "Name") { clearblock = py2block.Name_str(param.value); } } if (imagesblock != null && delayblock != null && waitblock != null && loopblock != null && clearblock != null) { return [block(blockid2, func.lineno, { 'wait': waitblock, 'loop': loopblock, 'clear': clearblock, }, { 'images': imagesblock, 'delay': delayblock, }, { "inline": "true" })]; } } else if (args.length === 1 && keywords.length === 0) { return [block(blockid1, func.lineno, {}, { 'data': py2block.convert(args[0]), }, { "inline": "true" })]; } throw new Error("Incorrect number of arguments"); } return converter; } pbc.moduleFunctionD.get('display')['show'] = show_or_scroll('monitor_show_image_or_string', 'microbit_display_show_animation'); pbc.moduleFunctionD.get('display')['scroll'] = show_or_scroll('monitor_scroll_string', 'microbit_display_scroll_string_animation'); //创建图像 pbc.globalFunctionD['Image'] = function (py2block, func, args, keywords, starargs, kwargs, node) { if (args.length !== 1 || args[0]._astname != "Str") { throw new Error("Incorrect number of arguments"); } var colours = [ "#000000", "#440000", "#660000", "#880000", "#aa0000", "#bb0000", 
"#cc0000", "#dd0000", "#ee0000", "#ff0000" ]; var flag = 0; var tempblock = py2block.Str_value(args[0]); var temptext = new Array(); temptext = tempblock.split(':'); if (temptext.length == 5) { for (var i = 0; i < 5; i++) { if (temptext[i].length == 5) { flag++; } } } if (flag == 5) { return block('microbit_image_create', func.lineno, { "00": colours[temptext[0].charAt(0)], "01": colours[temptext[0].charAt(1)], "02": colours[temptext[0].charAt(2)], "03": colours[temptext[0].charAt(3)], "04": colours[temptext[0].charAt(4)], "10": colours[temptext[1].charAt(0)], "11": colours[temptext[1].charAt(1)], "12": colours[temptext[1].charAt(2)], "13": colours[temptext[1].charAt(3)], "14": colours[temptext[1].charAt(4)], "20": colours[temptext[2].charAt(0)], "21": colours[temptext[2].charAt(1)], "22": colours[temptext[2].charAt(2)], "23": colours[temptext[2].charAt(3)], "24": colours[temptext[2].charAt(4)], "30": colours[temptext[3].charAt(0)], "31": colours[temptext[3].charAt(1)], "32": colours[temptext[3].charAt(2)], "33": colours[temptext[3].charAt(3)], "34": colours[temptext[3].charAt(4)], "40": colours[temptext[4].charAt(0)], "41": colours[temptext[4].charAt(1)], "42": colours[temptext[4].charAt(2)], "43": colours[temptext[4].charAt(3)], "44": colours[temptext[4].charAt(4)], }, {}, { "inline": "false" }); } } pbc.moduleFunctionD.get('image')['height'] = function (py2block, func, args, keywords, starargs, kwargs, node) { if (args.length !== 1) { throw new Error("Incorrect number of arguments"); } return block('display_image_size', func.lineno, { 'OP': 'height' }, { 'VAR': py2block.convert(args[0]), }, { "inline": "true" }); } pbc.moduleFunctionD.get('image')['width'] = function (py2block, func, args, keywords, starargs, kwargs, node) { if (args.length !== 1) { throw new Error("Incorrect number of arguments"); } return block('display_image_size', func.lineno, { 'OP': 'width' }, { 'VAR': py2block.convert(args[0]), }, { "inline": "true" }); } function imageShift(mode){ 
function converter(py2block, func, args, keywords, starargs, kwargs, node) { if (args.length !== 1) { throw new Error("Incorrect number of arguments"); } var imageblock = py2block.convert(func.value); return block('image_shift', func.lineno, { 'OP': mode }, { 'img': imageblock, 'val': py2block.convert(args[0]), }, { "inline": "true" }); } return converter; } pbc.objectFunctionD.get('shift_up')['Image'] = imageShift('up'); pbc.objectFunctionD.get('shift_down')['Image'] = imageShift('down'); pbc.objectFunctionD.get('shift_left')['Image'] = imageShift('left'); pbc.objectFunctionD.get('shift_right')['Image'] = imageShift('right'); pbc.objectFunctionD.get('copy')['Image'] = function converter(py2block, func, args, keywords, starargs, kwargs, node) { if (args.length !== 0) { throw new Error("Incorrect number of arguments"); } var imageblock = py2block.convert(func.value); return block('microbit_image_copy', func.lineno, {}, { 'image': imageblock, }, { "inline": "true" }); } pbc.objectFunctionD.get('invert')['Image'] = function converter(py2block, func, args, keywords, starargs, kwargs, node) { if (args.length !== 0) { throw new Error("Incorrect number of arguments"); } var imageblock = py2block.convert(func.value); return block('microbit_image_invert', func.lineno, {}, { 'image': imageblock, }, { "inline": "true" }); } pbc.moduleFunctionD.get('display')['get_pixel'] = function(py2block, func, args, keywords, starargs, kwargs, node){ if (args.length !== 2){ throw new Error("Incorrect number of arguments"); } var astname = args[0]._astname; var astname1 = args[1]._astname; var xblock; var yblock; pbc.pinType = "pins_axis"; if(astname === "Call" && args[0].func._astname == "Name" && py2block.Name_str(args[0].func) === "int"){ //display.get_pixel(int(0), int(0)) xblock = py2block.convert(args[0].args[0]); }else{ xblock = py2block.convert(args[0]); } if(astname1 === "Call" && args[1].func._astname == "Name" && py2block.Name_str(args[1].func) === "int"){ 
//display.get_pixel(int(0), int(0)) yblock = py2block.convert(args[1].args[0]); }else{ yblock = py2block.convert(args[1]); } pbc.pinType = null; return block("monitor_get_pixel", func.lineno, {}, { 'x':xblock, 'y':yblock }, { "inline": "true" }); } pbc.moduleFunctionD.get('display')['set_pixel'] = function(py2block, func, args, keywords, starargs, kwargs, node){ if (args.length !== 3){ throw new Error("Incorrect number of arguments"); } var astname = args[0]._astname; var astname1 = args[1]._astname; pbc.pinType = "pins_brightness"; var brightblock = py2block.convert(args[2]); pbc.pinType = null; var xblock; var yblock; pbc.pinType = "pins_axis"; if(astname === "Call" && args[0].func._astname == "Name" && py2block.Name_str(args[0].func) === "int"){ //display.set_pixel(int(0), int(0)) xblock = py2block.convert(args[0].args[0]); }else{ xblock = py2block.convert(args[0]); } if(astname1 === "Call" && args[1].func._astname == "Name" && py2block.Name_str(args[1].func) === "int"){ //display.set_pixel(int(0), int(0)) yblock = py2block.convert(args[1].args[0]); }else{ yblock = py2block.convert(args[1]); } pbc.pinType = null; return [block("monitor_bright_point", func.lineno, {}, { 'x':xblock, 'y':yblock, 'brightness':brightblock, }, { "inline": "true" })]; } function displayOnOrOff(mode){ function converter(py2block, func, args, keywords, starargs, kwargs, node) { if (args.length !== 0) { throw new Error("Incorrect number of arguments"); } return [block("microbit_display_on", func.lineno, { 'on_off': mode }, {}, { "inline": "true" })]; } return converter; } pbc.moduleFunctionD.get('display')['on'] = displayOnOrOff('on'); pbc.moduleFunctionD.get('display')['off'] = displayOnOrOff('off'); pbc.moduleFunctionD.get('display')['is_on'] = function (py2block, func, args, keywords, starargs, kwargs, node) { if (args.length !== 0) { throw new Error("Incorrect number of arguments"); } return block("microbit_display_is_on", func.lineno, {}, {}, { "inline": "true" }); } 
pbc.moduleFunctionD.get('display')['clear'] = function (py2block, func, args, keywords, starargs, kwargs, node) { if (args.length !== 0) { throw new Error("Incorrect number of arguments"); } return [block("microbit_display_clear", func.lineno, {}, {}, { "inline": "true" })]; } pbc.assignD.get('Rgb')['check_assign'] = function(py2block, node, targets, value) { if(value._astname != "Call" || value.func._astname != "Attribute" || value.func.value._astname != "Name"){ return false; } var moduleName = py2block.Name_str(value.func.value); var funcName = py2block.identifier(value.func.attr); if(value._astname === "Call" && moduleName === "neopixel" && funcName === "NeoPixel" && value.args.length === 2) return true; return false; } pbc.assignD.get('Rgb')['create_block'] = function(py2block, node, targets, value){ pbc.pinType = "pins_digital"; var pinblock = py2block.convert(value.args[0]); pbc.pinType = null; var countblock = py2block.convert(value.args[1]); return block("display_rgb_init", node.lineno, {}, { "PIN":pinblock, "LEDCOUNT":countblock }); } pbc.globalFunctionD['mixly_rgb_show'] = function (py2block, func, args, keywords, starargs, kwargs, node) { if (args.length !== 4) { throw new Error("Incorrect number of arguments"); } return [block("display_rgb", func.lineno, { }, { '_LED_':py2block.convert(args[0]), "RVALUE":py2block.convert(args[1]), "GVALUE":py2block.convert(args[2]), "BVALUE":py2block.convert(args[3]) }, { "inline": "true" })]; } pbc.assignD.get('Lcd')['check_assign'] = function(py2block, node, targets, value) { if(value._astname != "Call" || value.func._astname != "Name"){ return false; } var className = py2block.Name_str(value.func); if(value._astname === "Call" && className === "LCD1602" && value.args.length === 1) return true; return false; } pbc.assignD.get('Lcd')['create_block'] = function(py2block, node, targets, value){ pbc.inScope = "lcd_init"; var argblock = py2block.convert(value.args[0]); pbc.inScope = null; return block("group_lcd_init", 
node.lineno, {}, { "device":argblock }); } pbc.moduleFunctionD.get('mylcd')['mixly_puts_two_lines'] = function (py2block, func, args, keywords, starargs, kwargs, node) { if (args.length !== 2) { throw new Error("Incorrect number of arguments"); } return [block("group_lcd_print", func.lineno, { }, { "TEXT":py2block.convert(args[0]), "TEXT2":py2block.convert(args[1]) }, { "inline": "false" })]; } pbc.moduleFunctionD.get('mylcd')['mixly_puts'] = function (py2block, func, args, keywords, starargs, kwargs, node) { if (args.length !== 3) { throw new Error("Incorrect number of arguments"); } return [block("group_lcd_print2", func.lineno, { }, { "row":py2block.convert(args[1]), "column":py2block.convert(args[2]), "TEXT":py2block.convert(args[0]), }, { "inline": "true" })]; } function mylcdOnOrOffOrClear(mode){ function converter(py2block, func, args, keywords, starargs, kwargs, node) { if (args.length !== 0) { throw new Error("Incorrect number of arguments"); } return [block("group_lcd_power", func.lineno, { 'STAT': mode }, {}, { "inline": "true" })]; } return converter; } pbc.moduleFunctionD.get('mylcd')['on'] = mylcdOnOrOffOrClear('on()'); pbc.moduleFunctionD.get('mylcd')['off'] = mylcdOnOrOffOrClear('off()'); pbc.moduleFunctionD.get('mylcd')['clear'] = mylcdOnOrOffOrClear('clear()'); pbc.moduleFunctionD.get('mylcd')['backlight'] = function (py2block, func, args, keywords, starargs, kwargs, node) { if (args.length !== 1 || args[0]._astname != "Name") { throw new Error("Incorrect number of arguments"); } var stat = py2block.Name_str(args[0]); return [block("group_lcd_power", func.lineno, { 'STAT': "backlight(" + stat + ")" }, {}, { "inline": "true" })]; } pbc.globalFunctionD['mixly_oled_text'] = function (py2block, func, args, keywords, starargs, kwargs, node) { if (args.length !== 4) { throw new Error("Incorrect number of arguments"); } return [block("lp2i_u8g_draw_4strings", func.lineno, { }, { "Text_line1":py2block.convert(args[0]), 
"Text_line2":py2block.convert(args[1]), "Text_line3":py2block.convert(args[2]), "Text_line4":py2block.convert(args[3]) }, { "inline": "false" })]; } pbc.objectFunctionD.get('show_fill')['monitor'] = function(py2block, func, args, keywords, starargs, kwargs, node) { if(args.length!=1){ throw new Error("Incorrect number of arguments"); } var objblock = py2block.convert(func.value); var flagblock = py2block.identifier(args[0].n.v); return [block("display_fill", func.lineno, {'key':flagblock}, { "SUB":objblock, }, { "inline": "true" })]; }
apache-2.0
maheshika/charon
modules/charon-core/src/main/java/org/wso2/charon/core/attributes/AttributeFactory.java
1680
/* * Copyright (c) 2005-2010, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.charon.core.attributes; import org.wso2.charon.core.exceptions.CharonException; import org.wso2.charon.core.schema.AttributeSchema; import java.util.Map; /** * Provides a factory interface to create different types of attributes * defined in SCIM Schema spec. An implementer can provide a handler to an implementation of this * interface to AbstractResourceFactory to customize attribute creation logic. */ public interface AttributeFactory { public Attribute createSimpleAttribute(String attributeId); public Attribute createComplexAttribute(String attributeId); public Attribute createMultiValuedAttribute(String attributeId); /** * Create the attribute given the attribute schema and the attribute object - may be with * attribute value set. * * @param attributeSchema * @param attribute * @return */ public Attribute createAttribute(AttributeSchema attributeSchema, Attribute attribute) throws CharonException; }
apache-2.0
hnccfr/ccfrweb
admin/src/com/hundsun/network/gates/genshan/web/action/user/UserAccountAction.java
24069
/* */ package com.hundsun.network.gates.genshan.web.action.user; /* */ /* */ import com.hundsun.network.gates.genshan.biz.domain.baseset.UserLevel; /* */ import com.hundsun.network.gates.genshan.biz.domain.fund.FundAccountMsg; /* */ import com.hundsun.network.gates.genshan.biz.domain.query.UserAccountQuery; /* */ import com.hundsun.network.gates.genshan.biz.domain.user.UserAccount; /* */ import com.hundsun.network.gates.genshan.biz.domain.user.UserCreditInfo; /* */ import com.hundsun.network.gates.genshan.biz.domain.user.UserRole; /* */ import com.hundsun.network.gates.genshan.biz.enums.UserRoleEnum; /* */ import com.hundsun.network.gates.genshan.biz.enums.UserStatusEnum; /* */ import com.hundsun.network.gates.genshan.biz.enums.UserTypeEnum; /* */ import com.hundsun.network.gates.genshan.biz.service.fund.FundQueryService; /* */ import com.hundsun.network.gates.genshan.biz.service.user.UserAccountService; /* */ import com.hundsun.network.gates.genshan.common.UserAgent; /* */ import com.hundsun.network.gates.genshan.security.AdminAccess; /* */ import com.hundsun.network.gates.genshan.web.action.BaseAction; /* */ import com.hundsun.network.gates.genshan.web.validator.UserAccountValidator; /* */ import com.hundsun.network.gates.luosi.biz.enums.EnumPasswordType; /* */ import com.hundsun.network.gates.luosi.common.enums.EnumBank; /* */ import com.hundsun.network.gates.luosi.common.enums.EnumEnterpiseCertificateType; /* */ import com.hundsun.network.gates.luosi.common.enums.EnumPersonalCertificateType; /* */ import com.hundsun.network.gates.luosi.common.enums.EnumUserType; /* */ import com.hundsun.network.gates.luosi.houchao.reomte.request.AccountRequest; /* */ import com.hundsun.network.gates.luosi.houchao.reomte.result.FundOperateResult; /* */ import com.hundsun.network.gates.luosi.houchao.reomte.service.RemoteFundService; /* */ import com.hundsun.network.gates.luosi.wulin.reomte.enums.EnumUserResultErrors; /* */ import 
com.hundsun.network.gates.luosi.wulin.reomte.enums.EnumUserStatus; /* */ import com.hundsun.network.gates.luosi.wulin.reomte.result.UserResetPWDResult; /* */ import com.hundsun.network.gates.luosi.wulin.reomte.result.UserServiceResult; /* */ import com.hundsun.network.melody.common.util.StringUtil; /* */ import java.util.ArrayList; /* */ import java.util.Arrays; /* */ import java.util.List; /* */ import org.apache.commons.logging.Log; /* */ import org.springframework.beans.factory.annotation.Autowired; /* */ import org.springframework.beans.factory.annotation.Value; /* */ import org.springframework.stereotype.Controller; /* */ import org.springframework.ui.Model; /* */ import org.springframework.ui.ModelMap; /* */ import org.springframework.validation.BindingResult; /* */ import org.springframework.web.bind.annotation.ModelAttribute; /* */ import org.springframework.web.bind.annotation.RequestMapping; /* */ import org.springframework.web.bind.annotation.RequestParam; /* */ import org.springframework.web.bind.annotation.ResponseBody; /* */ /* */ @Controller /* */ public class UserAccountAction extends BaseAction /* */ { /* */ /* */ @Autowired /* */ private RemoteFundService remoteFundService; /* */ /* */ @Autowired /* */ private UserAccountService userAccountService; /* */ /* */ @Autowired /* */ private FundQueryService fundQueryService; /* */ /* */ @Autowired /* */ private UserAccountValidator userAccountValidator; /* */ /* */ @Value("${remot.fund.app}") /* */ private String remotFundApp; /* */ /* */ @AdminAccess({com.hundsun.network.gates.genshan.common.PermissionEnum.USER_R_LIST}) /* */ @RequestMapping({"/user/list"}) /* */ public void userAccountList(@ModelAttribute("query") UserAccountQuery query, Model model) /* */ { /* 86 */ List roleList = new ArrayList(); /* 87 */ roleList.add(UserRoleEnum.COMMON.getValue()); /* 88 */ roleList.add(UserRoleEnum.MIDDLE.getValue()); /* 89 */ roleList.add(UserRoleEnum.HIGH.getValue()); /* 90 */ query.setRoleList(roleList); /* 
91 */ if (query.getAccount() != null) { /* 92 */ query.setAccount(query.getAccount().trim()); /* */ } /* 94 */ if (query.getName() != null) { /* 95 */ query.setName(query.getName().trim()); /* */ } /* 97 */ this.userAccountService.getUserAccountList(query); /* 98 */ List _list = Arrays.asList(UserRoleEnum.values()); /* 99 */ List userRoleList = new ArrayList(); /* 100 */ userRoleList.addAll(_list); /* 101 */ userRoleList.remove(UserRoleEnum.AUCTIONEER); /* 102 */ userRoleList.remove(UserRoleEnum.REVIEWER); /* 103 */ model.addAttribute("userTypeList", UserTypeEnum.values()); /* 104 */ model.addAttribute("userRoleList", userRoleList); /* 105 */ model.addAttribute("userStatusList", UserStatusEnum.values()); /* 106 */ model.addAttribute("userStatus", UserStatusEnum.FORBIDDEN.getValue()); /* 107 */ model.addAttribute("remotFundApp", this.remotFundApp); /* */ } /* */ /* */ @AdminAccess({com.hundsun.network.gates.genshan.common.PermissionEnum.USER_R_ACCOUNT_QUERY}) /* */ @RequestMapping(value={"/user/info"}, method={org.springframework.web.bind.annotation.RequestMethod.GET}) /* */ public void userAccountView(@ModelAttribute("userAccount") UserAccount userAccount, @ModelAttribute("userLevel") UserLevel userLevel, Model model) /* */ { /* 123 */ userAccount = this.userAccountService.getUserByAccount(userAccount.getAccount()); /* 124 */ userLevel = this.userAccountService.getUserLevelByUserAccount(userAccount.getAccount()); /* 125 */ FundAccountMsg fundAccountMsg = this.fundQueryService.queryFundAccountMsg(null, userAccount /* 126 */ .getAccount()); /* 127 */ model.addAttribute("userAccount", userAccount); /* 128 */ model.addAttribute("userLevel", userLevel); /* 129 */ model.addAttribute("fundAccountMsg", fundAccountMsg); /* */ } /* */ /* */ @AdminAccess({com.hundsun.network.gates.genshan.common.PermissionEnum.USER_U_PASSWORD}) /* */ @RequestMapping({"/user/password/reset"}) /* */ public void userPasswordReset(@RequestParam("userAccount") String userAccount, 
@RequestParam("passwordType") String passwordType, UserAgent userAgent, ModelMap model) /* */ { /* 145 */ UserResetPWDResult result = this.userAccountService.resetUserPwd(userAccount, userAgent /* 146 */ .getAccount(), passwordType); /* 147 */ if ((result != null) && (result.correct())) { /* 148 */ model.addAttribute("password", result.getNewPassword()); /* */ } /* 150 */ setResult(model, result); /* */ } /* */ /* */ @AdminAccess({com.hundsun.network.gates.genshan.common.PermissionEnum.USER_U_STATUS}) /* */ @RequestMapping(value={"/user/changeStatus"}, method={org.springframework.web.bind.annotation.RequestMethod.POST}) /* */ @ResponseBody /* */ public int userStatusChange(@ModelAttribute("userAccount") UserAccount userAccount, Model model) /* */ { /* 166 */ int number = this.userAccountService.changeUserStatus(userAccount); /* 167 */ return number; /* */ } /* */ /* */ @AdminAccess({com.hundsun.network.gates.genshan.common.PermissionEnum.USER_R_CREDIT}) /* */ @RequestMapping(value={"/user/credit"}, method={org.springframework.web.bind.annotation.RequestMethod.GET}) /* */ public void userCreditView(@RequestParam("userAccount") String userAccount, Model model) /* */ { /* 181 */ UserCreditInfo userCredit = this.userAccountService.getUserCreditByUserAccount(userAccount); /* 182 */ model.addAttribute("userCredit", userCredit); /* */ } /* */ /* */ @RequestMapping({"/fund/openAccountMock"}) /* */ public String openAccountMock(Model model, @RequestParam("fundAccount") String fundAccount, @RequestParam("bankNo") String bankNo, @RequestParam("bankAccount") String bankAccount, @RequestParam("idKind") String idKind, @RequestParam("idNo") String idNo) /* */ throws Exception /* */ { /* 196 */ AccountRequest request = new AccountRequest(); /* 197 */ request.setFundAccount(fundAccount); /* 198 */ request.setBankNo(bankNo); /* 199 */ request.setBankAccount(bankAccount); /* 200 */ request.setIdKind(idKind); /* 201 */ request.setIdNo(idNo); /* */ /* 203 */ 
request.setBankBranch("10000000"); /* 204 */ request.setBranchNo("100000"); /* 205 */ request.setBankAccountType("1"); /* 206 */ request.setMoneyType("CNY"); /* 207 */ request.setMemo("开户"); /* 208 */ request.setOperator("current user"); /* 209 */ request.setCountry("CHN"); /* */ /* 211 */ FundOperateResult result = this.remoteFundService.createFundAccount(request); /* */ /* 213 */ if (result.isError()) { /* 214 */ this.log.error(fundAccount + "激活失败!" + result.getErrorInfo()); /* 215 */ String msg = "激活失败!" + result.getErrorInfo(); /* 216 */ if (EnumUserResultErrors.PARAMETER_ERROR.getValue()==Integer.valueOf(result.getErrorNO())) { /* 217 */ msg = msg + ",请确认会员银行及银行卡号是否填写完整"; /* */ } /* 219 */ model.addAttribute("message", msg); /* 220 */ return "error"; /* */ } /* 222 */ model.addAttribute("url", "/user/list"); /* 223 */ return "success"; /* */ } /* */ /* */ @RequestMapping({"/index"}) /* */ public void initIndex(Model model) /* */ { /* 254 */ this.userAccountService.initIssueTodo(model); /* */ } /* */ /* */ private void initeRegPage(ModelMap model) /* */ { /* 262 */ EnumUserType[] userTypes = EnumUserType.values(); /* 263 */ EnumPersonalCertificateType[] pecertificateTypes = EnumPersonalCertificateType.values(); /* 264 */ EnumEnterpiseCertificateType[] enCertificateTypes = EnumEnterpiseCertificateType.values(); /* 265 */ EnumBank[] banks = EnumBank.values(); /* 266 */ model.addAttribute("userTypes", userTypes); /* 267 */ model.addAttribute("peCertificateTypes", pecertificateTypes); /* 268 */ model.addAttribute("enCertificateTypes", enCertificateTypes); /* 269 */ model.addAttribute("banks", banks); /* 270 */ List _userRoleList = Arrays.asList(UserRoleEnum.values()); /* 271 */ List userRoleList = new ArrayList(); /* 272 */ userRoleList.addAll(_userRoleList); /* 273 */ userRoleList.remove(UserRoleEnum.AUCTIONEER); /* 274 */ userRoleList.remove(UserRoleEnum.REVIEWER); /* 275 */ model.addAttribute("rolesList", userRoleList); /* */ } /* */ /* */ private void 
initeRegPage2(ModelMap model) /* */ { /* 283 */ EnumUserType[] userTypes = EnumUserType.values(); /* 284 */ EnumPersonalCertificateType[] pecertificateTypes = EnumPersonalCertificateType.values(); /* 285 */ EnumEnterpiseCertificateType[] enCertificateTypes = EnumEnterpiseCertificateType.values(); /* 286 */ model.addAttribute("userTypes", userTypes); /* 287 */ model.addAttribute("peCertificateTypes", pecertificateTypes); /* 288 */ model.addAttribute("enCertificateTypes", enCertificateTypes); /* */ } /* */ /* */ @RequestMapping(value={"/user/add"}, method={org.springframework.web.bind.annotation.RequestMethod.GET}) /* */ public String userAdd(@ModelAttribute("userAccount") UserAccount userAccount, ModelMap model) /* */ { /* 297 */ initeRegPage(model); /* 298 */ return "/user/add"; /* */ } /* */ /* */ @RequestMapping(value={"/user/add"}, method={org.springframework.web.bind.annotation.RequestMethod.POST}) /* */ public String register(@ModelAttribute("userAccount") UserAccount userAccount, BindingResult result, ModelMap model) /* */ { /* 309 */ this.userAccountValidator.validate(userAccount, result); /* 310 */ if (result.hasErrors()) { /* 311 */ initeRegPage(model); /* 312 */ return "/user/add"; /* */ } /* */ /* 315 */ UserServiceResult regResult = this.userAccountService.userAdd(userAccount); /* 316 */ if (regResult.error()) { /* 317 */ model.put("message", regResult.getErrorInfo()); /* 318 */ return error(model); /* */ } /* 320 */ model.put("url", "/user/list.htm"); /* 321 */ return success(); /* */ } /* */ /* */ @AdminAccess({com.hundsun.network.gates.genshan.common.PermissionEnum.AUCTIONEER_C_ADD}) /* */ @RequestMapping(value={"/user/auctioneer"}, method={org.springframework.web.bind.annotation.RequestMethod.GET}) /* */ public String auctioneerAdd(@ModelAttribute("userAccount") UserAccount userAccount, ModelMap model) /* */ { /* 335 */ initeRegPage2(model); /* 336 */ return "/user/auctioneer"; /* */ } /* */ /* */ 
@AdminAccess({com.hundsun.network.gates.genshan.common.PermissionEnum.AUCTIONEER_C_ADD}) /* */ @RequestMapping(value={"/user/auctioneer"}, method={org.springframework.web.bind.annotation.RequestMethod.POST}) /* */ public String auctioneer(@ModelAttribute("userAccount") UserAccount userAccount, BindingResult result, ModelMap model) /* */ { /* 348 */ this.userAccountValidator.validate(userAccount, result); /* 349 */ if (result.hasErrors()) { /* 350 */ initeRegPage2(model); /* 351 */ return "/user/auctioneer"; /* */ } /* 353 */ userAccount.setUserClass(UserRoleEnum.AUCTIONEER.getValue()); /* 354 */ UserServiceResult regResult = this.userAccountService.userAuctioneer(userAccount); /* 355 */ if (regResult.error()) { /* 356 */ model.put("message", regResult.getErrorInfo()); /* 357 */ return error(model); /* */ } /* 359 */ model.put("url", "/user/auctioneer/list"); /* 360 */ return success(model); /* */ } /* */ /* */ @RequestMapping({"/user/dialog"}) /* */ public void queryUserAccounts(@ModelAttribute("query") UserAccountQuery query, @RequestParam("isRadio") String isRadio, @RequestParam(value="roleName", required=false, defaultValue="") String roleName, Model model) /* */ throws Exception /* */ { /* 379 */ if (query != null) { /* 380 */ if (StringUtil.isNotEmpty(query.getAccount())) { /* 381 */ query.setAccount(query.getAccount().trim()); /* */ } /* 383 */ if (StringUtil.isNotEmpty(query.getName())) { /* 384 */ query.setName(query.getName().trim()); /* */ } /* 386 */ if (StringUtil.isNotEmpty(query.getFundAccount())) { /* 387 */ query.setFundAccount(query.getFundAccount().trim()); /* */ } /* */ } /* 390 */ if (StringUtil.isNotEmpty(roleName)) { /* 391 */ query.setUserRole(roleName); /* */ } /* 393 */ List roleList = new ArrayList(); /* 394 */ roleList.add(UserRoleEnum.COMMON.getValue()); /* 395 */ roleList.add(UserRoleEnum.MIDDLE.getValue()); /* 396 */ roleList.add(UserRoleEnum.HIGH.getValue()); /* 397 */ roleList.add(UserRoleEnum.AUCTIONEER.getValue()); /* 398 */ 
roleList.add(UserRoleEnum.REVIEWER.getValue()); /* 399 */ query.setRoleList(roleList); /* 400 */ query.setStatus(EnumUserStatus.Normal.getValue()); /* 401 */ this.userAccountService.getUserAccountList(query); /* 402 */ model.addAttribute("userTypeList", UserTypeEnum.values()); /* 403 */ model.addAttribute("isRadio", isRadio); /* 404 */ model.addAttribute("roleName", roleName); /* */ } /* */ /* */ @AdminAccess({com.hundsun.network.gates.genshan.common.PermissionEnum.REVIEWER_C_ADD}) /* */ @RequestMapping(value={"/user/addReviewer"}, method={org.springframework.web.bind.annotation.RequestMethod.GET}) /* */ public String addReviewer(@ModelAttribute("userAccount") UserAccount userAccount, ModelMap model) /* */ { /* 416 */ EnumPersonalCertificateType[] pecertificateTypes = EnumPersonalCertificateType.values(); /* 417 */ model.addAttribute("peCertificateTypes", pecertificateTypes); /* 418 */ return "/user/addReviewer"; /* */ } /* */ /* */ @AdminAccess({com.hundsun.network.gates.genshan.common.PermissionEnum.REVIEWER_C_ADD}) /* */ @RequestMapping(value={"/user/addReviewer"}, method={org.springframework.web.bind.annotation.RequestMethod.POST}) /* */ public String addReviewer(@ModelAttribute("userAccount") UserAccount userAccount, BindingResult result, ModelMap model) /* */ { /* 429 */ this.userAccountValidator.validate(userAccount, result); /* 430 */ if (result.hasErrors()) { /* 431 */ EnumPersonalCertificateType[] pecertificateTypes = EnumPersonalCertificateType.values(); /* 432 */ List<UserRole> roleList = this.userAccountService.getRoleList(); /* 433 */ UserRole reviewerRole = null; /* 434 */ for (UserRole userRole : roleList) { /* 435 */ if ("reviewer".equals(userRole.getName())) { /* 436 */ reviewerRole = userRole; /* */ } /* */ } /* 439 */ roleList.clear(); /* 440 */ roleList.add(reviewerRole); /* 441 */ model.addAttribute("rolesList", roleList); /* 442 */ model.addAttribute("peCertificateTypes", pecertificateTypes); /* 443 */ return "/user/addReviewer"; /* */ } /* 
445 */ userAccount.setUserClass(UserRoleEnum.REVIEWER.getValue()); /* 446 */ UserServiceResult regResult = this.userAccountService.addReviewer(userAccount); /* 447 */ if (regResult.error()) { /* 448 */ model.put("message", regResult.getErrorInfo()); /* 449 */ return error(model); /* */ } /* 451 */ model.put("url", "/user/reviewer/list"); /* 452 */ return success(model); /* */ } /* */ /* */ @AdminAccess({com.hundsun.network.gates.genshan.common.PermissionEnum.AUCTIONEER_QUERY_R_LIST}) /* */ @RequestMapping({"/user/auctioneer/list"}) /* */ public void auctioneerList(@ModelAttribute("query") UserAccountQuery query, Model model) /* */ { /* 466 */ List roleList = new ArrayList(); /* 467 */ roleList.add(UserRoleEnum.AUCTIONEER.getValue()); /* 468 */ query.setRoleList(roleList); /* 469 */ initListParamer(model); /* 470 */ this.userAccountService.getUserAccountList(query); /* 471 */ model.addAttribute("userTypeList", UserTypeEnum.values()); /* 472 */ model.addAttribute("userStatus", UserStatusEnum.FORBIDDEN.getValue()); /* */ } /* */ /* */ @AdminAccess({com.hundsun.network.gates.genshan.common.PermissionEnum.AUCTIONEER_R_INFO}) /* */ @RequestMapping(value={"/user/auctioneer/info"}, method={org.springframework.web.bind.annotation.RequestMethod.GET}) /* */ public void auctioneerView(@ModelAttribute("userAccount") UserAccount userAccount, Model model) /* */ { /* 486 */ userAccount = this.userAccountService.getUserByAccount(userAccount.getAccount()); /* 487 */ model.addAttribute("userAccount", userAccount); /* */ } /* */ /* */ @AdminAccess({com.hundsun.network.gates.genshan.common.PermissionEnum.AUCTIONEER_U_PASSWORD_CHANGE}) /* */ @RequestMapping({"/auctioneer/password/reset"}) /* */ public String auctioneerPasswordReset(@RequestParam("userAccount") String userAccount, UserAgent userAgent, ModelMap model) /* */ { /* 503 */ UserResetPWDResult result = this.userAccountService.resetUserPwd(userAccount, userAgent /* 504 */ .getAccount(), EnumPasswordType.SYSTEM.getValue()); /* 
505 */ if ((result != null) && (result.correct())) { /* 506 */ model.addAttribute("password", result.getNewPassword()); /* */ } /* 508 */ setResult(model, result); /* 509 */ return "user/password/reset"; /* */ } /* */ /* */ @AdminAccess({com.hundsun.network.gates.genshan.common.PermissionEnum.AUCTIONEER_U_STATUS_CHANGE}) /* */ @RequestMapping(value={"/auctioneer/changeStatus"}, method={org.springframework.web.bind.annotation.RequestMethod.POST}) /* */ @ResponseBody /* */ public int auctioneerStatusChange(@ModelAttribute("userAccount") UserAccount userAccount, Model model) /* */ { /* 525 */ int number = this.userAccountService.changeUserStatus(userAccount); /* 526 */ return number; /* */ } /* */ /* */ @AdminAccess({com.hundsun.network.gates.genshan.common.PermissionEnum.REVIEWER_QUERY_R_LIST}) /* */ @RequestMapping({"/user/reviewer/list"}) /* */ public void reviewerList(@ModelAttribute("query") UserAccountQuery query, Model model) /* */ { /* 540 */ List roleList = new ArrayList(); /* 541 */ roleList.add(UserRoleEnum.REVIEWER.getValue()); /* 542 */ query.setRoleList(roleList); /* 543 */ initListParamer(model); /* 544 */ this.userAccountService.getUserAccountList(query); /* 545 */ model.addAttribute("userTypeList", UserTypeEnum.values()); /* 546 */ model.addAttribute("userStatus", UserStatusEnum.FORBIDDEN.getValue()); /* */ } /* */ /* */ @AdminAccess({com.hundsun.network.gates.genshan.common.PermissionEnum.REVIEWER_R_INFO}) /* */ @RequestMapping(value={"/user/reviewer/info"}, method={org.springframework.web.bind.annotation.RequestMethod.GET}) /* */ public void reviewerView(@ModelAttribute("userAccount") UserAccount userAccount, Model model) /* */ { /* 560 */ userAccount = this.userAccountService.getUserByAccount(userAccount.getAccount()); /* 561 */ model.addAttribute("userAccount", userAccount); /* */ } /* */ /* */ @AdminAccess({com.hundsun.network.gates.genshan.common.PermissionEnum.REVIEWER_U_PASSWORD_CHANGE}) /* */ @RequestMapping({"/reviewer/password/reset"}) /* 
*/ public String reviewerPasswordReset(@RequestParam("userAccount") String userAccount, UserAgent userAgent, ModelMap model) /* */ { /* 577 */ UserResetPWDResult result = this.userAccountService.resetUserPwd(userAccount, userAgent /* 578 */ .getAccount(), EnumPasswordType.SYSTEM.getValue()); /* 579 */ if ((result != null) && (result.correct())) { /* 580 */ model.addAttribute("password", result.getNewPassword()); /* */ } /* 582 */ setResult(model, result); /* 583 */ return "user/password/reset"; /* */ } /* */ /* */ @AdminAccess({com.hundsun.network.gates.genshan.common.PermissionEnum.REVIEWER_U_STATUS_CHANGE}) /* */ @RequestMapping(value={"/reviewer/changeStatus"}, method={org.springframework.web.bind.annotation.RequestMethod.POST}) /* */ @ResponseBody /* */ public int reviewerStatusChange(@ModelAttribute("userAccount") UserAccount userAccount, Model model) /* */ { /* 599 */ int number = this.userAccountService.changeUserStatus(userAccount); /* 600 */ return number; /* */ } /* */ /* */ public void initListParamer(Model model) /* */ { /* 611 */ List _list = Arrays.asList(UserStatusEnum.values()); /* 612 */ List list = new ArrayList(); /* 613 */ list.addAll(_list); /* 614 */ list.remove(UserStatusEnum.UNFUND); /* 615 */ list.remove(UserStatusEnum.NOACTIVED); /* 616 */ model.addAttribute("userStatusList", list); /* */ } /* */ } /* Location: E:\__安装归档\linquan-20161112\deploy15\genshan\webroot\WEB-INF\classes\ * Qualified Name: com.hundsun.network.gates.genshan.web.action.user.UserAccountAction * JD-Core Version: 0.6.0 */
apache-2.0
gsjohn/ark
app/Http/Controllers/ApiController.php
2752
<?php namespace App\Http\Controllers;

use Illuminate\Http\Request;
use Illuminate\Database\Eloquent\Builder;
use App\Http\Controllers\Controller;

/**
 * Generic JSON REST controller providing index/store/show/update/destroy
 * for a single Eloquent model. Concrete subclasses only implement
 * newEntity() to supply the model this controller manages.
 */
abstract class ApiController extends Controller
{
    /**
     * Create a new, unsaved model instance for this resource.
     *
     * @param array $attributes initial attribute values
     * @return \Illuminate\Database\Eloquent\Model
     */
    public abstract function newEntity(array $attributes = []);

    /**
     * Apply the JSON-encoded `search` request parameter as WHERE clauses.
     *
     * Each key is either "column" or "column operator" (e.g. "age >="),
     * and the value is the bound comparison value.
     *
     * NOTE(review): the column name and operator come straight from the
     * client; only the value is passed as a binding. Laravel rejects
     * unknown operators, but column names are interpolated into SQL, so
     * callers exposed to untrusted input should whitelist searchable
     * columns in the subclass.
     *
     * @param Request $request incoming HTTP request
     * @param Builder $query   query to mutate (by reference)
     * @return void
     */
    public function fillQueryForIndex(Request $request, Builder &$query){
        $conditions = json_decode($request->input('search', '{}'), true);
        if (!empty($conditions)) {
            foreach ($conditions as $k => $v) {
                $tmp = explode(' ', $k);
                // Default to equality when no operator is given after the column.
                $query->where($tmp[0], isset($tmp[1]) ? $tmp[1] : '=', $v);
            }
        }
    }

    /**
     * Display a paginated listing of the resource.
     *
     * Query parameters: page (1-based), pageSize, sort ("column direction"),
     * search (JSON object, see fillQueryForIndex).
     *
     * @param \Illuminate\Http\Request $request
     * @return \Illuminate\Http\Response
     */
    public function index(Request $request)
    {
        // Cast and clamp paging input so page=0 or non-numeric values
        // cannot produce a negative skip() offset.
        $page = max(1, (int) $request->input('page', 1));
        $pageSize = max(1, (int) $request->input('pageSize', 10));

        $sort = $request->input('sort', 'id asc');
        $arr = explode(' ', $sort);
        $column = $arr[0] !== '' ? $arr[0] : 'id';
        // Guard against "sort=id" (no direction) or an invalid direction,
        // which previously caused an undefined-offset / orderBy error.
        $direction = (isset($arr[1]) && strtolower($arr[1]) === 'desc') ? 'desc' : 'asc';

        $query = $this->newEntity()->query();
        $this->fillQueryForIndex($request, $query);

        // Count before applying limit/offset so the total reflects all matches.
        $count = $query->count();
        $data = $query->orderBy($column, $direction)
            ->take($pageSize)
            ->skip(($page - 1) * $pageSize)
            ->get();

        return response(['count' => $count, 'list' => $data, 'page' => $page, 'pageSize' => $pageSize], 200);
    }

    /**
     * Store a newly created resource in storage.
     *
     * @param \Illuminate\Http\Request $request
     * @return \Illuminate\Http\Response 200 with the entity, or 400 on failure
     */
    public function store(Request $request)
    {
        $data = $request->all();
        // `_sign` is a request signature, not a model attribute.
        unset($data['_sign']);
        $entity = $this->newEntity($data);
        $re = $entity->save();
        return response($entity, $re ? 200 : 400);
    }

    /**
     * Display the specified resource.
     *
     * @param int $id
     * @return \Illuminate\Http\Response 200 with the entity, or 404 if absent
     */
    public function show($id)
    {
        $entity = $this->newEntity()->newQuery()->find($id);
        if ($entity === null) {
            // Previously returned 200 with a null body for unknown ids.
            return response(['message' => 'not found'], 404);
        }
        return response($entity, 200);
    }

    /**
     * Update the specified resource in storage.
     *
     * @param \Illuminate\Http\Request $request
     * @param int $id
     * @return \Illuminate\Http\Response 200/401 with a success flag, 404 if absent
     */
    public function update(Request $request, $id)
    {
        $entity = $this->newEntity()->newQuery()->find($id);
        if ($entity === null) {
            // Previously fill()/save() on null caused a fatal error.
            return response(['success' => false], 404);
        }
        $data = $request->all();
        unset($data['_sign']);
        $entity->fill($data);
        $re = $entity->save();
        return response(['success' => $re], $re ? 200 : 401);
    }

    /**
     * Remove the specified resource from storage.
     *
     * @param int $id
     * @return \Illuminate\Http\Response 200/401 with a success flag, 404 if absent
     */
    public function destroy($id)
    {
        $entity = $this->newEntity()->newQuery()->find($id);
        if ($entity === null) {
            // Previously delete() on null caused a fatal error.
            return response(['success' => false], 404);
        }
        $re = $entity->delete();
        return response(['success' => $re], $re ? 200 : 401);
    }
}
apache-2.0
weiwenqiang/GitHub
expert/glide/library/src/main/java/com/bumptech/glide/load/resource/file/FileResource.java
385
package com.bumptech.glide.load.resource.file;

import com.bumptech.glide.load.resource.SimpleResource;

import java.io.File;

/**
 * A simple {@link com.bumptech.glide.load.engine.Resource} that wraps a {@link File}.
 *
 * <p>All resource behavior (get/size/recycle) is inherited unchanged from
 * {@link SimpleResource}; this subclass only fixes the wrapped type to {@link File}.
 */
// Public API.
@SuppressWarnings("WeakerAccess")
public class FileResource extends SimpleResource<File> {
  /**
   * Wraps the given file as a resource.
   *
   * @param file the file to expose via {@code get()} — assumed non-null; TODO confirm
   *     whether {@link SimpleResource} enforces this
   */
  public FileResource(File file) {
    super(file);
  }
}
apache-2.0
cloudfoundry/cli
command/v7/create_isolation_segment_command_test.go
4191
package v7_test

import (
	"errors"

	"code.cloudfoundry.org/cli/actor/actionerror"
	"code.cloudfoundry.org/cli/actor/v7action"
	"code.cloudfoundry.org/cli/command/commandfakes"
	. "code.cloudfoundry.org/cli/command/v7"
	"code.cloudfoundry.org/cli/command/v7/v7fakes"
	"code.cloudfoundry.org/cli/resources"
	"code.cloudfoundry.org/cli/util/configv3"
	"code.cloudfoundry.org/cli/util/ui"
	. "github.com/onsi/ginkgo"
	. "github.com/onsi/gomega"
	. "github.com/onsi/gomega/gbytes"
)

// Ginkgo spec for the `cf create-isolation-segment` command. The command
// under test is wired with counterfeiter fakes for config, shared actor,
// and the v7 actor, so no real API calls are made.
var _ = Describe("create-isolation-segment Command", func() {
	var (
		cmd              CreateIsolationSegmentCommand
		testUI           *ui.UI
		fakeConfig       *commandfakes.FakeConfig
		fakeSharedActor  *commandfakes.FakeSharedActor
		fakeActor        *v7fakes.FakeActor
		binaryName       string
		executeErr       error // error returned by Execute, captured in JustBeforeEach
		isolationSegment string
	)

	// Fresh fakes and command instance for every spec, so expectations
	// on call counts/arguments never leak between tests.
	BeforeEach(func() {
		testUI = ui.NewTestUI(nil, NewBuffer(), NewBuffer())
		fakeConfig = new(commandfakes.FakeConfig)
		fakeSharedActor = new(commandfakes.FakeSharedActor)
		fakeActor = new(v7fakes.FakeActor)
		cmd = CreateIsolationSegmentCommand{
			BaseCommand: BaseCommand{
				UI:          testUI,
				Config:      fakeConfig,
				SharedActor: fakeSharedActor,
				Actor:       fakeActor,
			},
		}
		binaryName = "faceman"
		fakeConfig.BinaryNameReturns(binaryName)
		isolationSegment = "segment1"
	})

	// Runs after every BeforeEach in the tree, so each spec's stubbing is
	// in place before the command executes.
	JustBeforeEach(func() {
		executeErr = cmd.Execute(nil)
	})

	When("checking target fails", func() {
		BeforeEach(func() {
			fakeSharedActor.CheckTargetReturns(actionerror.NotLoggedInError{BinaryName: binaryName})
		})

		It("returns an error", func() {
			Expect(executeErr).To(MatchError(actionerror.NotLoggedInError{BinaryName: binaryName}))

			// The command requires a login but no targeted org/space.
			Expect(fakeSharedActor.CheckTargetCallCount()).To(Equal(1))
			checkTargetedOrg, checkTargetedSpace := fakeSharedActor.CheckTargetArgsForCall(0)
			Expect(checkTargetedOrg).To(BeFalse())
			Expect(checkTargetedSpace).To(BeFalse())
		})
	})

	When("the user is logged in", func() {
		BeforeEach(func() {
			fakeActor.GetCurrentUserReturns(configv3.User{Name: "banana"}, nil)
			cmd.RequiredArgs.IsolationSegmentName = isolationSegment
		})

		When("the create is successful", func() {
			BeforeEach(func() {
				fakeActor.CreateIsolationSegmentByNameReturns(v7action.Warnings{"I am a warning", "I am also a warning"}, nil)
			})

			It("displays the header and ok", func() {
				Expect(executeErr).ToNot(HaveOccurred())
				Expect(testUI.Out).To(Say("Creating isolation segment segment1 as banana..."))
				Expect(testUI.Out).To(Say("OK"))

				// Warnings from the actor are surfaced on stderr.
				Expect(testUI.Err).To(Say("I am a warning"))
				Expect(testUI.Err).To(Say("I am also a warning"))

				Expect(fakeActor.CreateIsolationSegmentByNameCallCount()).To(Equal(1))
				Expect(fakeActor.CreateIsolationSegmentByNameArgsForCall(0)).To(Equal(resources.IsolationSegment{Name: isolationSegment}))
			})
		})

		When("the create is unsuccessful", func() {
			Context("due to an unexpected error", func() {
				var expectedErr error

				BeforeEach(func() {
					expectedErr = errors.New("I am an error")
					fakeActor.CreateIsolationSegmentByNameReturns(v7action.Warnings{"I am a warning", "I am also a warning"}, expectedErr)
				})

				It("displays the header and error", func() {
					// Unexpected errors propagate out of Execute.
					Expect(executeErr).To(MatchError(expectedErr))

					Expect(testUI.Out).To(Say("Creating isolation segment segment1 as banana..."))

					Expect(testUI.Err).To(Say("I am a warning"))
					Expect(testUI.Err).To(Say("I am also a warning"))
				})
			})

			Context("due to an IsolationSegmentAlreadyExistsError", func() {
				BeforeEach(func() {
					fakeActor.CreateIsolationSegmentByNameReturns(v7action.Warnings{"I am a warning", "I am also a warning"}, actionerror.IsolationSegmentAlreadyExistsError{})
				})

				It("displays the header and ok", func() {
					// "Already exists" is treated as success (idempotent create),
					// with an extra warning on stderr rather than an error.
					Expect(executeErr).ToNot(HaveOccurred())

					Expect(testUI.Out).To(Say("Creating isolation segment segment1 as banana..."))
					Expect(testUI.Out).To(Say("OK"))

					Expect(testUI.Err).To(Say("I am a warning"))
					Expect(testUI.Err).To(Say("I am also a warning"))
					Expect(testUI.Err).To(Say("Isolation segment '%s' already exists.", isolationSegment))
				})
			})
		})
	})
})
apache-2.0
IndieSquare/counterwallet
app/lib/init.js
17300
// Initialize global application state shared through Alloy.
globals = Alloy.Globals;
globals.is_scrolling = false;
globals.Accelerometer = 0;
globals.reorg_views = {};
globals.windows = new Array();
globals.requires = new Array();

/*
 On iOS, all relative paths are currently interpreted as relative to the
 Resources directory, not to the current context. This is a known issue that
 will be addressed in a future release.
 http://docs.appcelerator.com/titanium/3.0/#!/api/Ti.Filesystem
*/
/*
var w = Ti.Filesystem.getFile('window').getDirectoryListing();
for( var i = 0; i < w.length; i++ ){
    var file = w[i].substr(0, w[i].indexOf('.'));
    globals.windows[file] = require('window/' + file);
}
var r = Ti.Filesystem.getFile('require').getDirectoryListing();
for( var i = 0; i < r.length; i++ ){
    var file = r[i].substr(0, r[i].indexOf('.'));
    globals.requires[file] = require('require/' + file);
}
*/
Ti.API.fin = "no";

// Window controllers, keyed by file name without extension
// (static list because of the iOS relative-path issue above).
var w = new Array(
    'assetholders.js',
    'assetinfo.js',
    'createtoken.js',
    'signin.js',
    'home.js',
    'settings.js',
    'send.js',
    'shapeshift.js',
    'webview.js'
);
for( var i = 0; i < w.length; i++ ){
    var file = w[i].substr(0, w[i].indexOf('.'));
    globals.windows[file] = require('window/' + file);
}

// Shared helper modules, keyed by file name without extension.
var r = new Array(
    'acs.js',
    'auth.js',
    'bitcore.js',
    'cache.js',
    'inputverify.js',
    'layer.js',
    'network.js',
    'pubsub.js',
    'tiker.js',
    'util.js',
    'webview.js'
);
for( var i = 0; i < r.length; i++ ){
    var file = r[i].substr(0, r[i].indexOf('.'));
    globals.requires[file] = require('require/' + file);
}

// Template-style replacement: "{0}".format(a) or "{name}".format({name: a}).
String.prototype.format = function(arg){
    var rep_fn = null;
    if( typeof arg == 'object' )
        rep_fn = function(m, k) { return arg[k]; };
    else {
        var args = arguments;
        rep_fn = function(m, k) { return args[ parseInt(k) ]; };
    }
    return this.replace( /\{(\w+)\}/g, rep_fn );
};

// toFixed that strips trailing zeros; defaults to 8 decimal places.
Number.prototype.toFixed2 = function(digit){
    if( digit == null ) digit = 8;
    return this.toFixed(digit).replace(/0+$/, '').replace(/\.$/, '');
};

// Preload the fallback asset icon as a base64 string (empty on failure).
var image_url = 'https://counterpartychain.io/content/images/icons/-.png';
globals.coindaddy_default = null;
try{
    var default_image = Ti.UI.createImageView({image: image_url});
    globals.coindaddy_default = Ti.Utils.base64encode(default_image.toBlob()).toString();
}
catch(e){
    globals.coindaddy_default = '';
}

// Number of digits after the decimal point of value.
Math._getDecimalLength = function(value) {
    var list = (value.toString()).split('.'),
        result = 0;
    if (list[1] !== undefined && list[1].length > 0)
        result = list[1].length;
    return result;
};

// Floating-point-safe multiplication: operate on scaled integers.
Math.multiply = function(value1, value2) {
    var intValue1 = parseInt( (value1.toString()).replace('.', ''), 10);
    var intValue2 = parseInt( (value2.toString()).replace('.', ''), 10);
    var decimalLength = Math._getDecimalLength(value1) + Math._getDecimalLength(value2);
    return (intValue1 * intValue2) / Math.pow(10, decimalLength);
};

// Floating-point-safe division: scale both operands to the same integer base.
Math.divide = function(value1, value2) {
    var intValue1 = parseInt( (value1.toString()).replace('.', ''), 10);
    var intValue2 = parseInt( (value2.toString()).replace('.', ''), 10);
    var len1 = Math._getDecimalLength(value1);
    var len2 = Math._getDecimalLength(value2);
    if( len1 > len2 )
        intValue2 *= Math.pow(10, len1 - len2);
    else if( len1 < len2 )
        intValue1 *= Math.pow(10, len2 - len1);
    // FIX: decimalLength was assigned without "var" (implicit global leak).
    // Logic kept exactly as before (it always resolves to 0).
    var decimalLength;
    if( len1 > 0 && len2 > 0 )
        decimalLength = 0;
    else
        decimalLength = len1 + len2;
    if( len1 == 0 || len2 == 0 )
        decimalLength = 0;
    return (intValue1 / intValue2) * Math.pow(10, decimalLength);
};

// Floating-point-safe subtraction via the safe multiply above.
Math.subtract = function(value1, value2) {
    var max = Math.max(Math._getDecimalLength(value1), Math._getDecimalLength(value2)),
        k = Math.pow(10, max);
    return (Math.multiply(value1, k) - Math.multiply(value2, k)) / k;
};

// Tear down the home-screen reorg overlay once the reorg is over.
function reorg_finish(){
    if( globals.isReorg ){
        globals.isReorg = false;
        globals.reorg_views['home'].removeSelf();
    }
};

var service = null;

// Background poll: asks the server whether the blockchain reorg is still in
// progress; once finished, removes the overlay and notifies the user via a
// local notification (iOS) or status-bar notification (Android).
globals.backgroundfetch = function (e) {
    globals.requires['network'].connectGETv2({
        'method': 'status/reorg',
        'callback': function( result ){
            if( !result.isReorg ){
                reorg_finish();
                if( OS_IOS ){
                    Ti.App.iOS.removeEventListener( 'backgroundfetch', globals.backgroundfetch );
                    var notification = Ti.App.iOS.scheduleLocalNotification({
                        alertBody: L('text_finish_reorg'),
                        date: new Date(new Date().getTime())
                    });
                }
                else if( service != null ){
                    service.stop();
                    service = null;
                    var intent = Ti.Android.createIntent({
                        action: Ti.Android.ACTION_MAIN,
                        className: 'inc.lireneosoft.counterparty.IndiesquareWalletActivity',
                        packageName: 'inc.lireneosoft.counterparty'
                    });
                    intent.flags |= Ti.Android.FLAG_ACTIVITY_RESET_TASK_IF_NEEDED | Ti.Android.FLAG_ACTIVITY_SINGLE_TOP;
                    intent.addCategory(Ti.Android.CATEGORY_LAUNCHER);
                    var notification = Ti.Android.createNotification({
                        contentTitle: 'IndieSquare Wallet',
                        contentText : L('text_finish_reorg'),
                        tickerText: L('text_finish_reorg'),
                        contentIntent: Ti.Android.createPendingIntent({ 'intent': intent }),
                        defaults: Ti.Android.DEFAULT_ALL,
                        flags: Ti.Android.FLAG_SHOW_LIGHTS,
                        icon: Ti.App.Android.R.drawable.appicon,
                        number: 1,
                        when: new Date()
                    });
                    Ti.Android.NotificationManager.notify(1, notification);
                }
            }
            else if( OS_IOS && e != null )
                Ti.App.iOS.endBackgroundHandler(e.handlerId);
        }
    });
};

// A blockchain reorg started: cover the relevant windows with overlays and
// begin polling (iOS background fetch / Android background service).
globals.reorg_occured = function(){
    if( !globals.isReorg ){
        globals.isReorg = true;
        if( OS_IOS ){
            globals.reorg_views['home'] = globals.requires['util'].setReorg(Ti.API.home_win);
            globals.reorg_views['history'] = globals.requires['util'].setReorg(Ti.API.history_win);
            globals.reorg_views['dex'] = globals.requires['util'].setReorg(Ti.API.exchange_win);
            // FIX: was globals.x['util'] — globals.x is undefined and this line
            // threw a TypeError on every iOS reorg.
            globals.reorg_views['shapeshift'] = globals.requires['util'].setReorg(Ti.API.ss_win);
            Ti.App.iOS.setMinimumBackgroundFetchInterval( Ti.App.iOS.BACKGROUNDFETCHINTERVAL_MIN );
            Ti.App.iOS.addEventListener( 'backgroundfetch', globals.backgroundfetch );
        }
        else{
            globals.reorg_views['home'] = globals.requires['util'].setReorg(globals.main_window);
            var intent = Ti.Android.createServiceIntent( { url: 'background/fetch.js' } );
            intent.putExtra('interval', 15000);
            service = Ti.Android.createService(intent);
            service.start();
        }
    }
};

// Parse a CIP-2 style "counterparty:" URI into { address, <query params> }.
// Returns null when the URI does not use the counterparty scheme.
globals._parseCip2 = function( url ){
    if( !url.match(/^counterparty:/) )
        return null;
    var scheme = url.replace(/^counterparty:/, '').split('?');
    var data = { 'address': scheme[0] };
    if( scheme.length > 1 ){
        scheme[1].split('&').forEach(function( val ){
            var v = val.split('=');
            try{
                data[v[0]] = decodeURIComponent(v[1]);
            }
            catch(e){
                Ti.API.info(e.error);
            }
        });
    }
    return data;
};

// Convert "URLSCHEME://?a=1&b=2" into { a: '1', b: '2' }.
function urlToObject(url) {
    var returnObj = {};
    url = url.replace('URLSCHEME://?', '');
    var params = url.split('&');
    params.forEach(function(param) {
        var keyAndValue = param.split('=');
        returnObj[keyAndValue[0]] = decodeURI(keyAndValue[1]);
    });
    // FIX: was "return obj" — "obj" is undefined, so this function always
    // threw a ReferenceError instead of returning the parsed map.
    return returnObj;
}

// Tokenly instant-verify: sign the challenge message with the wallet key and
// POST { msg, address, sig } back to the verification URL.
function signAndSendMessageTokenly(url){
    // FIX: _requires was referenced here but only ever declared as a local of
    // _parseArguments, so this function crashed with a ReferenceError.
    var _requires = globals.requires;
    var loading;
    if( OS_IOS ){
        if( Ti.API.home_win != null ){
            loading = _requires['util'].showLoading(Ti.API.home_win, { width: Ti.UI.FILL, height: Ti.UI.FILL, message: L('label_please_wait')});
        }
    }
    else{
        if( globals.main_window != null ){
            loading = _requires['util'].showLoading(globals.main_window, { width: Ti.UI.FILL, height: Ti.UI.FILL, message: L('label_please_wait')});
        }
    }
    var scheme = url.split('?');
    var msg = scheme[1];
    msg = msg.replace('msg=', '');
    var sig = _requires['bitcore'].signMessage(msg);
    var client = Ti.Network.createHTTPClient({
        // function called when the response data is available
        onload : function(e) {
            if( loading != null )
                loading.removeSelf();
            globals.requires['util'].createDialog({
                title : L('text_tokenly_sent'),
                message: L('text_tokenly_confirm'),
                buttonNames: [L('label_ok')]
            }).show();
        },
        // function called when an error occurs, including a timeout
        onerror : function(e) {
            if( loading != null )
                loading.removeSelf();
            var arr_from_json = JSON.parse( this.responseText);
            if(arr_from_json["error"] != undefined){
                alert( arr_from_json["error"]);
            }
            else{
                alert( this.responseText);
            }
        },
        timeout : 15000 // in milliseconds
    });
    // Prepare the connection.
    client.open("POST", url);
    // Send the request.
    client.send({
        "msg":msg,
        "address":_requires['cache'].data.address,
        "sig":sig
    });
};

// Dispatch an incoming deep-link / QR URL:
//  - Tokenly "/instant-verify/" links  -> confirm, authenticate, sign & send
//  - indiewallet://x-callback-url/...  -> x-callback address handoff
//  - indiewallet://...                 -> in-app actions (send screen, signin,
//                                         new_address, sign) over pubsub
// is_fromQR forces re-processing even if the same URL was handled before.
globals._parseArguments = function( url, is_fromQR ) {
    if( url == null ){
        if( OS_IOS )
            url = Ti.App.getArguments()['url'];
        else{
            var launchIntent = Ti.App.Android.launchIntent;
            if( launchIntent != null ){
                if( launchIntent.hasExtra('source') ){
                    url = 'indiewallet://' + launchIntent.getStringExtra('source');
                }
            }
        }
    }
    var _requires = globals.requires;
    if( url && (is_fromQR || globals.lastUrl !== url) ) {
        globals.lastUrl = url;
        if( url.indexOf('/instant-verify/') > -1 ){
            var tag = Ti.App.Properties.getString(_requires['cache'].data.address);
            if(tag == null || tag == 'NULL' ){
                tag = "";
            }
            var dialog = _requires['util'].createDialog({
                message : L('text_tokenly_desc').format({'address':"\n\n" + tag + "\n" + _requires['cache'].data.address}),
                buttonNames : [L('label_cancel'), L('label_confirm')]
            });
            dialog.addEventListener('click', function(e) {
                if( e.index != e.source.cancel ){
                    globals.requires['auth'].check({
                        title: L('text_authentication'),
                        callback: function(e){
                            if( e.success ){
                                // Small delay so the auth UI can dismiss first.
                                setTimeout(function() {
                                    signAndSendMessageTokenly(url);
                                }, 1000);
                            }
                        }});
                }
            });
            dialog.show();
        }
        else if( url.match(/^indiewallet:\/\/x-callback-url/) ){
            var scheme = url.replace(/^indiewallet:\/\/x-callback-url\//, '').split('?');
            var func = scheme[0];
            var params = new Array();
            var p = scheme[1].split('&');
            for(var i = 0; i < p.length; i++){
                var a = p[i].split('=');
                params[a[0]] = decodeURIComponent(a[1]);
            }
            if( func === 'getaddress' ){
                if( 'x-success' in params ){
                    var address = Ti.App.Properties.getString("current_address");
                    var dialog = _requires['util'].createDialog({
                        title: L('label_callback_getaddress'),
                        message: L('text_callback_getaddress').format( { 'address': address, 'name': params['x-success'] }),
                        buttonNames: [L('label_cancel'), L('label_ok')]
                    });
                    dialog.addEventListener('click', function(e){
                        if( e.index != e.source.cancel ){
                            if( 'channel' in params ){
                                var data = {
                                    'address': address,
                                };
                                _requires['pubsub'].publish({
                                    'channel' : params.channel,
                                    'message' : JSON.stringify(data),
                                    'callback': function(m){
                                        Ti.API.info(JSON.stringify(m));
                                    }
                                });
                            }
                            else if( 'msg' in params ){
                                try{
                                    var sig = _requires['bitcore'].signMessage(params['msg']);
                                    Ti.Platform.openURL(params['x-success'] + '://sendaddress?address=' + _requires['cache'].data.address+'&msg='+params['msg']+'&sig='+sig);
                                }
                                catch(e){
                                    Ti.API.info('error: '+e.error);
                                }
                            }
                            else
                                Ti.Platform.openURL(params['x-success'] + '://sendaddress?address=' + address);
                        }
                    });
                    dialog.show();
                }
            }
        }
        else if( url.match(/^indiewallet:\/\//) ){
            var loading;
            if( OS_IOS ){
                if( Ti.API.home_win != null ){
                    loading = _requires['util'].showLoading(Ti.API.home_win, { width: Ti.UI.FILL, height: Ti.UI.FILL, message: L('label_please_wait')});
                }
            }
            else{
                if( globals.main_window != null ){
                    loading = _requires['util'].showLoading(globals.main_window, { width: Ti.UI.FILL, height: Ti.UI.FILL, message: L('label_please_wait')});
                }
            }
            var scheme = url.replace(/^indiewallet:\/\//, '').split('?');
            var func = scheme[0];
            var params = JSON.parse(decodeURIComponent(scheme[1].split('=')[1]));
            if( func === 'screen_to' ){
                if( params.screen === 'send' ){
                    // Wait until balances and ticker data are loaded before
                    // jumping to the send screen.
                    var s = setInterval(function(){
                        if( globals.balances != null && globals.tiker != null ){
                            clearInterval(s);
                            function gotoScreen( data ){
                                if( loading != null )
                                    loading.removeSelf();
                                var asset = (data.accept_token != null)? data.accept_token: data.asset;
                                var send_token = null;
                                for( var i = 0; i < globals.balances.length; i++ ){
                                    if( globals.balances[i].asset === asset ){
                                        send_token = globals.balances[i];
                                        break;
                                    }
                                }
                                if( send_token != null ){
                                    var data = {
                                        'asset': send_token.asset,
                                        'balance': send_token.balance,
                                        'fiat': globals.requires['tiker'].to(send_token.asset, send_token.balance, globals.requires['cache'].data.currncy),
                                        'address': (data.destination != null)? data.destination: data.address,
                                        'amount': params.amount,
                                        'channel': params.channel,
                                        'currency': data.currency
                                    };
                                    globals.windows['send'].run(data);
                                    globals.publich = function(data){
                                        globals.requires['pubsub'].publish({
                                            'channel' : params.channel,
                                            'message' : JSON.stringify(data),
                                            'callback': function(m){
                                                Ti.API.info(JSON.stringify(m));
                                            }
                                        });
                                    };
                                }
                                else{
                                    globals.requires['util'].createDialog({
                                        message: L('label_errortokenfound').format({token: asset}),
                                        buttonNames: [L('label_close')]
                                    }).show();
                                }
                            }
                            if( params.id != null ){
                                _requires['network'].connect({
                                    'method': 'get_vendings',
                                    'post': { vending_id: params.id },
                                    'callback': function( result ){
                                        gotoScreen( result.vendings[0] );
                                    },
                                    'onError': function(error){
                                        if( loading != null )
                                            loading.removeSelf();
                                        globals.requires['util'].createDialog({
                                            message: L('text_readerror'),
                                            buttonNames: [L('label_close')]
                                        }).show();
                                    }
                                });
                            }
                            else{
                                gotoScreen( params );
                            }
                        }
                    }, 100);
                }
            }
            else{
                if( loading != null )
                    loading.removeSelf();
                function authorization(){
                    globals.requires['auth'].check({
                        title: L('text_authentication'),
                        callback: function(e){
                            if( e.success ){
                                // Publish the result on the caller's pubsub channel
                                // and bounce back to the requesting app if asked.
                                function publish( data ){
                                    if( data != null ){
                                        globals.requires['pubsub'].publish({
                                            'channel' : params.channel,
                                            'message' : JSON.stringify(data),
                                            'callback': function(m){
                                                if( params.vending_wait_id != null ){
                                                    _requires['network'].connect({
                                                        'method': 'edit_vendingwait',
                                                        'post': { id: params.vending_wait_id, status: 2 },
                                                        'callback': function( result ){
                                                            Ti.API.info('success');
                                                        },
                                                        'onError': function(error){
                                                            Ti.API.info('failed');
                                                        }
                                                    });
                                                }
                                            }
                                        });
                                    }
                                    if( !is_fromQR && params.scheme != null ){
                                        if( params.scheme === 'http' ){
                                            if( OS_ANDROID ){
                                                var activity = Ti.Android.currentActivity;
                                                activity.finish();
                                            }
                                        }
                                        else
                                            Ti.Platform.openURL(params.scheme+'://');
                                    }
                                }
                                if( func === 'signin' ){
                                    var data = {
                                        'id': globals.requires['cache'].data.id,
                                        'password': globals.requires['cache'].data.password,
                                        'cs': params.cs
                                    };
                                    publish( data );
                                }
                                else if( func === 'new_address' ){
                                    globals.addWallet(function(params){
                                        if( params.status ){
                                            var data = {
                                                'id': globals.requires['cache'].data.id,
                                                'cs': params.cs,
                                                'address': params.address
                                            };
                                        }
                                        else{
                                            var data = {
                                                'id': globals.requires['cache'].data.id,
                                                'cs': params.cs,
                                                'address': params.action
                                            };
                                        }
                                        publish( data );
                                    });
                                }
                                else if( func === 'sign' ){
                                    globals.requires['pubsub'].subscribe({
                                        channel : params.channel + 'receive',
                                        connect : function(){},
                                        callback : function( unsignd_hex ) {
                                            globals.requires['bitcore'].sign(unsignd_hex, {
                                                'callback': function(signed_tx){
                                                    var data = { 'signed_tx': signed_tx };
                                                    publish( data );
                                                },
                                                'fail': function(){
                                                    globals.requires['util'].createDialog({
                                                        message: L('text_signerror'),
                                                        buttonNames: [L('label_close')]
                                                    }).show();
                                                }
                                            });
                                        }
                                    });
                                }
                            }
                        }});
                }
                var s = setInterval(function(){
                    clearInterval(s);
                    authorization();
                }, 100);
            }
        }
    }
};
apache-2.0
fimtra/datafission
src/main/java/com/fimtra/util/SubscriptionManager.java
6186
/*
 * Copyright (c) 2013 Ramon Servadei
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.fimtra.util;

import java.lang.reflect.Array;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

/**
 * A generic component that manages subscription interest. Subscribers are registered with the
 * manager against a subscription key.
 * <p>
 * <b>Calls to methods that add or remove subscribers should be synchronized.</b>
 * 
 * @author Ramon Servadei
 */
public final class SubscriptionManager<SUBSCRIPTION_KEY, SUBSCRIBER>
{
    /** Shared zero-length array returned for keys with no subscribers. */
    final Object[] emptyArray;

    /** Runtime component type used when creating subscriber arrays. */
    final Class<?> subscriberClass;

    /** Tracks the list of observers for a name. The list uses copy-on-write semantics */
    final ConcurrentMap<SUBSCRIPTION_KEY, SUBSCRIBER[]> subscribersPerKey =
        new ConcurrentHashMap<SUBSCRIPTION_KEY, SUBSCRIBER[]>(2);

    /**
     * Construct the subscription manager passing in the class for the subscriber. This is required
     * to ensure the correct array component type is used internally.
     * 
     * @param subscriberClass
     *            the runtime class of the subscribers this manager holds
     */
    public SubscriptionManager(Class<?> subscriberClass)
    {
        super();
        this.subscriberClass = subscriberClass;
        this.emptyArray = (Object[]) Array.newInstance(subscriberClass, 0);
    }

    /**
     * Get the subscribers for the subscription key
     * <p>
     * <b>Calls to this method DO NOT NEED TO BE SYNCHRONIZED, unlike
     * {@link #addSubscriberFor(Object, Object)}.</b>
     * 
     * @param key
     *            the subscription key
     * @return an array of subscribers for the key. This is NOT a copy - DO NOT MESS WITH IT. Will
     *         never be <code>null</code>.
     */
    @SuppressWarnings("unchecked")
    public SUBSCRIBER[] getSubscribersFor(SUBSCRIPTION_KEY key)
    {
        if (key == null)
        {
            return (SUBSCRIBER[]) this.emptyArray;
        }
        final SUBSCRIBER[] current = this.subscribersPerKey.get(key);
        if (current == null)
        {
            return (SUBSCRIBER[]) this.emptyArray;
        }
        return current;
    }

    /**
     * Add the subscriber to the list of subscribers for the key.
     * <p>
     * <b>Calls to this method should be synchronized.</b>
     * 
     * @param key
     *            the subscription key
     * @param subscriber
     *            the subscriber <b>INSTANCE</b> to add
     * @return <code>true</code> if the subscriber was added, <code>false</code> if this
     *         <b>INSTANCE</b> has already been added against this key
     */
    @SuppressWarnings("unchecked")
    public boolean addSubscriberFor(SUBSCRIPTION_KEY key, SUBSCRIBER subscriber)
    {
        SUBSCRIBER[] current = this.subscribersPerKey.get(key);
        if (current == null)
        {
            current = (SUBSCRIBER[]) Array.newInstance(this.subscriberClass, 1);
            current[0] = subscriber;
        }
        else
        {
            // identity check - the same instance may only be registered once per key
            if (ArrayUtils.containsInstance(current, subscriber))
            {
                return false;
            }
            // copy-on-write: replace the array so concurrent readers see a stable snapshot
            SUBSCRIBER[] copy = (SUBSCRIBER[]) Array.newInstance(this.subscriberClass, current.length + 1);
            System.arraycopy(current, 0, copy, 0, current.length);
            copy[current.length] = subscriber;
            current = copy;
        }
        this.subscribersPerKey.put(key, current);
        return true;
    }

    /**
     * Remove a previously added subscriber from the list associated with the subscription key
     * <p>
     * <b>Calls to this method should be synchronized.</b>
     * 
     * @param key
     *            the subscription key
     * @param subscriber
     *            the subscriber <b>INSTANCE</b> to remove
     * @return <code>true</code> if the subscriber was removed, <code>false</code> if the subscriber
     *         <b>INSTANCE</b> was not found for the subscription key
     */
    @SuppressWarnings("unchecked")
    public boolean removeSubscriberFor(SUBSCRIPTION_KEY key, SUBSCRIBER subscriber)
    {
        SUBSCRIBER[] current = this.subscribersPerKey.get(key);
        if (current == null)
        {
            return false;
        }
        List<SUBSCRIBER> copy = new ArrayList<SUBSCRIBER>(Arrays.asList(current));
        if (!copy.remove(subscriber))
        {
            // FIX: previously the map entry was rewritten with an identical copy even
            // when the subscriber was not present; now the map is left untouched.
            return false;
        }
        if (copy.size() > 0)
        {
            this.subscribersPerKey.put(key,
                copy.toArray((SUBSCRIBER[]) Array.newInstance(this.subscriberClass, copy.size())));
        }
        else
        {
            // last subscriber gone - drop the key entirely
            this.subscribersPerKey.remove(key);
        }
        return true;
    }

    @Override
    public String toString()
    {
        return "SubscriptionManager [" + this.subscribersPerKey + "]";
    }

    /**
     * Remove all subscribers for the key.
     * <p>
     * <b>Calls to this method should be synchronized.</b>
     * 
     * @param key
     *            the subscription key
     * @return the subscribers that were removed, <code>null</code> if there were no subscribers for
     *         this key
     */
    public SUBSCRIBER[] removeSubscribersFor(SUBSCRIPTION_KEY key)
    {
        return this.subscribersPerKey.remove(key);
    }

    /**
     * @return the set of subscription keys for this manager
     */
    public Set<SUBSCRIPTION_KEY> getAllSubscriptionKeys()
    {
        return Collections.unmodifiableSet(this.subscribersPerKey.keySet());
    }

    /** Clears all subscriptions held by this manager. */
    public void destroy()
    {
        this.subscribersPerKey.clear();
    }
}
apache-2.0
mabel-xue/CoolcoolWeather
app/src/main/java/com/mabel/coolcoolweather/service/AutoUpdateService.java
3729
package com.mabel.coolcoolweather.service; import android.app.AlarmManager; import android.app.PendingIntent; import android.app.Service; import android.content.Intent; import android.content.SharedPreferences; import android.os.IBinder; import android.os.SystemClock; import android.preference.PreferenceManager; import android.support.annotation.IntDef; import com.bumptech.glide.Glide; import com.mabel.coolcoolweather.WeatherActivity; import com.mabel.coolcoolweather.gson.Weather; import com.mabel.coolcoolweather.util.HttpUtil; import com.mabel.coolcoolweather.util.Utility; import java.io.IOException; import okhttp3.Call; import okhttp3.Callback; import okhttp3.Response; public class AutoUpdateService extends Service { @Override public IBinder onBind(Intent intent) { return null; } @Override public int onStartCommand(Intent intent, int flags, int startId) { updateWeather(); updateBingPic(); AlarmManager manager = (AlarmManager) getSystemService(ALARM_SERVICE); int anHour = 8 * 60 * 60 * 1000; // 这是8小时的毫秒数 long triggerAtTime = SystemClock.elapsedRealtime() + anHour; Intent i = new Intent(this, AutoUpdateService.class); PendingIntent pi = PendingIntent.getBroadcast(this, 0, i, 0); manager.cancel(pi); manager.set(AlarmManager.ELAPSED_REALTIME_WAKEUP, triggerAtTime, pi); return super.onStartCommand(intent, flags, startId); } /** * 更新天气信息 */ private void updateWeather() { SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(this); String weatherString = prefs.getString("weather", null); if (weatherString != null) { // 有缓存时直接解析天气数据 Weather weather = Utility.handleWeatherResponse(weatherString); String weatherId = weather.basic.weatherId; String weatherUrl = "http://guolin.tech/api/weather?cityid=" + weatherId + "&key=bc0418b57b2d4918819d3974ac1285d9"; HttpUtil.sendOkHttpRequest(weatherUrl, new Callback() { @Override public void onResponse(Call call, Response response) throws IOException { String responseText = response.body().string(); Weather weather 
= Utility.handleWeatherResponse(responseText); if (weather != null && "ok".equals(weather.status)) { SharedPreferences.Editor editor = PreferenceManager.getDefaultSharedPreferences(AutoUpdateService.this).edit(); editor.putString("weather", responseText); editor.apply(); } } @Override public void onFailure(Call call, IOException e) { e.printStackTrace(); } }); } } /** * 更新必应每日一图 */ private void updateBingPic() { String requestBingPic = "http://guolin.tech/api/bing_pic"; HttpUtil.sendOkHttpRequest(requestBingPic, new Callback() { @Override public void onResponse(Call call, Response response) throws IOException { String bingPic = response.body().string(); SharedPreferences.Editor editor = PreferenceManager .getDefaultSharedPreferences(AutoUpdateService.this).edit(); editor.putString("bing_pic", bingPic); editor.apply(); } @Override public void onFailure(Call call, IOException e) { e.printStackTrace(); } }); } }
apache-2.0
manuelRod/YAML-Wordpress-Configurator
vendor/composer/autoload_static.php
1194
<?php // autoload_static.php @generated by Composer namespace Composer\Autoload; class ComposerStaticInit7b5502ec67cd9f01f7c9c2d086e3b8bb { public static $prefixLengthsPsr4 = array ( 'S' => array ( 'Symfony\\Component\\Yaml\\' => 23, ), ); public static $prefixDirsPsr4 = array ( 'Symfony\\Component\\Yaml\\' => array ( 0 => __DIR__ . '/..' . '/symfony/yaml', ), ); public static $prefixesPsr0 = array ( 'C' => array ( 'Composer\\Installers\\' => array ( 0 => __DIR__ . '/..' . '/composer/installers/src', ), ), ); public static function getInitializer(ClassLoader $loader) { return \Closure::bind(function () use ($loader) { $loader->prefixLengthsPsr4 = ComposerStaticInit7b5502ec67cd9f01f7c9c2d086e3b8bb::$prefixLengthsPsr4; $loader->prefixDirsPsr4 = ComposerStaticInit7b5502ec67cd9f01f7c9c2d086e3b8bb::$prefixDirsPsr4; $loader->prefixesPsr0 = ComposerStaticInit7b5502ec67cd9f01f7c9c2d086e3b8bb::$prefixesPsr0; }, null, ClassLoader::class); } }
apache-2.0
foxerfly/Netbeans-JPA-Modeler
JPA Specification/src/org/netbeans/jpa/modeler/spec/OneToMany.java
19572
// // This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, vJAXB 2.1.10 in JDK 6 // See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a> // Any modifications to this file will be lost upon recompilation of the source schema. // Generated on: 2014.01.21 at 01:52:19 PM IST // package org.netbeans.jpa.modeler.spec; import java.util.ArrayList; import java.util.List; import javax.lang.model.element.AnnotationMirror; import javax.lang.model.element.Element; import javax.lang.model.element.VariableElement; import javax.lang.model.type.DeclaredType; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlAttribute; import javax.xml.bind.annotation.XmlElement; import javax.xml.bind.annotation.XmlType; import org.netbeans.jpa.modeler.spec.extend.JoinColumnHandler; import org.netbeans.jpa.modeler.spec.extend.RelationAttribute; import org.netbeans.jpa.source.JavaSourceParserUtil; import org.netbeans.modeler.core.NBModelerUtil; /** * * * @Target({METHOD, FIELD}) @Retention(RUNTIME) public @interface OneToMany { * Class targetEntity() default void.class; CascadeType[] cascade() default {}; * FetchType fetch() default LAZY; String mappedBy() default ""; } * * * * <p> * Java class for one-to-many complex type. * * <p> * The following schema fragment specifies the expected content contained within * this class. 
* * <pre> * &lt;complexType name="one-to-many"> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;sequence> * &lt;choice> * &lt;element name="order-by" type="{http://java.sun.com/xml/ns/persistence/orm}order-by" minOccurs="0"/> * &lt;element name="order-column" type="{http://java.sun.com/xml/ns/persistence/orm}order-column" minOccurs="0"/> * &lt;/choice> * &lt;choice> * &lt;element name="map-key" type="{http://java.sun.com/xml/ns/persistence/orm}map-key" minOccurs="0"/> * &lt;sequence> * &lt;element name="map-key-class" type="{http://java.sun.com/xml/ns/persistence/orm}map-key-class" minOccurs="0"/> * &lt;choice> * &lt;element name="map-key-temporal" type="{http://java.sun.com/xml/ns/persistence/orm}temporal" minOccurs="0"/> * &lt;element name="map-key-enumerated" type="{http://java.sun.com/xml/ns/persistence/orm}enumerated" minOccurs="0"/> * &lt;element name="map-key-attribute-override" type="{http://java.sun.com/xml/ns/persistence/orm}attribute-override" maxOccurs="unbounded" minOccurs="0"/> * &lt;/choice> * &lt;choice> * &lt;element name="map-key-column" type="{http://java.sun.com/xml/ns/persistence/orm}map-key-column" minOccurs="0"/> * &lt;element name="map-key-join-column" type="{http://java.sun.com/xml/ns/persistence/orm}map-key-join-column" maxOccurs="unbounded" minOccurs="0"/> * &lt;/choice> * &lt;/sequence> * &lt;/choice> * &lt;choice> * &lt;element name="join-table" type="{http://java.sun.com/xml/ns/persistence/orm}join-table" minOccurs="0"/> * &lt;element name="join-column" type="{http://java.sun.com/xml/ns/persistence/orm}join-column" maxOccurs="unbounded" minOccurs="0"/> * &lt;/choice> * &lt;element name="cascade" type="{http://java.sun.com/xml/ns/persistence/orm}cascade-type" minOccurs="0"/> * &lt;/sequence> * &lt;attribute name="name" use="required" type="{http://www.w3.org/2001/XMLSchema}string" /> * &lt;attribute name="target-entity" type="{http://www.w3.org/2001/XMLSchema}string" /> * &lt;attribute 
name="fetch" type="{http://java.sun.com/xml/ns/persistence/orm}fetch-type" /> * &lt;attribute name="access" type="{http://java.sun.com/xml/ns/persistence/orm}access-type" /> * &lt;attribute name="mapped-by" type="{http://www.w3.org/2001/XMLSchema}string" /> * &lt;attribute name="orphan-removal" type="{http://www.w3.org/2001/XMLSchema}boolean" /> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * </pre> * * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "one-to-many", propOrder = { "orderBy", "orderColumn", "mapKey", "mapKeyClass", "mapKeyTemporal", "mapKeyEnumerated", "mapKeyAttributeOverride", "mapKeyColumn", "mapKeyJoinColumn", "joinTable", "joinColumn", "cascade" }) public class OneToMany extends RelationAttribute implements JoinColumnHandler { @XmlElement(name = "order-by") protected String orderBy;//RENENG PENDING @XmlElement(name = "order-column") protected OrderColumn orderColumn;//RENENG PENDING @XmlElement(name = "map-key") protected MapKey mapKey;//RENENG PENDING @XmlElement(name = "map-key-class") protected MapKeyClass mapKeyClass;//RENENG PENDING @XmlElement(name = "map-key-temporal") protected TemporalType mapKeyTemporal;//RENENG PENDING @XmlElement(name = "map-key-enumerated") protected EnumType mapKeyEnumerated;//RENENG PENDING @XmlElement(name = "map-key-attribute-override") protected List<AttributeOverride> mapKeyAttributeOverride;//RENENG PENDING @XmlElement(name = "map-key-column") protected MapKeyColumn mapKeyColumn;//RENENG PENDING @XmlElement(name = "map-key-join-column") protected List<MapKeyJoinColumn> mapKeyJoinColumn;//RENENG PENDING @XmlElement(name = "join-table") protected JoinTable joinTable; @XmlElement(name = "join-column") protected List<JoinColumn> joinColumn; protected CascadeType cascade; @XmlAttribute(required = true) protected String name; @XmlAttribute(name = "target-entity") protected String targetEntity; @XmlAttribute protected FetchType fetch; @XmlAttribute protected AccessType access; 
@XmlAttribute(name = "mapped-by") protected String mappedBy; @XmlAttribute(name = "orphan-removal") protected Boolean orphanRemoval; @XmlAttribute(name = "collection-type") private String collectionType;//custom added public static OneToMany load(Element element, VariableElement variableElement) { AnnotationMirror annotationMirror = JavaSourceParserUtil.findAnnotation(element, "javax.persistence.OneToMany"); OneToMany oneToMany = new OneToMany(); oneToMany.setId(NBModelerUtil.getAutoGeneratedStringId()); oneToMany.joinTable = JoinTable.load(element, variableElement); AnnotationMirror joinColumnsAnnotationMirror = JavaSourceParserUtil.findAnnotation(element, "javax.persistence.JoinColumns"); if (joinColumnsAnnotationMirror != null) { List joinColumnsAnnot = (List) JavaSourceParserUtil.findAnnotationValue(joinColumnsAnnotationMirror, "value"); if (joinColumnsAnnot != null) { for (Object joinColumnObj : joinColumnsAnnot) { oneToMany.getJoinColumn().add(JoinColumn.load(element, variableElement, (AnnotationMirror) joinColumnObj)); } } } List cascadeList = (List) JavaSourceParserUtil.findAnnotationValue(annotationMirror, "cascade"); if (cascadeList != null) { CascadeType cascadeType = new CascadeType(); oneToMany.cascade = cascadeType; for (Object cascadeObj : cascadeList) { if (cascadeObj.equals("ALL")) { cascadeType.setCascadeAll(new EmptyType()); } else if (cascadeObj.equals("PERSIST")) { cascadeType.setCascadePersist(new EmptyType()); } else if (cascadeObj.equals("MERGE")) { cascadeType.setCascadeMerge(new EmptyType()); } else if (cascadeObj.equals("REMOVE")) { cascadeType.setCascadeMerge(new EmptyType()); } else if (cascadeObj.equals("REFRESH")) { cascadeType.setCascadeRefresh(new EmptyType()); } else if (cascadeObj.equals("DETACH")) { cascadeType.setCascadeDetach(new EmptyType()); } else { throw new IllegalStateException("Unknown Cascade Type : " + cascadeObj.toString()); } } } oneToMany.name = variableElement.getSimpleName().toString(); 
oneToMany.setCollectionType(((DeclaredType) variableElement.asType()).asElement().toString()); DeclaredType declaredType = (DeclaredType) JavaSourceParserUtil.findAnnotationValue(annotationMirror, "targetEntity"); if (declaredType == null) { declaredType = (DeclaredType) ((DeclaredType) variableElement.asType()).getTypeArguments().get(0); } oneToMany.targetEntity = declaredType.asElement().getSimpleName().toString(); Object fetchObj = JavaSourceParserUtil.findAnnotationValue(annotationMirror, "fetch"); if (fetchObj != null) { oneToMany.fetch = FetchType.valueOf(fetchObj.toString()); } // oneToMany.optional = (Boolean) JpaControllerUtil.findAnnotationValue(annotationMirror, "optional"); oneToMany.access = AccessType.load(element); oneToMany.mappedBy = (String) JavaSourceParserUtil.findAnnotationValue(annotationMirror, "mappedBy"); oneToMany.orphanRemoval = (Boolean) JavaSourceParserUtil.findAnnotationValue(annotationMirror, "orphanRemoval"); return oneToMany; } /** * Gets the value of the orderBy property. * * @return possible object is {@link String } * */ public String getOrderBy() { return orderBy; } /** * Sets the value of the orderBy property. * * @param value allowed object is {@link String } * */ public void setOrderBy(String value) { this.orderBy = value; } /** * Gets the value of the orderColumn property. * * @return possible object is {@link OrderColumn } * */ public OrderColumn getOrderColumn() { return orderColumn; } /** * Sets the value of the orderColumn property. * * @param value allowed object is {@link OrderColumn } * */ public void setOrderColumn(OrderColumn value) { this.orderColumn = value; } /** * Gets the value of the mapKey property. * * @return possible object is {@link MapKey } * */ public MapKey getMapKey() { return mapKey; } /** * Sets the value of the mapKey property. * * @param value allowed object is {@link MapKey } * */ public void setMapKey(MapKey value) { this.mapKey = value; } /** * Gets the value of the mapKeyClass property. 
* * @return possible object is {@link MapKeyClass } * */ public MapKeyClass getMapKeyClass() { return mapKeyClass; } /** * Sets the value of the mapKeyClass property. * * @param value allowed object is {@link MapKeyClass } * */ public void setMapKeyClass(MapKeyClass value) { this.mapKeyClass = value; } /** * Gets the value of the mapKeyTemporal property. * * @return possible object is {@link TemporalType } * */ public TemporalType getMapKeyTemporal() { return mapKeyTemporal; } /** * Sets the value of the mapKeyTemporal property. * * @param value allowed object is {@link TemporalType } * */ public void setMapKeyTemporal(TemporalType value) { this.mapKeyTemporal = value; } /** * Gets the value of the mapKeyEnumerated property. * * @return possible object is {@link EnumType } * */ public EnumType getMapKeyEnumerated() { return mapKeyEnumerated; } /** * Sets the value of the mapKeyEnumerated property. * * @param value allowed object is {@link EnumType } * */ public void setMapKeyEnumerated(EnumType value) { this.mapKeyEnumerated = value; } /** * Gets the value of the mapKeyAttributeOverride property. * * <p> * This accessor method returns a reference to the live list, not a * snapshot. Therefore any modification you make to the returned list will * be present inside the JAXB object. This is why there is not a * <CODE>set</CODE> method for the mapKeyAttributeOverride property. * * <p> * For example, to add a new item, do as follows: * <pre> * getMapKeyAttributeOverride().add(newItem); * </pre> * * * <p> * Objects of the following type(s) are allowed in the list * {@link AttributeOverride } * * */ public List<AttributeOverride> getMapKeyAttributeOverride() { if (mapKeyAttributeOverride == null) { mapKeyAttributeOverride = new ArrayList<AttributeOverride>(); } return this.mapKeyAttributeOverride; } /** * Gets the value of the mapKeyColumn property. 
* * @return possible object is {@link MapKeyColumn } * */ public MapKeyColumn getMapKeyColumn() { return mapKeyColumn; } /** * Sets the value of the mapKeyColumn property. * * @param value allowed object is {@link MapKeyColumn } * */ public void setMapKeyColumn(MapKeyColumn value) { this.mapKeyColumn = value; } /** * Gets the value of the mapKeyJoinColumn property. * * <p> * This accessor method returns a reference to the live list, not a * snapshot. Therefore any modification you make to the returned list will * be present inside the JAXB object. This is why there is not a * <CODE>set</CODE> method for the mapKeyJoinColumn property. * * <p> * For example, to add a new item, do as follows: * <pre> * getMapKeyJoinColumn().add(newItem); * </pre> * * * <p> * Objects of the following type(s) are allowed in the list * {@link MapKeyJoinColumn } * * */ public List<MapKeyJoinColumn> getMapKeyJoinColumn() { if (mapKeyJoinColumn == null) { mapKeyJoinColumn = new ArrayList<MapKeyJoinColumn>(); } return this.mapKeyJoinColumn; } /** * Gets the value of the joinTable property. * * @return possible object is {@link JoinTable } * */ @Override public JoinTable getJoinTable() { if (joinTable == null) { joinTable = new JoinTable(); } return joinTable; } /** * Sets the value of the joinTable property. * * @param value allowed object is {@link JoinTable } * */ public void setJoinTable(JoinTable value) { this.joinTable = value; } /** * Gets the value of the joinColumn property. * * <p> * This accessor method returns a reference to the live list, not a * snapshot. Therefore any modification you make to the returned list will * be present inside the JAXB object. This is why there is not a * <CODE>set</CODE> method for the joinColumn property. 
* * <p> * For example, to add a new item, do as follows: * <pre> * getJoinColumn().add(newItem); * </pre> * * * <p> * Objects of the following type(s) are allowed in the list * {@link JoinColumn } * * */ public List<JoinColumn> getJoinColumn() { if (joinColumn == null) { joinColumn = new ArrayList<JoinColumn>(); } return this.joinColumn; } @Override public void addJoinColumn(JoinColumn joinColumn_In) { if (joinColumn == null) { joinColumn = new ArrayList<JoinColumn>(); } joinColumn.add(joinColumn_In); } @Override public void removeJoinColumn(JoinColumn joinColumn_In) { if (joinColumn == null) { joinColumn = new ArrayList<JoinColumn>(); } joinColumn.remove(joinColumn_In); } /** * Gets the value of the cascade property. * * @return possible object is {@link CascadeType } * */ public CascadeType getCascade() { return cascade; } /** * Sets the value of the cascade property. * * @param value allowed object is {@link CascadeType } * */ public void setCascade(CascadeType value) { this.cascade = value; } /** * Gets the value of the name property. * * @return possible object is {@link String } * */ public String getName() { return name; } /** * Sets the value of the name property. * * @param value allowed object is {@link String } * */ public void setName(String value) { this.name = value; } /** * Gets the value of the targetEntity property. * * @return possible object is {@link String } * */ public String getTargetEntity() { return targetEntity; } /** * Sets the value of the targetEntity property. * * @param value allowed object is {@link String } * */ public void setTargetEntity(String value) { this.targetEntity = value; } /** * Gets the value of the fetch property. * * @return possible object is {@link FetchType } * */ public FetchType getFetch() { return fetch; } /** * Sets the value of the fetch property. * * @param value allowed object is {@link FetchType } * */ public void setFetch(FetchType value) { this.fetch = value; } /** * Gets the value of the access property. 
* * @return possible object is {@link AccessType } * */ public AccessType getAccess() { return access; } /** * Sets the value of the access property. * * @param value allowed object is {@link AccessType } * */ public void setAccess(AccessType value) { this.access = value; } /** * Gets the value of the mappedBy property. * * @return possible object is {@link String } * */ public String getMappedBy() { return mappedBy; } /** * Sets the value of the mappedBy property. * * @param value allowed object is {@link String } * */ public void setMappedBy(String value) { this.mappedBy = value; } /** * Gets the value of the orphanRemoval property. * * @return possible object is {@link Boolean } * */ public Boolean isOrphanRemoval() { return orphanRemoval; } /** * Sets the value of the orphanRemoval property. * * @param value allowed object is {@link Boolean } * */ public void setOrphanRemoval(Boolean value) { this.orphanRemoval = value; } /** * @return the collectionType */ public String getCollectionType() { if (collectionType == null) { collectionType = "java.util.Collection"; } return collectionType; } /** * @param collectionType the collectionType to set */ public void setCollectionType(String collectionType) { this.collectionType = collectionType; } }
apache-2.0
Puchaczov/TQL.RDL
TQL.RDL/TQL.RDL.Parser/Nodes/GreaterEqualNode.cs
435
using System;

namespace TQL.RDL.Parser.Nodes
{
    /// <summary>
    /// Binary syntax-tree node representing the "greater than or equal" (&gt;=) comparison.
    /// </summary>
    public class GreaterEqualNode : BinaryNode
    {
        /// <summary>Creates the node from its left and right operand subtrees.</summary>
        public GreaterEqualNode(RdlSyntaxNode left, RdlSyntaxNode right)
            : base(left, right)
        {
        }

        /// <summary>A comparison always evaluates to a boolean.</summary>
        public override Type ReturnType => typeof(bool);

        /// <summary>Renders the subtree as "left &gt;= right".</summary>
        public override string ToString() => ToString(">=");

        /// <summary>Dispatches to the visitor overload for this node type.</summary>
        public override void Accept(INodeVisitor visitor) => visitor.Visit(this);
    }
}
apache-2.0
aws/aws-sdk-java
aws-java-sdk-appflow/src/main/java/com/amazonaws/services/appflow/model/transform/EventBridgeMetadataMarshaller.java
1679
/* * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.appflow.model.transform; import javax.annotation.Generated; import com.amazonaws.SdkClientException; import com.amazonaws.services.appflow.model.*; import com.amazonaws.protocol.*; import com.amazonaws.annotation.SdkInternalApi; /** * EventBridgeMetadataMarshaller */ @Generated("com.amazonaws:aws-java-sdk-code-generator") @SdkInternalApi public class EventBridgeMetadataMarshaller { private static final EventBridgeMetadataMarshaller instance = new EventBridgeMetadataMarshaller(); public static EventBridgeMetadataMarshaller getInstance() { return instance; } /** * Marshall the given parameter object. */ public void marshall(EventBridgeMetadata eventBridgeMetadata, ProtocolMarshaller protocolMarshaller) { if (eventBridgeMetadata == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } } }
apache-2.0
onhate/schemorger
src/main/java/org/schema/CreditCard.java
610
package org.schema;

/**
 * A credit or debit card type as a standardized procedure for transferring the
 * monetary amount for a purchase. Commonly used values:
 * http://purl.org/goodrelations/v1#AmericanExpress
 * http://purl.org/goodrelations/v1#DinersClub
 * http://purl.org/goodrelations/v1#Discover
 * http://purl.org/goodrelations/v1#JCB
 * http://purl.org/goodrelations/v1#MasterCard
 * http://purl.org/goodrelations/v1#VISA
 *
 * @fullPath Thing &gt; Intangible &gt; Enumeration &gt; PaymentMethod &gt; CreditCard
 *
 * @author Texelz (by Onhate)
 */
public class CreditCard extends PaymentMethod {
    // Marker type: all behavior is inherited from PaymentMethod.
}
apache-2.0
zckrbrt/verteilte-systeme
spring-redis-demo/src/main/java/de/hska/lkit/demo/redis/repo/impl/UserRepositoryImpl.java
3461
package de.hska.lkit.demo.redis.repo.impl;

import java.util.Map;

import javax.annotation.PostConstruct;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.redis.core.HashOperations;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.data.redis.core.SetOperations;
import org.springframework.data.redis.core.StringRedisTemplate;
import org.springframework.data.redis.support.atomic.RedisAtomicLong;
import org.springframework.stereotype.Repository;

import de.hska.lkit.demo.redis.model.User;
import de.hska.lkit.demo.redis.repo.UserRepository;

/**
 * Redis-backed {@link UserRepository}. Each user is stored twice: once as a
 * field-by-field string hash under {@code user:<username>} (with the key
 * registered in a set of all users), and once as a serialized object in a
 * single global hash.
 *
 * @author knad0001
 */
@Repository
public class UserRepositoryImpl implements UserRepository {

    /** Set containing the string-hash key of every saved user. */
    private static final String KEY_FOR_ALL_USERS = "all:users";

    /** Hash mapping user keys to serialized {@link User} objects. */
    private static final String KEY_HASH_ALL_USERS = "hash:all:user";

    /** Atomic counter used to generate unique user ids. */
    private RedisAtomicLong userid;

    /** Template for string-valued operations. */
    private StringRedisTemplate stringRedisTemplate;

    /** Template for object-valued operations. */
    private RedisTemplate<String, User> redisTemplate;

    /** Hash operations bound to {@link #stringRedisTemplate}. */
    private HashOperations<String, String, String> srt_hashOps;

    /** Set operations bound to {@link #stringRedisTemplate}. */
    private SetOperations<String, String> setOps;

    /** Hash operations bound to {@link #redisTemplate}. */
    private HashOperations<String, Object, Object> rt_hashOps;

    @Autowired
    public UserRepositoryImpl(RedisTemplate<String, User> redisTemplate, StringRedisTemplate stringRedisTemplate) {
        this.redisTemplate = redisTemplate;
        this.stringRedisTemplate = stringRedisTemplate;
        this.userid = new RedisAtomicLong("userid", stringRedisTemplate.getConnectionFactory());
    }

    /** Binds the operation views once the templates have been injected. */
    @PostConstruct
    private void init() {
        srt_hashOps = stringRedisTemplate.opsForHash();
        setOps = stringRedisTemplate.opsForSet();
        rt_hashOps = redisTemplate.opsForHash();
    }

    /*
     * (non-Javadoc)
     *
     * @see hska.iwi.vslab.repo.UserRepository#saveUser(hska.iwi.vslab.model.User)
     */
    @Override
    public void saveUser(User user) {
        // Assign a freshly generated unique id to the user.
        String id = String.valueOf(userid.incrementAndGet());
        user.setId(id);

        // Store the user field-by-field as a string hash. Note: if the
        // username already exists, the existing entry is overwritten rather
        // than duplicated.
        String key = "user:" + user.getUsername();
        srt_hashOps.put(key, "id", id);
        srt_hashOps.put(key, "firstName", user.getFirstname());
        srt_hashOps.put(key, "lastName", user.getLastname());
        srt_hashOps.put(key, "username", user.getUsername());
        srt_hashOps.put(key, "password", user.getPassword());

        // Register the key in the set of all users.
        setOps.add(KEY_FOR_ALL_USERS, key);

        // Also store the user as a serialized object.
        rt_hashOps.put(KEY_HASH_ALL_USERS, key, user);
    }

    @Override
    public Map<Object, Object> findAllUsers() {
        return rt_hashOps.entries(KEY_HASH_ALL_USERS);
    }

    @Override
    public User findUser(String username) {
        String key = "user:" + username;
        // Guard clause: unknown users yield null, matching the original contract.
        if (!setOps.isMember(KEY_FOR_ALL_USERS, key)) {
            return null;
        }
        User user = new User();
        user.setId(srt_hashOps.get(key, "id"));
        user.setFirstname(srt_hashOps.get(key, "firstName"));
        user.setLastname(srt_hashOps.get(key, "lastName"));
        user.setUsername(srt_hashOps.get(key, "username"));
        user.setPassword(srt_hashOps.get(key, "password"));
        return user;
    }
}
apache-2.0
schorndorfer/uima-components
annotator-parent/type-system/src/main/java/org/apache/ctakes/typesystem/type/refsem/LabDeltaFlag_Type.java
2928
/* First created by JCasGen Fri Jan 03 13:40:15 CST 2014 */ package org.apache.ctakes.typesystem.type.refsem; import org.apache.uima.jcas.JCas; import org.apache.uima.jcas.JCasRegistry; import org.apache.uima.cas.impl.CASImpl; import org.apache.uima.cas.impl.FSGenerator; import org.apache.uima.cas.FeatureStructure; import org.apache.uima.cas.impl.TypeImpl; import org.apache.uima.cas.Type; import org.apache.uima.cas.impl.FeatureImpl; import org.apache.uima.cas.Feature; /** An indicator to warn that the laboratory test result has changed significantly from the previous identical laboratory test result. * Updated by JCasGen Fri Jan 03 13:40:15 CST 2014 * @generated */ public class LabDeltaFlag_Type extends Attribute_Type { /** @generated */ @Override protected FSGenerator getFSGenerator() {return fsGenerator;} /** @generated */ private final FSGenerator fsGenerator = new FSGenerator() { public FeatureStructure createFS(int addr, CASImpl cas) { if (LabDeltaFlag_Type.this.useExistingInstance) { // Return eq fs instance if already created FeatureStructure fs = LabDeltaFlag_Type.this.jcas.getJfsFromCaddr(addr); if (null == fs) { fs = new LabDeltaFlag(addr, LabDeltaFlag_Type.this); LabDeltaFlag_Type.this.jcas.putJfsFromCaddr(addr, fs); return fs; } return fs; } else return new LabDeltaFlag(addr, LabDeltaFlag_Type.this); } }; /** @generated */ @SuppressWarnings ("hiding") public final static int typeIndexID = LabDeltaFlag.typeIndexID; /** @generated @modifiable */ @SuppressWarnings ("hiding") public final static boolean featOkTst = JCasRegistry.getFeatOkTst("org.apache.ctakes.typesystem.type.refsem.LabDeltaFlag"); /** @generated */ final Feature casFeat_value; /** @generated */ final int casFeatCode_value; /** @generated */ public String getValue(int addr) { if (featOkTst && casFeat_value == null) jcas.throwFeatMissing("value", "org.apache.ctakes.typesystem.type.refsem.LabDeltaFlag"); return ll_cas.ll_getStringValue(addr, casFeatCode_value); } /** @generated */ public void 
setValue(int addr, String v) { if (featOkTst && casFeat_value == null) jcas.throwFeatMissing("value", "org.apache.ctakes.typesystem.type.refsem.LabDeltaFlag"); ll_cas.ll_setStringValue(addr, casFeatCode_value, v);} /** initialize variables to correspond with Cas Type and Features * @generated */ public LabDeltaFlag_Type(JCas jcas, Type casType) { super(jcas, casType); casImpl.getFSClassRegistry().addGeneratorForType((TypeImpl)this.casType, getFSGenerator()); casFeat_value = jcas.getRequiredFeatureDE(casType, "value", "uima.cas.String", featOkTst); casFeatCode_value = (null == casFeat_value) ? JCas.INVALID_FEATURE_CODE : ((FeatureImpl)casFeat_value).getCode(); } }
apache-2.0
daybaryour/dividedChatInterface
e2e/app.e2e-spec.ts
294
import { AppPage } from './app.po';

// End-to-end smoke test for the application landing page.
describe('node-angular App', () => {
    let page: AppPage;

    // A fresh page object per spec keeps tests independent.
    beforeEach(() => {
        page = new AppPage();
    });

    it('should display welcome message', () => {
        page.navigateTo();
        expect(page.getParagraphText()).toEqual('Welcome to app!');
    });
});
apache-2.0
consulo/consulo-android
tools-base/rpclib/src/test/java/com/android/tools/rpclib/binary/HandleTest.java
3613
/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.android.tools.rpclib.binary;

import junit.framework.TestCase;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

/** Unit tests for {@link Handle} equality, hashing, string form and decoding. */
public class HandleTest extends TestCase {
  static final String handleString = "000102030405060708090a0b0c0d0e0f10111213";
  static final byte[] handleBytes = {
    0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09,
    0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f, 0x10, 0x11, 0x12, 0x13
  };

  public void testHandleEquality() {
    // Check handle identity.
    Handle handle1 = new Handle(handleBytes);
    assertEquals(handle1, handle1);

    // Check equality of two handles created with the same bytes.
    Handle handle2 = new Handle(handleBytes);
    assertEquals(handle1, handle2);
  }

  public void testHandleNonEquality() {
    Handle handle = new Handle(handleBytes);
    assertFalse(handle.equals(null));

    // BUG FIX: the original used assertNotSame for the three checks below.
    // assertNotSame compares references, which are always distinct for freshly
    // constructed objects, so the checks were vacuous; the intent (per the
    // comments) is value inequality, verified here via equals().

    // A handle with all-zero bytes must not equal the test handle.
    Handle zeroHandle = new Handle(new byte[handleBytes.length]);
    assertFalse(zeroHandle.equals(handle));

    // A handle differing only in the last byte must not equal the test handle.
    byte[] handleLastDiffBytes = Arrays.copyOf(handleBytes, handleBytes.length);
    handleLastDiffBytes[handleLastDiffBytes.length - 1]++;
    Handle handleLastDiff = new Handle(handleLastDiffBytes);
    assertFalse(handleLastDiff.equals(handle));

    // A handle differing only in the first byte must not equal the test handle.
    // BUG FIX: the original incremented handleLastDiffBytes[0] here, leaving
    // this array identical to handleBytes, so the case was never exercised.
    byte[] handleFirstDiffBytes = Arrays.copyOf(handleBytes, handleBytes.length);
    handleFirstDiffBytes[0]++;
    Handle handleFirstDiff = new Handle(handleFirstDiffBytes);
    assertFalse(handleFirstDiff.equals(handle));
  }

  public void testHandleToString() {
    // The string form is the lowercase hex rendering of the bytes.
    Handle handle = new Handle(handleBytes);
    assertEquals(handleString, handle.toString());
  }

  public void testHandleAsKey() {
    Set<Handle> set = new HashSet<Handle>();
    Handle handle1 = new Handle(handleBytes);
    set.add(handle1);
    assertTrue(set.contains(handle1));
    assertEquals(1, set.size());

    // Two handles with the same bytes should be seen as the same set element.
    Handle sameHandle = new Handle(handleBytes);
    set.add(sameHandle);
    assertTrue(set.contains(sameHandle));
    assertEquals(1, set.size());

    // Two handles with different bytes should be separate elements in a set.
    Handle zeroHandle = new Handle(new byte[20]);
    set.add(zeroHandle);
    assertTrue(set.contains(zeroHandle));
    assertEquals(2, set.size());
  }

  public void testDecodeHandle() throws IOException {
    // A handle decoded from a byte stream equals one built from those bytes.
    ByteArrayInputStream input = new ByteArrayInputStream(handleBytes);
    Decoder d = new Decoder(input);
    Handle handle = new Handle(handleBytes);
    Handle handleFromDecoder = new Handle(d);
    assertEquals(handle, handleFromDecoder);
  }
}
apache-2.0
jsnoble/teraslice
packages/ts-transforms/test/operations/validations/phonenumber-spec.ts
3538
import { PhoneNumber } from '../../../src/operations';
import { DataEntity } from '@terascope/job-components';

describe('phone number validation', () => {
    it('can instantiate', () => {
        const config = { refs: 'someId', source_field: 'someField' };
        expect(() => new PhoneNumber(config)).not.toThrow();
    });

    it('can properly throw with bad config values', () => {
        // Each config is invalid for a different reason: wrong type, empty
        // string, non-string object, and missing field.
        const numberSource = { source_field: 1324 };
        const emptySource = { source_field: '' };
        const objectSource = { source_field: {} };
        const missingSource = {};
        // @ts-ignore
        expect(() => new PhoneNumber(numberSource)).toThrow();
        // @ts-ignore
        expect(() => new PhoneNumber(emptySource)).toThrow();
        // @ts-ignore
        expect(() => new PhoneNumber(objectSource)).toThrow();
        // @ts-ignore
        expect(() => new PhoneNumber(missingSource)).toThrow();
    });

    it('can validate phone number fields', () => {
        const validator = new PhoneNumber({ refs: 'someId', source_field: 'field' });
        const metaData = { selectors: { 'some:query' : true } };

        // Valid numbers carry a leading country code "1"; the others are
        // too long, too short, or lack the country code.
        const validPhone1 = '14803847362';
        const validPhone2 = '1(480)384-7362';
        const notValidPhone = '148038473623';
        const notValidPhone2 = '3847362';
        const notValidPhone3 = '4803847362';

        const doc1 = new DataEntity({ field: '56.234,95.234' }, metaData);
        const doc2 = new DataEntity({ field: 123423 }, metaData);
        const doc3 = new DataEntity({ field: 'some data here' });
        const doc4 = new DataEntity({ field: [1324] });
        const doc5 = new DataEntity({ field: { some: 'data' } });
        const doc6 = new DataEntity({ field: true }, metaData);
        const doc7 = new DataEntity({});
        const doc8 = new DataEntity({ field: validPhone1 });
        const doc9 = new DataEntity({ field: validPhone2 });
        const doc10 = new DataEntity({ field: notValidPhone });
        const doc11 = new DataEntity({ field: notValidPhone2 });
        const doc12 = new DataEntity({ field: notValidPhone3 });

        const out1 = validator.run(doc1);
        const out2 = validator.run(doc2);
        const out3 = validator.run(doc3);
        const out4 = validator.run(doc4);
        const out5 = validator.run(doc5);
        const out6 = validator.run(doc6);
        const out7 = validator.run(doc7);
        const out8 = validator.run(doc8);
        const out9 = validator.run(doc9);
        const out10 = validator.run(doc10);
        const out11 = validator.run(doc11);
        const out12 = validator.run(doc12);

        // Invalid values are stripped from the document, while any entity
        // metadata (selectors) must survive the validation pass.
        expect(DataEntity.isDataEntity(out1)).toEqual(true);
        expect(DataEntity.getMetadata(out1 as DataEntity, 'selectors')).toEqual(metaData.selectors);
        expect(out1).toEqual({});

        expect(DataEntity.getMetadata(out2 as DataEntity, 'selectors')).toEqual(metaData.selectors);
        expect(out2).toEqual({});

        expect(out3).toEqual({});
        expect(out4).toEqual({});
        expect(out5).toEqual({});
        expect(out6).toEqual({});
        expect(DataEntity.getMetadata(out6 as DataEntity, 'selectors')).toEqual(metaData.selectors);
        expect(out7).toEqual({});

        // Valid phone numbers are left in place untouched.
        expect(out8).toEqual({ field: validPhone1 });
        expect(out9).toEqual({ field: validPhone2 });

        expect(out10).toEqual({});
        expect(out11).toEqual({});
        expect(out12).toEqual({});
    });
});
apache-2.0
plus3it/watchmaker
src/watchmaker/managers/platform.py
10887
# -*- coding: utf-8 -*- """Watchmaker base manager.""" from __future__ import (absolute_import, division, print_function, unicode_literals, with_statement) import concurrent.futures import logging import os import shutil import subprocess import tarfile import tempfile import zipfile import watchmaker.utils from watchmaker.exceptions import WatchmakerError from watchmaker.utils import urllib class PlatformManagerBase(object): """ Base class for operating system managers. All child classes will have access to methods unless overridden by an identically-named method in the child class. Args: system_params: (:obj:`dict`) Attributes, mostly file-paths, specific to the system-type (Linux or Windows). The dict keys are as follows: prepdir: Directory where Watchmaker will keep files on the system. readyfile: Path to a file that will be created upon successful completion. logdir: Directory to store log files. workingdir: Directory to store temporary files. Deleted upon successful completion. restart: Command to use to restart the system upon successful completion. shutdown_path: (Windows-only) Path to the Windows ``shutdown.exe`` command. """ boto3 = None boto_client = None def __init__(self, system_params, *args, **kwargs): self.log = logging.getLogger( '{0}.{1}'.format(__name__, self.__class__.__name__) ) self.system_params = system_params self.working_dir = None PlatformManagerBase.args = args PlatformManagerBase.kwargs = kwargs def retrieve_file(self, url, filename): """ Retrieve a file from a provided URL. Supports all :obj:`urllib.request` handlers, as well as S3 buckets. Args: url: (:obj:`str`) URL to a file. filename: (:obj:`str`) Path where the file will be saved. 
""" # Convert a local path to a URI url = watchmaker.utils.uri_from_filepath(url) self.log.debug('Downloading: %s', url) self.log.debug('Destination: %s', filename) try: self.log.debug('Establishing connection to the host, %s', url) response = watchmaker.utils.urlopen_retry(url) self.log.debug('Opening the file handle, %s', filename) with open(filename, 'wb') as outfile: self.log.debug('Saving file to local filesystem...') shutil.copyfileobj(response, outfile) except (ValueError, urllib.error.URLError): self.log.critical( 'Failed to retrieve the file. url = %s. filename = %s', url, filename ) raise self.log.info( 'Retrieved the file successfully. url=%s. filename=%s', url, filename ) def create_working_dir(self, basedir, prefix): """ Create a directory in ``basedir`` with a prefix of ``prefix``. Args: prefix: (:obj:`str`) Prefix to prepend to the working directory. basedir: (:obj:`str`) The directory in which to create the working directory. Returns: :obj:`str`: Path to the working directory. """ self.log.info('Creating a working directory.') original_umask = os.umask(0) try: working_dir = tempfile.mkdtemp(prefix=prefix, dir=basedir) except Exception: msg = 'Could not create a working dir in {0}'.format(basedir) self.log.critical(msg) raise self.log.debug('Created working directory: %s', working_dir) os.umask(original_umask) return working_dir @staticmethod def _pipe_handler(pipe, logger=None, prefix_msg=''): ret = b'' try: for line in iter(pipe.readline, b''): if logger: logger('%s%s', prefix_msg, line.rstrip()) ret += line finally: pipe.close() return ret def call_process(self, cmd, log_pipe='all', raise_error=True): """ Execute a shell command. Args: cmd: (:obj:`list`) Command to execute. log_pipe: (:obj:`str`) Controls what to log from the command output. Supports three values: ``stdout``, ``stderr``, ``all``. (*Default*: ``all``) raise_error: (:obj:`bool`) Switch to control whether to raise if the command return code is non-zero. 
(*Default*: ``True``) Returns: :obj:`dict`: Dictionary containing three keys: ``retcode`` (:obj:`int`), ``stdout`` (:obj:`bytes`), and ``stderr`` (:obj:`bytes`). """ ret = { 'retcode': 0, 'stdout': b'', 'stderr': b'' } if not isinstance(cmd, list): msg = 'Command is not a list: {0}'.format(cmd) self.log.critical(msg) raise WatchmakerError(msg) self.log.debug('Command: %s', ' '.join(cmd)) # If running as a standalone, PyInstaller will have modified the # LD_LIBRARY_PATH to point to standalone libraries. If there were a # value at runtime, PyInstaller will create LD_LIBRARY_PATH_ORIG. In # order for salt to run correctly, LD_LIBRARY_PATH has to be fixed. kwargs = {} env = dict(os.environ) lib_path_key = 'LD_LIBRARY_PATH' if env.get(lib_path_key) is not None: lib_path_orig_value = env.get(lib_path_key + '_ORIG') if lib_path_orig_value is None: # you can have lib_path and no orig, if: # 1. none was set and pyinstaller set one, or # 2. one was set and we're not in standalone package env.pop(lib_path_key, None) else: # put original lib_path back env[lib_path_key] = lib_path_orig_value kwargs['env'] = env with subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, **kwargs ) as process, concurrent.futures.ThreadPoolExecutor( max_workers=2 ) as executor: stdout_future = executor.submit( self._pipe_handler, process.stdout, self.log.debug if log_pipe in ['stdout', 'all'] else None, 'Command stdout: ' ) stderr_future = executor.submit( self._pipe_handler, process.stderr, self.log.error if log_pipe in ['stderr', 'all'] else None, 'Command stderr: ') ret['stdout'] = stdout_future.result() ret['stderr'] = stderr_future.result() ret['retcode'] = process.wait() self.log.debug('Command retcode: %s', ret['retcode']) if raise_error and ret['retcode'] != 0: msg = 'Command failed! 
Exit code={0}, cmd={1}'.format( ret['retcode'], ' '.join(cmd)) self.log.critical(msg) raise WatchmakerError(msg) return ret def cleanup(self): """Delete working directory.""" self.log.info('Cleanup Time...') try: self.log.debug('working_dir=%s', self.working_dir) shutil.rmtree(self.working_dir) self.log.info('Deleted working directory...') except Exception: msg = 'Cleanup Failed!' self.log.critical(msg) raise self.log.info('Exiting cleanup routine...') def extract_contents(self, filepath, to_directory, create_dir=False): """ Extract a compressed archive to the specified directory. Args: filepath: (:obj:`str`) Path to the compressed file. Supported file extensions: - `.zip` - `.tar.gz` - `.tgz` - `.tar.bz2` - `.tbz` to_directory: (:obj:`str`) Path to the target directory create_dir: (:obj:`bool`) Switch to control the creation of a subdirectory within ``to_directory`` named for the filename of the compressed file. (*Default*: ``False``) """ if filepath.endswith('.zip'): self.log.debug('File Type: zip') opener, mode = zipfile.ZipFile, 'r' elif filepath.endswith('.tar.gz') or filepath.endswith('.tgz'): self.log.debug('File Type: GZip Tar') opener, mode = tarfile.open, 'r:gz' elif filepath.endswith('.tar.bz2') or filepath.endswith('.tbz'): self.log.debug('File Type: Bzip Tar') opener, mode = tarfile.open, 'r:bz2' else: msg = ( 'Could not extract "{0}" as no appropriate extractor is found.' .format(filepath) ) self.log.critical(msg) raise WatchmakerError(msg) if create_dir: to_directory = os.sep.join(( to_directory, '.'.join(filepath.split(os.sep)[-1].split('.')[:-1]) )) try: os.makedirs(to_directory) except OSError: if not os.path.isdir(to_directory): msg = 'Unable create directory - {0}'.format(to_directory) self.log.critical(msg) raise cwd = os.getcwd() os.chdir(to_directory) try: openfile = opener(filepath, mode) try: openfile.extractall() finally: openfile.close() finally: os.chdir(cwd) self.log.info( 'Extracted file. 
source=%s, dest=%s', filepath, to_directory ) class LinuxPlatformManager(PlatformManagerBase): """ Base class for Linux Platforms. Serves as a foundational class to keep OS consitency. """ def _install_from_yum(self, packages): yum_cmd = ['sudo', 'yum', '-y', 'install'] if isinstance(packages, list): yum_cmd.extend(packages) else: yum_cmd.append(packages) self.call_process(yum_cmd) self.log.debug(packages) class WindowsPlatformManager(PlatformManagerBase): """ Base class for Windows Platform. Serves as a foundational class to keep OS consitency. """
apache-2.0
tisnats/tisnats.com
sections/reportsv2/ajax_report.php
2778
<? /* * The backend to changing the report type when making a report. * It prints out the relevant report_messages from the array, then * prints the relevant report_fields and whether they're required. */ authorize(); ?> <ul> <? $CategoryID = $_POST['categoryid']; if (array_key_exists($_POST['type'], $Types[$CategoryID])) { $ReportType = $Types[$CategoryID][$_POST['type']]; } elseif (array_key_exists($_POST['type'],$Types['master'])) { $ReportType = $Types['master'][$_POST['type']]; } else { echo 'HAX IN REPORT TYPE'; die(); } foreach ($ReportType['report_messages'] as $Message) { ?> <li><?=$Message?></li> <? } ?> </ul> <br /> <table class="layout border" cellpadding="3" cellspacing="1" border="0" width="100%"> <? if (array_key_exists('image', $ReportType['report_fields'])) { ?> <tr> <td class="label"> Image(s)<?=($ReportType['report_fields']['image'] == '1' ? ' <strong class="important_text">(Required)</strong>:' : '')?> </td> <td> <input id="image" type="text" name="image" size="50" value="<?=(!empty($_POST['image']) ? display_str($_POST['image']) : '')?>" /> </td> </tr> <? } if (array_key_exists('track', $ReportType['report_fields'])) { ?> <tr> <td class="label"> Track Number(s)<?=($ReportType['report_fields']['track'] == '1' || $ReportType['report_fields']['track'] == '2' ? ' <strong class="important_text">(Required)</strong>:' : '')?> </td> <td> <input id="track" type="text" name="track" size="8" value="<?=(!empty($_POST['track']) ? display_str($_POST['track']) : '')?>" /><?=($ReportType['report_fields']['track'] == '1' ? '<input id="all_tracks" type="checkbox" onclick="AllTracks()" /> All' : '')?> </td> </tr> <? } if (array_key_exists('link', $ReportType['report_fields'])) { ?> <tr> <td class="label"> Link(s) to external source<?=($ReportType['report_fields']['link'] == '1' ? ' <strong class="important_text">(Required)</strong>:' : '')?> </td> <td> <input id="link" type="text" name="link" size="50" value="<?=(!empty($_POST['link']) ? 
display_str($_POST['link']) : '')?>" /> </td> </tr> <? } if (array_key_exists('sitelink', $ReportType['report_fields'])) { ?> <tr> <td class="label"> Permalink to <strong>other relevant</strong> torrent(s)<?=($ReportType['report_fields']['sitelink'] == '1' ? ' <strong class="important_text">(Required)</strong>:' : '')?> </td> <td> <input id="sitelink" type="text" name="sitelink" size="50" value="<?=(!empty($_POST['sitelink']) ? display_str($_POST['sitelink']) : '')?>" /> </td> </tr> <? } ?> <tr> <td class="label"> Comments <strong class="important_text">(Required)</strong>: </td> <td> <textarea id="extra" rows="5" cols="60" name="extra"><?=display_str($_POST['extra'])?></textarea> </td> </tr> </table>
apache-2.0
strapdata/elassandra5-rc
core/src/main/java/org/elasticsearch/common/lucene/uid/Versions.java
1582
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.common.lucene.uid; /** Utility class to resolve the Lucene doc ID and version for a given uid. */ public final class Versions { /** used to indicate the write operation should succeed regardless of current version **/ public static final long MATCH_ANY = -3L; /** indicates that the current document was not found in lucene and in the version map */ public static final long NOT_FOUND = -1L; // -2 was used for docs that can be found in the index but do not have a version /** * used to indicate that the write operation should be executed if the document is currently deleted * i.e., not found in the index and/or found as deleted (with version) in the version map */ public static final long MATCH_DELETED = -4L; }
apache-2.0
hamzakilic/wjimg
src/app/lib/imageprocess/imageProcessSimilarColors.ts
5772
import { ColorConversion } from './../imagealgorithm/colorConversion'; import { Point } from './../draw/point'; import { Polygon } from './../draw/polygon'; import { Color } from './../draw/color'; import { HImage } from './../image'; import { Layer } from '../../models/photoedit/layer'; export class ImageProcessSimilarColors { ///layer parametresini debug yapmak için kullanıyorum public static process(layer:Layer,img: HImage, color: Color, point: Point,threshold:number,continous:number=8): Array<Polygon> { let xyz = ColorConversion.rgbToXYZ(color.r, color.g, color.b); let lab = ColorConversion.XYZToLab(xyz[0], xyz[1], xyz[2]); let lch = ColorConversion.LabToLCH(lab[0], lab[1], lab[2]); let points: Array<Point> = []; ImageProcessSimilarColors.check(layer,img, points, lch, threshold, point); return [new Polygon(points)]; } private static check(layer:Layer,img: HImage, points: Array<Point>, reflch: number[], threshold: number, point: Point) { let visited = new Array<boolean>(img.width * img.height); visited.forEach((item, index, arr) => visited[index] = false); let visitPoints = []; visitPoints.push(point); while (visitPoints.length > 0) { let currentPoint = visitPoints.pop(); let x = currentPoint.x; let y = currentPoint.y; if (x < 0 || y < 0) continue; if (x > img.width || y > img.height) continue; if (visited[y * img.width + x]) continue; visited[y * img.width + x] = true; let position = y * img.width * img.bytePerPixel + x * img.bytePerPixel; let r = img.Pixels[position]; let g = img.Pixels[position + 1]; let b = img.Pixels[position + 2]; let xyz = ColorConversion.rgbToXYZ(r, g, b); let lab = ColorConversion.XYZToLab(xyz[0], xyz[1], xyz[2]); let lch = ColorConversion.LabToLCH(lab[0], lab[1], lab[2]); let dif = ImageProcessSimilarColors.delta2000({ L: lch[0], a: lch[1], b: lch[2] }, { L: reflch[0], a: reflch[1], b: reflch[2] }); if (dif < threshold) { points.push(new Point(x, y)); // layer.graphics.setPixel(x,y,new Color(255,255,255,255)); //visitPoints.push(new 
Point(x - 1, y - 1)); visitPoints.push(new Point(x - 1, y)); //visitPoints.push(new Point(x - 1, y + 1)); visitPoints.push(new Point(x, y - 1)); visitPoints.push(new Point(x, y + 1)); //visitPoints.push(new Point(x + 1, y - 1)); visitPoints.push(new Point(x + 1, y)); //visitPoints.push(new Point(x + 1, y + 1)); }else{ // layer.graphics.setPixel(x,y,new Color(255,0,0,255)); // debugger; } } } private static delta2000(Lab1: any, Lab2: any): number { let kL = 1.0; let kC = 1.0; let kH = 1.0; let lBarPrime = 0.5 * (Lab1.L + Lab2.L); let c1 = Math.sqrt(Lab1.a * Lab1.a + Lab1.b * Lab1.b); let c2 = Math.sqrt(Lab2.a * Lab2.a + Lab2.b * Lab2.b); let cBar = 0.5 * (c1 + c2); let cBar7 = cBar * cBar * cBar * cBar * cBar * cBar * cBar; let g = 0.5 * (1.0 - Math.sqrt(cBar7 / (cBar7 + 6103515625.0))); /* 6103515625 = 25^7 */ let a1Prime = Lab1.a * (1.0 + g); let a2Prime = Lab2.a * (1.0 + g); let c1Prime = Math.sqrt(a1Prime * a1Prime + Lab1.b * Lab1.b); let c2Prime = Math.sqrt(a2Prime * a2Prime + Lab2.b * Lab2.b); let cBarPrime = 0.5 * (c1Prime + c2Prime); let h1Prime = (Math.atan2(Lab1.b, a1Prime) * 180.0) / Math.PI; if (h1Prime < 0.0) h1Prime += 360.0; var h2Prime = (Math.atan2(Lab2.b, a2Prime) * 180.0) / Math.PI; if (h2Prime < 0.0) h2Prime += 360.0; var hBarPrime = (Math.abs(h1Prime - h2Prime) > 180.0) ? (0.5 * (h1Prime + h2Prime + 360.0)) : (0.5 * (h1Prime + h2Prime)); var t = 1.0 - 0.17 * Math.cos(Math.PI * (hBarPrime - 30.0) / 180.0) + 0.24 * Math.cos(Math.PI * (2.0 * hBarPrime) / 180.0) + 0.32 * Math.cos(Math.PI * (3.0 * hBarPrime + 6.0) / 180.0) - 0.20 * Math.cos(Math.PI * (4.0 * hBarPrime - 63.0) / 180.0); let dhPrime = 0; if (Math.abs(h2Prime - h1Prime) <= 180.0) dhPrime = h2Prime - h1Prime; else dhPrime = (h2Prime <= h1Prime) ? 
(h2Prime - h1Prime + 360.0) : (h2Prime - h1Prime - 360.0); let dLPrime = Lab2.L - Lab1.L; let dCPrime = c2Prime - c1Prime; let dHPrime = 2.0 * Math.sqrt(c1Prime * c2Prime) * Math.sin(Math.PI * (0.5 * dhPrime) / 180.0); let sL = 1.0 + ((0.015 * (lBarPrime - 50.0) * (lBarPrime - 50.0)) / Math.sqrt(20.0 + (lBarPrime - 50.0) * (lBarPrime - 50.0))); let sC = 1.0 + 0.045 * cBarPrime; let sH = 1.0 + 0.015 * cBarPrime * t; let dTheta = 30.0 * Math.exp(-((hBarPrime - 275.0) / 25.0) * ((hBarPrime - 275.0) / 25.0)); let cBarPrime7 = cBarPrime * cBarPrime * cBarPrime * cBarPrime * cBarPrime * cBarPrime * cBarPrime; let rC = Math.sqrt(cBarPrime7 / (cBarPrime7 + 6103515625.0)); let rT = -2.0 * rC * Math.sin(Math.PI * (2.0 * dTheta) / 180.0); let DE2000 = Math.sqrt( (dLPrime / (kL * sL)) * (dLPrime / (kL * sL)) + (dCPrime / (kC * sC)) * (dCPrime / (kC * sC)) + (dHPrime / (kH * sH)) * (dHPrime / (kH * sH)) + (dCPrime / (kC * sC)) * (dHPrime / (kH * sH)) * rT); return DE2000; } }
apache-2.0
smart-education-iot/smart-education-stack
education-manage/src/main/java/com/education/manage/Application.java
709
package com.education.manage; import org.springframework.boot.SpringApplication; import org.springframework.boot.autoconfigure.EnableAutoConfiguration; import org.springframework.boot.web.servlet.ServletComponentScan; import org.springframework.context.annotation.ComponentScan; import org.springframework.context.annotation.Configuration; /** * 管理入口 * @author jamesli * @version 1.0 * @date 2017-07-04 */ @ServletComponentScan @Configuration @EnableAutoConfiguration @ComponentScan(basePackages={"com.education"}) public class Application{ /** * 应用入口 * @param args */ public static void main( String[] args ){ SpringApplication.run(Application.class, args); } }
apache-2.0
JRosenfeldIntern/data-assistant
Shared/GPTools/pyt/scripts/dla.py
53937
""" ------------------------------------------------------------------------------- | Copyright 2016 Esri | | Licensed under the Apache License, Version 2.0 (the "License"); | you may not use this file except in compliance with the License. | You may obtain a copy of the License at | | http://www.apache.org/licenses/LICENSE-2.0 | | Unless required by applicable law or agreed to in writing, software | distributed under the License is distributed on an "AS IS" BASIS, | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | See the License for the specific language governing permissions and | limitations under the License. ------------------------------------------------------------------------------ """ # dla - Data Loading Assistant common functions # Dec 2015 # --------------------------------------------------------------------------- ''' Contains a collection of support functions used by dla tools to provide error handling and other supporting functions, typically an expansion of underlying arcpy functions with a bit more logic and testing. Generally functions return data or a True/False result depending on the situation and the arcpy functions. ''' import datetime import gc import os import pathlib import re import sys import time import traceback import urllib import xml.dom.minidom import xml.etree.cElementTree as ET; import arcpy debug = False # print field calculator messages. startTime = time.localtime() # start time for a script workspace = "dla.gdb" # default, override in script successParameterNumber = 3 # parameter number to set at end of script to indicate success of the program maxErrorCount = 20000 # max errors before a script will stop _errCount = 0 # count the errors and only report things > maxRowCount errors... 
_proxyhttp = None # "127.0.0.1:80" # ip address and port for proxy, you can also add user/pswd like: username:password@proxy_url:port _proxyhttps = None # same as above for any https sites - not needed for these tools but your proxy setup may require it. _project = None _xmlFolder = None _noneFieldName = '(None)' _dirName = os.path.dirname(os.path.realpath(__file__)) maxrows = 10000000 _ignoreFields = ['FID', 'OBJECTID', 'SHAPE', 'SHAPE_AREA', 'SHAPE_LENGTH', 'SHAPE_LEN', 'SHAPELENGTH', 'SHAPEAREA', 'STLENGTH()', 'STAREA()', 'RASTER', 'GLOBALID'] _ignoreFieldNames = ['OIDFieldName', 'ShapeFieldName', 'LengthFieldName', 'AreaFieldName', 'RasterFieldName', 'GlobalIDFieldName'] _CIMWKSP = 'CIMWKSP' _lyrx = '.lyrx' _http = 'http://' _https = 'https://' _sde = '.sde\\' _gdb = '.gdb\\' import string import random def id_generator(size=6, chars=string.ascii_uppercase + string.digits): return ''.join(random.choice(chars) for _ in range(size)) # helper functions def timer(input): # time difference return time.time() - input def getDBTime(): # format time val for db insert return getStrTime(time.localtime()) def getStrTime(timeVal): # get string time for a time value return time.strftime("%Y-%m-%d %H:%M:%S", timeVal) def getTimeFromStr(timeStr): # get timeVal from a string return time.strptime(timeStr, "%d/%m/%Y %I:%M:%S %p") def addMessage(val): # write a message to the screen try: if sys.stdin.isatty(): # arcpy.AddMessage(str(val)) print(str(val)) else: arcpy.AddMessage(str(val)) print(str(val)) except: arcpy.AddMessage(str(val)) def addMessageLocal(val): # write a message to the screen try: if sys.stdin.isatty(): print(str(val)) else: arcpy.AddMessage(str(val)) except: arcpy.AddMessage(str(val)) def addError(val): # add an error to the screen output # arcpy.AddMessage("Error: " + str(val)) global _errCount _errCount += 1 arcpy.AddError(str(val)) def writeFinalMessage(msg): global _errCount if msg != None: addMessage(str(msg)) addMessage("Process completed with " + 
str(_errCount) + " errors") if _errCount > 0: addMessage( "When any errors are encountered tools will report a general failure - even though the results may be still be satisfactory.") addMessage("Check the Geoprocessing log and errors reported along with the output data to confirm.") def strToBool(s): # return a boolean for values like 'true' return s.lower() in ("yes", "true", "t", "1") def showTraceback(): # get the traceback object and print it out tBack = sys.exc_info()[2] # tbinfo contains the line number that the code failed on and the code from that line tbinfo = traceback.format_tb(tBack) tbStr = "" for i in range(0, len(tbinfo)): tbStr = tbStr + str(tbinfo[i]) # concatenate information together concerning the error into a message string pymsg = "Python Error messages:\nTraceback Info:\n" + tbStr # + "Error Info: " + str(sys.exc_type)+ ": " + str(sys.exc_value) + "\n" # print messages for use in Python/PythonWin addError(pymsg) def getFields(xmlFile): # get the list of datasets from XML doc dsTypes = ["Fields"] for atype in dsTypes: fields = getXmlElements(xmlFile, atype) if fields != []: return fields def getIgnoreFieldNames(desc, include_globalID): ignore = _ignoreFields field_check = _ignoreFields if include_globalID: if 'GLOBALID' in field_check: field_check.pop(field_check.index('GLOBALID')) for name in field_check: val = getFieldByName(desc, name) if val != None: val = val[val.rfind('.') + 1:] ignore.append(val) return ignore def getTargetFieldsNode(xmlLocation): tree = ET.parse(xmlLocation) root = tree.getroot() tfields = root.find('TargetFields') fields = tfields.getchildren() retfields = dict() for field in fields: attributes = field.attrib retfields[attributes['Name']] = dict() retfields[attributes['Name']] = attributes return retfields def getFieldByName(desc, name): val = None try: val = eval('desc.' 
+ name) except: val = None if val == '': val = None return val def collect_text(node): # "A function that collects text inside 'node', returning that text." s = "" for child_node in node.childNodes: if child_node.nodeType == child_node.TEXT_NODE: s += child_node.nodeValue else: s += collect_text(child_node) return s def getNodeValue(xmlDoc, nodeName): # get an xml node value node = xmlDoc.getElementsByTagName(nodeName) try: str = collect_text(node.item(0)) except: str = "" return str def getTextValue(node): try: str = collect_text(node) except: str = "" return str def getArcpyErrorMessage(): # parse out python exception content into the part after the "." - the message parts = str(sys.exc_value).split(".") if len(parts) == 1: retVal = parts[0] else: retVal = parts[1][1:] # first char after dot always appears to be newline char return retVal def testSchemaLock(dataset): # test if a schema lock is possible res = arcpy.TestSchemaLock(dataset) return res def cleanupGarbage(): # cleanup python garbage for obj in gc.garbage: del obj # remove local reference so the node can be deleted del gc.garbage[:] for i in range(2): if debug == True: addMessageLocal('cleanup pass: ' + str(i)) n = gc.collect() if debug == True: print('Unreachable objects:' + str(n)) print('Remaining Garbage:' + str(gc.garbage)) def cleanup(inWorkspace): # general cleanup function cleanupGarbage() arcpy.ClearWorkspaceCache_management(inWorkspace) def getCleanName(nameVal): # strip leading database prefix values cleanName = nameVal dotCount = nameVal.count(".") if dotCount > 0: cleanName = nameVal.split(".")[dotCount] return cleanName def makeFeatureView(workspace, sourceFC, viewName, whereClause, xmlFields): # make a feature view using the where clause if arcpy.Exists(sourceFC): if arcpy.Exists(viewName): arcpy.Delete_management(viewName) # delete view if it exists desc = arcpy.Describe(sourceFC) fields = arcpy.ListFields(sourceFC) fStr = getViewString(fields, xmlFields) try: if 
str(whereClause).strip() == '': whereClause = None return arcpy.MakeFeatureLayer_management(sourceFC, viewName, whereClause, None, fStr)[0] except: showTraceback() if whereClause is None: whereClause = "(None)" addMessage("Error occured, where clause: " + whereClause) # addMessage("Feature Layer " + viewName + " created for " + str(whereClause)) else: addError(sourceFC + " does not exist, exiting") exit(-1) def makeTableView(workspace, sourceTable, viewName, whereClause, xmlField): # make a table view using the where clause if arcpy.Exists(sourceTable): if arcpy.Exists(workspace + os.sep + viewName): arcpy.Delete_management(workspace + os.sep + viewName) # delete view if it exists desc = arcpy.Describe(sourceTable) fields = arcpy.ListFields(sourceTable) fStr = getViewString(fields, xmlField) arcpy.MakeTableView_management(sourceTable, viewName, whereClause, workspace, fStr) else: addError(sourceTable + " does not exist, exiting") if not arcpy.Exists(viewName): exit(-1) return (viewName) def getViewString(fields, xmlFields): # get the string for creating a view viewStr = "" for field in fields: # drop any field prefix from the source layer (happens with map joins) thisFieldName = field.name[field.name.rfind(".") + 1:] for xmlField in xmlFields: sourcename = getNodeValue(xmlField, "SourceName") if sourcename == thisFieldName: targetname = getNodeValue(xmlField, "TargetName") if sourcename != targetname and sourcename.upper() == targetname.upper(): # this is a special case where the source name is different case but the same string as the target # need to create table so that the name matches the target name so there is no conflict later thisFieldName = targetname thisFieldStr = field.name + " " + thisFieldName + " VISIBLE NONE;" viewStr += thisFieldStr if viewStr.endswith(';'): viewStr = viewStr[:-1] return viewStr def getWhereClause(dataset): whereClause = '' try: whereClause = getNodeValue(dataset, "WhereClause") except: whereClause = '' if whereClause != '' and 
whereClause != ' ' and whereClause != None: addMessageLocal("Where '" + whereClause + "'") else: addMessageLocal("No Where Clause") return whereClause def deleteRows(table, expr): # delete rows in dataset with an expression if debug: addMessageLocal(table) retcode = False targTable = getDatasetName(table) vname = targTable + "_ViewDelete" if arcpy.Exists(vname): arcpy.Delete_management(vname) # delete view if it exists arcpy.MakeTableView_management(table, vname, expr) arcpy.DeleteRows_management(vname) addMessageLocal("Existing " + targTable + " rows deleted ") try: arcpy.Delete_management(vname) # delete view if it exists retcode = True except: addMessageLocal("Could not delete view, continuing...") return retcode def appendRows(sourceTable, targetTable, expr, continue_on_error=True): # append rows in dataset with a where clause retcode = False sTable = getDatasetName(sourceTable) view = sTable + "_ViewAppend" + id_generator(size=3) if isTable(targetTable): deType = 'Table' else: deType = 'FeatureClass' view = makeView(deType, workspace, sourceTable, view, expr, []) numSourceFeat = arcpy.GetCount_management(view).getOutput(0) numOriginalTarFeat = arcpy.GetCount_management(targetTable).getOutput(0) addMessage("Appending " + sTable + " TO " + getDatasetName(targetTable)) if targetTable.lower().endswith(_lyrx): targetTable = getLayerFromString(targetTable) try: result = arcpy.Append_management(inputs=view, target=targetTable, schema_type='NO_TEST') except: msgs = arcpy.GetMessages() addError(msgs) return False addMessageLocal('Rows appended') numTargetFeat = arcpy.GetCount_management(targetTable).getOutput(0) addMessage(numSourceFeat + " features in source dataset") addMessage(numTargetFeat + " features in target dataset") msgs = arcpy.GetMessages() arcpy.AddMessage(msgs) retcode = True if int(numTargetFeat) != int(numSourceFeat) + int(numOriginalTarFeat): arcpy.addWarningMessage("WARNING: Different number of rows in target dataset, " + numTargetFeat) if not 
continue_on_error: sys.exit(-1) # Option to stop all xml scripts if if int(numTargetFeat) == 0: addError("ERROR: 0 Features in target dataset") retcode = False return retcode def listDatasets(gdb): # list all of the datasets and tables dsNames = [] dsFullNames = [] arcpy.env.workspace = gdb addMessageLocal("Getting list of Datasets from " + gdb) wsDatasets = arcpy.ListDatasets() wsTables = arcpy.ListTables() if wsDatasets: for fds in wsDatasets: desc = arcpy.Describe(fds) if desc.DatasetType == "FeatureDataset": arcpy.env.workspace = desc.CatalogPath fcs = arcpy.ListFeatureClasses() for fc in fcs: descfc = arcpy.Describe(fc) if descfc.DatasetType == "FeatureClass": dsNames.append(baseName(fc)) dsFullNames.append(desc.CatalogPath + os.sep + fc) if debug: arcpy.AddMessage(desc.CatalogPath + os.sep + fc) arcpy.env.workspace = gdb arcpy.env.workspace = gdb fcs = arcpy.ListFeatureClasses() for fClass in fcs: descfc = arcpy.Describe(fClass) if descfc.DatasetType == "FeatureClass": dsNames.append(baseName(fClass)) dsFullNames.append(gdb + os.sep + fClass) if debug: arcpy.AddMessage(gdb + os.sep + fClass) arcpy.env.workspace = gdb for table in wsTables: descfc = arcpy.Describe(table) if descfc.DatasetType == "Table": dsNames.append(baseName(table)) dsFullNames.append(gdb + os.sep + table) if debug: arcpy.AddMessage(gdb + os.sep + table) return ([dsNames, dsFullNames]) def getFullName(searchName, names, fullNames): # find full name for searchName string try: # look for the matching name in target names t = names.index(searchName.upper()) fullName = fullNames[t] return fullName except: # will get here if no match t = -1 return "" def baseName(name): # trim any database prefixes from table names if name.lower().endswith(_lyrx): name = name[:len(name) - len(_lyrx)] if name.count(".") > 0: return name.split(".")[name.count(".")].upper() else: return name.upper() # Below is a better immplementation of this function, but it is untested. 
# def baseName(name): # """ trim any database prefixes from table names """ # Check that it's a file on disk (rare chance there is a feature class in EGDB called "lyrx") # if os.path.exists(name): # if name.lower().endswith(_lyrx) or name.lower().endswith('.shp'): # return os.path.splitext(os.path.basename(name))[0] # # return os.path.basename(name).split('.')[-1] def getFieldValues(mode, fields, datasets): # get a list of field values, returns all values and the unique values. theValues = [] # unique list of values theDiff = [] # all values for dataset in datasets: name = dataset.getAttributeNode("name").nodeValue table = os.path.join(workspace, name) desc = arcpy.Describe(table) try: cursor = arcpy.SearchCursor(table) row = cursor.next() except (Exception, ErrorDesc): printMsg("Unable to read the Dataset, Python error is: ") msg = str(getTraceback(Exception, ErrorDesc)) printMsg(msg[msg.find("Error Info:"):]) row = None numFeat = int(arcpy.GetCount_management(table).getOutput(0)) addMessageLocal(table + ", " + str(numFeat) + " (get " + mode + ") features") progressUpdate = 1 i = 0 if numFeat > 100: progressUpdate = numFeat / 100 arcpy.SetProgressor("Step", "Getting " + mode + " values...", 0, numFeat, progressUpdate) attrs = [f.name for f in arcpy.ListFields(table)] if row is not None: while row: i += 1 if i % progressUpdate == 0: arcpy.SetProgressorPosition(i) try: for field in fields: if field in attrs: currentValue = row.getValue(field) if mode.upper() == "UNIQUE": if currentValue != None: try: theValues.index(currentValue) # if the current value is present theDiff.append(currentValue) # add to the list of differences if it is found except: theValues.append(currentValue) # else add the value if the first check fails. 
elif mode.upper() == "ALL": theValues.append(currentValue) except: err = "Exception caught: unable to get field values" addError(err) logProcessError(row.getValue(field), sourceIDField, row.getValue(sourceIDField), "Cannot read", err) theValues = [] row = cursor.next() del cursor # arcpy.RefreshCatalog(table) return [theValues, theDiff] def addDlaField(table, targetName, field, attrs, ftype, flength): # add a field to a dla Geodatabase if targetName == _noneFieldName: return True retcode = False try: attrs.index(targetName) # check if field exists, compare uppercase retcode = True except: try: upfield = False tupper = targetName.upper() for nm in attrs: nupper = nm.upper() if tupper == nupper and nupper not in _ignoreFields and nm != _noneFieldName and ftype.upper() != 'GLOBALID': # if case insensitive match, note GlobalID and others cannot be renamed nm2 = nm + "_1" retcode = arcpy.AlterField_management(table, nm, nm2) retcode = arcpy.AlterField_management(table, nm2, targetName) addMessage("Field altered: " + nm + " to " + targetName) upfield = True if upfield == False and targetName != _noneFieldName: retcode = addField(table, targetName, ftype, flength) addMessage("Field added: " + targetName) except: showTraceback() for attr in attrs: # drop any field prefix from the source layer (happens with map joins) thisFieldName = attr[attr.rfind(".") + 1:] if thisFieldName.upper() == targetName.upper(): addMessageLocal( "WARNING: Existing field name '" + thisFieldName + "' conflicts with new field name '" + targetName + "'. 
Identical names with different case are not supported by databases!\n") return retcode def addField(table, fieldName, fieldType, fieldLength): # add a field to a Geodatabase retcode = False if fieldLength == None: fieldLength = "" if fieldType.lower() == 'globalid': fieldType = 'GUID' arcpy.AddField_management(table, fieldName, fieldType, fieldLength) retcode = True return retcode def createGeodatabase(): # Create a workspace - file GDB folder = workspace[:workspace.rfind(os.sep)] fgdb = workspace[workspace.rfind(os.sep) + 1:] retcode = False try: arcpy.CreateFileGDB_management(folder, fgdb) retcode = True addMessageLocal("New Geodatabase created: " + workspace) except: showTraceback() addMessageLocal("Unable to create Geodatabase: " + folder + "\\" + fgdb) return retcode def isDlaDocument(xmlDoc): # Is the first node SourceTargetMatrix in the XML document? node = None try: node = xmlDoc.getElementsByTagName("SourceTargetMatrix") except: pass if node: retVal = True else: retVal = False return retVal def isPlaylistDocument(xmlDoc): # Is the first node a SourceTargetPlaylist? 
PlaylistNode = None try: PlaylistNode = xmlDoc.getElementsByTagName("SourceTargetPlaylist") except: pass if PlaylistNode: retVal = True else: retVal = False return retVal def getRootElement(xmlDoc): # get the root element retDoc = None if isDlaDocument(xmlDoc): retDoc = xmlDoc.getElementsByTagName("SourceTarget")[0] elif isPlaylistDocument(xmlDoc): retDoc = xmlDoc.getElementsByTagName("SourceTargetPlaylist")[0] return retDoc def getXmlElements(xmlFile, elemName): # get Xml elements from a file or files in a playlist retDoc = None xmlDoc = getXmlDoc(xmlFile) if isDlaDocument(xmlDoc): retDoc = xmlDoc.getElementsByTagName(elemName) elif isPlaylistDocument(xmlDoc): docs = xmlDoc.getElementsByTagName("File") for doc in docs: fileName = collect_text(doc) folder = xmlFile[:xmlFile.rfind(os.sep)] theFile = os.path.join(folder, fileName) if os.path.exists(theFile): xmlDoc2 = getXmlDoc(theFile) xmlNodes = xmlDoc2.getElementsByTagName(elemName) if retDoc == None: retDoc = xmlNodes else: for node in xmlNodes: retDoc.append(node) else: addMessageLocal(theFile + " does not exist, continuing...") else: retDoc = None return retDoc def convertDataset(dataElementType, sourceTable, workspace, targetName, whereClause): # convert a dataset if dataElementType == "DEFeatureClass": arcpy.FeatureClassToFeatureClass_conversion(sourceTable, workspace, targetName, whereClause) elif dataElementType == "DETable": arcpy.TableToTable_conversion(sourceTable, workspace, targetName, whereClause) def makeView(deType, workspace, sourceTable, viewName, whereClause, xmlFields): # make a view view = None if deType.lower().endswith('table'): view = makeTableView(workspace, sourceTable, viewName, whereClause, xmlFields) if deType.lower().endswith('featureclass'): view = makeFeatureView(workspace, sourceTable, viewName, whereClause, xmlFields) return view def exportDataset(sourceWorkspace, sourceName, targetName, dataset, xmlFields): # export a dataset result = True sourceTable = 
os.path.join(sourceWorkspace, sourceName) targetTable = os.path.join(workspace, targetName) addMessageLocal("Exporting dataset " + sourceTable) try: desc = arcpy.Describe(sourceTable) deType = desc.dataElementType whereClause = getWhereClause(dataset) viewName = sourceName + "_View" + id_generator(size=3) view = makeView(deType, workspace, sourceTable, viewName, whereClause, xmlFields) count = arcpy.GetCount_management(view).getOutput(0) addMessageLocal(str(count) + " source rows") convertDataset(deType, view, workspace, targetName, whereClause) except: err = "Failed to create new dataset " + targetName addError(err) logProcessError(sourceTable, sourceIDField, sourceName, targetName, err) result = False return result def importDataset(sourceWorkspace, sourceName, targetName, dataset, xmlFields): # import a dataset result = True sourceTable = os.path.join(sourceWorkspace, sourceName) targetTable = os.path.join(workspace, targetName) addMessageLocal("Importing dataset " + sourceTable) try: whereClause = getWhereClause(dataset) if not arcpy.Exists(sourceTable): err = sourceTable + " does not exist" addError(err) logProcessError(sourceTable, sourceIDField, sourceName, targetName, err) return False if not arcpy.Exists(targetTable): err = targetTable + " does not exist" addError(err) logProcessError(targetTable, sourceIDField, sourceName, targetName, err) return False desc = arcpy.Describe(sourceTable) deType = desc.dataElementType viewName = sourceName + "_View" + id_generator(size=3) view = makeView(deType, workspace, sourceTable, viewName, whereClause, xmlFields) count = arcpy.GetCount_management(view).getOutput(0) addMessageLocal(str(count) + " source rows") arcpy.Append_management([view], targetTable, "NO_TEST", "", "") except: err = "Failed to import layer " + targetName addError(err) logProcessError(sourceTable, sourceIDField, sourceName, targetName, err) result = False return result def deleteExistingRows(datasets): # delete existing rows in a dataset for dataset 
in datasets: name = dataset.getAttributeNode("targetName").nodeValue table = os.path.join(workspace, name) if arcpy.Exists(table): arcpy.DeleteRows_management(table) addMessageLocal("Rows deleted from: " + name) else: addMessageLocal(table + " does not exist") def getFileList(inputFolder, fileExt, minTime): # get a list of files - recursively inputFiles = [] if inputFolder.lower().endswith(".dwg") == True: # if the arg is a file instead of a folder just get that as a list inputFiles.append([os.path.dirname(inputFolder), os.path.basename(inputFolder)]) addMessageLocal(os.path.dirname(inputFolder)) addMessageLocal(os.path.basename(inputFolder)) return inputFiles docList = os.listdir(inputFolder) # Get directory list for inputDirectory for doc in docList: docLow = doc.lower() ffile = os.path.join(inputFolder, doc) if docLow.endswith(fileExt.lower()): t = os.path.getmtime(ffile) modTime = datetime.datetime.fromtimestamp(t) if modTime > minTime: inputFiles.append([inputFolder, doc]) elif os.path.isdir(ffile): newFiles = getFileList(ffile, fileExt, minTime) inputFiles = newFiles + inputFiles inputFiles = sorted(inputFiles) return (inputFiles) def repairName(name): # layer names can have spaces and other chars that can't be used in table names nname = name.replace(" ", "_") nname = nname.replace("-", "_") return nname def getSourceName(xmlDoc): path = getNodeValue(xmlDoc, "Source") name = getDatasetName(path) return name def getTargetName(xmlDoc): path = getNodeValue(xmlDoc, "Target") name = getDatasetName(path) return name def getDatasetName(path): fullname = '' if path.find("/") > -1: parts = path.split("/") fullname = parts[len(parts) - 3] elif path.lower().endswith(_lyrx): fullname = getLayerSourceString(path) else: fullname = path[path.rfind(os.sep) + 1:] trimname = baseName(fullname) name = repairName(trimname) name = arcpy.ValidateTableName(name) return name def setProject(xmlfile, projectFilePath): # set the current project to enable relative file paths for 
processing global _project global _xmlFolder try: if _xmlFolder == None: prj = getProject() _xmlFolder = os.path.dirname(xmlfile) if projectFilePath != None: if not os.path.exists(projectFilePath): projectFilePath = os.path.join(_xmlFolder, projectFilePath) if os.path.exists(projectFilePath): _project = arcpy.mp.ArcGISProject(projectFilePath) else: pass # Removed by Mike Miller 6/20/17, this message as relative path is by xml file and not project # addMessage(str(projectFilePath) + ' does not exist, continuing') except: # addError("Unable to set the current project, continuing") _project = None # _xmlFolder = None return _project def getProject(): global _project, _xmlFolder if _project == None: try: _project = arcpy.mp.ArcGISProject("CURRENT") except: # addMessage("Unable to obtain a reference to the current project, continuing") _project = None return _project def getDatasetPath(xmlDoc, name): # check if file exists, then try to add project folder if missing pth = getNodeValue(xmlDoc, name) if pth.lower().startswith(_http) == True or pth.lower().startswith(_https) == True: return pth elif pth.endswith(_lyrx): # need to check os.path if not os.path.exists(pth): pth = os.path.join(_xmlFolder, pth) if not os.path.exists(pth): addError("Unable to locate layer path: " + pth) else: # need to check arcpy if not arcpy.Exists(pth): pth = os.path.join(_xmlFolder, pth) if not arcpy.Exists(pth): addError("Unable to locate dataset path: " + pth) return pth def dropProjectPath(pth): # drop the project path from datasets to support relative paths and moving files between machines. if _xmlFolder != None: pth = pth.replace(_xmlFolder, '') if pth.startswith('\\'): pth = pth[1:] return pth def dropXmlFolder(xmlfile, pth): # drop the xml file path from datasets to support relative paths and moving files between machines. 
dir = os.path.dirname(xmlfile) pth = pth.replace(dir, '') if pth.startswith('\\'): pth = pth[1:] return pth def getMapLayer(layerName): name = layerName[layerName.rfind('\\') + 1:] layer = None try: prj = getProject() maps = prj.listMaps("*") found = False for map in maps: if not found: lyrs = map.listLayers(name) for lyr in lyrs: if lyr.name == name and not found: if lyr.supports("DataSource"): layer = lyr found = True # take the first layer with matching name except: addMessage("Warning: Unable to get layer from maps") return None return layer def makeprjFile(xmlFileName): path = pathlib.Path(xmlFileName) dir_path = str(path.parent / path.stem) if not os.path.exists(dir_path): os.mkdir(dir_path) return os.path.join(dir_path, os.path.basename(xmlFileName)) def getLayerPath(in_lyr, xmlFileName, input_type): # get the source data path for a layer pth = '' if isinstance(in_lyr, arcpy._mp.LayerFile): # map layerFile as parameter xmlpath = pathlib.Path(xmlFileName).parent.absolute() / os.path.basename(in_lyr.filePath) if os.path.exists(str(xmlpath)): os.remove(str(xmlpath)) layer_path = arcpy.SaveToLayerFile_management(in_lyr.filePath, str(xmlpath))[0] new_name = pathlib.Path(layer_path).parent.absolute() / (input_type + ".lyrx") if os.path.exists(str(new_name)): os.remove(str(new_name)) arcpy.Rename_management(layer_path, input_type + ".lyrx") pth = new_name addMessage("Used .lyrx filePath as source") elif isinstance(in_lyr, arcpy._mp.Layer): # map layer as parameter try: xmlpath = pathlib.Path(xmlFileName).parent.absolute() / (in_lyr.name + ".lyrx") if os.path.exists(str(xmlpath)): os.remove(str(xmlpath)) layer_path = arcpy.SaveToLayerFile_management(in_lyr.name, str(xmlpath))[0] new_name = pathlib.Path(layer_path).parent.absolute() / (input_type + ".lyrx") if os.path.exists(str(new_name)): os.remove(str(new_name)) arcpy.Rename_management(layer_path, input_type + ".lyrx") pth = new_name except: addError(traceback.format_exc) addError("Unable to create layer file for 
" + in_lyr.name) # else: # addError("Layer does not support the datasource property. Please ensure you selected a layer and not a group layer") elif isinstance(in_lyr, str) and in_lyr.lower().endswith(_lyrx): in_lyr = arcpy.mp.LayerFile(in_lyr) xmlpath = pathlib.Path(xmlFileName).parent.absolute() / os.path.basename(in_lyr.filePath) if os.path.exists(str(xmlpath)): os.remove(str(xmlpath)) layer_path = arcpy.SaveToLayerFile_management(in_lyr.filePath, str(xmlpath))[0] new_name = pathlib.Path(layer_path).parent.absolute() / (input_type + ".lyrx") if os.path.exists(str(new_name)): os.remove(str(new_name)) arcpy.Rename_management(layer_path, input_type + ".lyrx") pth = new_name else: # should be a string, check if feature layer string, then try to describe pth = repairLayerSourceUrl(in_lyr, in_lyr) if isFeatureLayerUrl(pth): return pth # else - not needed but else logic below try: desc = arcpy.Describe(in_lyr) # dataset path/source as parameter if not desc.dataType == 'Table': fl = arcpy.MakeFeatureLayer_management(in_lyr)[0] xmlpath = pathlib.Path(xmlFileName).parent.absolute() / (fl.name + ".lyrx") if os.path.exists(str(xmlpath)): os.remove(str(xmlpath)) layer_path = arcpy.SaveToLayerFile_management(fl, str(xmlpath))[0] new_name = pathlib.Path(layer_path).parent.absolute() / (input_type + ".lyrx") if os.path.exists(str(new_name)): os.remove(str(new_name)) arcpy.Rename_management(layer_path, input_type + ".lyrx") pth = str(new_name) else: pth = desc.catalogPath except: lyr = getMapLayer(in_lyr) # layer name in the project/map - if not described then could be layer name if lyr != None and lyr.supports("DataSource"): pth = lyr.dataSource in_lyr = lyr else: addError("Unable to get the DataSource for the layer: " + str(in_lyr)) return '' # handle special cases for layer paths (urls, CIMWKSP, layer ids with characters) pth = repairLayerSourceUrl(pth, in_lyr) # handle special case for joined layers pth = getJoinedLayer(in_lyr, pth) addMessage(pth) return pth def 
getJoinedLayer(layer, pth): # if there is a join then save a layer file and return that path path = pth if isinstance(layer, str): # map layer as parameter layer = getMapLayer(layer) try: conn = layer.connectionProperties.get("source", None) # check for a joined layer except: conn = None if conn != None and not arcpy.Exists(path): lname = layer.name + _lyrx result = arcpy.MakeFeatureLayer_management(layer, lname) layer = result.getOutput(0) arcpy.env.overwriteOutput = True projFolder = os.path.dirname(getProject().filePath) lyrFile = os.path.join(projFolder, lname) arcpy.SaveToLayerFile_management(layer, lyrFile) desc = arcpy.Describe(lyrFile) path = desc.catalogPath return path def getSDELayer(layer, pth): # if there is an SDE layer with CIMWORKSPACE save a layer and return the path path = pth if isinstance(layer, str): # map layer as parameter layer = getMapLayer(layer) if pth.startswith(_CIMWKSP): lname = layer.name + _lyrx result = arcpy.MakeFeatureLayer_management(layer, lname) layer = result.getOutput(0) arcpy.env.overwriteOutput = True projFolder = os.path.dirname(getProject().filePath) lyrFile = os.path.join(projFolder, lname) arcpy.SaveToLayerFile_management(layer, lyrFile) desc = arcpy.Describe(lyrFile) path = desc.catalogPath return path def repairLayerSourceUrl(layerPath, lyr): # take a layer path or layer name and return the data source or repaired source # lyr parameter is here but only used in CIMWKSP case. # note that multiple if statements are used - and there can be a progression of path changes - not elif statements. 
if layerPath == "" or layerPath == None: return layerPath path = None layerPath = str(layerPath) # seems to be url object in some cases if layerPath.startswith('GIS Servers\\'): # turn into url layerPath = layerPath.replace("GIS Servers\\", '') if layerPath.startswith(_http) == True or layerPath.startswith(_https) == True: layerPath = layerPath # do nothing else: layerPath = _http + layerPath if layerPath.find('\\') > -1: path = layerPath.replace("\\", '/') layerPath = path if layerPath.startswith(_http) == True or layerPath.startswith( _https) == True: # sometimes get http/https path to start with, need to handle non-integer layerid in both cases # fix for non-integer layer ids parts = layerPath.split("/") lastPart = parts[len(parts) - 1] ints = [int(s) for s in re.findall(r'\d+', lastPart)] # scan for the integer value in the string if ints != []: lastPart = str(ints[0]) parts[len(parts) - 1] = lastPart path = "/".join(parts) if layerPath.startswith(_CIMWKSP): # create database connection and use that path connfile = getConnectionFile(lyr.connectionProperties) path = os.path.join(connfile + os.sep + layerPath[layerPath.rfind(">") + 1:]) # </CIMWorkspaceConnection>fcname path = path.replace("\\\\", "\\") # path = getSDELayer(lyr,layerPath) if path == None: # nothing done here path = layerPath return path def getTempTable(name): return os.path.join(workspace, name) def setWorkspace(): global workspace wsName = 'dla.gdb' ws = os.path.join(_dirName, wsName) if not arcpy.Exists(ws): arcpy.CreateFileGDB_management(_dirName, wsName) workspace = ws arcpy.env.workspace = workspace def deleteWorkspace(): global workspace if workspace != None and arcpy.Exists(workspace): arcpy.Delete_management(workspace) def getLayerVisibility(layer, xmlFileName): fieldInfo = None xmlDoc = getXmlDoc(xmlFileName) targets = xmlDoc.getElementsByTagName("TargetName") names = [collect_text(node).upper() for node in targets] esrinames = _ignoreFields # 
['SHAPE','OBJECTID','SHAPE_AREA','SHAPE_LENGTH','GlobalID','GLOBALID'] desc = arcpy.Describe(layer) if desc.dataType == "FeatureLayer": fieldInfo = desc.fieldInfo for index in range(0, fieldInfo.count): name = fieldInfo.getFieldName(index) if name.upper() not in names and name.upper() not in esrinames: addMessage("Hiding Field: " + name) fieldInfo.setVisible(index, "HIDDEN") return fieldInfo def refreshLayerVisibility(): prj = getProject() maps = prj.listMaps("*") for map in maps: lyrs = map.listLayers("*") for lyr in lyrs: try: isviz = lyr.visible # flip viz to redraw layer. lyr.visible = True if isviz == False else False lyr.visible = isviz except: addMessage("Could not set layer visibility") def getXmlDoc(xmlFile): # open the xmldoc and return contents xmlDoc = None try: xmlFile = xmlFile.strip("'") xmlFile = xmlFile.replace("\\", "/") xmlDoc = xml.dom.minidom.parse(xmlFile) # parse from string # xmlFile = os.path.normpath(xmlFile) except: addError("Unable to open the xmlFile: " + xmlFile) return xmlDoc def getSpatialReference(desc): # needs gp Describe object try: spref = str(desc.spatialReference.factoryCode) except: try: spref = desc.spatialReference.exportToString() except: arcpy.AddError("Could not get Spatial Reference") return spref def setupProxy(): proxies = {} if _proxyhttp != None: proxies['http'] = _http + _proxyhttp os.environ['http'] = _proxyhttp if _proxyhttps != None: proxies['https'] = _proxyhttps os.environ['https'] = _http + _proxyhttps if proxies != {}: proxy = urllib.ProxyHandler(proxies) opener = urllib.build_opener(proxy) urllib.install_opener(opener) def getConnectionFile(connectionProperties): global _xmlFolder if _xmlFolder == None: addError("_xmlFolder has not been set in code, exiting") cp = connectionProperties['connection_info'] srvr = getcp(cp, 'server') inst = getcp(cp, 'db_connection_properties') db = getcp(cp, 'database') fname = (srvr + inst + db + ".sde").replace(":", "").replace("\\", "") connfile = os.path.join(_xmlFolder, 
fname) if os.path.exists(connfile): os.remove(connfile) args = [] args.append(out_folder_path=_xmlFolder) args.append(out_name=fname) if getcp(cp, 'dbclient') != None: args.append(database_platform=getcp(cp, 'dbclient')) args.append(instance=inst) if getcp(cp, 'authentication_mode') != None: args.append(account_authentication=getcp(cp, 'authentication_mode')) if getcp(cp, 'username') != None: args.append(username=getcp(cp, 'username')) if getcp(cp, 'password') != None: args.append(username=getcp(cp, 'password')) args.append(database=db) if getcp(cp, 'schema') != None: args.append(username=getcp(cp, 'schema')) if getcp(cp, 'version') != None: args.append(username=getcp(cp, 'version')) if getcp(cp, 'date') != None: args.append(username=getcp(cp, 'date')) arcpy.CreateDatabaseConnection_management(','.join(args)) return connfile def getcp(cp, name): retval = None try: retval = cp[name] if name.lower() == 'authentication_mode': if retval == 'OSA': retval = 'OPERATING_SYSTEM_AUTH' else: retval = 'DATABASE_AUTH' elif name.lower() == 'dbclient': srcs = ['', 'altibase', 'db2 for z/os', 'informix', 'netezza', 'oracle', 'postgresql', 'sap hana', 'sqlserver', 'teradata'] targs = ['', 'ALTIBASE', 'DB2 for z/OS', 'Informix', 'Netezza', 'Oracle', 'PostgreSQL', 'SAP HANA', 'Sql Server', 'Teradata'] try: retval = targs[srcs.index(retval.lower())] except: retval = retval except: pass return retval def isTable(ds): desc = arcpy.Describe(ds) if desc.datasetType.lower() == 'table': return True else: return False def getSpatialReferenceString(xmlDoc, lyrtype): sprefstr = '' # try factoryCode first try: sprefstr = getNodeValue(xmlDoc, lyrtype + "FactoryCode") if sprefstr == '': sprefstr = getNodeValue(xmlDoc, lyrtype + "SpatialReference") except: try: sprefstr = getNodeValue(xmlDoc, lyrtype + "SpatialReference") except: pass return sprefstr def checkGlobalIdIndex(desc, gidName): valid = False for index in desc.indexes: try: if index.isUnique and index.fields[0].name == gidName: # index 
must be on correct field and unique valid = True except: pass return valid def checkDatabaseTypes(source, target): # check database types - SQL source db and SQL gdb as target supported = False try: wsType = "None" smsg = 'Workspace type does not support preserving GlobalIDs' try: wsType = arcpy.Describe(source).dataSource except: wsType = arcpy.Describe(source[:source.rfind(os.sep)]).workspaceType # might be in a feature dataset if wsType != 'RemoteDatabase': addMessage(wsType + ' Source ' + smsg) supported = False else: supported = True wsType = "None" try: wsType = arcpy.Describe(target).workspaceType except: wsType = arcpy.Describe(target[:target.rfind(os.sep)]).workspaceType # might be in a feature dataset if wsType != 'RemoteDatabase': addMessage(wsType + ' Target ' + smsg) supported = False elif supported == True: supported = True except: supported = False return supported def get_geodatabase_path(input_table): '''Return the Geodatabase path from the input table or feature class. 
:param input_table: path to the input table or feature class ''' workspace = os.path.dirname(input_table) if [any(ext) for ext in ('.gdb', '.mdb', '.sde') if ext in os.path.splitext(workspace)]: return workspace else: return os.path.dirname(workspace) def checkDatabaseType(path): # check database types - SQL source db and SQL gdb as target supported = False try: if path.lower().startswith(_http): supported = True elif path.lower().startswith(_https): supported = True elif path.lower().count(_sde) == 1: supported = True elif path.lower().endswith(_lyrx): source = getLayerFromString(path) if source.dataSource.startswith(_CIMWKSP): supported = True elif arcpy.Exists(source.dataSource): path = get_geodatabase_path(input_table=source.dataSource) d = arcpy.Describe(path) if d.workspaceType == "RemoteDatabase": supported = True elif path.lower().count(_gdb) == 1: supported = False except: supported = False return supported def compareSpatialRef(xmlDoc): # compare source and target spatial references spatRefMatch = False sref = getSpatialReferenceString(xmlDoc, 'Source') tref = getSpatialReferenceString(xmlDoc, 'Target') if tref is None or sref is None: return spatRefMatch if tref == sref: spatRefMatch = True else: sref_obj = arcpy.SpatialReference() tref_obj = arcpy.SpatialReference() sref_obj.loadFromString(sref) tref_obj.loadFromString(tref) if sref_obj.factoryCode == tref_obj.factoryCode: spatRefMatch = True elif ';' in sref and ';' in tref and tref.split(';')[0] == sref.split(';')[0]: spatRefMatch = True return spatRefMatch def processGlobalIds(xmlDoc): # logic check to determine if preserving globalids is possible process = False source = getDatasetPath(xmlDoc, 'Source') descs = arcpy.Describe(source) target = getDatasetPath(xmlDoc, 'Target') desct = arcpy.Describe(target) sGlobalId = getFieldByName(descs, 'globalIDFieldName') tGlobalId = getFieldByName(desct, 'globalIDFieldName') if sGlobalId != None and tGlobalId != None: addMessage('Source and Target datasets both 
have GlobalID fields') supportedWSs = checkDatabaseType(source) if not supportedWSs: addMessage("Source Workspace type prevents preserving GlobalIDs") supportedWSt = checkDatabaseType(target) if not supportedWSt: addMessage("Target Workspace type prevents preserving GlobalIDs") ids = checkGlobalIdIndex(descs, sGlobalId) idt = checkGlobalIdIndex(desct, tGlobalId) errmsg = 'Dataset does not have a unique index on GlobalID field, unable to preserve GlobalIDs' if not ids: addMessage('Source ' + errmsg) if not idt: addMessage('Target ' + errmsg) if ids and idt and supportedWSs and supportedWSt: process = True return process def getStagingName(source, target): stgName = getDatasetName(source) + "_" + getDatasetName(target) return os.path.basename(arcpy.CreateUniqueName(stgName, workspace)) def removeStagingElement(xmlDoc): # remove staging element from xmlDoc if len(xmlDoc.getElementsByTagName('Staged')) > 0: root = xmlDoc.getElementsByTagName('Datasets')[0] nodes = root.getElementsByTagName('Staged') for node in nodes: root.removeChild(node) return xmlDoc def insertStagingElement(xmlDoc): # insert an element to indicate that the data has been staged if len(xmlDoc.getElementsByTagName('Staged')) == 0: root = xmlDoc.getElementsByTagName('Datasets')[0] staged = xmlDoc.createElement("Staged") # set source and target elements nodeText = xmlDoc.createTextNode('true') staged.appendChild(nodeText) root.appendChild(staged) return xmlDoc def isStaged(xmlDoc): # insert an element to indicate that the data has been staged if len(xmlDoc.getElementsByTagName('Staged')) > 0: staged = True else: staged = False return staged def hasJoin(source): # counts table names in fields to determine if joined desc = arcpy.Describe(source) fc = desc.featureClass hasJoin = False tables = [] for field in fc.fields: if field.name.find('.') > 0: val = field.name.split('.')[0] if val not in tables: tables.append(val) if len(tables) > 1: hasJoin = True return hasJoin def checkIsLayerFile(val, valStr): # 
for layer file parameters the value passed in is a layer but the string version of the layer is a path to the lyrx file... if valStr.lower().endswith(_lyrx): return valStr else: return val def getFieldIndexList(values, value): # get the index number of a field in a list - case insensitive for idx, val in enumerate(values): if val.upper() == value.upper(): return idx def getLayerSourceString(lyrPath): if isinstance(lyrPath, str) and lyrPath.lower().endswith(_lyrx): if not os.path.exists(lyrPath): addMessage(str(_xmlFolder)) lyrPath = os.path.join(_xmlFolder, lyrPath) layer = arcpy.mp.LayerFile(lyrPath) fc = layer.listLayers()[0] return fc.dataSource def getLayerFromString(lyrPath): if isinstance(lyrPath, str) and lyrPath.lower().endswith(_lyrx): layer = arcpy.mp.LayerFile(lyrPath) fc = layer.listLayers()[0] return fc else: return lyrPath def getXmlDocName(xmlFile): # normalize and fix paths try: xmlFile = xmlFile.strip("'") xmlFile = os.path.normpath(xmlFile) except: addError("Unable to process file name: " + xmlFile) return xmlFile def getReplaceBy(xmlDoc): # get the where clause using the xml document or return '' repl = xmlDoc.getElementsByTagName("ReplaceBy")[0] fieldName = getNodeValue(repl, "FieldName") operator = getNodeValue(repl, "Operator") value = getNodeValue(repl, "Value") expr = '' type = getTargetType(xmlDoc, fieldName) if fieldName != '' and fieldName != '(None)' and operator != "Where": if type == 'String': value = "'" + value + "'" expr = fieldName + " " + operator + " " + value elif operator == 'Where': expr = value else: expr = '' # empty string by default return expr def getTargetType(xmlDoc, fname): # get the target field type for tfield in xmlDoc.getElementsByTagName('TargetField'): nm = tfield.getAttribute("Name") if nm == fname: return tfield.getAttribute("Type") def isFeatureLayerUrl(url): # assume layer string has already had \ and GIS Servers or other characters switched to be a url parts = url.split('/') lngth = len(parts) if lngth > 2: 
try: # check for feature server text if parts[lngth - 2] == 'FeatureServer': return True except: return False return False
apache-2.0
stackroute/LogAggregatorRT
tattvaclient/home/service/sideNavItemsFactory.js
321
angular.module('tattva') .factory('sideNavItemsFactory', ['$http', function($http){ var factory = { getSideNavItems: function() { return $http.get('/sideNav').then(function(res) { data = res.data; return data; }); } }//end of factory definition return factory; }]);
apache-2.0
PrismTech/vortex-rxjava
src/main/java/com/prismtech/vortex/rx/RxVortex.java
4986
/** * PrismTech licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with the * License and with the PrismTech Vortex product. You may obtain a copy of the * License at * <p/> * http://www.apache.org/licenses/LICENSE-2.0 * <p/> * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License and README for the specific language governing permissions and * limitations under the License. */ package com.prismtech.vortex.rx; import com.prismtech.vortex.rx.operators.OnSubscribeFromParticipant; import com.prismtech.vortex.rx.operators.OnSubscribeFromReader; import com.prismtech.vortex.rx.operators.OnSubscribeFromReaderWaitset; import com.prismtech.vortex.rx.operators.OnSubscribeToSamplesFromReader; import org.omg.dds.core.Duration; import org.omg.dds.core.ServiceEnvironment; import org.omg.dds.core.policy.QosPolicy; import org.omg.dds.domain.DomainParticipant; import org.omg.dds.sub.DataReader; import org.omg.dds.sub.Sample; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import rx.Observable; import rx.functions.Action0; import java.util.List; import java.util.concurrent.TimeUnit; public class RxVortex { private static final Logger LOGGER = LoggerFactory.getLogger(Config.RX_VORTEX_LOGGER); /** * Create a new observable for the provided {@link DataReader} <code>dr</code>. * * The observable will be a stream of the data contained in each sample. * * It is backed by a wait set rather than a Listener. 
* * @param dr * the data reader being observed * @param <TOPIC> * the type of dat * @return */ public static <TOPIC> Observable<TOPIC> fromDataReaderWS(final DataReader<TOPIC> dr) { ServiceEnvironment env = ServiceEnvironment.createInstance( RxVortex.class.getClassLoader() ); Observable<TOPIC> subject = Observable.create(new OnSubscribeFromReaderWaitset<TOPIC>(dr, Duration.newDuration(60, TimeUnit.SECONDS, env))); return subject; } /** * Create a new observable for the provided {@link DataReader} <code>dr</code>. * * The observable will be a stream of the data contained in each sample. * * @param dr * the data reader being observed * @param <TOPIC> * the type of data * @return */ public static <TOPIC> Observable<TOPIC> fromDataReader(final DataReader<TOPIC> dr) { Observable<TOPIC> subject = Observable.create(OnSubscribeFromReader.create(dr)); return subject; } /** * Create a new observable for the provided {@link DataReader} <code>dr</code>. * * The observable will be a stream of the samples. * * @param dr * the data reader being observed * @param <TOPIC> * the type of data * @return */ public static <TOPIC> Observable<Sample<TOPIC>> samplesFromDataReader(final DataReader<TOPIC> dr) { Observable<Sample<TOPIC>> subject = Observable.create(OnSubscribeToSamplesFromReader.create(dr)); return subject; } /** * Create a new observable for the <code>topicName</code> using the provided {@link DomainParticipant}. * * The observable will be a stream of the data contained in each sample. 
* * @param participant * the participant to create the observable from * @param topicName * the name of the topic that is being observed * @param topicType * the type of the topic that is being observed * @param partitions * a possibly empty list of the partitions in which the topic is being observed * @param qos * a possibly empty of {@link QosPolicy.ForDataReader} * @param <TOPIC> * the type of data * @return * the observable */ public static <TOPIC> Observable<TOPIC> fromParticipant(final DomainParticipant participant, final String topicName, final Class<TOPIC> topicType, final List<String> partitions, final QosPolicy.ForDataReader... qos) { final OnSubscribeFromParticipant<TOPIC> onSubscribe = OnSubscribeFromParticipant.create(participant, topicName, topicType, partitions); final Observable<TOPIC> observable = Observable.create(onSubscribe); observable.doOnTerminate(new Action0() { @Override public void call() { onSubscribe.cleanUp(); } }); return observable; } }
apache-2.0
zhangkun83/grpc-java
alts/src/generated/main/java/io/grpc/alts/internal/StartClientHandshakeReqOrBuilder.java
8495
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: grpc/gcp/handshaker.proto package io.grpc.alts.internal; public interface StartClientHandshakeReqOrBuilder extends // @@protoc_insertion_point(interface_extends:grpc.gcp.StartClientHandshakeReq) com.google.protobuf.MessageOrBuilder { /** * <pre> * Handshake security protocol requested by the client. * </pre> * * <code>.grpc.gcp.HandshakeProtocol handshake_security_protocol = 1;</code> */ int getHandshakeSecurityProtocolValue(); /** * <pre> * Handshake security protocol requested by the client. * </pre> * * <code>.grpc.gcp.HandshakeProtocol handshake_security_protocol = 1;</code> */ io.grpc.alts.internal.HandshakeProtocol getHandshakeSecurityProtocol(); /** * <pre> * The application protocols supported by the client, e.g., "h2" (for http2), * "grpc". * </pre> * * <code>repeated string application_protocols = 2;</code> */ java.util.List<java.lang.String> getApplicationProtocolsList(); /** * <pre> * The application protocols supported by the client, e.g., "h2" (for http2), * "grpc". * </pre> * * <code>repeated string application_protocols = 2;</code> */ int getApplicationProtocolsCount(); /** * <pre> * The application protocols supported by the client, e.g., "h2" (for http2), * "grpc". * </pre> * * <code>repeated string application_protocols = 2;</code> */ java.lang.String getApplicationProtocols(int index); /** * <pre> * The application protocols supported by the client, e.g., "h2" (for http2), * "grpc". * </pre> * * <code>repeated string application_protocols = 2;</code> */ com.google.protobuf.ByteString getApplicationProtocolsBytes(int index); /** * <pre> * The record protocols supported by the client, e.g., * "ALTSRP_GCM_AES128". * </pre> * * <code>repeated string record_protocols = 3;</code> */ java.util.List<java.lang.String> getRecordProtocolsList(); /** * <pre> * The record protocols supported by the client, e.g., * "ALTSRP_GCM_AES128". 
* </pre> * * <code>repeated string record_protocols = 3;</code> */ int getRecordProtocolsCount(); /** * <pre> * The record protocols supported by the client, e.g., * "ALTSRP_GCM_AES128". * </pre> * * <code>repeated string record_protocols = 3;</code> */ java.lang.String getRecordProtocols(int index); /** * <pre> * The record protocols supported by the client, e.g., * "ALTSRP_GCM_AES128". * </pre> * * <code>repeated string record_protocols = 3;</code> */ com.google.protobuf.ByteString getRecordProtocolsBytes(int index); /** * <pre> * (Optional) Describes which server identities are acceptable by the client. * If target identities are provided and none of them matches the peer * identity of the server, handshake will fail. * </pre> * * <code>repeated .grpc.gcp.Identity target_identities = 4;</code> */ java.util.List<io.grpc.alts.internal.Identity> getTargetIdentitiesList(); /** * <pre> * (Optional) Describes which server identities are acceptable by the client. * If target identities are provided and none of them matches the peer * identity of the server, handshake will fail. * </pre> * * <code>repeated .grpc.gcp.Identity target_identities = 4;</code> */ io.grpc.alts.internal.Identity getTargetIdentities(int index); /** * <pre> * (Optional) Describes which server identities are acceptable by the client. * If target identities are provided and none of them matches the peer * identity of the server, handshake will fail. * </pre> * * <code>repeated .grpc.gcp.Identity target_identities = 4;</code> */ int getTargetIdentitiesCount(); /** * <pre> * (Optional) Describes which server identities are acceptable by the client. * If target identities are provided and none of them matches the peer * identity of the server, handshake will fail. * </pre> * * <code>repeated .grpc.gcp.Identity target_identities = 4;</code> */ java.util.List<? 
extends io.grpc.alts.internal.IdentityOrBuilder> getTargetIdentitiesOrBuilderList(); /** * <pre> * (Optional) Describes which server identities are acceptable by the client. * If target identities are provided and none of them matches the peer * identity of the server, handshake will fail. * </pre> * * <code>repeated .grpc.gcp.Identity target_identities = 4;</code> */ io.grpc.alts.internal.IdentityOrBuilder getTargetIdentitiesOrBuilder( int index); /** * <pre> * (Optional) Application may specify a local identity. Otherwise, the * handshaker chooses a default local identity. * </pre> * * <code>.grpc.gcp.Identity local_identity = 5;</code> */ boolean hasLocalIdentity(); /** * <pre> * (Optional) Application may specify a local identity. Otherwise, the * handshaker chooses a default local identity. * </pre> * * <code>.grpc.gcp.Identity local_identity = 5;</code> */ io.grpc.alts.internal.Identity getLocalIdentity(); /** * <pre> * (Optional) Application may specify a local identity. Otherwise, the * handshaker chooses a default local identity. * </pre> * * <code>.grpc.gcp.Identity local_identity = 5;</code> */ io.grpc.alts.internal.IdentityOrBuilder getLocalIdentityOrBuilder(); /** * <pre> * (Optional) Local endpoint information of the connection to the server, * such as local IP address, port number, and network protocol. * </pre> * * <code>.grpc.gcp.Endpoint local_endpoint = 6;</code> */ boolean hasLocalEndpoint(); /** * <pre> * (Optional) Local endpoint information of the connection to the server, * such as local IP address, port number, and network protocol. * </pre> * * <code>.grpc.gcp.Endpoint local_endpoint = 6;</code> */ io.grpc.alts.internal.Endpoint getLocalEndpoint(); /** * <pre> * (Optional) Local endpoint information of the connection to the server, * such as local IP address, port number, and network protocol. 
* </pre> * * <code>.grpc.gcp.Endpoint local_endpoint = 6;</code> */ io.grpc.alts.internal.EndpointOrBuilder getLocalEndpointOrBuilder(); /** * <pre> * (Optional) Endpoint information of the remote server, such as IP address, * port number, and network protocol. * </pre> * * <code>.grpc.gcp.Endpoint remote_endpoint = 7;</code> */ boolean hasRemoteEndpoint(); /** * <pre> * (Optional) Endpoint information of the remote server, such as IP address, * port number, and network protocol. * </pre> * * <code>.grpc.gcp.Endpoint remote_endpoint = 7;</code> */ io.grpc.alts.internal.Endpoint getRemoteEndpoint(); /** * <pre> * (Optional) Endpoint information of the remote server, such as IP address, * port number, and network protocol. * </pre> * * <code>.grpc.gcp.Endpoint remote_endpoint = 7;</code> */ io.grpc.alts.internal.EndpointOrBuilder getRemoteEndpointOrBuilder(); /** * <pre> * (Optional) If target name is provided, a secure naming check is performed * to verify that the peer authenticated identity is indeed authorized to run * the target name. * </pre> * * <code>string target_name = 8;</code> */ java.lang.String getTargetName(); /** * <pre> * (Optional) If target name is provided, a secure naming check is performed * to verify that the peer authenticated identity is indeed authorized to run * the target name. * </pre> * * <code>string target_name = 8;</code> */ com.google.protobuf.ByteString getTargetNameBytes(); /** * <pre> * (Optional) RPC protocol versions supported by the client. * </pre> * * <code>.grpc.gcp.RpcProtocolVersions rpc_versions = 9;</code> */ boolean hasRpcVersions(); /** * <pre> * (Optional) RPC protocol versions supported by the client. * </pre> * * <code>.grpc.gcp.RpcProtocolVersions rpc_versions = 9;</code> */ io.grpc.alts.internal.RpcProtocolVersions getRpcVersions(); /** * <pre> * (Optional) RPC protocol versions supported by the client. 
* </pre> * * <code>.grpc.gcp.RpcProtocolVersions rpc_versions = 9;</code> */ io.grpc.alts.internal.RpcProtocolVersionsOrBuilder getRpcVersionsOrBuilder(); }
apache-2.0
tensorflow/probability
spinoffs/oryx/oryx/experimental/nn/pooling.py
4391
# Copyright 2020 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Contains building blocks for pooling layers used for neural networks."""
import abc
import collections
from typing import Any

from jax import lax
from jax import numpy as np

from oryx.core import state
from oryx.experimental.nn import base

__all__ = [
    'Pooling',
    'MaxPooling',
    'SumPooling',
    'AvgPooling'
]

# Static configuration shared by all pooling layers:
#   window_shape: spatial extent of the pooling window (no batch/channel dims).
#   dims:         `window_shape` padded with a leading batch dim and trailing
#                 channel dim of size 1, i.e. the window in NHWC/NHC layout.
#   strides:      per-dimension strides, likewise padded to NHWC/NHC.
#   padding:      either the string 'SAME'/'VALID' or, after `spec`, an
#                 explicit list of (lo, hi) pad pairs.
PoolingInfo = collections.namedtuple(
    'PoolingInfo',
    ['window_shape', 'dims', 'strides', 'padding'])


class Pooling(base.Layer, metaclass=abc.ABCMeta):
  """Base class for Pooling layers."""

  @classmethod
  def initialize(cls, rng, in_spec, window_shape,
                 strides=None, padding='VALID'):
    """Initializes Pooling layers.

    Args:
      rng: Random key.
      in_spec: Spec, specifying the input shape and dtype.
      window_shape: Int Tuple, specifying the Pooling window shape.
      strides: Optional tuple with pooling strides. If None, it will use
        stride 1 for each dimension in window_shape.
      padding: Either the string "SAME" or "VALID" indicating the type of
        padding algorithm to use. "SAME" would preserve the same input size,
        while "VALID" would reduce the input size.

    Returns:
      Tuple with the output shape and the LayerParams.
    """
    # Pooling is stateless, so the input spec is not needed here.
    del in_spec
    # Default to non-overlapping stride 1 in every spatial dimension.
    strides = strides or (1,) * len(window_shape)
    # Extend window and strides with singleton batch/channel dims so they
    # line up with the batched NHWC (or NHC) tensors seen at call time.
    dims = (1,) + window_shape + (1,)  # NHWC or NHC
    strides = (1,) + strides + (1,)
    info = PoolingInfo(window_shape, dims, strides, padding)
    return base.LayerParams(info=info)

  @classmethod
  def spec(cls, in_spec, window_shape, strides=None, padding='VALID'):
    # Computes the (unbatched) output Shape for a given input Shape by
    # delegating the shape arithmetic to lax's reduce_window helpers.
    in_shape = in_spec.shape
    if len(in_shape) > 3:
      raise ValueError('Need to `jax.vmap` in order to batch')
    # Add a dummy batch dimension for the lax shape computation.
    in_shape = (1,) + in_shape
    dims = (1,) + window_shape + (1,)  # NHWC or NHC
    # Axes 0 (batch) and len(window_shape) + 1 (channel) are not pooled over.
    non_spatial_axes = 0, len(window_shape) + 1
    strides = strides or (1,) * len(window_shape)
    # Insert singleton entries for the non-spatial axes so window_shape and
    # strides have one entry per input dimension.
    for i in sorted(non_spatial_axes):
      window_shape = window_shape[:i] + (1,) + window_shape[i:]
      strides = strides[:i] + (1,) + strides[i:]
    # Resolve 'SAME'/'VALID' into explicit (lo, hi) padding pairs.
    padding = lax.padtype_to_pads(in_shape, window_shape, strides, padding)
    out_shape = lax.reduce_window_shape_tuple(in_shape, dims, strides, padding)
    # Drop the dummy batch dimension again.
    out_shape = out_shape[1:]
    return state.Shape(out_shape, dtype=in_spec.dtype)

  def _call(self, x):
    # Applies pooling to a single (unbatched) example by temporarily adding
    # a batch dimension; batching is expected to go through `jax.vmap`.
    if len(x.shape) > 3:
      raise ValueError('Need to `jax.vmap` in order to batch')
    result = self._call_batched(x[np.newaxis])
    return result[0]

  def _call_and_update_batched(self, x):
    # Pooling layers carry no mutable state, so the layer is returned as-is.
    return self._call_batched(x), self

  @abc.abstractmethod
  def _call_batched(self, x) -> Any:
    # Subclasses implement the actual windowed reduction on a batched input.
    raise NotImplementedError


class MaxPooling(Pooling):
  """Max pooling layer, computes the maximum within the window."""

  def _call_batched(self, x):
    info = self.info
    # -inf is the identity element for `max`.
    return lax.reduce_window(x, -np.inf, lax.max, info.dims,
                             info.strides, info.padding)


class SumPooling(Pooling):
  """Sum pooling layer, computes the sum within the window."""

  def _call_batched(self, x):
    info = self.info
    # 0 is the identity element for `add`.
    return lax.reduce_window(x, 0., lax.add, info.dims,
                             info.strides, info.padding)


class AvgPooling(Pooling):
  """Average pooling layer, computes the average within the window."""

  def _call_batched(self, x):
    info = self.info
    # Sum-pool a tensor of ones over the spatial dims to obtain the actual
    # window size at every output position (windows at the edges may be
    # smaller under 'SAME' padding), then divide the pooled sums by it.
    one = np.ones(x.shape[1:-1], dtype=x.dtype)
    window_strides = info.strides[1:-1]
    window_sizes = lax.reduce_window(one, 0., lax.add, info.window_shape,
                                     window_strides, info.padding)
    outputs = lax.reduce_window(x, 0., lax.add, info.dims, info.strides,
                                info.padding)
    # Broadcast the per-position window sizes across the channel dimension.
    return outputs / window_sizes[..., np.newaxis]
apache-2.0
pjain1/druid
indexing-hadoop/src/test/java/org/apache/druid/indexer/BatchDeltaIngestionTest.java
17931
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.druid.indexer; import com.fasterxml.jackson.databind.InjectableValues; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.jsontype.NamedType; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import org.apache.commons.io.FileUtils; import org.apache.druid.data.input.Firehose; import org.apache.druid.data.input.InputRow; import org.apache.druid.data.input.impl.CSVParseSpec; import org.apache.druid.data.input.impl.DimensionsSpec; import org.apache.druid.data.input.impl.StringInputRowParser; import org.apache.druid.data.input.impl.TimestampSpec; import org.apache.druid.hll.HyperLogLogCollector; import org.apache.druid.indexer.hadoop.WindowedDataSegment; import org.apache.druid.jackson.DefaultObjectMapper; import org.apache.druid.java.util.common.DateTimes; import org.apache.druid.java.util.common.Intervals; import org.apache.druid.java.util.common.StringUtils; import org.apache.druid.java.util.common.granularity.Granularities; import org.apache.druid.query.aggregation.AggregatorFactory; import org.apache.druid.query.aggregation.LongSumAggregatorFactory; 
import org.apache.druid.query.aggregation.hyperloglog.HyperUniquesAggregatorFactory; import org.apache.druid.segment.IndexIO; import org.apache.druid.segment.QueryableIndex; import org.apache.druid.segment.QueryableIndexStorageAdapter; import org.apache.druid.segment.StorageAdapter; import org.apache.druid.segment.indexing.DataSchema; import org.apache.druid.segment.indexing.granularity.UniformGranularitySpec; import org.apache.druid.segment.loading.LocalDataSegmentPuller; import org.apache.druid.segment.realtime.firehose.IngestSegmentFirehose; import org.apache.druid.segment.realtime.firehose.WindowedStorageAdapter; import org.apache.druid.segment.transform.TransformSpec; import org.apache.druid.timeline.DataSegment; import org.apache.druid.timeline.DataSegment.PruneSpecsHolder; import org.apache.druid.timeline.partition.HashBasedNumberedShardSpec; import org.joda.time.Interval; import org.junit.Assert; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TemporaryFolder; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.Map; public class BatchDeltaIngestionTest { @Rule public final TemporaryFolder temporaryFolder = new TemporaryFolder(); private static final ObjectMapper MAPPER; private static final IndexIO INDEX_IO; private static final Interval INTERVAL_FULL = Intervals.of("2014-10-22T00:00:00Z/P1D"); private static final Interval INTERVAL_PARTIAL = Intervals.of("2014-10-22T00:00:00Z/PT2H"); private static final DataSegment SEGMENT; static { MAPPER = new DefaultObjectMapper(); MAPPER.registerSubtypes(new NamedType(HashBasedNumberedShardSpec.class, "hashed")); InjectableValues inject = new InjectableValues.Std() .addValue(ObjectMapper.class, MAPPER) .addValue(PruneSpecsHolder.class, PruneSpecsHolder.DEFAULT); MAPPER.setInjectableValues(inject); INDEX_IO = HadoopDruidIndexerConfig.INDEX_IO; try { SEGMENT = MAPPER .readValue( 
BatchDeltaIngestionTest.class.getClassLoader().getResource("test-segment/descriptor.json"), DataSegment.class ) .withLoadSpec( ImmutableMap.of( "type", "local", "path", BatchDeltaIngestionTest.class.getClassLoader().getResource("test-segment/index.zip").getPath() ) ); } catch (IOException e) { throw new RuntimeException(e); } } @Test public void testReindexing() throws Exception { List<WindowedDataSegment> segments = ImmutableList.of(new WindowedDataSegment(SEGMENT, INTERVAL_FULL)); HadoopDruidIndexerConfig config = makeHadoopDruidIndexerConfig( ImmutableMap.of( "type", "dataSource", "ingestionSpec", ImmutableMap.of( "dataSource", "testds", "interval", INTERVAL_FULL ), "segments", segments ), temporaryFolder.newFolder() ); List<ImmutableMap<String, Object>> expectedRows = ImmutableList.of( ImmutableMap.of( "time", DateTimes.of("2014-10-22T00:00:00.000Z"), "host", ImmutableList.of("a.example.com"), "visited_sum", 100L, "unique_hosts", 1.0d ), ImmutableMap.of( "time", DateTimes.of("2014-10-22T01:00:00.000Z"), "host", ImmutableList.of("b.example.com"), "visited_sum", 150L, "unique_hosts", 1.0d ), ImmutableMap.of( "time", DateTimes.of("2014-10-22T02:00:00.000Z"), "host", ImmutableList.of("c.example.com"), "visited_sum", 200L, "unique_hosts", 1.0d ) ); testIngestion( config, expectedRows, Iterables.getOnlyElement(segments), ImmutableList.of("host"), ImmutableList.of("visited_sum", "unique_hosts") ); } /** * By default re-indexing expects same aggregators as used by original indexing job. But, with additional flag * "useNewAggs" in DatasourcePathSpec, user can optionally have any set of aggregators. * See https://github.com/apache/druid/issues/5277 . 
*/ @Test public void testReindexingWithNewAggregators() throws Exception { List<WindowedDataSegment> segments = ImmutableList.of(new WindowedDataSegment(SEGMENT, INTERVAL_FULL)); AggregatorFactory[] aggregators = new AggregatorFactory[]{ new LongSumAggregatorFactory("visited_sum2", "visited_sum"), new HyperUniquesAggregatorFactory("unique_hosts2", "unique_hosts") }; Map<String, Object> inputSpec = ImmutableMap.of( "type", "dataSource", "ingestionSpec", ImmutableMap.of( "dataSource", "testds", "interval", INTERVAL_FULL ), "segments", segments, "useNewAggs", true ); File tmpDir = temporaryFolder.newFolder(); HadoopDruidIndexerConfig config = makeHadoopDruidIndexerConfig( inputSpec, tmpDir, aggregators ); List<ImmutableMap<String, Object>> expectedRows = ImmutableList.of( ImmutableMap.of( "time", DateTimes.of("2014-10-22T00:00:00.000Z"), "host", ImmutableList.of("a.example.com"), "visited_sum2", 100L, "unique_hosts2", 1.0d ), ImmutableMap.of( "time", DateTimes.of("2014-10-22T01:00:00.000Z"), "host", ImmutableList.of("b.example.com"), "visited_sum2", 150L, "unique_hosts2", 1.0d ), ImmutableMap.of( "time", DateTimes.of("2014-10-22T02:00:00.000Z"), "host", ImmutableList.of("c.example.com"), "visited_sum2", 200L, "unique_hosts2", 1.0d ) ); testIngestion( config, expectedRows, Iterables.getOnlyElement(segments), ImmutableList.of("host"), ImmutableList.of("visited_sum2", "unique_hosts2") ); } @Test public void testReindexingWithPartialWindow() throws Exception { List<WindowedDataSegment> segments = ImmutableList.of(new WindowedDataSegment(SEGMENT, INTERVAL_PARTIAL)); HadoopDruidIndexerConfig config = makeHadoopDruidIndexerConfig( ImmutableMap.of( "type", "dataSource", "ingestionSpec", ImmutableMap.of( "dataSource", "testds", "interval", INTERVAL_FULL ), "segments", segments ), temporaryFolder.newFolder() ); List<ImmutableMap<String, Object>> expectedRows = ImmutableList.of( ImmutableMap.of( "time", DateTimes.of("2014-10-22T00:00:00.000Z"), "host", 
ImmutableList.of("a.example.com"), "visited_sum", 100L, "unique_hosts", 1.0d ), ImmutableMap.of( "time", DateTimes.of("2014-10-22T01:00:00.000Z"), "host", ImmutableList.of("b.example.com"), "visited_sum", 150L, "unique_hosts", 1.0d ) ); testIngestion( config, expectedRows, Iterables.getOnlyElement(segments), ImmutableList.of("host"), ImmutableList.of("visited_sum", "unique_hosts") ); } @Test public void testDeltaIngestion() throws Exception { File tmpDir = temporaryFolder.newFolder(); File dataFile1 = new File(tmpDir, "data1"); FileUtils.writeLines( dataFile1, ImmutableList.of( "2014102200,a.example.com,a.example.com,90", "2014102201,b.example.com,b.example.com,25" ) ); File dataFile2 = new File(tmpDir, "data2"); FileUtils.writeLines( dataFile2, ImmutableList.of( "2014102202,c.example.com,c.example.com,70" ) ); //using a hadoop glob path to test that it continues to work with hadoop MultipleInputs usage and not //affected by //https://issues.apache.org/jira/browse/MAPREDUCE-5061 String inputPath = tmpDir.getPath() + "/{data1,data2}"; List<WindowedDataSegment> segments = ImmutableList.of(new WindowedDataSegment(SEGMENT, INTERVAL_FULL)); HadoopDruidIndexerConfig config = makeHadoopDruidIndexerConfig( ImmutableMap.of( "type", "multi", "children", ImmutableList.of( ImmutableMap.of( "type", "dataSource", "ingestionSpec", ImmutableMap.of( "dataSource", "testds", "interval", INTERVAL_FULL ), "segments", segments ), ImmutableMap.<String, Object>of( "type", "static", "paths", inputPath ) ) ), temporaryFolder.newFolder() ); List<ImmutableMap<String, Object>> expectedRows = ImmutableList.of( ImmutableMap.of( "time", DateTimes.of("2014-10-22T00:00:00.000Z"), "host", ImmutableList.of("a.example.com"), "visited_sum", 190L, "unique_hosts", 1.0d ), ImmutableMap.of( "time", DateTimes.of("2014-10-22T01:00:00.000Z"), "host", ImmutableList.of("b.example.com"), "visited_sum", 175L, "unique_hosts", 1.0d ), ImmutableMap.of( "time", DateTimes.of("2014-10-22T02:00:00.000Z"), "host", 
ImmutableList.of("c.example.com"), "visited_sum", 270L, "unique_hosts", 1.0d ) ); testIngestion( config, expectedRows, Iterables.getOnlyElement(segments), ImmutableList.of("host"), ImmutableList.of("visited_sum", "unique_hosts") ); } private void testIngestion( HadoopDruidIndexerConfig config, List<ImmutableMap<String, Object>> expectedRowsGenerated, WindowedDataSegment windowedDataSegment, List<String> expectedDimensions, List<String> expectedMetrics ) throws Exception { IndexGeneratorJob job = new IndexGeneratorJob(config); Assert.assertTrue(JobHelper.runJobs(ImmutableList.of(job), config)); File segmentFolder = new File( StringUtils.format( "%s/%s/%s_%s/%s/0", config.getSchema().getIOConfig().getSegmentOutputPath(), config.getSchema().getDataSchema().getDataSource(), INTERVAL_FULL.getStart().toString(), INTERVAL_FULL.getEnd().toString(), config.getSchema().getTuningConfig().getVersion() ) ); Assert.assertTrue(segmentFolder.exists()); File indexZip = new File(segmentFolder, "index.zip"); Assert.assertTrue(indexZip.exists()); File tmpUnzippedSegmentDir = temporaryFolder.newFolder(); new LocalDataSegmentPuller().getSegmentFiles(indexZip, tmpUnzippedSegmentDir); QueryableIndex index = INDEX_IO.loadIndex(tmpUnzippedSegmentDir); StorageAdapter adapter = new QueryableIndexStorageAdapter(index); Firehose firehose = new IngestSegmentFirehose( ImmutableList.of(new WindowedStorageAdapter(adapter, windowedDataSegment.getInterval())), TransformSpec.NONE, expectedDimensions, expectedMetrics, null ); List<InputRow> rows = new ArrayList<>(); while (firehose.hasMore()) { rows.add(firehose.nextRow()); } verifyRows(expectedRowsGenerated, rows, expectedDimensions, expectedMetrics); } private HadoopDruidIndexerConfig makeHadoopDruidIndexerConfig(Map<String, Object> inputSpec, File tmpDir) throws Exception { return makeHadoopDruidIndexerConfig(inputSpec, tmpDir, null); } private HadoopDruidIndexerConfig makeHadoopDruidIndexerConfig( Map<String, Object> inputSpec, File tmpDir, 
AggregatorFactory[] aggregators ) throws Exception { HadoopDruidIndexerConfig config = new HadoopDruidIndexerConfig( new HadoopIngestionSpec( new DataSchema( "website", MAPPER.convertValue( new StringInputRowParser( new CSVParseSpec( new TimestampSpec("timestamp", "yyyyMMddHH", null), new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("host")), null, null), null, ImmutableList.of("timestamp", "host", "host2", "visited_num"), false, 0 ), null ), Map.class ), aggregators != null ? aggregators : new AggregatorFactory[]{ new LongSumAggregatorFactory("visited_sum", "visited_num"), new HyperUniquesAggregatorFactory("unique_hosts", "host2") }, new UniformGranularitySpec(Granularities.DAY, Granularities.NONE, ImmutableList.of(INTERVAL_FULL)), null, MAPPER ), new HadoopIOConfig( inputSpec, null, tmpDir.getCanonicalPath() ), new HadoopTuningConfig( tmpDir.getCanonicalPath(), null, null, null, null, null, null, null, false, false, false, false, null, false, false, null, null, null, false, false, null, null, null, null ) ) ); config.setShardSpecs( ImmutableMap.of( INTERVAL_FULL.getStartMillis(), ImmutableList.of( new HadoopyShardSpec( new HashBasedNumberedShardSpec(0, 1, 0, 1, null, HadoopDruidIndexerConfig.JSON_MAPPER), 0 ) ) ) ); config = HadoopDruidIndexerConfig.fromSpec(config.getSchema()); return config; } private void verifyRows( List<ImmutableMap<String, Object>> expectedRows, List<InputRow> actualRows, List<String> expectedDimensions, List<String> expectedMetrics ) { Assert.assertEquals(expectedRows.size(), actualRows.size()); for (int i = 0; i < expectedRows.size(); i++) { Map<String, Object> expected = expectedRows.get(i); InputRow actual = actualRows.get(i); Assert.assertEquals(expected.get("time"), actual.getTimestamp()); Assert.assertEquals(expectedDimensions, actual.getDimensions()); expectedDimensions.forEach(s -> Assert.assertEquals(expected.get(s), actual.getDimension(s))); for (String metric : expectedMetrics) { Object actualValue = 
actual.getRaw(metric); if (actualValue instanceof HyperLogLogCollector) { Assert.assertEquals( (Double) expected.get(metric), (Double) HyperUniquesAggregatorFactory.estimateCardinality(actualValue, false), 0.001 ); } else { Assert.assertEquals(expected.get(metric), actual.getMetric(metric)); } } } } }
apache-2.0
nickman/HeliosStreams
metric-hub/src/main/java/com/heliosapm/metrichub/speedment/tsdb/public_/tsd_tagk/TsdTagkImpl.java
498
package com.heliosapm.metrichub.speedment.tsdb.public_.tsd_tagk;

import com.heliosapm.metrichub.speedment.tsdb.public_.tsd_tagk.generated.GeneratedTsdTagkImpl;

/**
 * The default implementation of the {@link
 * com.heliosapm.metrichub.speedment.tsdb.public_.tsd_tagk.TsdTagk}-interface.
 * <p>
 * This file is safe to edit. It will not be overwritten by the code generator.
 *
 * @author Helios APM
 */
public final class TsdTagkImpl extends GeneratedTsdTagkImpl implements TsdTagk {
    // Intentionally empty: all column accessors are inherited from the
    // generated superclass. Add hand-written customizations here; this
    // class (unlike its Generated* parent) is never regenerated.
}
apache-2.0
rastislavszabo/vpp
vendor/github.com/ligato/vpp-agent/plugins/vpp/srplugin/vppcalls/vpp2001/srv6.go
26405
// Copyright (c) 2019 Bell Canada, Pantheon Technologies and/or its affiliates. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at: // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // Package vpp2001 contains wrappers over VPP (version 19.01) binary APIs to simplify their usage package vpp2001 import ( "fmt" "net" "regexp" "strconv" "strings" "github.com/ligato/cn-infra/logging" ifs "github.com/ligato/vpp-agent/api/models/vpp/interfaces" srv6 "github.com/ligato/vpp-agent/api/models/vpp/srv6" vpp_ifs "github.com/ligato/vpp-agent/plugins/vpp/binapi/vpp2001/interfaces" vpp_sr "github.com/ligato/vpp-agent/plugins/vpp/binapi/vpp2001/sr" ) // Constants for behavior function hardcoded into VPP (there can be also custom behavior functions implemented as VPP plugins) // Constants are taken from VPP's vnet/srv6/sr.h (names are modified to Golang from original C form in VPP code) const ( BehaviorEnd uint8 = iota + 1 // Behavior of simple endpoint BehaviorX // Behavior of endpoint with Layer-3 cross-connect BehaviorT // Behavior of endpoint with specific IPv6 table lookup BehaviorDfirst // Unused. 
Separator in between regular and D BehaviorDX2 // Behavior of endpoint with decapulation and Layer-2 cross-connect (or DX2 with egress VLAN rewrite when VLAN notzero - not supported this variant yet) BehaviorDX6 // Behavior of endpoint with decapsulation and IPv6 cross-connect BehaviorDX4 // Behavior of endpoint with decapsulation and IPv4 cross-connect BehaviorDT6 // Behavior of endpoint with decapsulation and specific IPv6 table lookup BehaviorDT4 // Behavior of endpoint with decapsulation and specific IPv4 table lookup BehaviorLast // seems unused, note in VPP: "Must always be the last one" ) // Constants for steering type // Constants are taken from VPP's vnet/srv6/sr.h (names are modified to Golang from original C form in VPP code) const ( SteerTypeL2 uint8 = 2 SteerTypeIPv4 uint8 = 4 SteerTypeIPv6 uint8 = 6 ) // Constants for operation of SR policy modify binary API method const ( AddSRList uint8 = iota + 1 // Add SR List to an existing SR policy DeleteSRList // Delete SR List from an existing SR policy ModifyWeightOfSRList // Modify the weight of an existing SR List ) // AddLocalSid adds local sid <localSID> into VPP func (h *SRv6VppHandler) AddLocalSid(localSID *srv6.LocalSID) error { return h.addDelLocalSid(false, localSID) } // DeleteLocalSid deletes local sid <localSID> in VPP func (h *SRv6VppHandler) DeleteLocalSid(localSID *srv6.LocalSID) error { return h.addDelLocalSid(true, localSID) } func (h *SRv6VppHandler) addDelLocalSid(deletion bool, localSID *srv6.LocalSID) error { h.log.WithFields(logging.Fields{"localSID": localSID.GetSid(), "delete": deletion, "installationVrfID": h.installationVrfID(localSID), "end function": h.endFunction(localSID)}). 
Debug("Adding/deleting Local SID", localSID.GetSid()) sidAddr, err := parseIPv6(localSID.GetSid()) // parsing to get some standard sid form if err != nil { return fmt.Errorf("sid address %s is not IPv6 address: %v", localSID.GetSid(), err) // calls from descriptor are already validated } if !deletion && localSID.GetEndFunction_AD() != nil { return h.addSRProxy(sidAddr, localSID) } req := &vpp_sr.SrLocalsidAddDel{ IsDel: boolToUint(deletion), Localsid: vpp_sr.Srv6Sid{Addr: []byte(sidAddr)}, } req.FibTable = localSID.InstallationVrfId // where to install localsid entry/from where to remove installed localsid entry if !deletion { if err := h.writeEndFunction(req, localSID); err != nil { return err } } reply := &vpp_sr.SrLocalsidAddDelReply{} if err := h.callsChannel.SendRequest(req).ReceiveReply(reply); err != nil { return err } if reply.Retval != 0 { return fmt.Errorf("vpp call %q returned: %d", reply.GetMessageName(), reply.Retval) } h.log.WithFields(logging.Fields{"localSID": sidAddr, "delete": deletion, "installationVrfID": h.installationVrfID(localSID), "end function": h.endFunction(localSID)}). Debug("Added/deleted Local SID ", sidAddr) return nil } // addSRProxy adds local sid with SR-proxy end function (End.AD). This functionality has no binary API in VPP, therefore // CLI commands are used (VPE binary API that calls VPP's CLI). 
func (h *SRv6VppHandler) addSRProxy(sidAddr net.IP, localSID *srv6.LocalSID) error { // get VPP-internal names of IN and OUT interfaces names, err := h.interfaceNameMapping() if err != nil { return fmt.Errorf("can't convert interface names from etcd to VPP-internal interface names:%v", err) } outInterface, found := names[localSID.GetEndFunction_AD().OutgoingInterface] if !found { return fmt.Errorf("can't find VPP-internal name for interface %v (name in etcd)", localSID.GetEndFunction_AD().OutgoingInterface) } inInterface, found := names[localSID.GetEndFunction_AD().IncomingInterface] if !found { return fmt.Errorf("can't find VPP-internal name for interface %v (name in etcd)", localSID.GetEndFunction_AD().IncomingInterface) } // add SR-proxy using VPP CLI var cmd string if strings.TrimSpace(localSID.GetEndFunction_AD().L3ServiceAddress) == "" { // L2 service cmd = fmt.Sprintf("sr localsid address %v fib-table %v behavior end.ad oif %v iif %v", sidAddr, localSID.InstallationVrfId, outInterface, inInterface) } else { // L3 service cmd = fmt.Sprintf("sr localsid address %v fib-table %v behavior end.ad nh %v oif %v iif %v", sidAddr, localSID.InstallationVrfId, localSID.GetEndFunction_AD().L3ServiceAddress, outInterface, inInterface) } data, err := h.RunCli(cmd) if err != nil { return err } if len(strings.TrimSpace(string(data))) > 0 { return fmt.Errorf("addition of dynamic segment routing proxy failed by returning nonblank space text in CLI: %v", string(data)) } return nil } // interfaceNameMapping dumps from VPP internal names of interfaces and uses them to produce mapping from ligato interface names to vpp internal names. func (h *SRv6VppHandler) interfaceNameMapping() (map[string]string, error) { mapping := make(map[string]string) reqCtx := h.callsChannel.SendMultiRequest(&vpp_ifs.SwInterfaceDump{}) for { // get next interface info ifDetails := &vpp_ifs.SwInterfaceDetails{} stop, err := reqCtx.ReceiveReply(ifDetails) if stop { break // Break from the loop. 
} if err != nil { return nil, fmt.Errorf("failed to dump interface: %v", err) } // extract and compute names ligatoName := strings.TrimRight(ifDetails.Tag, "\x00") vppInternalName := strings.TrimRight(ifDetails.InterfaceName, "\x00") if ifDetails.SupSwIfIndex == uint32(ifDetails.SwIfIndex) && // no subinterface (subinterface are not DPDK) guessInterfaceType(strings.TrimRight(ifDetails.InterfaceName, "\x00")) == ifs.Interface_DPDK { // fill name for physical interfaces (they are mostly without tag) ligatoName = vppInternalName } mapping[ligatoName] = vppInternalName } return mapping, nil } func (h *SRv6VppHandler) installationVrfID(localSID *srv6.LocalSID) string { if localSID != nil { return string(localSID.InstallationVrfId) } return "<nil>" } func (h *SRv6VppHandler) endFunction(localSID *srv6.LocalSID) string { switch ef := localSID.GetEndFunction().(type) { case *srv6.LocalSID_BaseEndFunction: return fmt.Sprintf("End{psp: %v}", ef.BaseEndFunction.Psp) case *srv6.LocalSID_EndFunction_X: return fmt.Sprintf("X{psp: %v, OutgoingInterface: %v, NextHop: %v}", ef.EndFunction_X.Psp, ef.EndFunction_X.OutgoingInterface, ef.EndFunction_X.NextHop) case *srv6.LocalSID_EndFunction_T: return fmt.Sprintf("T{psp: %v, vrf: %v}", ef.EndFunction_T.Psp, ef.EndFunction_T.VrfId) case *srv6.LocalSID_EndFunction_DX2: return fmt.Sprintf("DX2{VlanTag: %v, OutgoingInterface: %v}", ef.EndFunction_DX2.VlanTag, ef.EndFunction_DX2.OutgoingInterface) case *srv6.LocalSID_EndFunction_DX4: return fmt.Sprintf("DX4{OutgoingInterface: %v, NextHop: %v}", ef.EndFunction_DX4.OutgoingInterface, ef.EndFunction_DX4.NextHop) case *srv6.LocalSID_EndFunction_DX6: return fmt.Sprintf("DX6{OutgoingInterface: %v, NextHop: %v}", ef.EndFunction_DX6.OutgoingInterface, ef.EndFunction_DX6.NextHop) case *srv6.LocalSID_EndFunction_DT4: return fmt.Sprintf("DT4{vrf: %v}", ef.EndFunction_DT4.VrfId) case *srv6.LocalSID_EndFunction_DT6: return fmt.Sprintf("DT6{vrf: %v}", ef.EndFunction_DT6.VrfId) case 
*srv6.LocalSID_EndFunction_AD: return fmt.Sprintf("AD{L3ServiceAddress: %v, OutgoingInterface: %v, IncomingInterface: %v}", ef.EndFunction_AD.L3ServiceAddress, ef.EndFunction_AD.OutgoingInterface, ef.EndFunction_AD.IncomingInterface) case nil: return "<nil>" default: return "unknown end function" } } func (h *SRv6VppHandler) writeEndFunction(req *vpp_sr.SrLocalsidAddDel, localSID *srv6.LocalSID) error { switch ef := localSID.EndFunction.(type) { case *srv6.LocalSID_BaseEndFunction: req.Behavior = BehaviorEnd req.EndPsp = boolToUint(ef.BaseEndFunction.Psp) case *srv6.LocalSID_EndFunction_X: req.Behavior = BehaviorX req.EndPsp = boolToUint(ef.EndFunction_X.Psp) ifMeta, exists := h.ifIndexes.LookupByName(ef.EndFunction_X.OutgoingInterface) if !exists { return fmt.Errorf("for interface %v doesn't exist sw index", ef.EndFunction_X.OutgoingInterface) } req.SwIfIndex = ifMeta.SwIfIndex nhAddr, err := parseIPv6(ef.EndFunction_X.NextHop) // parses also ipv4 addresses but into ipv6 address form if err != nil { return err } if nhAddr4 := nhAddr.To4(); nhAddr4 != nil { // ipv4 address in ipv6 address form? 
req.NhAddr4 = nhAddr4 } else { req.NhAddr6 = []byte(nhAddr) } case *srv6.LocalSID_EndFunction_T: req.Behavior = BehaviorT req.EndPsp = boolToUint(ef.EndFunction_T.Psp) req.SwIfIndex = ef.EndFunction_T.VrfId case *srv6.LocalSID_EndFunction_DX2: req.Behavior = BehaviorDX2 req.VlanIndex = ef.EndFunction_DX2.VlanTag ifMeta, exists := h.ifIndexes.LookupByName(ef.EndFunction_DX2.OutgoingInterface) if !exists { return fmt.Errorf("for interface %v doesn't exist sw index", ef.EndFunction_DX2.OutgoingInterface) } req.SwIfIndex = ifMeta.SwIfIndex case *srv6.LocalSID_EndFunction_DX4: req.Behavior = BehaviorDX4 ifMeta, exists := h.ifIndexes.LookupByName(ef.EndFunction_DX4.OutgoingInterface) if !exists { return fmt.Errorf("for interface %v doesn't exist sw index", ef.EndFunction_DX4.OutgoingInterface) } req.SwIfIndex = ifMeta.SwIfIndex nhAddr, err := parseIPv6(ef.EndFunction_DX4.NextHop) // parses also IPv4 if err != nil { return err } nhAddr4 := nhAddr.To4() if nhAddr4 == nil { return fmt.Errorf("next hop of DX4 end function (%v) is not valid IPv4 address", ef.EndFunction_DX4.NextHop) } req.NhAddr4 = []byte(nhAddr4) case *srv6.LocalSID_EndFunction_DX6: req.Behavior = BehaviorDX6 ifMeta, exists := h.ifIndexes.LookupByName(ef.EndFunction_DX6.OutgoingInterface) if !exists { return fmt.Errorf("for interface %v doesn't exist sw index", ef.EndFunction_DX6.OutgoingInterface) } req.SwIfIndex = ifMeta.SwIfIndex nhAddr6, err := parseIPv6(ef.EndFunction_DX6.NextHop) if err != nil { return err } req.NhAddr6 = []byte(nhAddr6) case *srv6.LocalSID_EndFunction_DT4: req.Behavior = BehaviorDT4 req.SwIfIndex = ef.EndFunction_DT4.VrfId case *srv6.LocalSID_EndFunction_DT6: req.Behavior = BehaviorDT6 req.SwIfIndex = ef.EndFunction_DT6.VrfId case nil: return fmt.Errorf("End function not set. 
Please configure end function for local SID %v ", localSID.GetSid()) default: return fmt.Errorf("unknown end function (model link type %T)", ef) // EndFunction_AD is handled elsewhere } return nil } // SetEncapsSourceAddress sets for SRv6 in VPP the source address used for encapsulated packet func (h *SRv6VppHandler) SetEncapsSourceAddress(address string) error { h.log.Debugf("Configuring encapsulation source address to address %v", address) ipAddress, err := parseIPv6(address) if err != nil { return err } req := &vpp_sr.SrSetEncapSource{ EncapsSource: []byte(ipAddress), } reply := &vpp_sr.SrSetEncapSourceReply{} if err := h.callsChannel.SendRequest(req).ReceiveReply(reply); err != nil { return err } if reply.Retval != 0 { return fmt.Errorf("vpp call %q returned: %d", reply.GetMessageName(), reply.Retval) } h.log.WithFields(logging.Fields{"Encapsulation source address": address}). Debug("Encapsulation source address configured.") return nil } // AddPolicy adds SRv6 policy <policy> into VPP (including all policy's segment lists). 
func (h *SRv6VppHandler) AddPolicy(policy *srv6.Policy) error { if err := h.addBasePolicyWithFirstSegmentList(policy); err != nil { return fmt.Errorf("can't create Policy with first segment list (Policy: %+v): %v", policy, err) } if err := h.addOtherSegmentLists(policy); err != nil { return fmt.Errorf("can't add all segment lists to created policy %+v: %v", policy, err) } return nil } func (h *SRv6VppHandler) addBasePolicyWithFirstSegmentList(policy *srv6.Policy) error { h.log.Debugf("Adding SR policy %+v", policy) bindingSid, err := parseIPv6(policy.GetBsid()) // already validated if err != nil { return fmt.Errorf("binding sid address %s is not IPv6 address: %v", policy.GetBsid(), err) // calls from descriptor are already validated } if len(policy.SegmentLists) == 0 { return fmt.Errorf("policy must have defined at least one segment list (Policy: %+v)", policy) // calls from descriptor are already validated } sids, err := h.convertPolicySegment(policy.SegmentLists[0]) if err != nil { return err } // Note: Weight in sr.SrPolicyAdd is leftover from API changes that moved weight into sr.Srv6SidList (it is weight of sid list not of the whole policy) req := &vpp_sr.SrPolicyAdd{ BsidAddr: []byte(bindingSid), Sids: *sids, IsEncap: boolToUint(policy.SrhEncapsulation), Type: boolToUint(policy.SprayBehaviour), FibTable: policy.InstallationVrfId, } reply := &vpp_sr.SrPolicyAddReply{} if err := h.callsChannel.SendRequest(req).ReceiveReply(reply); err != nil { return err } if reply.Retval != 0 { return fmt.Errorf("vpp call %q returned: %d", reply.GetMessageName(), reply.Retval) } h.log.WithFields(logging.Fields{"binding SID": bindingSid, "list of next SIDs": policy.SegmentLists[0].Segments}). 
		Debug("base SR policy (policy with just one segment list) added")
	return nil
}

// addOtherSegmentLists adds to the already-created policy every segment list of
// <policy> except the first one (the first list is installed together with the
// base policy in addBasePolicyWithFirstSegmentList).
func (h *SRv6VppHandler) addOtherSegmentLists(policy *srv6.Policy) error {
	for _, sl := range policy.SegmentLists[1:] {
		if err := h.AddPolicySegmentList(sl, policy); err != nil {
			return fmt.Errorf("failed to add policy segment %+v: %v", sl, err)
		}
	}
	return nil
}

// DeletePolicy deletes SRv6 policy given by binding SID <bindingSid>
func (h *SRv6VppHandler) DeletePolicy(bindingSid net.IP) error {
	h.log.Debugf("Deleting SR policy with binding SID %v ", bindingSid)
	req := &vpp_sr.SrPolicyDel{
		BsidAddr: vpp_sr.Srv6Sid{Addr: []byte(bindingSid)}, // TODO add ability to define policy also by index (SrPolicyIndex)
	}
	reply := &vpp_sr.SrPolicyDelReply{}
	if err := h.callsChannel.SendRequest(req).ReceiveReply(reply); err != nil {
		return err
	}
	if reply.Retval != 0 {
		// non-zero Retval means VPP rejected the request
		return fmt.Errorf("vpp call %q returned: %d", reply.GetMessageName(), reply.Retval)
	}
	h.log.WithFields(logging.Fields{"binding SID": bindingSid}).Debug("SR policy deleted")
	return nil
}

// AddPolicySegmentList adds segment list <segmentList> to SRv6 policy <policy> in VPP
func (h *SRv6VppHandler) AddPolicySegmentList(segmentList *srv6.Policy_SegmentList, policy *srv6.Policy) error {
	h.log.Debugf("Adding segment %+v to SR policy %+v", segmentList, policy)
	err := h.modPolicy(AddSRList, policy, segmentList, 0)
	if err == nil {
		h.log.WithFields(logging.Fields{"binding SID": policy.Bsid, "list of next SIDs": segmentList.Segments}).
Debug("SR policy modified(added another segment list)") } return err } // DeletePolicySegmentList removes segment list <segmentList> (with VPP-internal index <segmentVPPIndex>) from SRv6 policy <policy> in VPP func (h *SRv6VppHandler) DeletePolicySegmentList(segmentList *srv6.Policy_SegmentList, segmentVPPIndex uint32, policy *srv6.Policy) error { h.log.Debugf("Removing segment %+v (vpp-internal index %v) from SR policy %+v", segmentList, segmentVPPIndex, policy) err := h.modPolicy(DeleteSRList, policy, segmentList, segmentVPPIndex) if err == nil { h.log.WithFields(logging.Fields{"binding SID": policy.Bsid, "list of next SIDs": segmentList.Segments, "segmentListIndex": segmentVPPIndex}). Debug("SR policy modified(removed segment list)") } return err } func (h *SRv6VppHandler) modPolicy(operation uint8, policy *srv6.Policy, segmentList *srv6.Policy_SegmentList, segmentListIndex uint32) error { bindingSid, err := parseIPv6(policy.GetBsid()) if err != nil { return fmt.Errorf("binding sid address %s is not IPv6 address: %v", policy.GetBsid(), err) // calls from descriptor are already validated } sids, err := h.convertPolicySegment(segmentList) if err != nil { return err } // Note: Weight in sr.SrPolicyMod is leftover from API changes that moved weight into sr.Srv6SidList (it is weight of sid list not of the whole policy) req := &vpp_sr.SrPolicyMod{ BsidAddr: []byte(bindingSid), // TODO add ability to define policy also by index (SrPolicyIndex) Operation: operation, Sids: *sids, FibTable: policy.InstallationVrfId, } if operation == DeleteSRList || operation == ModifyWeightOfSRList { req.SlIndex = segmentListIndex } reply := &vpp_sr.SrPolicyModReply{} if err := h.callsChannel.SendRequest(req).ReceiveReply(reply); err != nil { return err } if reply.Retval != 0 { return fmt.Errorf("vpp call %q returned: %d", reply.GetMessageName(), reply.Retval) } return nil } func (h *SRv6VppHandler) convertPolicySegment(segmentList *srv6.Policy_SegmentList) (*vpp_sr.Srv6SidList, error) { 
var segments []vpp_sr.Srv6Sid for _, sid := range segmentList.Segments { // parse to IPv6 address parserSid, err := parseIPv6(sid) if err != nil { return nil, err } // add sid to segment list ipv6Segment := vpp_sr.Srv6Sid{ Addr: make([]byte, 16), // sr.Srv6Sid.Addr = [16]byte } copy(ipv6Segment.Addr, parserSid) segments = append(segments, ipv6Segment) } return &vpp_sr.Srv6SidList{ NumSids: uint8(len(segments)), Sids: segments, Weight: segmentList.Weight, }, nil } // RetrievePolicyIndexInfo retrieves index of policy <policy> and its segment lists func (h *SRv6VppHandler) RetrievePolicyIndexInfo(policy *srv6.Policy) (policyIndex uint32, segmentListIndexes map[*srv6.Policy_SegmentList]uint32, err error) { // dump sr policies using VPP CLI data, err := h.RunCli("sh sr policies") if err != nil { return ^uint32(0), nil, fmt.Errorf("can't dump index data from VPP: %v", err) } // do necessary parsing to extract index of segment list dumpStr := strings.ToLower(string(data)) segmentListIndexes = make(map[*srv6.Policy_SegmentList]uint32) for _, policyStr := range strings.Split(dumpStr, "-----------") { policyHeader := regexp.MustCompile(fmt.Sprintf("\\[(\\d+)\\]\\.-\\s+bsid:\\s*%s", strings.ToLower(strings.TrimSpace(policy.GetBsid())))) if policyMatch := policyHeader.FindStringSubmatch(policyStr); policyMatch != nil { parsed, err := strconv.ParseUint(policyMatch[1], 10, 32) if err != nil { return ^uint32(0), nil, fmt.Errorf("can't parse policy index %q (dump: %s)", policyMatch[1], dumpStr) } policyIndex = uint32(parsed) for _, sl := range policy.SegmentLists { slRE := regexp.MustCompile(fmt.Sprintf("\\[(\\d+)\\].- < %s,[^:>]*> weight: %d", strings.ToLower(strings.Join(sl.Segments, ", ")), sl.Weight)) if slMatch := slRE.FindStringSubmatch(policyStr); slMatch != nil { parsed, err := strconv.ParseUint(slMatch[1], 10, 32) if err != nil { return ^uint32(0), nil, fmt.Errorf("can't parse segment policy index %q (dump: %s)", slMatch[1], dumpStr) } segmentListIndexes[sl] = 
uint32(parsed) continue } return ^uint32(0), nil, fmt.Errorf("can't find index for segment list %+v (policy bsid %v) in dump %q", sl, policy.GetBsid(), dumpStr) } return policyIndex, segmentListIndexes, nil } } return ^uint32(0), nil, fmt.Errorf("can't find index for policy with bsid %v in dump %q", policy.GetBsid(), dumpStr) } // AddSteering sets in VPP steering into SRv6 policy. func (h *SRv6VppHandler) AddSteering(steering *srv6.Steering) error { return h.addDelSteering(false, steering) } // RemoveSteering removes in VPP steering into SRv6 policy. func (h *SRv6VppHandler) RemoveSteering(steering *srv6.Steering) error { return h.addDelSteering(true, steering) } func (h *SRv6VppHandler) addDelSteering(delete bool, steering *srv6.Steering) error { // defining operation strings for logging operationProgressing, operationFinished := "Adding", "Added" if delete { operationProgressing, operationFinished = "Removing", "Removed" } // logging info about operation with steering switch t := steering.Traffic.(type) { case *srv6.Steering_L3Traffic_: h.log.Debugf("%v steering for l3 traffic with destination %v to SR policy (binding SID %v, policy index %v)", operationProgressing, t.L3Traffic.PrefixAddress, steering.GetPolicyBsid(), steering.GetPolicyIndex()) case *srv6.Steering_L2Traffic_: h.log.Debugf("%v steering for l2 traffic from interface %v to SR policy (binding SID %v, policy index %v)", operationProgressing, t.L2Traffic.InterfaceName, steering.GetPolicyBsid(), steering.GetPolicyIndex()) } // converting policy reference var policyBSIDAddr []byte // undefined reference var policyIndex = uint32(0) // undefined reference switch ref := steering.PolicyRef.(type) { case *srv6.Steering_PolicyBsid: bsid, err := parseIPv6(ref.PolicyBsid) if err != nil { return fmt.Errorf("can't parse binding SID %q to IP address: %v ", ref.PolicyBsid, err) } policyBSIDAddr = []byte(bsid) case *srv6.Steering_PolicyIndex: policyIndex = ref.PolicyIndex case nil: return fmt.Errorf("policy reference 
must be provided") default: return fmt.Errorf("unknown policy reference type (link type %+v)", ref) } // converting target traffic info var prefixAddr []byte steerType := SteerTypeIPv6 tableID := uint32(0) maskWidth := uint32(0) intIndex := uint32(0) switch t := steering.Traffic.(type) { case *srv6.Steering_L3Traffic_: ip, ipnet, err := net.ParseCIDR(t.L3Traffic.PrefixAddress) if err != nil { return fmt.Errorf("can't parse ip prefix %q: %v", t.L3Traffic.PrefixAddress, err) } if ip.To4() != nil { // IPv4 address steerType = SteerTypeIPv4 } tableID = t.L3Traffic.InstallationVrfId prefixAddr = []byte(ip.To16()) ms, _ := ipnet.Mask.Size() maskWidth = uint32(ms) case *srv6.Steering_L2Traffic_: steerType = SteerTypeL2 ifMeta, exists := h.ifIndexes.LookupByName(t.L2Traffic.InterfaceName) if !exists { return fmt.Errorf("for interface %v doesn't exist sw index", t.L2Traffic.InterfaceName) } intIndex = ifMeta.SwIfIndex case nil: return fmt.Errorf("traffic type must be provided") default: return fmt.Errorf("unknown traffic type (link type %+v)", t) } req := &vpp_sr.SrSteeringAddDel{ IsDel: boolToUint(delete), TableID: tableID, BsidAddr: policyBSIDAddr, // policy (to which we want to steer routing into) identified by policy binding sid (alternativelly it can be used policy index) SrPolicyIndex: policyIndex, // policy (to which we want to steer routing into) identified by policy index (alternativelly it can be used policy binding sid) TrafficType: steerType, // type of traffic to steer PrefixAddr: prefixAddr, // destination prefix address (L3 traffic type only) MaskWidth: maskWidth, // destination ip prefix mask (L3 traffic type only) SwIfIndex: intIndex, // incoming interface (L2 traffic type only) } reply := &vpp_sr.SrSteeringAddDelReply{} if err := h.callsChannel.SendRequest(req).ReceiveReply(reply); err != nil { return err } if reply.Retval != 0 { return fmt.Errorf("vpp call %q returned: %d", reply.GetMessageName(), reply.Retval) } h.log.WithFields(logging.Fields{"steer 
type": steerType, "L3 prefix address bytes": prefixAddr, "L2 interface index": intIndex, "policy binding SID": policyBSIDAddr, "policy index": policyIndex}).
		Debugf("%v steering to SR policy ", operationFinished)
	return nil
}

// boolToUint converts a bool to the 0/1 uint8 representation expected by the
// VPP binary API.
func boolToUint(input bool) uint8 {
	if input {
		return uint8(1)
	}
	return uint8(0)
}

// parseIPv6 parses string <str> to IPv6 address (including IPv4 address converted to IPv6 address)
func parseIPv6(str string) (net.IP, error) {
	ip := net.ParseIP(str)
	if ip == nil {
		// Fix: error strings previously started with an accidental leading
		// space (" %q is not ip address"); Go convention is no leading
		// whitespace in error strings.
		return nil, fmt.Errorf("%q is not ip address", str)
	}
	ipv6 := ip.To16()
	if ipv6 == nil {
		// defensive branch kept: To16 returns nil only for malformed lengths
		return nil, fmt.Errorf("%q is not ipv6 address", str)
	}
	return ipv6, nil
}

// guessInterfaceType attempts to guess the correct interface type from its internal name (as given by VPP).
// This is required mainly for those interface types, that do not provide dump binary API,
// such as loopback of af_packet.
func guessInterfaceType(ifName string) ifs.Interface_Type {
	switch {
	case strings.HasPrefix(ifName, "loop"),
		strings.HasPrefix(ifName, "local"):
		return ifs.Interface_SOFTWARE_LOOPBACK
	case strings.HasPrefix(ifName, "memif"):
		return ifs.Interface_MEMIF
	case strings.HasPrefix(ifName, "tap"):
		return ifs.Interface_TAP
	case strings.HasPrefix(ifName, "host"):
		return ifs.Interface_AF_PACKET
	case strings.HasPrefix(ifName, "vxlan"):
		return ifs.Interface_VXLAN_TUNNEL
	case strings.HasPrefix(ifName, "ipsec"):
		return ifs.Interface_IPSEC_TUNNEL
	case strings.HasPrefix(ifName, "vmxnet3"):
		return ifs.Interface_VMXNET3_INTERFACE
	default:
		return ifs.Interface_DPDK
	}
}
apache-2.0
lympid/lympid-core
src/main/java/com/lympid/core/behaviorstatemachines/listener/AllStateListener.java
1136
/*
 * Copyright 2015 Fabien Renaud.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.lympid.core.behaviorstatemachines.listener;

/**
 * Aggregate listener interface combining all state-related listener contracts:
 * activity execution (before/after/exception) and the enter and exit phases
 * (plain, before/after-execution and exception variants). Implementing this
 * single interface lets one object receive every state listener callback.
 *
 * @param <C> the context type shared by all extended listener interfaces
 *            (presumably the state machine execution context — confirm against
 *            the sibling listener definitions)
 *
 * @author Fabien Renaud
 */
public interface AllStateListener<C> extends
  StateActivityBeforeExecutionListener<C>,
  StateActivityAfterExecutionListener<C>,
  StateActivityExceptionListener<C>,
  StateEnterListener<C>,
  StateEnterBeforeExecutionListener<C>,
  StateEnterAfterExecutionListener<C>,
  StateEnterExceptionListener<C>,
  StateExitListener<C>,
  StateExitBeforeExecutionListener<C>,
  StateExitAfterExecutionListener<C>,
  StateExitExceptionListener<C> {

}
apache-2.0
frankbu/istio
vendor/github.com/envoyproxy/go-control-plane/envoy/config/filter/accesslog/v2/accesslog.pb.go
69191
// Code generated by protoc-gen-gogo. DO NOT EDIT. // source: envoy/config/filter/accesslog/v2/accesslog.proto /* Package v2 is a generated protocol buffer package. It is generated from these files: envoy/config/filter/accesslog/v2/accesslog.proto It has these top-level messages: AccessLog AccessLogFilter ComparisonFilter StatusCodeFilter DurationFilter NotHealthCheckFilter TraceableFilter RuntimeFilter AndFilter OrFilter HeaderFilter */ package v2 import proto "github.com/gogo/protobuf/proto" import fmt "fmt" import math "math" import envoy_api_v2_core "github.com/envoyproxy/go-control-plane/envoy/api/v2/core" import envoy_api_v2_route "github.com/envoyproxy/go-control-plane/envoy/api/v2/route" import envoy_type1 "github.com/envoyproxy/go-control-plane/envoy/type" import google_protobuf "github.com/gogo/protobuf/types" import _ "github.com/lyft/protoc-gen-validate/validate" import io "io" // Reference imports to suppress errors if they are not otherwise used. var _ = proto.Marshal var _ = fmt.Errorf var _ = math.Inf // This is a compile-time assertion to ensure that this generated file // is compatible with the proto package it is being compiled against. // A compilation error at this line likely means your copy of the // proto package needs to be updated. 
const _ = proto.GoGoProtoPackageIsVersion2 // please upgrade the proto package type ComparisonFilter_Op int32 const ( // = ComparisonFilter_EQ ComparisonFilter_Op = 0 // >= ComparisonFilter_GE ComparisonFilter_Op = 1 // <= ComparisonFilter_LE ComparisonFilter_Op = 2 ) var ComparisonFilter_Op_name = map[int32]string{ 0: "EQ", 1: "GE", 2: "LE", } var ComparisonFilter_Op_value = map[string]int32{ "EQ": 0, "GE": 1, "LE": 2, } func (x ComparisonFilter_Op) String() string { return proto.EnumName(ComparisonFilter_Op_name, int32(x)) } func (ComparisonFilter_Op) EnumDescriptor() ([]byte, []int) { return fileDescriptorAccesslog, []int{2, 0} } type AccessLog struct { // The name of the access log implementation to instantiate. The name must // match a statically registered access log. Current built-in loggers include: // // #. "envoy.file_access_log" // #. "envoy.http_grpc_access_log" Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` // Filter which is used to determine if the access log needs to be written. Filter *AccessLogFilter `protobuf:"bytes,2,opt,name=filter" json:"filter,omitempty"` // Custom configuration that depends on the access log being instantiated. Built-in configurations // include: // // #. "envoy.file_access_log": :ref:`FileAccessLog // <envoy_api_msg_config.accesslog.v2.FileAccessLog>` // #. 
"envoy.http_grpc_access_log": :ref:`HttpGrpcAccessLogConfig // <envoy_api_msg_config.accesslog.v2.HttpGrpcAccessLogConfig>` Config *google_protobuf.Struct `protobuf:"bytes,3,opt,name=config" json:"config,omitempty"` } func (m *AccessLog) Reset() { *m = AccessLog{} } func (m *AccessLog) String() string { return proto.CompactTextString(m) } func (*AccessLog) ProtoMessage() {} func (*AccessLog) Descriptor() ([]byte, []int) { return fileDescriptorAccesslog, []int{0} } func (m *AccessLog) GetName() string { if m != nil { return m.Name } return "" } func (m *AccessLog) GetFilter() *AccessLogFilter { if m != nil { return m.Filter } return nil } func (m *AccessLog) GetConfig() *google_protobuf.Struct { if m != nil { return m.Config } return nil } type AccessLogFilter struct { // Types that are valid to be assigned to FilterSpecifier: // *AccessLogFilter_StatusCodeFilter // *AccessLogFilter_DurationFilter // *AccessLogFilter_NotHealthCheckFilter // *AccessLogFilter_TraceableFilter // *AccessLogFilter_RuntimeFilter // *AccessLogFilter_AndFilter // *AccessLogFilter_OrFilter // *AccessLogFilter_HeaderFilter FilterSpecifier isAccessLogFilter_FilterSpecifier `protobuf_oneof:"filter_specifier"` } func (m *AccessLogFilter) Reset() { *m = AccessLogFilter{} } func (m *AccessLogFilter) String() string { return proto.CompactTextString(m) } func (*AccessLogFilter) ProtoMessage() {} func (*AccessLogFilter) Descriptor() ([]byte, []int) { return fileDescriptorAccesslog, []int{1} } type isAccessLogFilter_FilterSpecifier interface { isAccessLogFilter_FilterSpecifier() MarshalTo([]byte) (int, error) Size() int } type AccessLogFilter_StatusCodeFilter struct { StatusCodeFilter *StatusCodeFilter `protobuf:"bytes,1,opt,name=status_code_filter,json=statusCodeFilter,oneof"` } type AccessLogFilter_DurationFilter struct { DurationFilter *DurationFilter `protobuf:"bytes,2,opt,name=duration_filter,json=durationFilter,oneof"` } type AccessLogFilter_NotHealthCheckFilter struct { NotHealthCheckFilter 
*NotHealthCheckFilter `protobuf:"bytes,3,opt,name=not_health_check_filter,json=notHealthCheckFilter,oneof"` } type AccessLogFilter_TraceableFilter struct { TraceableFilter *TraceableFilter `protobuf:"bytes,4,opt,name=traceable_filter,json=traceableFilter,oneof"` } type AccessLogFilter_RuntimeFilter struct { RuntimeFilter *RuntimeFilter `protobuf:"bytes,5,opt,name=runtime_filter,json=runtimeFilter,oneof"` } type AccessLogFilter_AndFilter struct { AndFilter *AndFilter `protobuf:"bytes,6,opt,name=and_filter,json=andFilter,oneof"` } type AccessLogFilter_OrFilter struct { OrFilter *OrFilter `protobuf:"bytes,7,opt,name=or_filter,json=orFilter,oneof"` } type AccessLogFilter_HeaderFilter struct { HeaderFilter *HeaderFilter `protobuf:"bytes,8,opt,name=header_filter,json=headerFilter,oneof"` } func (*AccessLogFilter_StatusCodeFilter) isAccessLogFilter_FilterSpecifier() {} func (*AccessLogFilter_DurationFilter) isAccessLogFilter_FilterSpecifier() {} func (*AccessLogFilter_NotHealthCheckFilter) isAccessLogFilter_FilterSpecifier() {} func (*AccessLogFilter_TraceableFilter) isAccessLogFilter_FilterSpecifier() {} func (*AccessLogFilter_RuntimeFilter) isAccessLogFilter_FilterSpecifier() {} func (*AccessLogFilter_AndFilter) isAccessLogFilter_FilterSpecifier() {} func (*AccessLogFilter_OrFilter) isAccessLogFilter_FilterSpecifier() {} func (*AccessLogFilter_HeaderFilter) isAccessLogFilter_FilterSpecifier() {} func (m *AccessLogFilter) GetFilterSpecifier() isAccessLogFilter_FilterSpecifier { if m != nil { return m.FilterSpecifier } return nil } func (m *AccessLogFilter) GetStatusCodeFilter() *StatusCodeFilter { if x, ok := m.GetFilterSpecifier().(*AccessLogFilter_StatusCodeFilter); ok { return x.StatusCodeFilter } return nil } func (m *AccessLogFilter) GetDurationFilter() *DurationFilter { if x, ok := m.GetFilterSpecifier().(*AccessLogFilter_DurationFilter); ok { return x.DurationFilter } return nil } func (m *AccessLogFilter) GetNotHealthCheckFilter() *NotHealthCheckFilter { if x, ok 
:= m.GetFilterSpecifier().(*AccessLogFilter_NotHealthCheckFilter); ok { return x.NotHealthCheckFilter } return nil } func (m *AccessLogFilter) GetTraceableFilter() *TraceableFilter { if x, ok := m.GetFilterSpecifier().(*AccessLogFilter_TraceableFilter); ok { return x.TraceableFilter } return nil } func (m *AccessLogFilter) GetRuntimeFilter() *RuntimeFilter { if x, ok := m.GetFilterSpecifier().(*AccessLogFilter_RuntimeFilter); ok { return x.RuntimeFilter } return nil } func (m *AccessLogFilter) GetAndFilter() *AndFilter { if x, ok := m.GetFilterSpecifier().(*AccessLogFilter_AndFilter); ok { return x.AndFilter } return nil } func (m *AccessLogFilter) GetOrFilter() *OrFilter { if x, ok := m.GetFilterSpecifier().(*AccessLogFilter_OrFilter); ok { return x.OrFilter } return nil } func (m *AccessLogFilter) GetHeaderFilter() *HeaderFilter { if x, ok := m.GetFilterSpecifier().(*AccessLogFilter_HeaderFilter); ok { return x.HeaderFilter } return nil } // XXX_OneofFuncs is for the internal use of the proto package. 
// NOTE(review): machine-generated gogo/protobuf code — comments only.
// XXX_OneofFuncs hands the proto runtime the marshaler, unmarshaler, sizer and
// the list of concrete oneof wrapper types for AccessLogFilter.
func (*AccessLogFilter) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) {
	return _AccessLogFilter_OneofMarshaler, _AccessLogFilter_OneofUnmarshaler, _AccessLogFilter_OneofSizer, []interface{}{
		(*AccessLogFilter_StatusCodeFilter)(nil),
		(*AccessLogFilter_DurationFilter)(nil),
		(*AccessLogFilter_NotHealthCheckFilter)(nil),
		(*AccessLogFilter_TraceableFilter)(nil),
		(*AccessLogFilter_RuntimeFilter)(nil),
		(*AccessLogFilter_AndFilter)(nil),
		(*AccessLogFilter_OrFilter)(nil),
		(*AccessLogFilter_HeaderFilter)(nil),
	}
}

// _AccessLogFilter_OneofMarshaler encodes whichever oneof case is set as a
// length-delimited field (tag N<<3|WireBytes for field numbers 1-8).
func _AccessLogFilter_OneofMarshaler(msg proto.Message, b *proto.Buffer) error {
	m := msg.(*AccessLogFilter)
	// filter_specifier
	switch x := m.FilterSpecifier.(type) {
	case *AccessLogFilter_StatusCodeFilter:
		_ = b.EncodeVarint(1<<3 | proto.WireBytes)
		if err := b.EncodeMessage(x.StatusCodeFilter); err != nil {
			return err
		}
	case *AccessLogFilter_DurationFilter:
		_ = b.EncodeVarint(2<<3 | proto.WireBytes)
		if err := b.EncodeMessage(x.DurationFilter); err != nil {
			return err
		}
	case *AccessLogFilter_NotHealthCheckFilter:
		_ = b.EncodeVarint(3<<3 | proto.WireBytes)
		if err := b.EncodeMessage(x.NotHealthCheckFilter); err != nil {
			return err
		}
	case *AccessLogFilter_TraceableFilter:
		_ = b.EncodeVarint(4<<3 | proto.WireBytes)
		if err := b.EncodeMessage(x.TraceableFilter); err != nil {
			return err
		}
	case *AccessLogFilter_RuntimeFilter:
		_ = b.EncodeVarint(5<<3 | proto.WireBytes)
		if err := b.EncodeMessage(x.RuntimeFilter); err != nil {
			return err
		}
	case *AccessLogFilter_AndFilter:
		_ = b.EncodeVarint(6<<3 | proto.WireBytes)
		if err := b.EncodeMessage(x.AndFilter); err != nil {
			return err
		}
	case *AccessLogFilter_OrFilter:
		_ = b.EncodeVarint(7<<3 | proto.WireBytes)
		if err := b.EncodeMessage(x.OrFilter); err != nil {
			return err
		}
	case *AccessLogFilter_HeaderFilter:
		_ = b.EncodeVarint(8<<3 | proto.WireBytes)
		if err := b.EncodeMessage(x.HeaderFilter); err != nil {
			return err
		}
	case nil:
	default:
		return fmt.Errorf("AccessLogFilter.FilterSpecifier has unexpected type %T", x)
	}
	return nil
}

// _AccessLogFilter_OneofUnmarshaler decodes field tags 1-8 into the matching
// oneof wrapper; returns (false, nil) for tags it does not own.
func _AccessLogFilter_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) {
	m := msg.(*AccessLogFilter)
	switch tag {
	case 1: // filter_specifier.status_code_filter
		if wire != proto.WireBytes {
			return true, proto.ErrInternalBadWireType
		}
		msg := new(StatusCodeFilter)
		err := b.DecodeMessage(msg)
		m.FilterSpecifier = &AccessLogFilter_StatusCodeFilter{msg}
		return true, err
	case 2: // filter_specifier.duration_filter
		if wire != proto.WireBytes {
			return true, proto.ErrInternalBadWireType
		}
		msg := new(DurationFilter)
		err := b.DecodeMessage(msg)
		m.FilterSpecifier = &AccessLogFilter_DurationFilter{msg}
		return true, err
	case 3: // filter_specifier.not_health_check_filter
		if wire != proto.WireBytes {
			return true, proto.ErrInternalBadWireType
		}
		msg := new(NotHealthCheckFilter)
		err := b.DecodeMessage(msg)
		m.FilterSpecifier = &AccessLogFilter_NotHealthCheckFilter{msg}
		return true, err
	case 4: // filter_specifier.traceable_filter
		if wire != proto.WireBytes {
			return true, proto.ErrInternalBadWireType
		}
		msg := new(TraceableFilter)
		err := b.DecodeMessage(msg)
		m.FilterSpecifier = &AccessLogFilter_TraceableFilter{msg}
		return true, err
	case 5: // filter_specifier.runtime_filter
		if wire != proto.WireBytes {
			return true, proto.ErrInternalBadWireType
		}
		msg := new(RuntimeFilter)
		err := b.DecodeMessage(msg)
		m.FilterSpecifier = &AccessLogFilter_RuntimeFilter{msg}
		return true, err
	case 6: // filter_specifier.and_filter
		if wire != proto.WireBytes {
			return true, proto.ErrInternalBadWireType
		}
		msg := new(AndFilter)
		err := b.DecodeMessage(msg)
		m.FilterSpecifier = &AccessLogFilter_AndFilter{msg}
		return true, err
	case 7: // filter_specifier.or_filter
		if wire != proto.WireBytes {
			return true, proto.ErrInternalBadWireType
		}
		msg := new(OrFilter)
		err := b.DecodeMessage(msg)
		m.FilterSpecifier = &AccessLogFilter_OrFilter{msg}
		return true, err
	case 8: // filter_specifier.header_filter
		if wire != proto.WireBytes {
			return true, proto.ErrInternalBadWireType
		}
		msg := new(HeaderFilter)
		err := b.DecodeMessage(msg)
		m.FilterSpecifier = &AccessLogFilter_HeaderFilter{msg}
		return true, err
	default:
		return false, nil
	}
}

// _AccessLogFilter_OneofSizer returns the encoded size (tag + length prefix +
// payload) of whichever oneof case is set.
func _AccessLogFilter_OneofSizer(msg proto.Message) (n int) {
	m := msg.(*AccessLogFilter)
	// filter_specifier
	switch x := m.FilterSpecifier.(type) {
	case *AccessLogFilter_StatusCodeFilter:
		s := proto.Size(x.StatusCodeFilter)
		n += proto.SizeVarint(1<<3 | proto.WireBytes)
		n += proto.SizeVarint(uint64(s))
		n += s
	case *AccessLogFilter_DurationFilter:
		s := proto.Size(x.DurationFilter)
		n += proto.SizeVarint(2<<3 | proto.WireBytes)
		n += proto.SizeVarint(uint64(s))
		n += s
	case *AccessLogFilter_NotHealthCheckFilter:
		s := proto.Size(x.NotHealthCheckFilter)
		n += proto.SizeVarint(3<<3 | proto.WireBytes)
		n += proto.SizeVarint(uint64(s))
		n += s
	case *AccessLogFilter_TraceableFilter:
		s := proto.Size(x.TraceableFilter)
		n += proto.SizeVarint(4<<3 | proto.WireBytes)
		n += proto.SizeVarint(uint64(s))
		n += s
	case *AccessLogFilter_RuntimeFilter:
		s := proto.Size(x.RuntimeFilter)
		n += proto.SizeVarint(5<<3 | proto.WireBytes)
		n += proto.SizeVarint(uint64(s))
		n += s
	case *AccessLogFilter_AndFilter:
		s := proto.Size(x.AndFilter)
		n += proto.SizeVarint(6<<3 | proto.WireBytes)
		n += proto.SizeVarint(uint64(s))
		n += s
	case *AccessLogFilter_OrFilter:
		s := proto.Size(x.OrFilter)
		n += proto.SizeVarint(7<<3 | proto.WireBytes)
		n += proto.SizeVarint(uint64(s))
		n += s
	case *AccessLogFilter_HeaderFilter:
		s := proto.Size(x.HeaderFilter)
		n += proto.SizeVarint(8<<3 | proto.WireBytes)
		n += proto.SizeVarint(uint64(s))
		n += s
	case nil:
	default:
		panic(fmt.Sprintf("proto: unexpected type %T in oneof", x))
	}
	return n
}

// Filter on an integer comparison.
type ComparisonFilter struct {
	// Comparison operator.
	Op ComparisonFilter_Op `protobuf:"varint,1,opt,name=op,proto3,enum=envoy.config.filter.accesslog.v2.ComparisonFilter_Op" json:"op,omitempty"`
	// Value to compare against.
	Value *envoy_api_v2_core.RuntimeUInt32 `protobuf:"bytes,2,opt,name=value" json:"value,omitempty"`
}

func (m *ComparisonFilter) Reset()         { *m = ComparisonFilter{} }
func (m *ComparisonFilter) String() string { return proto.CompactTextString(m) }
func (*ComparisonFilter) ProtoMessage()    {}
func (*ComparisonFilter) Descriptor() ([]byte, []int) {
	return fileDescriptorAccesslog, []int{2}
}

// GetOp returns the operator; defaults to EQ on a nil receiver.
func (m *ComparisonFilter) GetOp() ComparisonFilter_Op {
	if m != nil {
		return m.Op
	}
	return ComparisonFilter_EQ
}

func (m *ComparisonFilter) GetValue() *envoy_api_v2_core.RuntimeUInt32 {
	if m != nil {
		return m.Value
	}
	return nil
}

// Filters on HTTP response/status code.
type StatusCodeFilter struct {
	// Comparison.
	Comparison *ComparisonFilter `protobuf:"bytes,1,opt,name=comparison" json:"comparison,omitempty"`
}

func (m *StatusCodeFilter) Reset()         { *m = StatusCodeFilter{} }
func (m *StatusCodeFilter) String() string { return proto.CompactTextString(m) }
func (*StatusCodeFilter) ProtoMessage()    {}
func (*StatusCodeFilter) Descriptor() ([]byte, []int) {
	return fileDescriptorAccesslog, []int{3}
}

func (m *StatusCodeFilter) GetComparison() *ComparisonFilter {
	if m != nil {
		return m.Comparison
	}
	return nil
}

// Filters on total request duration in milliseconds.
type DurationFilter struct {
	// Comparison.
	Comparison *ComparisonFilter `protobuf:"bytes,1,opt,name=comparison" json:"comparison,omitempty"`
}

func (m *DurationFilter) Reset()         { *m = DurationFilter{} }
func (m *DurationFilter) String() string { return proto.CompactTextString(m) }
func (*DurationFilter) ProtoMessage()    {}
func (*DurationFilter) Descriptor() ([]byte, []int) {
	return fileDescriptorAccesslog, []int{4}
}

func (m *DurationFilter) GetComparison() *ComparisonFilter {
	if m != nil {
		return m.Comparison
	}
	return nil
}

// Filters for requests that are not health check requests. A health check
// request is marked by the health check filter.
type NotHealthCheckFilter struct {
}

func (m *NotHealthCheckFilter) Reset()         { *m = NotHealthCheckFilter{} }
func (m *NotHealthCheckFilter) String() string { return proto.CompactTextString(m) }
func (*NotHealthCheckFilter) ProtoMessage()    {}
func (*NotHealthCheckFilter) Descriptor() ([]byte, []int) {
	return fileDescriptorAccesslog, []int{5}
}

// Filters for requests that are traceable. See the tracing overview for more
// information on how a request becomes traceable.
type TraceableFilter struct {
}

func (m *TraceableFilter) Reset()         { *m = TraceableFilter{} }
func (m *TraceableFilter) String() string { return proto.CompactTextString(m) }
func (*TraceableFilter) ProtoMessage()    {}
func (*TraceableFilter) Descriptor() ([]byte, []int) {
	return fileDescriptorAccesslog, []int{6}
}

// Filters for random sampling of requests.
type RuntimeFilter struct {
	// Runtime key to get an optional overridden numerator for use in the *percent_sampled* field.
	// If found in runtime, this value will replace the default numerator.
	RuntimeKey string `protobuf:"bytes,1,opt,name=runtime_key,json=runtimeKey,proto3" json:"runtime_key,omitempty"`
	// The default sampling percentage. If not specified, defaults to 0% with denominator of 100.
	PercentSampled *envoy_type1.FractionalPercent `protobuf:"bytes,2,opt,name=percent_sampled,json=percentSampled" json:"percent_sampled,omitempty"`
	// By default, sampling pivots on the header
	// :ref:`x-request-id<config_http_conn_man_headers_x-request-id>` being present. If
	// :ref:`x-request-id<config_http_conn_man_headers_x-request-id>` is present, the filter will
	// consistently sample across multiple hosts based on the runtime key value and the value
	// extracted from :ref:`x-request-id<config_http_conn_man_headers_x-request-id>`. If it is
	// missing, or *use_independent_randomness* is set to true, the filter will randomly sample based
	// on the runtime key value alone. *use_independent_randomness* can be used for logging kill
	// switches within complex nested :ref:`AndFilter
	// <envoy_api_msg_config.filter.accesslog.v2.AndFilter>` and :ref:`OrFilter
	// <envoy_api_msg_config.filter.accesslog.v2.OrFilter>` blocks that are easier to reason about
	// from a probability perspective (i.e., setting to true will cause the filter to behave like
	// an independent random variable when composed within logical operator filters).
	UseIndependentRandomness bool `protobuf:"varint,3,opt,name=use_independent_randomness,json=useIndependentRandomness,proto3" json:"use_independent_randomness,omitempty"`
}

func (m *RuntimeFilter) Reset()         { *m = RuntimeFilter{} }
func (m *RuntimeFilter) String() string { return proto.CompactTextString(m) }
func (*RuntimeFilter) ProtoMessage()    {}
func (*RuntimeFilter) Descriptor() ([]byte, []int) {
	return fileDescriptorAccesslog, []int{7}
}

func (m *RuntimeFilter) GetRuntimeKey() string {
	if m != nil {
		return m.RuntimeKey
	}
	return ""
}

func (m *RuntimeFilter) GetPercentSampled() *envoy_type1.FractionalPercent {
	if m != nil {
		return m.PercentSampled
	}
	return nil
}

func (m *RuntimeFilter) GetUseIndependentRandomness() bool {
	if m != nil {
		return m.UseIndependentRandomness
	}
	return false
}

// Performs a logical “and” operation on the result of each filter in filters.
// Filters are evaluated sequentially and if one of them returns false, the
// filter returns false immediately.
type AndFilter struct {
	Filters []*AccessLogFilter `protobuf:"bytes,1,rep,name=filters" json:"filters,omitempty"`
}

func (m *AndFilter) Reset()         { *m = AndFilter{} }
func (m *AndFilter) String() string { return proto.CompactTextString(m) }
func (*AndFilter) ProtoMessage()    {}
func (*AndFilter) Descriptor() ([]byte, []int) {
	return fileDescriptorAccesslog, []int{8}
}

func (m *AndFilter) GetFilters() []*AccessLogFilter {
	if m != nil {
		return m.Filters
	}
	return nil
}

// Performs a logical “or” operation on the result of each individual filter.
// Filters are evaluated sequentially and if one of them returns true, the
// filter returns true immediately.
// NOTE(review): machine-generated gogo/protobuf code — comments only.
type OrFilter struct {
	Filters []*AccessLogFilter `protobuf:"bytes,2,rep,name=filters" json:"filters,omitempty"`
}

func (m *OrFilter) Reset()         { *m = OrFilter{} }
func (m *OrFilter) String() string { return proto.CompactTextString(m) }
func (*OrFilter) ProtoMessage()    {}
func (*OrFilter) Descriptor() ([]byte, []int) {
	return fileDescriptorAccesslog, []int{9}
}

func (m *OrFilter) GetFilters() []*AccessLogFilter {
	if m != nil {
		return m.Filters
	}
	return nil
}

// Filters requests based on the presence or value of a request header.
type HeaderFilter struct {
	// Only requests with a header which matches the specified HeaderMatcher will pass the filter
	// check.
	Header *envoy_api_v2_route.HeaderMatcher `protobuf:"bytes,1,opt,name=header" json:"header,omitempty"`
}

func (m *HeaderFilter) Reset()         { *m = HeaderFilter{} }
func (m *HeaderFilter) String() string { return proto.CompactTextString(m) }
func (*HeaderFilter) ProtoMessage()    {}
func (*HeaderFilter) Descriptor() ([]byte, []int) {
	return fileDescriptorAccesslog, []int{10}
}

func (m *HeaderFilter) GetHeader() *envoy_api_v2_route.HeaderMatcher {
	if m != nil {
		return m.Header
	}
	return nil
}

// init registers every message type and the ComparisonFilter_Op enum with the
// proto runtime under their fully-qualified names.
func init() {
	proto.RegisterType((*AccessLog)(nil), "envoy.config.filter.accesslog.v2.AccessLog")
	proto.RegisterType((*AccessLogFilter)(nil), "envoy.config.filter.accesslog.v2.AccessLogFilter")
	proto.RegisterType((*ComparisonFilter)(nil), "envoy.config.filter.accesslog.v2.ComparisonFilter")
	proto.RegisterType((*StatusCodeFilter)(nil), "envoy.config.filter.accesslog.v2.StatusCodeFilter")
	proto.RegisterType((*DurationFilter)(nil), "envoy.config.filter.accesslog.v2.DurationFilter")
	proto.RegisterType((*NotHealthCheckFilter)(nil), "envoy.config.filter.accesslog.v2.NotHealthCheckFilter")
	proto.RegisterType((*TraceableFilter)(nil), "envoy.config.filter.accesslog.v2.TraceableFilter")
	proto.RegisterType((*RuntimeFilter)(nil), "envoy.config.filter.accesslog.v2.RuntimeFilter")
	proto.RegisterType((*AndFilter)(nil), "envoy.config.filter.accesslog.v2.AndFilter")
	proto.RegisterType((*OrFilter)(nil), "envoy.config.filter.accesslog.v2.OrFilter")
	proto.RegisterType((*HeaderFilter)(nil), "envoy.config.filter.accesslog.v2.HeaderFilter")
	proto.RegisterEnum("envoy.config.filter.accesslog.v2.ComparisonFilter_Op", ComparisonFilter_Op_name, ComparisonFilter_Op_value)
}

// Marshal/MarshalTo below are the gogoproto hand-rolled encoders: each writes
// the wire-format key byte (fieldNum<<3 | wiretype), a varint length where the
// field is length-delimited, then the payload, advancing index i.
func (m *AccessLog) Marshal() (dAtA []byte, err error) {
	size := m.Size()
	dAtA = make([]byte, size)
	n, err := m.MarshalTo(dAtA)
	if err != nil {
		return nil, err
	}
	return dAtA[:n], nil
}

func (m *AccessLog) MarshalTo(dAtA []byte) (int, error) {
	var i int
	_ = i
	var l int
	_ = l
	if len(m.Name) > 0 {
		dAtA[i] = 0xa
		i++
		i = encodeVarintAccesslog(dAtA, i, uint64(len(m.Name)))
		i += copy(dAtA[i:], m.Name)
	}
	if m.Filter != nil {
		dAtA[i] = 0x12
		i++
		i = encodeVarintAccesslog(dAtA, i, uint64(m.Filter.Size()))
		n1, err := m.Filter.MarshalTo(dAtA[i:])
		if err != nil {
			return 0, err
		}
		i += n1
	}
	if m.Config != nil {
		dAtA[i] = 0x1a
		i++
		i = encodeVarintAccesslog(dAtA, i, uint64(m.Config.Size()))
		n2, err := m.Config.MarshalTo(dAtA[i:])
		if err != nil {
			return 0, err
		}
		i += n2
	}
	return i, nil
}

func (m *AccessLogFilter) Marshal() (dAtA []byte, err error) {
	size := m.Size()
	dAtA = make([]byte, size)
	n, err := m.MarshalTo(dAtA)
	if err != nil {
		return nil, err
	}
	return dAtA[:n], nil
}

// AccessLogFilter delegates to whichever oneof wrapper is set.
func (m *AccessLogFilter) MarshalTo(dAtA []byte) (int, error) {
	var i int
	_ = i
	var l int
	_ = l
	if m.FilterSpecifier != nil {
		nn3, err := m.FilterSpecifier.MarshalTo(dAtA[i:])
		if err != nil {
			return 0, err
		}
		i += nn3
	}
	return i, nil
}

func (m *AccessLogFilter_StatusCodeFilter) MarshalTo(dAtA []byte) (int, error) {
	i := 0
	if m.StatusCodeFilter != nil {
		dAtA[i] = 0xa
		i++
		i = encodeVarintAccesslog(dAtA, i, uint64(m.StatusCodeFilter.Size()))
		n4, err := m.StatusCodeFilter.MarshalTo(dAtA[i:])
		if err != nil {
			return 0, err
		}
		i += n4
	}
	return i, nil
}
func (m *AccessLogFilter_DurationFilter) MarshalTo(dAtA []byte) (int, error) {
	i := 0
	if m.DurationFilter != nil {
		dAtA[i] = 0x12
		i++
		i = encodeVarintAccesslog(dAtA, i, uint64(m.DurationFilter.Size()))
		n5, err := m.DurationFilter.MarshalTo(dAtA[i:])
		if err != nil {
			return 0, err
		}
		i += n5
	}
	return i, nil
}
func (m *AccessLogFilter_NotHealthCheckFilter) MarshalTo(dAtA []byte) (int, error) {
	i := 0
	if m.NotHealthCheckFilter != nil {
		dAtA[i] = 0x1a
		i++
		i = encodeVarintAccesslog(dAtA, i, uint64(m.NotHealthCheckFilter.Size()))
		n6, err := m.NotHealthCheckFilter.MarshalTo(dAtA[i:])
		if err != nil {
			return 0, err
		}
		i += n6
	}
	return i, nil
}
func (m *AccessLogFilter_TraceableFilter) MarshalTo(dAtA []byte) (int, error) {
	i := 0
	if m.TraceableFilter != nil {
		dAtA[i] = 0x22
		i++
		i = encodeVarintAccesslog(dAtA, i, uint64(m.TraceableFilter.Size()))
		n7, err := m.TraceableFilter.MarshalTo(dAtA[i:])
		if err != nil {
			return 0, err
		}
		i += n7
	}
	return i, nil
}
func (m *AccessLogFilter_RuntimeFilter) MarshalTo(dAtA []byte) (int, error) {
	i := 0
	if m.RuntimeFilter != nil {
		dAtA[i] = 0x2a
		i++
		i = encodeVarintAccesslog(dAtA, i, uint64(m.RuntimeFilter.Size()))
		n8, err := m.RuntimeFilter.MarshalTo(dAtA[i:])
		if err != nil {
			return 0, err
		}
		i += n8
	}
	return i, nil
}
func (m *AccessLogFilter_AndFilter) MarshalTo(dAtA []byte) (int, error) {
	i := 0
	if m.AndFilter != nil {
		dAtA[i] = 0x32
		i++
		i = encodeVarintAccesslog(dAtA, i, uint64(m.AndFilter.Size()))
		n9, err := m.AndFilter.MarshalTo(dAtA[i:])
		if err != nil {
			return 0, err
		}
		i += n9
	}
	return i, nil
}
func (m *AccessLogFilter_OrFilter) MarshalTo(dAtA []byte) (int, error) {
	i := 0
	if m.OrFilter != nil {
		dAtA[i] = 0x3a
		i++
		i = encodeVarintAccesslog(dAtA, i, uint64(m.OrFilter.Size()))
		n10, err := m.OrFilter.MarshalTo(dAtA[i:])
		if err != nil {
			return 0, err
		}
		i += n10
	}
	return i, nil
}
func (m *AccessLogFilter_HeaderFilter) MarshalTo(dAtA []byte) (int, error) {
	i := 0
	if m.HeaderFilter != nil {
		dAtA[i] = 0x42
		i++
		i = encodeVarintAccesslog(dAtA, i, uint64(m.HeaderFilter.Size()))
		n11, err := m.HeaderFilter.MarshalTo(dAtA[i:])
		if err != nil {
			return 0, err
		}
		i += n11
	}
	return i, nil
}
func (m *ComparisonFilter) Marshal() (dAtA []byte, err error) {
	size := m.Size()
	dAtA = make([]byte, size)
	n, err := m.MarshalTo(dAtA)
	if err != nil {
		return nil, err
	}
	return dAtA[:n], nil
}

func (m *ComparisonFilter) MarshalTo(dAtA []byte) (int, error) {
	var i int
	_ = i
	var l int
	_ = l
	if m.Op != 0 {
		dAtA[i] = 0x8
		i++
		i = encodeVarintAccesslog(dAtA, i, uint64(m.Op))
	}
	if m.Value != nil {
		dAtA[i] = 0x12
		i++
		i = encodeVarintAccesslog(dAtA, i, uint64(m.Value.Size()))
		n12, err := m.Value.MarshalTo(dAtA[i:])
		if err != nil {
			return 0, err
		}
		i += n12
	}
	return i, nil
}

func (m *StatusCodeFilter) Marshal() (dAtA []byte, err error) {
	size := m.Size()
	dAtA = make([]byte, size)
	n, err := m.MarshalTo(dAtA)
	if err != nil {
		return nil, err
	}
	return dAtA[:n], nil
}

func (m *StatusCodeFilter) MarshalTo(dAtA []byte) (int, error) {
	var i int
	_ = i
	var l int
	_ = l
	if m.Comparison != nil {
		dAtA[i] = 0xa
		i++
		i = encodeVarintAccesslog(dAtA, i, uint64(m.Comparison.Size()))
		n13, err := m.Comparison.MarshalTo(dAtA[i:])
		if err != nil {
			return 0, err
		}
		i += n13
	}
	return i, nil
}

func (m *DurationFilter) Marshal() (dAtA []byte, err error) {
	size := m.Size()
	dAtA = make([]byte, size)
	n, err := m.MarshalTo(dAtA)
	if err != nil {
		return nil, err
	}
	return dAtA[:n], nil
}

func (m *DurationFilter) MarshalTo(dAtA []byte) (int, error) {
	var i int
	_ = i
	var l int
	_ = l
	if m.Comparison != nil {
		dAtA[i] = 0xa
		i++
		i = encodeVarintAccesslog(dAtA, i, uint64(m.Comparison.Size()))
		n14, err := m.Comparison.MarshalTo(dAtA[i:])
		if err != nil {
			return 0, err
		}
		i += n14
	}
	return i, nil
}

// NotHealthCheckFilter / TraceableFilter carry no fields, so they encode to
// zero bytes.
func (m *NotHealthCheckFilter) Marshal() (dAtA []byte, err error) {
	size := m.Size()
	dAtA = make([]byte, size)
	n, err := m.MarshalTo(dAtA)
	if err != nil {
		return nil, err
	}
	return dAtA[:n], nil
}

func (m *NotHealthCheckFilter) MarshalTo(dAtA []byte) (int, error) {
	var i int
	_ = i
	var l int
	_ = l
	return i, nil
}

func (m *TraceableFilter) Marshal() (dAtA []byte, err error) {
	size := m.Size()
	dAtA = make([]byte, size)
	n, err := m.MarshalTo(dAtA)
	if err != nil {
		return nil, err
	}
	return dAtA[:n], nil
}

func (m *TraceableFilter) MarshalTo(dAtA []byte) (int, error) {
	var i int
	_ = i
	var l int
	_ = l
	return i, nil
}

func (m *RuntimeFilter) Marshal() (dAtA []byte, err error) {
	size := m.Size()
	dAtA = make([]byte, size)
	n, err := m.MarshalTo(dAtA)
	if err != nil {
		return nil, err
	}
	return dAtA[:n], nil
}

func (m *RuntimeFilter) MarshalTo(dAtA []byte) (int, error) {
	var i int
	_ = i
	var l int
	_ = l
	if len(m.RuntimeKey) > 0 {
		dAtA[i] = 0xa
		i++
		i = encodeVarintAccesslog(dAtA, i, uint64(len(m.RuntimeKey)))
		i += copy(dAtA[i:], m.RuntimeKey)
	}
	if m.PercentSampled != nil {
		dAtA[i] = 0x12
		i++
		i = encodeVarintAccesslog(dAtA, i, uint64(m.PercentSampled.Size()))
		n15, err := m.PercentSampled.MarshalTo(dAtA[i:])
		if err != nil {
			return 0, err
		}
		i += n15
	}
	if m.UseIndependentRandomness {
		dAtA[i] = 0x18
		i++
		if m.UseIndependentRandomness {
			dAtA[i] = 1
		} else {
			dAtA[i] = 0
		}
		i++
	}
	return i, nil
}

func (m *AndFilter) Marshal() (dAtA []byte, err error) {
	size := m.Size()
	dAtA = make([]byte, size)
	n, err := m.MarshalTo(dAtA)
	if err != nil {
		return nil, err
	}
	return dAtA[:n], nil
}

func (m *AndFilter) MarshalTo(dAtA []byte) (int, error) {
	var i int
	_ = i
	var l int
	_ = l
	if len(m.Filters) > 0 {
		for _, msg := range m.Filters {
			dAtA[i] = 0xa
			i++
			i = encodeVarintAccesslog(dAtA, i, uint64(msg.Size()))
			n, err := msg.MarshalTo(dAtA[i:])
			if err != nil {
				return 0, err
			}
			i += n
		}
	}
	return i, nil
}

func (m *OrFilter) Marshal() (dAtA []byte, err error) {
	size := m.Size()
	dAtA = make([]byte, size)
	n, err := m.MarshalTo(dAtA)
	if err != nil {
		return nil, err
	}
	return dAtA[:n], nil
}

func (m *OrFilter) MarshalTo(dAtA []byte) (int, error) {
	var i int
	_ = i
	var l int
	_ = l
	if len(m.Filters) > 0 {
		for _, msg := range m.Filters {
			dAtA[i] = 0x12
			i++
			i = encodeVarintAccesslog(dAtA, i, uint64(msg.Size()))
			n, err := msg.MarshalTo(dAtA[i:])
			if err != nil {
				return 0, err
			}
			i += n
		}
	}
	return i, nil
}

func (m *HeaderFilter) Marshal() (dAtA []byte, err error) {
	size := m.Size()
	dAtA = make([]byte, size)
	n, err := m.MarshalTo(dAtA)
	if err != nil {
		return nil, err
	}
	return dAtA[:n], nil
}

func (m *HeaderFilter) MarshalTo(dAtA []byte) (int, error) {
	var i int
	_ = i
	var l int
	_ = l
	if m.Header != nil {
		dAtA[i] = 0xa
		i++
		i = encodeVarintAccesslog(dAtA, i, uint64(m.Header.Size()))
		n16, err := m.Header.MarshalTo(dAtA[i:])
		if err != nil {
			return 0, err
		}
		i += n16
	}
	return i, nil
}

// encodeVarintAccesslog writes v as a protobuf base-128 varint at offset and
// returns the index just past the last byte written.
func encodeVarintAccesslog(dAtA []byte, offset int, v uint64) int {
	for v >= 1<<7 {
		dAtA[offset] = uint8(v&0x7f | 0x80)
		v >>= 7
		offset++
	}
	dAtA[offset] = uint8(v)
	return offset + 1
}

// Size methods mirror the Marshal layout: 1 key byte + varint length + payload
// per populated field.
func (m *AccessLog) Size() (n int) {
	var l int
	_ = l
	l = len(m.Name)
	if l > 0 {
		n += 1 + l + sovAccesslog(uint64(l))
	}
	if m.Filter != nil {
		l = m.Filter.Size()
		n += 1 + l + sovAccesslog(uint64(l))
	}
	if m.Config != nil {
		l = m.Config.Size()
		n += 1 + l + sovAccesslog(uint64(l))
	}
	return n
}

func (m *AccessLogFilter) Size() (n int) {
	var l int
	_ = l
	if m.FilterSpecifier != nil {
		n += m.FilterSpecifier.Size()
	}
	return n
}

func (m *AccessLogFilter_StatusCodeFilter) Size() (n int) {
	var l int
	_ = l
	if m.StatusCodeFilter != nil {
		l = m.StatusCodeFilter.Size()
		n += 1 + l + sovAccesslog(uint64(l))
	}
	return n
}
func (m *AccessLogFilter_DurationFilter) Size() (n int) {
	var l int
	_ = l
	if m.DurationFilter != nil {
		l = m.DurationFilter.Size()
		n += 1 + l + sovAccesslog(uint64(l))
	}
	return n
}
func (m *AccessLogFilter_NotHealthCheckFilter) Size() (n int) {
	var l int
	_ = l
	if m.NotHealthCheckFilter != nil {
		l = m.NotHealthCheckFilter.Size()
		n += 1 + l + sovAccesslog(uint64(l))
	}
	return n
}
func (m *AccessLogFilter_TraceableFilter) Size() (n int) {
	var l int
	_ = l
	if m.TraceableFilter != nil {
		l = m.TraceableFilter.Size()
		n += 1 + l + sovAccesslog(uint64(l))
	}
	return n
}
func (m *AccessLogFilter_RuntimeFilter) Size() (n int) {
	var l int
	_ = l
	if m.RuntimeFilter != nil {
		l = m.RuntimeFilter.Size()
		n += 1 + l + sovAccesslog(uint64(l))
	}
	return n
}
func (m *AccessLogFilter_AndFilter) Size() (n int) {
	var l int
	_ = l
	if m.AndFilter != nil {
		l = m.AndFilter.Size()
		n += 1 + l + sovAccesslog(uint64(l))
	}
	return n
}
func (m *AccessLogFilter_OrFilter) Size() (n int) {
	var l int
	_ = l
	if m.OrFilter != nil {
		l = m.OrFilter.Size()
		n += 1 + l + sovAccesslog(uint64(l))
	}
	return n
}
func (m *AccessLogFilter_HeaderFilter) Size() (n int) {
	var l int
	_ = l
	if m.HeaderFilter != nil {
		l = m.HeaderFilter.Size()
		n += 1 + l + sovAccesslog(uint64(l))
	}
	return n
}
func (m *ComparisonFilter) Size() (n int) {
	var l int
	_ = l
	if m.Op != 0 {
		n += 1 + sovAccesslog(uint64(m.Op))
	}
	if m.Value != nil {
		l = m.Value.Size()
		n += 1 + l + sovAccesslog(uint64(l))
	}
	return n
}

func (m *StatusCodeFilter) Size() (n int) {
	var l int
	_ = l
	if m.Comparison != nil {
		l = m.Comparison.Size()
		n += 1 + l + sovAccesslog(uint64(l))
	}
	return n
}

func (m *DurationFilter) Size() (n int) {
	var l int
	_ = l
	if m.Comparison != nil {
		l = m.Comparison.Size()
		n += 1 + l + sovAccesslog(uint64(l))
	}
	return n
}

func (m *NotHealthCheckFilter) Size() (n int) {
	var l int
	_ = l
	return n
}

func (m *TraceableFilter) Size() (n int) {
	var l int
	_ = l
	return n
}

func (m *RuntimeFilter) Size() (n int) {
	var l int
	_ = l
	l = len(m.RuntimeKey)
	if l > 0 {
		n += 1 + l + sovAccesslog(uint64(l))
	}
	if m.PercentSampled != nil {
		l = m.PercentSampled.Size()
		n += 1 + l + sovAccesslog(uint64(l))
	}
	if m.UseIndependentRandomness {
		n += 2
	}
	return n
}

func (m *AndFilter) Size() (n int) {
	var l int
	_ = l
	if len(m.Filters) > 0 {
		for _, e := range m.Filters {
			l = e.Size()
			n += 1 + l + sovAccesslog(uint64(l))
		}
	}
	return n
}

func (m *OrFilter) Size() (n int) {
	var l int
	_ = l
	if len(m.Filters) > 0 {
		for _, e := range m.Filters {
			l = e.Size()
			n += 1 + l + sovAccesslog(uint64(l))
		}
	}
	return n
}

func (m *HeaderFilter) Size() (n int) {
	var l int
	_ = l
	if m.Header != nil {
		l = m.Header.Size()
		n += 1 + l + sovAccesslog(uint64(l))
	}
	return n
}

// sovAccesslog returns the number of bytes needed to varint-encode x
// (continues past this chunk boundary).
func sovAccesslog(x uint64) (n int) {
	for {
		n++
		x >>= 7
		if x == 0 {
			break
		}
	}
	return n
}

// NOTE(review): machine-generated gogo/protobuf decoders — comments only.
// sozAccesslog sizes a zigzag-encoded varint.
func sozAccesslog(x uint64) (n int) {
	return sovAccesslog(uint64((x << 1) ^ uint64((int64(x) >> 63))))
}

// Unmarshal decodes an AccessLog from wire format: reads each key varint,
// dispatches on field number (1=Name, 2=Filter, 3=Config), and skips unknown
// fields via skipAccesslog.
func (m *AccessLog) Unmarshal(dAtA []byte) error {
	l := len(dAtA)
	iNdEx := 0
	for iNdEx < l {
		preIndex := iNdEx
		var wire uint64
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return ErrIntOverflowAccesslog
			}
			if iNdEx >= l {
				return io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= (uint64(b) & 0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		fieldNum := int32(wire >> 3)
		wireType := int(wire & 0x7)
		if wireType == 4 {
			return fmt.Errorf("proto: AccessLog: wiretype end group for non-group")
		}
		if fieldNum <= 0 {
			return fmt.Errorf("proto: AccessLog: illegal tag %d (wire type %d)", fieldNum, wire)
		}
		switch fieldNum {
		case 1:
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Name", wireType)
			}
			var stringLen uint64
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowAccesslog
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				stringLen |= (uint64(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			intStringLen := int(stringLen)
			if intStringLen < 0 {
				return ErrInvalidLengthAccesslog
			}
			postIndex := iNdEx + intStringLen
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.Name = string(dAtA[iNdEx:postIndex])
			iNdEx = postIndex
		case 2:
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Filter", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowAccesslog
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= (int(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthAccesslog
			}
			postIndex := iNdEx + msglen
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if m.Filter == nil {
				m.Filter = &AccessLogFilter{}
			}
			if err := m.Filter.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		case 3:
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Config", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowAccesslog
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= (int(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthAccesslog
			}
			postIndex := iNdEx + msglen
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			if m.Config == nil {
				m.Config = &google_protobuf.Struct{}
			}
			if err := m.Config.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			iNdEx = postIndex
		default:
			iNdEx = preIndex
			skippy, err := skipAccesslog(dAtA[iNdEx:])
			if err != nil {
				return err
			}
			if skippy < 0 {
				return ErrInvalidLengthAccesslog
			}
			if (iNdEx + skippy) > l {
				return io.ErrUnexpectedEOF
			}
			iNdEx += skippy
		}
	}

	if iNdEx > l {
		return io.ErrUnexpectedEOF
	}
	return nil
}

// Unmarshal decodes an AccessLogFilter: field numbers 1-8 each populate the
// matching oneof wrapper in FilterSpecifier (last one decoded wins).
func (m *AccessLogFilter) Unmarshal(dAtA []byte) error {
	l := len(dAtA)
	iNdEx := 0
	for iNdEx < l {
		preIndex := iNdEx
		var wire uint64
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return ErrIntOverflowAccesslog
			}
			if iNdEx >= l {
				return io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= (uint64(b) & 0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		fieldNum := int32(wire >> 3)
		wireType := int(wire & 0x7)
		if wireType == 4 {
			return fmt.Errorf("proto: AccessLogFilter: wiretype end group for non-group")
		}
		if fieldNum <= 0 {
			return fmt.Errorf("proto: AccessLogFilter: illegal tag %d (wire type %d)", fieldNum, wire)
		}
		switch fieldNum {
		case 1:
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field StatusCodeFilter", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowAccesslog
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= (int(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthAccesslog
			}
			postIndex := iNdEx + msglen
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			v := &StatusCodeFilter{}
			if err := v.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			m.FilterSpecifier = &AccessLogFilter_StatusCodeFilter{v}
			iNdEx = postIndex
		case 2:
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field DurationFilter", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowAccesslog
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= (int(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthAccesslog
			}
			postIndex := iNdEx + msglen
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			v := &DurationFilter{}
			if err := v.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			m.FilterSpecifier = &AccessLogFilter_DurationFilter{v}
			iNdEx = postIndex
		case 3:
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field NotHealthCheckFilter", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowAccesslog
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= (int(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthAccesslog
			}
			postIndex := iNdEx + msglen
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			v := &NotHealthCheckFilter{}
			if err := v.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			m.FilterSpecifier = &AccessLogFilter_NotHealthCheckFilter{v}
			iNdEx = postIndex
		case 4:
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field TraceableFilter", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowAccesslog
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= (int(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthAccesslog
			}
			postIndex := iNdEx + msglen
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			v := &TraceableFilter{}
			if err := v.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			m.FilterSpecifier = &AccessLogFilter_TraceableFilter{v}
			iNdEx = postIndex
		case 5:
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field RuntimeFilter", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowAccesslog
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= (int(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthAccesslog
			}
			postIndex := iNdEx + msglen
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			v := &RuntimeFilter{}
			if err := v.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			m.FilterSpecifier = &AccessLogFilter_RuntimeFilter{v}
			iNdEx = postIndex
		case 6:
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field AndFilter", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowAccesslog
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= (int(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthAccesslog
			}
			postIndex := iNdEx + msglen
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			v := &AndFilter{}
			if err := v.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			m.FilterSpecifier = &AccessLogFilter_AndFilter{v}
			iNdEx = postIndex
		case 7:
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field OrFilter", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowAccesslog
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= (int(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthAccesslog
			}
			postIndex := iNdEx + msglen
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			v := &OrFilter{}
			if err := v.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			m.FilterSpecifier = &AccessLogFilter_OrFilter{v}
			iNdEx = postIndex
		case 8:
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field HeaderFilter", wireType)
			}
			var msglen int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowAccesslog
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				msglen |= (int(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			if msglen < 0 {
				return ErrInvalidLengthAccesslog
			}
			postIndex := iNdEx + msglen
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			v := &HeaderFilter{}
			if err := v.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {
				return err
			}
			m.FilterSpecifier = &AccessLogFilter_HeaderFilter{v}
			iNdEx = postIndex
		default:
			iNdEx = preIndex
			skippy, err := skipAccesslog(dAtA[iNdEx:])
			if err != nil {
				return err
			}
			if skippy < 0 {
				return ErrInvalidLengthAccesslog
			}
			if (iNdEx + skippy) > l {
				return io.ErrUnexpectedEOF
			}
			iNdEx += skippy
		}
	}

	if iNdEx > l {
		return io.ErrUnexpectedEOF
	}
	return nil
}

// Unmarshal decodes a ComparisonFilter (1=Op varint enum, 2=Value message);
// the function continues past this chunk boundary.
func (m *ComparisonFilter) Unmarshal(dAtA []byte) error {
	l := len(dAtA)
	iNdEx := 0
	for iNdEx < l {
		preIndex := iNdEx
		var wire uint64
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return ErrIntOverflowAccesslog
			}
			if iNdEx >= l {
				return io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= (uint64(b) & 0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		fieldNum := int32(wire >> 3)
		wireType := int(wire & 0x7)
		if wireType == 4 {
			return fmt.Errorf("proto: ComparisonFilter: wiretype end group for non-group")
		}
		if fieldNum <= 0 {
			return fmt.Errorf("proto: ComparisonFilter: illegal tag %d (wire type %d)", fieldNum, wire)
		}
		switch fieldNum {
		case 1:
			if wireType != 0 {
				return fmt.Errorf("proto: wrong wireType = %d for field Op", wireType)
			}
			m.Op = 0
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowAccesslog
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				m.Op |= (ComparisonFilter_Op(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
		case 2:
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Value", wireType)
			}
			var msglen int
			for shift := uint(0); ;
shift += 7 { if shift >= 64 { return ErrIntOverflowAccesslog } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ msglen |= (int(b) & 0x7F) << shift if b < 0x80 { break } } if msglen < 0 { return ErrInvalidLengthAccesslog } postIndex := iNdEx + msglen if postIndex > l { return io.ErrUnexpectedEOF } if m.Value == nil { m.Value = &envoy_api_v2_core.RuntimeUInt32{} } if err := m.Value.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipAccesslog(dAtA[iNdEx:]) if err != nil { return err } if skippy < 0 { return ErrInvalidLengthAccesslog } if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } iNdEx += skippy } } if iNdEx > l { return io.ErrUnexpectedEOF } return nil } func (m *StatusCodeFilter) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { preIndex := iNdEx var wire uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowAccesslog } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ wire |= (uint64(b) & 0x7F) << shift if b < 0x80 { break } } fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { return fmt.Errorf("proto: StatusCodeFilter: wiretype end group for non-group") } if fieldNum <= 0 { return fmt.Errorf("proto: StatusCodeFilter: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 1: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Comparison", wireType) } var msglen int for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowAccesslog } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ msglen |= (int(b) & 0x7F) << shift if b < 0x80 { break } } if msglen < 0 { return ErrInvalidLengthAccesslog } postIndex := iNdEx + msglen if postIndex > l { return io.ErrUnexpectedEOF } if m.Comparison == nil { m.Comparison = &ComparisonFilter{} } if err := m.Comparison.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { 
return err } iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipAccesslog(dAtA[iNdEx:]) if err != nil { return err } if skippy < 0 { return ErrInvalidLengthAccesslog } if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } iNdEx += skippy } } if iNdEx > l { return io.ErrUnexpectedEOF } return nil } func (m *DurationFilter) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { preIndex := iNdEx var wire uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowAccesslog } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ wire |= (uint64(b) & 0x7F) << shift if b < 0x80 { break } } fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { return fmt.Errorf("proto: DurationFilter: wiretype end group for non-group") } if fieldNum <= 0 { return fmt.Errorf("proto: DurationFilter: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 1: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Comparison", wireType) } var msglen int for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowAccesslog } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ msglen |= (int(b) & 0x7F) << shift if b < 0x80 { break } } if msglen < 0 { return ErrInvalidLengthAccesslog } postIndex := iNdEx + msglen if postIndex > l { return io.ErrUnexpectedEOF } if m.Comparison == nil { m.Comparison = &ComparisonFilter{} } if err := m.Comparison.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipAccesslog(dAtA[iNdEx:]) if err != nil { return err } if skippy < 0 { return ErrInvalidLengthAccesslog } if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } iNdEx += skippy } } if iNdEx > l { return io.ErrUnexpectedEOF } return nil } func (m *NotHealthCheckFilter) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { preIndex := iNdEx var wire uint64 for 
shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowAccesslog } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ wire |= (uint64(b) & 0x7F) << shift if b < 0x80 { break } } fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { return fmt.Errorf("proto: NotHealthCheckFilter: wiretype end group for non-group") } if fieldNum <= 0 { return fmt.Errorf("proto: NotHealthCheckFilter: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { default: iNdEx = preIndex skippy, err := skipAccesslog(dAtA[iNdEx:]) if err != nil { return err } if skippy < 0 { return ErrInvalidLengthAccesslog } if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } iNdEx += skippy } } if iNdEx > l { return io.ErrUnexpectedEOF } return nil } func (m *TraceableFilter) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { preIndex := iNdEx var wire uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowAccesslog } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ wire |= (uint64(b) & 0x7F) << shift if b < 0x80 { break } } fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { return fmt.Errorf("proto: TraceableFilter: wiretype end group for non-group") } if fieldNum <= 0 { return fmt.Errorf("proto: TraceableFilter: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { default: iNdEx = preIndex skippy, err := skipAccesslog(dAtA[iNdEx:]) if err != nil { return err } if skippy < 0 { return ErrInvalidLengthAccesslog } if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } iNdEx += skippy } } if iNdEx > l { return io.ErrUnexpectedEOF } return nil } func (m *RuntimeFilter) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { preIndex := iNdEx var wire uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowAccesslog } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ wire 
|= (uint64(b) & 0x7F) << shift if b < 0x80 { break } } fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { return fmt.Errorf("proto: RuntimeFilter: wiretype end group for non-group") } if fieldNum <= 0 { return fmt.Errorf("proto: RuntimeFilter: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 1: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field RuntimeKey", wireType) } var stringLen uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowAccesslog } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ stringLen |= (uint64(b) & 0x7F) << shift if b < 0x80 { break } } intStringLen := int(stringLen) if intStringLen < 0 { return ErrInvalidLengthAccesslog } postIndex := iNdEx + intStringLen if postIndex > l { return io.ErrUnexpectedEOF } m.RuntimeKey = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex case 2: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field PercentSampled", wireType) } var msglen int for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowAccesslog } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ msglen |= (int(b) & 0x7F) << shift if b < 0x80 { break } } if msglen < 0 { return ErrInvalidLengthAccesslog } postIndex := iNdEx + msglen if postIndex > l { return io.ErrUnexpectedEOF } if m.PercentSampled == nil { m.PercentSampled = &envoy_type1.FractionalPercent{} } if err := m.PercentSampled.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } iNdEx = postIndex case 3: if wireType != 0 { return fmt.Errorf("proto: wrong wireType = %d for field UseIndependentRandomness", wireType) } var v int for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowAccesslog } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ v |= (int(b) & 0x7F) << shift if b < 0x80 { break } } m.UseIndependentRandomness = bool(v != 0) default: iNdEx = preIndex skippy, 
err := skipAccesslog(dAtA[iNdEx:]) if err != nil { return err } if skippy < 0 { return ErrInvalidLengthAccesslog } if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } iNdEx += skippy } } if iNdEx > l { return io.ErrUnexpectedEOF } return nil } func (m *AndFilter) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { preIndex := iNdEx var wire uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowAccesslog } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ wire |= (uint64(b) & 0x7F) << shift if b < 0x80 { break } } fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { return fmt.Errorf("proto: AndFilter: wiretype end group for non-group") } if fieldNum <= 0 { return fmt.Errorf("proto: AndFilter: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 1: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Filters", wireType) } var msglen int for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowAccesslog } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ msglen |= (int(b) & 0x7F) << shift if b < 0x80 { break } } if msglen < 0 { return ErrInvalidLengthAccesslog } postIndex := iNdEx + msglen if postIndex > l { return io.ErrUnexpectedEOF } m.Filters = append(m.Filters, &AccessLogFilter{}) if err := m.Filters[len(m.Filters)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipAccesslog(dAtA[iNdEx:]) if err != nil { return err } if skippy < 0 { return ErrInvalidLengthAccesslog } if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } iNdEx += skippy } } if iNdEx > l { return io.ErrUnexpectedEOF } return nil } func (m *OrFilter) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { preIndex := iNdEx var wire uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowAccesslog } if iNdEx >= 
l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ wire |= (uint64(b) & 0x7F) << shift if b < 0x80 { break } } fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { return fmt.Errorf("proto: OrFilter: wiretype end group for non-group") } if fieldNum <= 0 { return fmt.Errorf("proto: OrFilter: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 2: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Filters", wireType) } var msglen int for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowAccesslog } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ msglen |= (int(b) & 0x7F) << shift if b < 0x80 { break } } if msglen < 0 { return ErrInvalidLengthAccesslog } postIndex := iNdEx + msglen if postIndex > l { return io.ErrUnexpectedEOF } m.Filters = append(m.Filters, &AccessLogFilter{}) if err := m.Filters[len(m.Filters)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipAccesslog(dAtA[iNdEx:]) if err != nil { return err } if skippy < 0 { return ErrInvalidLengthAccesslog } if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } iNdEx += skippy } } if iNdEx > l { return io.ErrUnexpectedEOF } return nil } func (m *HeaderFilter) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 for iNdEx < l { preIndex := iNdEx var wire uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowAccesslog } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ wire |= (uint64(b) & 0x7F) << shift if b < 0x80 { break } } fieldNum := int32(wire >> 3) wireType := int(wire & 0x7) if wireType == 4 { return fmt.Errorf("proto: HeaderFilter: wiretype end group for non-group") } if fieldNum <= 0 { return fmt.Errorf("proto: HeaderFilter: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { case 1: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d 
for field Header", wireType) } var msglen int for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowAccesslog } if iNdEx >= l { return io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ msglen |= (int(b) & 0x7F) << shift if b < 0x80 { break } } if msglen < 0 { return ErrInvalidLengthAccesslog } postIndex := iNdEx + msglen if postIndex > l { return io.ErrUnexpectedEOF } if m.Header == nil { m.Header = &envoy_api_v2_route.HeaderMatcher{} } if err := m.Header.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { return err } iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipAccesslog(dAtA[iNdEx:]) if err != nil { return err } if skippy < 0 { return ErrInvalidLengthAccesslog } if (iNdEx + skippy) > l { return io.ErrUnexpectedEOF } iNdEx += skippy } } if iNdEx > l { return io.ErrUnexpectedEOF } return nil } func skipAccesslog(dAtA []byte) (n int, err error) { l := len(dAtA) iNdEx := 0 for iNdEx < l { var wire uint64 for shift := uint(0); ; shift += 7 { if shift >= 64 { return 0, ErrIntOverflowAccesslog } if iNdEx >= l { return 0, io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ wire |= (uint64(b) & 0x7F) << shift if b < 0x80 { break } } wireType := int(wire & 0x7) switch wireType { case 0: for shift := uint(0); ; shift += 7 { if shift >= 64 { return 0, ErrIntOverflowAccesslog } if iNdEx >= l { return 0, io.ErrUnexpectedEOF } iNdEx++ if dAtA[iNdEx-1] < 0x80 { break } } return iNdEx, nil case 1: iNdEx += 8 return iNdEx, nil case 2: var length int for shift := uint(0); ; shift += 7 { if shift >= 64 { return 0, ErrIntOverflowAccesslog } if iNdEx >= l { return 0, io.ErrUnexpectedEOF } b := dAtA[iNdEx] iNdEx++ length |= (int(b) & 0x7F) << shift if b < 0x80 { break } } iNdEx += length if length < 0 { return 0, ErrInvalidLengthAccesslog } return iNdEx, nil case 3: for { var innerWire uint64 var start int = iNdEx for shift := uint(0); ; shift += 7 { if shift >= 64 { return 0, ErrIntOverflowAccesslog } if iNdEx >= l { return 0, io.ErrUnexpectedEOF } b := 
dAtA[iNdEx] iNdEx++ innerWire |= (uint64(b) & 0x7F) << shift if b < 0x80 { break } } innerWireType := int(innerWire & 0x7) if innerWireType == 4 { break } next, err := skipAccesslog(dAtA[start:]) if err != nil { return 0, err } iNdEx = start + next } return iNdEx, nil case 4: return iNdEx, nil case 5: iNdEx += 4 return iNdEx, nil default: return 0, fmt.Errorf("proto: illegal wireType %d", wireType) } } panic("unreachable") } var ( ErrInvalidLengthAccesslog = fmt.Errorf("proto: negative length found during unmarshaling") ErrIntOverflowAccesslog = fmt.Errorf("proto: integer overflow") ) func init() { proto.RegisterFile("envoy/config/filter/accesslog/v2/accesslog.proto", fileDescriptorAccesslog) } var fileDescriptorAccesslog = []byte{ // 819 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x95, 0x41, 0x6f, 0xe3, 0x44, 0x14, 0xc7, 0x77, 0x9c, 0x26, 0x9b, 0xbc, 0xdd, 0x26, 0x66, 0xb4, 0xa2, 0xa1, 0x2a, 0x51, 0xf0, 0x69, 0xb5, 0x48, 0xf6, 0x92, 0x15, 0x3d, 0x71, 0x21, 0x25, 0xa5, 0x11, 0x85, 0x52, 0x97, 0x0a, 0x04, 0x52, 0xad, 0x89, 0x3d, 0x49, 0xac, 0x3a, 0x1e, 0x6b, 0x3c, 0x8e, 0x94, 0x2b, 0x27, 0xc4, 0x91, 0x2b, 0x5f, 0x04, 0xc1, 0xa5, 0xdc, 0x38, 0xf2, 0x11, 0x50, 0x6f, 0xfd, 0x16, 0xc8, 0x33, 0x63, 0x37, 0x89, 0x2a, 0xb9, 0x42, 0xec, 0x25, 0xf1, 0xcc, 0x9b, 0xff, 0xef, 0xbd, 0x37, 0xf3, 0xde, 0x0c, 0xbc, 0xa6, 0xf1, 0x92, 0xad, 0x1c, 0x9f, 0xc5, 0xd3, 0x70, 0xe6, 0x4c, 0xc3, 0x48, 0x50, 0xee, 0x10, 0xdf, 0xa7, 0x69, 0x1a, 0xb1, 0x99, 0xb3, 0x1c, 0xdc, 0x0f, 0xec, 0x84, 0x33, 0xc1, 0x70, 0x5f, 0x2a, 0x6c, 0xa5, 0xb0, 0x95, 0xc2, 0xbe, 0x5f, 0xb4, 0x1c, 0xec, 0x1f, 0x28, 0x26, 0x49, 0xc2, 0x5c, 0xef, 0x33, 0x4e, 0x9d, 0x09, 0x49, 0xa9, 0xd2, 0xef, 0xf7, 0x36, 0xac, 0x9c, 0x65, 0x82, 0xaa, 0x5f, 0x6d, 0xef, 0x2a, 0xbb, 0x58, 0x25, 0xd4, 0x49, 0x28, 0xf7, 0x69, 0x2c, 0xb4, 0xe5, 0x60, 0xc6, 0xd8, 0x2c, 0xa2, 0x8e, 0x1c, 0x4d, 0xb2, 0xa9, 0x93, 0x0a, 0x9e, 0xf9, 0x85, 0x75, 0x6f, 0x49, 0xa2, 0x30, 0x20, 0x82, 0x3a, 
0xc5, 0x87, 0x32, 0x58, 0xbf, 0x22, 0x68, 0x7d, 0x2a, 0xe3, 0x3b, 0x65, 0x33, 0x8c, 0x61, 0x27, 0x26, 0x0b, 0xda, 0x45, 0x7d, 0xf4, 0xb2, 0xe5, 0xca, 0x6f, 0x3c, 0x86, 0x86, 0xca, 0xa3, 0x6b, 0xf4, 0xd1, 0xcb, 0x67, 0x83, 0x8f, 0xec, 0xaa, 0x1c, 0xed, 0x12, 0x78, 0x2c, 0x8d, 0xae, 0x06, 0x60, 0x07, 0x1a, 0x4a, 0xd5, 0xad, 0x49, 0xd4, 0x9e, 0xad, 0x82, 0xb6, 0x8b, 0xa0, 0xed, 0x0b, 0x19, 0xb4, 0xab, 0x97, 0x59, 0x3f, 0x35, 0xa0, 0xb3, 0x05, 0xc3, 0x13, 0xc0, 0xa9, 0x20, 0x22, 0x4b, 0x3d, 0x9f, 0x05, 0xd4, 0xd3, 0xb1, 0x21, 0x09, 0x1c, 0x54, 0xc7, 0x76, 0x21, 0xb5, 0x47, 0x2c, 0xa0, 0x8a, 0x77, 0xf2, 0xc4, 0x35, 0xd3, 0xad, 0x39, 0xfc, 0x03, 0x74, 0x82, 0x8c, 0x13, 0x11, 0xb2, 0xd8, 0xdb, 0x48, 0xfe, 0x75, 0xb5, 0x83, 0xcf, 0xb4, 0xb0, 0xc4, 0xb7, 0x83, 0x8d, 0x19, 0xcc, 0x60, 0x2f, 0x66, 0xc2, 0x9b, 0x53, 0x12, 0x89, 0xb9, 0xe7, 0xcf, 0xa9, 0x7f, 0x5d, 0x38, 0x51, 0xdb, 0x72, 0x58, 0xed, 0xe4, 0x2b, 0x26, 0x4e, 0xa4, 0xfe, 0x28, 0x97, 0x97, 0xae, 0x5e, 0xc4, 0x0f, 0xcc, 0xe3, 0x2b, 0x30, 0x05, 0x27, 0x3e, 0x25, 0x93, 0xa8, 0xdc, 0xaf, 0x9d, 0xc7, 0x9e, 0xe5, 0x37, 0x85, 0xb2, 0x74, 0xd2, 0x11, 0x9b, 0x53, 0xf8, 0x3b, 0x68, 0xf3, 0x2c, 0x16, 0xe1, 0xa2, 0xa4, 0xd7, 0x25, 0xdd, 0xa9, 0xa6, 0xbb, 0x4a, 0x57, 0xb2, 0x77, 0xf9, 0xfa, 0x04, 0x3e, 0x05, 0x20, 0x71, 0x50, 0x50, 0x1b, 0x92, 0xfa, 0xe1, 0x23, 0xea, 0x2f, 0x0e, 0x4a, 0x62, 0x8b, 0x14, 0x03, 0x3c, 0x86, 0x16, 0xe3, 0x05, 0xec, 0xa9, 0x84, 0xbd, 0xaa, 0x86, 0x9d, 0xf1, 0x92, 0xd5, 0x64, 0xfa, 0x1b, 0x5f, 0xc2, 0xee, 0x9c, 0x92, 0x80, 0x96, 0xb8, 0xa6, 0xc4, 0xd9, 0xd5, 0xb8, 0x13, 0x29, 0x2b, 0x91, 0xcf, 0xe7, 0x6b, 0xe3, 0xe1, 0x7b, 0x60, 0x2a, 0x8d, 0x97, 0x26, 0xd4, 0x0f, 0xa7, 0x21, 0xe5, 0xb8, 0xfe, 0xdb, 0xdd, 0x4d, 0x0d, 0x59, 0x7f, 0x20, 0x30, 0x8f, 0xd8, 0x22, 0x21, 0x3c, 0x4c, 0xcb, 0x52, 0x3a, 0x07, 0x83, 0x25, 0xb2, 0xf6, 0xdb, 0x83, 0x8f, 0xab, 0x7d, 0x6f, 0xeb, 0xed, 0xb3, 0x64, 0x08, 0xbf, 0xdf, 0xdd, 0xd4, 0xea, 0x3f, 0x22, 0xc3, 0x44, 0xae, 0xc1, 0x12, 0x7c, 0x08, 0xf5, 0x25, 0x89, 0x32, 0xaa, 
0x0b, 0xbe, 0xaf, 0xa9, 0x24, 0x09, 0x73, 0x42, 0x7e, 0x5f, 0x15, 0x87, 0x76, 0x39, 0x8e, 0xc5, 0x9b, 0x81, 0xab, 0x96, 0x5b, 0x07, 0x60, 0x9c, 0x25, 0xb8, 0x01, 0xc6, 0xe8, 0xdc, 0x7c, 0x92, 0xff, 0x7f, 0x3e, 0x32, 0x51, 0xfe, 0x7f, 0x3a, 0x32, 0x0d, 0x8b, 0x83, 0xb9, 0xdd, 0x78, 0xf8, 0x0a, 0xc0, 0x2f, 0x03, 0x7a, 0x7c, 0x03, 0x6f, 0x27, 0xa1, 0x33, 0xf8, 0x59, 0x66, 0xb0, 0x46, 0xb4, 0x12, 0x68, 0x6f, 0xf6, 0xe2, 0x5b, 0xf7, 0xf8, 0x2e, 0xbc, 0x78, 0xa8, 0x31, 0xad, 0x77, 0xa0, 0xb3, 0xd5, 0x46, 0xd6, 0x9f, 0x08, 0x76, 0x37, 0x8a, 0x1f, 0xbf, 0x82, 0x67, 0x45, 0x17, 0x5d, 0xd3, 0x95, 0xba, 0x82, 0x87, 0xad, 0xdc, 0xd3, 0x0e, 0x37, 0xfa, 0xc8, 0x05, 0x6d, 0xfd, 0x82, 0xae, 0xf0, 0x31, 0x74, 0xf4, 0xed, 0xef, 0xa5, 0x64, 0x91, 0x44, 0x34, 0xd0, 0xc7, 0xf5, 0xbe, 0xce, 0x26, 0x7f, 0x20, 0xec, 0x63, 0x4e, 0xfc, 0x3c, 0x7b, 0x12, 0x7d, 0xad, 0x16, 0xbb, 0x6d, 0xad, 0xba, 0x50, 0x22, 0xfc, 0x09, 0xec, 0x67, 0x29, 0xf5, 0xc2, 0x38, 0xa0, 0x09, 0x8d, 0x83, 0x9c, 0xc7, 0x49, 0x1c, 0xb0, 0x45, 0x4c, 0xd3, 0x54, 0xde, 0x46, 0x4d, 0xb7, 0x9b, 0xa5, 0x74, 0x7c, 0xbf, 0xc0, 0x2d, 0xed, 0x56, 0x00, 0xad, 0xb2, 0xd3, 0xf0, 0xb7, 0xf0, 0x54, 0xed, 0x5d, 0xda, 0x45, 0xfd, 0xda, 0x7f, 0x7a, 0x27, 0xf4, 0xbe, 0xfe, 0x82, 0x8c, 0xa6, 0xe1, 0x16, 0x34, 0xcb, 0x87, 0x66, 0xd1, 0x82, 0xeb, 0x4e, 0x8c, 0xff, 0xd5, 0xc9, 0x25, 0x3c, 0x5f, 0x6f, 0x4c, 0x3c, 0x82, 0x86, 0x6a, 0x4c, 0x5d, 0x25, 0x1f, 0x6c, 0xb6, 0x81, 0x7a, 0x92, 0x95, 0xe2, 0x4b, 0x22, 0xfc, 0xf9, 0x56, 0x51, 0x68, 0xf1, 0xd0, 0xfc, 0xeb, 0xb6, 0x87, 0xfe, 0xbe, 0xed, 0xa1, 0x7f, 0x6e, 0x7b, 0xe8, 0x7b, 0x63, 0x39, 0x98, 0x34, 0xe4, 0x53, 0xf7, 0xe6, 0xdf, 0x00, 0x00, 0x00, 0xff, 0xff, 0x4c, 0xcf, 0xaa, 0x0d, 0x5b, 0x08, 0x00, 0x00, }
apache-2.0
gelldur/EventBus
lib/src/dexode/eventbus/Bus.hpp
3468
// // Created by gelldur on 26.11.2019. // #pragma once #include <any> #include <atomic> #include <memory> #include "dexode/eventbus/Listener.hpp" #include "dexode/eventbus/internal/ListenerAttorney.hpp" #include "dexode/eventbus/internal/event_id.hpp" #include "dexode/eventbus/stream/ProtectedEventStream.hpp" namespace dexode::eventbus { class Bus; template <typename Event> using DefaultEventStream = eventbus::stream::ProtectedEventStream<Event>; using CreateStreamCallback = std::unique_ptr<eventbus::stream::EventStream> (*const)(); using PostponeCallback = bool (*const)(Bus& bus, std::any event); template <typename Event> bool postpone(Bus& bus, std::any event); template <typename Event> std::unique_ptr<eventbus::stream::EventStream> createDefaultEventStream() { return std::make_unique<DefaultEventStream<Event>>(); } class PostponeHelper { public: internal::event_id_t eventID = nullptr; std::any event; PostponeCallback postponeCallback = nullptr; // function pointer CreateStreamCallback createStreamCallback = nullptr; // function pointer PostponeHelper(const internal::event_id_t eventId, std::any&& event, PostponeCallback postponeCallback, CreateStreamCallback createStreamCallback) : eventID(eventId) , event(std::forward<std::any>(event)) , postponeCallback(postponeCallback) , createStreamCallback(createStreamCallback) {} template <typename Event> static PostponeHelper create(std::any&& event) { return PostponeHelper{internal::event_id<Event>(), std::forward<std::any>(event), postpone<Event>, createDefaultEventStream<Event>}; } ~PostponeHelper() = default; }; class Bus { template <typename> friend class dexode::eventbus::internal::ListenerAttorney; public: using Listener = eventbus::Listener<dexode::eventbus::Bus>; Bus() = default; virtual ~Bus() = default; virtual std::size_t process() = 0; template <typename Event> bool postpone(Event event) { static_assert(internal::validateEvent<Event>(), "Invalid event"); auto postponeCall = 
PostponeHelper::create<Event>(std::move(event)); return postponeEvent(postponeCall); } protected: virtual bool postponeEvent(PostponeHelper& postponeCall) = 0; virtual eventbus::stream::EventStream* listen(std::uint32_t listenerID, internal::event_id_t eventID, CreateStreamCallback createStreamCallback) = 0; virtual void unlistenAll(std::uint32_t listenerID) = 0; virtual void unlisten(std::uint32_t listenerID, internal::event_id_t eventID) = 0; private: std::atomic<std::uint32_t> _lastID{0}; std::uint32_t newListenerID() { return ++_lastID; // used for generate unique listeners ID's } template <class Event> void listen(const std::uint32_t listenerID, std::function<void(const Event&)>&& callback) { static_assert(internal::validateEvent<Event>(), "Invalid event"); assert(callback && "callback should be valid"); // Check for valid object constexpr auto eventID = internal::event_id<Event>(); auto* eventStream = listen(listenerID, eventID, createDefaultEventStream<Event>); if(eventStream != nullptr) // maybe someone don't want add listener { eventStream->addListener(listenerID, std::forward<std::function<void(const Event&)>>(callback)); } } }; template <typename Event> bool postpone(Bus& bus, std::any event) { return bus.postpone(std::move(std::any_cast<Event>(event))); } } // namespace dexode::eventbus
apache-2.0
PureSolTechnologies/streaming
iterators/src/test/java/com/puresoltechnologies/streaming/CountingStreamIteratorTest.java
1459
package com.puresoltechnologies.streaming; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertTrue; import java.util.Arrays; import java.util.List; import org.junit.jupiter.api.Test; import com.puresoltechnologies.streaming.iterators.CountingStreamIterator; public class CountingStreamIteratorTest { @Test public void test() { List<Integer> numbers = Arrays.asList(1, 2, 3, 4, 5); CountingStreamIterator<Integer> countingIterator = new CountingStreamIterator<Integer>(numbers.iterator()); assertEquals(0, countingIterator.getCount()); assertTrue(countingIterator.hasNext()); assertEquals(1, (int) countingIterator.next()); assertEquals(1, countingIterator.getCount()); assertTrue(countingIterator.hasNext()); assertEquals(2, (int) countingIterator.next()); assertEquals(2, countingIterator.getCount()); assertTrue(countingIterator.hasNext()); assertEquals(3, (int) countingIterator.next()); assertEquals(3, countingIterator.getCount()); assertTrue(countingIterator.hasNext()); assertEquals(4, (int) countingIterator.next()); assertEquals(4, countingIterator.getCount()); assertTrue(countingIterator.hasNext()); assertEquals(5, (int) countingIterator.next()); assertEquals(5, countingIterator.getCount()); assertFalse(countingIterator.hasNext()); assertEquals(5, countingIterator.getCount()); } }
apache-2.0
tbpmig/mig-website
bookswap/migrations/0005_auto_20151226_1536.py
474
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations import bookswap.models class Migration(migrations.Migration): dependencies = [ ('bookswap', '0004_auto_20151226_1535'), ] operations = [ migrations.AlterField( model_name='book', name='term', field=models.ForeignKey(default=bookswap.models.default_term, to='mig_main.AcademicTerm'), ), ]
apache-2.0
maxyo/Flappy-Bird
Assets/Scripts/WallScript.cs
513
using UnityEngine; using System.Collections; public class WallScript : MonoBehaviour { private bool scored = false; // Use this for initialization void Start () { } void OnTriggerEnter2D(Collider2D col) { if (scored) return; GameControllerScript.Controller.UpScore (); Animation anim = GetComponent<Animation> (); scored = true; anim.Play (); } public void ResetWall() { GameControllerScript gcs = GameControllerScript.Controller; gcs.MoveWall (gameObject); scored = false; } }
apache-2.0
guogaowei/lanyuan-notebook
src/main/java/com/lanyuan/util/PasswordHelper.java
1177
package com.lanyuan.util; import org.apache.shiro.crypto.RandomNumberGenerator; import org.apache.shiro.crypto.SecureRandomNumberGenerator; import org.apache.shiro.crypto.hash.SimpleHash; import org.apache.shiro.util.ByteSource; import com.lanyuan.entity.UserFormMap; public class PasswordHelper { private RandomNumberGenerator randomNumberGenerator = new SecureRandomNumberGenerator(); private String algorithmName = "md5"; private int hashIterations = 2; public void encryptPassword(UserFormMap userFormMap) { String salt=randomNumberGenerator.nextBytes().toHex(); userFormMap.put("credentialsSalt", salt); String newPassword = new SimpleHash(algorithmName, userFormMap.get("password"), ByteSource.Util.bytes(userFormMap.get("accountName")+salt), hashIterations).toHex(); userFormMap.put("password", newPassword); } public static void main(String[] args) { PasswordHelper passwordHelper = new PasswordHelper(); UserFormMap userFormMap = new UserFormMap(); userFormMap.put("password","123456"); userFormMap.put("accountName","admin"); passwordHelper.encryptPassword(userFormMap); System.out.println(userFormMap); } }
apache-2.0
java110/MicroCommunity
service-user/src/main/java/com/java110/user/bmo/userLogin/ISaveUserLoginBMO.java
337
package com.java110.user.bmo.userLogin; import com.java110.po.userLogin.UserLoginPo; import org.springframework.http.ResponseEntity; public interface ISaveUserLoginBMO { /** * 添加用户登录 * add by wuxw * @param userLoginPo * @return */ ResponseEntity<String> save(UserLoginPo userLoginPo); }
apache-2.0
greenpeppersoftware/greenpepper3-java
core/src/main/java/com/greenpepper/interpreter/flow/dowith/InterpretRow.java
2828
/* * Copyright (c) 2006 Pyxis Technologies inc. * * This is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 2 of the License, or * (at your option) any later version. * * This software is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA, * or see the FSF site: http://www.fsf.org. */ package com.greenpepper.interpreter.flow.dowith; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import com.greenpepper.Call; import com.greenpepper.Example; import com.greenpepper.Specification; import com.greenpepper.document.Document; import com.greenpepper.document.GreenPepperInterpreterSelector; import com.greenpepper.interpreter.flow.AbstractRow; import com.greenpepper.reflect.Fixture; import com.greenpepper.systemunderdevelopment.DefaultSystemUnderDevelopment; import com.greenpepper.systemunderdevelopment.SystemUnderDevelopment; import com.greenpepper.util.ExampleUtil; import com.greenpepper.util.ExampleWrapper; public class InterpretRow extends AbstractRow { public InterpretRow( Fixture fixture ) { super( fixture ); } public List<Example> actionCells(Example row) { return ExampleUtil.asList(row.at(0,1)); } public void interpret( Specification table ) { final Example row = table.nextExample(); Document document = Document.text( ExampleWrapper.sandbox( row ) ); document.execute(new GreenPepperInterpreterSelector(systemUnderDevelopment())); table.exampleDone( document.getStatistics() ); while (table.hasMoreExamples()) { table.nextExample(); } } private 
SystemUnderDevelopment systemUnderDevelopment() { return new DefaultSystemUnderDevelopment() { public Fixture getFixture(String name, String... params) throws Throwable { List<String> cells = new ArrayList<String>(); cells.add(name); cells.addAll(Arrays.asList(params)); Action action = new Action( cells ); Call call = action.checkAgainst( fixture ); Object target = call.execute(); return fixture.fixtureFor( target ); } public void addImport(String packageName) { } }; } }
apache-2.0
KiminRyu/ExoPlayer
testutils/src/main/java/com/google/android/exoplayer2/testutil/Action.java
21970
/* * Copyright (C) 2016 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.android.exoplayer2.testutil; import android.os.Handler; import android.util.Log; import android.view.Surface; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.ExoPlaybackException; import com.google.android.exoplayer2.ExoPlayer; import com.google.android.exoplayer2.PlaybackParameters; import com.google.android.exoplayer2.Player; import com.google.android.exoplayer2.PlayerMessage; import com.google.android.exoplayer2.PlayerMessage.Target; import com.google.android.exoplayer2.SimpleExoPlayer; import com.google.android.exoplayer2.Timeline; import com.google.android.exoplayer2.source.MediaSource; import com.google.android.exoplayer2.testutil.ActionSchedule.ActionNode; import com.google.android.exoplayer2.testutil.ActionSchedule.PlayerRunnable; import com.google.android.exoplayer2.testutil.ActionSchedule.PlayerTarget; import com.google.android.exoplayer2.trackselection.MappingTrackSelector; import com.google.android.exoplayer2.util.HandlerWrapper; /** * Base class for actions to perform during playback tests. */ public abstract class Action { private final String tag; private final String description; /** * @param tag A tag to use for logging. * @param description A description to be logged when the action is executed. 
*/ public Action(String tag, String description) { this.tag = tag; this.description = description; } /** * Executes the action and schedules the next. * * @param player The player to which the action should be applied. * @param trackSelector The track selector to which the action should be applied. * @param surface The surface to use when applying actions. * @param handler The handler to use to pass to the next action. * @param nextAction The next action to schedule immediately after this action finished. */ public final void doActionAndScheduleNext( SimpleExoPlayer player, MappingTrackSelector trackSelector, Surface surface, HandlerWrapper handler, ActionNode nextAction) { Log.i(tag, description); doActionAndScheduleNextImpl(player, trackSelector, surface, handler, nextAction); } /** * Called by {@link #doActionAndScheduleNext(SimpleExoPlayer, MappingTrackSelector, Surface, * HandlerWrapper, ActionNode)} to perform the action and to schedule the next action node. * * @param player The player to which the action should be applied. * @param trackSelector The track selector to which the action should be applied. * @param surface The surface to use when applying actions. * @param handler The handler to use to pass to the next action. * @param nextAction The next action to schedule immediately after this action finished. */ protected void doActionAndScheduleNextImpl( SimpleExoPlayer player, MappingTrackSelector trackSelector, Surface surface, HandlerWrapper handler, ActionNode nextAction) { doActionImpl(player, trackSelector, surface); if (nextAction != null) { nextAction.schedule(player, trackSelector, surface, handler); } } /** * Called by {@link #doActionAndScheduleNextImpl(SimpleExoPlayer, MappingTrackSelector, Surface, * HandlerWrapper, ActionNode)} to perform the action. * * @param player The player to which the action should be applied. * @param trackSelector The track selector to which the action should be applied. 
* @param surface The surface to use when applying actions. */ protected abstract void doActionImpl( SimpleExoPlayer player, MappingTrackSelector trackSelector, Surface surface); /** * Calls {@link Player#seekTo(long)} or {@link Player#seekTo(int, long)}. */ public static final class Seek extends Action { private final Integer windowIndex; private final long positionMs; /** * Action calls {@link Player#seekTo(long)}. * * @param tag A tag to use for logging. * @param positionMs The seek position. */ public Seek(String tag, long positionMs) { super(tag, "Seek:" + positionMs); this.windowIndex = null; this.positionMs = positionMs; } /** * Action calls {@link Player#seekTo(int, long)}. * * @param tag A tag to use for logging. * @param windowIndex The window to seek to. * @param positionMs The seek position. */ public Seek(String tag, int windowIndex, long positionMs) { super(tag, "Seek:" + positionMs); this.windowIndex = windowIndex; this.positionMs = positionMs; } @Override protected void doActionImpl(SimpleExoPlayer player, MappingTrackSelector trackSelector, Surface surface) { if (windowIndex == null) { player.seekTo(positionMs); } else { player.seekTo(windowIndex, positionMs); } } } /** * Calls {@link Player#stop()} or {@link Player#stop(boolean)}. */ public static final class Stop extends Action { private static final String STOP_ACTION_TAG = "Stop"; private final Boolean reset; /** * Action will call {@link Player#stop()}. * * @param tag A tag to use for logging. */ public Stop(String tag) { super(tag, STOP_ACTION_TAG); this.reset = null; } /** * Action will call {@link Player#stop(boolean)}. * * @param tag A tag to use for logging. * @param reset The value to pass to {@link Player#stop(boolean)}. 
*/ public Stop(String tag, boolean reset) { super(tag, STOP_ACTION_TAG); this.reset = reset; } @Override protected void doActionImpl(SimpleExoPlayer player, MappingTrackSelector trackSelector, Surface surface) { if (reset == null) { player.stop(); } else { player.stop(reset); } } } /** * Calls {@link Player#setPlayWhenReady(boolean)}. */ public static final class SetPlayWhenReady extends Action { private final boolean playWhenReady; /** * @param tag A tag to use for logging. * @param playWhenReady The value to pass. */ public SetPlayWhenReady(String tag, boolean playWhenReady) { super(tag, playWhenReady ? "Play" : "Pause"); this.playWhenReady = playWhenReady; } @Override protected void doActionImpl(SimpleExoPlayer player, MappingTrackSelector trackSelector, Surface surface) { player.setPlayWhenReady(playWhenReady); } } /** * Calls {@link MappingTrackSelector#setRendererDisabled(int, boolean)}. */ public static final class SetRendererDisabled extends Action { private final int rendererIndex; private final boolean disabled; /** * @param tag A tag to use for logging. * @param rendererIndex The index of the renderer. * @param disabled Whether the renderer should be disabled. */ public SetRendererDisabled(String tag, int rendererIndex, boolean disabled) { super(tag, "SetRendererDisabled:" + rendererIndex + ":" + disabled); this.rendererIndex = rendererIndex; this.disabled = disabled; } @Override protected void doActionImpl(SimpleExoPlayer player, MappingTrackSelector trackSelector, Surface surface) { trackSelector.setRendererDisabled(rendererIndex, disabled); } } /** * Calls {@link SimpleExoPlayer#clearVideoSurface()}. */ public static final class ClearVideoSurface extends Action { /** * @param tag A tag to use for logging. 
*/ public ClearVideoSurface(String tag) { super(tag, "ClearVideoSurface"); } @Override protected void doActionImpl(SimpleExoPlayer player, MappingTrackSelector trackSelector, Surface surface) { player.clearVideoSurface(); } } /** * Calls {@link SimpleExoPlayer#setVideoSurface(Surface)}. */ public static final class SetVideoSurface extends Action { /** * @param tag A tag to use for logging. */ public SetVideoSurface(String tag) { super(tag, "SetVideoSurface"); } @Override protected void doActionImpl(SimpleExoPlayer player, MappingTrackSelector trackSelector, Surface surface) { player.setVideoSurface(surface); } } /** * Calls {@link ExoPlayer#prepare(MediaSource)}. */ public static final class PrepareSource extends Action { private final MediaSource mediaSource; private final boolean resetPosition; private final boolean resetState; /** * @param tag A tag to use for logging. */ public PrepareSource(String tag, MediaSource mediaSource) { this(tag, mediaSource, true, true); } /** * @param tag A tag to use for logging. */ public PrepareSource(String tag, MediaSource mediaSource, boolean resetPosition, boolean resetState) { super(tag, "PrepareSource"); this.mediaSource = mediaSource; this.resetPosition = resetPosition; this.resetState = resetState; } @Override protected void doActionImpl(SimpleExoPlayer player, MappingTrackSelector trackSelector, Surface surface) { player.prepare(mediaSource, resetPosition, resetState); } } /** * Calls {@link Player#setRepeatMode(int)}. */ public static final class SetRepeatMode extends Action { private final @Player.RepeatMode int repeatMode; /** * @param tag A tag to use for logging. 
*/ public SetRepeatMode(String tag, @Player.RepeatMode int repeatMode) { super(tag, "SetRepeatMode:" + repeatMode); this.repeatMode = repeatMode; } @Override protected void doActionImpl(SimpleExoPlayer player, MappingTrackSelector trackSelector, Surface surface) { player.setRepeatMode(repeatMode); } } /** * Calls {@link Player#setShuffleModeEnabled(boolean)}. */ public static final class SetShuffleModeEnabled extends Action { private final boolean shuffleModeEnabled; /** * @param tag A tag to use for logging. */ public SetShuffleModeEnabled(String tag, boolean shuffleModeEnabled) { super(tag, "SetShuffleModeEnabled:" + shuffleModeEnabled); this.shuffleModeEnabled = shuffleModeEnabled; } @Override protected void doActionImpl(SimpleExoPlayer player, MappingTrackSelector trackSelector, Surface surface) { player.setShuffleModeEnabled(shuffleModeEnabled); } } /** Calls {@link ExoPlayer#createMessage(Target)} and {@link PlayerMessage#send()}. */ public static final class SendMessages extends Action { private final Target target; private final int windowIndex; private final long positionMs; private final boolean deleteAfterDelivery; /** * @param tag A tag to use for logging. * @param target A message target. * @param positionMs The position at which the message should be sent, in milliseconds. */ public SendMessages(String tag, Target target, long positionMs) { this( tag, target, /* windowIndex= */ C.INDEX_UNSET, positionMs, /* deleteAfterDelivery= */ true); } /** * @param tag A tag to use for logging. * @param target A message target. * @param windowIndex The window index at which the message should be sent, or {@link * C#INDEX_UNSET} for the current window. * @param positionMs The position at which the message should be sent, in milliseconds. * @param deleteAfterDelivery Whether the message will be deleted after delivery. 
*/ public SendMessages( String tag, Target target, int windowIndex, long positionMs, boolean deleteAfterDelivery) { super(tag, "SendMessages"); this.target = target; this.windowIndex = windowIndex; this.positionMs = positionMs; this.deleteAfterDelivery = deleteAfterDelivery; } @Override protected void doActionImpl( final SimpleExoPlayer player, MappingTrackSelector trackSelector, Surface surface) { if (target instanceof PlayerTarget) { ((PlayerTarget) target).setPlayer(player); } PlayerMessage message = player.createMessage(target); if (windowIndex != C.INDEX_UNSET) { message.setPosition(windowIndex, positionMs); } else { message.setPosition(positionMs); } message.setHandler(new Handler()); message.setDeleteAfterDelivery(deleteAfterDelivery); message.send(); } } /** * Calls {@link Player#setPlaybackParameters(PlaybackParameters)}. */ public static final class SetPlaybackParameters extends Action { private final PlaybackParameters playbackParameters; /** * @param tag A tag to use for logging. * @param playbackParameters The playback parameters. */ public SetPlaybackParameters(String tag, PlaybackParameters playbackParameters) { super(tag, "SetPlaybackParameters:" + playbackParameters); this.playbackParameters = playbackParameters; } @Override protected void doActionImpl(SimpleExoPlayer player, MappingTrackSelector trackSelector, Surface surface) { player.setPlaybackParameters(playbackParameters); } } /** * Schedules a play action to be executed, waits until the player reaches the specified position, * and pauses the player again. */ public static final class PlayUntilPosition extends Action { private final int windowIndex; private final long positionMs; /** * @param tag A tag to use for logging. * @param windowIndex The window index at which the player should be paused again. * @param positionMs The position in that window at which the player should be paused again. 
*/ public PlayUntilPosition(String tag, int windowIndex, long positionMs) { super(tag, "PlayUntilPosition:" + windowIndex + "," + positionMs); this.windowIndex = windowIndex; this.positionMs = positionMs; } @Override protected void doActionAndScheduleNextImpl( final SimpleExoPlayer player, final MappingTrackSelector trackSelector, final Surface surface, final HandlerWrapper handler, final ActionNode nextAction) { // Schedule one message on the playback thread to pause the player immediately. player .createMessage( new Target() { @Override public void handleMessage(int messageType, Object payload) throws ExoPlaybackException { player.setPlayWhenReady(/* playWhenReady= */ false); } }) .setPosition(windowIndex, positionMs) .send(); // Schedule another message on this test thread to continue action schedule. player .createMessage( new Target() { @Override public void handleMessage(int messageType, Object payload) throws ExoPlaybackException { nextAction.schedule(player, trackSelector, surface, handler); } }) .setPosition(windowIndex, positionMs) .setHandler(new Handler()) .send(); player.setPlayWhenReady(true); } @Override protected void doActionImpl( SimpleExoPlayer player, MappingTrackSelector trackSelector, Surface surface) { // Not triggered. } } /** * Waits for {@link Player.EventListener#onTimelineChanged(Timeline, Object, int)}. */ public static final class WaitForTimelineChanged extends Action { private final Timeline expectedTimeline; /** * @param tag A tag to use for logging. 
*/ public WaitForTimelineChanged(String tag, Timeline expectedTimeline) { super(tag, "WaitForTimelineChanged"); this.expectedTimeline = expectedTimeline; } @Override protected void doActionAndScheduleNextImpl( final SimpleExoPlayer player, final MappingTrackSelector trackSelector, final Surface surface, final HandlerWrapper handler, final ActionNode nextAction) { if (nextAction == null) { return; } Player.EventListener listener = new Player.DefaultEventListener() { @Override public void onTimelineChanged(Timeline timeline, Object manifest, @Player.TimelineChangeReason int reason) { if (timeline.equals(expectedTimeline)) { player.removeListener(this); nextAction.schedule(player, trackSelector, surface, handler); } } }; player.addListener(listener); if (player.getCurrentTimeline().equals(expectedTimeline)) { player.removeListener(listener); nextAction.schedule(player, trackSelector, surface, handler); } } @Override protected void doActionImpl(SimpleExoPlayer player, MappingTrackSelector trackSelector, Surface surface) { // Not triggered. } } /** * Waits for {@link Player.EventListener#onPositionDiscontinuity(int)}. */ public static final class WaitForPositionDiscontinuity extends Action { /** * @param tag A tag to use for logging. */ public WaitForPositionDiscontinuity(String tag) { super(tag, "WaitForPositionDiscontinuity"); } @Override protected void doActionAndScheduleNextImpl( final SimpleExoPlayer player, final MappingTrackSelector trackSelector, final Surface surface, final HandlerWrapper handler, final ActionNode nextAction) { if (nextAction == null) { return; } player.addListener(new Player.DefaultEventListener() { @Override public void onPositionDiscontinuity(@Player.DiscontinuityReason int reason) { player.removeListener(this); nextAction.schedule(player, trackSelector, surface, handler); } }); } @Override protected void doActionImpl(SimpleExoPlayer player, MappingTrackSelector trackSelector, Surface surface) { // Not triggered. 
} } /** * Waits for a specified playback state, returning either immediately or after a call to * {@link Player.EventListener#onPlayerStateChanged(boolean, int)}. */ public static final class WaitForPlaybackState extends Action { private final int targetPlaybackState; /** * @param tag A tag to use for logging. */ public WaitForPlaybackState(String tag, int targetPlaybackState) { super(tag, "WaitForPlaybackState"); this.targetPlaybackState = targetPlaybackState; } @Override protected void doActionAndScheduleNextImpl( final SimpleExoPlayer player, final MappingTrackSelector trackSelector, final Surface surface, final HandlerWrapper handler, final ActionNode nextAction) { if (nextAction == null) { return; } if (targetPlaybackState == player.getPlaybackState()) { nextAction.schedule(player, trackSelector, surface, handler); } else { player.addListener(new Player.DefaultEventListener() { @Override public void onPlayerStateChanged(boolean playWhenReady, int playbackState) { if (targetPlaybackState == playbackState) { player.removeListener(this); nextAction.schedule(player, trackSelector, surface, handler); } } }); } } @Override protected void doActionImpl(SimpleExoPlayer player, MappingTrackSelector trackSelector, Surface surface) { // Not triggered. } } /** * Waits for {@link Player.EventListener#onSeekProcessed()}. */ public static final class WaitForSeekProcessed extends Action { /** * @param tag A tag to use for logging. 
*/ public WaitForSeekProcessed(String tag) { super(tag, "WaitForSeekProcessed"); } @Override protected void doActionAndScheduleNextImpl( final SimpleExoPlayer player, final MappingTrackSelector trackSelector, final Surface surface, final HandlerWrapper handler, final ActionNode nextAction) { if (nextAction == null) { return; } player.addListener(new Player.DefaultEventListener() { @Override public void onSeekProcessed() { player.removeListener(this); nextAction.schedule(player, trackSelector, surface, handler); } }); } @Override protected void doActionImpl(SimpleExoPlayer player, MappingTrackSelector trackSelector, Surface surface) { // Not triggered. } } /** * Calls {@link Runnable#run()}. */ public static final class ExecuteRunnable extends Action { private final Runnable runnable; /** * @param tag A tag to use for logging. */ public ExecuteRunnable(String tag, Runnable runnable) { super(tag, "ExecuteRunnable"); this.runnable = runnable; } @Override protected void doActionImpl(SimpleExoPlayer player, MappingTrackSelector trackSelector, Surface surface) { if (runnable instanceof PlayerRunnable) { ((PlayerRunnable) runnable).setPlayer(player); } runnable.run(); } } }
apache-2.0
sintefneodroid/droid
Runtime/Structs/Space/Sample/SampleSpace1.cs
1596
using System; using droid.Runtime.Enums; using droid.Runtime.Interfaces; using droid.Runtime.Sampling; using UnityEngine; namespace droid.Runtime.Structs.Space.Sample { /// <inheritdoc cref="ISpace" /> /// <summary> /// </summary> [Serializable] public struct SampleSpace1 : ISamplable { #region Fields [Header("Sampling", order = 103)] [SerializeField] internal Space1 _space; /// <summary> /// /// </summary> [SerializeField] internal DistributionSampler _distribution_sampler; #endregion /// <summary> /// /// </summary> public DistributionSampler DistributionSampler { get { return this._distribution_sampler; } set { this._distribution_sampler = value; } } public SampleSpace1(string unused = null) { this._space = Space1.ZeroOne; this._distribution_sampler = new DistributionSampler(); } /// <summary> /// /// </summary> /// <returns></returns> public dynamic Sample() { switch (this._space.Normalised) { case Normalisation.None_: return this._space.Round(this.DistributionSampler.Range(this._space.Min, this._space.Max)); case Normalisation.Zero_one_: return this.DistributionSampler.Range(0, 1); case Normalisation.Minus_one_one_: return this.DistributionSampler.Range(-1, 1); default: throw new ArgumentOutOfRangeException(); } } /// <summary> /// /// </summary> public ISpace Space { get { return this._space; } set { this._space = (Space1)value; } } } }
apache-2.0
bartvde/sdk
__tests__/components/layer-list.test.js
12535
/* global it, describe, expect, beforeEach, spyOn */ import React from 'react'; import {mount, configure} from 'enzyme'; import Adapter from 'enzyme-adapter-react-16'; import {createStore, combineReducers} from 'redux'; import {Provider} from 'react-redux'; import MapReducer from '../../src/reducers/map'; import * as MapActions from '../../src/actions/map'; import {isLayerVisible} from '../../src/util'; import SdkLayerList from '../../src/components/layer-list'; import SdkLayerListItem from '../../src/components/layer-list-item'; import {layerListItemTarget} from '../../src/components/layer-list-item'; configure({adapter: new Adapter()}); class TestLayerListItem extends SdkLayerListItem { render() { return ( <div> <button className="btn-up" onClick={() => { this.moveLayerUp(); }}></button> <button className="btn-down" onClick={() => { this.moveLayerDown(); }}></button> <button className="btn-remove" onClick={() => { this.removeLayer(); }}></button> </div> ); } } // uses ol instead of ul class TestList extends React.Component { render() { return ( <ol> {this.props.children} </ol> ); } } class TestListGroup extends React.Component { render() { const children = []; let text; if (this.props.group.collapsed) { text = this.props.group.name; } else { text = (<b>{this.props.group.name}</b>); } for (let i = 0, ii = this.props.childLayers.length; i < ii; i++) { children.push( <this.props.layerClass exclusive={this.props.group.exclusive} key={i} groupLayers={this.props.childLayers} layers={this.props.layers} layer={this.props.childLayers[i]} groupId={this.props.groupId} /> ); } return (<li>{text}<ol>{children}</ol></li>); } } describe('test the LayerList component', () => { let store = null; // this is the same setup as used in legends but instead // of listing legends this lists the layers in a list. 
beforeEach(() => { store = createStore(combineReducers({ map: MapReducer, }), { map: { version: 8, sources: { osm: { type: 'raster', tileSize: 256, tiles: [ 'https://a.tile.openstreetmap.org/{z}/{x}/{y}.png', 'https://b.tile.openstreetmap.org/{z}/{x}/{y}.png', 'https://c.tile.openstreetmap.org/{z}/{x}/{y}.png', ], }, wms: { type: 'raster', tiles: [ '/wms?SERVICE=WMS&LAYERS=x,y,z&FORMAT=image/png&EXCEPTIONS=image/png&BBOX={bbox-epsg-3857}', ], }, other: { type: 'geojson', data: { }, }, }, layers: [ { id: 'osm', source: 'osm', }, { id: 'wms-test', source: 'wms', }, { id: 'html-test', source: 'other', }, { id: 'href-test', source: 'other', }, { id: 'image-test', ref: 'href-test', }, { id: 'null-test', source: 'other', }, { id: 'bad-type-test', source: 'other', metadata: { 'bnd:title': 'custom-layer-title', }, }, ], }, }); }); it('should render the layer list without error', () => { mount(<Provider store={store}><SdkLayerList /></Provider>); }); it('should allow for custom className', () => { const wrapper = mount(<Provider store={store}><SdkLayerList enableDD={false} className='foo' /></Provider>); expect(wrapper.html()).toMatchSnapshot(); }); function getCustomLayerList() { return mount(<Provider store={store}><SdkLayerList enableDD={false} layerClass={TestLayerListItem} /></Provider>); } it('should render with a custom layer list class', () => { getCustomLayerList(); }); it('should check that the custom title was rendered', () => { const wrapper = mount(<Provider store={store}><SdkLayerList /></Provider>); expect(wrapper.html().indexOf('custom-layer-title')).toBeGreaterThan(-1); }); it('should remove a layer', () => { const n_layers = store.getState().map.layers.length; const wrapper = getCustomLayerList(); wrapper.find('.btn-remove').first().simulate('click'); expect(store.getState().map.layers.length).toBe(n_layers - 1); }); it('should move a layer up', () => { const layers = store.getState().map.layers; const wrapper = getCustomLayerList(); 
wrapper.find('.btn-up').last().simulate('click'); let new_layers = store.getState().map.layers; expect(new_layers[0].id).toBe(layers[1].id); // try to move a layer up that's already at the top wrapper.find('.btn-up').first().simulate('click'); const last_layer = layers.length - 1; new_layers = store.getState().map.layers; expect(new_layers[last_layer].id).toBe(layers[last_layer].id); }); it('should move a layer down', () => { const layers = store.getState().map.layers; const n_layers = layers.length; const wrapper = getCustomLayerList(); wrapper.find('.btn-down').first().simulate('click'); let new_layers = store.getState().map.layers; expect(new_layers[n_layers - 1].id).toBe(layers[n_layers - 2].id); // try to move a layer down that's already at the bottom wrapper.find('.btn-down').last().simulate('click'); new_layers = store.getState().map.layers; expect(new_layers[0].id).toBe(layers[0].id); }); it('should toggle layer visibility', () => { const wrapper = mount(<Provider store={store}><SdkLayerList /></Provider>); expect(isLayerVisible(store.getState().map.layers[0])).toBe(true); const checkbox = wrapper.find('input').last(); checkbox.simulate('change', {target: {checked: false}}); expect(isLayerVisible(store.getState().map.layers[0])).toBe(false); checkbox.simulate('change', {target: {checked: true}}); expect(isLayerVisible(store.getState().map.layers[0])).toBe(true); }); it('should handle basic grouping', () => { store.dispatch(MapActions.updateMetadata({ 'mapbox:groups': { 'background': { name: 'Base Maps', }, 'overlays': { name: 'Overlays', }, } })); store.dispatch(MapActions.updateLayer('osm', { metadata: { 'mapbox:group': 'background' } })); store.dispatch(MapActions.updateLayer('wms-test', { metadata: { 'mapbox:group': 'overlays' } })); store.dispatch(MapActions.updateLayer('html-test', { metadata: { 'mapbox:group': 'overlays' } })); const wrapper = mount(<Provider store={store}><SdkLayerList enableDD={false} /></Provider>); 
expect(wrapper.html()).toMatchSnapshot(); }); it('should handle hiding layers', () => { store.dispatch(MapActions.updateLayer('osm', { metadata: { 'bnd:hide-layerlist': true } })); const wrapper = mount(<Provider store={store}><SdkLayerList enableDD={false} /></Provider>); expect(wrapper.html()).toMatchSnapshot(); }); it('should handle a custom list class', () => { const wrapper = mount(<Provider store={store}><SdkLayerList enableDD={false} listClass={TestList} /></Provider>); expect(wrapper.html()).toMatchSnapshot(); }); it('should handle a custom list and listgroup class', () => { store.dispatch(MapActions.updateMetadata({ 'mapbox:groups': { 'background': { name: 'Base Maps', collapsed: true }, 'overlays': { name: 'Overlays', collapsed: false }, } })); store.dispatch(MapActions.updateLayer('osm', { metadata: { 'mapbox:group': 'background' } })); store.dispatch(MapActions.updateLayer('wms-test', { metadata: { 'mapbox:group': 'overlays' } })); store.dispatch(MapActions.updateLayer('html-test', { metadata: { 'mapbox:group': 'overlays' } })); const wrapper = mount(<Provider store={store}><SdkLayerList enableDD={false} groupClass={TestListGroup} listClass={TestList} /></Provider>); expect(wrapper.html()).toMatchSnapshot(); }); it('should handle hiding layers in a group', () => { store.dispatch(MapActions.updateMetadata({ 'mapbox:groups': { 'background': { name: 'Base Maps', }, 'overlays': { name: 'Overlays', }, } })); store.dispatch(MapActions.updateLayer('osm', { metadata: { 'mapbox:group': 'background' } })); store.dispatch(MapActions.updateLayer('wms-test', { metadata: { 'mapbox:group': 'overlays', 'bnd:hide-layerlist': true } })); store.dispatch(MapActions.updateLayer('html-test', { metadata: { 'mapbox:group': 'overlays' } })); const wrapper = mount(<Provider store={store}><SdkLayerList enableDD={false} /></Provider>); expect(wrapper.html()).toMatchSnapshot(); }); }); describe('test drag and drop', () => { let drop, layers, props, monitor, item; beforeEach(() => { 
drop = layerListItemTarget.drop; layers = [{ id: 'foo', index: 0, }, { id: 'bar', index: 1, }, { id: 'baz', index: 2, }]; const dispatch = function() {}; props = {index: 2, layers, dispatch}; item = { index: 1, layer: layers[1], }; monitor = { getItem() { return item; } }; }); it('should dispatch on drop', () => { spyOn(props, 'dispatch'); drop(props, monitor); expect(props.dispatch).toHaveBeenCalledWith({'layerId': 'bar', 'targetId': 'baz', 'type': 'MAP_ORDER_LAYER'}); }); it('should not dispatch if source and target are the same', () => { props.index = 1; spyOn(props, 'dispatch'); drop(props, monitor); expect(props.dispatch).not.toHaveBeenCalled(); }); it('should not dispatch if no source item layer', () => { delete item.layer; spyOn(props, 'dispatch'); drop(props, monitor); expect(props.dispatch).not.toHaveBeenCalled(); }); it('should not dispatch if hover index out of bounds', () => { props.index = 10; spyOn(props, 'dispatch'); drop(props, monitor); expect(props.dispatch).not.toHaveBeenCalled(); }); }); describe('test the exclusive grouping of the LayerList component', () => { let store = null; beforeEach(() => { store = createStore(combineReducers({ map: MapReducer, }), { map: { version: 8, sources: {}, metadata: { 'mapbox:groups': { 'baselayers': { name: 'Base Layers', exclusive: true, }, } }, layers: [ { id: 'osm', source: 'osm', metadata: { 'mapbox:group': 'baselayers', }, }, { id: 'carto', source: 'carto', metadata: { 'mapbox:group': 'baselayers', }, layout: { visibility: 'none', }, }, { id: 'esri', source: 'esri', metadata: { 'mapbox:group': 'baselayers', }, layout: { visibility: 'none', }, }, ], }, }); }); it('should handle exclusive groups', () => { const wrapper = mount(<Provider store={store}><SdkLayerList enableDD={false} /></Provider>); expect(wrapper.html()).toMatchSnapshot(); }); it('should toggle layer visibility', () => { const wrapper = mount(<Provider store={store}><SdkLayerList /></Provider>); 
expect(isLayerVisible(store.getState().map.layers[0])).toBe(true); const checkbox = wrapper.find('input').first(); checkbox.simulate('change', {target: {checked: true}}); expect(isLayerVisible(store.getState().map.layers[0])).toBe(false); expect(isLayerVisible(store.getState().map.layers[1])).toBe(false); expect(isLayerVisible(store.getState().map.layers[2])).toBe(true); }); });
apache-2.0
numansiddique/contrail-controller
src/vnsw/agent/oper/agent_path.cc
39333
/* * Copyright (c) 2013 Juniper Networks, Inc. All rights reserved. */ #include <boost/uuid/uuid_io.hpp> #include <boost/lexical_cast.hpp> #include <boost/foreach.hpp> #include <cmn/agent_cmn.h> #include <route/route.h> #include <vnc_cfg_types.h> #include <agent_types.h> #include <filter/acl.h> #include <oper/peer.h> #include <oper/vrf.h> #include <oper/interface_common.h> #include <oper/nexthop.h> #include <oper/tunnel_nh.h> #include <oper/vn.h> #include <oper/mirror_table.h> #include <oper/vxlan.h> #include <oper/mpls.h> #include <oper/route_common.h> #include <oper/agent_sandesh.h> using namespace std; using namespace boost::asio; AgentPath::AgentPath(const Peer *peer, AgentRoute *rt): Path(), peer_(peer), nh_(NULL), label_(MplsTable::kInvalidLabel), vxlan_id_(VxLanTable::kInvalidvxlan_id), dest_vn_name_(""), sync_(false), force_policy_(false), sg_list_(), tunnel_dest_(0), tunnel_bmap_(TunnelType::AllType()), tunnel_type_(TunnelType::ComputeType(TunnelType::AllType())), vrf_name_(""), gw_ip_(0), unresolved_(true), is_stale_(false), is_subnet_discard_(false), dependant_rt_(rt), path_preference_(), local_ecmp_mpls_label_(rt), composite_nh_key_(NULL), subnet_gw_ip_(), arp_mac_(), arp_interface_(NULL), arp_valid_(false), ecmp_suppressed_(false) { } AgentPath::~AgentPath() { clear_sg_list(); } uint32_t AgentPath::GetTunnelBmap() const { TunnelType::Type type = TunnelType::ComputeType(tunnel_bmap_); if ((type == (1 << TunnelType::VXLAN)) && (vxlan_id_ != 0)) { return (1 << TunnelType::VXLAN); } else { return tunnel_bmap_; } } uint32_t AgentPath::GetActiveLabel() const { if (tunnel_type_ == TunnelType::VXLAN) { return vxlan_id_; } else { return label_; } } NextHop* AgentPath::nexthop() const { return nh_.get(); } const NextHop* AgentPath::ComputeNextHop(Agent *agent) const { if (nh_) { return nh_.get(); } if (unresolved_ == true) { DiscardNH key; return static_cast<NextHop *> (agent->nexthop_table()->FindActiveEntry(&key)); } //Indirect route's path, get direct route's 
NH const NextHop *nh = dependant_rt_.get()->GetActiveNextHop(); if (nh == NULL) { assert(0); } return nh; } bool AgentPath::ChangeNH(Agent *agent, NextHop *nh) { // If NH is not found, point route to discard NH bool ret = false; if (nh == NULL) { nh = agent->nexthop_table()->discard_nh(); } if (nh_ != nh) { nh_ = nh; if (nh && nh->GetType() == NextHop::TUNNEL) { TunnelNH *tunnel_nh = static_cast<TunnelNH *>(nh); tunnel_dest_ = *tunnel_nh->GetDip(); } ret = true; } if (peer_ && (peer_->GetType() == Peer::MULTICAST_PEER) && (label_ != MplsTable::kInvalidLabel)) { MplsLabelKey key(MplsLabel::MCAST_NH, label_); MplsLabel *mpls = static_cast<MplsLabel *>(agent->mpls_table()-> FindActiveEntry(&key)); if (agent->mpls_table()->ChangeNH(mpls, nh)) ret = true; if (mpls) { //Send notify of change mpls->get_table_partition()->Notify(mpls); } } return ret; } bool AgentPath::RebakeAllTunnelNHinCompositeNH(const AgentRoute *sync_route) { if (nh_->GetType() != NextHop::COMPOSITE){ return false; } Agent *agent = static_cast<AgentRouteTable *>(sync_route->get_table())->agent(); CompositeNH *cnh = static_cast<CompositeNH *>(nh_.get()); //Compute new tunnel type TunnelType::Type new_tunnel_type; //Only MPLS types are supported for multicast if ((sync_route->is_multicast()) && (peer_->GetType() == Peer::MULTICAST_FABRIC_TREE_BUILDER)) { new_tunnel_type = TunnelType::ComputeType(TunnelType::MplsType()); if (new_tunnel_type == TunnelType::VXLAN) { new_tunnel_type = TunnelType::MPLS_GRE; } } else { new_tunnel_type = TunnelType::ComputeType(tunnel_bmap_); } CompositeNH *new_composite_nh = NULL; new_composite_nh = cnh->ChangeTunnelType(agent, new_tunnel_type); if (ChangeNH(agent, new_composite_nh)) { //Update composite NH key list to reflect new type if (composite_nh_key_) composite_nh_key_->ChangeTunnelType(new_tunnel_type); return true; } return false; } bool AgentPath::UpdateNHPolicy(Agent *agent) { bool ret = false; if (nh_.get() == NULL || nh_->GetType() != NextHop::INTERFACE) { return 
ret; } const InterfaceNH *intf_nh = static_cast<const InterfaceNH *>(nh_.get()); if (intf_nh->GetInterface()->type() != Interface::VM_INTERFACE) { return ret; } const VmInterface *vm_port = static_cast<const VmInterface *>(intf_nh->GetInterface()); bool policy = vm_port->policy_enabled(); if (force_policy_) { policy = true; } NextHop *nh = NULL; if (intf_nh->PolicyEnabled() != policy) { //Make path point to policy enabled interface InterfaceNHKey key(new VmInterfaceKey(AgentKey::ADD_DEL_CHANGE, vm_port->GetUuid(), ""), policy, intf_nh->GetFlags()); nh = static_cast<NextHop *> (agent->nexthop_table()->FindActiveEntry(&key)); // If NH is not found, point route to discard NH if (nh == NULL) { LOG(DEBUG, "Interface NH for <" << boost::lexical_cast<std::string>(vm_port->GetUuid()) << " : policy = " << policy); nh = agent->nexthop_table()->discard_nh(); } if (ChangeNH(agent, nh) == true) { ret = true; } } return ret; } bool AgentPath::UpdateTunnelType(Agent *agent, const AgentRoute *sync_route) { //Return if there is no change in tunnel type for non Composite NH. //For composite NH component needs to be traversed. 
if ((tunnel_type_ == TunnelType::ComputeType(tunnel_bmap_)) && (nh_.get() && nh_.get()->GetType() != NextHop::COMPOSITE)) { return false; } tunnel_type_ = TunnelType::ComputeType(tunnel_bmap_); if (tunnel_type_ == TunnelType::VXLAN && vxlan_id_ == VxLanTable::kInvalidvxlan_id) { tunnel_type_ = TunnelType::ComputeType(TunnelType::MplsType()); } if (nh_.get() && nh_->GetType() == NextHop::TUNNEL) { DBRequest nh_req(DBRequest::DB_ENTRY_ADD_CHANGE); const TunnelNH *tunnel_nh = static_cast<const TunnelNH*>(nh_.get()); TunnelNHKey *tnh_key = new TunnelNHKey(agent->fabric_vrf_name(), *(tunnel_nh->GetSip()), tunnel_dest_, false, tunnel_type_); nh_req.key.reset(tnh_key); nh_req.data.reset(new TunnelNHData()); agent->nexthop_table()->Process(nh_req); TunnelNHKey nh_key(agent->fabric_vrf_name(), *(tunnel_nh->GetSip()), tunnel_dest_, false, tunnel_type_); NextHop *nh = static_cast<NextHop *> (agent->nexthop_table()->FindActiveEntry(&nh_key)); ChangeNH(agent, nh); } if (nh_.get() && nh_->GetType() == NextHop::COMPOSITE) { RebakeAllTunnelNHinCompositeNH(sync_route); } return true; } bool AgentPath::Sync(AgentRoute *sync_route) { bool ret = false; bool unresolved = false; Agent *agent = static_cast<AgentRouteTable *> (sync_route->get_table())->agent(); // Check if there is change in policy on the interface // If yes update the path to point to policy enabled NH if (UpdateNHPolicy(agent)) { ret = true; } //Handle tunnel type change if (UpdateTunnelType(agent, sync_route)) { ret = true; } //Check if there was a change in local ecmp composite nexthop if (nh_ && nh_->GetType() == NextHop::COMPOSITE && composite_nh_key_.get() != NULL && local_ecmp_mpls_label_.get() != NULL) { boost::scoped_ptr<CompositeNHKey> composite_nh_key(composite_nh_key_->Clone()); if (ReorderCompositeNH(agent, composite_nh_key.get())) { if (ChangeCompositeNH(agent, composite_nh_key.get())) { ret = true; } } } if (nh_ && nh_->GetType() == NextHop::ARP) { if (CopyArpData()) { ret = true; } } if (vrf_name_ == 
Agent::NullString()) { return ret; } InetUnicastAgentRouteTable *table = NULL; InetUnicastRouteEntry *rt = NULL; table = agent->vrf_table()->GetInet4UnicastRouteTable(vrf_name_); if (table) rt = table->FindRoute(gw_ip_); if (rt == sync_route) { rt = NULL; } if (rt == NULL || rt->plen() == 0) { unresolved = true; } else if (rt->GetActiveNextHop()->GetType() == NextHop::RESOLVE) { const ResolveNH *nh = static_cast<const ResolveNH *>(rt->GetActiveNextHop()); table->AddArpReq(vrf_name_, gw_ip_, nh->interface()->vrf()->GetName(), nh->interface(), nh->PolicyEnabled(), dest_vn_name_, sg_list_); unresolved = true; } else { unresolved = false; } if (unresolved_ != unresolved) { unresolved_ = unresolved; ret = true; } // Reset to new gateway route, no nexthop for indirect route if (dependant_rt_.get() != rt) { dependant_rt_.reset(rt); ret = true; } return ret; } bool AgentPath::IsLess(const AgentPath &r_path) const { if (peer()->GetType() == r_path.peer()->GetType()) { if (path_preference() != r_path.path_preference()) { //If right path has lesser preference, then //it should be after the current entry //Hence the reverse check return (r_path.path_preference() < path_preference()); } } return peer()->IsLess(r_path.peer()); } void AgentPath::set_nexthop(NextHop *nh) { nh_ = nh; } bool AgentPath::CopyArpData() { bool ret = false; if (nh_ && nh_->GetType() == NextHop::ARP) { const ArpNH *arp_nh = static_cast<const ArpNH *>(nh_.get()); if (arp_mac() != arp_nh->GetMac()) { set_arp_mac(arp_nh->GetMac()); ret = true; } if (arp_interface() != arp_nh->GetInterface()) { set_arp_interface(arp_nh->GetInterface()); ret = true; } if (arp_valid() != arp_nh->IsValid()) { set_arp_valid(arp_nh->IsValid()); ret = true; } } return ret; } EvpnDerivedPath::EvpnDerivedPath(const EvpnPeer *evpn_peer, const IpAddress &ip_addr, uint32_t ethernet_tag, const std::string &parent) : AgentPath(evpn_peer, NULL), ip_addr_(ip_addr), ethernet_tag_(ethernet_tag), parent_(parent){ } bool 
EvpnDerivedPath::IsLess(const AgentPath &r_path) const { const EvpnDerivedPath *r_evpn_path = dynamic_cast<const EvpnDerivedPath *>(&r_path); if (r_evpn_path != NULL) { if (r_evpn_path->ip_addr() != ip_addr_) { return (ip_addr_ < r_evpn_path->ip_addr()); } } return peer()->IsLess(r_path.peer()); } const NextHop *EvpnDerivedPath::ComputeNextHop(Agent *agent) const { return nexthop(); } EvpnDerivedPathData::EvpnDerivedPathData(const EvpnRouteEntry *evpn_rt) : AgentRouteData(false), ethernet_tag_(evpn_rt->ethernet_tag()), ip_addr_(evpn_rt->ip_addr()), reference_path_(evpn_rt->GetActivePath()), ecmp_suppressed_(false) { // For debuging add peer of active path in parent as well std::stringstream s; s << evpn_rt->ToString(); s << " "; if (reference_path_ && reference_path_->peer()) s << reference_path_->peer()->GetName(); parent_ = s.str(); } AgentPath *EvpnDerivedPathData::CreateAgentPath(const Peer *peer, AgentRoute *rt) const { const EvpnPeer *evpn_peer = dynamic_cast<const EvpnPeer *>(peer); assert(evpn_peer != NULL); return (new EvpnDerivedPath(evpn_peer, ip_addr_, ethernet_tag_, parent_)); } bool EvpnDerivedPathData::AddChangePath(Agent *agent, AgentPath *path, const AgentRoute *rt) { bool ret = false; EvpnDerivedPath *evpn_path = dynamic_cast<EvpnDerivedPath *>(path); assert(evpn_path != NULL); evpn_path->set_tunnel_dest(reference_path_->tunnel_dest()); uint32_t label = reference_path_->label(); if (evpn_path->label() != label) { evpn_path->set_label(label); ret = true; } uint32_t vxlan_id = reference_path_->vxlan_id(); if (evpn_path->vxlan_id() != vxlan_id) { evpn_path->set_vxlan_id(vxlan_id); ret = true; } uint32_t tunnel_bmap = reference_path_->tunnel_bmap(); if (evpn_path->tunnel_bmap() != tunnel_bmap) { evpn_path->set_tunnel_bmap(tunnel_bmap); ret = true; } TunnelType::Type tunnel_type = reference_path_->tunnel_type(); if (evpn_path->tunnel_type() != tunnel_type) { evpn_path->set_tunnel_type(tunnel_type); ret = true; } PathPreference pref = 
reference_path_->path_preference(); if (evpn_path->path_preference() != pref) { // Take path preference from parent path evpn_path->set_path_preference(pref); ret = true; } if (evpn_path->nexthop() != reference_path_->nexthop()) { evpn_path->set_nexthop(reference_path_->nexthop()); ret = true; } const SecurityGroupList &sg_list = reference_path_->sg_list(); if (evpn_path->sg_list() != sg_list) { evpn_path->set_sg_list(sg_list); ret = true; } const std::string &dest_vn = reference_path_->dest_vn_name(); if (evpn_path->dest_vn_name() != dest_vn) { evpn_path->set_dest_vn_name(dest_vn); ret = true; } if (evpn_path->ecmp_suppressed() != ecmp_suppressed_) { evpn_path->set_ecmp_suppressed(ecmp_suppressed_); ret = true; } return ret; } bool HostRoute::AddChangePath(Agent *agent, AgentPath *path, const AgentRoute *rt) { bool ret = false; NextHop *nh = NULL; InterfaceNHKey key(intf_.Clone(), false, InterfaceNHFlags::INET4); nh = static_cast<NextHop *>(agent->nexthop_table()->FindActiveEntry(&key)); if (path->dest_vn_name() != dest_vn_name_) { path->set_dest_vn_name(dest_vn_name_); ret = true; } path->set_unresolved(false); if (path->ChangeNH(agent, nh) == true) ret = true; return ret; } bool HostRoute::UpdateRoute(AgentRoute *rt) { bool ret = false; InetUnicastRouteEntry *uc_rt = static_cast<InetUnicastRouteEntry *>(rt); AgentRouteTable *table = static_cast<AgentRouteTable *>(rt->get_table()); if ((table->GetTableType() != Agent::INET4_UNICAST) && (table->GetTableType() != Agent::INET6_UNICAST)) return ret; if (uc_rt->proxy_arp() != true) { uc_rt->set_proxy_arp(true); ret = true; } return ret; } bool L2ReceiveRoute::AddChangePath(Agent *agent, AgentPath *path, const AgentRoute *rt) { bool ret = false; path->set_unresolved(false); if (path->dest_vn_name() != dest_vn_name_) { path->set_dest_vn_name(dest_vn_name_); ret = true; } if (path->label() != mpls_label_) { path->set_label(mpls_label_); ret = true; } if (path->vxlan_id() != vxlan_id_) { path->set_vxlan_id(vxlan_id_); ret 
= true; } if (path->path_preference().ConfigChanged(path_preference_)) { path->set_path_preference(path_preference_); ret = true; } if (path->ChangeNH(agent, agent->nexthop_table()->l2_receive_nh()) == true) ret = true; return ret; } bool InetInterfaceRoute::UpdateRoute(AgentRoute *rt) { bool ret = false; AgentRouteTable *table = static_cast<AgentRouteTable *>(rt->get_table()); if ((table->GetTableType() != Agent::INET4_UNICAST) && (table->GetTableType() != Agent::INET6_UNICAST)) return ret; InetUnicastRouteEntry *uc_rt = static_cast<InetUnicastRouteEntry *>(rt); if (uc_rt->proxy_arp() != true) { uc_rt->set_proxy_arp(true); ret = true; } if (uc_rt->ipam_subnet_route() == true) { uc_rt->set_ipam_subnet_route(false); ret = true; } return ret; } bool InetInterfaceRoute::AddChangePath(Agent *agent, AgentPath *path, const AgentRoute *rt) { bool ret = false; NextHop *nh = NULL; InterfaceNHKey key(intf_.Clone(), false, InterfaceNHFlags::INET4); nh = static_cast<NextHop *>(agent->nexthop_table()->FindActiveEntry(&key)); if (path->dest_vn_name() != dest_vn_name_) { path->set_dest_vn_name(dest_vn_name_); ret = true; } if (path->label() != label_) { path->set_label(label_); ret = true; } path->set_tunnel_bmap(tunnel_bmap_); TunnelType::Type tunnel_type = TunnelType::ComputeType(tunnel_bmap_); if (tunnel_type != path->tunnel_type()) { path->set_tunnel_type(tunnel_type); ret = true; } path->set_unresolved(false); if (path->ChangeNH(agent, nh) == true) ret = true; return ret; } bool DropRoute::AddChangePath(Agent *agent, AgentPath *path, const AgentRoute *rt) { bool ret = false; if (path->dest_vn_name() != vn_) { path->set_dest_vn_name(vn_); ret = true; } NextHop *nh = agent->nexthop_table()->discard_nh(); path->set_unresolved(false); if (path->ChangeNH(agent, nh) == true) { ret = true; } return ret; } bool LocalVmRoute::AddChangePath(Agent *agent, AgentPath *path, const AgentRoute *rt) { bool ret = false; NextHop *nh = NULL; SecurityGroupList path_sg_list; //TODO Based on key 
table type pick up interface VmInterfaceKey intf_key(AgentKey::ADD_DEL_CHANGE, intf_.uuid_, ""); VmInterface *vm_port = static_cast<VmInterface *> (agent->interface_table()->FindActiveEntry(&intf_key)); bool policy = false; if (vm_port) { // Use policy based NH if policy enabled on interface if (vm_port->policy_enabled()) { policy = true; ret = true; } } path->set_tunnel_bmap(tunnel_bmap_); TunnelType::Type new_tunnel_type = TunnelType::ComputeType(tunnel_bmap_); if (new_tunnel_type == TunnelType::VXLAN && vxlan_id_ == VxLanTable::kInvalidvxlan_id) { new_tunnel_type = TunnelType::ComputeType(TunnelType::MplsType()); } if (path->tunnel_type() != new_tunnel_type) { path->set_tunnel_type(new_tunnel_type); ret = true; } // If policy force-enabled in request, enable policy path->set_force_policy(force_policy_); if (force_policy_) { policy = true; } InterfaceNHKey key(intf_.Clone(), policy, flags_); nh = static_cast<NextHop *>(agent->nexthop_table()->FindActiveEntry(&key)); if (path->label() != mpls_label_) { path->set_label(mpls_label_); ret = true; } if (path->vxlan_id() != vxlan_id_) { path->set_vxlan_id(vxlan_id_); ret = true; } if (path->dest_vn_name() != dest_vn_name_) { path->set_dest_vn_name(dest_vn_name_); ret = true; } path_sg_list = path->sg_list(); if (path_sg_list != sg_list_) { path->set_sg_list(sg_list_); ret = true; } //Priority and sequence no of path are updated from path //preference state machine //Path preference value enqueued here would be copied //only if //1> ecmp field is set to true, meaning path would be // active-active //2> static preference is set, meaning external entity // would specify the preference of this path(ex LBaaS) //3> Change in priority when static preference is set if (path->path_preference().ConfigChanged(path_preference_)) { path->set_path_preference(path_preference_); ret = true; } if (path->peer() && path->peer()->GetType() == Peer::BGP_PEER) { //Copy entire path preference for BGP peer path, //since allowed-address pair 
config doesn't modify //preference on BGP path if (path->path_preference() != path_preference_) { path->set_path_preference(path_preference_); ret = true; } } // When BGP path was added, the policy flag in BGP path was based on // interface config at that instance. If the policy flag changes in // path for "Local Peer", we should change policy flag on BGP peer // also. Check if policy has changed and enable SYNC of all path in // this case // Ideally his is needed only for LocalPath. But, having code for all // paths does not have any problem bool old_policy = false; bool new_policy = false; if (path->ComputeNextHop(agent) && path->ComputeNextHop(agent)->PolicyEnabled()) old_policy = true; if (nh && nh->PolicyEnabled()) new_policy = true; if (old_policy != new_policy) { sync_route_ = true; } if (path->subnet_gw_ip() != subnet_gw_ip_) { path->set_subnet_gw_ip(subnet_gw_ip_); ret = true; } path->set_unresolved(false); path->SyncRoute(sync_route_); if (path->ChangeNH(agent, nh) == true) ret = true; return ret; } bool VlanNhRoute::AddChangePath(Agent *agent, AgentPath *path, const AgentRoute *rt) { bool ret = false; NextHop *nh = NULL; SecurityGroupList path_sg_list; assert(intf_.type_ == Interface::VM_INTERFACE); VlanNHKey key(intf_.uuid_, tag_); nh = static_cast<NextHop *>(agent->nexthop_table()->FindActiveEntry(&key)); if (nh) { assert(nh->GetType() == NextHop::VLAN); } if (path->label() != label_) { path->set_label(label_); ret = true; } if (path->dest_vn_name() != dest_vn_name_) { path->set_dest_vn_name(dest_vn_name_); ret = true; } path_sg_list = path->sg_list(); if (path_sg_list != sg_list_) { path->set_sg_list(sg_list_); ret = true; } //Copy over entire path preference structure, whenever there is a //transition from active-active to active-backup struture if (path->path_preference().ConfigChanged(path_preference_)) { path->set_path_preference(path_preference_); ret = true; } path->set_tunnel_bmap(tunnel_bmap_); TunnelType::Type tunnel_type = 
TunnelType::ComputeType(tunnel_bmap_); if (tunnel_type != path->tunnel_type()) { path->set_tunnel_type(tunnel_type); ret = true; } path->set_unresolved(false); if (path->ChangeNH(agent, nh) == true) { ret = true; } return ret; } bool ResolveRoute::AddChangePath(Agent *agent, AgentPath *path, const AgentRoute *rt) { bool ret = false; NextHop *nh = NULL; ResolveNHKey key(intf_key_.get(), policy_); nh = static_cast<NextHop *>(agent->nexthop_table()->FindActiveEntry(&key)); path->set_unresolved(false); if (path->dest_vn_name() != dest_vn_name_) { path->set_dest_vn_name(dest_vn_name_); ret = true; } if (path->label() != label_) { path->set_label(label_); ret = true; } if (path->sg_list() != path_sg_list_) { path->set_sg_list(path_sg_list_); ret = true; } //By default resolve route on gateway interface //is supported with MPLSoGRE or MplsoUdp port path->set_tunnel_bmap(TunnelType::MplsType()); TunnelType::Type new_tunnel_type = TunnelType::ComputeType(TunnelType::MplsType()); if (path->tunnel_type() != new_tunnel_type) { path->set_tunnel_type(new_tunnel_type); } if (path->ChangeNH(agent, nh) == true) ret = true; return ret; } bool ReceiveRoute::AddChangePath(Agent *agent, AgentPath *path, const AgentRoute *rt) { bool ret = false; NextHop *nh = NULL; //TODO check if it needs to know table type ReceiveNHKey key(intf_.Clone(), policy_); nh = static_cast<NextHop *>(agent->nexthop_table()->FindActiveEntry(&key)); path->set_unresolved(false); if (path->dest_vn_name() != vn_) { path->set_dest_vn_name(vn_); ret = true; } if (path->label() != label_) { path->set_label(label_); ret = true; } if (path->ChangeNH(agent, nh) == true) ret = true; return ret; } bool ReceiveRoute::UpdateRoute(AgentRoute *rt) { bool ret = false; AgentRouteTable *table = static_cast<AgentRouteTable *>(rt->get_table()); if ((table->GetTableType() != Agent::INET4_UNICAST) && (table->GetTableType() != Agent::INET6_UNICAST)) return ret; InetUnicastRouteEntry *uc_rt = static_cast<InetUnicastRouteEntry *>(rt); 
if (uc_rt->proxy_arp() != proxy_arp_) { uc_rt->set_proxy_arp(proxy_arp_); ret = true; } return ret; } bool MulticastRoute::AddChangePath(Agent *agent, AgentPath *path, const AgentRoute *rt) { bool ret = false; NextHop *nh = NULL; agent->nexthop_table()->Process(composite_nh_req_); nh = static_cast<NextHop *>(agent->nexthop_table()-> FindActiveEntry(composite_nh_req_.key.get())); assert(nh); ret = MulticastRoute::CopyPathParameters(agent, path, vn_name_, false, vxlan_id_, label_, tunnel_type_, nh); return ret; } bool MulticastRoute::CopyPathParameters(Agent *agent, AgentPath *path, const std::string &vn_name, bool unresolved, uint32_t vxlan_id, uint32_t label, uint32_t tunnel_type, NextHop *nh) { path->set_dest_vn_name(vn_name); path->set_unresolved(unresolved); path->set_vxlan_id(vxlan_id); path->set_label(label); //Setting of tunnel is only for simulated TOR. path->set_tunnel_bmap(tunnel_type); TunnelType::Type new_tunnel_type = TunnelType::ComputeType(tunnel_type); if (new_tunnel_type == TunnelType::VXLAN && vxlan_id == VxLanTable::kInvalidvxlan_id) { new_tunnel_type = TunnelType::ComputeType(TunnelType::MplsType()); } if (path->tunnel_type() != new_tunnel_type) { path->set_tunnel_type(new_tunnel_type); } path->ChangeNH(agent, nh); return true; } bool PathPreferenceData::AddChangePath(Agent *agent, AgentPath *path, const AgentRoute *rt) { bool ret = false; //ECMP flag will not be changed by path preference module, //hence retain value in path if (!path) { return ret; } path_preference_.set_ecmp(path->path_preference().ecmp()); if (path && path->path_preference() != path_preference_) { path->set_path_preference(path_preference_); ret = true; } return ret; } // Subnet Route route data IpamSubnetRoute::IpamSubnetRoute(DBRequest &nh_req, const std::string &dest_vn_name) : AgentRouteData(false), dest_vn_name_(dest_vn_name) { nh_req_.Swap(&nh_req); } bool IpamSubnetRoute::AddChangePath(Agent *agent, AgentPath *path, const AgentRoute *rt) { 
agent->nexthop_table()->Process(nh_req_); NextHop *nh = static_cast<NextHop *>(agent->nexthop_table()-> FindActiveEntry(nh_req_.key.get())); assert(nh); bool ret = false; if (path->ChangeNH(agent, nh) == true) { ret = true; } path->set_is_subnet_discard(true); if (path->dest_vn_name() != dest_vn_name_) { path->set_dest_vn_name(dest_vn_name_); ret = true; } //Resync of subnet route is needed for identifying if arp flood flag //needs to be enabled for all the smaller subnets present w.r.t. this subnet //route. AgentRouteTable *table = static_cast<AgentRouteTable *>(rt->get_table()); assert((table->GetTableType() == Agent::INET4_UNICAST) || (table->GetTableType() == Agent::INET6_UNICAST)); InetUnicastAgentRouteTable *uc_rt_table = static_cast<InetUnicastAgentRouteTable *>(table); const InetUnicastRouteEntry *uc_rt = static_cast<const InetUnicastRouteEntry *>(rt); uc_rt_table->ResyncSubnetRoutes(uc_rt, true); return ret; } bool IpamSubnetRoute::UpdateRoute(AgentRoute *rt) { bool ret = false; InetUnicastRouteEntry *uc_rt = static_cast<InetUnicastRouteEntry *>(rt); if (uc_rt->ipam_subnet_route() != true) { uc_rt->set_ipam_subnet_route(true); ret = true; } if (uc_rt->proxy_arp() == true) { uc_rt->set_proxy_arp(false); ret =true; } return ret; } /////////////////////////////////////////////// // Sandesh routines below (route_sandesh.cc) ////////////////////////////////////////////// //TODO make it generic void UnresolvedNH::HandleRequest() const { VrfEntry *vrf = Agent::GetInstance()->vrf_table()->FindVrfFromId(0); if (!vrf) { ErrorResp *resp = new ErrorResp(); resp->set_context(context()); resp->Response(); return; } int count = 0; std::string empty(""); AgentRouteTable *rt_table = static_cast<AgentRouteTable *> (vrf->GetInet4UnicastRouteTable()); NhListResp *resp = new NhListResp(); //TODO - Convert inet4ucroutetable to agentroutetable AgentRouteTable::UnresolvedNHTree::const_iterator it; it = rt_table->unresolved_nh_begin(); for (;it != rt_table->unresolved_nh_end(); 
it++) { count++; const NextHop *nh = *it; nh->DBEntrySandesh(resp, empty); if (count == 1) { resp->set_context(context()+"$"); resp->Response(); count = 0; resp = new NhListResp(); } } resp->set_context(context()); resp->Response(); return; } //TODO IMplement filltrace in path class void AgentRoute::FillTrace(RouteInfo &rt_info, Trace event, const AgentPath *path) { Agent *agent = static_cast<AgentRouteTable *>(get_table())->agent(); rt_info.set_ip(ToString()); rt_info.set_vrf(vrf()->GetName()); switch(event) { case ADD:{ rt_info.set_op("ADD"); break; } case DELETE: { rt_info.set_op("DELETE"); break; } case ADD_PATH: case DELETE_PATH: case STALE_PATH: case CHANGE_PATH: { if (event == ADD_PATH) { rt_info.set_op("PATH ADD"); } else if (event == CHANGE_PATH) { rt_info.set_op("PATH CHANGE"); } else if (event == DELETE_PATH) { rt_info.set_op("PATH DELETE"); } else if (event == STALE_PATH) { rt_info.set_op("PATH STALE"); } if (path == NULL) { rt_info.set_nh_type("<NULL>"); break; } if (path->peer()) { rt_info.set_peer(path->peer()->GetName()); } rt_info.set_ecmp(path->path_preference().ecmp()); const NextHop *nh = path->ComputeNextHop(agent); if (nh == NULL) { rt_info.set_nh_type("<NULL>"); break; } switch (nh->GetType()) { case NextHop::TUNNEL: { const TunnelNH *tun = static_cast<const TunnelNH *>(nh); rt_info.set_nh_type("TUNNEL"); rt_info.set_dest_server(tun->GetDip()->to_string()); rt_info.set_dest_server_vrf(tun->GetVrf()->GetName()); break; } case NextHop::ARP:{ rt_info.set_nh_type("DIRECT"); break; } case NextHop::INTERFACE: { const InterfaceNH *intf_nh = static_cast<const InterfaceNH *>(nh); rt_info.set_nh_type("INTERFACE"); rt_info.set_intf(intf_nh->GetInterface()->name()); break; } case NextHop::RECEIVE: { const ReceiveNH *rcv_nh = static_cast<const ReceiveNH *>(nh); rt_info.set_nh_type("RECEIVE"); rt_info.set_intf(rcv_nh->GetInterface()->name()); break; } case NextHop::DISCARD: { rt_info.set_nh_type("DISCARD"); break; } case NextHop::VLAN: { 
rt_info.set_nh_type("VLAN"); break; } case NextHop::RESOLVE: { rt_info.set_nh_type("RESOLVE"); break; } case NextHop::COMPOSITE: { rt_info.set_nh_type("COMPOSITE"); break; } case NextHop::L2_RECEIVE: { rt_info.set_nh_type("L2_RECEIVE"); break; } default: assert(0); break; } break; } } } void AgentPath::SetSandeshData(PathSandeshData &pdata) const { const NextHop *nh = nexthop(); if (nh != NULL) { nh->SetNHSandeshData(pdata.nh); } pdata.set_peer(const_cast<Peer *>(peer())->GetName()); pdata.set_dest_vn(dest_vn_name()); pdata.set_unresolved(unresolved() ? "true" : "false"); if (!gw_ip().is_unspecified()) { pdata.set_gw_ip(gw_ip().to_string()); pdata.set_vrf(vrf_name()); } if (ecmp_suppressed()) { pdata.set_ecmp_suppressed(true); } pdata.set_sg_list(sg_list()); pdata.set_vxlan_id(vxlan_id()); pdata.set_label(label()); pdata.set_active_tunnel_type( TunnelType(tunnel_type()).ToString()); pdata.set_supported_tunnel_type( TunnelType::GetString(tunnel_bmap())); pdata.set_stale(is_stale()); PathPreferenceSandeshData path_preference_data; path_preference_data.set_sequence(path_preference_.sequence()); path_preference_data.set_preference(path_preference_.preference()); path_preference_data.set_ecmp(path_preference_.ecmp()); path_preference_data.set_wait_for_traffic( path_preference_.wait_for_traffic()); pdata.set_path_preference_data(path_preference_data); pdata.set_active_label(GetActiveLabel()); if (peer()->GetType() == Peer::MAC_VM_BINDING_PEER) { const MacVmBindingPath *dhcp_path = static_cast<const MacVmBindingPath *>(this); pdata.set_flood_dhcp(dhcp_path->flood_dhcp() ? 
"true" : "false"); pdata.set_vm_name(dhcp_path->vm_interface()->ToString()); } } void AgentPath::set_local_ecmp_mpls_label(MplsLabel *mpls) { local_ecmp_mpls_label_.reset(mpls); } const MplsLabel* AgentPath::local_ecmp_mpls_label() const { return local_ecmp_mpls_label_.get(); } bool AgentPath::ReorderCompositeNH(Agent *agent, CompositeNHKey *composite_nh_key) { //Find local composite mpls label, if present //This has to be done, before expanding component NH BOOST_FOREACH(ComponentNHKeyPtr component_nh_key, composite_nh_key->component_nh_key_list()) { if (component_nh_key.get() == NULL || component_nh_key->nh_key()->GetType() != NextHop::COMPOSITE) { continue; } //Get mpls label allocated for this composite NH MplsLabel *mpls = agent->mpls_table()-> FindMplsLabel(component_nh_key->label()); if (!mpls) { //If a mpls label is deleted, //wait for bgp to update latest list local_ecmp_mpls_label_.reset(mpls); return false; } local_ecmp_mpls_label_.reset(mpls); break; } //Make a copy of composite NH, so that aggregarate mpls //label allocated for local composite ecmp is maintained //as data in path CompositeNHKey *comp_key = composite_nh_key->Clone(); //Reorder the keys so that, existing component NH maintain //there previous position //For example take a composite NH with members A, B, C //in that exact order,If B gets deleted, //the new composite NH created should be A <NULL> C in that order, //irrespective of the order user passed it in composite_nh_key->Reorder(agent, label_, ComputeNextHop(agent)); //Copy the unchanged component NH list to path data set_composite_nh_key(comp_key); return true; } bool AgentPath::ChangeCompositeNH(Agent *agent, CompositeNHKey *composite_nh_key) { DBRequest nh_req(DBRequest::DB_ENTRY_ADD_CHANGE); nh_req.key.reset(composite_nh_key->Clone()); nh_req.data.reset(new CompositeNHData()); agent->nexthop_table()->Process(nh_req); NextHop *nh = static_cast<NextHop *>(agent->nexthop_table()-> FindActiveEntry(composite_nh_key)); assert(nh); if 
(ChangeNH(agent, nh) == true) { return true; } return false; } const Ip4Address *AgentPath::NexthopIp(Agent *agent) const { if (peer_ == NULL) { return agent->router_ip_ptr(); } return peer_->NexthopIp(agent, this); } MacVmBindingPath::MacVmBindingPath(const Peer *peer) : AgentPath(peer, NULL), vm_interface_(NULL), flood_dhcp_(false) { } bool MacVmBindingPath::IsLess(const AgentPath &r_path) const { return peer()->IsLess(r_path.peer()); } const NextHop *MacVmBindingPath::ComputeNextHop(Agent *agent) const { return nexthop(); } AgentPath *MacVmBindingPathData::CreateAgentPath(const Peer *peer, AgentRoute *rt) const { const Peer *mac_vm_binding_peer = dynamic_cast<const Peer *>(peer); assert(mac_vm_binding_peer != NULL); return (new MacVmBindingPath(mac_vm_binding_peer)); } bool MacVmBindingPathData::AddChangePath(Agent *agent, AgentPath *path, const AgentRoute *rt) { bool ret = false; MacVmBindingPath *dhcp_path = dynamic_cast<MacVmBindingPath *>(path); NextHop *nh = agent->nexthop_table()->discard_nh(); if (path->ChangeNH(agent, nh) == true) ret = true; bool flood_dhcp = !(vm_intf_->dhcp_enable_config()); if (dhcp_path->flood_dhcp() != flood_dhcp) { dhcp_path->set_flood_dhcp(flood_dhcp); ret = true; } if (dhcp_path->vm_interface() != vm_intf_) { dhcp_path->set_vm_interface(vm_intf_); ret = true; } return ret; }
apache-2.0
TinghuanWang/source
src/cn/jsprun/dao/Member_Magics_Magiclog_MemberMagicsDao.java
323
package cn.jsprun.dao;

import cn.jsprun.domain.Magiclog;
import cn.jsprun.domain.Magics;
import cn.jsprun.domain.Membermagics;
import cn.jsprun.domain.Members;

/**
 * DAO contract covering the four entities involved when a member purchases a
 * "magic" item: the {@link Magics} catalog entry, the {@link Magiclog} audit
 * record, the buying {@link Members} row, and the resulting
 * {@link Membermagics} ownership row.
 *
 * <p>Only the purchase operation is declared here; implementations are
 * expected to perform all four updates together (presumably in a single
 * transaction — NOTE(review): confirm against the implementing class).
 */
public interface Member_Magics_Magiclog_MemberMagicsDao {

    /**
     * Records the purchase of {@code magic} by {@code member}.
     *
     * @param magic        the magic item being bought
     * @param magiclog     log entry describing this purchase
     * @param member       the purchasing member (balance state lives here)
     * @param memberMagics ownership record linking member and magic
     * @return {@code true} if the purchase was persisted successfully
     */
    public boolean userBuyMagic(Magics magic, Magiclog magiclog, Members member, Membermagics memberMagics);
}
apache-2.0
johnbelamaric/themis
vendor/github.com/infobloxopen/go-trees/dltree/domain_label.go
4349
package dltree

import (
	"bytes"
	"math"
	"strconv"
)

// A DomainLabel represents content of a label.
type DomainLabel []byte

// GetFirstLabelSize returns size in bytes needed to store first label of given
// domain name as a DomainLabel. Additionally the function returns position
// right after the label in given string (or length of the string if the first
// label also is the last).
//
// The scan mirrors MakeDomainLabel exactly: a backslash starts an escape, up
// to three following digits are buffered in code, and a digit triple that
// parses to 0..255 collapses to a single stored byte (presumably the \DDD
// decimal escape of DNS master-file syntax — TODO confirm). Any other escape
// keeps its characters verbatim. An unescaped '.' ends the label.
func GetFirstLabelSize(s string) (int, int) {
	size := 0    // bytes the decoded label will occupy
	escaped := 0 // 0: plain; 1: saw '\'; 2-4: saw '\' plus escaped-1 digits
	var code [3]byte
	for i := 0; i < len(s); i++ {
		c := s[i]
		if escaped <= 0 {
			// Plain state: '.' terminates, '\' opens an escape,
			// anything else is one output byte.
			switch c {
			case '.':
				return size, i
			case '\\':
				escaped = 1
			default:
				size++
			}
		} else if escaped == 1 {
			// Right after '\': a digit may begin a \DDD escape;
			// any other character is taken literally (so '\.'
			// and '\\' each decode to one byte).
			if c >= '0' && c <= '9' {
				code[0] = c
				escaped++
			} else {
				size++
				escaped = 0
			}
		} else if escaped > 1 && escaped < 4 {
			// One or two digits buffered so far.
			if c >= '0' && c <= '9' {
				code[escaped-1] = c
				escaped++
			} else {
				// Escape broken off early: the '\' and the
				// buffered digits all count verbatim
				// (escaped == 1 + digit count), then the
				// current character is reprocessed as plain.
				size += escaped
				escaped = 0
				switch c {
				case '.':
					return size, i
				case '\\':
					escaped = 1
				default:
					size++
				}
			}
		} else {
			// Three digits buffered (escaped == 4): a value in
			// 0..255 collapses to one byte, otherwise the four
			// characters stay verbatim; then handle c as plain.
			if n, _ := strconv.Atoi(string(code[:])); n >= 0 && n <= math.MaxUint8 {
				size++
			} else {
				size += escaped
			}
			escaped = 0
			switch c {
			case '.':
				return size, i
			case '\\':
				escaped = 1
			default:
				size++
			}
		}
	}
	// End of string with a pending escape: flush it by the same rules.
	if escaped > 0 && escaped < 4 {
		size += escaped
	} else if escaped >= 4 {
		if n, _ := strconv.Atoi(string(code[:])); n >= 0 && n <= math.MaxUint8 {
			size++
		} else {
			size += escaped
		}
	}
	return size, len(s)
}

// MakeDomainLabel returns first domain label found in given string as
// DomainLabel and position in the string right after the label.
//
// It repeats GetFirstLabelSize's state machine but actually writes the
// decoded bytes, additionally lowercasing ASCII 'A'..'Z' (including bytes
// produced from \DDD escapes), so labels compare case-insensitively.
func MakeDomainLabel(s string) (DomainLabel, int) {
	// First pass computes the exact output size so out never reallocates.
	size, end := GetFirstLabelSize(s)
	out := make(DomainLabel, size)
	escaped := 0 // same encoding as in GetFirstLabelSize
	var code [3]byte
	p := 0 // write cursor into out
	for i := 0; i < len(s); i++ {
		c := s[i]
		if escaped <= 0 {
			switch c {
			case '.':
				return out, end
			case '\\':
				escaped = 1
			default:
				// ASCII-lowercase on the fly.
				if c >= 'A' && c <= 'Z' {
					c += 0x20
				}
				out[p] = c
				p++
			}
		} else if escaped == 1 {
			if c >= '0' && c <= '9' {
				code[0] = c
				escaped++
			} else {
				// '\X' with X non-digit: emit X literally
				// (lowercased), dropping the backslash.
				if c >= 'A' && c <= 'Z' {
					c += 0x20
				}
				out[p] = c
				p++
				escaped = 0
			}
		} else if escaped > 1 && escaped < 4 {
			if c >= '0' && c <= '9' {
				code[escaped-1] = c
				escaped++
			} else {
				// Escape broken off early: emit '\' plus the
				// buffered digits verbatim, then reprocess c.
				out[p] = '\\'
				p++
				for j := 0; j < escaped-1; j++ {
					out[p] = code[j]
					p++
				}
				escaped = 0
				switch c {
				case '.':
					return out, end
				case '\\':
					escaped = 1
				default:
					if c >= 'A' && c <= 'Z' {
						c += 0x20
					}
					out[p] = c
					p++
				}
			}
		} else {
			// Three digits buffered: 0..255 becomes one byte
			// (lowercased if it lands in 'A'..'Z'), otherwise
			// '\' and the digits are kept verbatim.
			if n, _ := strconv.Atoi(string(code[:])); n >= 0 && n <= math.MaxUint8 {
				out[p] = byte(n)
				if out[p] >= 'A' && out[p] <= 'Z' {
					out[p] += 0x20
				}
				p++
			} else {
				out[p] = '\\'
				p++
				for _, b := range code {
					out[p] = b
					p++
				}
			}
			escaped = 0
			switch c {
			case '.':
				return out, end
			case '\\':
				escaped = 1
			default:
				if c >= 'A' && c <= 'Z' {
					c += 0x20
				}
				out[p] = c
				p++
			}
		}
	}
	// Flush a pending escape at end of string, mirroring the size pass.
	if escaped > 0 && escaped < 4 {
		out[p] = '\\'
		p++
		for i := 0; i < escaped-1; i++ {
			out[p] = code[i]
			p++
		}
	} else if escaped >= 4 {
		if n, _ := strconv.Atoi(string(code[:])); n >= 0 && n <= math.MaxUint8 {
			out[p] = byte(n)
			if out[p] >= 'A' && out[p] <= 'Z' {
				out[p] += 0x20
			}
			p++
		} else {
			out[p] = '\\'
			p++
			for _, b := range code {
				out[p] = b
				p++
			}
		}
	}
	return out, end
}

// String returns domain label in human readable format.
//
// Non-printable bytes (outside ' '..'~') are rendered as zero-padded
// three-digit decimal escapes "\DDD"; '.' and '\' are backslash-escaped;
// everything else passes through. The sizing pass matches the emit pass:
// 1 base byte, +3 for an escaped non-printable, +1 for '.'/'\\'.
func (l DomainLabel) String() string {
	size := 0
	for _, c := range l {
		size++
		if c < ' ' || c > '~' {
			size += 3
		} else if c == '.' || c == '\\' {
			size++
		}
	}
	out := make([]byte, size)
	i := 0
	for _, c := range l {
		if c < ' ' || c > '~' {
			out[i] = '\\'
			// Zero-pad to exactly three digits so every escaped
			// byte occupies four characters ("\007", "\042", ...).
			if c < 100 {
				i++
				out[i] = '0'
				if c < 10 {
					i++
					out[i] = '0'
				}
			}
			for _, n := range strconv.Itoa(int(c)) {
				i++
				out[i] = byte(n)
			}
		} else {
			if c == '.' || c == '\\' {
				out[i] = '\\'
				i++
			}
			out[i] = c
		}
		i++
	}
	return string(out)
}

// compare orders two labels first by length, then lexicographically by bytes;
// the result is negative/zero/positive like bytes.Compare.
func compare(a, b DomainLabel) int {
	d := len(a) - len(b)
	if d != 0 {
		return d
	}
	return bytes.Compare(a, b)
}
apache-2.0
injoin/plook
index.js
189
"use strict"; // Export the server if we're the main module // Export the Plook class otherwise module.exports = require.main === module ? require( "./server" ) : require( "./lib/plook" );
apache-2.0
alexzatsepin/omim
routing/edge_estimator.cpp
9324
#include "routing/edge_estimator.hpp" #include "routing/routing_helpers.hpp" #include "traffic/traffic_info.hpp" #include "indexer/feature_altitude.hpp" #include "base/assert.hpp" #include <algorithm> #include <unordered_map> using namespace routing; using namespace std; using namespace traffic; namespace { feature::TAltitude constexpr kMountainSicknessAltitudeM = 2500; enum class Purpose { Weight, ETA }; double TimeBetweenSec(m2::PointD const & from, m2::PointD const & to, double speedMpS) { CHECK_GREATER(speedMpS, 0.0, ("from:", MercatorBounds::ToLatLon(from), "to:", MercatorBounds::ToLatLon(to))); double const distanceM = MercatorBounds::DistanceOnEarth(from, to); return distanceM / speedMpS; } double CalcTrafficFactor(SpeedGroup speedGroup) { if (speedGroup == SpeedGroup::TempBlock) { double constexpr kImpossibleDrivingFactor = 1e4; return kImpossibleDrivingFactor; } double const percentage = 0.01 * static_cast<double>(kSpeedGroupThresholdPercentage[static_cast<size_t>(speedGroup)]); CHECK_GREATER(percentage, 0.0, ("Speed group:", speedGroup)); return 1.0 / percentage; } double GetPedestrianClimbPenalty(double tangent, feature::TAltitude altitudeM) { if (tangent <= 0) // Descent return 1.0 + 2.0 * (-tangent); // Climb. // The upper the penalty is more: // |1 + 10 * tangent| for altitudes lower than |kMountainSicknessAltitudeM| // |1 + 20 * tangent| for 4000 meters // |1 + 30 * tangent| for 5500 meters // |1 + 40 * tangent| for 7000 meters return 1.0 + (10.0 + max(0, altitudeM - kMountainSicknessAltitudeM) * 10.0 / 1500) * tangent; } double GetBicycleClimbPenalty(double tangent, feature::TAltitude altitudeM) { if (tangent <= 0) // Descent return 1.0; // Climb. 
if (altitudeM < kMountainSicknessAltitudeM) return 1.0 + 30.0 * tangent; return 1.0 + 50.0 * tangent; } double GetCarClimbPenalty(double /* tangent */, feature::TAltitude /* altitude */) { return 1.0; } template <typename GetClimbPenalty> double CalcClimbSegment(Purpose purpose, Segment const & segment, RoadGeometry const & road, GetClimbPenalty && getClimbPenalty) { Junction const & from = road.GetJunction(segment.GetPointId(false /* front */)); Junction const & to = road.GetJunction(segment.GetPointId(true /* front */)); SpeedKMpH const & speed = road.GetSpeed(segment.IsForward()); double const distance = MercatorBounds::DistanceOnEarth(from.GetPoint(), to.GetPoint()); double const speedMpS = KMPH2MPS(purpose == Purpose::Weight ? speed.m_weight : speed.m_eta); CHECK_GREATER(speedMpS, 0.0, ()); double const timeSec = distance / speedMpS; if (base::AlmostEqualAbs(distance, 0.0, 0.1)) return timeSec; double const altitudeDiff = static_cast<double>(to.GetAltitude()) - static_cast<double>(from.GetAltitude()); return timeSec * getClimbPenalty(altitudeDiff / distance, to.GetAltitude()); } } // namespace namespace routing { // EdgeEstimator ----------------------------------------------------------------------------------- EdgeEstimator::EdgeEstimator(double maxWeightSpeedKMpH, double offroadSpeedKMpH) : m_maxWeightSpeedMpS(KMPH2MPS(maxWeightSpeedKMpH)), m_offroadSpeedMpS(KMPH2MPS(offroadSpeedKMpH)) { CHECK_GREATER(m_offroadSpeedMpS, 0.0, ()); CHECK_GREATER_OR_EQUAL(m_maxWeightSpeedMpS, m_offroadSpeedMpS, ()); } double EdgeEstimator::CalcHeuristic(m2::PointD const & from, m2::PointD const & to) const { return TimeBetweenSec(from, to, m_maxWeightSpeedMpS); } double EdgeEstimator::CalcLeapWeight(m2::PointD const & from, m2::PointD const & to) const { // Let us assume for the time being that // leap edges will be added with a half of max speed. // @TODO(bykoianko) It's necessary to gather statistics to calculate better factor(s) instead of // one below. 
return TimeBetweenSec(from, to, m_maxWeightSpeedMpS / 2.0); } double EdgeEstimator::CalcOffroadWeight(m2::PointD const & from, m2::PointD const & to) const { return TimeBetweenSec(from, to, m_offroadSpeedMpS); } // PedestrianEstimator ----------------------------------------------------------------------------- class PedestrianEstimator final : public EdgeEstimator { public: PedestrianEstimator(double maxWeightSpeedKMpH, double offroadSpeedKMpH) : EdgeEstimator(maxWeightSpeedKMpH, offroadSpeedKMpH) { } // EdgeEstimator overrides: double GetUTurnPenalty() const override { return 0.0 /* seconds */; } double CalcSegmentWeight(Segment const & segment, RoadGeometry const & road) const override { return CalcClimbSegment(Purpose::Weight, segment, road, GetPedestrianClimbPenalty); } double CalcSegmentETA(Segment const & segment, RoadGeometry const & road) const override { return CalcClimbSegment(Purpose::ETA, segment, road, GetPedestrianClimbPenalty); } }; // BicycleEstimator -------------------------------------------------------------------------------- class BicycleEstimator final : public EdgeEstimator { public: BicycleEstimator(double maxWeightSpeedKMpH, double offroadSpeedKMpH) : EdgeEstimator(maxWeightSpeedKMpH, offroadSpeedKMpH) { } // EdgeEstimator overrides: double GetUTurnPenalty() const override { return 20.0 /* seconds */; } double CalcSegmentWeight(Segment const & segment, RoadGeometry const & road) const override { return CalcClimbSegment(Purpose::Weight, segment, road, GetBicycleClimbPenalty); } double CalcSegmentETA(Segment const & segment, RoadGeometry const & road) const override { return CalcClimbSegment(Purpose::ETA, segment, road, GetBicycleClimbPenalty); } }; // CarEstimator ------------------------------------------------------------------------------------ class CarEstimator final : public EdgeEstimator { public: CarEstimator(shared_ptr<TrafficStash> trafficStash, double maxWeightSpeedKMpH, double offroadSpeedKMpH); // EdgeEstimator overrides: 
double CalcSegmentWeight(Segment const & segment, RoadGeometry const & road) const override; double CalcSegmentETA(Segment const & segment, RoadGeometry const & road) const override; double GetUTurnPenalty() const override; private: double CalcSegment(Purpose purpose, Segment const & segment, RoadGeometry const & road) const; shared_ptr<TrafficStash> m_trafficStash; }; CarEstimator::CarEstimator(shared_ptr<TrafficStash> trafficStash, double maxWeightSpeedKMpH, double offroadSpeedKMpH) : EdgeEstimator(maxWeightSpeedKMpH, offroadSpeedKMpH), m_trafficStash(move(trafficStash)) { } double CarEstimator::CalcSegmentWeight(Segment const & segment, RoadGeometry const & road) const { return CalcSegment(Purpose::Weight, segment, road); } double CarEstimator::CalcSegmentETA(Segment const & segment, RoadGeometry const & road) const { return CalcSegment(Purpose::ETA, segment, road); } double CarEstimator::GetUTurnPenalty() const { // Adds 2 minutes penalty for U-turn. The value is quite arbitrary // and needs to be properly selected after a number of real-world // experiments. return 2 * 60; // seconds } double CarEstimator::CalcSegment(Purpose purpose, Segment const & segment, RoadGeometry const & road) const { double result = CalcClimbSegment(purpose, segment, road, GetCarClimbPenalty); if (m_trafficStash) { SpeedGroup const speedGroup = m_trafficStash->GetSpeedGroup(segment); ASSERT_LESS(speedGroup, SpeedGroup::Count, ()); double const trafficFactor = CalcTrafficFactor(speedGroup); result *= trafficFactor; if (speedGroup != SpeedGroup::Unknown && speedGroup != SpeedGroup::G5) { // Current time estimation are too optimistic. // Need more accurate tuning: traffic lights, traffic jams, road models and so on. // Add some penalty to make estimation of a more realistic. // TODO: make accurate tuning, remove penalty. 
double constexpr kTimePenalty = 1.8; result *= kTimePenalty; } } return result; } // EdgeEstimator ----------------------------------------------------------------------------------- // static shared_ptr<EdgeEstimator> EdgeEstimator::Create(VehicleType vehicleType, double maxWeighSpeedKMpH, double offroadSpeedKMpH, shared_ptr<TrafficStash> trafficStash) { switch (vehicleType) { case VehicleType::Pedestrian: case VehicleType::Transit: return make_shared<PedestrianEstimator>(maxWeighSpeedKMpH, offroadSpeedKMpH); case VehicleType::Bicycle: return make_shared<BicycleEstimator>(maxWeighSpeedKMpH, offroadSpeedKMpH); case VehicleType::Car: return make_shared<CarEstimator>(trafficStash, maxWeighSpeedKMpH, offroadSpeedKMpH); case VehicleType::Count: CHECK(false, ("Can't create EdgeEstimator for", vehicleType)); return nullptr; } UNREACHABLE(); } // static shared_ptr<EdgeEstimator> EdgeEstimator::Create(VehicleType vehicleType, VehicleModelInterface const & vehicleModel, shared_ptr<TrafficStash> trafficStash) { return Create(vehicleType, vehicleModel.GetMaxWeightSpeed(), vehicleModel.GetOffroadSpeed(), trafficStash); } } // namespace routing
apache-2.0
liudih/rabbitmq
rabbitmq-consumer/src/main/java/com/rabbit/conf/basemapper/WebsiteMapper.java
1436
package com.rabbit.conf.basemapper; import java.util.List; import org.apache.ibatis.annotations.Param; import org.apache.ibatis.annotations.Select; import com.rabbit.dto.Website; public interface WebsiteMapper { @Select("select * from t_website where iid = #{0} limit 1") Website selectByPrimaryKey(Integer iid); @Select("select * from t_website ORDER BY iid") List<Website> getAll(); @Select("SELECT ws.iid, ws.iplatformid, ws.ccode, vh.cvhost as curl, " + "CASE WHEN vh.ilanguageid IS NOT NULL THEN vh.ilanguageid ELSE ws.ilanguageid END, " + "CASE WHEN vh.icurrencyid IS NOT NULL THEN vh.icurrencyid ELSE ws.icurrencyid END, " + "vh.ccreateuser, vh.dcreatedate," + "ws.bfallback, ws.idefaultshippingcountry, ws.idefaultshippingstorage " + "FROM t_website ws " + "INNER JOIN t_vhost vh ON ws.iid = vh.iwebsiteid " + "WHERE cvhost=#{0}") Website findByHostname(String hostname); @Select("select ws.* from t_website ws " + "inner join t_country_website cws on cws.iwebsiteid = ws.iid and cws.iplatformid = ws.iplatformid " + "inner join t_country c on cws.icountryid = c.iid " + "where c.cshortname = #{0} and cws.iplatformid = #{1}") Website findCountryDefault(String countryCode, Integer platformId); @Select("select * from t_website where bfallback = true limit 1") Website selectDefaultWebsite(); List<Website> getWebsitesByWebsiteIds( @Param("list") List<Integer> websiteIds); }
apache-2.0
zaurx/gitblit
src/com/gitblit/ServletRequestWrapper.java
6242
/* * Copyright 2011 gitblit.com. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.gitblit; import java.io.BufferedReader; import java.io.IOException; import java.io.UnsupportedEncodingException; import java.security.Principal; import java.util.Enumeration; import java.util.Locale; import java.util.Map; import javax.servlet.RequestDispatcher; import javax.servlet.ServletInputStream; import javax.servlet.http.Cookie; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpSession; public abstract class ServletRequestWrapper implements HttpServletRequest { protected final HttpServletRequest req; public ServletRequestWrapper(HttpServletRequest req) { this.req = req; } @Override public Object getAttribute(String name) { return req.getAttribute(name); } @Override public Enumeration getAttributeNames() { return req.getAttributeNames(); } @Override public String getCharacterEncoding() { return req.getCharacterEncoding(); } @Override public void setCharacterEncoding(String env) throws UnsupportedEncodingException { req.setCharacterEncoding(env); } @Override public int getContentLength() { return req.getContentLength(); } @Override public String getContentType() { return req.getContentType(); } @Override public ServletInputStream getInputStream() throws IOException { return req.getInputStream(); } @Override public String getParameter(String name) { return req.getParameter(name); } @Override public Enumeration getParameterNames() { return 
req.getParameterNames(); } @Override public String[] getParameterValues(String name) { return req.getParameterValues(name); } @Override public Map getParameterMap() { return req.getParameterMap(); } @Override public String getProtocol() { return req.getProtocol(); } @Override public String getScheme() { return req.getScheme(); } @Override public String getServerName() { return req.getServerName(); } @Override public int getServerPort() { return req.getServerPort(); } @Override public BufferedReader getReader() throws IOException { return req.getReader(); } @Override public String getRemoteAddr() { return req.getRemoteAddr(); } @Override public String getRemoteHost() { return req.getRemoteHost(); } @Override public void setAttribute(String name, Object o) { req.setAttribute(name, o); } @Override public void removeAttribute(String name) { req.removeAttribute(name); } @Override public Locale getLocale() { return req.getLocale(); } @Override public Enumeration getLocales() { return req.getLocales(); } @Override public boolean isSecure() { return req.isSecure(); } @Override public RequestDispatcher getRequestDispatcher(String path) { return req.getRequestDispatcher(path); } @Override @Deprecated public String getRealPath(String path) { return req.getRealPath(path); } @Override public int getRemotePort() { return req.getRemotePort(); } @Override public String getLocalName() { return req.getLocalName(); } @Override public String getLocalAddr() { return req.getLocalAddr(); } @Override public int getLocalPort() { return req.getLocalPort(); } @Override public String getAuthType() { return req.getAuthType(); } @Override public Cookie[] getCookies() { return req.getCookies(); } @Override public long getDateHeader(String name) { return req.getDateHeader(name); } @Override public String getHeader(String name) { return req.getHeader(name); } @Override public Enumeration getHeaders(String name) { return req.getHeaders(name); } @Override public Enumeration getHeaderNames() { return 
req.getHeaderNames(); } @Override public int getIntHeader(String name) { return req.getIntHeader(name); } @Override public String getMethod() { return req.getMethod(); } @Override public String getPathInfo() { return req.getPathInfo(); } @Override public String getPathTranslated() { return req.getPathTranslated(); } @Override public String getContextPath() { return req.getContextPath(); } @Override public String getQueryString() { return req.getQueryString(); } @Override public String getRemoteUser() { return req.getRemoteUser(); } @Override public boolean isUserInRole(String role) { return req.isUserInRole(role); } @Override public Principal getUserPrincipal() { return req.getUserPrincipal(); } @Override public String getRequestedSessionId() { return req.getRequestedSessionId(); } @Override public String getRequestURI() { return req.getRequestURI(); } @Override public StringBuffer getRequestURL() { return req.getRequestURL(); } @Override public String getServletPath() { return req.getServletPath(); } @Override public HttpSession getSession(boolean create) { return req.getSession(create); } @Override public HttpSession getSession() { return req.getSession(); } @Override public boolean isRequestedSessionIdValid() { return req.isRequestedSessionIdValid(); } @Override public boolean isRequestedSessionIdFromCookie() { return req.isRequestedSessionIdFromCookie(); } @Override public boolean isRequestedSessionIdFromURL() { return req.isRequestedSessionIdFromURL(); } @Override @Deprecated public boolean isRequestedSessionIdFromUrl() { return req.isRequestedSessionIdFromUrl(); } }
apache-2.0
droolsjbpm/kie-benchmarks
optaplanner-benchmarks/optaplanner-perf-framework/src/main/java/org/jboss/qa/brms/performance/examples/nurserostering/solver/drools/EmployeeConsecutiveWeekendAssignmentEnd.java
2937
/* * Copyright 2010 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jboss.qa.brms.performance.examples.nurserostering.solver.drools; import java.io.Serializable; import org.apache.commons.lang3.builder.CompareToBuilder; import org.apache.commons.lang3.builder.EqualsBuilder; import org.apache.commons.lang3.builder.HashCodeBuilder; import org.jboss.qa.brms.performance.examples.nurserostering.domain.Employee; import org.jboss.qa.brms.performance.examples.nurserostering.domain.contract.Contract; public class EmployeeConsecutiveWeekendAssignmentEnd implements Comparable<EmployeeConsecutiveWeekendAssignmentEnd>, Serializable { private Employee employee; private int sundayIndex; public EmployeeConsecutiveWeekendAssignmentEnd(Employee employee, int sundayIndex) { this.employee = employee; this.sundayIndex = sundayIndex; } public Employee getEmployee() { return employee; } public void setEmployee(Employee employee) { this.employee = employee; } public int getSundayIndex() { return sundayIndex; } public void setSundayIndex(int sundayIndex) { this.sundayIndex = sundayIndex; } @Override public boolean equals(Object o) { if (this == o) { return true; } else if (o instanceof EmployeeConsecutiveWeekendAssignmentEnd) { EmployeeConsecutiveWeekendAssignmentEnd other = (EmployeeConsecutiveWeekendAssignmentEnd) o; return new EqualsBuilder() .append(employee, other.employee) .append(sundayIndex, other.sundayIndex) .isEquals(); } else { return false; 
} } @Override public int hashCode() { return new HashCodeBuilder() .append(employee) .append(sundayIndex) .toHashCode(); } @Override public int compareTo(EmployeeConsecutiveWeekendAssignmentEnd other) { return new CompareToBuilder() .append(employee, other.employee) .append(sundayIndex, other.sundayIndex) .toComparison(); } @Override public String toString() { return employee + " weekend ... - " + sundayIndex; } public Contract getContract() { return employee.getContract(); } }
apache-2.0
timothyhinrichs/opa
ast/index_test.go
7237
// Copyright 2017 The OPA Authors. All rights reserved. // Use of this source code is governed by an Apache2 // license that can be found in the LICENSE file. package ast import ( "fmt" "testing" "github.com/open-policy-agent/opa/util/test" ) type testResolver struct { input *Term failRef Ref } func (r testResolver) Resolve(ref Ref) (Value, error) { if ref.Equal(r.failRef) { return nil, fmt.Errorf("some error") } if ref.HasPrefix(InputRootRef) { v, err := r.input.Value.Find(ref[1:]) if err != nil { return nil, nil } return v, nil } panic("illegal value") } func TestBaseDocEqIndexing(t *testing.T) { module := MustParseModule(` package test exact { input.x = 1 input.y = 2 } { input.x = 3 input.y = 4 } scalars { input.x = 0 input.y = 1 } { 1 = input.y # exercise ordering input.x = 0 } { input.y = 2 input.z = 2 } { input.x = 2 } vars { input.x = 1 input.y = 2 } { input.x = x input.y = 3 } { input.x = 4 input.z = 5 } composite_arr { input.x = 1 input.y = [1,2,3] input.z = 1 } { input.x = 1 input.y = [1,2,4,x] } { input.y = [1,2,y,5] input.z = 3 } { input.y = [] } { # Must be included in all results as nested composites are not indexed. input.y = [1,[2,3],4] } composite_obj { input.y = {"foo": "bar", "bar": x} } # filtering ruleset contains rules that cannot be indexed (for different reasons). filtering { count([], x) } { not input.x = 0 } { x = [1,2,3] x[0] = 1 } { input.x[_] = 1 } { input.x[input.y] = 1 } { # include one rule that can be indexed to exercise merging of root non-indexable # rules with other rules. 
input.x = 1 } # exercise default keyword default allow = false allow { input.x = 1 } { input.x = 0 } `) tests := []struct { note string ruleset string input string expectedRS interface{} expectedDR *Rule }{ { note: "exact match", ruleset: "exact", input: `{"x": 3, "y": 4}`, expectedRS: []string{ `exact { input.x = 3; input.y = 4 }`, }, }, { note: "undefined match", ruleset: "scalars", input: `{"x": 2, "y": 2}`, expectedRS: []string{ `scalars { input.x = 2 }`}, }, { note: "disjoint match", ruleset: "scalars", input: `{"x": 2, "y": 2, "z": 2}`, expectedRS: []string{ `scalars { input.x = 2 }`, `scalars { input.y = 2; input.z = 2}`}, }, { note: "ordering match", ruleset: "scalars", input: `{"x": 0, "y": 1}`, expectedRS: []string{ `scalars { input.x = 0; input.y = 1 }`, `scalars { 1 = input.y; input.x = 0 }`}, }, { note: "type no match", ruleset: "vars", input: `{"y": 3, "x": {1,2,3}}`, expectedRS: []string{ `vars { input.x = x; input.y = 3 }`, }, }, { note: "var match", ruleset: "vars", input: `{"x": 1, "y": 3}`, expectedRS: []string{ `vars { input.x = x; input.y = 3 }`, }, }, { note: "var match disjoint", ruleset: "vars", input: `{"x": 4, "z": 5, "y": 3}`, expectedRS: []string{ `vars { input.x = x; input.y = 3 }`, `vars { input.x = 4; input.z = 5 }`, }, }, { note: "array match", ruleset: "composite_arr", input: `{ "x": 1, "y": [1,2,3], "z": 1, }`, expectedRS: []string{ `composite_arr { input.x = 1; input.y = [1,2,3]; input.z = 1 }`, `composite_arr { input.y = [1,[2,3],4] }`, }, }, { note: "array var match", ruleset: "composite_arr", input: `{ "x": 1, "y": [1,2,4,5], }`, expectedRS: []string{ `composite_arr { input.x = 1; input.y = [1,2,4,x] }`, `composite_arr { input.y = [1,[2,3],4] }`, }, }, { note: "array var multiple match", ruleset: "composite_arr", input: `{ "x": 1, "y": [1,2,4,5], "z": 3, }`, expectedRS: []string{ `composite_arr { input.x = 1; input.y = [1,2,4,x] }`, `composite_arr { input.y = [1,2,y,5]; input.z = 3 }`, `composite_arr { input.y = [1,[2,3],4] }`, 
}, }, { note: "array nested match non-indexable rules", ruleset: "composite_arr", input: `{ "x": 1, "y": [1,[2,3],4], }`, expectedRS: []string{ `composite_arr { input.y = [1,[2,3],4] }`, }, }, { note: "array empty match", ruleset: "composite_arr", input: `{"y": []}`, expectedRS: []string{ `composite_arr { input.y = [] }`, `composite_arr { input.y = [1,[2,3],4] }`, }, }, { note: "object match non-indexable rule", ruleset: "composite_obj", input: `{"y": {"foo": "bar", "bar": "baz"}}`, expectedRS: []string{ `composite_obj { input.y = {"foo": "bar", "bar": x} }`, }, }, { note: "default rule only", ruleset: "allow", input: `{"x": 2}`, expectedRS: []string{}, expectedDR: MustParseRule(`default allow = false`), }, { note: "match and default rule", ruleset: "allow", input: `{"x": 1}`, expectedRS: []string{"allow { input.x = 1 }"}, expectedDR: MustParseRule(`default allow = false`), }, { note: "match and non-indexable rules", ruleset: "filtering", input: `{"x": 1}`, expectedRS: module.RuleSet(Var("filtering")), }, { note: "non-indexable rules", ruleset: "filtering", input: `{}`, expectedRS: module.RuleSet(Var("filtering")).Diff(NewRuleSet(MustParseRule(`filtering { input.x = 1 }`))), }, } for _, tc := range tests { test.Subtest(t, tc.note, func(t *testing.T) { rules := []*Rule{} for _, rule := range module.Rules { if rule.Head.Name == Var(tc.ruleset) { rules = append(rules, rule) } } input := MustParseTerm(tc.input) var expectedRS RuleSet switch e := tc.expectedRS.(type) { case []string: for _, r := range e { expectedRS.Add(MustParseRule(r)) } case RuleSet: expectedRS = e default: panic("Unexpected test case expected value") } index := newBaseDocEqIndex(func(Ref) bool { return false }) if !index.Build(rules) { t.Fatalf("Expected index build to succeed") } rs, dr, err := index.Index(testResolver{input, nil}) if err != nil { t.Fatalf("Unexpected error during index lookup: %v", err) } result := NewRuleSet(rs...) 
if !result.Equal(expectedRS) { t.Fatalf("Expected ruleset %v but got: %v", expectedRS, rs) } if dr == nil && tc.expectedDR != nil { t.Fatalf("Expected default rule but got nil") } else if dr != nil && tc.expectedDR == nil { t.Fatalf("Unexpected default rule %v", dr) } else if dr != nil && tc.expectedDR != nil && !dr.Equal(tc.expectedDR) { t.Fatalf("Expected default rule %v but got: %v", tc.expectedDR, dr) } }) } } func TestBaseDocEqIndexingErrors(t *testing.T) { index := newBaseDocEqIndex(func(Ref) bool { return false }) module := MustParseModule(` package ex p { input.raise_error = 1 }`) if !index.Build(module.Rules) { t.Fatalf("Expected index to build") } _, _, err := index.Index(testResolver{MustParseTerm(`{}`), MustParseRef("input.raise_error")}) if err == nil || err.Error() != "some error" { t.Fatalf("Expected error but got: %v", err) } }
apache-2.0
zhaoxiansheng/coolweather
app/src/main/java/com/example/zy/coolweather/gson/AQI.java
218
package com.example.zy.coolweather.gson; /** * Created by ZY on 2017/6/7. */ public class AQI { public AQICity city; public class AQICity{ public String aqi; public String pm25; } }
apache-2.0
jasonwee/asus-rt-n14uhp-mrtg
src/lesson_data_structures/collections_ordereddict_equality.py
366
import collections print('dict :', end=' ') d1 = {} d1['a'] = 'A' d1['b'] = 'B' d1['c'] = 'C' d2 = {} d2['c'] = 'C' d2['b'] = 'B' d2['a'] = 'A' print(d1 == d2) print('OrderedDict:', end=' ') d1 = collections.OrderedDict() d1['a'] = 'A' d1['b'] = 'B' d1['c'] = 'C' d2 = collections.OrderedDict() d2['c'] = 'C' d2['b'] = 'B' d2['a'] = 'A' print(d1 == d2)
apache-2.0
exzogeni/droidkit
src/main/java/com/exzogeni/dk/log/formatter/SimpleLogFormatter.java
2117
/* * Copyright (c) 2012-2014 Daniel Serdyukov * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.exzogeni.dk.log.formatter; import android.support.annotation.NonNull; import org.apache.commons.io.IOUtils; import java.io.BufferedWriter; import java.io.PrintWriter; import java.io.StringWriter; import java.util.Locale; /** * @author Daniel Serdyukov */ public class SimpleLogFormatter implements LogFormatter { private static StringBuilder newMessageBuilder(Thread thread, StackTraceElement caller) { return new StringBuilder(256) .append("[").append(thread.getName()).append("]") .append(" ").append(caller).append(" >>>>>\n"); } @Override public String format(@NonNull Thread thread, @NonNull StackTraceElement caller, @NonNull String format, Object... args) { final StringBuilder message = newMessageBuilder(thread, caller); if (args.length > 0) { message.append(String.format(Locale.US, format, args)); } else { message.append(format); } return message.toString(); } @Override public String format(@NonNull Thread thread, @NonNull StackTraceElement caller, @NonNull Throwable e) { final StringBuilder message = newMessageBuilder(thread, caller); final StringWriter trace = new StringWriter(); final PrintWriter traceWriter = new PrintWriter(new BufferedWriter(trace, 512), true); try { e.printStackTrace(traceWriter); } finally { traceWriter.flush(); IOUtils.closeQuietly(traceWriter); } return message.append(trace.toString()).toString(); } }
apache-2.0
wanbangsoftware/Everdigm
Wbs.Everdigm.Web/Wbs.Everdigm.Desktop/Properties/Settings.Designer.cs
1094
//------------------------------------------------------------------------------ // <auto-generated> // 此代码由工具生成。 // 运行时版本:4.0.30319.42000 // // 对此文件的更改可能会导致不正确的行为,并且如果 // 重新生成代码,这些更改将会丢失。 // </auto-generated> //------------------------------------------------------------------------------ namespace Wbs.Everdigm.Desktop.Properties { [global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()] [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.VisualStudio.Editors.SettingsDesigner.SettingsSingleFileGenerator", "14.0.0.0")] internal sealed partial class Settings : global::System.Configuration.ApplicationSettingsBase { private static Settings defaultInstance = ((Settings)(global::System.Configuration.ApplicationSettingsBase.Synchronized(new Settings()))); public static Settings Default { get { return defaultInstance; } } } }
apache-2.0
dassmeta/passport
passport-core-service/src/main/java/com/dassmeta/passport/core/service/impl/RoleServiceImpl.java
5442
package com.dassmeta.passport.core.service.impl; import java.util.List; import java.util.Map; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import org.springframework.transaction.TransactionStatus; import org.springframework.transaction.support.TransactionCallbackWithoutResult; import org.springframework.transaction.support.TransactionTemplate; import com.dassmeta.passport.core.service.RoleService; import com.dassmeta.passport.dal.dataobject.UrpPermission; import com.dassmeta.passport.dal.dataobject.UrpRole; import com.dassmeta.passport.dal.dataobject.UrpRolePermission; import com.dassmeta.passport.dal.ibatis.UrpPermissionDao; import com.dassmeta.passport.dal.ibatis.UrpRoleDao; import com.dassmeta.passport.dal.ibatis.UrpRolePermissionDao; import com.dassmeta.passport.dal.ibatis.UrpUserRoleDao; import com.dassmeta.passport.util.PageList; @Service("roleService") public class RoleServiceImpl implements RoleService { @Autowired private UrpRoleDao roleDao; @Autowired private UrpRolePermissionDao rolePermissionDao; @Autowired private UrpUserRoleDao userRoleDao; @Autowired private UrpPermissionDao permissionDao; @Autowired private TransactionTemplate aclTransactionTemplate; public void delete(final UrpRole role) { aclTransactionTemplate.execute(new TransactionCallbackWithoutResult() { @Override protected void doInTransactionWithoutResult(TransactionStatus status) { roleDao.remove(role); rolePermissionDao.removeByRoleId(role.getId()); userRoleDao.removeByRoleId(role.getId()); } }); } public void deleteRolePermission(Long roleId) { this.rolePermissionDao.removeByRoleId(roleId); } public List<UrpPermission> findPermissionInfo() { return this.permissionDao.getAllPermission(); } public List<UrpPermission> getRolePermission(Long roleId) { return this.permissionDao.findByRoleId(roleId); } public List<UrpRole> getAllRole() { return roleDao.getAllRole(); } public List<UrpRole> getAllRoleByOrgCode(String orgId) { // 
orgId = " and orgCode = " + orgId + " "; // Query query = this.baseDao.executeHQL("from UrpRole t where t.visible='Y'" + orgId + " order by t.roleName"); // return query.list(); return null; } public PageList<UrpRole> findForPage(Map<String, Object> params, int pageSize, int pageNo) { // Criterion cri = null; // Order o = null; // o = CriterionBuilder.getOrder("createTime", false); // return this.baseDao.findForPage(UrpRole.class, cri, Integer.valueOf(page), Integer.valueOf(10), o); // return roleDao.findPageList(params, pageSize, pageNo); } public List<?> findAuUserDetail(String id) { // String s = "select c.USER_NAME,b.ORG_NAME,c.JOB_NAME from AU_ORG_INFO b,AU_USER_DETAIL c,"; // s = s + "URP_USER_ROLE d\twhere d.ROLE_ID='" + id + "' and b.ID=c.ORG_ID and d.USER_ID=c.USER_ID "; // Query query = this.baseDao.executeSQL(s); // return query.list(); return null; } public void saveOrUpdateRolePermission(UrpRolePermission urpRolePermission) { // this.baseDao.save(urpRolePermission); this.rolePermissionDao.saveOrUpdate(urpRolePermission); } public String getRolePer(String roleId) { // String perID = ""; // String perS = ""; // String hql = "from UrpRolePermission t where t.roleId='" + roleId + "'"; // List roleL = this.baseDao.executeHQL(hql).list(); // for (int i = 0; i < roleL.size(); i++) { // UrpRolePermission role = (UrpRolePermission) roleL.get(i); // perID = perID + "'" + role.getPermissionId() + "',"; // } // if (!"".equals(perID)) { // perID = perID.substring(0, perID.length() - 1); // String hql2 = "from UrpPermission t where t.id in (" + perID + ")"; // List perL = this.baseDao.executeHQL(hql2).list(); // for (int i = 0; i < perL.size(); i++) { // UrpPermission per = (UrpPermission) perL.get(i); // perS = perS + per.getName() + "���"; // } // } // return perS; return null; } public void deleteRolePermission(UrpRolePermission urpRolePermission) { // String hql = "from UrpRolePermission a where a.roleId = " + urpRolePermission.getRoleId() + // " and 
a.permissionId = " + urpRolePermission.getPermissionId(); // Query query = this.baseDao.executeHQL(hql); // this.baseDao.deleteAll(query.list()); } public List getAllDepart() { // String hql = "from AuOrgInfo"; // List list = this.baseDao.executeHQL(hql).list(); // return list; return null; } public void enabledRole(UrpRole role) { // UrpRole ur = (UrpRole) this.baseDao.get(UrpRole.class, role.getId()); // if (ur != null) { // this.baseDao.executeHQL("update UrpRole r set r.visible=?,r.modifyTime=? where r.id=?").setString(0, // "Y").setDate(1, new Date()).setSerializable(2, ur.getId()).executeUpdate(); // } } public void disabledRole(UrpRole role) { // UrpRole ur = (UrpRole) this.baseDao.get(UrpRole.class, role.getId()); // if (ur != null) { // this.baseDao.executeHQL("update UrpRole r set r.visible=?,r.modifyTime=? where r.id=?").setString(0, // "N").setDate(1, new Date()).setSerializable(2, ur.getId()).executeUpdate(); // } } public void setAclTransactionTemplate(TransactionTemplate aclTransactionTemplate) { this.aclTransactionTemplate = aclTransactionTemplate; } }
apache-2.0
jessyZu/jsongood
jsongood-core/src/main/java/com/github/jessyZu/jsongood/core/RpcRequestDecoder.java
137
/** * */ package com.github.jessyZu.jsongood.core; public interface RpcRequestDecoder { RpcRequest decode(String payload); }
apache-2.0
coderion/youtrack-export
src/main/java/pl/coderion/rest/DefaultYouTrackService.java
19035
package pl.coderion.rest; import com.ulisesbocchio.jasyptspringboot.annotation.EnableEncryptableProperties; import com.x5.template.Chunk; import com.x5.template.Theme; import org.joda.time.Duration; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.core.ParameterizedTypeReference; import org.springframework.http.HttpEntity; import org.springframework.http.HttpHeaders; import org.springframework.stereotype.Component; import org.springframework.util.LinkedMultiValueMap; import org.springframework.util.MultiValueMap; import org.springframework.web.client.HttpClientErrorException; import pl.coderion.config.YouTrackConfiguration; import pl.coderion.exception.AuthenticationException; import pl.coderion.model.Field; import pl.coderion.model.Issue; import pl.coderion.model.IssueCompacts; import pl.coderion.template.DateTimeFilter; import java.io.BufferedWriter; import java.io.IOException; import java.nio.file.Files; import java.nio.file.NoSuchFileException; import java.nio.file.Paths; import java.text.DateFormat; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Calendar; import java.util.Date; import java.util.List; /** * Copyright (C) Coderion sp. z o.o. 
*/ @Component @EnableEncryptableProperties public class DefaultYouTrackService implements YouTrackService { @Autowired YouTrackConfiguration youTrackConfiguration; @Autowired YouTrackRestClient youTrackRestClient; @Autowired YouTrackTimeCalculator youTrackTimeCalculator; Logger logger = LoggerFactory.getLogger(getClass()); private static final String SESSION_COOKIE_NAME = "YTSESSIONID"; private static final String SECURITY_COOKIE_NAME = "jetbrains.charisma.main.security.PRINCIPAL"; private static final Integer NORMAL_PRIORITY_TIME = 2400; private static final Integer CRITICAL_PRIORITY_TIME = 180; private static final Integer MAJOR_PRIORITY_TIME = 1440; @Override public void login() throws HttpClientErrorException { YouTrackMethodEnum youTrackMethodEnum = YouTrackMethodEnum.LOGIN; MultiValueMap<String, String> map = new LinkedMultiValueMap<String, String>(); map.add("login", youTrackConfiguration.getLogin()); map.add("password", youTrackConfiguration.getPassword()); HttpEntity<MultiValueMap<String, String>> requestEntity = new HttpEntity<MultiValueMap<String, String>>(map, null); String url = youTrackConfiguration.getUrl() + youTrackMethodEnum.getUrl(); try { HttpEntity<String> response = youTrackRestClient.execute(url, youTrackMethodEnum.getHttpMethod(), requestEntity, String.class); logger.info("Logged in."); logger.info("Project: " + youTrackConfiguration.getProject()); storeCookieData(response.getHeaders().get("Set-Cookie")); } catch (HttpClientErrorException e) { throw new AuthenticationException(e.getStatusCode()); } } @Override public List<String[]> getReleasePlannedDate(Date dateFrom, Date dateTo, String listPanel) { YouTrackMethodEnum youTrackMethodEnum = YouTrackMethodEnum.GET_ISSUES; // List of arrays with the final data & list of the times (needed to sort later) List<String[]> list = new ArrayList<>(); List<Long> plannedReleaseTimes = new ArrayList<>(); String[] headers = { "Name", "Report title", "Type", "Priority", "Create date", "State", "Task 
creator", "Task maintainer", "Planned test release date", "Report generate date", "Over time [hours]", "Over time [days]", "Effort" }; list.add(headers); Long reportDate = new Date().getTime(); Calendar cal = Calendar.getInstance(); cal.setTimeInMillis(reportDate); DateFormat curDateFormat = new SimpleDateFormat ("yyyy-MM-dd HH:mm"); //rest HttpHeaders requestHeaders = new HttpHeaders(); requestHeaders.add("Cookie", youTrackConfiguration.getCookieSessionId()); requestHeaders.add("Cookie", youTrackConfiguration.getCookieSecurity()); HttpEntity requestEntity = new HttpEntity(null, requestHeaders); DateFormat form = new SimpleDateFormat("yyyy-MM-dd"); String url = youTrackConfiguration.getUrl() + String.format(youTrackMethodEnum.getUrl(), form.format(dateFrom), form.format(dateTo), listPanel).replace("ALL", "IMA,IDFR,IMAFO,FO"); ParameterizedTypeReference<IssueCompacts> typeRef = new ParameterizedTypeReference<IssueCompacts>() { }; HttpEntity<IssueCompacts> response = youTrackRestClient.execute(url, youTrackMethodEnum.getHttpMethod(), requestEntity, typeRef); IssueCompacts issues = response.getBody(); // Getting all data from issues by streams for (Issue issue : issues.getIssues()) { for (Field field : issue.getFields()) { if (field.getName().equals("numberInProject")) { logger.info(field.getValue().toString()); } } boolean isPlannedReleaseDatePresent = issue.getFields().stream().filter(f -> f.getName().equals("Planned test release date")).count() > 0; if (!isPlannedReleaseDatePresent) continue; Long plannedReleaseDate = Long.valueOf(removeBrackets(issue.getFields().stream().filter(f -> f.getName().equals("Planned test release date")).map(Field::getValue) .findFirst().get())); String name = issue.getFields().stream().filter(f -> f.getName().equals("projectShortName")).map(Field::getValue).findFirst().get() +"-"; name += issue.getFields().stream().filter(f -> f.getName().equals("numberInProject")).map(Field::getValue).findFirst().get(); String summary = 
issue.getFields().stream().filter(f -> f.getName().equals("summary")).map(Field::getValue).findFirst().get(); String type = issue.getFields().stream().filter(f -> f.getName().equals("Type")).map(Field::getValue).findFirst().get(); String priority = issue.getFields().stream().filter(f -> f.getName().equals("Priority")).map(Field::getValue).findFirst().get(); Long createDate = Long.parseLong(issue.getFields().stream().filter(f -> f.getName().equals("created")).map(Field::getValue).findFirst().get()); String state = issue.getFields().stream().filter(f -> f.getName().equals("State")).map(Field::getValue).findFirst().get(); String taskCreator = issue.getFields().stream().filter(f -> f.getName().equals("reporterFullName")).map(Field::getValue).findFirst().get(); boolean isMaintainerPresent = issue.getFields().stream().filter(f -> f.getName().equals("Maintainer")).count() > 0; String taskMaintainer = ""; if (isMaintainerPresent) taskMaintainer = issue.getFields().stream().filter(f -> f.getName().equals("Maintainer")).map(Field::getValue).findFirst().get(); boolean isEffortPresent = issue.getFields().stream().filter(f -> f.getName().equals("Effort")).count() > 0; String effort = ""; if (isEffortPresent) effort = removeBrackets(issue.getFields().stream().filter(f -> f.getName().equals("Effort")).map(Field::getValueId).findFirst().get().toString()); DateFormat format = new SimpleDateFormat("yyyy-MM-dd"); Calendar calendar = Calendar.getInstance(); Calendar calendar1 = Calendar.getInstance(); calendar.setTimeInMillis(createDate); calendar1.setTimeInMillis(plannedReleaseDate); Long releaseOverTimeHours = 0L; Long releaseOverTimeDays = 0L; if (reportDate > plannedReleaseDate) { releaseOverTimeHours = Math.abs(new Duration(reportDate, plannedReleaseDate).getStandardHours()); releaseOverTimeDays = Math.abs(new Duration(reportDate, plannedReleaseDate).getStandardDays()); } String[] values = { name, summary, removeBrackets(type), removeBrackets(priority), 
format.format(calendar.getTime()), removeBrackets(state), taskCreator, removeBrackets(taskMaintainer), format.format(calendar1.getTime()), format.format(cal.getTime()), releaseOverTimeHours+"h", releaseOverTimeDays+ (releaseOverTimeDays == 1 ? " day" : " days"), effort }; plannedReleaseTimes.add(plannedReleaseDate); list.add(values); } // returns sorted by planned test release date list return sort(list, plannedReleaseTimes); } @Override public List<String[]> getIssuesByDate(Date dateFrom, Date dateTo, String listPanel) { YouTrackMethodEnum youTrackMethodEnum = YouTrackMethodEnum.GET_ISSUES_BY_DATE; // List of arrays with the final data & list of the times (needed to sort later) List<String[]> listTab = new ArrayList<>(); List<Long> resolveTimes = new ArrayList<>(); String headers[] = { "Name", "Report title", "Type", "Priority", "Created date", "Resolved date", "Resolved time [mins]", "Resolved time [hours]+[mins]", "Over time [mins]", "Over time [hours]+[mins]", "State", "Task creator", "Task maintainer" }; listTab.add(headers); HttpHeaders requestHeaders = new HttpHeaders(); requestHeaders.add("Cookie", youTrackConfiguration.getCookieSessionId()); requestHeaders.add("Cookie", youTrackConfiguration.getCookieSecurity()); HttpEntity requestEntity = new HttpEntity(null, requestHeaders); DateFormat form = new SimpleDateFormat("yyyy-MM-dd"); String url = youTrackConfiguration.getUrl() + String.format(youTrackMethodEnum.getUrl(), form.format(dateFrom), form.format(dateTo), listPanel) .replace("ALL","IMA,IDFR,FO,IMAFO"); ParameterizedTypeReference<IssueCompacts> typeRef = new ParameterizedTypeReference<IssueCompacts>() { }; HttpEntity<IssueCompacts> response = youTrackRestClient.execute(url, youTrackMethodEnum.getHttpMethod(), requestEntity, typeRef); IssueCompacts issues = response.getBody(); // Getting all data from issues by streams int cnt = 0; for (Issue issue : issues.getIssues()) { String type = issue.getFields().stream().filter(f -> 
f.getName().equals("Type")).map(Field::getValue).findFirst().get(); String creator = issue.getFields().stream().filter(f -> f.getName().equals("reporterFullName")).map(Field::getValue).findFirst().get(); String state = issue.getFields().stream().filter(f -> f.getName().equals("State")).map(Field::getValue).findFirst().get(); boolean env = issue.getFields().stream().filter(f -> f.getName().equals("Environment")).map(Field::getValue).findFirst().isPresent(); String environment = ""; if (env) environment = issue.getFields().stream().filter(f -> f.getName().equals("Environment")).map(Field::getValue).findFirst().get(); String priority = issue.getFields().stream().filter(f -> f.getName().equals("Priority")).map(Field::getValue).findFirst().get(); String name = issue.getFields().stream().filter(f -> f.getName().equals("projectShortName")).map(Field::getValue).findFirst().get() +"-"; name += issue.getFields().stream().filter(f -> f.getName().equals("numberInProject")).map(Field::getValue).findFirst().get(); String summary = issue.getFields().stream().filter(f -> f.getName().equals("summary")).map(Field::getValue).findFirst().get(); Long createDate = Long.parseLong(issue.getFields().stream().filter(f -> f.getName().equals("created")).map(Field::getValue).findFirst().get()); boolean isResolved = issue.getFields().stream().filter(f -> f.getName().equals("resolved")).map(Field::getValue).findFirst().isPresent(); Long resolveDate = 0L; if (isResolved) resolveDate = Long.parseLong(issue.getFields().stream().filter(f -> f.getName().equals("resolved")).map(Field::getValue).findFirst().get()); boolean isMaintainerPresent = issue.getFields().stream().filter(f -> f.getName().equals("Maintainer")).count() > 0; String taskMaintainer = ""; if (isMaintainerPresent) taskMaintainer = issue.getFields().stream().filter(f -> f.getName().equals("Maintainer")).map(Field::getValue).findFirst().get(); // Takes only Bugs with environment "Production" and state fixed or virified if 
(type.equals("Bug") && isResolved && environment.equals("Production") && (state.equals("Fixed") || state.equals("Verified"))) { cnt++; DateFormat format = new SimpleDateFormat("dd/MM/yyyy HH:mm"); Calendar calendar = Calendar.getInstance(); Calendar calendar1 = Calendar.getInstance(); calendar.setTimeInMillis(createDate); calendar1.setTimeInMillis(resolveDate); Long minutes; Long overTime; // Counts the time spent on issues with high or normal priority // + checks whether or not maintainer have spent more time than he had if (youTrackConfiguration.getHighPriorityStates().contains(priority)) { minutes = youTrackTimeCalculator.getIssueHighPriorityResolveTime(name); if (priority.equals("Major")) { overTime = minutes > MAJOR_PRIORITY_TIME ? minutes - MAJOR_PRIORITY_TIME : 0; } else { overTime = minutes > CRITICAL_PRIORITY_TIME ? minutes - CRITICAL_PRIORITY_TIME : 0; } logger.info(name + " " + type + " " + priority+" "+ "created: " + format.format(calendar.getTime()) + " " + "resolved: " + format.format(calendar1.getTime()) + " czas: " + youTrackTimeCalculator.getIssueHighPriorityResolveTime(name)+" status: "+state); } else { minutes = youTrackTimeCalculator.getIssueNormalResolveTime(name); overTime = minutes > NORMAL_PRIORITY_TIME ? 
minutes - NORMAL_PRIORITY_TIME : 0; logger.info(name + " " + type + " " + priority+" "+ "created: " + format.format(calendar.getTime()) + " " + "resolved: " + format.format(calendar1.getTime()) + " czas: " + youTrackTimeCalculator.getIssueNormalResolveTime(name)+" status: "+state); } resolveTimes.add(resolveDate); String[] arr = { name, summary, removeBrackets(type), removeBrackets(priority), format.format(calendar.getTime()), format.format(calendar1.getTime()), minutes+"", minutes / 60+"h "+minutes % 60+"m", overTime+"m", overTime / 60+"h "+overTime % 60+"m", removeBrackets(state), creator, removeBrackets(taskMaintainer) }; listTab.add(arr); } } logger.info(cnt+""); // returns sorted by resolve time list of issues return sort(listTab, resolveTimes); } // Method that sorts list by list of the times public List<String[]> sort(List<String[]> list, List<Long> resolveTimes) { List<String[]> sortedList = new ArrayList<>(); sortedList.add(list.get(0)); list.remove(0); int size = list.size(); boolean visited[] = new boolean[size+1]; for (int i = 0; i < size; i++) { Long min = Long.MAX_VALUE; int index = 0; for (int j = 0; j < size; j++) { if (visited[j]) continue; if (resolveTimes.get(j) < min) { index = j; } min = Math.min(min, resolveTimes.get(j)); } sortedList.add(list.get(index)); visited[index] = true; } return sortedList; } public String removeBrackets(String s) { return s.replace("[","").replace("]","").replace("\"", ""); } public void generateHTML(String issues) { // Generate HTML document long startTime = System.currentTimeMillis(); try { if (!Files.exists(Paths.get(youTrackConfiguration.getOutput()))) { Files.createDirectory(Paths.get(youTrackConfiguration.getOutput())); } } catch (IOException e) { logger.error(e.getMessage(), e); } try (BufferedWriter writer = Files.newBufferedWriter(Paths.get(youTrackConfiguration.getOutput() + "/index.html"))) { Theme theme = new Theme(); theme.registerFilter(new DateTimeFilter()); Chunk html = 
theme.makeChunk("template#simple"); html.set("url", youTrackConfiguration.getUrl()); html.set("project", youTrackConfiguration.getProject()); html.set("issues", issues); String output = html.toString(); writer.write(output); long duration = System.currentTimeMillis() - startTime; logger.info(String.format("Generated HTML document [%sms]: %s", duration, youTrackConfiguration.getOutput())); } catch (NoSuchFileException e) { logger.error("There is no file: " + youTrackConfiguration.getOutput()); } catch (IOException e) { logger.error(e.getMessage(), e); } } /** * Stores authentication data from cookie after successful login * @param cookieHeaders */ public void storeCookieData(List<String> cookieHeaders) { for (String cookieHeader : cookieHeaders) { logger.debug("Cookie: " + cookieHeader); if (cookieHeader.startsWith(SESSION_COOKIE_NAME)) { youTrackConfiguration.setCookieSessionId(cookieHeader); } else if (cookieHeader.startsWith(SECURITY_COOKIE_NAME)) { youTrackConfiguration.setCookieSecurity(cookieHeader); } } } }
apache-2.0
beeldengeluid/zieook
backend/zieook-api/zieook-api-workflow/src/main/java/nl/gridline/zieook/workflow/api/CollectionImport.java
5352
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations */ package nl.gridline.zieook.workflow.api; import javax.servlet.http.HttpServletRequest; import javax.ws.rs.Consumes; import javax.ws.rs.DELETE; import javax.ws.rs.GET; import javax.ws.rs.POST; import javax.ws.rs.PUT; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.Produces; import javax.ws.rs.QueryParam; import javax.ws.rs.core.Context; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import nl.gridline.zieook.workflow.model.Collection; import nl.gridline.zieook.workflow.model.Collections; import nl.gridline.zieook.workflow.model.OAIMetadata; import nl.gridline.zieook.workflow.model.OAISets; import nl.gridline.zieook.workflow.model.TaskConfigList; import nl.gridline.zieook.workflow.model.TaskConfigMap; import org.jboss.resteasy.annotations.GZIP; /** * CRUD on collections meta data: collection import, update and delete - collection content read is covered in the * {@link nl.gridline.zieook.api.CollectionData} interface of the zieook-api-data * <p /> * Project zieook-api-workflow<br /> * CollectionImport.java created 7 feb. 
2011 * <p /> * Copyright, all rights reserved 2011 GridLine Amsterdam * @author <a href="mailto:job@gridline.nl">Job</a> * @version $Revision$, $Date$ */ @Path(Constants.APIPATH) public interface CollectionImport { @GET @GZIP @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML}) @Path("collection/oai_sets") OAISets getAvailableSets(@QueryParam("url") String url); @GET @GZIP @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML}) @Path("collection/oai_metadata") OAIMetadata getAvailableMetaData(@QueryParam("url") String url); /** * Create a collection based on the posted collection object. A collection might point to a local directory, a link * or wait for a data upload * @param cp */ @POST @GZIP @Consumes({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML}) @Path("collection/{cp}") Response createCollectionMeta(@PathParam("cp") String cp, Collection collection); @GET @GZIP @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML}) @Path("collection/{cp}/{collection}") Collection readCollectionMeta(@PathParam("cp") String cp, @PathParam("collection") String collection); @GET @GZIP @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML}) @Path("collection-list/{cp}") Collections readCollectionsMeta(@PathParam("cp") String cp); @GET @GZIP @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML}) @Path("collection-list") Collections readCollectionsMeta(); /** * Update collection meta data * @param cp - content provider * @param collectionname - collection name * @param collection - collection meta data */ @PUT @GZIP @Consumes({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML}) @Path("collection-meta/{cp}/{collection}") Response updateCollectionMeta(@PathParam("cp") String cp, @PathParam("collection") String collectionname, Collection collection); /** * Update collection data by providing a file upload * @param cp content provider * @param collection collection name * @param type one of [user,rating,collection] 
*/ @PUT @GZIP @Consumes({"binary/octet-stream"}) @Path("collection/{cp}/{collection}/{part}") Response updateCollectionData(@PathParam("cp") String cp, @PathParam("collection") String collection, @PathParam("part") String part, @Context HttpServletRequest request); /** * Delete a collection from the server * @param cp - content provider * @param collection - collection name */ @DELETE @GZIP @Path("collection/{cp}/{collection}") Response deleteCollection(@PathParam("cp") String cp, @PathParam("collection") String collection); @GET @GZIP @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML}) @Path("collection-import/{cp}/{collection}") TaskConfigMap collectionImportState(@PathParam("cp") String cp, @PathParam("collection") String collection); /** * @param cp * @param collection * @return */ @GET @GZIP @Produces({MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML}) @Path("collection-implist/{cp}/{collection}") TaskConfigList collectionImportList(@PathParam("cp") String cp, @PathParam("collection") String collection, @QueryParam("start") Long start, @QueryParam("end") Long end); @PUT @Consumes({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON}) @Path("collection-schedule/{cp}/{collection}") Response scheduleCollection(@PathParam("cp") String cp, @PathParam("collection") String collection, String date); }
apache-2.0
googleapis/nodejs-dataproc-metastore
samples/generated/v1beta/dataproc_metastore.export_metadata.js
2845
// Copyright 2021 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 'use strict'; function main(service) { // [START metastore_v1beta_generated_DataprocMetastore_ExportMetadata_async] /** * TODO(developer): Uncomment these variables before running the sample. */ /** * A Cloud Storage URI of a folder, in the format * `gs://<bucket_name>/<path_inside_bucket>`. A sub-folder * `<export_folder>` containing exported files will be created below it. */ // const destinationGcsFolder = 'abc123' /** * Required. The relative resource name of the metastore service to run * export, in the following form: * `projects/{project_id}/locations/{location_id}/services/{service_id}` */ // const service = 'abc123' /** * Optional. A request ID. Specify a unique request ID to allow the server to * ignore the request if it has completed. The server will ignore subsequent * requests that provide a duplicate request ID for at least 60 minutes after * the first request. * For example, if an initial request times out, followed by another request * with the same request ID, the server ignores the second request to prevent * the creation of duplicate commitments. * The request ID must be a valid * UUID (https://en.wikipedia.org/wiki/Universally_unique_identifier#Format). * A zero UUID (00000000-0000-0000-0000-000000000000) is not supported. */ // const requestId = 'abc123' /** * Optional. The type of the database dump. If unspecified, defaults to * `MYSQL`. 
*/ // const databaseDumpType = {} // Imports the Metastore library const {DataprocMetastoreClient} = require('@google-cloud/dataproc-metastore').v1beta; // Instantiates a client const metastoreClient = new DataprocMetastoreClient(); async function callExportMetadata() { // Construct request const request = { service, }; // Run request const [operation] = await metastoreClient.exportMetadata(request); const [response] = await operation.promise(); console.log(response); } callExportMetadata(); // [END metastore_v1beta_generated_DataprocMetastore_ExportMetadata_async] } process.on('unhandledRejection', err => { console.error(err.message); process.exitCode = 1; }); main(...process.argv.slice(2));
apache-2.0
jkandasa/hawkular-inventory
hawkular-inventory-api/src/main/java/org/hawkular/inventory/api/paging/Page.java
3037
/* * Copyright 2015 Red Hat, Inc. and/or its affiliates * and other contributors as indicated by the @author tags. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.hawkular.inventory.api.paging; import java.io.IOException; import java.util.Iterator; import java.util.List; import java.util.Spliterator; import java.util.Spliterators; import java.util.stream.Collectors; import java.util.stream.StreamSupport; /** * A read-only list representing a single page of some results. * * <p>Contains a reference to the paging state object that describes the position of the page in some overall results * .<p/> * * <p>This implements the {@link AutoCloseable} so make sure you call the {@link #close()} method or you use the * try-with-resource statement, in order to prevent potential memory leaks. 
</p> * * @author Lukas Krejci * @since 0.0.1 */ public class Page<T> implements Iterator<T>, AutoCloseable, Iterable<T> { private Iterator<T> wrapped; private final PageContext pageContext; private final long totalSize; public Page(Iterator<T> wrapped, PageContext pageContext, long totalSize) { this.wrapped = wrapped; this.pageContext = pageContext; this.totalSize = totalSize; } protected Page(PageContext pageContext, long totalSize) { this(null, pageContext, totalSize); } /** * @return the information about the page of the results that this object represents */ public PageContext getPageContext() { return pageContext; } /** * @return the total number of results of which this page is a subset of */ public long getTotalSize() { return totalSize; } /** * Try to avoid calling this method in production code, because it can have bad impact on performance * * @return results in a list form */ public List<T> toList() { return StreamSupport.stream(Spliterators.spliteratorUnknownSize(this, Spliterator.ORDERED), false) .collect(Collectors.<T>toList()); } @Override public boolean hasNext() { return wrapped != null && wrapped.hasNext(); } @Override public T next() { if (wrapped == null) { throw new IllegalStateException("the iterator has been already closed"); } return wrapped.next(); } @Override public void close() throws IOException { this.wrapped = null; } @Override public Iterator<T> iterator() { return this; } }
apache-2.0
spinnaker/halyard
halyard-config/src/main/java/com/netflix/spinnaker/halyard/config/validate/v1/security/X509Validator.java
1345
/* * Copyright 2017 Target, Inc. * * Licensed under the Apache License, Version 2.0 (the "License") * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.netflix.spinnaker.halyard.config.validate.v1.security; import com.netflix.spinnaker.halyard.config.model.v1.node.Validator; import com.netflix.spinnaker.halyard.config.model.v1.security.X509; import com.netflix.spinnaker.halyard.config.problem.v1.ConfigProblemSetBuilder; import com.netflix.spinnaker.halyard.core.problem.v1.Problem; import org.springframework.stereotype.Component; @Component public class X509Validator extends Validator<X509> { @Override public void validate(ConfigProblemSetBuilder p, X509 x509) { if (!x509.isEnabled()) { return; } if (x509.getRoleOid() != null && (x509.getRoleOid() == "")) { p.addProblem(Problem.Severity.ERROR, "roleOid specified but given blank line"); } } }
apache-2.0
ebi-uniprot/QuickGOBE
ontology-rest/src/test/java/uk/ac/ebi/quickgo/ontology/traversal/read/OntologyGraphConfigIT.java
2232
package uk.ac.ebi.quickgo.ontology.traversal.read;

import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.batch.core.BatchStatus;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.test.JobLauncherTestUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.TestPropertySource;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;

import uk.ac.ebi.quickgo.ontology.traversal.OntologyGraph;

import java.util.Set;
import java.util.stream.Collectors;

import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsInAnyOrder;

/**
 * Check that the {@link OntologyGraphConfig} correctly sets up an instance of
 * {@link OntologyGraph}, by reading test data resources.
 *
 * The {@code ontology.traversal.source} property below points the batch job at
 * two gzipped relation files on the test classpath (GO and ECO relations); the
 * assertions then verify the vertices and edge types loaded from them.
 *
 * Created 18/05/16
 * @author Edd
 */
@RunWith(SpringJUnit4ClassRunner.class)
@SpringBootTest(classes = {OntologyGraphConfig.class, JobTestRunnerConfig.class})
@TestPropertySource(properties =
        "ontology.traversal.source=classpath:/relations/RELATIONS.dat.gz,classpath:/relations/ECO_RELATIONS.dat.gz")
public class OntologyGraphConfigIT {

    // Spring Batch helper used to launch the graph-loading job under test.
    @Autowired
    private JobLauncherTestUtils jobLauncherTestUtils;

    // The graph instance populated by the job; wired up by OntologyGraphConfig.
    @Autowired
    private OntologyGraph ontologyGraph;

    @Test
    public void runOntologyGraphLoading() throws Exception {
        // Run the whole loading job and require it to finish successfully.
        JobExecution jobExecution = jobLauncherTestUtils.launchJob();
        BatchStatus status = jobExecution.getStatus();
        assertThat(status, is(BatchStatus.COMPLETED));

        // Vertices from both source files (GO terms and ECO codes) must be present.
        assertThat(ontologyGraph.getVertices(), containsInAnyOrder(
                "GO:0000001", "GO:0048308",
                "ECO:0000205", "ECO:0000361", "ECO:0001149", "ECO:0000269"
        ));

        // Collapse the loaded edges to their relationship short names
        // (e.g. "I" = is_a, "P" = part_of) and compare as a set.
        Set<String> edges = ontologyGraph.getEdges()
                .stream()
                .map(rel -> rel.relationship.getShortName())
                .collect(Collectors.toSet());

        assertThat(edges, containsInAnyOrder(
                "I", "P", "R", "CO", "UI"
        ));
    }
}
apache-2.0
meitar/zaproxy
zap/src/main/java/org/zaproxy/zap/view/OptionsConnectionPanel.java
13029
/*
 * Zed Attack Proxy (ZAP) and its related class files.
 *
 * ZAP is an HTTP/HTTPS proxy for assessing web application security.
 *
 * Copyright 2010 The ZAP Development Team
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.zaproxy.zap.view;

import java.awt.CardLayout;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import javax.swing.JCheckBox;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JPasswordField;
import org.parosproxy.paros.Constant;
import org.parosproxy.paros.model.OptionsParam;
import org.parosproxy.paros.network.ConnectionParam;
import org.parosproxy.paros.view.AbstractParamPanel;
import org.zaproxy.zap.utils.FontUtils;
import org.zaproxy.zap.utils.ZapTextField;

/**
 * Options panel for the outgoing ("chained") proxy authentication settings:
 * realm, user name and password, plus a "show password" toggle. The panel is
 * hosted inside a {@link ProxyDialog}; pressing Enter in the password field
 * saves and closes that dialog.
 *
 * NOTE(review): much of the layout code below looks GUI-builder generated;
 * statement order matters to the resulting layout, so it is documented here
 * rather than restructured.
 */
public class OptionsConnectionPanel extends AbstractParamPanel {

    private static final long serialVersionUID = 1L;

    // Lazily-built sub-panels (see the getPanel* accessors below).
    private JPanel panelProxyAuth = null;
    private JPanel panelProxyChain = null;

    // Input fields for the proxy-chain credentials.
    private ZapTextField txtProxyChainRealm = null;
    private ZapTextField txtProxyChainUserName = null;
    private JPasswordField txtProxyChainPassword = null;
    private JCheckBox chkShowPassword = null;

    // Owning dialog; needed so Enter-in-password can trigger save-and-close.
    private ProxyDialog proxyDialog = null;

    // When true, the password label uses the "prompt" wording instead of the
    // plain "password" wording (the dialog is asking for, not editing, it).
    private boolean prompting = false;

    public void setProxyDialog(ProxyDialog proxyDialog) {
        this.proxyDialog = proxyDialog;
    }

    public OptionsConnectionPanel(boolean prompting) {
        super();
        this.prompting = prompting;
        initialize();
    }

    public OptionsConnectionPanel() {
        super();
        initialize();
    }

    /**
     * This method initializes panelProxyAuth
     *
     * @return javax.swing.JPanel
     */
    private JPanel getPanelProxyAuth() {
        if (panelProxyAuth == null) {
            // One GridBagConstraints instance per placed component; the numeric
            // suffixes come from the original GUI builder.
            java.awt.GridBagConstraints gridBagConstraints82 = new GridBagConstraints();
            java.awt.GridBagConstraints gridBagConstraints72 = new GridBagConstraints();
            java.awt.GridBagConstraints gridBagConstraints62 = new GridBagConstraints();
            java.awt.GridBagConstraints gridBagConstraints52 = new GridBagConstraints();
            java.awt.GridBagConstraints gridBagConstraints42 = new GridBagConstraints();
            java.awt.GridBagConstraints gridBagConstraints31 = new GridBagConstraints();
            java.awt.GridBagConstraints gridBagConstraints21 = new GridBagConstraints();
            java.awt.GridBagConstraints gridBagConstraints16 = new GridBagConstraints();
            javax.swing.JLabel jLabel11 = new JLabel();
            javax.swing.JLabel jLabel10 = new JLabel();
            javax.swing.JLabel jLabel9 = new JLabel();

            panelProxyAuth = new JPanel();
            panelProxyAuth.setLayout(new GridBagLayout());
            jLabel9.setText(Constant.messages.getString("conn.options.proxy.auth.realm"));
            jLabel10.setText(Constant.messages.getString("conn.options.proxy.auth.username"));
            // Password label wording depends on whether we are prompting the
            // user for the password or just letting them edit it.
            if (prompting) {
                jLabel11.setText(Constant.messages.getString("conn.options.proxy.auth.passprompt"));
            } else {
                jLabel11.setText(Constant.messages.getString("conn.options.proxy.auth.password"));
            }
            panelProxyAuth.setBorder(
                    javax.swing.BorderFactory.createTitledBorder(
                            null,
                            Constant.messages.getString("conn.options.proxy.auth.auth"),
                            javax.swing.border.TitledBorder.DEFAULT_JUSTIFICATION,
                            javax.swing.border.TitledBorder.DEFAULT_POSITION,
                            FontUtils.getFont(FontUtils.Size.standard),
                            java.awt.Color.black));

            // NOTE(review): gridBagConstraints16 is configured (row 0, full
            // width) but never passed to an add() call below — presumably a
            // leftover from a removed component; confirm before removing.
            gridBagConstraints16.gridx = 0;
            gridBagConstraints16.gridy = 0;
            gridBagConstraints16.insets = new java.awt.Insets(2, 2, 2, 2);
            gridBagConstraints16.anchor = java.awt.GridBagConstraints.NORTHWEST;
            gridBagConstraints16.fill = java.awt.GridBagConstraints.HORIZONTAL;
            gridBagConstraints16.gridwidth = 2;
            gridBagConstraints16.weightx = 1.0D;

            // Row 1: realm label (col 0) + realm text field (col 1).
            gridBagConstraints21.gridx = 0;
            gridBagConstraints21.gridy = 1;
            gridBagConstraints21.insets = new java.awt.Insets(2, 2, 2, 2);
            gridBagConstraints21.anchor = java.awt.GridBagConstraints.WEST;
            gridBagConstraints21.fill = java.awt.GridBagConstraints.HORIZONTAL;
            gridBagConstraints21.weightx = 0.5D;
            gridBagConstraints31.gridx = 1;
            gridBagConstraints31.gridy = 1;
            gridBagConstraints31.weightx = 0.5D;
            gridBagConstraints31.fill = java.awt.GridBagConstraints.HORIZONTAL;
            gridBagConstraints31.insets = new java.awt.Insets(2, 2, 2, 2);
            gridBagConstraints31.anchor = java.awt.GridBagConstraints.WEST;
            gridBagConstraints31.ipadx = 50;

            // Row 2: user-name label (col 0) + user-name text field (col 1).
            gridBagConstraints42.gridx = 0;
            gridBagConstraints42.gridy = 2;
            gridBagConstraints42.insets = new java.awt.Insets(2, 2, 2, 2);
            gridBagConstraints42.fill = java.awt.GridBagConstraints.HORIZONTAL;
            gridBagConstraints42.weightx = 0.5D;
            gridBagConstraints42.anchor = java.awt.GridBagConstraints.WEST;
            gridBagConstraints52.gridx = 1;
            gridBagConstraints52.gridy = 2;
            gridBagConstraints52.weightx = 0.5D;
            gridBagConstraints52.fill = java.awt.GridBagConstraints.HORIZONTAL;
            gridBagConstraints52.insets = new java.awt.Insets(2, 2, 2, 2);
            gridBagConstraints52.anchor = java.awt.GridBagConstraints.WEST;
            gridBagConstraints52.ipadx = 50;

            // Row 3: password label (col 0) + password field (col 1).
            gridBagConstraints62.gridx = 0;
            gridBagConstraints62.gridy = 3;
            gridBagConstraints62.insets = new java.awt.Insets(2, 2, 2, 2);
            gridBagConstraints62.anchor = java.awt.GridBagConstraints.WEST;
            gridBagConstraints62.fill = java.awt.GridBagConstraints.HORIZONTAL;
            gridBagConstraints62.weightx = 0.5D;
            gridBagConstraints72.gridx = 1;
            gridBagConstraints72.gridy = 3;
            gridBagConstraints72.weightx = 0.5D;
            gridBagConstraints72.fill = java.awt.GridBagConstraints.HORIZONTAL;
            gridBagConstraints72.insets = new java.awt.Insets(2, 2, 2, 2);
            gridBagConstraints72.anchor = java.awt.GridBagConstraints.WEST;
            gridBagConstraints72.ipadx = 50;

            // Row 4: "show password" checkbox (col 1 only).
            gridBagConstraints82.gridx = 1;
            gridBagConstraints82.gridy = 4;
            gridBagConstraints82.weightx = 0.5D;
            gridBagConstraints82.fill = java.awt.GridBagConstraints.HORIZONTAL;
            gridBagConstraints82.insets = new java.awt.Insets(2, 2, 2, 2);
            gridBagConstraints82.anchor = java.awt.GridBagConstraints.WEST;
            gridBagConstraints82.ipadx = 50;

            panelProxyAuth.add(jLabel9, gridBagConstraints21);
            panelProxyAuth.add(getTxtProxyChainRealm(), gridBagConstraints31);
            panelProxyAuth.add(jLabel10, gridBagConstraints42);
            panelProxyAuth.add(getTxtProxyChainUserName(), gridBagConstraints52);
            panelProxyAuth.add(jLabel11, gridBagConstraints62);
            panelProxyAuth.add(getTxtProxyChainPassword(), gridBagConstraints72);
            panelProxyAuth.add(getChkShowPassword(), gridBagConstraints82);
        }
        return panelProxyAuth;
    }

    /**
     * This method initializes panelProxyChain
     *
     * @return javax.swing.JPanel
     */
    private JPanel getPanelProxyChain() {
        if (panelProxyChain == null) {
            panelProxyChain = new JPanel();
            java.awt.GridBagConstraints gridBagConstraints92 = new GridBagConstraints();
            // Empty filler label that absorbs the remaining vertical space so
            // the auth panel stays pinned to the top.
            javax.swing.JLabel jLabel8 = new JLabel();
            java.awt.GridBagConstraints gridBagConstraints102 = new GridBagConstraints();

            panelProxyChain.setLayout(new GridBagLayout());
            gridBagConstraints92.gridx = 0;
            gridBagConstraints92.gridy = 0;
            gridBagConstraints92.insets = new java.awt.Insets(2, 2, 2, 2);
            gridBagConstraints92.anchor = java.awt.GridBagConstraints.NORTHWEST;
            gridBagConstraints92.fill = java.awt.GridBagConstraints.HORIZONTAL;
            panelProxyChain.setName("Proxy Chain");
            jLabel8.setText("");
            gridBagConstraints102.anchor = java.awt.GridBagConstraints.NORTHWEST;
            gridBagConstraints102.fill = java.awt.GridBagConstraints.BOTH;
            gridBagConstraints102.gridx = 0;
            gridBagConstraints102.gridy = 1;
            gridBagConstraints102.weightx = 1.0D;
            gridBagConstraints102.weighty = 1.0D;
            panelProxyChain.add(getPanelProxyAuth(), gridBagConstraints92);
            panelProxyChain.add(jLabel8, gridBagConstraints102);
        }
        return panelProxyChain;
    }

    /** This method initializes this */
    private void initialize() {
        this.setLayout(new CardLayout());
        this.setName(Constant.messages.getString("conn.options.title"));
        this.add(getPanelProxyChain(), getPanelProxyChain().getName());
    }

    @Override
    public void initParam(Object obj) {
        // Populate the fields from the persisted connection options.
        OptionsParam optionsParam = (OptionsParam) obj;
        ConnectionParam connectionParam = optionsParam.getConnectionParam();

        // set Proxy Chain parameters
        txtProxyChainRealm.setText(connectionParam.getProxyChainRealm());
        txtProxyChainRealm.discardAllEdits();
        txtProxyChainUserName.setText(connectionParam.getProxyChainUserName());
        txtProxyChainUserName.discardAllEdits();
        // NOTE(review): the password field is intentionally not pre-filled
        // here (only realm/user name are loaded) — presumably because the
        // password is never persisted; confirm against ConnectionParam.
        chkShowPassword.setSelected(false); // Default don't show (everytime)
        txtProxyChainPassword.setEchoChar('*'); // Default mask (everytime)
        this.proxyDialog.pack();
    }

    @Override
    public void saveParam(Object obj) throws Exception {
        // Copy the edited values back into the connection options.
        OptionsParam optionsParam = (OptionsParam) obj;
        ConnectionParam connectionParam = optionsParam.getConnectionParam();

        connectionParam.setProxyChainRealm(txtProxyChainRealm.getText());
        connectionParam.setProxyChainUserName(txtProxyChainUserName.getText());
        // Make sure this isn't saved in the config file
        connectionParam.setProxyChainPassword(
                new String(txtProxyChainPassword.getPassword()), false);
    }

    private ZapTextField getTxtProxyChainRealm() {
        if (txtProxyChainRealm == null) {
            txtProxyChainRealm = new ZapTextField();
        }
        return txtProxyChainRealm;
    }

    private ZapTextField getTxtProxyChainUserName() {
        if (txtProxyChainUserName == null) {
            txtProxyChainUserName = new ZapTextField();
        }
        return txtProxyChainUserName;
    }

    private JPasswordField getTxtProxyChainPassword() {
        if (txtProxyChainPassword == null) {
            txtProxyChainPassword = new JPasswordField();
            // Pressing Enter in the password field saves and closes the
            // hosting dialog.
            txtProxyChainPassword.addActionListener(
                    new java.awt.event.ActionListener() {

                        @Override
                        public void actionPerformed(java.awt.event.ActionEvent e) {
                            proxyDialog.saveAndClose();
                        }
                    });
        }
        return txtProxyChainPassword;
    }

    /**
     * This method initializes chkShowPassword
     *
     * @return javax.swing.JCheckBox
     */
    private JCheckBox getChkShowPassword() {
        if (chkShowPassword == null) {
            chkShowPassword = new JCheckBox();
            chkShowPassword.setText(Constant.messages.getString("conn.options.proxy.auth.showpass"));
            chkShowPassword.addActionListener(
                    new java.awt.event.ActionListener() {

                        @Override
                        public void actionPerformed(java.awt.event.ActionEvent e) {
                            // Echo char 0 shows the password in clear text;
                            // '*' masks it again.
                            if (chkShowPassword.isSelected()) {
                                txtProxyChainPassword.setEchoChar((char) 0);
                            } else {
                                txtProxyChainPassword.setEchoChar('*');
                            }
                        }
                    });
        }
        return chkShowPassword;
    }

    /** Moves keyboard focus to the password field (used when prompting). */
    public void passwordFocus() {
        this.getTxtProxyChainPassword().requestFocus();
    }

    @Override
    public String getHelpIndex() {
        return "ui.dialogs.options.connection";
    }
}
apache-2.0
glutwins/pholcus
vendor/golang.org/x/text/encoding/charmap/maketables.go
11954
// Copyright 2013 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. // +build ignore package main import ( "bufio" "fmt" "log" "net/http" "sort" "strings" "unicode/utf8" "golang.org/x/text/encoding" "golang.org/x/text/internal/gen" ) const ascii = "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f" + "\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f" + ` !"#$%&'()*+,-./0123456789:;<=>?` + `@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_` + "`abcdefghijklmnopqrstuvwxyz{|}~\u007f" var encodings = []struct { name string mib string comment string varName string replacement byte mapping string }{ { "IBM Code Page 437", "PC8CodePage437", "", "CodePage437", encoding.ASCIISub, "http://source.icu-project.org/repos/icu/data/trunk/charset/data/ucm/glibc-IBM437-2.1.2.ucm", }, { "IBM Code Page 850", "PC850Multilingual", "", "CodePage850", encoding.ASCIISub, "http://source.icu-project.org/repos/icu/data/trunk/charset/data/ucm/glibc-IBM850-2.1.2.ucm", }, { "IBM Code Page 852", "PCp852", "", "CodePage852", encoding.ASCIISub, "http://source.icu-project.org/repos/icu/data/trunk/charset/data/ucm/glibc-IBM852-2.1.2.ucm", }, { "IBM Code Page 855", "IBM855", "", "CodePage855", encoding.ASCIISub, "http://source.icu-project.org/repos/icu/data/trunk/charset/data/ucm/glibc-IBM855-2.1.2.ucm", }, { "Windows Code Page 858", // PC latin1 with Euro "IBM00858", "", "CodePage858", encoding.ASCIISub, "http://source.icu-project.org/repos/icu/data/trunk/charset/data/ucm/windows-858-2000.ucm", }, { "IBM Code Page 862", "PC862LatinHebrew", "", "CodePage862", encoding.ASCIISub, "http://source.icu-project.org/repos/icu/data/trunk/charset/data/ucm/glibc-IBM862-2.1.2.ucm", }, { "IBM Code Page 866", "IBM866", "", "CodePage866", encoding.ASCIISub, "http://encoding.spec.whatwg.org/index-ibm866.txt", }, { "ISO 8859-1", "ISOLatin1", "", "ISO8859_1", encoding.ASCIISub, 
"http://source.icu-project.org/repos/icu/data/trunk/charset/data/ucm/iso-8859_1-1998.ucm", }, { "ISO 8859-2", "ISOLatin2", "", "ISO8859_2", encoding.ASCIISub, "http://encoding.spec.whatwg.org/index-iso-8859-2.txt", }, { "ISO 8859-3", "ISOLatin3", "", "ISO8859_3", encoding.ASCIISub, "http://encoding.spec.whatwg.org/index-iso-8859-3.txt", }, { "ISO 8859-4", "ISOLatin4", "", "ISO8859_4", encoding.ASCIISub, "http://encoding.spec.whatwg.org/index-iso-8859-4.txt", }, { "ISO 8859-5", "ISOLatinCyrillic", "", "ISO8859_5", encoding.ASCIISub, "http://encoding.spec.whatwg.org/index-iso-8859-5.txt", }, { "ISO 8859-6", "ISOLatinArabic", "", "ISO8859_6,ISO8859_6E,ISO8859_6I", encoding.ASCIISub, "http://encoding.spec.whatwg.org/index-iso-8859-6.txt", }, { "ISO 8859-7", "ISOLatinGreek", "", "ISO8859_7", encoding.ASCIISub, "http://encoding.spec.whatwg.org/index-iso-8859-7.txt", }, { "ISO 8859-8", "ISOLatinHebrew", "", "ISO8859_8,ISO8859_8E,ISO8859_8I", encoding.ASCIISub, "http://encoding.spec.whatwg.org/index-iso-8859-8.txt", }, { "ISO 8859-10", "ISOLatin6", "", "ISO8859_10", encoding.ASCIISub, "http://encoding.spec.whatwg.org/index-iso-8859-10.txt", }, { "ISO 8859-13", "ISO885913", "", "ISO8859_13", encoding.ASCIISub, "http://encoding.spec.whatwg.org/index-iso-8859-13.txt", }, { "ISO 8859-14", "ISO885914", "", "ISO8859_14", encoding.ASCIISub, "http://encoding.spec.whatwg.org/index-iso-8859-14.txt", }, { "ISO 8859-15", "ISO885915", "", "ISO8859_15", encoding.ASCIISub, "http://encoding.spec.whatwg.org/index-iso-8859-15.txt", }, { "ISO 8859-16", "ISO885916", "", "ISO8859_16", encoding.ASCIISub, "http://encoding.spec.whatwg.org/index-iso-8859-16.txt", }, { "KOI8-R", "KOI8R", "", "KOI8R", encoding.ASCIISub, "http://encoding.spec.whatwg.org/index-koi8-r.txt", }, { "KOI8-U", "KOI8U", "", "KOI8U", encoding.ASCIISub, "http://encoding.spec.whatwg.org/index-koi8-u.txt", }, { "Macintosh", "Macintosh", "", "Macintosh", encoding.ASCIISub, "http://encoding.spec.whatwg.org/index-macintosh.txt", }, 
{ "Macintosh Cyrillic", "MacintoshCyrillic", "", "MacintoshCyrillic", encoding.ASCIISub, "http://encoding.spec.whatwg.org/index-x-mac-cyrillic.txt", }, { "Windows 874", "Windows874", "", "Windows874", encoding.ASCIISub, "http://encoding.spec.whatwg.org/index-windows-874.txt", }, { "Windows 1250", "Windows1250", "", "Windows1250", encoding.ASCIISub, "http://encoding.spec.whatwg.org/index-windows-1250.txt", }, { "Windows 1251", "Windows1251", "", "Windows1251", encoding.ASCIISub, "http://encoding.spec.whatwg.org/index-windows-1251.txt", }, { "Windows 1252", "Windows1252", "", "Windows1252", encoding.ASCIISub, "http://encoding.spec.whatwg.org/index-windows-1252.txt", }, { "Windows 1253", "Windows1253", "", "Windows1253", encoding.ASCIISub, "http://encoding.spec.whatwg.org/index-windows-1253.txt", }, { "Windows 1254", "Windows1254", "", "Windows1254", encoding.ASCIISub, "http://encoding.spec.whatwg.org/index-windows-1254.txt", }, { "Windows 1255", "Windows1255", "", "Windows1255", encoding.ASCIISub, "http://encoding.spec.whatwg.org/index-windows-1255.txt", }, { "Windows 1256", "Windows1256", "", "Windows1256", encoding.ASCIISub, "http://encoding.spec.whatwg.org/index-windows-1256.txt", }, { "Windows 1257", "Windows1257", "", "Windows1257", encoding.ASCIISub, "http://encoding.spec.whatwg.org/index-windows-1257.txt", }, { "Windows 1258", "Windows1258", "", "Windows1258", encoding.ASCIISub, "http://encoding.spec.whatwg.org/index-windows-1258.txt", }, { "X-User-Defined", "XUserDefined", "It is defined at http://encoding.spec.whatwg.org/#x-user-defined", "XUserDefined", encoding.ASCIISub, ascii + "\uf780\uf781\uf782\uf783\uf784\uf785\uf786\uf787" + "\uf788\uf789\uf78a\uf78b\uf78c\uf78d\uf78e\uf78f" + "\uf790\uf791\uf792\uf793\uf794\uf795\uf796\uf797" + "\uf798\uf799\uf79a\uf79b\uf79c\uf79d\uf79e\uf79f" + "\uf7a0\uf7a1\uf7a2\uf7a3\uf7a4\uf7a5\uf7a6\uf7a7" + "\uf7a8\uf7a9\uf7aa\uf7ab\uf7ac\uf7ad\uf7ae\uf7af" + "\uf7b0\uf7b1\uf7b2\uf7b3\uf7b4\uf7b5\uf7b6\uf7b7" + 
"\uf7b8\uf7b9\uf7ba\uf7bb\uf7bc\uf7bd\uf7be\uf7bf" + "\uf7c0\uf7c1\uf7c2\uf7c3\uf7c4\uf7c5\uf7c6\uf7c7" + "\uf7c8\uf7c9\uf7ca\uf7cb\uf7cc\uf7cd\uf7ce\uf7cf" + "\uf7d0\uf7d1\uf7d2\uf7d3\uf7d4\uf7d5\uf7d6\uf7d7" + "\uf7d8\uf7d9\uf7da\uf7db\uf7dc\uf7dd\uf7de\uf7df" + "\uf7e0\uf7e1\uf7e2\uf7e3\uf7e4\uf7e5\uf7e6\uf7e7" + "\uf7e8\uf7e9\uf7ea\uf7eb\uf7ec\uf7ed\uf7ee\uf7ef" + "\uf7f0\uf7f1\uf7f2\uf7f3\uf7f4\uf7f5\uf7f6\uf7f7" + "\uf7f8\uf7f9\uf7fa\uf7fb\uf7fc\uf7fd\uf7fe\uf7ff", }, } func getWHATWG(url string) string { res, err := http.Get(url) if err != nil { log.Fatalf("%q: Get: %v", url, err) } defer res.Body.Close() mapping := make([]rune, 128) for i := range mapping { mapping[i] = '\ufffd' } scanner := bufio.NewScanner(res.Body) for scanner.Scan() { s := strings.TrimSpace(scanner.Text()) if s == "" || s[0] == '#' { continue } x, y := 0, 0 if _, err := fmt.Sscanf(s, "%d\t0x%x", &x, &y); err != nil { log.Fatalf("could not parse %q", s) } if x < 0 || 128 <= x { log.Fatalf("code %d is out of range", x) } if 0x80 <= y && y < 0xa0 { // We diverge from the WHATWG spec by mapping control characters // in the range [0x80, 0xa0) to U+FFFD. 
continue } mapping[x] = rune(y) } return ascii + string(mapping) } func getUCM(url string) string { res, err := http.Get(url) if err != nil { log.Fatalf("%q: Get: %v", url, err) } defer res.Body.Close() mapping := make([]rune, 256) for i := range mapping { mapping[i] = '\ufffd' } charsFound := 0 scanner := bufio.NewScanner(res.Body) for scanner.Scan() { s := strings.TrimSpace(scanner.Text()) if s == "" || s[0] == '#' { continue } var c byte var r rune if _, err := fmt.Sscanf(s, `<U%x> \x%x |0`, &r, &c); err != nil { continue } mapping[c] = r charsFound++ } if charsFound < 200 { log.Fatalf("%q: only %d characters found (wrong page format?)", url, charsFound) } return string(mapping) } func main() { mibs := map[string]bool{} all := []string{} w := gen.NewCodeWriter() defer w.WriteGoFile("tables.go", "charmap") printf := func(s string, a ...interface{}) { fmt.Fprintf(w, s, a...) } printf("import (\n") printf("\t\"golang.org/x/text/encoding\"\n") printf("\t\"golang.org/x/text/encoding/internal/identifier\"\n") printf(")\n\n") for _, e := range encodings { varNames := strings.Split(e.varName, ",") all = append(all, varNames...) 
varName := varNames[0] switch { case strings.HasPrefix(e.mapping, "http://encoding.spec.whatwg.org/"): e.mapping = getWHATWG(e.mapping) case strings.HasPrefix(e.mapping, "http://source.icu-project.org/repos/icu/data/trunk/charset/data/ucm/"): e.mapping = getUCM(e.mapping) } asciiSuperset, low := strings.HasPrefix(e.mapping, ascii), 0x00 if asciiSuperset { low = 0x80 } lvn := 1 if strings.HasPrefix(varName, "ISO") || strings.HasPrefix(varName, "KOI") { lvn = 3 } lowerVarName := strings.ToLower(varName[:lvn]) + varName[lvn:] printf("// %s is the %s encoding.\n", varName, e.name) if e.comment != "" { printf("//\n// %s\n", e.comment) } printf("var %s encoding.Encoding = &%s\n\nvar %s = charmap{\nname: %q,\n", varName, lowerVarName, lowerVarName, e.name) if mibs[e.mib] { log.Fatalf("MIB type %q declared multiple times.", e.mib) } printf("mib: identifier.%s,\n", e.mib) printf("asciiSuperset: %t,\n", asciiSuperset) printf("low: 0x%02x,\n", low) printf("replacement: 0x%02x,\n", e.replacement) printf("decode: [256]utf8Enc{\n") i, backMapping := 0, map[rune]byte{} for _, c := range e.mapping { if _, ok := backMapping[c]; !ok && c != utf8.RuneError { backMapping[c] = byte(i) } var buf [8]byte n := utf8.EncodeRune(buf[:], c) if n > 3 { panic(fmt.Sprintf("rune %q (%U) is too long", c, c)) } printf("{%d,[3]byte{0x%02x,0x%02x,0x%02x}},", n, buf[0], buf[1], buf[2]) if i%2 == 1 { printf("\n") } i++ } printf("},\n") printf("encode: [256]uint32{\n") encode := make([]uint32, 0, 256) for c, i := range backMapping { encode = append(encode, uint32(i)<<24|uint32(c)) } sort.Sort(byRune(encode)) for len(encode) < cap(encode) { encode = append(encode, encode[len(encode)-1]) } for i, enc := range encode { printf("0x%08x,", enc) if i%8 == 7 { printf("\n") } } printf("},\n}\n") // Add an estimate of the size of a single charmap{} struct value, which // includes two 256 elem arrays of 4 bytes and some extra fields, which // align to 3 uint64s on 64-bit architectures. 
w.Size += 2*4*256 + 3*8 } // TODO: add proper line breaking. printf("var listAll = []encoding.Encoding{\n%s,\n}\n\n", strings.Join(all, ",\n")) } type byRune []uint32 func (b byRune) Len() int { return len(b) } func (b byRune) Less(i, j int) bool { return b[i]&0xffffff < b[j]&0xffffff } func (b byRune) Swap(i, j int) { b[i], b[j] = b[j], b[i] }
apache-2.0
equella/Equella
Source/Plugins/Core/com.equella.core/src/com/tle/core/item/edit/attachment/PackageResourceAttachmentEditor.java
965
/*
 * Licensed to The Apereo Foundation under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for additional
 * information regarding copyright ownership.
 *
 * The Apereo Foundation licenses this file to you under the Apache License,
 * Version 2.0, (the "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at:
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.tle.core.item.edit.attachment;

/**
 * Attachment editor for a resource inside a content package (extends the
 * generic {@code AttachmentEditor} contract with package-resource specifics).
 */
public interface PackageResourceAttachmentEditor extends AttachmentEditor {
  /**
   * Sets the filename of the package resource this attachment points at.
   *
   * @param filename the resource's filename within the package
   */
  void editFilename(String filename);
}
apache-2.0
varra4u/utils4j
src/main/java/com/varra/jmx/exception/MBeanRegistrationException.java
1987
/*
 * utils4j - MBeanRegistrationException.java, Feb 3, 2011 4:45:08 PM
 *
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.varra.jmx.exception;

/**
 * Wraps exceptions thrown by the register(), unRegister() methods of the
 * <CODE>MBeanRegistration</CODE> interface.
 *
 * @author Rajakrishna V. Reddy
 * @version 1.0
 *
 */
public class MBeanRegistrationException extends JMXWrapperException {

	/** Serial version UID for this exception type. */
	private static final long serialVersionUID = -803932191385515338L;

	/**
	 * Creates an <CODE>MBeanRegistrationException</CODE> with the given detail
	 * message and no cause.
	 *
	 * @param message
	 *            the detail message describing the registration failure
	 */
	public MBeanRegistrationException(String message)
	{
		super(message);
	}

	/**
	 * Creates an <CODE>MBeanRegistrationException</CODE> that wraps the actual
	 * <CODE>java.lang.Exception</CODE>.
	 *
	 * @param cause
	 *            the wrapped exception.
	 */
	public MBeanRegistrationException(Throwable cause)
	{
		super(cause);
	}

	/**
	 * Creates an <CODE>MBeanRegistrationException</CODE> with both a detail
	 * message and the underlying cause.
	 *
	 * @param message
	 *            the detail message describing the registration failure
	 * @param cause
	 *            the underlying exception being wrapped
	 */
	public MBeanRegistrationException(String message, Throwable cause)
	{
		super(message, cause);
	}
}
apache-2.0
ahmedowian/code_katas
trees/src/test/java/ahmedowian/code_katas/trees/NodeManagerRemovingTest.java
1228
package ahmedowian.code_katas.trees;

import org.junit.Before;
import org.junit.Test;

public class NodeManagerRemovingTest extends NodeManagerTestBase {

    /**
     * Sets up the tree to look like this:
     *     A        -> Removing A on level 0 will cause there to be no root
     *    /\ \
     *   B  I C2    -> Removing B on level 1 will cause a conflict of C and C2!
     *  / \  \  \
     * C   D  J  L
     *    //\\  \  \
     *    EF GH  K  M
     *
     * By definition, a tree has no cycles, so we cannot have two parents of any one node.
     * However, we can have two different nodes with the same key, making them "equal". Hence C and C2.
     */
    @Before
    public void setUp()
    {
        // Base class wires up the node fixtures; this builds the shape above.
        super.setUp();
        nodeA.addChildren(nodeB, nodeI, nodeC2);
        nodeB.addChildren(nodeC, nodeD);
        nodeD.addChildren(nodeE, nodeF, nodeG, nodeH);
        nodeI.addChildren(nodeJ);
        nodeJ.addChildren(nodeK);
        nodeC2.addChildren(nodeL);
        nodeL.addChildren(nodeM);
    }

    // NOTE(review): this test currently asserts nothing — it exercises
    // remove() but never verifies the outcome, so it can only fail by
    // throwing. TODO: decide the expected behavior of removing the root
    // (empty tree? exception?) and add assertions accordingly.
    @Test
    public void removeA_At0()
    {
        TreeNode node = new TreeNode("A", 10);
        mgr.remove(node, 0);
        // What to expect here?
    }
}
apache-2.0
thlcly/JavaDemo
src/main/java/com/aaront/java/proxy/static_proxy/HelloWorldImpl.java
173
package com.aaront.java.proxy.static_proxy; public class HelloWorldImpl implements HelloWorld { public void print() { System.out.println("Hello World"); } }
apache-2.0
codersimple/DesignPattern
src/strategy/King.java
58
package strategy;

/**
 * "King" character for the Strategy-pattern demo; all behaviour is inherited
 * from the project's {@code strategy.Character} base class.
 *
 * NOTE(review): the superclass name shadows {@code java.lang.Character} inside
 * this package — presumably intentional for the demo, but worth confirming.
 */
public class King extends Character {}
apache-2.0
spinnaker/orca
orca-api/src/main/java/com/netflix/spinnaker/orca/api/pipeline/Task.java
3476
/* * Copyright 2020 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.netflix.spinnaker.orca.api.pipeline; import com.netflix.spinnaker.kork.annotations.Beta; import com.netflix.spinnaker.kork.plugins.api.internal.SpinnakerExtensionPoint; import com.netflix.spinnaker.orca.api.pipeline.models.ExecutionStatus; import com.netflix.spinnaker.orca.api.pipeline.models.StageExecution; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import javax.annotation.Nonnull; import javax.annotation.Nullable; /** A discrete unit of work in a pipeline execution that does one thing and one thing only. */ @Beta public interface Task extends SpinnakerExtensionPoint { /** * Execute the business logic of the task, using the provided stage execution state. * * @param stage The running stage execution stage * @return The result of this Task's execution */ @Nonnull TaskResult execute(@Nonnull StageExecution stage); /** * Behavior to be called on Task timeout. * * <p>This method should be used if you need to perform any cleanup operations in response to the * task being aborted after taking too long to complete. 
* * @param stage The running state execution state */ default @Nullable TaskResult onTimeout(@Nonnull StageExecution stage) { return null; } /** * Behavior to be called on Task cancellation. * * <p>This method should be used if you need to perform cleanup in response to the task being * cancelled before it was able to complete. * * @deprecated Use onCancelWithResult instead * @param stage The running state execution state */ @Deprecated default void onCancel(@Nonnull StageExecution stage) {} /** * Behavior to be called on Task cancellation. * * <p>This method should be used if you need to perform cleanup in response to the task being * cancelled before it was able to complete. * * <p>When returning a {@link TaskResult}, the {@link ExecutionStatus} will be ignored, as the * resulting status will always be {@link ExecutionStatus#CANCELED}. * * @param stage The running state execution state */ @Nullable default TaskResult onCancelWithResult(@Nonnull StageExecution stage) { onCancel(stage); return null; } /** A collection of known aliases. */ default Collection<String> aliases() { if (getClass().isAnnotationPresent(Aliases.class)) { return Arrays.asList(getClass().getAnnotation(Aliases.class).value()); } return Collections.emptyList(); } /** Allows backwards compatibility of a task's "type", even through class renames / refactors. */ @Retention(RetentionPolicy.RUNTIME) @Target(ElementType.TYPE) @interface Aliases { String[] value() default {}; } }
apache-2.0
ilxlf/google_search_bar
search/src/main/java/com/ilxlf/search/util/SystemUiHiderBase.java
1544
package com.ilxlf.search.util; import android.app.Activity; import android.view.View; import android.view.WindowManager; /** * A base implementation of {@link SystemUiHider}. Uses APIs available in all * API levels to show and hide the status bar. */ public class SystemUiHiderBase extends SystemUiHider { /** * Whether or not the system UI is currently visible. This is a cached value * from calls to {@link #hide()} and {@link #show()}. */ private boolean mVisible = true; /** * Constructor not intended to be called by clients. Use * {@link SystemUiHider#getInstance} to obtain an instance. */ protected SystemUiHiderBase(Activity activity, View anchorView, int flags) { super(activity, anchorView, flags); } @Override public boolean isVisible() { return mVisible; } @Override public void hide() { if ((mFlags & FLAG_FULLSCREEN) != 0) { mActivity.getWindow().setFlags( WindowManager.LayoutParams.FLAG_FULLSCREEN, WindowManager.LayoutParams.FLAG_FULLSCREEN); } mOnVisibilityChangeListener.onVisibilityChange(false); mVisible = false; } @Override public void show() { if ((mFlags & FLAG_FULLSCREEN) != 0) { mActivity.getWindow().setFlags( 0, WindowManager.LayoutParams.FLAG_FULLSCREEN); } mOnVisibilityChangeListener.onVisibilityChange(true); mVisible = true; } }
apache-2.0
svn2github/scalatest
src/main/scala/org/scalatest/concurrent/TimeLimitedTests.scala
6607
/*
 * Copyright 2001-2012 Artima, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.scalatest.concurrent

import org.scalatest.SuiteMixin
import org.scalatest.Suite
import Timeouts._
import org.scalatest.exceptions.ModifiableMessage
import org.scalatest.Resources
import org.scalatest.time.Span
import org.scalatest.exceptions.TimeoutField
import org.scalatest.Outcome
import org.scalatest.Exceptional

/**
 * Trait that when mixed into a suite class establishes a time limit for its tests.
 *
 * <p>
 * This trait overrides <code>withFixture</code>, wrapping a <code>super.withFixture(test)</code> call
 * in a <code>failAfter</code> invocation, specifying a timeout obtained by invoking <code>timeLimit</code>
 * and an <a href="Interruptor.html"><code>Interruptor</code></a> by invoking <code>defaultTestInterruptor</code>:
 * </p>
 *
 * <pre class="stHighlight">
 * failAfter(timeLimit) {
 *   super.withFixture(test)
 * } (defaultTestInterruptor)
 * </pre>
 *
 * <p>
 * Note that the <code>failAfter</code> method executes the body of the by-name passed to it using the same
 * thread that invoked <code>failAfter</code>. This means that the same thread will run the <code>withFixture</code> method
 * as well as each test, so no extra synchronization is required. A second thread is used to run a timer, and if the timeout
 * expires, that second thread will attempt to interrupt the main test thread via the <code>defaultTestInterruptor</code>.
 * </p>
 *
 * <p>
 * The <code>timeLimit</code> field is abstract in this trait. Thus you must specify a time limit when you use it.
 * For example, the following code specifies that each test must complete within 200 milliseconds:
 * </p>
 *
 * <pre class="stHighlight">
 * import org.scalatest.FunSpec
 * import org.scalatest.concurrent.TimeLimitedTests
 * import org.scalatest.time.SpanSugar._
 *
 * class ExampleSpec extends FunSpec with TimeLimitedTests {
 *
 *   // Note: You may need to either write 200.millis or (200 millis), or
 *   // place a semicolon or blank line after plain old 200 millis, to
 *   // avoid the semicolon inference problems of postfix operator notation.
 *   val timeLimit = 200 millis
 *
 *   describe("A time-limited test") {
 *     it("should succeed if it completes within the time limit") {
 *       Thread.sleep(100)
 *     }
 *     it("should fail if it is taking too darn long") {
 *       Thread.sleep(300)
 *     }
 *   }
 * }
 * </pre>
 *
 * <p>
 * If you run the above <code>ExampleSpec</code>, the second test will fail with the error message:
 * </p>
 *
 * <p>
 * <code>The test did not complete within the specified 200 millisecond time limit.</code>
 * </p>
 *
 * <p>
 * The <code>failAfter</code> method uses an <code>Interruptor</code> to attempt to interrupt the main test thread if the timeout
 * expires. The default <code>Interruptor</code> returned by the <code>defaultTestInterruptor</code> method is a
 * <a href="ThreadInterruptor$.html"><code>ThreadInterruptor</code></a>, which calls <code>interrupt</code> on the main test thread. If you wish to change this
 * interruption strategy, override <code>defaultTestInterruptor</code> to return a different <code>Interruptor</code>. For example,
 * here's how you'd change the default to <a href="DoNotInterrupt$.html"><code>DoNotInterrupt</code></a>, a very patient interruption strategy that does nothing to
 * interrupt the main test thread:
 * </p>
 *
 * <pre class="stHighlight">
 * import org.scalatest.FunSpec
 * import org.scalatest.concurrent.TimeLimitedTests
 * import org.scalatest.time.SpanSugar._
 *
 * class ExampleSpec extends FunSpec with TimeLimitedTests {
 *
 *   val timeLimit = 200 millis
 *
 *   override val defaultTestInterruptor = DoNotInterrupt
 *
 *   describe("A time-limited test") {
 *     it("should succeed if it completes within the time limit") {
 *       Thread.sleep(100)
 *     }
 *     it("should fail if it is taking too darn long") {
 *       Thread.sleep(300)
 *     }
 *   }
 * }
 * </pre>
 *
 * <p>
 * Like the previous incarnation of <code>ExampleSuite</code>, the second test will fail with an error message that indicates
 * a timeout expired. But whereas in the previous case, the <code>Thread.sleep</code> would be interrupted after 200 milliseconds,
 * in this case it is never interrupted. In the previous case, the failed test requires a little over 200 milliseconds to run.
 * In this case, because the <code>sleep(300)</code> is never interrupted, the failed test requires a little over 300 milliseconds
 * to run.
 * </p>
 */
trait TimeLimitedTests extends SuiteMixin { this: Suite =>

  /**
   * A stackable implementation of <code>withFixture</code> that wraps a call to <code>super.withFixture</code> in a
   * <code>failAfter</code> invocation.
   *
   * @param test the test on which to enforce a time limit
   */
  abstract override def withFixture(test: NoArgTest): Outcome = {
    try {
      failAfter(timeLimit) {
        super.withFixture(test)
      } (defaultTestInterruptor)
    }
    catch {
      // The ordering of these cases matters: the timeout case must come before
      // the general Throwable case, or a timeout would be reported without the
      // friendlier "time limit exceeded" message substituted below.
      case e: org.scalatest.exceptions.ModifiableMessage[_] with TimeoutField =>
        // Replace the raw timeout message with one that names the configured
        // time limit (e.timeout), e.g. "The test did not complete within the
        // specified 200 millisecond time limit."
        Exceptional(e.modifyMessage(opts => Some(Resources("testTimeLimitExceeded", e.timeout.prettyString))))
      case t: Throwable =>
        // Any other failure thrown by the wrapped fixture is converted to an
        // Exceptional outcome unchanged.
        Exceptional(t)
    }
  }

  /**
   * The time limit, in milliseconds, in which each test in a <code>Suite</code> that mixes in
   * <code>TimeLimitedTests</code> must complete.
   */
  def timeLimit: Span

  /**
   * The default <a href="Interruptor.html"><code>Interruptor</code></a> strategy used to interrupt tests that exceed their time limit.
   *
   * <p>
   * This trait's implementation of this method returns <a href="ThreadInterruptor$.html"><code>ThreadInterruptor</code></a>, which invokes <code>interrupt</code>
   * on the main test thread. Override this method to change the test interruption strategy.
   * </p>
   *
   * @return a <code>ThreadInterruptor</code>
   */
  val defaultTestInterruptor: Interruptor = ThreadInterruptor
}

/*
 Will need to add cancelAfter to the doc comment in 2.0.
*/
apache-2.0
miranda-messaging/miranda
src/main/java/com/ltsllc/miranda/file/states/SingleFileWritingState.java
2592
package com.ltsllc.miranda.file.states;

import com.ltsllc.miranda.Message;
import com.ltsllc.miranda.Panic;
import com.ltsllc.miranda.Results;
import com.ltsllc.miranda.State;
import com.ltsllc.miranda.clientinterface.MirandaException;
import com.ltsllc.miranda.file.SingleFile;
import com.ltsllc.miranda.miranda.Miranda;
import com.ltsllc.miranda.shutdown.ShutdownMessage;
import com.ltsllc.miranda.writer.WriteResponseMessage;

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.BlockingQueue;

/**
 * The state a {@link SingleFile} is in while a write is outstanding.
 *
 * <p>While in this state the file collects queues of parties interested in the
 * write ("write listeners") and, upon receiving a {@link WriteResponseMessage},
 * transitions back to the ready state supplied at construction.</p>
 */
public class SingleFileWritingState extends State {
    /** Queues to notify when the pending write completes. */
    private List<BlockingQueue<Message>> writeListeners = new ArrayList<>();

    /** The state to return to once the write response arrives. */
    private State readyState;

    public State getReadyState() {
        return readyState;
    }

    public void setReadyState(State readyState) {
        this.readyState = readyState;
    }

    public List<BlockingQueue<Message>> getWriteListeners() {
        return writeListeners;
    }

    public void setWriteListeners(List<BlockingQueue<Message>> writeListeners) {
        this.writeListeners = writeListeners;
    }

    public SingleFileWritingState(SingleFile singleFile, State readyState) {
        super(singleFile);

        setReadyState(readyState);
    }

    /** Register a queue to be told when the pending write finishes. */
    public void addWriteListener (BlockingQueue<Message> listener) {
        writeListeners.add(listener);
    }

    /**
     * Deliver the write response to every registered listener.
     *
     * <p>If the delivering thread is interrupted mid-notification, the
     * interrupt status is restored and the system panics; listeners after the
     * interrupted one are not notified.</p>
     */
    public void tellWriteListeners (WriteResponseMessage writeResponseMessage) {
        try {
            for (BlockingQueue<Message> listener : getWriteListeners()) {
                listener.put(writeResponseMessage);
            }
        } catch (InterruptedException e) {
            // Restore the interrupt status so callers up the stack can still
            // observe that this thread was interrupted.
            Thread.currentThread().interrupt();
            Panic panic = new Panic("Exception trying to send message.", e, Panic.Reasons.ExceptionSendingMessage);
            Miranda.panicMiranda(panic);
        }
    }

    /**
     * Dispatch on the message subject: a write response completes this state;
     * anything else is handled by the superclass.
     */
    public State processMessage(Message message) throws MirandaException {
        State nextState = getContainer().getCurrentState();

        switch (message.getSubject()) {
            case WriteResponse: {
                WriteResponseMessage writeResponseMessage = (WriteResponseMessage) message;
                nextState = processWriteResonseMessage(writeResponseMessage);
                break;
            }

            default: {
                nextState = super.processMessage(message);
                break;
            }
        }

        return nextState;
    }

    /**
     * Handle the response to the outstanding write by returning to the ready
     * state.
     *
     * <p>Name kept as-is (including the "Resonse" typo) for compatibility with
     * existing callers.</p>
     *
     * <p>NOTE(review): the registered write listeners are never notified here —
     * confirm whether this should invoke
     * {@link #tellWriteListeners(WriteResponseMessage)} before transitioning.</p>
     */
    public State processWriteResonseMessage (WriteResponseMessage writeResponseMessage) {
        return getReadyState();
    }

    public SingleFile getFile () {
        return (SingleFile) getContainer();
    }
}
apache-2.0
Ztiany/AndroidBase
lib_network/src/main/java/com/android/sdk/net/progress/ProgressRequestBody.java
2751
package com.android.sdk.net.progress;

import android.os.SystemClock;
import android.support.annotation.NonNull;

import java.io.IOException;

import okhttp3.MediaType;
import okhttp3.RequestBody;
import okio.Buffer;
import okio.BufferedSink;
import okio.ForwardingSink;
import okio.Okio;
import okio.Sink;

/**
 * A {@link RequestBody} decorator that reports upload progress to a
 * {@link ProgressListener}, throttled to at most one callback per
 * {@code refreshTime} milliseconds (plus a final callback on completion).
 */
class ProgressRequestBody extends RequestBody {

    private final ProgressListener mProgressListener;
    private final RequestBody mDelegate;
    private final int mRefreshTime;

    /**
     * @param delegate         the real body to write
     * @param refreshTime      minimum interval between progress callbacks, in ms
     * @param progressListener receiver of progress / failure callbacks
     */
    ProgressRequestBody(RequestBody delegate, int refreshTime, ProgressListener progressListener) {
        this.mDelegate = delegate;
        this.mProgressListener = progressListener;
        this.mRefreshTime = refreshTime;
    }

    @Override
    public MediaType contentType() {
        return mDelegate.contentType();
    }

    /**
     * Delegates to the wrapped body; returns -1 (length unknown) if the
     * delegate throws.
     */
    @Override
    public long contentLength() {
        try {
            return mDelegate.contentLength();
        } catch (IOException e) {
            e.printStackTrace();
        }
        return -1;
    }

    @Override
    public void writeTo(@NonNull BufferedSink sink) throws IOException {
        // Fix: wrap the sink passed to THIS call instead of caching a
        // BufferedSink across calls. OkHttp may invoke writeTo() more than
        // once (retries, HttpLoggingInterceptor); a cached sink would still
        // point at the previous call's stream and corrupt the request.
        BufferedSink bufferedSink = Okio.buffer(new CountingSink(sink));
        try {
            mDelegate.writeTo(bufferedSink);
            bufferedSink.flush();
        } catch (IOException e) {
            e.printStackTrace();
            mProgressListener.onLoadFail(e);
            throw e;
        }
    }

    /** Counts bytes as they pass through and emits throttled progress events. */
    private final class CountingSink extends ForwardingSink {

        /** Sentinel meaning contentLength() has not been queried yet. */
        private static final long LENGTH_NOT_QUERIED = Long.MIN_VALUE;

        private long totalBytesWritten = 0L;
        private long lastRefreshTime = 0L; // elapsedRealtime of the last progress callback
        private long mContentLength = LENGTH_NOT_QUERIED;

        CountingSink(Sink delegate) {
            super(delegate);
        }

        @Override
        public void write(@NonNull Buffer source, long byteCount) throws IOException {
            try {
                super.write(source, byteCount);
            } catch (IOException e) {
                e.printStackTrace();
                mProgressListener.onLoadFail(e);
                throw e;
            }
            if (mContentLength == LENGTH_NOT_QUERIED) {
                // Fix: query contentLength() exactly once. The old guard
                // (mContentLength == 0) re-queried on every write whenever the
                // length was legitimately 0 or unknown (-1).
                mContentLength = contentLength();
            }
            totalBytesWritten += byteCount;
            long now = SystemClock.elapsedRealtime();
            boolean finished = totalBytesWritten == mContentLength;
            if (now - lastRefreshTime >= mRefreshTime || finished) {
                // When the length is unknown (-1) the fraction is meaningless
                // (negative); listeners should treat mContentLength <= 0 as
                // "indeterminate".
                mProgressListener.onProgress(totalBytesWritten, mContentLength,
                        totalBytesWritten * 1.0F / mContentLength, finished);
                lastRefreshTime = now;
            }
        }
    }
}
apache-2.0