repo_name
stringlengths
4
116
path
stringlengths
4
379
size
stringlengths
1
7
content
stringlengths
3
1.05M
license
stringclasses
15 values
OSGP/Integration-Tests
cucumber-tests-platform-smartmetering/src/test/java/org/opensmartgridplatform/cucumber/platform/smartmetering/glue/steps/ws/smartmetering/smartmeteringadhoc/AllAttributeValues.java
3355
/**
 * Copyright 2016 Smart Society Services B.V.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 */
package org.opensmartgridplatform.cucumber.platform.smartmetering.glue.steps.ws.smartmetering.smartmeteringadhoc;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;

import java.util.Map;

import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;

import org.opensmartgridplatform.adapter.ws.schema.smartmetering.adhoc.GetAllAttributeValuesAsyncRequest;
import org.opensmartgridplatform.adapter.ws.schema.smartmetering.adhoc.GetAllAttributeValuesAsyncResponse;
import org.opensmartgridplatform.adapter.ws.schema.smartmetering.adhoc.GetAllAttributeValuesRequest;
import org.opensmartgridplatform.adapter.ws.schema.smartmetering.adhoc.GetAllAttributeValuesResponse;
import org.opensmartgridplatform.adapter.ws.schema.smartmetering.common.OsgpResultType;
import org.opensmartgridplatform.cucumber.core.ScenarioContext;
import org.opensmartgridplatform.cucumber.platform.PlatformKeys;
import org.opensmartgridplatform.cucumber.platform.smartmetering.support.ws.smartmetering.adhoc.AllAttributeValuesRequestFactory;
import org.opensmartgridplatform.cucumber.platform.smartmetering.support.ws.smartmetering.adhoc.SmartMeteringAdHocRequestClient;
import org.opensmartgridplatform.cucumber.platform.smartmetering.support.ws.smartmetering.adhoc.SmartMeteringAdHocResponseClient;

import cucumber.api.java.en.Then;
import cucumber.api.java.en.When;

/**
 * Cucumber step definitions for the "get all attribute values" ad hoc
 * smart-metering SOAP operation: one step to submit the asynchronous request
 * and one to fetch and verify the corresponding response.
 */
public class AllAttributeValues {

    @Autowired
    private SmartMeteringAdHocRequestClient<GetAllAttributeValuesAsyncResponse, GetAllAttributeValuesRequest> requestClient;

    @Autowired
    private SmartMeteringAdHocResponseClient<GetAllAttributeValuesResponse, GetAllAttributeValuesAsyncRequest> responseClient;

    @When("^the get all attribute values request is received$")
    public void whenTheGetAllAttributeValuesRequestIsReceived(final Map<String, String> settings) throws Throwable {
        // Build the request from the scenario's parameter map and submit it.
        final GetAllAttributeValuesAsyncResponse asyncResponse =
                this.requestClient.doRequest(AllAttributeValuesRequestFactory.fromParameterMap(settings));

        assertNotNull("AsyncResponse should not be null", asyncResponse);

        // Remember the correlation UID so the response step can look up the result.
        ScenarioContext.current().put(PlatformKeys.KEY_CORRELATION_UID, asyncResponse.getCorrelationUid());
    }

    @Then("^a get all attribute values response should be returned$")
    public void thenAGetAllAttributeValuesResponseShouldBeReturned(final Map<String, String> settings) throws Throwable {
        // Retrieve the response belonging to the correlation UID stored earlier.
        final GetAllAttributeValuesAsyncRequest asyncRequest = AllAttributeValuesRequestFactory.fromScenarioContext();
        final GetAllAttributeValuesResponse response = this.responseClient.getResponse(asyncRequest);

        final OsgpResultType expectedResult = OsgpResultType.fromValue(settings.get(PlatformKeys.KEY_RESULT));
        assertEquals("Result is not as expected", expectedResult, response.getResult());
        assertTrue("Response should contain Output", StringUtils.isNotBlank(response.getOutput()));
    }
}
apache-2.0
lucasjellema/APM-Demo-App-WorldView
RestService/src/nl/amis/world/rest/rss/RSSFeedParser.java
8507
package nl.amis.world.rest.rss;

import java.io.IOException;
import java.io.InputStream;
import java.net.MalformedURLException;
import java.net.URL;

import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import javax.xml.stream.XMLEventReader;
import javax.xml.stream.XMLInputFactory;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.events.Characters;
import javax.xml.stream.events.XMLEvent;

import oracle.adf.share.logging.ADFLogger;

/**
 * JAX-RS resource that downloads one of a fixed set of public RSS feeds and
 * parses it into a {@link Feed} of {@link FeedMessage} items using StAX.
 */
@Path("world-rss")
public class RSSFeedParser {

    private static ADFLogger _logger = ADFLogger.createADFLogger(RSSFeedParser.class);

    // RSS element local names recognized by the parser.
    static final String TITLE = "title";
    static final String DESCRIPTION = "description";
    static final String CHANNEL = "channel";
    static final String LANGUAGE = "language";
    static final String COPYRIGHT = "copyright";
    static final String LINK = "link";
    static final String AUTHOR = "author";
    static final String ITEM = "item";
    static final String PUB_DATE = "pubDate";
    static final String GUID = "guid";

    URL url;
    String urlString;
    String bbcFeed = "http://feeds.bbci.co.uk/news/rss.xml?edition=uk";
    String nosFeed = "http://feeds.nos.nl/nosnieuwsalgemeen";
    String abcFeed = "http://feeds.abcnews.com/abcnews/internationalheadlines";
    String cnnFeed = "http://rss.cnn.com/rss/cnn_topstories.rss";
    String reutersFeed = "http://feeds.reuters.com/Reuters/worldNews";

    /**
     * Creates a parser for an explicit feed URL.
     *
     * @param feedUrl the RSS feed URL to fetch
     * @throws RuntimeException wrapping a {@link MalformedURLException} for a bad URL
     */
    public RSSFeedParser(String feedUrl) {
        try {
            this.urlString = feedUrl;
            this.url = new URL(feedUrl);
        } catch (MalformedURLException e) {
            throw new RuntimeException(e);
        }
    }

    /** Creates a parser defaulting to the CNN top-stories feed. */
    public RSSFeedParser() {
        try {
            this.urlString = cnnFeed;
            this.url = new URL(urlString);
        } catch (MalformedURLException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * REST entry point: fetches and parses the feed selected by {@code feedId}.
     *
     * @param feedIdentifier one of cnn/bbc/abc/nos/reuters (anything else falls back to CNN)
     * @return the parsed feed
     */
    @GET
    @Path("feed/{feedId}")
    @Produces({ MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON })
    public Feed getFeed(@PathParam("feedId") String feedIdentifier) {
        _logger.info("REST Service /world-rss/feed/{0} - returning JSON representation of RSS news feed", feedIdentifier);
        return readFeed(feedIdentifier);
    }

    /**
     * Fetches the RSS feed selected by {@code feedIdentifier} and parses it
     * into a {@link Feed}. May return {@code null} if the document contained
     * no {@code <item>} elements.
     *
     * @param feedIdentifier case-insensitive feed key; null/empty/unknown falls back to CNN
     * @return the parsed feed, or {@code null} for an item-less document
     */
    public Feed readFeed(String feedIdentifier) {
        // Normalize the identifier once. BUGFIX: the original called
        // feedIdentifier.toLowerCase() near the end of this method without a
        // null check, throwing an NPE whenever the identifier was absent.
        final String feedId = (feedIdentifier == null) ? "" : feedIdentifier.toLowerCase();
        switch (feedId) {
        case "cnn":
            urlString = cnnFeed;
            break;
        case "bbc":
            urlString = bbcFeed;
            break;
        case "abc":
            urlString = abcFeed;
            break;
        case "nos":
            urlString = nosFeed;
            break;
        case "reuters":
            urlString = reutersFeed;
            break;
        default:
            // Empty or unknown identifier falls back to CNN.
            urlString = cnnFeed;
            break;
        }

        _logger.info("readFeed - start processing RSS Feed {0}", this.urlString);
        Feed feed = null;
        try {
            boolean isFeedHeader = true;
            // Channel header values start out empty.
            String description = "";
            String title = "";
            String link = "";
            String language = "";
            String copyright = "";
            String author = "";
            String pubdate = "";
            String guid = "";

            XMLInputFactory inputFactory = XMLInputFactory.newInstance();
            // The feed is fetched via java.net.URL. NOTE: this call is not seen
            // by APM, which only monitors outbound calls made through the
            // built-in JAX-RS/JAX-WS implementations, not generic
            // HttpUrlConnection traffic.
            InputStream in = read();
            XMLEventReader eventReader = inputFactory.createXMLEventReader(in);
            try {
                while (eventReader.hasNext()) {
                    XMLEvent event = eventReader.nextEvent();
                    if (event.isStartElement()) {
                        String localPart = event.asStartElement().getName().getLocalPart();
                        switch (localPart) {
                        case ITEM:
                            if (isFeedHeader) {
                                // The first <item> marks the end of the channel
                                // header, so the Feed can now be constructed.
                                isFeedHeader = false;
                                feed = new Feed(title, link, description, language, copyright, pubdate);
                            }
                            event = eventReader.nextEvent();
                            break;
                        case TITLE:
                            title = getCharacterData(event, eventReader);
                            break;
                        case DESCRIPTION:
                            description = getCharacterData(event, eventReader);
                            break;
                        case LINK:
                            link = getCharacterData(event, eventReader);
                            break;
                        case GUID:
                            guid = getCharacterData(event, eventReader);
                            break;
                        case LANGUAGE:
                            language = getCharacterData(event, eventReader);
                            break;
                        case AUTHOR:
                            author = getCharacterData(event, eventReader);
                            break;
                        case PUB_DATE:
                            pubdate = getCharacterData(event, eventReader);
                            break;
                        case COPYRIGHT:
                            copyright = getCharacterData(event, eventReader);
                            break;
                        }
                    } else if (event.isEndElement()) {
                        // BUGFIX: compare element names with equals(); the
                        // original used '==', which tests reference identity
                        // and only worked when the parser happened to return
                        // an interned string.
                        if (ITEM.equals(event.asEndElement().getName().getLocalPart())) {
                            FeedMessage message = new FeedMessage();
                            message.setAuthor(author);
                            message.setDescription(description);
                            message.setGuid(guid);
                            message.setLink(link);
                            message.setTitle(title);
                            feed.getMessages().add(message);
                            event = eventReader.nextEvent();
                            continue;
                        }
                    }
                }
            } finally {
                // BUGFIX: release the StAX reader and the underlying stream;
                // the original leaked both.
                eventReader.close();
                in.close();
            }
        } catch (XMLStreamException e) {
            throw new RuntimeException(e);
        } catch (IOException e) {
            throw new RuntimeException(e);
        }

        if ("bbc".equals(feedId)) {
            _logger.warning("Built in intentional delay of 4 seconds when fetching BBC feed. No particular reason, just to see whether you detectives would find it.");
            try {
                Thread.sleep(4000);
            } catch (InterruptedException e) {
                // BUGFIX: restore the interrupt flag instead of swallowing it.
                Thread.currentThread().interrupt();
            }
            _logger.fine("Done with silly sleep. Back to work.");
        }
        _logger.info("readFeed - return feed after fetching and parsing ");
        return feed;
    }

    /**
     * Reads the character data following a start-element event.
     *
     * @return the text content, or "" if the next event is not character data
     */
    private String getCharacterData(XMLEvent event, XMLEventReader eventReader) throws XMLStreamException {
        String result = "";
        event = eventReader.nextEvent();
        if (event instanceof Characters) {
            result = event.asCharacters().getData();
        }
        return result;
    }

    /**
     * Opens a stream on the currently configured feed URL.
     *
     * @throws RuntimeException wrapping any URL or I/O failure
     */
    private InputStream read() {
        try {
            url = new URL(this.urlString);
            return url.openStream();
        } catch (IOException e) {
            // MalformedURLException is an IOException, so one catch suffices.
            throw new RuntimeException(e);
        }
    }
}
apache-2.0
mosaic-cloud/mosaic-java-platform
tools-exceptions/src/main/java/eu/mosaic_cloud/tools/exceptions/tools/QueuedExceptions.java
1667
/*
 * #%L
 * mosaic-tools-exceptions
 * %%
 * Copyright (C) 2010 - 2013 Institute e-Austria Timisoara (Romania)
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

package eu.mosaic_cloud.tools.exceptions.tools;

import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;

import eu.mosaic_cloud.tools.exceptions.core.CaughtException;

import com.google.common.base.Preconditions;

/**
 * An exception that aggregates a queue of previously caught exceptions so they
 * can be rethrown (or inspected) as a single unit.
 */
public final class QueuedExceptions extends Exception {
	private QueuedExceptions (final BlockingQueue<CaughtException> queue) {
		super ();
		// BUGFIX: validate the argument before storing it; the original
		// assigned the field first and only checked for null afterwards.
		Preconditions.checkNotNull (queue);
		this.queue = queue;
	}

	// The caught exceptions wrapped by this instance.
	public final BlockingQueue<CaughtException> queue;

	/**
	 * Wraps an existing queue of caught exceptions.
	 *
	 * @param queue the queue to wrap; must not be null
	 */
	public static final QueuedExceptions create (final BlockingQueue<CaughtException> queue) {
		return (new QueuedExceptions (queue));
	}

	/**
	 * Drains the tracer's accumulated exceptions into a fresh queue and wraps it.
	 *
	 * @param exceptions the tracer whose queue is drained (it is emptied as a side effect)
	 */
	public static final QueuedExceptions create (final QueueingExceptionTracer exceptions) {
		final LinkedBlockingQueue<CaughtException> queue = new LinkedBlockingQueue<CaughtException> ();
		exceptions.queue.drainTo (queue);
		return (QueuedExceptions.create (queue));
	}

	private static final long serialVersionUID = 1L;
}
apache-2.0
chanakaudaya/carbon-transports
http/org.wso2.carbon.transport.http.netty/src/test/java/org/wso2/carbon/transport/http/netty/util/server/websocket/WebSocketRemoteServer.java
2132
/* * Copyright (c) 2017, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * */ package org.wso2.carbon.transport.http.netty.util.server.websocket; import io.netty.bootstrap.ServerBootstrap; import io.netty.channel.EventLoopGroup; import io.netty.channel.nio.NioEventLoopGroup; import io.netty.channel.socket.nio.NioServerSocketChannel; import io.netty.handler.ssl.SslContext; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Simple WebSocket server for Test cases. */ public final class WebSocketRemoteServer { private static final Logger logger = LoggerFactory.getLogger(WebSocketRemoteServer.class); private final int port; private EventLoopGroup bossGroup; private EventLoopGroup workerGroup; public WebSocketRemoteServer(int port) { this.port = port; } public void run() throws InterruptedException { final SslContext sslCtx = null; bossGroup = new NioEventLoopGroup(1); workerGroup = new NioEventLoopGroup(2); ServerBootstrap b = new ServerBootstrap(); b.group(bossGroup, workerGroup) .channel(NioServerSocketChannel.class) .childHandler(new WebSocketRemoteServerInitializer(sslCtx)); b.bind(port).sync(); logger.info("WebSocket remote server started listening on port " + port); } public void stop() { bossGroup.shutdownGracefully(); workerGroup.shutdownGracefully(); logger.info("WebSocket remote server stopped listening on port " + port); } }
apache-2.0
bisigc/art
app/dao/GenericDAO.java
1893
package dao;

import java.io.Serializable;
import java.util.List;

import javax.persistence.TypedQuery;

import models.AbstractModel;

/**
 * Interface for a {@link GenericDAO}: generic CRUD operations for a JPA-backed
 * data model.
 *
 * @author cbi
 *
 * @param <T> Generic Data Model Object
 * @param <PK> Generic Primary Key of Data Model Object
 */
public interface GenericDAO<T extends AbstractModel, PK extends Serializable> {

	/**
	 * Returns the class of the data model used in the {@link GenericDAO}.
	 *
	 * @return Class of the injected Model
	 */
	Class<T> getModel();

	/**
	 * Returns all Objects of the data model.
	 *
	 * @return List of all objects of the data model
	 * @throws Exception if data access failed
	 */
	List<T> getAll() throws Exception;

	/**
	 * Finds all objects of the data model matching the submitted {@link TypedQuery}.
	 *
	 * @param query Typed Query
	 * @return List of all found data objects
	 * @throws Exception if data access failed
	 */
	List<T> find(TypedQuery<T> query) throws Exception;

	/**
	 * Receives a data model, persists it and returns the persisted object.
	 *
	 * @param t model object
	 * @return created model object
	 * @throws Exception if data source access failed
	 */
	T create(T t) throws Exception;

	/**
	 * Returns a data model object which has been retrieved with the
	 * provided primary key.
	 *
	 * @param id primary key
	 * @return data object
	 * @throws Exception if data access failed
	 */
	T get(PK id) throws Exception;

	/**
	 * Receives a data model, persists the changes and returns the persisted object.
	 *
	 * @param t data object
	 * @return updated data object
	 * @throws Exception if data source access failed
	 */
	T update(T t) throws Exception;

	/**
	 * Receives the primary key of a data model and deletes it.
	 *
	 * @param id primary key
	 * @throws Exception if data source access failed
	 */
	void delete(PK id) throws Exception;
}
apache-2.0
rwaldron/traceur-compiler
test/feature/LegacyModules/ImportFromLocalModule.js
341
// Traceur legacy (pre-ES6) module-syntax test: declares a local module and
// exercises named, renamed, and whole-module imports from it. The `module`
// keyword here is the experimental Traceur form, not standard JavaScript.
module m { export var x = 1; export var y = 2; }

import {x as renamedX, y} from m;
import {x} from m;
// Bind the whole module object under a second name.
module m2 from m;

assert.equal(1, x);
assert.equal(1, renamedX);
assert.equal(2, y);
assert.equal(x, renamedX);
assert.equal(x, m.x);
// Closure tests cannot handle Object.create(null)
assert.isTrue(m === m2);
assert.equal(y, m.y);
apache-2.0
codefacets/squish-core
app/models/squash/symbolication.rb
5437
# Copyright 2013 Square Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. require 'zlib' require 'base64' # Symbolication information for a project. This model stores an array of address # ranges and the file, line, and method information for each of those ranges. # Symbolication can then be done by finding the information corresponding to a # range that includes a particular program counter address. # # Symbolication objects are uniquely referenced by UUID. This is because Xcode # generates a UUID for each new build and architecture of a project. The UUID is # then distributed with the project, and used to look up the correct # Symbolication to use. # # For projects in compiled languages that do not attach a UUID to their debug # data, the Squash client library for that language will need to generate its # own UUID, to be distributed with the project. # # For more information on symbolication generally, see the README. # # ### Symbol Data # # Symbol data is generated using the "squash_ios_symbolicator" gem. The gem # produces data structures that can be compactly serialized. The gem is also # included in this project to support deserializing the resulting data. # # Serialization is accomplished by YAML-serializing the `Symbols` or `Lines` # object, zlib-encoding the result, and then base-64-encoding the compressed # output. This is also how the `symbols` and `lines` properties are transmitted # over the wire. 
# # No support is given for modifying these objects after they have been # deserialized from YAML. # # Properties # ---------- # # | | | # |:----------|:------------------------------------------------------------------------| # | `uuid` | A universally-unique identifier associated with the symbolication data. | # | `symbols` | A serialized `Symbolication::Symbols` object with debug_info data. | # | `lines` | A serialized `Symbolication::Lines` object with debug_lines data. | module Squash class Symbolication < Squash::Record # internal use only has_many :occurrences, inverse_of: :symbolication, primary_key: 'uuid', dependent: :restrict_with_exception self.primary_key = 'uuid' validates :uuid, presence: true, uniqueness: true, format: {with: /\A[0-9a-f]{8}-([0-9a-f]{4}-){3}[0-9a-f]{12}\z/i} after_commit(on: :create) do |sym| BackgroundRunner.run SymbolicationWorker, sym.id end attr_readonly :uuid # @private def symbols @symbols ||= begin syms = YAML.load(Zlib::Inflate.inflate(Base64.decode64(read_attribute(:symbols)))) raise TypeError, "expected Squash::Symbolicator::Symbols, got #{syms.class}" unless syms.kind_of?(Squash::Symbolicator::Symbols) syms end end # @private def symbols=(syms) raise TypeError, "expected Squash::Symbolicator::Symbols, got #{syms.class}" unless syms.kind_of?(Squash::Symbolicator::Symbols) write_attribute :symbols, Base64.encode64(Zlib::Deflate.deflate(syms.to_yaml)) @symbols = syms end # @private def lines @lines ||= begin lns = YAML.load(Zlib::Inflate.inflate(Base64.decode64(read_attribute(:lines)))) raise TypeError, "expected Squash::Symbolicator::Lines, got #{lns.class}" unless lns.kind_of?(Squash::Symbolicator::Lines) lns end end # @private def lines=(lns) raise TypeError, "expected Squash::Symbolicator::Lines, got #{lns.class}" unless lns.kind_of?(Squash::Symbolicator::Lines) write_attribute :lines, Base64.encode64(Zlib::Deflate.deflate(lns.to_yaml)) @lines = lns end # Returns the file path, line number, and method name corresponding 
to a # program counter address. The result is formatted for use as part of an # {Occurrence}'s backtrace element. # # If `lines` is provided, the line number will be the specific corresponding # line number within the method. Otherwise it will be the line number of the # method declaration. # # @param [Fixnum] address A stack return address (decimal number). # @return [Hash, nil] The file path, line number, and method name containing # that address, or `nil` if that address could not be symbolicated. def symbolicate(address) line = lines.for(address) if lines symbol = symbols.for(address) if line && symbol { 'file' => line.file, 'line' => line.line, 'symbol' => symbol.ios_method } elsif line { 'file' => line.file, 'line' => line.line } elsif symbol { 'file' => symbol.file, 'line' => symbol.line, 'symbol' => symbol.ios_method } else nil end end end end
apache-2.0
wanglongbiao/webapp-tools
Highlander/ship-gis/src/main/webapp/js/arcgis_js_api/library/3.22/moment/locale/cv.js
3246
//>>built (function(b,a){"object"===typeof exports&&"undefined"!==typeof module&&"function"===typeof require?a(require("../moment")):"function"===typeof define&&define.amd?define("moment/locale/cv",["../moment"],a):a(b.moment)})(this,function(b){return b.defineLocale("cv",{months:"\u043a\u04d1\u0440\u043b\u0430\u0447 \u043d\u0430\u0440\u04d1\u0441 \u043f\u0443\u0448 \u0430\u043a\u0430 \u043c\u0430\u0439 \u04ab\u04d7\u0440\u0442\u043c\u0435 \u0443\u0442\u04d1 \u04ab\u0443\u0440\u043b\u0430 \u0430\u0432\u04d1\u043d \u044e\u043f\u0430 \u0447\u04f3\u043a \u0440\u0430\u0448\u0442\u0430\u0432".split(" "), monthsShort:"\u043a\u04d1\u0440 \u043d\u0430\u0440 \u043f\u0443\u0448 \u0430\u043a\u0430 \u043c\u0430\u0439 \u04ab\u04d7\u0440 \u0443\u0442\u04d1 \u04ab\u0443\u0440 \u0430\u0432\u043d \u044e\u043f\u0430 \u0447\u04f3\u043a \u0440\u0430\u0448".split(" "),weekdays:"\u0432\u044b\u0440\u0441\u0430\u0440\u043d\u0438\u043a\u0443\u043d \u0442\u0443\u043d\u0442\u0438\u043a\u0443\u043d \u044b\u0442\u043b\u0430\u0440\u0438\u043a\u0443\u043d \u044e\u043d\u043a\u0443\u043d \u043a\u04d7\u04ab\u043d\u0435\u0440\u043d\u0438\u043a\u0443\u043d \u044d\u0440\u043d\u0435\u043a\u0443\u043d \u0448\u04d1\u043c\u0430\u0442\u043a\u0443\u043d".split(" "), weekdaysShort:"\u0432\u044b\u0440 \u0442\u0443\u043d \u044b\u0442\u043b \u044e\u043d \u043a\u04d7\u04ab \u044d\u0440\u043d \u0448\u04d1\u043c".split(" "),weekdaysMin:"\u0432\u0440 \u0442\u043d \u044b\u0442 \u044e\u043d \u043a\u04ab \u044d\u0440 \u0448\u043c".split(" "),longDateFormat:{LT:"HH:mm",LTS:"HH:mm:ss",L:"DD-MM-YYYY",LL:"YYYY [\u04ab\u0443\u043b\u0445\u0438] MMMM [\u0443\u0439\u04d1\u0445\u04d7\u043d] D[-\u043c\u04d7\u0448\u04d7]",LLL:"YYYY [\u04ab\u0443\u043b\u0445\u0438] MMMM [\u0443\u0439\u04d1\u0445\u04d7\u043d] D[-\u043c\u04d7\u0448\u04d7], HH:mm", LLLL:"dddd, YYYY [\u04ab\u0443\u043b\u0445\u0438] MMMM [\u0443\u0439\u04d1\u0445\u04d7\u043d] D[-\u043c\u04d7\u0448\u04d7], HH:mm"},calendar:{sameDay:"[\u041f\u0430\u044f\u043d] LT 
[\u0441\u0435\u0445\u0435\u0442\u0440\u0435]",nextDay:"[\u042b\u0440\u0430\u043d] LT [\u0441\u0435\u0445\u0435\u0442\u0440\u0435]",lastDay:"[\u04d6\u043d\u0435\u0440] LT [\u0441\u0435\u0445\u0435\u0442\u0440\u0435]",nextWeek:"[\u04aa\u0438\u0442\u0435\u0441] dddd LT [\u0441\u0435\u0445\u0435\u0442\u0440\u0435]",lastWeek:"[\u0418\u0440\u0442\u043d\u04d7] dddd LT [\u0441\u0435\u0445\u0435\u0442\u0440\u0435]", sameElse:"L"},relativeTime:{future:function(a){var b=/\u0441\u0435\u0445\u0435\u0442$/i.exec(a)?"\u0440\u0435\u043d":/\u04ab\u0443\u043b$/i.exec(a)?"\u0442\u0430\u043d":"\u0440\u0430\u043d";return a+b},past:"%s \u043a\u0430\u044f\u043b\u043b\u0430",s:"\u043f\u04d7\u0440-\u0438\u043a \u04ab\u0435\u043a\u043a\u0443\u043d\u0442",m:"\u043f\u04d7\u0440 \u043c\u0438\u043d\u0443\u0442",mm:"%d \u043c\u0438\u043d\u0443\u0442",h:"\u043f\u04d7\u0440 \u0441\u0435\u0445\u0435\u0442",hh:"%d \u0441\u0435\u0445\u0435\u0442", d:"\u043f\u04d7\u0440 \u043a\u0443\u043d",dd:"%d \u043a\u0443\u043d",M:"\u043f\u04d7\u0440 \u0443\u0439\u04d1\u0445",MM:"%d \u0443\u0439\u04d1\u0445",y:"\u043f\u04d7\u0440 \u04ab\u0443\u043b",yy:"%d \u04ab\u0443\u043b"},dayOfMonthOrdinalParse:/\d{1,2}-\u043c\u04d7\u0448/,ordinal:"%d-\u043c\u04d7\u0448",week:{dow:1,doy:7}})});
apache-2.0
pdalbora/gosu-lang
gosu-xml/src/main/java/gw/internal/xml/xsd/typeprovider/schema/XmlSchemaAttributeOrAttributeGroup.java
571
/*
 * Copyright 2014 Guidewire Software, Inc.
 */

package gw.internal.xml.xsd.typeprovider.schema;

import gw.internal.xml.xsd.typeprovider.XmlSchemaIndex;
import gw.lang.reflect.LocationInfo;

/**
 * Common base class for schema objects that can appear where either an XSD
 * attribute or an attribute group is expected.
 *
 * @param <T> the concrete subtype, threaded through to {@link XmlSchemaObject}
 */
public abstract class XmlSchemaAttributeOrAttributeGroup<T extends XmlSchemaAttributeOrAttributeGroup> extends XmlSchemaObject<T> {

  public XmlSchemaAttributeOrAttributeGroup( XmlSchemaIndex schemaIndex, LocationInfo locationInfo ) {
    super( schemaIndex, locationInfo );
  }

  // Default implementation: this object already is its resolved form.
  // Subclasses representing attribute groups presumably override this to
  // expand group references — confirm against the subclasses.
  public XmlSchemaAttributeOrAttributeGroup resolveAttributeGroups() {
    return this;
  }
}
apache-2.0
fnothaft/ananas
src/main/scala/net/fnothaft/ananas/models/Fragment.scala
824
/**
 * Copyright 2015 Frank Austin Nothaft
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.fnothaft.ananas.models

import net.fnothaft.ananas.debruijn.TransientKmerVertex

/**
 * A sequence fragment that can be decomposed into k-mers for de Bruijn graph
 * construction.
 *
 * @tparam L the type of the fragment identifier
 */
trait Fragment[L] extends Serializable {

  // Identifier for this fragment; also used as the label type of the
  // transient k-mer vertices produced below.
  val id: L

  /**
   * Decomposes this fragment into an array of (canonical k-mer, transient
   * k-mer vertex) pairs.
   */
  def flattenFragment: Array[(CanonicalKmer, TransientKmerVertex[L])]
}
apache-2.0
twitter/scrooge
scrooge-core/src/main/scala/com/twitter/scrooge/StructBuilderFactory.scala
1449
package com.twitter.scrooge

/**
 * Provides an interface for constructing a new [[StructBuilder]] for a
 * struct type `T`.
 *
 * Mixing this trait into a struct's companion object makes it possible to
 * create a `T` statically, without first calling `newBuilder()` on an
 * existing instance (e.g. `Struct.newBuilder()`). A builder obtained that
 * way carries no default field values, so the caller must set every field
 * of the struct before building.
 */
trait StructBuilderFactory[T <: ThriftStruct] {

  /**
   * Returns a builder that creates a new instance of `T`.
   *
   * With default values:
   *  - Call `newBuilder()` on an instance of the struct.
   *  - Set a single field via `builder.setField(index, value)`; every other
   *    field keeps the value from the instance `newBuilder()` was called on.
   *  - Set every field at once via `builder.setAllFields(seqOfValues)`.
   *
   * As a static builder with no defaults:
   *  - Call `newBuilder()` on the struct object itself (`T.newBuilder()`).
   *  - Set a single field via `builder.setField(index, value)`. No other
   *    fields are populated, so the caller must supply all of them with
   *    `setField` or `setAllFields`.
   *  - Set every field at once via `builder.setAllFields(seqOfValues)`.
   *
   * In either case, finish by calling `builder.build()` to produce the new `T`.
   */
  def newBuilder(): StructBuilder[T]
}
apache-2.0
dagnir/aws-sdk-java
aws-java-sdk-inspector/src/main/java/com/amazonaws/services/inspector/AmazonInspectorClient.java
107586
/* * Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.inspector; import org.w3c.dom.*; import java.net.*; import java.util.*; import javax.annotation.Generated; import org.apache.commons.logging.*; import com.amazonaws.*; import com.amazonaws.annotation.SdkInternalApi; import com.amazonaws.auth.*; import com.amazonaws.handlers.*; import com.amazonaws.http.*; import com.amazonaws.internal.*; import com.amazonaws.internal.auth.*; import com.amazonaws.metrics.*; import com.amazonaws.regions.*; import com.amazonaws.transform.*; import com.amazonaws.util.*; import com.amazonaws.protocol.json.*; import com.amazonaws.util.AWSRequestMetrics.Field; import com.amazonaws.annotation.ThreadSafe; import com.amazonaws.client.AwsSyncClientParams; import com.amazonaws.services.inspector.AmazonInspectorClientBuilder; import com.amazonaws.AmazonServiceException; import com.amazonaws.services.inspector.model.*; import com.amazonaws.services.inspector.model.transform.*; /** * Client for accessing Amazon Inspector. All service calls made using this client are blocking, and will not return * until the service call completes. * <p> * <fullname>Amazon Inspector</fullname> * <p> * Amazon Inspector enables you to analyze the behavior of your AWS resources and to identify potential security issues. * For more information, see <a * href="http://docs.aws.amazon.com/inspector/latest/userguide/inspector_introduction.html"> Amazon Inspector User * Guide</a>. 
* </p> */ @ThreadSafe @Generated("com.amazonaws:aws-java-sdk-code-generator") public class AmazonInspectorClient extends AmazonWebServiceClient implements AmazonInspector { /** Provider for AWS credentials. */ private final AWSCredentialsProvider awsCredentialsProvider; private static final Log log = LogFactory.getLog(AmazonInspector.class); /** Default signing name for the service. */ private static final String DEFAULT_SIGNING_NAME = "inspector"; /** Client configuration factory providing ClientConfigurations tailored to this client */ protected static final ClientConfigurationFactory configFactory = new ClientConfigurationFactory(); private final com.amazonaws.protocol.json.SdkJsonProtocolFactory protocolFactory = new com.amazonaws.protocol.json.SdkJsonProtocolFactory( new JsonClientMetadata() .withProtocolVersion("1.1") .withSupportsCbor(false) .withSupportsIon(false) .addErrorMetadata( new JsonErrorShapeMetadata().withErrorCode("AssessmentRunInProgressException").withModeledClass( com.amazonaws.services.inspector.model.AssessmentRunInProgressException.class)) .addErrorMetadata( new JsonErrorShapeMetadata().withErrorCode("LimitExceededException").withModeledClass( com.amazonaws.services.inspector.model.LimitExceededException.class)) .addErrorMetadata( new JsonErrorShapeMetadata().withErrorCode("AccessDeniedException").withModeledClass( com.amazonaws.services.inspector.model.AccessDeniedException.class)) .addErrorMetadata( new JsonErrorShapeMetadata().withErrorCode("InvalidInputException").withModeledClass( com.amazonaws.services.inspector.model.InvalidInputException.class)) .addErrorMetadata( new JsonErrorShapeMetadata().withErrorCode("NoSuchEntityException").withModeledClass( com.amazonaws.services.inspector.model.NoSuchEntityException.class)) .addErrorMetadata( new JsonErrorShapeMetadata().withErrorCode("AgentsAlreadyRunningAssessmentException").withModeledClass( com.amazonaws.services.inspector.model.AgentsAlreadyRunningAssessmentException.class)) 
.addErrorMetadata( new JsonErrorShapeMetadata().withErrorCode("InvalidCrossAccountRoleException").withModeledClass( com.amazonaws.services.inspector.model.InvalidCrossAccountRoleException.class)) .addErrorMetadata( new JsonErrorShapeMetadata().withErrorCode("InternalException").withModeledClass( com.amazonaws.services.inspector.model.InternalException.class)) .withBaseServiceExceptionClass(com.amazonaws.services.inspector.model.AmazonInspectorException.class)); /** * Constructs a new client to invoke service methods on Amazon Inspector. A credentials provider chain will be used * that searches for credentials in this order: * <ul> * <li>Environment Variables - AWS_ACCESS_KEY_ID and AWS_SECRET_KEY</li> * <li>Java System Properties - aws.accessKeyId and aws.secretKey</li> * <li>Instance profile credentials delivered through the Amazon EC2 metadata service</li> * </ul> * * <p> * All service calls made using this new client object are blocking, and will not return until the service call * completes. * * @see DefaultAWSCredentialsProviderChain * @deprecated use {@link AmazonInspectorClientBuilder#defaultClient()} */ @Deprecated public AmazonInspectorClient() { this(DefaultAWSCredentialsProviderChain.getInstance(), configFactory.getConfig()); } /** * Constructs a new client to invoke service methods on Amazon Inspector. A credentials provider chain will be used * that searches for credentials in this order: * <ul> * <li>Environment Variables - AWS_ACCESS_KEY_ID and AWS_SECRET_KEY</li> * <li>Java System Properties - aws.accessKeyId and aws.secretKey</li> * <li>Instance profile credentials delivered through the Amazon EC2 metadata service</li> * </ul> * * <p> * All service calls made using this new client object are blocking, and will not return until the service call * completes. * * @param clientConfiguration * The client configuration options controlling how this client connects to Amazon Inspector (ex: proxy * settings, retry counts, etc.). 
* * @see DefaultAWSCredentialsProviderChain * @deprecated use {@link AmazonInspectorClientBuilder#withClientConfiguration(ClientConfiguration)} */ @Deprecated public AmazonInspectorClient(ClientConfiguration clientConfiguration) { this(DefaultAWSCredentialsProviderChain.getInstance(), clientConfiguration); } /** * Constructs a new client to invoke service methods on Amazon Inspector using the specified AWS account * credentials. * * <p> * All service calls made using this new client object are blocking, and will not return until the service call * completes. * * @param awsCredentials * The AWS credentials (access key ID and secret key) to use when authenticating with AWS services. * @deprecated use {@link AmazonInspectorClientBuilder#withCredentials(AWSCredentialsProvider)} for example: * {@code AmazonInspectorClientBuilder.standard().withCredentials(new AWSStaticCredentialsProvider(awsCredentials)).build();} */ @Deprecated public AmazonInspectorClient(AWSCredentials awsCredentials) { this(awsCredentials, configFactory.getConfig()); } /** * Constructs a new client to invoke service methods on Amazon Inspector using the specified AWS account credentials * and client configuration options. * * <p> * All service calls made using this new client object are blocking, and will not return until the service call * completes. * * @param awsCredentials * The AWS credentials (access key ID and secret key) to use when authenticating with AWS services. * @param clientConfiguration * The client configuration options controlling how this client connects to Amazon Inspector (ex: proxy * settings, retry counts, etc.). 
* @deprecated use {@link AmazonInspectorClientBuilder#withCredentials(AWSCredentialsProvider)} and * {@link AmazonInspectorClientBuilder#withClientConfiguration(ClientConfiguration)} */ @Deprecated public AmazonInspectorClient(AWSCredentials awsCredentials, ClientConfiguration clientConfiguration) { super(clientConfiguration); this.awsCredentialsProvider = new StaticCredentialsProvider(awsCredentials); init(); } /** * Constructs a new client to invoke service methods on Amazon Inspector using the specified AWS account credentials * provider. * * <p> * All service calls made using this new client object are blocking, and will not return until the service call * completes. * * @param awsCredentialsProvider * The AWS credentials provider which will provide credentials to authenticate requests with AWS services. * @deprecated use {@link AmazonInspectorClientBuilder#withCredentials(AWSCredentialsProvider)} */ @Deprecated public AmazonInspectorClient(AWSCredentialsProvider awsCredentialsProvider) { this(awsCredentialsProvider, configFactory.getConfig()); } /** * Constructs a new client to invoke service methods on Amazon Inspector using the specified AWS account credentials * provider and client configuration options. * * <p> * All service calls made using this new client object are blocking, and will not return until the service call * completes. * * @param awsCredentialsProvider * The AWS credentials provider which will provide credentials to authenticate requests with AWS services. * @param clientConfiguration * The client configuration options controlling how this client connects to Amazon Inspector (ex: proxy * settings, retry counts, etc.). 
* @deprecated use {@link AmazonInspectorClientBuilder#withCredentials(AWSCredentialsProvider)} and * {@link AmazonInspectorClientBuilder#withClientConfiguration(ClientConfiguration)} */ @Deprecated public AmazonInspectorClient(AWSCredentialsProvider awsCredentialsProvider, ClientConfiguration clientConfiguration) { this(awsCredentialsProvider, clientConfiguration, null); } /** * Constructs a new client to invoke service methods on Amazon Inspector using the specified AWS account credentials * provider, client configuration options, and request metric collector. * * <p> * All service calls made using this new client object are blocking, and will not return until the service call * completes. * * @param awsCredentialsProvider * The AWS credentials provider which will provide credentials to authenticate requests with AWS services. * @param clientConfiguration * The client configuration options controlling how this client connects to Amazon Inspector (ex: proxy * settings, retry counts, etc.). * @param requestMetricCollector * optional request metric collector * @deprecated use {@link AmazonInspectorClientBuilder#withCredentials(AWSCredentialsProvider)} and * {@link AmazonInspectorClientBuilder#withClientConfiguration(ClientConfiguration)} and * {@link AmazonInspectorClientBuilder#withMetricsCollector(RequestMetricCollector)} */ @Deprecated public AmazonInspectorClient(AWSCredentialsProvider awsCredentialsProvider, ClientConfiguration clientConfiguration, RequestMetricCollector requestMetricCollector) { super(clientConfiguration, requestMetricCollector); this.awsCredentialsProvider = awsCredentialsProvider; init(); } public static AmazonInspectorClientBuilder builder() { return AmazonInspectorClientBuilder.standard(); } /** * Constructs a new client to invoke service methods on Amazon Inspector using the specified parameters. * * <p> * All service calls made using this new client object are blocking, and will not return until the service call * completes. 
* * @param clientParams * Object providing client parameters. */ AmazonInspectorClient(AwsSyncClientParams clientParams) { super(clientParams); this.awsCredentialsProvider = clientParams.getCredentialsProvider(); init(); } private void init() { setServiceNameIntern(DEFAULT_SIGNING_NAME); setEndpointPrefix(ENDPOINT_PREFIX); // calling this.setEndPoint(...) will also modify the signer accordingly setEndpoint("https://inspector.us-east-1.amazonaws.com"); HandlerChainFactory chainFactory = new HandlerChainFactory(); requestHandler2s.addAll(chainFactory.newRequestHandlerChain("/com/amazonaws/services/inspector/request.handlers")); requestHandler2s.addAll(chainFactory.newRequestHandler2Chain("/com/amazonaws/services/inspector/request.handler2s")); requestHandler2s.addAll(chainFactory.getGlobalHandlers()); } /** * <p> * Assigns attributes (key and value pairs) to the findings that are specified by the ARNs of the findings. * </p> * * @param addAttributesToFindingsRequest * @return Result of the AddAttributesToFindings operation returned by the service. * @throws InternalException * Internal server error. * @throws InvalidInputException * The request was rejected because an invalid or out-of-range value was supplied for an input parameter. * @throws AccessDeniedException * You do not have required permissions to access the requested resource. * @throws NoSuchEntityException * The request was rejected because it referenced an entity that does not exist. The error code describes * the entity. 
* @sample AmazonInspector.AddAttributesToFindings * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/inspector-2016-02-16/AddAttributesToFindings" * target="_top">AWS API Documentation</a> */ @Override public AddAttributesToFindingsResult addAttributesToFindings(AddAttributesToFindingsRequest request) { request = beforeClientExecution(request); return executeAddAttributesToFindings(request); } @SdkInternalApi final AddAttributesToFindingsResult executeAddAttributesToFindings(AddAttributesToFindingsRequest addAttributesToFindingsRequest) { ExecutionContext executionContext = createExecutionContext(addAttributesToFindingsRequest); AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics(); awsRequestMetrics.startEvent(Field.ClientExecuteTime); Request<AddAttributesToFindingsRequest> request = null; Response<AddAttributesToFindingsResult> response = null; try { awsRequestMetrics.startEvent(Field.RequestMarshallTime); try { request = new AddAttributesToFindingsRequestProtocolMarshaller(protocolFactory).marshall(super .beforeMarshalling(addAttributesToFindingsRequest)); // Binds the request metrics to the current request. request.setAWSRequestMetrics(awsRequestMetrics); } finally { awsRequestMetrics.endEvent(Field.RequestMarshallTime); } HttpResponseHandler<AmazonWebServiceResponse<AddAttributesToFindingsResult>> responseHandler = protocolFactory.createResponseHandler( new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false), new AddAttributesToFindingsResultJsonUnmarshaller()); response = invoke(request, responseHandler, executionContext); return response.getAwsResponse(); } finally { endClientExecution(awsRequestMetrics, request, response); } } /** * <p> * Creates a new assessment target using the ARN of the resource group that is generated by * <a>CreateResourceGroup</a>. You can create up to 50 assessment targets per AWS account. You can run up to 500 * concurrent agents per AWS account. 
For more information, see <a * href="http://docs.aws.amazon.com/inspector/latest/userguide/inspector_applications.html"> Amazon Inspector * Assessment Targets</a>. * </p> * * @param createAssessmentTargetRequest * @return Result of the CreateAssessmentTarget operation returned by the service. * @throws InternalException * Internal server error. * @throws InvalidInputException * The request was rejected because an invalid or out-of-range value was supplied for an input parameter. * @throws LimitExceededException * The request was rejected because it attempted to create resources beyond the current AWS account limits. * The error code describes the limit exceeded. * @throws AccessDeniedException * You do not have required permissions to access the requested resource. * @throws NoSuchEntityException * The request was rejected because it referenced an entity that does not exist. The error code describes * the entity. * @sample AmazonInspector.CreateAssessmentTarget * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/inspector-2016-02-16/CreateAssessmentTarget" * target="_top">AWS API Documentation</a> */ @Override public CreateAssessmentTargetResult createAssessmentTarget(CreateAssessmentTargetRequest request) { request = beforeClientExecution(request); return executeCreateAssessmentTarget(request); } @SdkInternalApi final CreateAssessmentTargetResult executeCreateAssessmentTarget(CreateAssessmentTargetRequest createAssessmentTargetRequest) { ExecutionContext executionContext = createExecutionContext(createAssessmentTargetRequest); AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics(); awsRequestMetrics.startEvent(Field.ClientExecuteTime); Request<CreateAssessmentTargetRequest> request = null; Response<CreateAssessmentTargetResult> response = null; try { awsRequestMetrics.startEvent(Field.RequestMarshallTime); try { request = new 
CreateAssessmentTargetRequestProtocolMarshaller(protocolFactory).marshall(super.beforeMarshalling(createAssessmentTargetRequest)); // Binds the request metrics to the current request. request.setAWSRequestMetrics(awsRequestMetrics); } finally { awsRequestMetrics.endEvent(Field.RequestMarshallTime); } HttpResponseHandler<AmazonWebServiceResponse<CreateAssessmentTargetResult>> responseHandler = protocolFactory.createResponseHandler( new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false), new CreateAssessmentTargetResultJsonUnmarshaller()); response = invoke(request, responseHandler, executionContext); return response.getAwsResponse(); } finally { endClientExecution(awsRequestMetrics, request, response); } } /** * <p> * Creates an assessment template for the assessment target that is specified by the ARN of the assessment target. * </p> * * @param createAssessmentTemplateRequest * @return Result of the CreateAssessmentTemplate operation returned by the service. * @throws InternalException * Internal server error. * @throws InvalidInputException * The request was rejected because an invalid or out-of-range value was supplied for an input parameter. * @throws LimitExceededException * The request was rejected because it attempted to create resources beyond the current AWS account limits. * The error code describes the limit exceeded. * @throws AccessDeniedException * You do not have required permissions to access the requested resource. * @throws NoSuchEntityException * The request was rejected because it referenced an entity that does not exist. The error code describes * the entity. 
* @sample AmazonInspector.CreateAssessmentTemplate * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/inspector-2016-02-16/CreateAssessmentTemplate" * target="_top">AWS API Documentation</a> */ @Override public CreateAssessmentTemplateResult createAssessmentTemplate(CreateAssessmentTemplateRequest request) { request = beforeClientExecution(request); return executeCreateAssessmentTemplate(request); } @SdkInternalApi final CreateAssessmentTemplateResult executeCreateAssessmentTemplate(CreateAssessmentTemplateRequest createAssessmentTemplateRequest) { ExecutionContext executionContext = createExecutionContext(createAssessmentTemplateRequest); AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics(); awsRequestMetrics.startEvent(Field.ClientExecuteTime); Request<CreateAssessmentTemplateRequest> request = null; Response<CreateAssessmentTemplateResult> response = null; try { awsRequestMetrics.startEvent(Field.RequestMarshallTime); try { request = new CreateAssessmentTemplateRequestProtocolMarshaller(protocolFactory).marshall(super .beforeMarshalling(createAssessmentTemplateRequest)); // Binds the request metrics to the current request. request.setAWSRequestMetrics(awsRequestMetrics); } finally { awsRequestMetrics.endEvent(Field.RequestMarshallTime); } HttpResponseHandler<AmazonWebServiceResponse<CreateAssessmentTemplateResult>> responseHandler = protocolFactory.createResponseHandler( new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false), new CreateAssessmentTemplateResultJsonUnmarshaller()); response = invoke(request, responseHandler, executionContext); return response.getAwsResponse(); } finally { endClientExecution(awsRequestMetrics, request, response); } } /** * <p> * Creates a resource group using the specified set of tags (key and value pairs) that are used to select the EC2 * instances to be included in an Amazon Inspector assessment target. 
The created resource group is then used to * create an Amazon Inspector assessment target. For more information, see <a>CreateAssessmentTarget</a>. * </p> * * @param createResourceGroupRequest * @return Result of the CreateResourceGroup operation returned by the service. * @throws InternalException * Internal server error. * @throws InvalidInputException * The request was rejected because an invalid or out-of-range value was supplied for an input parameter. * @throws LimitExceededException * The request was rejected because it attempted to create resources beyond the current AWS account limits. * The error code describes the limit exceeded. * @throws AccessDeniedException * You do not have required permissions to access the requested resource. * @sample AmazonInspector.CreateResourceGroup * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/inspector-2016-02-16/CreateResourceGroup" target="_top">AWS * API Documentation</a> */ @Override public CreateResourceGroupResult createResourceGroup(CreateResourceGroupRequest request) { request = beforeClientExecution(request); return executeCreateResourceGroup(request); } @SdkInternalApi final CreateResourceGroupResult executeCreateResourceGroup(CreateResourceGroupRequest createResourceGroupRequest) { ExecutionContext executionContext = createExecutionContext(createResourceGroupRequest); AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics(); awsRequestMetrics.startEvent(Field.ClientExecuteTime); Request<CreateResourceGroupRequest> request = null; Response<CreateResourceGroupResult> response = null; try { awsRequestMetrics.startEvent(Field.RequestMarshallTime); try { request = new CreateResourceGroupRequestProtocolMarshaller(protocolFactory).marshall(super.beforeMarshalling(createResourceGroupRequest)); // Binds the request metrics to the current request. 
request.setAWSRequestMetrics(awsRequestMetrics); } finally { awsRequestMetrics.endEvent(Field.RequestMarshallTime); } HttpResponseHandler<AmazonWebServiceResponse<CreateResourceGroupResult>> responseHandler = protocolFactory.createResponseHandler( new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false), new CreateResourceGroupResultJsonUnmarshaller()); response = invoke(request, responseHandler, executionContext); return response.getAwsResponse(); } finally { endClientExecution(awsRequestMetrics, request, response); } } /** * <p> * Deletes the assessment run that is specified by the ARN of the assessment run. * </p> * * @param deleteAssessmentRunRequest * @return Result of the DeleteAssessmentRun operation returned by the service. * @throws InternalException * Internal server error. * @throws InvalidInputException * The request was rejected because an invalid or out-of-range value was supplied for an input parameter. * @throws AssessmentRunInProgressException * You cannot perform a specified action if an assessment run is currently in progress. * @throws AccessDeniedException * You do not have required permissions to access the requested resource. * @throws NoSuchEntityException * The request was rejected because it referenced an entity that does not exist. The error code describes * the entity. 
* @sample AmazonInspector.DeleteAssessmentRun * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/inspector-2016-02-16/DeleteAssessmentRun" target="_top">AWS * API Documentation</a> */ @Override public DeleteAssessmentRunResult deleteAssessmentRun(DeleteAssessmentRunRequest request) { request = beforeClientExecution(request); return executeDeleteAssessmentRun(request); } @SdkInternalApi final DeleteAssessmentRunResult executeDeleteAssessmentRun(DeleteAssessmentRunRequest deleteAssessmentRunRequest) { ExecutionContext executionContext = createExecutionContext(deleteAssessmentRunRequest); AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics(); awsRequestMetrics.startEvent(Field.ClientExecuteTime); Request<DeleteAssessmentRunRequest> request = null; Response<DeleteAssessmentRunResult> response = null; try { awsRequestMetrics.startEvent(Field.RequestMarshallTime); try { request = new DeleteAssessmentRunRequestProtocolMarshaller(protocolFactory).marshall(super.beforeMarshalling(deleteAssessmentRunRequest)); // Binds the request metrics to the current request. request.setAWSRequestMetrics(awsRequestMetrics); } finally { awsRequestMetrics.endEvent(Field.RequestMarshallTime); } HttpResponseHandler<AmazonWebServiceResponse<DeleteAssessmentRunResult>> responseHandler = protocolFactory.createResponseHandler( new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false), new DeleteAssessmentRunResultJsonUnmarshaller()); response = invoke(request, responseHandler, executionContext); return response.getAwsResponse(); } finally { endClientExecution(awsRequestMetrics, request, response); } } /** * <p> * Deletes the assessment target that is specified by the ARN of the assessment target. * </p> * * @param deleteAssessmentTargetRequest * @return Result of the DeleteAssessmentTarget operation returned by the service. * @throws InternalException * Internal server error. 
* @throws InvalidInputException * The request was rejected because an invalid or out-of-range value was supplied for an input parameter. * @throws AssessmentRunInProgressException * You cannot perform a specified action if an assessment run is currently in progress. * @throws AccessDeniedException * You do not have required permissions to access the requested resource. * @throws NoSuchEntityException * The request was rejected because it referenced an entity that does not exist. The error code describes * the entity. * @sample AmazonInspector.DeleteAssessmentTarget * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/inspector-2016-02-16/DeleteAssessmentTarget" * target="_top">AWS API Documentation</a> */ @Override public DeleteAssessmentTargetResult deleteAssessmentTarget(DeleteAssessmentTargetRequest request) { request = beforeClientExecution(request); return executeDeleteAssessmentTarget(request); } @SdkInternalApi final DeleteAssessmentTargetResult executeDeleteAssessmentTarget(DeleteAssessmentTargetRequest deleteAssessmentTargetRequest) { ExecutionContext executionContext = createExecutionContext(deleteAssessmentTargetRequest); AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics(); awsRequestMetrics.startEvent(Field.ClientExecuteTime); Request<DeleteAssessmentTargetRequest> request = null; Response<DeleteAssessmentTargetResult> response = null; try { awsRequestMetrics.startEvent(Field.RequestMarshallTime); try { request = new DeleteAssessmentTargetRequestProtocolMarshaller(protocolFactory).marshall(super.beforeMarshalling(deleteAssessmentTargetRequest)); // Binds the request metrics to the current request. 
request.setAWSRequestMetrics(awsRequestMetrics); } finally { awsRequestMetrics.endEvent(Field.RequestMarshallTime); } HttpResponseHandler<AmazonWebServiceResponse<DeleteAssessmentTargetResult>> responseHandler = protocolFactory.createResponseHandler( new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false), new DeleteAssessmentTargetResultJsonUnmarshaller()); response = invoke(request, responseHandler, executionContext); return response.getAwsResponse(); } finally { endClientExecution(awsRequestMetrics, request, response); } } /** * <p> * Deletes the assessment template that is specified by the ARN of the assessment template. * </p> * * @param deleteAssessmentTemplateRequest * @return Result of the DeleteAssessmentTemplate operation returned by the service. * @throws InternalException * Internal server error. * @throws InvalidInputException * The request was rejected because an invalid or out-of-range value was supplied for an input parameter. * @throws AssessmentRunInProgressException * You cannot perform a specified action if an assessment run is currently in progress. * @throws AccessDeniedException * You do not have required permissions to access the requested resource. * @throws NoSuchEntityException * The request was rejected because it referenced an entity that does not exist. The error code describes * the entity. 
* @sample AmazonInspector.DeleteAssessmentTemplate * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/inspector-2016-02-16/DeleteAssessmentTemplate" * target="_top">AWS API Documentation</a> */ @Override public DeleteAssessmentTemplateResult deleteAssessmentTemplate(DeleteAssessmentTemplateRequest request) { request = beforeClientExecution(request); return executeDeleteAssessmentTemplate(request); } @SdkInternalApi final DeleteAssessmentTemplateResult executeDeleteAssessmentTemplate(DeleteAssessmentTemplateRequest deleteAssessmentTemplateRequest) { ExecutionContext executionContext = createExecutionContext(deleteAssessmentTemplateRequest); AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics(); awsRequestMetrics.startEvent(Field.ClientExecuteTime); Request<DeleteAssessmentTemplateRequest> request = null; Response<DeleteAssessmentTemplateResult> response = null; try { awsRequestMetrics.startEvent(Field.RequestMarshallTime); try { request = new DeleteAssessmentTemplateRequestProtocolMarshaller(protocolFactory).marshall(super .beforeMarshalling(deleteAssessmentTemplateRequest)); // Binds the request metrics to the current request. request.setAWSRequestMetrics(awsRequestMetrics); } finally { awsRequestMetrics.endEvent(Field.RequestMarshallTime); } HttpResponseHandler<AmazonWebServiceResponse<DeleteAssessmentTemplateResult>> responseHandler = protocolFactory.createResponseHandler( new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false), new DeleteAssessmentTemplateResultJsonUnmarshaller()); response = invoke(request, responseHandler, executionContext); return response.getAwsResponse(); } finally { endClientExecution(awsRequestMetrics, request, response); } } /** * <p> * Describes the assessment runs that are specified by the ARNs of the assessment runs. * </p> * * @param describeAssessmentRunsRequest * @return Result of the DescribeAssessmentRuns operation returned by the service. 
* @throws InternalException * Internal server error. * @throws InvalidInputException * The request was rejected because an invalid or out-of-range value was supplied for an input parameter. * @sample AmazonInspector.DescribeAssessmentRuns * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/inspector-2016-02-16/DescribeAssessmentRuns" * target="_top">AWS API Documentation</a> */ @Override public DescribeAssessmentRunsResult describeAssessmentRuns(DescribeAssessmentRunsRequest request) { request = beforeClientExecution(request); return executeDescribeAssessmentRuns(request); } @SdkInternalApi final DescribeAssessmentRunsResult executeDescribeAssessmentRuns(DescribeAssessmentRunsRequest describeAssessmentRunsRequest) { ExecutionContext executionContext = createExecutionContext(describeAssessmentRunsRequest); AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics(); awsRequestMetrics.startEvent(Field.ClientExecuteTime); Request<DescribeAssessmentRunsRequest> request = null; Response<DescribeAssessmentRunsResult> response = null; try { awsRequestMetrics.startEvent(Field.RequestMarshallTime); try { request = new DescribeAssessmentRunsRequestProtocolMarshaller(protocolFactory).marshall(super.beforeMarshalling(describeAssessmentRunsRequest)); // Binds the request metrics to the current request. request.setAWSRequestMetrics(awsRequestMetrics); } finally { awsRequestMetrics.endEvent(Field.RequestMarshallTime); } HttpResponseHandler<AmazonWebServiceResponse<DescribeAssessmentRunsResult>> responseHandler = protocolFactory.createResponseHandler( new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false), new DescribeAssessmentRunsResultJsonUnmarshaller()); response = invoke(request, responseHandler, executionContext); return response.getAwsResponse(); } finally { endClientExecution(awsRequestMetrics, request, response); } } /** * <p> * Describes the assessment targets that are specified by the ARNs of the assessment targets. 
* </p> * * @param describeAssessmentTargetsRequest * @return Result of the DescribeAssessmentTargets operation returned by the service. * @throws InternalException * Internal server error. * @throws InvalidInputException * The request was rejected because an invalid or out-of-range value was supplied for an input parameter. * @sample AmazonInspector.DescribeAssessmentTargets * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/inspector-2016-02-16/DescribeAssessmentTargets" * target="_top">AWS API Documentation</a> */ @Override public DescribeAssessmentTargetsResult describeAssessmentTargets(DescribeAssessmentTargetsRequest request) { request = beforeClientExecution(request); return executeDescribeAssessmentTargets(request); } @SdkInternalApi final DescribeAssessmentTargetsResult executeDescribeAssessmentTargets(DescribeAssessmentTargetsRequest describeAssessmentTargetsRequest) { ExecutionContext executionContext = createExecutionContext(describeAssessmentTargetsRequest); AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics(); awsRequestMetrics.startEvent(Field.ClientExecuteTime); Request<DescribeAssessmentTargetsRequest> request = null; Response<DescribeAssessmentTargetsResult> response = null; try { awsRequestMetrics.startEvent(Field.RequestMarshallTime); try { request = new DescribeAssessmentTargetsRequestProtocolMarshaller(protocolFactory).marshall(super .beforeMarshalling(describeAssessmentTargetsRequest)); // Binds the request metrics to the current request. 
request.setAWSRequestMetrics(awsRequestMetrics); } finally { awsRequestMetrics.endEvent(Field.RequestMarshallTime); } HttpResponseHandler<AmazonWebServiceResponse<DescribeAssessmentTargetsResult>> responseHandler = protocolFactory.createResponseHandler( new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false), new DescribeAssessmentTargetsResultJsonUnmarshaller()); response = invoke(request, responseHandler, executionContext); return response.getAwsResponse(); } finally { endClientExecution(awsRequestMetrics, request, response); } } /** * <p> * Describes the assessment templates that are specified by the ARNs of the assessment templates. * </p> * * @param describeAssessmentTemplatesRequest * @return Result of the DescribeAssessmentTemplates operation returned by the service. * @throws InternalException * Internal server error. * @throws InvalidInputException * The request was rejected because an invalid or out-of-range value was supplied for an input parameter. 
* @sample AmazonInspector.DescribeAssessmentTemplates * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/inspector-2016-02-16/DescribeAssessmentTemplates" * target="_top">AWS API Documentation</a> */ @Override public DescribeAssessmentTemplatesResult describeAssessmentTemplates(DescribeAssessmentTemplatesRequest request) { request = beforeClientExecution(request); return executeDescribeAssessmentTemplates(request); } @SdkInternalApi final DescribeAssessmentTemplatesResult executeDescribeAssessmentTemplates(DescribeAssessmentTemplatesRequest describeAssessmentTemplatesRequest) { ExecutionContext executionContext = createExecutionContext(describeAssessmentTemplatesRequest); AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics(); awsRequestMetrics.startEvent(Field.ClientExecuteTime); Request<DescribeAssessmentTemplatesRequest> request = null; Response<DescribeAssessmentTemplatesResult> response = null; try { awsRequestMetrics.startEvent(Field.RequestMarshallTime); try { request = new DescribeAssessmentTemplatesRequestProtocolMarshaller(protocolFactory).marshall(super .beforeMarshalling(describeAssessmentTemplatesRequest)); // Binds the request metrics to the current request. request.setAWSRequestMetrics(awsRequestMetrics); } finally { awsRequestMetrics.endEvent(Field.RequestMarshallTime); } HttpResponseHandler<AmazonWebServiceResponse<DescribeAssessmentTemplatesResult>> responseHandler = protocolFactory.createResponseHandler( new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false), new DescribeAssessmentTemplatesResultJsonUnmarshaller()); response = invoke(request, responseHandler, executionContext); return response.getAwsResponse(); } finally { endClientExecution(awsRequestMetrics, request, response); } } /** * <p> * Describes the IAM role that enables Amazon Inspector to access your AWS account. 
* </p> * * @param describeCrossAccountAccessRoleRequest * @return Result of the DescribeCrossAccountAccessRole operation returned by the service. * @throws InternalException * Internal server error. * @sample AmazonInspector.DescribeCrossAccountAccessRole * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/inspector-2016-02-16/DescribeCrossAccountAccessRole" * target="_top">AWS API Documentation</a> */ @Override public DescribeCrossAccountAccessRoleResult describeCrossAccountAccessRole(DescribeCrossAccountAccessRoleRequest request) { request = beforeClientExecution(request); return executeDescribeCrossAccountAccessRole(request); } @SdkInternalApi final DescribeCrossAccountAccessRoleResult executeDescribeCrossAccountAccessRole(DescribeCrossAccountAccessRoleRequest describeCrossAccountAccessRoleRequest) { ExecutionContext executionContext = createExecutionContext(describeCrossAccountAccessRoleRequest); AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics(); awsRequestMetrics.startEvent(Field.ClientExecuteTime); Request<DescribeCrossAccountAccessRoleRequest> request = null; Response<DescribeCrossAccountAccessRoleResult> response = null; try { awsRequestMetrics.startEvent(Field.RequestMarshallTime); try { request = new DescribeCrossAccountAccessRoleRequestProtocolMarshaller(protocolFactory).marshall(super .beforeMarshalling(describeCrossAccountAccessRoleRequest)); // Binds the request metrics to the current request. 
request.setAWSRequestMetrics(awsRequestMetrics); } finally { awsRequestMetrics.endEvent(Field.RequestMarshallTime); } HttpResponseHandler<AmazonWebServiceResponse<DescribeCrossAccountAccessRoleResult>> responseHandler = protocolFactory.createResponseHandler( new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false), new DescribeCrossAccountAccessRoleResultJsonUnmarshaller()); response = invoke(request, responseHandler, executionContext); return response.getAwsResponse(); } finally { endClientExecution(awsRequestMetrics, request, response); } } /** * <p> * Describes the findings that are specified by the ARNs of the findings. * </p> * * @param describeFindingsRequest * @return Result of the DescribeFindings operation returned by the service. * @throws InternalException * Internal server error. * @throws InvalidInputException * The request was rejected because an invalid or out-of-range value was supplied for an input parameter. * @sample AmazonInspector.DescribeFindings * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/inspector-2016-02-16/DescribeFindings" target="_top">AWS API * Documentation</a> */ @Override public DescribeFindingsResult describeFindings(DescribeFindingsRequest request) { request = beforeClientExecution(request); return executeDescribeFindings(request); } @SdkInternalApi final DescribeFindingsResult executeDescribeFindings(DescribeFindingsRequest describeFindingsRequest) { ExecutionContext executionContext = createExecutionContext(describeFindingsRequest); AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics(); awsRequestMetrics.startEvent(Field.ClientExecuteTime); Request<DescribeFindingsRequest> request = null; Response<DescribeFindingsResult> response = null; try { awsRequestMetrics.startEvent(Field.RequestMarshallTime); try { request = new DescribeFindingsRequestProtocolMarshaller(protocolFactory).marshall(super.beforeMarshalling(describeFindingsRequest)); // Binds the request metrics 
to the current request. request.setAWSRequestMetrics(awsRequestMetrics); } finally { awsRequestMetrics.endEvent(Field.RequestMarshallTime); } HttpResponseHandler<AmazonWebServiceResponse<DescribeFindingsResult>> responseHandler = protocolFactory.createResponseHandler( new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false), new DescribeFindingsResultJsonUnmarshaller()); response = invoke(request, responseHandler, executionContext); return response.getAwsResponse(); } finally { endClientExecution(awsRequestMetrics, request, response); } } /** * <p> * Describes the resource groups that are specified by the ARNs of the resource groups. * </p> * * @param describeResourceGroupsRequest * @return Result of the DescribeResourceGroups operation returned by the service. * @throws InternalException * Internal server error. * @throws InvalidInputException * The request was rejected because an invalid or out-of-range value was supplied for an input parameter. * @sample AmazonInspector.DescribeResourceGroups * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/inspector-2016-02-16/DescribeResourceGroups" * target="_top">AWS API Documentation</a> */ @Override public DescribeResourceGroupsResult describeResourceGroups(DescribeResourceGroupsRequest request) { request = beforeClientExecution(request); return executeDescribeResourceGroups(request); } @SdkInternalApi final DescribeResourceGroupsResult executeDescribeResourceGroups(DescribeResourceGroupsRequest describeResourceGroupsRequest) { ExecutionContext executionContext = createExecutionContext(describeResourceGroupsRequest); AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics(); awsRequestMetrics.startEvent(Field.ClientExecuteTime); Request<DescribeResourceGroupsRequest> request = null; Response<DescribeResourceGroupsResult> response = null; try { awsRequestMetrics.startEvent(Field.RequestMarshallTime); try { request = new 
DescribeResourceGroupsRequestProtocolMarshaller(protocolFactory).marshall(super.beforeMarshalling(describeResourceGroupsRequest)); // Binds the request metrics to the current request. request.setAWSRequestMetrics(awsRequestMetrics); } finally { awsRequestMetrics.endEvent(Field.RequestMarshallTime); } HttpResponseHandler<AmazonWebServiceResponse<DescribeResourceGroupsResult>> responseHandler = protocolFactory.createResponseHandler( new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false), new DescribeResourceGroupsResultJsonUnmarshaller()); response = invoke(request, responseHandler, executionContext); return response.getAwsResponse(); } finally { endClientExecution(awsRequestMetrics, request, response); } } /** * <p> * Describes the rules packages that are specified by the ARNs of the rules packages. * </p> * * @param describeRulesPackagesRequest * @return Result of the DescribeRulesPackages operation returned by the service. * @throws InternalException * Internal server error. * @throws InvalidInputException * The request was rejected because an invalid or out-of-range value was supplied for an input parameter. 
* @sample AmazonInspector.DescribeRulesPackages * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/inspector-2016-02-16/DescribeRulesPackages" * target="_top">AWS API Documentation</a> */ @Override public DescribeRulesPackagesResult describeRulesPackages(DescribeRulesPackagesRequest request) { request = beforeClientExecution(request); return executeDescribeRulesPackages(request); } @SdkInternalApi final DescribeRulesPackagesResult executeDescribeRulesPackages(DescribeRulesPackagesRequest describeRulesPackagesRequest) { ExecutionContext executionContext = createExecutionContext(describeRulesPackagesRequest); AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics(); awsRequestMetrics.startEvent(Field.ClientExecuteTime); Request<DescribeRulesPackagesRequest> request = null; Response<DescribeRulesPackagesResult> response = null; try { awsRequestMetrics.startEvent(Field.RequestMarshallTime); try { request = new DescribeRulesPackagesRequestProtocolMarshaller(protocolFactory).marshall(super.beforeMarshalling(describeRulesPackagesRequest)); // Binds the request metrics to the current request. request.setAWSRequestMetrics(awsRequestMetrics); } finally { awsRequestMetrics.endEvent(Field.RequestMarshallTime); } HttpResponseHandler<AmazonWebServiceResponse<DescribeRulesPackagesResult>> responseHandler = protocolFactory .createResponseHandler(new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false), new DescribeRulesPackagesResultJsonUnmarshaller()); response = invoke(request, responseHandler, executionContext); return response.getAwsResponse(); } finally { endClientExecution(awsRequestMetrics, request, response); } } /** * <p> * Information about the data that is collected for the specified assessment run. * </p> * * @param getTelemetryMetadataRequest * @return Result of the GetTelemetryMetadata operation returned by the service. * @throws InternalException * Internal server error. 
* @throws InvalidInputException * The request was rejected because an invalid or out-of-range value was supplied for an input parameter. * @throws AccessDeniedException * You do not have required permissions to access the requested resource. * @throws NoSuchEntityException * The request was rejected because it referenced an entity that does not exist. The error code describes * the entity. * @sample AmazonInspector.GetTelemetryMetadata * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/inspector-2016-02-16/GetTelemetryMetadata" target="_top">AWS * API Documentation</a> */ @Override public GetTelemetryMetadataResult getTelemetryMetadata(GetTelemetryMetadataRequest request) { request = beforeClientExecution(request); return executeGetTelemetryMetadata(request); } @SdkInternalApi final GetTelemetryMetadataResult executeGetTelemetryMetadata(GetTelemetryMetadataRequest getTelemetryMetadataRequest) { ExecutionContext executionContext = createExecutionContext(getTelemetryMetadataRequest); AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics(); awsRequestMetrics.startEvent(Field.ClientExecuteTime); Request<GetTelemetryMetadataRequest> request = null; Response<GetTelemetryMetadataResult> response = null; try { awsRequestMetrics.startEvent(Field.RequestMarshallTime); try { request = new GetTelemetryMetadataRequestProtocolMarshaller(protocolFactory).marshall(super.beforeMarshalling(getTelemetryMetadataRequest)); // Binds the request metrics to the current request. 
request.setAWSRequestMetrics(awsRequestMetrics); } finally { awsRequestMetrics.endEvent(Field.RequestMarshallTime); } HttpResponseHandler<AmazonWebServiceResponse<GetTelemetryMetadataResult>> responseHandler = protocolFactory.createResponseHandler( new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false), new GetTelemetryMetadataResultJsonUnmarshaller()); response = invoke(request, responseHandler, executionContext); return response.getAwsResponse(); } finally { endClientExecution(awsRequestMetrics, request, response); } } /** * <p> * Lists the agents of the assessment runs that are specified by the ARNs of the assessment runs. * </p> * * @param listAssessmentRunAgentsRequest * @return Result of the ListAssessmentRunAgents operation returned by the service. * @throws InternalException * Internal server error. * @throws InvalidInputException * The request was rejected because an invalid or out-of-range value was supplied for an input parameter. * @throws AccessDeniedException * You do not have required permissions to access the requested resource. * @throws NoSuchEntityException * The request was rejected because it referenced an entity that does not exist. The error code describes * the entity. 
* @sample AmazonInspector.ListAssessmentRunAgents * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/inspector-2016-02-16/ListAssessmentRunAgents" * target="_top">AWS API Documentation</a> */ @Override public ListAssessmentRunAgentsResult listAssessmentRunAgents(ListAssessmentRunAgentsRequest request) { request = beforeClientExecution(request); return executeListAssessmentRunAgents(request); } @SdkInternalApi final ListAssessmentRunAgentsResult executeListAssessmentRunAgents(ListAssessmentRunAgentsRequest listAssessmentRunAgentsRequest) { ExecutionContext executionContext = createExecutionContext(listAssessmentRunAgentsRequest); AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics(); awsRequestMetrics.startEvent(Field.ClientExecuteTime); Request<ListAssessmentRunAgentsRequest> request = null; Response<ListAssessmentRunAgentsResult> response = null; try { awsRequestMetrics.startEvent(Field.RequestMarshallTime); try { request = new ListAssessmentRunAgentsRequestProtocolMarshaller(protocolFactory).marshall(super .beforeMarshalling(listAssessmentRunAgentsRequest)); // Binds the request metrics to the current request. request.setAWSRequestMetrics(awsRequestMetrics); } finally { awsRequestMetrics.endEvent(Field.RequestMarshallTime); } HttpResponseHandler<AmazonWebServiceResponse<ListAssessmentRunAgentsResult>> responseHandler = protocolFactory.createResponseHandler( new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false), new ListAssessmentRunAgentsResultJsonUnmarshaller()); response = invoke(request, responseHandler, executionContext); return response.getAwsResponse(); } finally { endClientExecution(awsRequestMetrics, request, response); } } /** * <p> * Lists the assessment runs that correspond to the assessment templates that are specified by the ARNs of the * assessment templates. * </p> * * @param listAssessmentRunsRequest * @return Result of the ListAssessmentRuns operation returned by the service. 
* @throws InternalException * Internal server error. * @throws InvalidInputException * The request was rejected because an invalid or out-of-range value was supplied for an input parameter. * @throws AccessDeniedException * You do not have required permissions to access the requested resource. * @throws NoSuchEntityException * The request was rejected because it referenced an entity that does not exist. The error code describes * the entity. * @sample AmazonInspector.ListAssessmentRuns * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/inspector-2016-02-16/ListAssessmentRuns" target="_top">AWS * API Documentation</a> */ @Override public ListAssessmentRunsResult listAssessmentRuns(ListAssessmentRunsRequest request) { request = beforeClientExecution(request); return executeListAssessmentRuns(request); } @SdkInternalApi final ListAssessmentRunsResult executeListAssessmentRuns(ListAssessmentRunsRequest listAssessmentRunsRequest) { ExecutionContext executionContext = createExecutionContext(listAssessmentRunsRequest); AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics(); awsRequestMetrics.startEvent(Field.ClientExecuteTime); Request<ListAssessmentRunsRequest> request = null; Response<ListAssessmentRunsResult> response = null; try { awsRequestMetrics.startEvent(Field.RequestMarshallTime); try { request = new ListAssessmentRunsRequestProtocolMarshaller(protocolFactory).marshall(super.beforeMarshalling(listAssessmentRunsRequest)); // Binds the request metrics to the current request. 
request.setAWSRequestMetrics(awsRequestMetrics); } finally { awsRequestMetrics.endEvent(Field.RequestMarshallTime); } HttpResponseHandler<AmazonWebServiceResponse<ListAssessmentRunsResult>> responseHandler = protocolFactory.createResponseHandler( new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false), new ListAssessmentRunsResultJsonUnmarshaller()); response = invoke(request, responseHandler, executionContext); return response.getAwsResponse(); } finally { endClientExecution(awsRequestMetrics, request, response); } } /** * <p> * Lists the ARNs of the assessment targets within this AWS account. For more information about assessment targets, * see <a href="http://docs.aws.amazon.com/inspector/latest/userguide/inspector_applications.html">Amazon Inspector * Assessment Targets</a>. * </p> * * @param listAssessmentTargetsRequest * @return Result of the ListAssessmentTargets operation returned by the service. * @throws InternalException * Internal server error. * @throws InvalidInputException * The request was rejected because an invalid or out-of-range value was supplied for an input parameter. * @throws AccessDeniedException * You do not have required permissions to access the requested resource. 
* @sample AmazonInspector.ListAssessmentTargets * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/inspector-2016-02-16/ListAssessmentTargets" * target="_top">AWS API Documentation</a> */ @Override public ListAssessmentTargetsResult listAssessmentTargets(ListAssessmentTargetsRequest request) { request = beforeClientExecution(request); return executeListAssessmentTargets(request); } @SdkInternalApi final ListAssessmentTargetsResult executeListAssessmentTargets(ListAssessmentTargetsRequest listAssessmentTargetsRequest) { ExecutionContext executionContext = createExecutionContext(listAssessmentTargetsRequest); AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics(); awsRequestMetrics.startEvent(Field.ClientExecuteTime); Request<ListAssessmentTargetsRequest> request = null; Response<ListAssessmentTargetsResult> response = null; try { awsRequestMetrics.startEvent(Field.RequestMarshallTime); try { request = new ListAssessmentTargetsRequestProtocolMarshaller(protocolFactory).marshall(super.beforeMarshalling(listAssessmentTargetsRequest)); // Binds the request metrics to the current request. request.setAWSRequestMetrics(awsRequestMetrics); } finally { awsRequestMetrics.endEvent(Field.RequestMarshallTime); } HttpResponseHandler<AmazonWebServiceResponse<ListAssessmentTargetsResult>> responseHandler = protocolFactory .createResponseHandler(new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false), new ListAssessmentTargetsResultJsonUnmarshaller()); response = invoke(request, responseHandler, executionContext); return response.getAwsResponse(); } finally { endClientExecution(awsRequestMetrics, request, response); } } /** * <p> * Lists the assessment templates that correspond to the assessment targets that are specified by the ARNs of the * assessment targets. * </p> * * @param listAssessmentTemplatesRequest * @return Result of the ListAssessmentTemplates operation returned by the service. 
* @throws InternalException * Internal server error. * @throws InvalidInputException * The request was rejected because an invalid or out-of-range value was supplied for an input parameter. * @throws AccessDeniedException * You do not have required permissions to access the requested resource. * @throws NoSuchEntityException * The request was rejected because it referenced an entity that does not exist. The error code describes * the entity. * @sample AmazonInspector.ListAssessmentTemplates * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/inspector-2016-02-16/ListAssessmentTemplates" * target="_top">AWS API Documentation</a> */ @Override public ListAssessmentTemplatesResult listAssessmentTemplates(ListAssessmentTemplatesRequest request) { request = beforeClientExecution(request); return executeListAssessmentTemplates(request); } @SdkInternalApi final ListAssessmentTemplatesResult executeListAssessmentTemplates(ListAssessmentTemplatesRequest listAssessmentTemplatesRequest) { ExecutionContext executionContext = createExecutionContext(listAssessmentTemplatesRequest); AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics(); awsRequestMetrics.startEvent(Field.ClientExecuteTime); Request<ListAssessmentTemplatesRequest> request = null; Response<ListAssessmentTemplatesResult> response = null; try { awsRequestMetrics.startEvent(Field.RequestMarshallTime); try { request = new ListAssessmentTemplatesRequestProtocolMarshaller(protocolFactory).marshall(super .beforeMarshalling(listAssessmentTemplatesRequest)); // Binds the request metrics to the current request. 
request.setAWSRequestMetrics(awsRequestMetrics); } finally { awsRequestMetrics.endEvent(Field.RequestMarshallTime); } HttpResponseHandler<AmazonWebServiceResponse<ListAssessmentTemplatesResult>> responseHandler = protocolFactory.createResponseHandler( new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false), new ListAssessmentTemplatesResultJsonUnmarshaller()); response = invoke(request, responseHandler, executionContext); return response.getAwsResponse(); } finally { endClientExecution(awsRequestMetrics, request, response); } } /** * <p> * Lists all the event subscriptions for the assessment template that is specified by the ARN of the assessment * template. For more information, see <a>SubscribeToEvent</a> and <a>UnsubscribeFromEvent</a>. * </p> * * @param listEventSubscriptionsRequest * @return Result of the ListEventSubscriptions operation returned by the service. * @throws InternalException * Internal server error. * @throws InvalidInputException * The request was rejected because an invalid or out-of-range value was supplied for an input parameter. * @throws AccessDeniedException * You do not have required permissions to access the requested resource. * @throws NoSuchEntityException * The request was rejected because it referenced an entity that does not exist. The error code describes * the entity. 
* @sample AmazonInspector.ListEventSubscriptions * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/inspector-2016-02-16/ListEventSubscriptions" * target="_top">AWS API Documentation</a> */ @Override public ListEventSubscriptionsResult listEventSubscriptions(ListEventSubscriptionsRequest request) { request = beforeClientExecution(request); return executeListEventSubscriptions(request); } @SdkInternalApi final ListEventSubscriptionsResult executeListEventSubscriptions(ListEventSubscriptionsRequest listEventSubscriptionsRequest) { ExecutionContext executionContext = createExecutionContext(listEventSubscriptionsRequest); AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics(); awsRequestMetrics.startEvent(Field.ClientExecuteTime); Request<ListEventSubscriptionsRequest> request = null; Response<ListEventSubscriptionsResult> response = null; try { awsRequestMetrics.startEvent(Field.RequestMarshallTime); try { request = new ListEventSubscriptionsRequestProtocolMarshaller(protocolFactory).marshall(super.beforeMarshalling(listEventSubscriptionsRequest)); // Binds the request metrics to the current request. request.setAWSRequestMetrics(awsRequestMetrics); } finally { awsRequestMetrics.endEvent(Field.RequestMarshallTime); } HttpResponseHandler<AmazonWebServiceResponse<ListEventSubscriptionsResult>> responseHandler = protocolFactory.createResponseHandler( new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false), new ListEventSubscriptionsResultJsonUnmarshaller()); response = invoke(request, responseHandler, executionContext); return response.getAwsResponse(); } finally { endClientExecution(awsRequestMetrics, request, response); } } /** * <p> * Lists findings that are generated by the assessment runs that are specified by the ARNs of the assessment runs. * </p> * * @param listFindingsRequest * @return Result of the ListFindings operation returned by the service. * @throws InternalException * Internal server error. 
* @throws InvalidInputException * The request was rejected because an invalid or out-of-range value was supplied for an input parameter. * @throws AccessDeniedException * You do not have required permissions to access the requested resource. * @throws NoSuchEntityException * The request was rejected because it referenced an entity that does not exist. The error code describes * the entity. * @sample AmazonInspector.ListFindings * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/inspector-2016-02-16/ListFindings" target="_top">AWS API * Documentation</a> */ @Override public ListFindingsResult listFindings(ListFindingsRequest request) { request = beforeClientExecution(request); return executeListFindings(request); } @SdkInternalApi final ListFindingsResult executeListFindings(ListFindingsRequest listFindingsRequest) { ExecutionContext executionContext = createExecutionContext(listFindingsRequest); AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics(); awsRequestMetrics.startEvent(Field.ClientExecuteTime); Request<ListFindingsRequest> request = null; Response<ListFindingsResult> response = null; try { awsRequestMetrics.startEvent(Field.RequestMarshallTime); try { request = new ListFindingsRequestProtocolMarshaller(protocolFactory).marshall(super.beforeMarshalling(listFindingsRequest)); // Binds the request metrics to the current request. request.setAWSRequestMetrics(awsRequestMetrics); } finally { awsRequestMetrics.endEvent(Field.RequestMarshallTime); } HttpResponseHandler<AmazonWebServiceResponse<ListFindingsResult>> responseHandler = protocolFactory.createResponseHandler( new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false), new ListFindingsResultJsonUnmarshaller()); response = invoke(request, responseHandler, executionContext); return response.getAwsResponse(); } finally { endClientExecution(awsRequestMetrics, request, response); } } /** * <p> * Lists all available Amazon Inspector rules packages. 
* </p> * * @param listRulesPackagesRequest * @return Result of the ListRulesPackages operation returned by the service. * @throws InternalException * Internal server error. * @throws InvalidInputException * The request was rejected because an invalid or out-of-range value was supplied for an input parameter. * @throws AccessDeniedException * You do not have required permissions to access the requested resource. * @sample AmazonInspector.ListRulesPackages * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/inspector-2016-02-16/ListRulesPackages" target="_top">AWS * API Documentation</a> */ @Override public ListRulesPackagesResult listRulesPackages(ListRulesPackagesRequest request) { request = beforeClientExecution(request); return executeListRulesPackages(request); } @SdkInternalApi final ListRulesPackagesResult executeListRulesPackages(ListRulesPackagesRequest listRulesPackagesRequest) { ExecutionContext executionContext = createExecutionContext(listRulesPackagesRequest); AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics(); awsRequestMetrics.startEvent(Field.ClientExecuteTime); Request<ListRulesPackagesRequest> request = null; Response<ListRulesPackagesResult> response = null; try { awsRequestMetrics.startEvent(Field.RequestMarshallTime); try { request = new ListRulesPackagesRequestProtocolMarshaller(protocolFactory).marshall(super.beforeMarshalling(listRulesPackagesRequest)); // Binds the request metrics to the current request. 
request.setAWSRequestMetrics(awsRequestMetrics); } finally { awsRequestMetrics.endEvent(Field.RequestMarshallTime); } HttpResponseHandler<AmazonWebServiceResponse<ListRulesPackagesResult>> responseHandler = protocolFactory.createResponseHandler( new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false), new ListRulesPackagesResultJsonUnmarshaller()); response = invoke(request, responseHandler, executionContext); return response.getAwsResponse(); } finally { endClientExecution(awsRequestMetrics, request, response); } } /** * <p> * Lists all tags associated with an assessment template. * </p> * * @param listTagsForResourceRequest * @return Result of the ListTagsForResource operation returned by the service. * @throws InternalException * Internal server error. * @throws InvalidInputException * The request was rejected because an invalid or out-of-range value was supplied for an input parameter. * @throws AccessDeniedException * You do not have required permissions to access the requested resource. * @throws NoSuchEntityException * The request was rejected because it referenced an entity that does not exist. The error code describes * the entity. 
* @sample AmazonInspector.ListTagsForResource * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/inspector-2016-02-16/ListTagsForResource" target="_top">AWS * API Documentation</a> */ @Override public ListTagsForResourceResult listTagsForResource(ListTagsForResourceRequest request) { request = beforeClientExecution(request); return executeListTagsForResource(request); } @SdkInternalApi final ListTagsForResourceResult executeListTagsForResource(ListTagsForResourceRequest listTagsForResourceRequest) { ExecutionContext executionContext = createExecutionContext(listTagsForResourceRequest); AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics(); awsRequestMetrics.startEvent(Field.ClientExecuteTime); Request<ListTagsForResourceRequest> request = null; Response<ListTagsForResourceResult> response = null; try { awsRequestMetrics.startEvent(Field.RequestMarshallTime); try { request = new ListTagsForResourceRequestProtocolMarshaller(protocolFactory).marshall(super.beforeMarshalling(listTagsForResourceRequest)); // Binds the request metrics to the current request. request.setAWSRequestMetrics(awsRequestMetrics); } finally { awsRequestMetrics.endEvent(Field.RequestMarshallTime); } HttpResponseHandler<AmazonWebServiceResponse<ListTagsForResourceResult>> responseHandler = protocolFactory.createResponseHandler( new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false), new ListTagsForResourceResultJsonUnmarshaller()); response = invoke(request, responseHandler, executionContext); return response.getAwsResponse(); } finally { endClientExecution(awsRequestMetrics, request, response); } } /** * <p> * Previews the agents installed on the EC2 instances that are part of the specified assessment target. * </p> * * @param previewAgentsRequest * @return Result of the PreviewAgents operation returned by the service. * @throws InternalException * Internal server error. 
* @throws InvalidInputException * The request was rejected because an invalid or out-of-range value was supplied for an input parameter. * @throws AccessDeniedException * You do not have required permissions to access the requested resource. * @throws NoSuchEntityException * The request was rejected because it referenced an entity that does not exist. The error code describes * the entity. * @throws InvalidCrossAccountRoleException * Amazon Inspector cannot assume the cross-account role that it needs to list your EC2 instances during the * assessment run. * @sample AmazonInspector.PreviewAgents * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/inspector-2016-02-16/PreviewAgents" target="_top">AWS API * Documentation</a> */ @Override public PreviewAgentsResult previewAgents(PreviewAgentsRequest request) { request = beforeClientExecution(request); return executePreviewAgents(request); } @SdkInternalApi final PreviewAgentsResult executePreviewAgents(PreviewAgentsRequest previewAgentsRequest) { ExecutionContext executionContext = createExecutionContext(previewAgentsRequest); AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics(); awsRequestMetrics.startEvent(Field.ClientExecuteTime); Request<PreviewAgentsRequest> request = null; Response<PreviewAgentsResult> response = null; try { awsRequestMetrics.startEvent(Field.RequestMarshallTime); try { request = new PreviewAgentsRequestProtocolMarshaller(protocolFactory).marshall(super.beforeMarshalling(previewAgentsRequest)); // Binds the request metrics to the current request. 
request.setAWSRequestMetrics(awsRequestMetrics); } finally { awsRequestMetrics.endEvent(Field.RequestMarshallTime); } HttpResponseHandler<AmazonWebServiceResponse<PreviewAgentsResult>> responseHandler = protocolFactory.createResponseHandler( new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false), new PreviewAgentsResultJsonUnmarshaller()); response = invoke(request, responseHandler, executionContext); return response.getAwsResponse(); } finally { endClientExecution(awsRequestMetrics, request, response); } } /** * <p> * Registers the IAM role that Amazon Inspector uses to list your EC2 instances at the start of the assessment run * or when you call the <a>PreviewAgents</a> action. * </p> * * @param registerCrossAccountAccessRoleRequest * @return Result of the RegisterCrossAccountAccessRole operation returned by the service. * @throws InternalException * Internal server error. * @throws InvalidInputException * The request was rejected because an invalid or out-of-range value was supplied for an input parameter. * @throws AccessDeniedException * You do not have required permissions to access the requested resource. * @throws InvalidCrossAccountRoleException * Amazon Inspector cannot assume the cross-account role that it needs to list your EC2 instances during the * assessment run. 
* @sample AmazonInspector.RegisterCrossAccountAccessRole * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/inspector-2016-02-16/RegisterCrossAccountAccessRole" * target="_top">AWS API Documentation</a> */ @Override public RegisterCrossAccountAccessRoleResult registerCrossAccountAccessRole(RegisterCrossAccountAccessRoleRequest request) { request = beforeClientExecution(request); return executeRegisterCrossAccountAccessRole(request); } @SdkInternalApi final RegisterCrossAccountAccessRoleResult executeRegisterCrossAccountAccessRole(RegisterCrossAccountAccessRoleRequest registerCrossAccountAccessRoleRequest) { ExecutionContext executionContext = createExecutionContext(registerCrossAccountAccessRoleRequest); AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics(); awsRequestMetrics.startEvent(Field.ClientExecuteTime); Request<RegisterCrossAccountAccessRoleRequest> request = null; Response<RegisterCrossAccountAccessRoleResult> response = null; try { awsRequestMetrics.startEvent(Field.RequestMarshallTime); try { request = new RegisterCrossAccountAccessRoleRequestProtocolMarshaller(protocolFactory).marshall(super .beforeMarshalling(registerCrossAccountAccessRoleRequest)); // Binds the request metrics to the current request. 
request.setAWSRequestMetrics(awsRequestMetrics); } finally { awsRequestMetrics.endEvent(Field.RequestMarshallTime); } HttpResponseHandler<AmazonWebServiceResponse<RegisterCrossAccountAccessRoleResult>> responseHandler = protocolFactory.createResponseHandler( new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false), new RegisterCrossAccountAccessRoleResultJsonUnmarshaller()); response = invoke(request, responseHandler, executionContext); return response.getAwsResponse(); } finally { endClientExecution(awsRequestMetrics, request, response); } } /** * <p> * Removes entire attributes (key and value pairs) from the findings that are specified by the ARNs of the findings * where an attribute with the specified key exists. * </p> * * @param removeAttributesFromFindingsRequest * @return Result of the RemoveAttributesFromFindings operation returned by the service. * @throws InternalException * Internal server error. * @throws InvalidInputException * The request was rejected because an invalid or out-of-range value was supplied for an input parameter. * @throws AccessDeniedException * You do not have required permissions to access the requested resource. * @throws NoSuchEntityException * The request was rejected because it referenced an entity that does not exist. The error code describes * the entity. 
* @sample AmazonInspector.RemoveAttributesFromFindings * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/inspector-2016-02-16/RemoveAttributesFromFindings" * target="_top">AWS API Documentation</a> */ @Override public RemoveAttributesFromFindingsResult removeAttributesFromFindings(RemoveAttributesFromFindingsRequest request) { request = beforeClientExecution(request); return executeRemoveAttributesFromFindings(request); } @SdkInternalApi final RemoveAttributesFromFindingsResult executeRemoveAttributesFromFindings(RemoveAttributesFromFindingsRequest removeAttributesFromFindingsRequest) { ExecutionContext executionContext = createExecutionContext(removeAttributesFromFindingsRequest); AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics(); awsRequestMetrics.startEvent(Field.ClientExecuteTime); Request<RemoveAttributesFromFindingsRequest> request = null; Response<RemoveAttributesFromFindingsResult> response = null; try { awsRequestMetrics.startEvent(Field.RequestMarshallTime); try { request = new RemoveAttributesFromFindingsRequestProtocolMarshaller(protocolFactory).marshall(super .beforeMarshalling(removeAttributesFromFindingsRequest)); // Binds the request metrics to the current request. request.setAWSRequestMetrics(awsRequestMetrics); } finally { awsRequestMetrics.endEvent(Field.RequestMarshallTime); } HttpResponseHandler<AmazonWebServiceResponse<RemoveAttributesFromFindingsResult>> responseHandler = protocolFactory.createResponseHandler( new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false), new RemoveAttributesFromFindingsResultJsonUnmarshaller()); response = invoke(request, responseHandler, executionContext); return response.getAwsResponse(); } finally { endClientExecution(awsRequestMetrics, request, response); } } /** * <p> * Sets tags (key and value pairs) to the assessment template that is specified by the ARN of the assessment * template. 
* </p> * * @param setTagsForResourceRequest * @return Result of the SetTagsForResource operation returned by the service. * @throws InternalException * Internal server error. * @throws InvalidInputException * The request was rejected because an invalid or out-of-range value was supplied for an input parameter. * @throws AccessDeniedException * You do not have required permissions to access the requested resource. * @throws NoSuchEntityException * The request was rejected because it referenced an entity that does not exist. The error code describes * the entity. * @sample AmazonInspector.SetTagsForResource * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/inspector-2016-02-16/SetTagsForResource" target="_top">AWS * API Documentation</a> */ @Override public SetTagsForResourceResult setTagsForResource(SetTagsForResourceRequest request) { request = beforeClientExecution(request); return executeSetTagsForResource(request); } @SdkInternalApi final SetTagsForResourceResult executeSetTagsForResource(SetTagsForResourceRequest setTagsForResourceRequest) { ExecutionContext executionContext = createExecutionContext(setTagsForResourceRequest); AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics(); awsRequestMetrics.startEvent(Field.ClientExecuteTime); Request<SetTagsForResourceRequest> request = null; Response<SetTagsForResourceResult> response = null; try { awsRequestMetrics.startEvent(Field.RequestMarshallTime); try { request = new SetTagsForResourceRequestProtocolMarshaller(protocolFactory).marshall(super.beforeMarshalling(setTagsForResourceRequest)); // Binds the request metrics to the current request. 
request.setAWSRequestMetrics(awsRequestMetrics); } finally { awsRequestMetrics.endEvent(Field.RequestMarshallTime); } HttpResponseHandler<AmazonWebServiceResponse<SetTagsForResourceResult>> responseHandler = protocolFactory.createResponseHandler( new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false), new SetTagsForResourceResultJsonUnmarshaller()); response = invoke(request, responseHandler, executionContext); return response.getAwsResponse(); } finally { endClientExecution(awsRequestMetrics, request, response); } } /** * <p> * Starts the assessment run specified by the ARN of the assessment template. For this API to function properly, you * must not exceed the limit of running up to 500 concurrent agents per AWS account. * </p> * * @param startAssessmentRunRequest * @return Result of the StartAssessmentRun operation returned by the service. * @throws InternalException * Internal server error. * @throws InvalidInputException * The request was rejected because an invalid or out-of-range value was supplied for an input parameter. * @throws LimitExceededException * The request was rejected because it attempted to create resources beyond the current AWS account limits. * The error code describes the limit exceeded. * @throws AccessDeniedException * You do not have required permissions to access the requested resource. * @throws NoSuchEntityException * The request was rejected because it referenced an entity that does not exist. The error code describes * the entity. * @throws InvalidCrossAccountRoleException * Amazon Inspector cannot assume the cross-account role that it needs to list your EC2 instances during the * assessment run. * @throws AgentsAlreadyRunningAssessmentException * You started an assessment run, but one of the instances is already participating in another assessment * run. 
* @sample AmazonInspector.StartAssessmentRun * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/inspector-2016-02-16/StartAssessmentRun" target="_top">AWS * API Documentation</a> */ @Override public StartAssessmentRunResult startAssessmentRun(StartAssessmentRunRequest request) { request = beforeClientExecution(request); return executeStartAssessmentRun(request); } @SdkInternalApi final StartAssessmentRunResult executeStartAssessmentRun(StartAssessmentRunRequest startAssessmentRunRequest) { ExecutionContext executionContext = createExecutionContext(startAssessmentRunRequest); AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics(); awsRequestMetrics.startEvent(Field.ClientExecuteTime); Request<StartAssessmentRunRequest> request = null; Response<StartAssessmentRunResult> response = null; try { awsRequestMetrics.startEvent(Field.RequestMarshallTime); try { request = new StartAssessmentRunRequestProtocolMarshaller(protocolFactory).marshall(super.beforeMarshalling(startAssessmentRunRequest)); // Binds the request metrics to the current request. request.setAWSRequestMetrics(awsRequestMetrics); } finally { awsRequestMetrics.endEvent(Field.RequestMarshallTime); } HttpResponseHandler<AmazonWebServiceResponse<StartAssessmentRunResult>> responseHandler = protocolFactory.createResponseHandler( new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false), new StartAssessmentRunResultJsonUnmarshaller()); response = invoke(request, responseHandler, executionContext); return response.getAwsResponse(); } finally { endClientExecution(awsRequestMetrics, request, response); } } /** * <p> * Stops the assessment run that is specified by the ARN of the assessment run. * </p> * * @param stopAssessmentRunRequest * @return Result of the StopAssessmentRun operation returned by the service. * @throws InternalException * Internal server error. 
* @throws InvalidInputException * The request was rejected because an invalid or out-of-range value was supplied for an input parameter. * @throws AccessDeniedException * You do not have required permissions to access the requested resource. * @throws NoSuchEntityException * The request was rejected because it referenced an entity that does not exist. The error code describes * the entity. * @sample AmazonInspector.StopAssessmentRun * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/inspector-2016-02-16/StopAssessmentRun" target="_top">AWS * API Documentation</a> */ @Override public StopAssessmentRunResult stopAssessmentRun(StopAssessmentRunRequest request) { request = beforeClientExecution(request); return executeStopAssessmentRun(request); } @SdkInternalApi final StopAssessmentRunResult executeStopAssessmentRun(StopAssessmentRunRequest stopAssessmentRunRequest) { ExecutionContext executionContext = createExecutionContext(stopAssessmentRunRequest); AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics(); awsRequestMetrics.startEvent(Field.ClientExecuteTime); Request<StopAssessmentRunRequest> request = null; Response<StopAssessmentRunResult> response = null; try { awsRequestMetrics.startEvent(Field.RequestMarshallTime); try { request = new StopAssessmentRunRequestProtocolMarshaller(protocolFactory).marshall(super.beforeMarshalling(stopAssessmentRunRequest)); // Binds the request metrics to the current request. 
request.setAWSRequestMetrics(awsRequestMetrics); } finally { awsRequestMetrics.endEvent(Field.RequestMarshallTime); } HttpResponseHandler<AmazonWebServiceResponse<StopAssessmentRunResult>> responseHandler = protocolFactory.createResponseHandler( new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false), new StopAssessmentRunResultJsonUnmarshaller()); response = invoke(request, responseHandler, executionContext); return response.getAwsResponse(); } finally { endClientExecution(awsRequestMetrics, request, response); } } /** * <p> * Enables the process of sending Amazon Simple Notification Service (SNS) notifications about a specified event to * a specified SNS topic. * </p> * * @param subscribeToEventRequest * @return Result of the SubscribeToEvent operation returned by the service. * @throws InternalException * Internal server error. * @throws InvalidInputException * The request was rejected because an invalid or out-of-range value was supplied for an input parameter. * @throws LimitExceededException * The request was rejected because it attempted to create resources beyond the current AWS account limits. * The error code describes the limit exceeded. * @throws AccessDeniedException * You do not have required permissions to access the requested resource. * @throws NoSuchEntityException * The request was rejected because it referenced an entity that does not exist. The error code describes * the entity. 
* @sample AmazonInspector.SubscribeToEvent * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/inspector-2016-02-16/SubscribeToEvent" target="_top">AWS API * Documentation</a> */ @Override public SubscribeToEventResult subscribeToEvent(SubscribeToEventRequest request) { request = beforeClientExecution(request); return executeSubscribeToEvent(request); } @SdkInternalApi final SubscribeToEventResult executeSubscribeToEvent(SubscribeToEventRequest subscribeToEventRequest) { ExecutionContext executionContext = createExecutionContext(subscribeToEventRequest); AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics(); awsRequestMetrics.startEvent(Field.ClientExecuteTime); Request<SubscribeToEventRequest> request = null; Response<SubscribeToEventResult> response = null; try { awsRequestMetrics.startEvent(Field.RequestMarshallTime); try { request = new SubscribeToEventRequestProtocolMarshaller(protocolFactory).marshall(super.beforeMarshalling(subscribeToEventRequest)); // Binds the request metrics to the current request. request.setAWSRequestMetrics(awsRequestMetrics); } finally { awsRequestMetrics.endEvent(Field.RequestMarshallTime); } HttpResponseHandler<AmazonWebServiceResponse<SubscribeToEventResult>> responseHandler = protocolFactory.createResponseHandler( new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false), new SubscribeToEventResultJsonUnmarshaller()); response = invoke(request, responseHandler, executionContext); return response.getAwsResponse(); } finally { endClientExecution(awsRequestMetrics, request, response); } } /** * <p> * Disables the process of sending Amazon Simple Notification Service (SNS) notifications about a specified event to * a specified SNS topic. * </p> * * @param unsubscribeFromEventRequest * @return Result of the UnsubscribeFromEvent operation returned by the service. * @throws InternalException * Internal server error. 
* @throws InvalidInputException * The request was rejected because an invalid or out-of-range value was supplied for an input parameter. * @throws AccessDeniedException * You do not have required permissions to access the requested resource. * @throws NoSuchEntityException * The request was rejected because it referenced an entity that does not exist. The error code describes * the entity. * @sample AmazonInspector.UnsubscribeFromEvent * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/inspector-2016-02-16/UnsubscribeFromEvent" target="_top">AWS * API Documentation</a> */ @Override public UnsubscribeFromEventResult unsubscribeFromEvent(UnsubscribeFromEventRequest request) { request = beforeClientExecution(request); return executeUnsubscribeFromEvent(request); } @SdkInternalApi final UnsubscribeFromEventResult executeUnsubscribeFromEvent(UnsubscribeFromEventRequest unsubscribeFromEventRequest) { ExecutionContext executionContext = createExecutionContext(unsubscribeFromEventRequest); AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics(); awsRequestMetrics.startEvent(Field.ClientExecuteTime); Request<UnsubscribeFromEventRequest> request = null; Response<UnsubscribeFromEventResult> response = null; try { awsRequestMetrics.startEvent(Field.RequestMarshallTime); try { request = new UnsubscribeFromEventRequestProtocolMarshaller(protocolFactory).marshall(super.beforeMarshalling(unsubscribeFromEventRequest)); // Binds the request metrics to the current request. 
request.setAWSRequestMetrics(awsRequestMetrics); } finally { awsRequestMetrics.endEvent(Field.RequestMarshallTime); } HttpResponseHandler<AmazonWebServiceResponse<UnsubscribeFromEventResult>> responseHandler = protocolFactory.createResponseHandler( new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false), new UnsubscribeFromEventResultJsonUnmarshaller()); response = invoke(request, responseHandler, executionContext); return response.getAwsResponse(); } finally { endClientExecution(awsRequestMetrics, request, response); } } /** * <p> * Updates the assessment target that is specified by the ARN of the assessment target. * </p> * * @param updateAssessmentTargetRequest * @return Result of the UpdateAssessmentTarget operation returned by the service. * @throws InternalException * Internal server error. * @throws InvalidInputException * The request was rejected because an invalid or out-of-range value was supplied for an input parameter. * @throws AccessDeniedException * You do not have required permissions to access the requested resource. * @throws NoSuchEntityException * The request was rejected because it referenced an entity that does not exist. The error code describes * the entity. 
* @sample AmazonInspector.UpdateAssessmentTarget * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/inspector-2016-02-16/UpdateAssessmentTarget" * target="_top">AWS API Documentation</a> */ @Override public UpdateAssessmentTargetResult updateAssessmentTarget(UpdateAssessmentTargetRequest request) { request = beforeClientExecution(request); return executeUpdateAssessmentTarget(request); } @SdkInternalApi final UpdateAssessmentTargetResult executeUpdateAssessmentTarget(UpdateAssessmentTargetRequest updateAssessmentTargetRequest) { ExecutionContext executionContext = createExecutionContext(updateAssessmentTargetRequest); AWSRequestMetrics awsRequestMetrics = executionContext.getAwsRequestMetrics(); awsRequestMetrics.startEvent(Field.ClientExecuteTime); Request<UpdateAssessmentTargetRequest> request = null; Response<UpdateAssessmentTargetResult> response = null; try { awsRequestMetrics.startEvent(Field.RequestMarshallTime); try { request = new UpdateAssessmentTargetRequestProtocolMarshaller(protocolFactory).marshall(super.beforeMarshalling(updateAssessmentTargetRequest)); // Binds the request metrics to the current request. request.setAWSRequestMetrics(awsRequestMetrics); } finally { awsRequestMetrics.endEvent(Field.RequestMarshallTime); } HttpResponseHandler<AmazonWebServiceResponse<UpdateAssessmentTargetResult>> responseHandler = protocolFactory.createResponseHandler( new JsonOperationMetadata().withPayloadJson(true).withHasStreamingSuccessResponse(false), new UpdateAssessmentTargetResultJsonUnmarshaller()); response = invoke(request, responseHandler, executionContext); return response.getAwsResponse(); } finally { endClientExecution(awsRequestMetrics, request, response); } } /** * Returns additional metadata for a previously executed successful, request, typically used for debugging issues * where a service isn't acting as expected. 
This data isn't considered part of the result data returned by an * operation, so it's available through this separate, diagnostic interface. * <p> * Response metadata is only cached for a limited period of time, so if you need to access this extra diagnostic * information for an executed request, you should use this method to retrieve it as soon as possible after * executing the request. * * @param request * The originally executed request * * @return The response metadata for the specified request, or null if none is available. */ public ResponseMetadata getCachedResponseMetadata(AmazonWebServiceRequest request) { return client.getResponseMetadataForRequest(request); } /** * Normal invoke with authentication. Credentials are required and may be overriden at the request level. **/ private <X, Y extends AmazonWebServiceRequest> Response<X> invoke(Request<Y> request, HttpResponseHandler<AmazonWebServiceResponse<X>> responseHandler, ExecutionContext executionContext) { executionContext.setCredentialsProvider(CredentialUtils.getCredentialsProvider(request.getOriginalRequest(), awsCredentialsProvider)); return doInvoke(request, responseHandler, executionContext); } /** * Invoke with no authentication. Credentials are not required and any credentials set on the client or request will * be ignored for this operation. **/ private <X, Y extends AmazonWebServiceRequest> Response<X> anonymousInvoke(Request<Y> request, HttpResponseHandler<AmazonWebServiceResponse<X>> responseHandler, ExecutionContext executionContext) { return doInvoke(request, responseHandler, executionContext); } /** * Invoke the request using the http client. Assumes credentials (or lack thereof) have been configured in the * ExecutionContext beforehand. 
**/ private <X, Y extends AmazonWebServiceRequest> Response<X> doInvoke(Request<Y> request, HttpResponseHandler<AmazonWebServiceResponse<X>> responseHandler, ExecutionContext executionContext) { request.setEndpoint(endpoint); request.setTimeOffset(timeOffset); HttpResponseHandler<AmazonServiceException> errorResponseHandler = protocolFactory.createErrorResponseHandler(new JsonErrorResponseMetadata()); return client.execute(request, responseHandler, errorResponseHandler, executionContext); } }
apache-2.0
Nokorbis/ar-command-signs
plugin/src/main/java/be/nokorbis/spigot/commandsigns/command/subcommands/InfoCommand.java
2078
package be.nokorbis.spigot.commandsigns.command.subcommands;

import be.nokorbis.spigot.commandsigns.command.CommandRequiringManager;
import be.nokorbis.spigot.commandsigns.controller.NCommandSignsManager;
import be.nokorbis.spigot.commandsigns.model.CommandBlock;
import be.nokorbis.spigot.commandsigns.model.CommandBlockPendingInteraction;
import be.nokorbis.spigot.commandsigns.model.CommandSignsCommandException;
import be.nokorbis.spigot.commandsigns.utils.CommandSignUtils;
import org.bukkit.command.CommandSender;
import org.bukkit.entity.Player;

import java.util.List;

/**
 * Subcommand handler for {@code /commandsign info [ID]} (alias {@code i}).
 *
 * <p>With a numeric ID argument it prints the configuration of that command block
 * directly; with no argument it registers a pending interaction so the next sign
 * the player clicks is inspected instead.</p>
 *
 * Created by nokorbis on 1/20/16.
 */
public class InfoCommand extends CommandRequiringManager {

	public InfoCommand(NCommandSignsManager manager) {
		// "info" is the subcommand name; "i" is its short alias.
		super(manager, "info", new String[] {"i"});
		this.basePermission = "commandsign.admin.info";
	}

	/**
	 * Executes the info subcommand.
	 *
	 * @param sender the command sender; must be an in-game {@link Player}
	 * @param args   optional single argument: the numeric ID of a command block
	 * @return always {@code true} (command handled)
	 * @throws CommandSignsCommandException if the sender is not a player, the ID is
	 *         not a number, or no command block exists with the given ID
	 */
	@Override
	public boolean execute(CommandSender sender, List<String> args) throws CommandSignsCommandException {
		// Info output is shown to a player; console senders are rejected.
		if (!(sender instanceof Player)) {
			throw new CommandSignsCommandException(commandMessages.get("error.command.player_requirement"));
		}
		Player player = (Player) sender;
		if (args.isEmpty()) {
			// No ID given: defer to a click interaction — the next block the
			// player clicks will have its info displayed.
			if (isPlayerAvailable(player)) {
				CommandBlockPendingInteraction interaction = new CommandBlockPendingInteraction();
				interaction.type = CommandBlockPendingInteraction.Type.INFO;
				interaction.player = player;
				manager.addPendingInteraction(interaction);
				player.sendMessage(commandMessages.get("howto.click_for_info"));
			}
		}
		else {
			try {
				// ID given: look the command block up directly.
				long id = Long.parseLong(args.get(0));
				CommandBlock cmd = manager.getCommandBlock(id);
				if (cmd == null) {
					throw new CommandSignsCommandException(commandMessages.get("error.invalid_command_id"));
				}
				CommandSignUtils.info(player, cmd, manager.getAddons());
			}
			catch (NumberFormatException ex) {
				// Non-numeric argument — reuse the localized error message.
				throw new CommandSignsCommandException(commandMessages.get("error.command.number_requirement"));
			}
		}
		return true;
	}

	/** Prints the usage line for this subcommand to the given sender. */
	@Override
	public void printUsage(CommandSender sender) {
		sender.sendMessage("/commandsign info [ID]");
	}
}
apache-2.0
stevshil/tpsexam
site/includes.php
65
<?php
// Append the current working directory to PHP's include path so relative
// require/include calls resolve files from the site root.
// Use PATH_SEPARATOR (':' on POSIX, ';' on Windows) instead of a hard-coded
// ':' — the literal colon corrupts the include path on Windows hosts.
set_include_path(get_include_path() . PATH_SEPARATOR . getcwd());
?>
apache-2.0
DongPoSu/LearnJava
src/main/java/learn/nio/channel/file/ChannelTransfer.java
959
package learn.nio.channel.file; import java.io.FileInputStream; import java.io.IOException; import java.nio.channels.Channels; import java.nio.channels.FileChannel; import java.nio.channels.WritableByteChannel; /** * @author suzheng * @version v.1.0 */ public class ChannelTransfer { public static void main(String[] args) throws IOException { if(args.length ==0){ System.out.println("Usage: filename ..."); return; } catFiles(Channels.newChannel(System.out), args); } private static void catFiles(WritableByteChannel target, String[] args) throws IOException { for (int i = 0; i < args.length; i++) { FileInputStream fileInputStream = new FileInputStream(args[i]); FileChannel channel = fileInputStream.getChannel(); channel.transferTo(0, channel.size(),target); channel.close(); fileInputStream.close(); } } }
apache-2.0
mmmsplay10/QuizUpWinner
quizup/o/Àé$Àä.java
309
package o;

import java.util.ArrayList;

// NOTE(review): decompiled, obfuscator-mangled class (see the dex2jar/JD-Core
// footer below); the '$' in the name marks it as a nested class in the original
// bytecode. Field semantics cannot be determined from this view — it appears to
// be a plain mutable data holder used elsewhere in package 'o'; confirm against
// call sites before relying on any field's meaning.
final class ˎ$ˊ {
    Object ˊ;                 // purpose unknown (obfuscated name)
    Object ˋ;                 // purpose unknown (obfuscated name)
    cON<String, Object> ˎ;    // 'cON' is a project-declared generic type — TODO confirm semantics
    ArrayList<iF> ˏ;          // list of project type 'iF'
    cON<String, auX> ᐝ;       // 'auX' is a project-declared type — TODO confirm semantics
}

/* Location: /Users/vikas/Documents/Mhacks_Real_app/classes-dex2jar.jar
 * Qualified Name: o.Àé.Àä
 * JD-Core Version: 0.6.2
 */
apache-2.0
andresriancho/moto
moto/route53/responses.py
4858
from jinja2 import Template
from urlparse import parse_qs, urlparse

from .models import route53_backend
import xmltodict
import dicttoxml


def list_or_create_hostzone_response(request, full_url, headers):
    """POST creates a hosted zone; GET lists all hosted zones.

    Returns a (status, headers, body) tuple; other methods return None.
    """
    if request.method == "POST":
        elements = xmltodict.parse(request.body)
        new_zone = route53_backend.create_hosted_zone(
            elements["CreateHostedZoneRequest"]["Name"])
        template = Template(CREATE_HOSTED_ZONE_RESPONSE)
        return 201, headers, template.render(zone=new_zone)
    elif request.method == "GET":
        all_zones = route53_backend.get_all_hosted_zones()
        template = Template(LIST_HOSTED_ZONES_RESPONSE)
        return 200, headers, template.render(zones=all_zones)


def get_or_delete_hostzone_response(request, full_url, headers):
    """GET describes a single hosted zone; DELETE removes it."""
    parsed_url = urlparse(full_url)
    # Zone id is the last path segment: .../hostedzone/<id>
    zoneid = parsed_url.path.rstrip('/').rsplit('/', 1)[1]
    the_zone = route53_backend.get_hosted_zone(zoneid)
    if not the_zone:
        return 404, headers, "Zone %s not Found" % zoneid

    if request.method == "GET":
        template = Template(GET_HOSTED_ZONE_RESPONSE)
        return 200, headers, template.render(zone=the_zone)
    elif request.method == "DELETE":
        route53_backend.delete_hosted_zone(zoneid)
        return 200, headers, DELETE_HOSTED_ZONE_RESPONSE


def rrset_response(request, full_url, headers):
    """POST applies a ChangeResourceRecordSets batch; GET lists rrsets."""
    parsed_url = urlparse(full_url)
    method = request.method

    # Path looks like .../hostedzone/<id>/rrset/ - zone id is second-to-last.
    zoneid = parsed_url.path.rstrip('/').rsplit('/', 2)[1]
    the_zone = route53_backend.get_hosted_zone(zoneid)
    if not the_zone:
        return 404, headers, "Zone %s Not Found" % zoneid

    if method == "POST":
        elements = xmltodict.parse(request.body)
        change_list = elements['ChangeResourceRecordSetsRequest'][
            'ChangeBatch']['Changes']['Change']
        # xmltodict collapses a single <Change> element into a dict rather
        # than a one-element list; normalize so the loop below always works.
        if not isinstance(change_list, list):
            change_list = [change_list]

        for value in change_list:
            action = value['Action']
            rrset = value['ResourceRecordSet']
            if action == 'CREATE':
                the_zone.add_rrset(rrset["Name"], rrset)
            elif action == "DELETE":
                the_zone.delete_rrset(rrset["Name"])

        return 200, headers, CHANGE_RRSET_RESPONSE
    elif method == "GET":
        querystring = parse_qs(parsed_url.query)
        template = Template(LIST_RRSET_RESPONSE)
        rrset_list = []
        for key, value in the_zone.rrsets.items():
            # Honor the optional ?type= and ?name= query-string filters.
            if 'type' in querystring and querystring["type"][0] != value["Type"]:
                continue
            if 'name' in querystring and querystring["name"][0] != value["Name"]:
                continue
            rrset_list.append(
                dicttoxml.dicttoxml({"ResourceRecordSet": value}, root=False))
        return 200, headers, template.render(rrsets=rrset_list)


LIST_RRSET_RESPONSE = """<ListResourceRecordSetsResponse xmlns="https://route53.amazonaws.com/doc/2012-12-12/">
   <ResourceRecordSets>
   {% for rrset in rrsets %}
      {{ rrset }}
   {% endfor %}
   </ResourceRecordSets>
</ListResourceRecordSetsResponse>"""

# Backward-compatible alias: the constant was originally misspelled and may
# still be imported elsewhere under the old name.
LIST_RRSET_REPONSE = LIST_RRSET_RESPONSE

CHANGE_RRSET_RESPONSE = """<ChangeResourceRecordSetsResponse xmlns="https://route53.amazonaws.com/doc/2012-12-12/">
   <ChangeInfo>
      <Status>PENDING</Status>
      <SubmittedAt>2010-09-10T01:36:41.958Z</SubmittedAt>
   </ChangeInfo>
</ChangeResourceRecordSetsResponse>"""

DELETE_HOSTED_ZONE_RESPONSE = """<DeleteHostedZoneResponse xmlns="https://route53.amazonaws.com/doc/2012-12-12/">
   <ChangeInfo>
   </ChangeInfo>
</DeleteHostedZoneResponse>"""

GET_HOSTED_ZONE_RESPONSE = """<GetHostedZoneResponse xmlns="https://route53.amazonaws.com/doc/2012-12-12/">
   <HostedZone>
      <Id>/hostedzone/{{ zone.id }}</Id>
      <Name>{{ zone.name }}</Name>
      <ResourceRecordSetCount>{{ zone.rrsets|count }}</ResourceRecordSetCount>
   </HostedZone>
   <DelegationSet>
      <NameServer>moto.test.com</NameServer>
   </DelegationSet>
</GetHostedZoneResponse>"""

CREATE_HOSTED_ZONE_RESPONSE = """<CreateHostedZoneResponse xmlns="https://route53.amazonaws.com/doc/2012-12-12/">
   <HostedZone>
      <Id>/hostedzone/{{ zone.id }}</Id>
      <Name>{{ zone.name }}</Name>
      <ResourceRecordSetCount>0</ResourceRecordSetCount>
   </HostedZone>
   <DelegationSet>
      <NameServers>
         <NameServer>moto.test.com</NameServer>
      </NameServers>
   </DelegationSet>
</CreateHostedZoneResponse>"""

LIST_HOSTED_ZONES_RESPONSE = """<ListHostedZonesResponse xmlns="https://route53.amazonaws.com/doc/2012-12-12/">
   <HostedZones>
      {% for zone in zones %}
      <HostedZone>
         <Id>{{ zone.id }}</Id>
         <Name>{{ zone.name }}</Name>
         <ResourceRecordSetCount>{{ zone.rrsets|count }}</ResourceRecordSetCount>
      </HostedZone>
      {% endfor %}
   </HostedZones>
</ListHostedZonesResponse>"""
apache-2.0
googleapis/python-pubsublite
google/cloud/pubsublite/cloudpubsub/message_transforms.py
4968
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Transforms between Pub/Sub Lite wire messages and Cloud Pub/Sub messages.
# Event timestamps ride in a reserved attribute (PUBSUB_LITE_EVENT_TIME)
# because the CPS message has no first-class event-time field.

import datetime

from google.api_core.exceptions import InvalidArgument
from google.protobuf.timestamp_pb2 import Timestamp  # pytype: disable=pyi-error
from google.pubsub_v1 import PubsubMessage

from google.cloud.pubsublite.cloudpubsub import MessageTransformer
from google.cloud.pubsublite.internal import fast_serialize
from google.cloud.pubsublite.types import Partition, MessageMetadata
from google.cloud.pubsublite_v1 import AttributeValues, SequencedMessage, PubSubMessage

# Reserved attribute key carrying the Pub/Sub Lite event time.
PUBSUB_LITE_EVENT_TIME = "x-goog-pubsublite-event-time"


def _encode_attribute_event_time_proto(ts: Timestamp) -> str:
    # Serializes as a compact [seconds, nanos] pair.
    return fast_serialize.dump([ts.seconds, ts.nanos])


def _decode_attribute_event_time_proto(attr: str) -> Timestamp:
    # Inverse of _encode_attribute_event_time_proto; any parse failure is
    # surfaced as InvalidArgument rather than leaking internal exceptions.
    try:
        ts = Timestamp()
        loaded = fast_serialize.load(attr)
        ts.seconds = loaded[0]
        ts.nanos = loaded[1]
        return ts
    except Exception:  # noqa: E722
        raise InvalidArgument("Invalid value for event time attribute.")


def encode_attribute_event_time(dt: datetime.datetime) -> str:
    # Normalizes to UTC before encoding so the wire value is zone-independent.
    ts = Timestamp()
    ts.FromDatetime(dt.astimezone(datetime.timezone.utc))
    return _encode_attribute_event_time_proto(ts)


def decode_attribute_event_time(attr: str) -> datetime.datetime:
    # Returned datetime is explicitly tagged UTC (ToDatetime yields naive).
    return (
        _decode_attribute_event_time_proto(attr)
        .ToDatetime()
        .replace(tzinfo=datetime.timezone.utc)
    )


def _parse_attributes(values: AttributeValues) -> str:
    # CPS attributes are single-valued utf-8 strings; reject anything else.
    if not len(values.values) == 1:
        raise InvalidArgument(
            "Received an unparseable message with multiple values for an attribute."
        )
    value: bytes = values.values[0]
    try:
        return value.decode("utf-8")
    except UnicodeError:
        raise InvalidArgument(
            "Received an unparseable message with a non-utf8 attribute."
        )


def add_id_to_cps_subscribe_transformer(
    partition: Partition, transformer: MessageTransformer
) -> MessageTransformer:
    # Wraps `transformer` so the resulting CPS message carries a message_id
    # derived from (partition, offset); fails if the inner transformer
    # already set one.
    def add_id_to_message(source: SequencedMessage):
        source_pb = source._pb
        message: PubsubMessage = transformer.transform(source)
        message_pb = message._pb
        if message_pb.message_id:
            raise InvalidArgument(
                "Message after transforming has the message_id field set."
            )
        message_pb.message_id = MessageMetadata._encode_parts(
            partition.value, source_pb.cursor.offset
        )
        return message

    return MessageTransformer.of_callable(add_id_to_message)


def to_cps_subscribe_message(source: SequencedMessage) -> PubsubMessage:
    # Subscribe path: convert the wrapped message, then copy publish_time over.
    source_pb = source._pb
    out_pb = _to_cps_publish_message_proto(source_pb.message)
    out_pb.publish_time.CopyFrom(source_pb.publish_time)
    out = PubsubMessage()
    out._pb = out_pb
    return out


def _to_cps_publish_message_proto(
    source: PubSubMessage.meta.pb,
) -> PubsubMessage.meta.pb:
    # Core Lite->CPS conversion: key becomes ordering_key, attributes are
    # flattened to single utf-8 values, event_time moves into the reserved
    # attribute. Raises InvalidArgument on non-utf8 data or a reserved-key
    # collision.
    out = PubsubMessage.meta.pb()
    try:
        out.ordering_key = source.key.decode("utf-8")
    except UnicodeError:
        raise InvalidArgument("Received an unparseable message with a non-utf8 key.")
    if PUBSUB_LITE_EVENT_TIME in source.attributes:
        raise InvalidArgument(
            "Special timestamp attribute exists in wire message. Unable to parse message."
        )
    out.data = source.data
    for key, values in source.attributes.items():
        out.attributes[key] = _parse_attributes(values)
    if source.HasField("event_time"):
        out.attributes[PUBSUB_LITE_EVENT_TIME] = _encode_attribute_event_time_proto(
            source.event_time
        )
    return out


def to_cps_publish_message(source: PubSubMessage) -> PubsubMessage:
    # Public wrapper around _to_cps_publish_message_proto for publish paths.
    out = PubsubMessage()
    out._pb = _to_cps_publish_message_proto(source._pb)
    return out


def from_cps_publish_message(source: PubsubMessage) -> PubSubMessage:
    # Inverse conversion (CPS -> Lite): the reserved event-time attribute is
    # decoded back into the event_time field and excluded from attributes.
    source_pb = source._pb
    out = PubSubMessage()
    out_pb = out._pb
    if PUBSUB_LITE_EVENT_TIME in source_pb.attributes:
        out_pb.event_time.CopyFrom(
            _decode_attribute_event_time_proto(
                source_pb.attributes[PUBSUB_LITE_EVENT_TIME]
            )
        )
    out_pb.data = source_pb.data
    out_pb.key = source_pb.ordering_key.encode("utf-8")
    for key, value in source_pb.attributes.items():
        if key != PUBSUB_LITE_EVENT_TIME:
            out_pb.attributes[key].values.append(value.encode("utf-8"))
    return out
apache-2.0
google/apis-client-generator
src/googleapis/codegen/configuration_test.py
4793
#!/usr/bin/python2.7
# Copyright 2012 Google Inc. All Rights Reserved.

"""Tests validity of the apiserving/libgen/gen configuration files.

This looks at the contents of the config files used by the code generator
to make sure they won't blow up at run time.
"""

__author__ = 'aiuto@google.com (Tony Aiuto)'

import os
import re

from google.apputils import basetest
from googleapis.codegen import platforms
from googleapis.codegen.utilities import json_expander
from googleapis.codegen.utilities import json_with_comments


class ConfigurationTest(basetest.TestCase):

  # Root of the source tree that is scanned for config files.
  _SRC_DATA_DIR = os.path.dirname(__file__)

  @staticmethod
  def WalkFileTree(pattern, root=_SRC_DATA_DIR):
    """Walk the source file tree and return file paths matching the pattern.

    Args:
      pattern: (str) A regex for a file pattern.
      root: (str) root of search tree.
    Yields:
      (str) list of path names
    """
    # Walk tree for jar files to directly include
    matcher = re.compile(pattern)
    for root, unused_dirs, file_names in os.walk(root):
      for file_name in file_names:
        if matcher.match(file_name):
          yield os.path.join(root, file_name)

  def LoadJsonFile(self, path, expand=False):
    """Loads a file but ignores the broken ones.

    Fails a test assertion if the file is not loadable.

    Args:
      path: (str) path to file.
      expand: (bool, default False) whether to expand as a Json template.
    Returns:
      (dict) or None if the file is in an allow list of known broken files.
    """
    json_file = open(path)
    content = json_file.read()
    # Guard against empty/near-empty config files.
    self.assertLess(1, len(content))
    json_file.close()
    try:
      json_data = json_with_comments.Loads(content)
    except ValueError as err:
      # Ignore the known broken files.
      if not path.endswith('testdata/broken.json'):
        self.fail('%s: %s' % (path, err))
      return None
    if expand:
      json_data = json_expander.ExpandJsonTemplate(json_data)
    return json_data

  def testCheckAllJsonFiles(self):
    # Every .json file in the tree must parse to a dict.
    for path in self.WalkFileTree(r'.*\.json$'):
      json_data = self.LoadJsonFile(path)
      if json_data:
        self.assertTrue(isinstance(json_data, dict))

  def testCheckAllFeaturesFiles(self):
    """Make sure the features.json files obey the rules."""

    def CheckFileContent(path, json_data):
      # Validates required keys and the requires/environments/files schema.
      def HasElement(e):
        if json_data.get(e) is None:
          self.fail('%s: is missing "%s"' % (path, e))
      HasElement('description')
      HasElement('releaseVersion')
      language = json_data.get('language')
      possible_environments = set(platforms.PLATFORMS[platforms.ALL])
      if language:
        for p in platforms.PLATFORMS.get(language, []):
          possible_environments.add(p)
      for r in json_data.get('requires', []):
        def HasRequiresElement(d, e):
          if d.get(e) is None:
            self.fail('%s: "requires" item is missing "%s"' % (path, e))
        HasRequiresElement(r, 'name')
        HasRequiresElement(r, 'version')
        HasRequiresElement(r, 'environments')
        environments = r['environments']
        for e in environments:
          if e not in possible_environments:
            self.fail('%s: bad environment list: %s in %s'
                      % (path, environments, r))
        for f in r.get('files', []):
          file_type = f['type']
          if file_type not in platforms.FILE_TYPES[platforms.ALL]:
            self.fail('%s: unknown file type %s in %s'
                      % (path, file_type, r))

    for path in self.WalkFileTree(r'features\.json$'):
      json_data = self.LoadJsonFile(path, True)
      if json_data:
        CheckFileContent(path, json_data)

  def testCheckDependenciesExist(self):
    # Test that the files in requires actually exist.
    nonexistent = {}
    for path in self.WalkFileTree(r'features\.json$'):
      # Skip this check for 'default' versions of language surface. Those don't
      # get used in the service, so they don't need dependencies
      if 'default' in path:
        continue
      # Skip this check for test files.
      if path.find('/testdata/') >= 0:
        continue
      json_data = self.LoadJsonFile(path, True)
      if not json_data:
        raise Exception('cannot parse json: %s' % path)
      features_dir = os.path.dirname(path)
      paths = []
      for r in json_data.get('requires', []):
        for f in r.get('files', []):
          file_path = f.get('path')
          if file_path:
            p = os.path.join(features_dir, 'dependencies', file_path)
            paths.append(p)
      missing = [p for p in paths if not os.path.exists(p)]
      if missing:
        nonexistent[path] = missing
    self.assertTrue(not nonexistent, 'paths not found: %s' % nonexistent)


if __name__ == '__main__':
  basetest.main()
apache-2.0
rickbw/crud-http
src/main/java/crud/http/util/FailedResponseOperator.java
7059
/* Copyright 2014 Rick Warren
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package crud.http.util;

import javax.ws.rs.core.Response;

import com.google.common.collect.ContiguousSet;
import com.google.common.collect.DiscreteDomain;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Range;
import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.UniformInterfaceException;

import crud.fluent.FluentReadableResource;
import crud.fluent.FluentReadableResourceProvider;
import rx.Observable;
import rx.Observer;
import rx.Subscriber;


/**
 * Allows an {@link Observer} to treat {@link ClientResponse}s that represent
 * errors intuitively, as errors, via {@link Observer#onError(Throwable)},
 * rather than as expected values, via {@link Observer#onNext(Object)}.
 * The application provides a set of statuses that it wishes to treat as
 * errors, and this class will wrap responses with those statuses into
 * {@link UniformInterfaceException} and dispatch them to {@code onError}.
 *
 * If retries are desired, it's recommended to pass an instance of this class
 * to {@code lift} before calling {@code retry}. Otherwise, the only errors
 * that will be detected and retried are those that result in exceptions from
 * Jersey.
 *
 * @see UniformInterfaceException#getResponse()
 * @see FluentReadableResource#lift(rx.Observable.Operator)
 * @see FluentReadableResourceProvider#lift(rx.Observable.Operator)
 * @see FluentReadableResource#retry(int)
 * @see FluentReadableResourceProvider#retry(int)
 */
public final class FailedResponseOperator
implements Observable.Operator<ClientResponse, ClientResponse> {

    // Inclusive bounds of the HTTP status ranges used by the factories below.
    private static final int MIN_STATUS_CODE = 100;
    private static final int MAX_STATUS_CODE = 599;
    private static final int MIN_SUCCESS_STATUS_CODE = 200;
    private static final int MAX_SUCCESS_STATUS_CODE = 299;
    private static final int MIN_SERVER_ERROR_STATUS_CODE = 500;
    private static final int MAX_SERVER_ERROR_STATUS_CODE = 599;

    // NOTE(review): these caches are lazily initialized without
    // synchronization. The race is benign (instances are immutable and
    // equivalent, so a duplicate may be created and one discarded), but a
    // lazy-holder idiom would make that explicit — confirm intent.
    private static FailedResponseOperator serverErrors = null;
    private static FailedResponseOperator nonSuccessResponses = null;

    /** Immutable set of status codes to report via {@code onError}. */
    private final ImmutableSet<Integer> failedStatuses;


    /**
     * Treat all 500-range responses as errors.
     */
    public static FailedResponseOperator serverErrors() {
        if (serverErrors == null) {
            // Don't delegate to fromStatusCodes(): it does extraneous checking
            serverErrors = new FailedResponseOperator(ContiguousSet.create(
                    Range.closed(MIN_SERVER_ERROR_STATUS_CODE, MAX_SERVER_ERROR_STATUS_CODE),
                    DiscreteDomain.integers()));
        }
        return serverErrors;
    }

    /**
     * Treat all non-200-range responses as errors.
     */
    public static FailedResponseOperator nonSuccessResponses() {
        if (nonSuccessResponses == null) {
            // Union of [100, 200) and (299, 599]: everything outside 2xx.
            final ImmutableSet<Integer> prefix = ContiguousSet.create(
                    Range.closedOpen(MIN_STATUS_CODE, MIN_SUCCESS_STATUS_CODE),
                    DiscreteDomain.integers());
            final ImmutableSet<Integer> suffix = ContiguousSet.create(
                    Range.openClosed(MAX_SUCCESS_STATUS_CODE, MAX_STATUS_CODE),
                    DiscreteDomain.integers());
            final ImmutableSet<Integer> all = ImmutableSet.<Integer>builder()
                    .addAll(prefix)
                    .addAll(suffix)
                    .build();
            // Don't delegate to fromStatusCodes(): it does extraneous checking
            nonSuccessResponses = new FailedResponseOperator(all);
        }
        return nonSuccessResponses;
    }

    /** Builds an operator from Jersey {@link ClientResponse.Status} values. */
    public static FailedResponseOperator fromClientResponseStatuses(final Iterable<ClientResponse.Status> statuses) {
        final ImmutableSet.Builder<Integer> statusCodes = new ImmutableSet.Builder<>();
        for (final ClientResponse.Status status : statuses) {
            statusCodes.add(status.getStatusCode());
        }
        // Don't delegate to fromStatusCodes(): it does extraneous checking
        return new FailedResponseOperator(statusCodes.build());
    }

    /** Builds an operator from JAX-RS {@link Response.Status} values. */
    public static FailedResponseOperator fromResponseStatuses(final Iterable<Response.Status> statuses) {
        final ImmutableSet.Builder<Integer> statusCodes = new ImmutableSet.Builder<>();
        for (final Response.Status status : statuses) {
            statusCodes.add(status.getStatusCode());
        }
        // Don't delegate to fromStatusCodes(): it does extraneous checking
        return new FailedResponseOperator(statusCodes.build());
    }

    /**
     * Builds an operator from raw integer status codes.
     *
     * @throws IllegalArgumentException if any code is outside [100, 599]
     */
    public static FailedResponseOperator fromStatusCodes(final Iterable<Integer> statuses) {
        for (final Integer status : statuses) {
            if (status < MIN_STATUS_CODE || status > MAX_STATUS_CODE) {
                throw new IllegalArgumentException("HTTP status code out of range: " + status);
            }
        }
        return new FailedResponseOperator(statuses);
    }

    private FailedResponseOperator(final Iterable<Integer> failedStatuses) {
        this.failedStatuses = ImmutableSet.copyOf(failedStatuses);
    }

    @Override
    public Subscriber<? super ClientResponse> call(final Subscriber<? super ClientResponse> sub) {
        return new ErrorResponseSubscriber(sub);
    }


    /**
     * Forwards events downstream, converting any response whose status is in
     * {@code failedStatuses} into an {@code onError} and suppressing all
     * subsequent events after the first error (per the Rx contract).
     */
    private final class ErrorResponseSubscriber extends Subscriber<ClientResponse> {
        private final Subscriber<? super ClientResponse> delegate;
        // Once an error has been emitted, all further events are dropped.
        private volatile boolean errorOccurred = false;

        public ErrorResponseSubscriber(final Subscriber<? super ClientResponse> delegate) {
            super(delegate);
            this.delegate = delegate;
        }

        @Override
        public void onNext(final ClientResponse response) {
            if (this.errorOccurred) {
                return;
            }
            if (FailedResponseOperator.this.failedStatuses.contains(response.getStatus())) {
                this.errorOccurred = true;
                this.delegate.onError(new UniformInterfaceException(response));
            } else {
                this.delegate.onNext(response);
            }
        }

        @Override
        public void onCompleted() {
            if (this.errorOccurred) {
                return;
            }
            this.delegate.onCompleted();
        }

        @Override
        public void onError(final Throwable e) {
            if (this.errorOccurred) {
                return;
            }
            this.delegate.onError(e);
        }
    }
}
apache-2.0
knutwalker/google-closure-library
closure/goog/string/string.js
43741
// Copyright 2006 The Closure Library Authors. All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS-IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. /** * @fileoverview Utilities for string manipulation. */ /** * Namespace for string utilities */ goog.provide('goog.string'); goog.provide('goog.string.Unicode'); /** * Common Unicode string characters. * @enum {string} */ goog.string.Unicode = { NBSP: '\xa0' }; /** * Fast prefix-checker. * @param {string} str The string to check. * @param {string} prefix A string to look for at the start of {@code str}. * @return {boolean} True if {@code str} begins with {@code prefix}. */ goog.string.startsWith = function(str, prefix) { return str.lastIndexOf(prefix, 0) == 0; }; /** * Fast suffix-checker. * @param {string} str The string to check. * @param {string} suffix A string to look for at the end of {@code str}. * @return {boolean} True if {@code str} ends with {@code suffix}. */ goog.string.endsWith = function(str, suffix) { var l = str.length - suffix.length; return l >= 0 && str.indexOf(suffix, l) == l; }; /** * Case-insensitive prefix-checker. * @param {string} str The string to check. * @param {string} prefix A string to look for at the end of {@code str}. * @return {boolean} True if {@code str} begins with {@code prefix} (ignoring * case). */ goog.string.caseInsensitiveStartsWith = function(str, prefix) { return goog.string.caseInsensitiveCompare( prefix, str.substr(0, prefix.length)) == 0; }; /** * Case-insensitive suffix-checker. 
* @param {string} str The string to check. * @param {string} suffix A string to look for at the end of {@code str}. * @return {boolean} True if {@code str} ends with {@code suffix} (ignoring * case). */ goog.string.caseInsensitiveEndsWith = function(str, suffix) { return goog.string.caseInsensitiveCompare( suffix, str.substr(str.length - suffix.length, suffix.length)) == 0; }; /** <<<<<<< HEAD ======= * Case-insensitive equality checker. * @param {string} str1 First string to check. * @param {string} str2 Second string to check. * @return {boolean} True if {@code str1} and {@code str2} are the same string, * ignoring case. */ goog.string.caseInsensitiveEquals = function(str1, str2) { return str1.toLowerCase() == str2.toLowerCase(); }; /** >>>>>>> newgitrepo * Does simple python-style string substitution. * subs("foo%s hot%s", "bar", "dog") becomes "foobar hotdog". * @param {string} str The string containing the pattern. * @param {...*} var_args The items to substitute into the pattern. * @return {string} A copy of {@code str} in which each occurrence of * {@code %s} has been replaced an argument from {@code var_args}. */ goog.string.subs = function(str, var_args) { // This appears to be slow, but testing shows it compares more or less // equivalent to the regex.exec method. for (var i = 1; i < arguments.length; i++) { // We cast to String in case an argument is a Function. Replacing $&, for // example, with $$$& stops the replace from subsituting the whole match // into the resultant string. $$$& in the first replace becomes $$& in the // second, which leaves $& in the resultant string. Also: // $$, $`, $', $n $nn var replacement = String(arguments[i]).replace(/\$/g, '$$$$'); str = str.replace(/\%s/, replacement); } return str; }; /** * Converts multiple whitespace chars (spaces, non-breaking-spaces, new lines * and tabs) to a single space, and strips leading and trailing whitespace. * @param {string} str Input string. 
* @return {string} A copy of {@code str} with collapsed whitespace. */ goog.string.collapseWhitespace = function(str) { // Since IE doesn't include non-breaking-space (0xa0) in their \s character // class (as required by section 7.2 of the ECMAScript spec), we explicitly // include it in the regexp to enforce consistent cross-browser behavior. return str.replace(/[\s\xa0]+/g, ' ').replace(/^\s+|\s+$/g, ''); }; /** * Checks if a string is empty or contains only whitespaces. * @param {string} str The string to check. * @return {boolean} True if {@code str} is empty or whitespace only. */ goog.string.isEmpty = function(str) { // testing length == 0 first is actually slower in all browsers (about the // same in Opera). // Since IE doesn't include non-breaking-space (0xa0) in their \s character // class (as required by section 7.2 of the ECMAScript spec), we explicitly // include it in the regexp to enforce consistent cross-browser behavior. return /^[\s\xa0]*$/.test(str); }; /** * Checks if a string is null, undefined, empty or contains only whitespaces. * @param {*} str The string to check. * @return {boolean} True if{@code str} is null, undefined, empty, or * whitespace only. */ goog.string.isEmptySafe = function(str) { return goog.string.isEmpty(goog.string.makeSafe(str)); }; /** * Checks if a string is all breaking whitespace. * @param {string} str The string to check. * @return {boolean} Whether the string is all breaking whitespace. */ goog.string.isBreakingWhitespace = function(str) { return !/[^\t\n\r ]/.test(str); }; /** * Checks if a string contains all letters. * @param {string} str string to check. * @return {boolean} True if {@code str} consists entirely of letters. */ goog.string.isAlpha = function(str) { return !/[^a-zA-Z]/.test(str); }; /** * Checks if a string contains only numbers. * @param {*} str string to check. If not a string, it will be * casted to one. * @return {boolean} True if {@code str} is numeric. 
*/ goog.string.isNumeric = function(str) { return !/[^0-9]/.test(str); }; /** * Checks if a string contains only numbers or letters. * @param {string} str string to check. * @return {boolean} True if {@code str} is alphanumeric. */ goog.string.isAlphaNumeric = function(str) { return !/[^a-zA-Z0-9]/.test(str); }; /** * Checks if a character is a space character. * @param {string} ch Character to check. * @return {boolean} True if {code ch} is a space. */ goog.string.isSpace = function(ch) { return ch == ' '; }; /** * Checks if a character is a valid unicode character. * @param {string} ch Character to check. * @return {boolean} True if {code ch} is a valid unicode character. */ goog.string.isUnicodeChar = function(ch) { return ch.length == 1 && ch >= ' ' && ch <= '~' || ch >= '\u0080' && ch <= '\uFFFD'; }; /** * Takes a string and replaces newlines with a space. Multiple lines are * replaced with a single space. * @param {string} str The string from which to strip newlines. * @return {string} A copy of {@code str} stripped of newlines. */ goog.string.stripNewlines = function(str) { return str.replace(/(\r\n|\r|\n)+/g, ' '); }; /** * Replaces Windows and Mac new lines with unix style: \r or \r\n with \n. * @param {string} str The string to in which to canonicalize newlines. * @return {string} {@code str} A copy of {@code} with canonicalized newlines. */ goog.string.canonicalizeNewlines = function(str) { return str.replace(/(\r\n|\r|\n)/g, '\n'); }; /** * Normalizes whitespace in a string, replacing all whitespace chars with * a space. * @param {string} str The string in which to normalize whitespace. * @return {string} A copy of {@code str} with all whitespace normalized. */ goog.string.normalizeWhitespace = function(str) { return str.replace(/\xa0|\s/g, ' '); }; /** * Normalizes spaces in a string, replacing all consecutive spaces and tabs * with a single space. Replaces non-breaking space with a space. * @param {string} str The string in which to normalize spaces. 
* @return {string} A copy of {@code str} with all consecutive spaces and tabs * replaced with a single space. */ goog.string.normalizeSpaces = function(str) { return str.replace(/\xa0|[ \t]+/g, ' '); }; /** * Removes the breaking spaces from the left and right of the string and * collapses the sequences of breaking spaces in the middle into single spaces. * The original and the result strings render the same way in HTML. * @param {string} str A string in which to collapse spaces. * @return {string} Copy of the string with normalized breaking spaces. */ goog.string.collapseBreakingSpaces = function(str) { return str.replace(/[\t\r\n ]+/g, ' ').replace( /^[\t\r\n ]+|[\t\r\n ]+$/g, ''); }; /** * Trims white spaces to the left and right of a string. * @param {string} str The string to trim. * @return {string} A trimmed copy of {@code str}. */ goog.string.trim = function(str) { // Since IE doesn't include non-breaking-space (0xa0) in their \s character // class (as required by section 7.2 of the ECMAScript spec), we explicitly // include it in the regexp to enforce consistent cross-browser behavior. return str.replace(/^[\s\xa0]+|[\s\xa0]+$/g, ''); }; /** * Trims whitespaces at the left end of a string. * @param {string} str The string to left trim. * @return {string} A trimmed copy of {@code str}. */ goog.string.trimLeft = function(str) { // Since IE doesn't include non-breaking-space (0xa0) in their \s character // class (as required by section 7.2 of the ECMAScript spec), we explicitly // include it in the regexp to enforce consistent cross-browser behavior. return str.replace(/^[\s\xa0]+/, ''); }; /** * Trims whitespaces at the right end of a string. * @param {string} str The string to right trim. * @return {string} A trimmed copy of {@code str}. 
*/ goog.string.trimRight = function(str) { // Since IE doesn't include non-breaking-space (0xa0) in their \s character // class (as required by section 7.2 of the ECMAScript spec), we explicitly // include it in the regexp to enforce consistent cross-browser behavior. return str.replace(/[\s\xa0]+$/, ''); }; /** * A string comparator that ignores case. * -1 = str1 less than str2 * 0 = str1 equals str2 * 1 = str1 greater than str2 * * @param {string} str1 The string to compare. * @param {string} str2 The string to compare {@code str1} to. * @return {number} The comparator result, as described above. */ goog.string.caseInsensitiveCompare = function(str1, str2) { var test1 = String(str1).toLowerCase(); var test2 = String(str2).toLowerCase(); if (test1 < test2) { return -1; } else if (test1 == test2) { return 0; } else { return 1; } }; /** * Regular expression used for splitting a string into substrings of fractional * numbers, integers, and non-numeric characters. * @type {RegExp} * @private */ goog.string.numerateCompareRegExp_ = /(\.\d+)|(\d+)|(\D+)/g; /** * String comparison function that handles numbers in a way humans might expect. * Using this function, the string "File 2.jpg" sorts before "File 10.jpg". The * comparison is mostly case-insensitive, though strings that are identical * except for case are sorted with the upper-case strings before lower-case. * * This comparison function is significantly slower (about 500x) than either * the default or the case-insensitive compare. It should not be used in * time-critical code, but should be fast enough to sort several hundred short * strings (like filenames) with a reasonable delay. * * @param {string} str1 The string to compare in a numerically sensitive way. * @param {string} str2 The string to compare {@code str1} to. * @return {number} less than 0 if str1 < str2, 0 if str1 == str2, greater than * 0 if str1 > str2. 
*/ goog.string.numerateCompare = function(str1, str2) { if (str1 == str2) { return 0; } if (!str1) { return -1; } if (!str2) { return 1; } // Using match to split the entire string ahead of time turns out to be faster // for most inputs than using RegExp.exec or iterating over each character. var tokens1 = str1.toLowerCase().match(goog.string.numerateCompareRegExp_); var tokens2 = str2.toLowerCase().match(goog.string.numerateCompareRegExp_); var count = Math.min(tokens1.length, tokens2.length); for (var i = 0; i < count; i++) { var a = tokens1[i]; var b = tokens2[i]; // Compare pairs of tokens, returning if one token sorts before the other. if (a != b) { // Only if both tokens are integers is a special comparison required. // Decimal numbers are sorted as strings (e.g., '.09' < '.1'). var num1 = parseInt(a, 10); if (!isNaN(num1)) { var num2 = parseInt(b, 10); if (!isNaN(num2) && num1 - num2) { return num1 - num2; } } return a < b ? -1 : 1; } } // If one string is a substring of the other, the shorter string sorts first. if (tokens1.length != tokens2.length) { return tokens1.length - tokens2.length; } // The two strings must be equivalent except for case (perfect equality is // tested at the head of the function.) Revert to default ASCII-betical string // comparison to stablize the sort. return str1 < str2 ? -1 : 1; }; /** * URL-encodes a string * @param {*} str The string to url-encode. * @return {string} An encoded copy of {@code str} that is safe for urls. * Note that '#', ':', and other characters used to delimit portions * of URLs *will* be encoded. */ goog.string.urlEncode = function(str) { return encodeURIComponent(String(str)); }; /** * URL-decodes the string. We need to specially handle '+'s because * the javascript library doesn't convert them to spaces. * @param {string} str The string to url decode. * @return {string} The decoded {@code str}. 
*/ goog.string.urlDecode = function(str) { return decodeURIComponent(str.replace(/\+/g, ' ')); }; /** * Converts \n to <br>s or <br />s. * @param {string} str The string in which to convert newlines. * @param {boolean=} opt_xml Whether to use XML compatible tags. * @return {string} A copy of {@code str} with converted newlines. */ goog.string.newLineToBr = function(str, opt_xml) { return str.replace(/(\r\n|\r|\n)/g, opt_xml ? '<br />' : '<br>'); }; /** * Escape double quote '"' characters in addition to '&', '<', and '>' so that a * string can be included in an HTML tag attribute value within double quotes. * * It should be noted that > doesn't need to be escaped for the HTML or XML to * be valid, but it has been decided to escape it for consistency with other * implementations. * * NOTE(user): * HtmlEscape is often called during the generation of large blocks of HTML. * Using statics for the regular expressions and strings is an optimization * that can more than half the amount of time IE spends in this function for * large apps, since strings and regexes both contribute to GC allocations. * * Testing for the presence of a character before escaping increases the number * of function calls, but actually provides a speed increase for the average * case -- since the average case often doesn't require the escaping of all 4 * characters and indexOf() is much cheaper than replace(). * The worst case does suffer slightly from the additional calls, therefore the * opt_isLikelyToContainHtmlChars option has been included for situations * where all 4 HTML entities are very likely to be present and need escaping. 
* * Some benchmarks (times tended to fluctuate +-0.05ms): * FireFox IE6 * (no chars / average (mix of cases) / all 4 chars) * no checks 0.13 / 0.22 / 0.22 0.23 / 0.53 / 0.80 * indexOf 0.08 / 0.17 / 0.26 0.22 / 0.54 / 0.84 * indexOf + re test 0.07 / 0.17 / 0.28 0.19 / 0.50 / 0.85 * * An additional advantage of checking if replace actually needs to be called * is a reduction in the number of object allocations, so as the size of the * application grows the difference between the various methods would increase. * * @param {string} str string to be escaped. * @param {boolean=} opt_isLikelyToContainHtmlChars Don't perform a check to see * if the character needs replacing - use this option if you expect each of * the characters to appear often. Leave false if you expect few html * characters to occur in your strings, such as if you are escaping HTML. * @return {string} An escaped copy of {@code str}. */ goog.string.htmlEscape = function(str, opt_isLikelyToContainHtmlChars) { if (opt_isLikelyToContainHtmlChars) { return str.replace(goog.string.amperRe_, '&amp;') .replace(goog.string.ltRe_, '&lt;') .replace(goog.string.gtRe_, '&gt;') .replace(goog.string.quotRe_, '&quot;'); } else { // quick test helps in the case when there are no chars to replace, in // worst case this makes barely a difference to the time taken if (!goog.string.allRe_.test(str)) return str; // str.indexOf is faster than regex.test in this case if (str.indexOf('&') != -1) { str = str.replace(goog.string.amperRe_, '&amp;'); } if (str.indexOf('<') != -1) { str = str.replace(goog.string.ltRe_, '&lt;'); } if (str.indexOf('>') != -1) { str = str.replace(goog.string.gtRe_, '&gt;'); } if (str.indexOf('"') != -1) { str = str.replace(goog.string.quotRe_, '&quot;'); } return str; } }; /** * Regular expression that matches an ampersand, for use in escaping. * @type {RegExp} * @private */ goog.string.amperRe_ = /&/g; /** * Regular expression that matches a less than sign, for use in escaping. 
* @type {RegExp} * @private */ goog.string.ltRe_ = /</g; /** * Regular expression that matches a greater than sign, for use in escaping. * @type {RegExp} * @private */ goog.string.gtRe_ = />/g; /** * Regular expression that matches a double quote, for use in escaping. * @type {RegExp} * @private */ goog.string.quotRe_ = /\"/g; /** * Regular expression that matches any character that needs to be escaped. * @type {RegExp} * @private */ goog.string.allRe_ = /[&<>\"]/; /** * Unescapes an HTML string. * * @param {string} str The string to unescape. * @return {string} An unescaped copy of {@code str}. */ goog.string.unescapeEntities = function(str) { if (goog.string.contains(str, '&')) { // We are careful not to use a DOM if we do not have one. We use the [] // notation so that the JSCompiler will not complain about these objects and // fields in the case where we have no DOM. if ('document' in goog.global) { return goog.string.unescapeEntitiesUsingDom_(str); } else { // Fall back on pure XML entities return goog.string.unescapePureXmlEntities_(str); } } return str; }; /** * Unescapes an HTML string using a DOM to resolve non-XML, non-numeric * entities. This function is XSS-safe and whitespace-preserving. * @private * @param {string} str The string to unescape. * @return {string} The unescaped {@code str} string. */ goog.string.unescapeEntitiesUsingDom_ = function(str) { var seen = {'&amp;': '&', '&lt;': '<', '&gt;': '>', '&quot;': '"'}; var div = document.createElement('div'); // Match as many valid entity characters as possible. If the actual entity // happens to be shorter, it will still work as innerHTML will return the // trailing characters unchanged. Since the entity characters do not include // open angle bracket, there is no chance of XSS from the innerHTML use. // Since no whitespace is passed to innerHTML, whitespace is preserved. return str.replace(goog.string.HTML_ENTITY_PATTERN_, function(s, entity) { // Check for cached entity. 
var value = seen[s]; if (value) { return value; } // Check for numeric entity. if (entity.charAt(0) == '#') { // Prefix with 0 so that hex entities (e.g. &#x10) parse as hex numbers. var n = Number('0' + entity.substr(1)); if (!isNaN(n)) { value = String.fromCharCode(n); } } // Fall back to innerHTML otherwise. if (!value) { // Append a non-entity character to avoid a bug in Webkit that parses // an invalid entity at the end of innerHTML text as the empty string. div.innerHTML = s + ' '; // Then remove the trailing character from the result. value = div.firstChild.nodeValue.slice(0, -1); } // Cache and return. return seen[s] = value; }); }; /** * Unescapes XML entities. * @private * @param {string} str The string to unescape. * @return {string} An unescaped copy of {@code str}. */ goog.string.unescapePureXmlEntities_ = function(str) { return str.replace(/&([^;]+);/g, function(s, entity) { switch (entity) { case 'amp': return '&'; case 'lt': return '<'; case 'gt': return '>'; case 'quot': return '"'; default: if (entity.charAt(0) == '#') { // Prefix with 0 so that hex entities (e.g. &#x10) parse as hex. var n = Number('0' + entity.substr(1)); if (!isNaN(n)) { return String.fromCharCode(n); } } // For invalid entities we just return the entity return s; } }); }; /** * Regular expression that matches an HTML entity. * See also HTML5: Tokenization / Tokenizing character references. * @private * @type {!RegExp} */ goog.string.HTML_ENTITY_PATTERN_ = /&([^;\s<&]+);?/g; /** * Do escaping of whitespace to preserve spatial formatting. We use character * entity #160 to make it safer for xml. * @param {string} str The string in which to escape whitespace. * @param {boolean=} opt_xml Whether to use XML compatible tags. * @return {string} An escaped copy of {@code str}. */ goog.string.whitespaceEscape = function(str, opt_xml) { return goog.string.newLineToBr(str.replace(/ /g, ' &#160;'), opt_xml); }; /** * Strip quote characters around a string. 
The second argument is a string of * characters to treat as quotes. This can be a single character or a string of * multiple character and in that case each of those are treated as possible * quote characters. For example: * * <pre> * goog.string.stripQuotes('"abc"', '"`') --> 'abc' * goog.string.stripQuotes('`abc`', '"`') --> 'abc' * </pre> * * @param {string} str The string to strip. * @param {string} quoteChars The quote characters to strip. * @return {string} A copy of {@code str} without the quotes. */ goog.string.stripQuotes = function(str, quoteChars) { var length = quoteChars.length; for (var i = 0; i < length; i++) { var quoteChar = length == 1 ? quoteChars : quoteChars.charAt(i); if (str.charAt(0) == quoteChar && str.charAt(str.length - 1) == quoteChar) { return str.substring(1, str.length - 1); } } return str; }; /** * Truncates a string to a certain length and adds '...' if necessary. The * length also accounts for the ellipsis, so a maximum length of 10 and a string * 'Hello World!' produces 'Hello W...'. * @param {string} str The string to truncate. * @param {number} chars Max number of characters. * @param {boolean=} opt_protectEscapedCharacters Whether to protect escaped * characters from being cut off in the middle. * @return {string} The truncated {@code str} string. */ goog.string.truncate = function(str, chars, opt_protectEscapedCharacters) { if (opt_protectEscapedCharacters) { str = goog.string.unescapeEntities(str); } if (str.length > chars) { str = str.substring(0, chars - 3) + '...'; } if (opt_protectEscapedCharacters) { str = goog.string.htmlEscape(str); } return str; }; /** * Truncate a string in the middle, adding "..." if necessary, * and favoring the beginning of the string. * @param {string} str The string to truncate the middle of. * @param {number} chars Max number of characters. * @param {boolean=} opt_protectEscapedCharacters Whether to protect escaped * characters from being cutoff in the middle. 
* @param {number=} opt_trailingChars Optional number of trailing characters to * leave at the end of the string, instead of truncating as close to the * middle as possible. * @return {string} A truncated copy of {@code str}. */ goog.string.truncateMiddle = function(str, chars, opt_protectEscapedCharacters, opt_trailingChars) { if (opt_protectEscapedCharacters) { str = goog.string.unescapeEntities(str); } if (opt_trailingChars && str.length > chars) { if (opt_trailingChars > chars) { opt_trailingChars = chars; } var endPoint = str.length - opt_trailingChars; var startPoint = chars - opt_trailingChars; str = str.substring(0, startPoint) + '...' + str.substring(endPoint); } else if (str.length > chars) { // Favor the beginning of the string: var half = Math.floor(chars / 2); var endPos = str.length - half; half += chars % 2; str = str.substring(0, half) + '...' + str.substring(endPos); } if (opt_protectEscapedCharacters) { str = goog.string.htmlEscape(str); } return str; }; /** * Special chars that need to be escaped for goog.string.quote. * @private * @type {Object} */ goog.string.specialEscapeChars_ = { '\0': '\\0', '\b': '\\b', '\f': '\\f', '\n': '\\n', '\r': '\\r', '\t': '\\t', '\x0B': '\\x0B', // '\v' is not supported in JScript '"': '\\"', '\\': '\\\\' }; /** * Character mappings used internally for goog.string.escapeChar. * @private * @type {Object} */ goog.string.jsEscapeCache_ = { '\'': '\\\'' }; /** * Encloses a string in double quotes and escapes characters so that the * string is a valid JS string. * @param {string} s The string to quote. * @return {string} A copy of {@code s} surrounded by double quotes. */ goog.string.quote = function(s) { s = String(s); if (s.quote) { return s.quote(); } else { var sb = ['"']; for (var i = 0; i < s.length; i++) { var ch = s.charAt(i); var cc = ch.charCodeAt(0); sb[i + 1] = goog.string.specialEscapeChars_[ch] || ((cc > 31 && cc < 127) ? 
ch : goog.string.escapeChar(ch)); } sb.push('"'); return sb.join(''); } }; /** * Takes a string and returns the escaped string for that character. * @param {string} str The string to escape. * @return {string} An escaped string representing {@code str}. */ goog.string.escapeString = function(str) { var sb = []; for (var i = 0; i < str.length; i++) { sb[i] = goog.string.escapeChar(str.charAt(i)); } return sb.join(''); }; /** * Takes a character and returns the escaped string for that character. For * example escapeChar(String.fromCharCode(15)) -> "\\x0E". * @param {string} c The character to escape. * @return {string} An escaped string representing {@code c}. */ goog.string.escapeChar = function(c) { if (c in goog.string.jsEscapeCache_) { return goog.string.jsEscapeCache_[c]; } if (c in goog.string.specialEscapeChars_) { return goog.string.jsEscapeCache_[c] = goog.string.specialEscapeChars_[c]; } var rv = c; var cc = c.charCodeAt(0); if (cc > 31 && cc < 127) { rv = c; } else { // tab is 9 but handled above if (cc < 256) { rv = '\\x'; if (cc < 16 || cc > 256) { rv += '0'; } } else { rv = '\\u'; if (cc < 4096) { // \u1000 rv += '0'; } } rv += cc.toString(16).toUpperCase(); } return goog.string.jsEscapeCache_[c] = rv; }; /** * Takes a string and creates a map (Object) in which the keys are the * characters in the string. The value for the key is set to true. You can * then use goog.object.map or goog.array.map to change the values. * @param {string} s The string to build the map from. * @return {Object} The map of characters used. */ // TODO(arv): It seems like we should have a generic goog.array.toMap. But do // we want a dependency on goog.array in goog.string? goog.string.toMap = function(s) { var rv = {}; for (var i = 0; i < s.length; i++) { rv[s.charAt(i)] = true; } return rv; }; /** * Checks whether a string contains a given substring. * @param {string} s The string to test. * @param {string} ss The substring to test for. 
* @return {boolean} True if {@code s} contains {@code ss}. */ goog.string.contains = function(s, ss) { return s.indexOf(ss) != -1; }; /** * Returns the non-overlapping occurrences of ss in s. * If either s or ss evalutes to false, then returns zero. * @param {string} s The string to look in. * @param {string} ss The string to look for. * @return {number} Number of occurrences of ss in s. */ goog.string.countOf = function(s, ss) { return s && ss ? s.split(ss).length - 1 : 0; }; /** * Removes a substring of a specified length at a specific * index in a string. * @param {string} s The base string from which to remove. * @param {number} index The index at which to remove the substring. * @param {number} stringLength The length of the substring to remove. * @return {string} A copy of {@code s} with the substring removed or the full * string if nothing is removed or the input is invalid. */ goog.string.removeAt = function(s, index, stringLength) { var resultStr = s; // If the index is greater or equal to 0 then remove substring if (index >= 0 && index < s.length && stringLength > 0) { resultStr = s.substr(0, index) + s.substr(index + stringLength, s.length - index - stringLength); } return resultStr; }; /** * Removes the first occurrence of a substring from a string. * @param {string} s The base string from which to remove. * @param {string} ss The string to remove. * @return {string} A copy of {@code s} with {@code ss} removed or the full * string if nothing is removed. */ goog.string.remove = function(s, ss) { var re = new RegExp(goog.string.regExpEscape(ss), ''); return s.replace(re, ''); }; /** * Removes all occurrences of a substring from a string. * @param {string} s The base string from which to remove. * @param {string} ss The string to remove. * @return {string} A copy of {@code s} with {@code ss} removed or the full * string if nothing is removed. 
*/ goog.string.removeAll = function(s, ss) { var re = new RegExp(goog.string.regExpEscape(ss), 'g'); return s.replace(re, ''); }; /** * Escapes characters in the string that are not safe to use in a RegExp. * @param {*} s The string to escape. If not a string, it will be casted * to one. * @return {string} A RegExp safe, escaped copy of {@code s}. */ goog.string.regExpEscape = function(s) { return String(s).replace(/([-()\[\]{}+?*.$\^|,:#<!\\])/g, '\\$1'). replace(/\x08/g, '\\x08'); }; /** * Repeats a string n times. * @param {string} string The string to repeat. * @param {number} length The number of times to repeat. * @return {string} A string containing {@code length} repetitions of * {@code string}. */ goog.string.repeat = function(string, length) { return new Array(length + 1).join(string); }; /** * Pads number to given length and optionally rounds it to a given precision. * For example: * <pre>padNumber(1.25, 2, 3) -> '01.250' * padNumber(1.25, 2) -> '01.25' * padNumber(1.25, 2, 1) -> '01.3' * padNumber(1.25, 0) -> '1.25'</pre> * * @param {number} num The number to pad. * @param {number} length The desired length. * @param {number=} opt_precision The desired precision. * @return {string} {@code num} as a string with the given options. */ goog.string.padNumber = function(num, length, opt_precision) { var s = goog.isDef(opt_precision) ? num.toFixed(opt_precision) : String(num); var index = s.indexOf('.'); if (index == -1) { index = s.length; } return goog.string.repeat('0', Math.max(0, length - index)) + s; }; /** * Returns a string representation of the given object, with * null and undefined being returned as the empty string. * * @param {*} obj The object to convert. * @return {string} A string representation of the {@code obj}. */ goog.string.makeSafe = function(obj) { return obj == null ? '' : String(obj); }; /** * Concatenates string expressions. This is useful * since some browsers are very inefficient when it comes to using plus to * concat strings. 
Be careful when using null and undefined here since * these will not be included in the result. If you need to represent these * be sure to cast the argument to a String first. * For example: * <pre>buildString('a', 'b', 'c', 'd') -> 'abcd' * buildString(null, undefined) -> '' * </pre> * @param {...*} var_args A list of strings to concatenate. If not a string, * it will be casted to one. * @return {string} The concatenation of {@code var_args}. */ goog.string.buildString = function(var_args) { return Array.prototype.join.call(arguments, ''); }; /** * Returns a string with at least 64-bits of randomness. * * Doesn't trust Javascript's random function entirely. Uses a combination of * random and current timestamp, and then encodes the string in base-36 to * make it shorter. * * @return {string} A random string, e.g. sn1s7vb4gcic. */ goog.string.getRandomString = function() { var x = 2147483648; return Math.floor(Math.random() * x).toString(36) + Math.abs(Math.floor(Math.random() * x) ^ goog.now()).toString(36); }; /** * Compares two version numbers. * * @param {string|number} version1 Version of first item. * @param {string|number} version2 Version of second item. * * @return {number} 1 if {@code version1} is higher. * 0 if arguments are equal. * -1 if {@code version2} is higher. */ goog.string.compareVersions = function(version1, version2) { var order = 0; // Trim leading and trailing whitespace and split the versions into // subversions. var v1Subs = goog.string.trim(String(version1)).split('.'); var v2Subs = goog.string.trim(String(version2)).split('.'); var subCount = Math.max(v1Subs.length, v2Subs.length); // Iterate over the subversions, as long as they appear to be equivalent. for (var subIdx = 0; order == 0 && subIdx < subCount; subIdx++) { var v1Sub = v1Subs[subIdx] || ''; var v2Sub = v2Subs[subIdx] || ''; // Split the subversions into pairs of numbers and qualifiers (like 'b'). 
// Two different RegExp objects are needed because they are both using // the 'g' flag. var v1CompParser = new RegExp('(\\d*)(\\D*)', 'g'); var v2CompParser = new RegExp('(\\d*)(\\D*)', 'g'); do { var v1Comp = v1CompParser.exec(v1Sub) || ['', '', '']; var v2Comp = v2CompParser.exec(v2Sub) || ['', '', '']; // Break if there are no more matches. if (v1Comp[0].length == 0 && v2Comp[0].length == 0) { break; } // Parse the numeric part of the subversion. A missing number is // equivalent to 0. var v1CompNum = v1Comp[1].length == 0 ? 0 : parseInt(v1Comp[1], 10); var v2CompNum = v2Comp[1].length == 0 ? 0 : parseInt(v2Comp[1], 10); // Compare the subversion components. The number has the highest // precedence. Next, if the numbers are equal, a subversion without any // qualifier is always higher than a subversion with any qualifier. Next, // the qualifiers are compared as strings. order = goog.string.compareElements_(v1CompNum, v2CompNum) || goog.string.compareElements_(v1Comp[2].length == 0, v2Comp[2].length == 0) || goog.string.compareElements_(v1Comp[2], v2Comp[2]); // Stop as soon as an inequality is discovered. } while (order == 0); } return order; }; /** * Compares elements of a version number. * * @param {string|number|boolean} left An element from a version number. * @param {string|number|boolean} right An element from a version number. * * @return {number} 1 if {@code left} is higher. * 0 if arguments are equal. * -1 if {@code right} is higher. * @private */ goog.string.compareElements_ = function(left, right) { if (left < right) { return -1; } else if (left > right) { return 1; } return 0; }; /** * Maximum value of #goog.string.hashCode, exclusive. 2^32. * @type {number} * @private */ goog.string.HASHCODE_MAX_ = 0x100000000; /** * String hash function similar to java.lang.String.hashCode(). * The hash code for a string is computed as * s[0] * 31 ^ (n - 1) + s[1] * 31 ^ (n - 2) + ... 
+ s[n - 1], * where s[i] is the ith character of the string and n is the length of * the string. We mod the result to make it between 0 (inclusive) and 2^32 * (exclusive). * @param {string} str A string. * @return {number} Hash value for {@code str}, between 0 (inclusive) and 2^32 * (exclusive). The empty string returns 0. */ goog.string.hashCode = function(str) { var result = 0; for (var i = 0; i < str.length; ++i) { result = 31 * result + str.charCodeAt(i); // Normalize to 4 byte range, 0 ... 2^32. result %= goog.string.HASHCODE_MAX_; } return result; }; /** * The most recent unique ID. |0 is equivalent to Math.floor in this case. * @type {number} * @private */ goog.string.uniqueStringCounter_ = Math.random() * 0x80000000 | 0; /** * Generates and returns a string which is unique in the current document. * This is useful, for example, to create unique IDs for DOM elements. * @return {string} A unique id. */ goog.string.createUniqueString = function() { return 'goog_' + goog.string.uniqueStringCounter_++; }; /** * Converts the supplied string to a number, which may be Ininity or NaN. * This function strips whitespace: (toNumber(' 123') === 123) * This function accepts scientific notation: (toNumber('1e1') === 10) * * This is better than Javascript's built-in conversions because, sadly: * (Number(' ') === 0) and (parseFloat('123a') === 123) * * @param {string} str The string to convert. * @return {number} The number the supplied string represents, or NaN. */ goog.string.toNumber = function(str) { var num = Number(str); if (num == 0 && goog.string.isEmpty(str)) { return NaN; } return num; }; /** <<<<<<< HEAD ======= * Returns whether the given string is lower camel case (e.g. "isFooBar"). * * Note that this assumes the string is entirely letters. * @see http://en.wikipedia.org/wiki/CamelCase#Variations_and_synonyms * * @param {string} str String to test. * @return {boolean} Whether the string is lower camel case. 
*/ goog.string.isLowerCamelCase = function(str) { return /^[a-z]+([A-Z][a-z]*)*$/.test(str); }; /** * Returns whether the given string is upper camel case (e.g. "FooBarBaz"). * * Note that this assumes the string is entirely letters. * @see http://en.wikipedia.org/wiki/CamelCase#Variations_and_synonyms * * @param {string} str String to test. * @return {boolean} Whether the string is upper camel case. */ goog.string.isUpperCamelCase = function(str) { return /^([A-Z][a-z]*)+$/.test(str); }; /** >>>>>>> newgitrepo * Converts a string from selector-case to camelCase (e.g. from * "multi-part-string" to "multiPartString"), useful for converting * CSS selectors and HTML dataset keys to their equivalent JS properties. * @param {string} str The string in selector-case form. * @return {string} The string in camelCase form. */ goog.string.toCamelCase = function(str) { return String(str).replace(/\-([a-z])/g, function(all, match) { return match.toUpperCase(); }); }; /** * Converts a string from camelCase to selector-case (e.g. from * "multiPartString" to "multi-part-string"), useful for converting JS * style and dataset properties to equivalent CSS selectors and HTML keys. * @param {string} str The string in camelCase form. * @return {string} The string in selector-case form. */ goog.string.toSelectorCase = function(str) { return String(str).replace(/([A-Z])/g, '-$1').toLowerCase(); }; /** * Converts a string into TitleCase. First character of the string is always * capitalized in addition to the first letter of every subsequent word. * Words are delimited by one or more whitespaces by default. Custom delimiters * can optionally be specified to replace the default, which doesn't preserve * whitespace delimiters and instead must be explicitly included if needed. 
 *
 * Default delimiter => " ":
 *   goog.string.toTitleCase('oneTwoThree')    => 'OneTwoThree'
 *   goog.string.toTitleCase('one two three')  => 'One Two Three'
 *   goog.string.toTitleCase('  one   two   ') => '  One   Two   '
 *   goog.string.toTitleCase('one_two_three')  => 'One_two_three'
 *   goog.string.toTitleCase('one-two-three')  => 'One-two-three'
 *
 * Custom delimiter => "_-.":
 *   goog.string.toTitleCase('oneTwoThree', '_-.')       => 'OneTwoThree'
 *   goog.string.toTitleCase('one two three', '_-.')     => 'One two three'
 *   goog.string.toTitleCase('  one   two   ', '_-.')    => '  one   two   '
 *   goog.string.toTitleCase('one_two_three', '_-.')     => 'One_Two_Three'
 *   goog.string.toTitleCase('one-two-three', '_-.')     => 'One-Two-Three'
 *   goog.string.toTitleCase('one...two...three', '_-.') => 'One...Two...Three'
 *   goog.string.toTitleCase('one. two. three', '_-.')   => 'One. two. three'
 *   goog.string.toTitleCase('one-two.three', '_-.')     => 'One-Two.Three'
 *
 * @param {string} str String value in camelCase form.
 * @param {string=} opt_delimiters Custom delimiter character set used to
 *     distinguish words in the string value. Each character represents a
 *     single delimiter. When provided, default whitespace delimiter is
 *     overridden and must be explicitly included if needed.
 * @return {string} String value in TitleCase form.
 */
goog.string.toTitleCase = function(str, opt_delimiters) {
  var delimiters = goog.isString(opt_delimiters) ?
      goog.string.regExpEscape(opt_delimiters) : '\\s';

  // For IE8, we need to prevent using an empty character set. Otherwise,
  // incorrect matching will occur.
  delimiters = delimiters ? '|[' + delimiters + ']+' : '';

  var regexp = new RegExp('(^' + delimiters + ')([a-z])', 'g');
  return str.replace(regexp, function(all, p1, p2) {
    return p1 + p2.toUpperCase();
  });
};


/**
 * Parse a string in decimal or hexadecimal ('0xFFFF') form.
 *
 * To parse a particular radix, please use parseInt(string, radix) directly. See
 * https://developer.mozilla.org/en/JavaScript/Reference/Global_Objects/parseInt
 *
 * This is a wrapper for the built-in parseInt function that will only parse
 * numbers as base 10 or base 16. Some JS implementations assume strings
 * starting with "0" are intended to be octal. ES3 allowed but discouraged
 * this behavior. ES5 forbids it. This function emulates the ES5 behavior.
 *
 * For more information, see Mozilla JS Reference: http://goo.gl/8RiFj
 *
 * @param {string|number|null|undefined} value The value to be parsed.
 * @return {number} The number, parsed. If the string failed to parse, this
 *     will be NaN.
 */
goog.string.parseInt = function(value) {
  // Force finite numbers to strings.
  if (isFinite(value)) {
    value = String(value);
  }

  if (goog.isString(value)) {
    // If the string starts with '0x' or '-0x', parse as hex.
    return /^\s*-?0x/i.test(value) ? parseInt(value, 16) : parseInt(value, 10);
  }

  return NaN;
};
apache-2.0
dturner-tw/pants
src/python/pants/goal/context.py
12904
# coding=utf-8 # Copyright 2014 Pants project contributors (see CONTRIBUTORS.md). # Licensed under the Apache License, Version 2.0 (see LICENSE). from __future__ import (absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement) import os import sys from collections import defaultdict from contextlib import contextmanager from twitter.common.collections import OrderedSet from pants.base.build_environment import get_buildroot, get_scm from pants.base.worker_pool import SubprocPool from pants.base.workunit import WorkUnitLabel from pants.build_graph.build_graph import BuildGraph from pants.build_graph.target import Target from pants.goal.products import Products from pants.goal.workspace import ScmWorkspace from pants.process.pidlock import OwnerPrintingPIDLockFile from pants.reporting.report import Report from pants.source.source_root import SourceRootConfig class Context(object): """Contains the context for a single run of pants. Task implementations can access configuration data from pants.ini and any flags they have exposed here as well as information about the targets involved in the run. Advanced uses of the context include adding new targets to it for upstream or downstream goals to operate on and mapping of products a goal creates to the targets the products are associated with. 
""" class Log(object): """A logger facade that logs into the pants reporting framework.""" def __init__(self, run_tracker): self._run_tracker = run_tracker def debug(self, *msg_elements): self._run_tracker.log(Report.DEBUG, *msg_elements) def info(self, *msg_elements): self._run_tracker.log(Report.INFO, *msg_elements) def warn(self, *msg_elements): self._run_tracker.log(Report.WARN, *msg_elements) def error(self, *msg_elements): self._run_tracker.log(Report.ERROR, *msg_elements) def fatal(self, *msg_elements): self._run_tracker.log(Report.FATAL, *msg_elements) # TODO: Figure out a more structured way to construct and use context than this big flat # repository of attributes? def __init__(self, options, run_tracker, target_roots, requested_goals=None, target_base=None, build_graph=None, build_file_parser=None, address_mapper=None, console_outstream=None, scm=None, workspace=None, spec_excludes=None, invalidation_report=None): self._options = options self.build_graph = build_graph self.build_file_parser = build_file_parser self.address_mapper = address_mapper self.run_tracker = run_tracker self._log = self.Log(run_tracker) self._target_base = target_base or Target self._products = Products() self._buildroot = get_buildroot() self._source_roots = SourceRootConfig.global_instance().get_source_roots() self._lock = OwnerPrintingPIDLockFile(os.path.join(self._buildroot, '.pants.run')) self._java_sysprops = None # Computed lazily. 
    # Tail of the constructor: wire up run-scoped collaborators, falling back
    # to defaults (stdout console, auto-detected scm) when none are injected.
    self.requested_goals = requested_goals or []
    self._console_outstream = console_outstream or sys.stdout
    self._scm = scm or get_scm()
    # A workspace only makes sense when an scm was found/provided.
    self._workspace = workspace or (ScmWorkspace(self._scm) if self._scm else None)
    self._spec_excludes = spec_excludes
    self._replace_targets(target_roots)
    self._invalidation_report = invalidation_report

  @property
  def options(self):
    """Returns the new-style options."""
    return self._options

  @property
  def log(self):
    """Returns the preferred logger for goals to use."""
    return self._log

  @property
  def products(self):
    """Returns the Products manager for the current run."""
    return self._products

  @property
  def source_roots(self):
    """Returns the :class:`pants.source.source_root.SourceRoots` instance for the current run."""
    return self._source_roots

  @property
  def target_roots(self):
    """Returns the targets specified on the command line.

    This set is strictly a subset of all targets in play for the run as returned by self.targets().
    Note that for a command line invocation that uses wildcard selectors : or ::, the targets
    globbed by the wildcards are considered to be target roots.
    """
    return self._target_roots

  @property
  def console_outstream(self):
    """Returns the output stream to write console messages to."""
    return self._console_outstream

  @property
  def scm(self):
    """Returns the current workspace's scm, if any."""
    return self._scm

  @property
  def workspace(self):
    """Returns the current workspace, if any."""
    return self._workspace

  @property
  def spec_excludes(self):
    """Returns the spec excludes passed to this context, if any."""
    return self._spec_excludes

  @property
  def invalidation_report(self):
    """Returns the invalidation report passed to this context, if any."""
    return self._invalidation_report

  def __str__(self):
    ident = Target.identify(self.targets())
    return 'Context(id:{}, targets:{})'.format(ident, self.targets())

  def submit_background_work_chain(self, work_chain, parent_workunit_name=None):
    """Submits a chain of work for execution on the background worker pool.

    :param work_chain: The work chain to submit.
    :param parent_workunit_name: If given, a new workunit with this name is created under the
                                 background root workunit and the work runs under it; otherwise
                                 the work runs directly under the background root workunit.
    """
    background_root_workunit = self.run_tracker.get_background_root_workunit()
    if parent_workunit_name:
      # We have to keep this workunit alive until all its child work is done, so
      # we manipulate the context manually instead of using it as a contextmanager.
      # This is slightly funky, but the with-context usage is so pervasive and
      # useful elsewhere that it's worth the funkiness in this one place.
      workunit_parent_ctx = self.run_tracker.new_workunit_under_parent(
        name=parent_workunit_name, labels=[WorkUnitLabel.MULTITOOL],
        parent=background_root_workunit)
      workunit_parent = workunit_parent_ctx.__enter__()
      done_hook = lambda: workunit_parent_ctx.__exit__(None, None, None)
    else:
      workunit_parent = background_root_workunit  # Run directly under the root.
      done_hook = None
    self.run_tracker.background_worker_pool().submit_async_work_chain(
      work_chain,
      workunit_parent=workunit_parent,
      done_hook=done_hook)

  def background_worker_pool(self):
    """Returns the pool to which tasks can submit background work."""
    return self.run_tracker.background_worker_pool()

  def subproc_map(self, f, items):
    """Map function `f` over `items` in subprocesses and return the result.

    :param f: A multiproc-friendly (importable) work function.
    :param items: An iterable of pickleable arguments to f.
    """
    try:
      # Pool.map (and async_map().get() w/o timeout) can miss SIGINT.
      # See: http://stackoverflow.com/a/1408476, http://bugs.python.org/issue8844
      # Instead, we map_async(...), wait *with a timeout* until ready, then .get()
      # NB: in 2.x, wait() with timeout wakes up often to check, burning CPU. Oh well.
      res = SubprocPool.foreground().map_async(f, items)
      while not res.ready():
        res.wait(60)  # Repeatedly wait for up to a minute.
        if not res.ready():
          self.log.debug('subproc_map result still not ready...')
      return res.get()
    except KeyboardInterrupt:
      # Tear the pool down hard so Ctrl-C actually interrupts the run.
      SubprocPool.shutdown(True)
      raise

  @contextmanager
  def new_workunit(self, name, labels=None, cmd='', log_config=None):
    """Create a new workunit under the calling thread's current workunit."""
    with self.run_tracker.new_workunit(name=name, labels=labels, cmd=cmd,
                                       log_config=log_config) as workunit:
      yield workunit

  def acquire_lock(self):
    """Acquire the global lock for the root directory associated with this context.

    When a goal requires serialization, it will call this to acquire the lock.
    """
    if self.options.for_global_scope().lock:
      if not self._lock.i_am_locking():
        self._lock.acquire()

  def release_lock(self):
    """Release the global lock if it's held.

    Returns True if the lock was held before this call.
    """
    if not self._lock.i_am_locking():
      return False
    else:
      self._lock.release()
      return True

  def is_unlocked(self):
    """Whether the global lock object is actively holding the lock."""
    return not self._lock.i_am_locking()

  def _replace_targets(self, target_roots):
    # Replaces all targets in the context with the given roots and their transitive dependencies.
    #
    # If another task has already retrieved the current targets, mutable state may have been
    # initialized somewhere, making it now unsafe to replace targets. Thus callers of this method
    # must know what they're doing!
    #
    # TODO(John Sirois): This currently has 0 uses (outside ContextTest) in pantsbuild/pants and
    # only 1 remaining known use case in the Foursquare codebase that will be able to go away with
    # the post RoundEngine engine - kill the method at that time.
    self._target_roots = list(target_roots)

  def add_new_target(self, address, target_type, target_base=None, dependencies=None,
                     derived_from=None, **kwargs):
    """Creates a new target, adds it to the context and returns it.

    This method ensures the target resolves files against the given target_base, creating the
    directory if needed and registering a source root.
    """
    target_base = os.path.join(get_buildroot(), target_base or address.spec_path)
    if not os.path.exists(target_base):
      os.makedirs(target_base)
    # TODO: Adding source roots on the fly like this is yucky, but hopefully this
    # method will go away entirely under the new engine. It's primarily used for injecting
    # synthetic codegen targets, and that isn't how codegen will work in the future.
    if not self.source_roots.find_by_path(target_base):
      self.source_roots.add_source_root(target_base)
    if dependencies:
      dependencies = [dep.address for dep in dependencies]

    self.build_graph.inject_synthetic_target(address=address,
                                             target_type=target_type,
                                             dependencies=dependencies,
                                             derived_from=derived_from,
                                             **kwargs)
    new_target = self.build_graph.get_target(address)
    return new_target

  def targets(self, predicate=None, postorder=False):
    """Selects targets in-play in this run from the target roots and their transitive dependencies.

    Also includes any new synthetic targets created from the target roots or their transitive
    dependencies during the course of the run.

    :param predicate: If specified, the predicate will be used to narrow the scope of targets
                      returned.
    :param bool postorder: `True` to gather transitive dependencies with a postorder traversal;
                           `False` or preorder by default.
    :returns: A list of matching targets.
    """
    target_set = self._collect_targets(self.target_roots, postorder=postorder)

    # Include synthetic targets derived from anything in the closure of the roots.
    synthetics = OrderedSet()
    for synthetic_address in self.build_graph.synthetic_addresses:
      if self.build_graph.get_concrete_derived_from(synthetic_address) in target_set:
        synthetics.add(self.build_graph.get_target(synthetic_address))
    synthetic_set = self._collect_targets(synthetics, postorder=postorder)

    target_set.update(synthetic_set)
    return filter(predicate, target_set)

  def _collect_targets(self, root_targets, postorder=False):
    # Expands root_targets to their transitive dependency closure via the build graph.
    addresses = [target.address for target in root_targets]
    target_set = self.build_graph.transitive_subgraph_of_addresses(addresses, postorder=postorder)
    return target_set

  def dependents(self, on_predicate=None, from_predicate=None):
    """Returns a map from targets that satisfy the from_predicate to targets they depend on that
    satisfy the on_predicate.
    """
    core = set(self.targets(on_predicate))
    dependees = defaultdict(set)
    for target in self.targets(from_predicate):
      for dependency in target.dependencies:
        if dependency in core:
          dependees[target].add(dependency)
    return dependees

  def resolve(self, spec):
    """Returns an iterator over the target(s) the given address points to."""
    return self.build_graph.resolve(spec)

  def scan(self, root=None):
    """Scans and parses all BUILD files found under ``root``.

    Only BUILD files found under ``root`` are parsed as roots in the graph, but any dependencies of
    targets parsed in the root tree's BUILD files will be followed and this may lead to BUILD files
    outside of ``root`` being parsed and included in the returned build graph.

    :param string root: The path to scan; by default, the build root.
    :returns: A new build graph encapsulating the targets found.
    """
    build_graph = BuildGraph(self.address_mapper)
    for address in self.address_mapper.scan_addresses(root, spec_excludes=self.spec_excludes):
      build_graph.inject_address_closure(address)
    return build_graph
apache-2.0
hyperhq/kubernetes
staging/src/k8s.io/apiserver/pkg/storage/watch_cache_test.go
9848
/* Copyright 2014 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package storage import ( "strconv" "testing" "time" "k8s.io/api/core/v1" apiequality "k8s.io/apimachinery/pkg/api/equality" "k8s.io/apimachinery/pkg/api/errors" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/fields" "k8s.io/apimachinery/pkg/labels" "k8s.io/apimachinery/pkg/runtime" "k8s.io/apimachinery/pkg/util/clock" "k8s.io/apimachinery/pkg/util/sets" "k8s.io/apimachinery/pkg/util/wait" "k8s.io/apimachinery/pkg/watch" "github.com/hyperhq/client-go/tools/cache" ) func makeTestPod(name string, resourceVersion uint64) *v1.Pod { return &v1.Pod{ ObjectMeta: metav1.ObjectMeta{ Namespace: "ns", Name: name, ResourceVersion: strconv.FormatUint(resourceVersion, 10), }, } } // newTestWatchCache just adds a fake clock. func newTestWatchCache(capacity int) *watchCache { keyFunc := func(obj runtime.Object) (string, error) { return NamespaceKeyFunc("prefix", obj) } getAttrsFunc := func(obj runtime.Object) (labels.Set, fields.Set, bool, error) { return nil, nil, false, nil } wc := newWatchCache(capacity, keyFunc, getAttrsFunc) wc.clock = clock.NewFakeClock(time.Now()) return wc } func TestWatchCacheBasic(t *testing.T) { store := newTestWatchCache(2) // Test Add/Update/Delete. 
pod1 := makeTestPod("pod", 1) if err := store.Add(pod1); err != nil { t.Errorf("unexpected error: %v", err) } if item, ok, _ := store.Get(pod1); !ok { t.Errorf("didn't find pod") } else { if !apiequality.Semantic.DeepEqual(&storeElement{Key: "prefix/ns/pod", Object: pod1}, item) { t.Errorf("expected %v, got %v", pod1, item) } } pod2 := makeTestPod("pod", 2) if err := store.Update(pod2); err != nil { t.Errorf("unexpected error: %v", err) } if item, ok, _ := store.Get(pod2); !ok { t.Errorf("didn't find pod") } else { if !apiequality.Semantic.DeepEqual(&storeElement{Key: "prefix/ns/pod", Object: pod2}, item) { t.Errorf("expected %v, got %v", pod1, item) } } pod3 := makeTestPod("pod", 3) if err := store.Delete(pod3); err != nil { t.Errorf("unexpected error: %v", err) } if _, ok, _ := store.Get(pod3); ok { t.Errorf("found pod") } // Test List. store.Add(makeTestPod("pod1", 4)) store.Add(makeTestPod("pod2", 5)) store.Add(makeTestPod("pod3", 6)) { podNames := sets.String{} for _, item := range store.List() { podNames.Insert(item.(*storeElement).Object.(*v1.Pod).ObjectMeta.Name) } if !podNames.HasAll("pod1", "pod2", "pod3") { t.Errorf("missing pods, found %v", podNames) } if len(podNames) != 3 { t.Errorf("found missing/extra items") } } // Test Replace. store.Replace([]interface{}{ makeTestPod("pod4", 7), makeTestPod("pod5", 8), }, "8") { podNames := sets.String{} for _, item := range store.List() { podNames.Insert(item.(*storeElement).Object.(*v1.Pod).ObjectMeta.Name) } if !podNames.HasAll("pod4", "pod5") { t.Errorf("missing pods, found %v", podNames) } if len(podNames) != 2 { t.Errorf("found missing/extra items") } } } func TestEvents(t *testing.T) { store := newTestWatchCache(5) store.Add(makeTestPod("pod", 3)) // Test for Added event. 
{ _, err := store.GetAllEventsSince(1) if err == nil { t.Errorf("expected error too old") } if _, ok := err.(*errors.StatusError); !ok { t.Errorf("expected error to be of type StatusError") } } { result, err := store.GetAllEventsSince(2) if err != nil { t.Errorf("unexpected error: %v", err) } if len(result) != 1 { t.Fatalf("unexpected events: %v", result) } if result[0].Type != watch.Added { t.Errorf("unexpected event type: %v", result[0].Type) } pod := makeTestPod("pod", uint64(3)) if !apiequality.Semantic.DeepEqual(pod, result[0].Object) { t.Errorf("unexpected item: %v, expected: %v", result[0].Object, pod) } if result[0].PrevObject != nil { t.Errorf("unexpected item: %v", result[0].PrevObject) } } store.Update(makeTestPod("pod", 4)) store.Update(makeTestPod("pod", 5)) // Test with not full cache. { _, err := store.GetAllEventsSince(1) if err == nil { t.Errorf("expected error too old") } } { result, err := store.GetAllEventsSince(3) if err != nil { t.Errorf("unexpected error: %v", err) } if len(result) != 2 { t.Fatalf("unexpected events: %v", result) } for i := 0; i < 2; i++ { if result[i].Type != watch.Modified { t.Errorf("unexpected event type: %v", result[i].Type) } pod := makeTestPod("pod", uint64(i+4)) if !apiequality.Semantic.DeepEqual(pod, result[i].Object) { t.Errorf("unexpected item: %v, expected: %v", result[i].Object, pod) } prevPod := makeTestPod("pod", uint64(i+3)) if !apiequality.Semantic.DeepEqual(prevPod, result[i].PrevObject) { t.Errorf("unexpected item: %v, expected: %v", result[i].PrevObject, prevPod) } } } for i := 6; i < 10; i++ { store.Update(makeTestPod("pod", uint64(i))) } // Test with full cache - there should be elements from 5 to 9. 
{ _, err := store.GetAllEventsSince(3) if err == nil { t.Errorf("expected error too old") } } { result, err := store.GetAllEventsSince(4) if err != nil { t.Errorf("unexpected error: %v", err) } if len(result) != 5 { t.Fatalf("unexpected events: %v", result) } for i := 0; i < 5; i++ { pod := makeTestPod("pod", uint64(i+5)) if !apiequality.Semantic.DeepEqual(pod, result[i].Object) { t.Errorf("unexpected item: %v, expected: %v", result[i].Object, pod) } } } // Test for delete event. store.Delete(makeTestPod("pod", uint64(10))) { result, err := store.GetAllEventsSince(9) if err != nil { t.Errorf("unexpected error: %v", err) } if len(result) != 1 { t.Fatalf("unexpected events: %v", result) } if result[0].Type != watch.Deleted { t.Errorf("unexpected event type: %v", result[0].Type) } pod := makeTestPod("pod", uint64(10)) if !apiequality.Semantic.DeepEqual(pod, result[0].Object) { t.Errorf("unexpected item: %v, expected: %v", result[0].Object, pod) } prevPod := makeTestPod("pod", uint64(9)) if !apiequality.Semantic.DeepEqual(prevPod, result[0].PrevObject) { t.Errorf("unexpected item: %v, expected: %v", result[0].PrevObject, prevPod) } } } func TestWaitUntilFreshAndList(t *testing.T) { store := newTestWatchCache(3) // In background, update the store. go func() { store.Add(makeTestPod("foo", 2)) store.Add(makeTestPod("bar", 5)) }() list, resourceVersion, err := store.WaitUntilFreshAndList(5, nil) if err != nil { t.Fatalf("unexpected error: %v", err) } if resourceVersion != 5 { t.Errorf("unexpected resourceVersion: %v, expected: 5", resourceVersion) } if len(list) != 2 { t.Errorf("unexpected list returned: %#v", list) } } func TestWaitUntilFreshAndGet(t *testing.T) { store := newTestWatchCache(3) // In background, update the store. 
go func() { store.Add(makeTestPod("foo", 2)) store.Add(makeTestPod("bar", 5)) }() obj, exists, resourceVersion, err := store.WaitUntilFreshAndGet(5, "prefix/ns/bar", nil) if err != nil { t.Fatalf("unexpected error: %v", err) } if resourceVersion != 5 { t.Errorf("unexpected resourceVersion: %v, expected: 5", resourceVersion) } if !exists { t.Fatalf("no results returned: %#v", obj) } if !apiequality.Semantic.DeepEqual(&storeElement{Key: "prefix/ns/bar", Object: makeTestPod("bar", 5)}, obj) { t.Errorf("unexpected element returned: %#v", obj) } } func TestWaitUntilFreshAndListTimeout(t *testing.T) { store := newTestWatchCache(3) fc := store.clock.(*clock.FakeClock) // In background, step clock after the below call starts the timer. go func() { for !fc.HasWaiters() { time.Sleep(time.Millisecond) } fc.Step(blockTimeout) // Add an object to make sure the test would // eventually fail instead of just waiting // forever. time.Sleep(30 * time.Second) store.Add(makeTestPod("bar", 5)) }() _, _, err := store.WaitUntilFreshAndList(5, nil) if err == nil { t.Fatalf("unexpected lack of timeout error") } } type testLW struct { ListFunc func(options metav1.ListOptions) (runtime.Object, error) WatchFunc func(options metav1.ListOptions) (watch.Interface, error) } func (t *testLW) List(options metav1.ListOptions) (runtime.Object, error) { return t.ListFunc(options) } func (t *testLW) Watch(options metav1.ListOptions) (watch.Interface, error) { return t.WatchFunc(options) } func TestReflectorForWatchCache(t *testing.T) { store := newTestWatchCache(5) { _, version, err := store.WaitUntilFreshAndList(0, nil) if err != nil { t.Fatalf("unexpected error: %v", err) } if version != 0 { t.Errorf("unexpected resource version: %d", version) } } lw := &testLW{ WatchFunc: func(options metav1.ListOptions) (watch.Interface, error) { fw := watch.NewFake() go fw.Stop() return fw, nil }, ListFunc: func(options metav1.ListOptions) (runtime.Object, error) { return &v1.PodList{ListMeta: 
metav1.ListMeta{ResourceVersion: "10"}}, nil }, } r := cache.NewReflector(lw, &v1.Pod{}, store, 0) r.ListAndWatch(wait.NeverStop) { _, version, err := store.WaitUntilFreshAndList(10, nil) if err != nil { t.Fatalf("unexpected error: %v", err) } if version != 10 { t.Errorf("unexpected resource version: %d", version) } } }
apache-2.0
heinigger/utils
thirdParty/applibrary/src/main/java/utils/app/ActivityManagerUtil.java
3301
package utils.app;

import android.app.Activity;

import java.util.Iterator;
import java.util.Stack;

/**
 * Keeps every live {@link Activity} on a stack so the whole task can be
 * inspected or finished at once.
 *
 * Typical usage:
 * <pre>
 * public class BaseActivity extends AppCompatActivity {
 *     private ActivityManagerUtil activityManagerUtil;
 *     protected void onCreate(Bundle savedInstanceState) {
 *         super.onCreate(savedInstanceState);
 *         setContentView(R.layout.activity_base);
 *         activityManagerUtil = ActivityManagerUtil.getInstance();
 *         activityManagerUtil.pushOneActivity(this);
 *     }
 *     protected void onDestroy() {
 *         super.onDestroy();
 *         activityManagerUtil.popOneActivity(this);
 *     }
 * }
 * </pre>
 * Created by Administrator
 * on 2016/5/19.
 */
@SuppressWarnings("unused")
public class ActivityManagerUtil {

    private static ActivityManagerUtil instance;
    // Activity stack: the most recently pushed activity sits on top.
    private Stack<Activity> activityStack;

    /**
     * Returns the singleton instance, creating it lazily.
     *
     * Synchronized so that two threads racing through the first call cannot
     * create two instances (the original unsynchronized check-then-act was
     * not thread-safe).
     *
     * @return the singleton
     */
    public static synchronized ActivityManagerUtil getInstance() {
        if (instance == null) {
            instance = new ActivityManagerUtil();
        }
        return instance;
    }

    /**
     * Pushes an activity onto the stack.
     *
     * @param activity the activity to track
     */
    public void pushOneActivity(Activity activity) {
        if (activityStack == null) {
            activityStack = new Stack<>();
        }
        activityStack.add(activity);
    }

    /**
     * Removes an activity from the stack and finishes it.
     *
     * Intended to be called from {@code Activity.onDestroy()}; calling
     * {@code finish()} on an already-finishing activity is a no-op there.
     *
     * @param activity the activity to remove
     */
    public void popOneActivity(Activity activity) {
        if (activityStack != null && activityStack.size() > 0) {
            if (activity != null) {
                activityStack.remove(activity);
                activity.finish();
            }
        }
    }

    /**
     * Returns the activity on top of the stack (last-in, first-out), or
     * {@code null} if nothing is tracked.
     *
     * The original implementation threw if the stack was null or empty; the
     * guard makes the accessor safe to call at any time.
     *
     * @return the top activity, or {@code null}
     */
    public Activity getLastActivity() {
        if (activityStack == null || activityStack.isEmpty()) {
            return null;
        }
        return activityStack.lastElement();
    }

    /**
     * Finishes the given activity and removes it from the stack.
     *
     * @param activity the activity to finish
     */
    public void finishActivity(Activity activity) {
        if (activity != null) {
            if (activityStack != null) {
                activityStack.remove(activity);
            }
            activity.finish();
        }
    }

    /**
     * Finishes every tracked activity of the given class.
     *
     * Removal happens through the iterator: the original for-each loop called
     * {@link #finishActivity(Activity)}, which mutates the stack mid-iteration
     * and throws {@link java.util.ConcurrentModificationException}.
     *
     * @param cls the activity class to finish
     */
    public void finishActivity(Class<?> cls) {
        if (activityStack == null) {
            return;
        }
        Iterator<Activity> iterator = activityStack.iterator();
        while (iterator.hasNext()) {
            Activity activity = iterator.next();
            if (activity.getClass().equals(cls)) {
                iterator.remove();
                activity.finish();
            }
        }
    }

    /**
     * Finishes all tracked activities and clears the stack.
     */
    public void finishAllActivity() {
        if (activityStack == null) {
            return;
        }
        try {
            for (Activity activity : activityStack) {
                if (activity != null) {
                    activity.finish();
                }
            }
            activityStack.clear();
        } catch (Exception e) {
            // Best effort: a failure to finish one activity should not stop
            // app teardown.
            e.printStackTrace();
        }
    }

    /**
     * Exits the application: finishes all activities, then kills the process.
     */
    public void appExit() {
        try {
            finishAllActivity();
        } catch (Exception e) {
            e.printStackTrace();
        }
        // Kill the OS process first, then exit the VM. The original code
        // called System.exit(0) before killProcess, but System.exit does not
        // return, so the killProcess line was unreachable.
        android.os.Process.killProcess(android.os.Process.myPid());
        System.exit(0);
    }
}
apache-2.0
JoelMarcey/buck
test/com/facebook/buck/jvm/java/abi/AbiFilteringClassVisitorTest.java
7390
/* * Copyright 2017-present Facebook, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may obtain * a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package com.facebook.buck.jvm.java.abi; import static org.easymock.EasyMock.createMock; import static org.easymock.EasyMock.expect; import static org.easymock.EasyMock.replay; import static org.easymock.EasyMock.verify; import org.junit.Before; import org.junit.Test; import org.objectweb.asm.ClassVisitor; import org.objectweb.asm.Opcodes; public class AbiFilteringClassVisitorTest { private ClassVisitor mockVisitor; private AbiFilteringClassVisitor filteringVisitor; @Before public void setUp() { mockVisitor = createMock(ClassVisitor.class); filteringVisitor = new AbiFilteringClassVisitor(mockVisitor); } @Test public void testExcludesPrivateFields() { testExcludesFieldWithAccess(Opcodes.ACC_PRIVATE); } @Test public void testExcludesPrivateStaticFields() { testExcludesFieldWithAccess(Opcodes.ACC_PRIVATE | Opcodes.ACC_STATIC); } @Test public void testExcludesSyntheticFields() { testExcludesFieldWithAccess(Opcodes.ACC_PUBLIC | Opcodes.ACC_SYNTHETIC); } @Test public void testIncludesPackageFields() { testIncludesFieldWithAccess(0); } @Test public void testIncludesPackageStaticFields() { testIncludesFieldWithAccess(Opcodes.ACC_STATIC); } @Test public void testIncludesPublicFields() { testIncludesFieldWithAccess(Opcodes.ACC_PUBLIC); } @Test public void testIncludesProtectedFields() { testIncludesFieldWithAccess(Opcodes.ACC_PROTECTED); } @Test public void 
testNotConfusedByOtherFieldAccessFlagsIncluding() { testIncludesFieldWithAccess(Opcodes.ACC_PUBLIC | Opcodes.ACC_STATIC | Opcodes.ACC_VOLATILE); } @Test public void testNotConfusedByOtherFieldAccessFlagsExcluding() { testExcludesFieldWithAccess(Opcodes.ACC_PRIVATE | Opcodes.ACC_STATIC | Opcodes.ACC_VOLATILE); } @Test public void testExcludesPrivateMethods() { testExcludesMethodWithAccess(Opcodes.ACC_PRIVATE); } @Test public void testIncludesPackageMethods() { testIncludesMethodWithAccess(Opcodes.ACC_PUBLIC); } @Test public void testIncludesProtectedMethods() { testIncludesMethodWithAccess(Opcodes.ACC_PUBLIC); } @Test public void testIncludesPublicMethods() { testIncludesMethodWithAccess(Opcodes.ACC_PUBLIC); } @Test public void testExcludesSyntheticMethods() { testExcludesMethodWithAccess(Opcodes.ACC_PUBLIC | Opcodes.ACC_SYNTHETIC); } @Test public void testNotConfusedByOtherMethodAccessFlagsIncluding() { testIncludesMethodWithAccess( Opcodes.ACC_PUBLIC | Opcodes.ACC_ABSTRACT | Opcodes.ACC_SYNCHRONIZED); } @Test public void testNotConfusedByOtherMethodAccessFlagsExcluding() { testExcludesMethodWithAccess( Opcodes.ACC_PRIVATE | Opcodes.ACC_ABSTRACT | Opcodes.ACC_SYNCHRONIZED); } @Test public void testExcludesStaticInitializers() { testExcludesMethodWithAccess(Opcodes.ACC_STATIC, "<clinit>"); } @Test public void testAlwaysVisitsClassNode() { visitClass(mockVisitor, "Foo"); replay(mockVisitor); visitClass(filteringVisitor, "Foo"); verify(mockVisitor); } @Test public void testIncludesInnerClassEntryForClassItself() { visitClass(mockVisitor, "Foo$Inner"); mockVisitor.visitInnerClass("Foo$Inner", "Foo", "Inner", Opcodes.ACC_PUBLIC); replay(mockVisitor); visitClass(filteringVisitor, "Foo$Inner"); filteringVisitor.visitInnerClass("Foo$Inner", "Foo", "Inner", Opcodes.ACC_PUBLIC); verify(mockVisitor); } @Test public void testIncludesInnerClassEntryForInnerClass() { visitClass(mockVisitor, "Foo"); mockVisitor.visitInnerClass("Foo$Inner", "Foo", "Inner", Opcodes.ACC_PUBLIC); 
replay(mockVisitor); visitClass(filteringVisitor, "Foo"); filteringVisitor.visitInnerClass("Foo$Inner", "Foo", "Inner", Opcodes.ACC_PUBLIC); verify(mockVisitor); } @Test public void testIncludesInnerClassEntryForOtherClassInnerClass() { visitClass(mockVisitor, "Foo"); mockVisitor.visitInnerClass("Bar$Inner", "Bar", "Inner", Opcodes.ACC_PUBLIC); replay(mockVisitor); visitClass(filteringVisitor, "Foo"); filteringVisitor.visitInnerClass("Bar$Inner", "Bar", "Inner", Opcodes.ACC_PUBLIC); verify(mockVisitor); } @Test public void testExcludesPrivateInnerClasses() { visitClass(mockVisitor, "Foo"); replay(mockVisitor); visitClass(filteringVisitor, "Foo"); filteringVisitor.visitInnerClass("Foo$Inner", "Foo", "Inner", Opcodes.ACC_PRIVATE); verify(mockVisitor); } @Test public void testExcludesSyntheticInnerClasses() { visitClass(mockVisitor, "Foo"); replay(mockVisitor); visitClass(filteringVisitor, "Foo"); filteringVisitor.visitInnerClass( "Foo$Inner", "Foo", "Inner", Opcodes.ACC_PUBLIC | Opcodes.ACC_SYNTHETIC); verify(mockVisitor); } @Test public void testExcludesAnonymousInnerClasses() { visitClass(mockVisitor, "Foo"); replay(mockVisitor); visitClass(filteringVisitor, "Foo"); filteringVisitor.visitInnerClass("Foo$1", null, null, 0); verify(mockVisitor); } @Test public void testExcludesLocalClasses() { visitClass(mockVisitor, "Foo"); replay(mockVisitor); visitClass(filteringVisitor, "Foo"); filteringVisitor.visitInnerClass("Foo$1Bar", null, "Bar", 0); verify(mockVisitor); } private static void visitClass(ClassVisitor cv, String name) { cv.visit(Opcodes.V1_8, Opcodes.ACC_PUBLIC, name, null, "java/lang/Object", null); } private void testExcludesFieldWithAccess(int access) { testFieldWithAccess(access, false); } private void testIncludesFieldWithAccess(int access) { testFieldWithAccess(access, true); } private void testFieldWithAccess(int access, boolean shouldInclude) { if (shouldInclude) { expect(mockVisitor.visitField(access, "Foo", "I", null, null)).andReturn(null); } 
replay(mockVisitor); filteringVisitor.visitField(access, "Foo", "I", null, null); verify(mockVisitor); } private void testExcludesMethodWithAccess(int access) { testExcludesMethodWithAccess(access, "foo"); } private void testIncludesMethodWithAccess(int access) { testIncludesMethodWithAccess(access, "foo"); } private void testExcludesMethodWithAccess(int access, String name) { testMethodWithAccess(access, name, false); } private void testIncludesMethodWithAccess(int access, String name) { testMethodWithAccess(access, name, true); } private void testMethodWithAccess(int access, String name, boolean shouldInclude) { if (shouldInclude) { expect(mockVisitor.visitMethod(access, name, "()V", null, null)).andReturn(null); } replay(mockVisitor); filteringVisitor.visitMethod(access, name, "()V", null, null); verify(mockVisitor); } }
apache-2.0
rodzyn0688/zipkin
zipkin-scrooge/src/test/scala/com/twitter/zipkin/adapter/ThriftAdapterSpec.scala
4413
/*
 * Copyright 2012 Twitter Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package com.twitter.zipkin.adapter

import com.twitter.zipkin.common._
import com.twitter.zipkin.gen
import org.specs.Specification
import org.specs.mock.{ClassMocker, JMocker}
import java.nio.ByteBuffer

/**
 * Round-trips each common domain type (Annotation, AnnotationType,
 * BinaryAnnotation, Endpoint, Span, TraceSummary) through its generated
 * Thrift representation via ThriftAdapter and asserts the value survives
 * unchanged; also covers lenient handling of incomplete Thrift structs.
 */
class ThriftAdapterSpec extends Specification with JMocker with ClassMocker {
  "ThriftAdapter" should {
    "convert Annotation" in {
      "to thrift and back" in {
        val expectedAnn: Annotation = Annotation(123, "value", Some(Endpoint(123, 123, "service")))
        val thriftAnn: gen.Annotation = ThriftAdapter(expectedAnn)
        val actualAnn: Annotation = ThriftAdapter(thriftAnn)
        expectedAnn mustEqual actualAnn
      }
    }

    "convert AnnotationType" in {
      val types = Seq("Bool", "Bytes", "I16", "I32", "I64", "Double", "String")
      "to thrift and back" in {
        // The position of each name in `types` doubles as its type id.
        types.zipWithIndex.foreach { case (value: String, index: Int) =>
          val expectedAnnType: AnnotationType = AnnotationType(index, value)
          val thriftAnnType: gen.AnnotationType = ThriftAdapter(expectedAnnType)
          val actualAnnType: AnnotationType = ThriftAdapter(thriftAnnType)
          actualAnnType mustEqual expectedAnnType
        }
      }
    }

    "convert BinaryAnnotation" in {
      "to thrift and back" in {
        val expectedAnnType = AnnotationType(3, "I32")
        val expectedHost = Some(Endpoint(123, 456, "service"))
        val expectedBA: BinaryAnnotation =
          BinaryAnnotation("something", ByteBuffer.wrap("else".getBytes), expectedAnnType,
            expectedHost)
        val thriftBA: gen.BinaryAnnotation = ThriftAdapter(expectedBA)
        val actualBA: BinaryAnnotation = ThriftAdapter(thriftBA)
        actualBA mustEqual expectedBA
      }
    }

    "convert Endpoint" in {
      "to thrift and back" in {
        val expectedEndpoint: Endpoint = Endpoint(123, 456, "service")
        val thriftEndpoint: gen.Endpoint = ThriftAdapter(expectedEndpoint)
        val actualEndpoint: Endpoint = ThriftAdapter(thriftEndpoint)
        expectedEndpoint mustEqual actualEndpoint
      }

      "to thrift and back, with null service" in {
        // TODO this could happen if we deserialize an old style struct
        // A null service name maps to the UnknownServiceName sentinel rather
        // than propagating the null.
        val actualEndpoint = ThriftAdapter(gen.Endpoint(123, 456, null))
        val expectedEndpoint = Endpoint(123, 456, Endpoint.UnknownServiceName)
        expectedEndpoint mustEqual actualEndpoint
      }
    }

    "convert Span" in {
      val annotationValue = "NONSENSE"
      val expectedAnnotation = Annotation(1, annotationValue, Some(Endpoint(1, 2, "service")))
      val expectedSpan = Span(12345, "methodcall", 666, None, List(expectedAnnotation), Nil)

      "to thrift and back" in {
        val thriftSpan: gen.Span = ThriftAdapter(expectedSpan)
        val actualSpan: Span = ThriftAdapter(thriftSpan)
        expectedSpan mustEqual actualSpan
      }

      "handle incomplete thrift span" in {
        // A missing name is fatal, while missing annotation sequences are
        // treated as empty.
        val noNameSpan = gen.Span(0, null, 0, None, Seq(), Seq())
        ThriftAdapter(noNameSpan) must throwA[IncompleteTraceDataException]

        val noAnnotationsSpan = gen.Span(0, "name", 0, None, null, Seq())
        ThriftAdapter(noAnnotationsSpan) mustEqual Span(0, "name", 0, None, List(), Seq())

        val noBinaryAnnotationsSpan = gen.Span(0, "name", 0, None, Seq(), null)
        ThriftAdapter(noBinaryAnnotationsSpan) mustEqual Span(0, "name", 0, None, List(), Seq())
      }
    }

    "convert TraceSummary" in {
      "to thrift and back" in {
        val expectedTraceSummary = TraceSummary(123, 10000, 10300, 300, Map("service1" -> 1),
          List(Endpoint(123, 123, "service1")))
        val thriftTraceSummary = ThriftAdapter(expectedTraceSummary)
        val actualTraceSummary = ThriftAdapter(thriftTraceSummary)
        expectedTraceSummary mustEqual actualTraceSummary
      }
    }
  }
}
apache-2.0
gawkermedia/googleads-java-lib
modules/dfp_axis/src/main/java/com/google/api/ads/dfp/axis/v201511/TeamServiceSoapBindingStub.java
39099
/**
 * TeamServiceSoapBindingStub.java
 *
 * This file was auto-generated from WSDL
 * by the Apache Axis 1.4 Mar 02, 2009 (07:08:06 PST) WSDL2Java emitter.
 */

package com.google.api.ads.dfp.axis.v201511;

/**
 * Auto-generated Axis 1.4 client stub for the DFP v201511 TeamService SOAP
 * binding. NOTE(review): do not hand-edit; regenerate from the WSDL instead.
 * Exposes three remote operations (createTeams, getTeamsByStatement,
 * updateTeams), each dispatched over an Axis Call configured from the static
 * {@code _operations} descriptors built in {@link #_initOperationDesc1()}.
 */
public class TeamServiceSoapBindingStub extends org.apache.axis.client.Stub implements com.google.api.ads.dfp.axis.v201511.TeamServiceInterface {
    // Parallel vectors: for index i, cachedSerClasses[i] is (de)serialized as
    // XML type cachedSerQNames[i] using the factory classes at
    // cachedSerFactories[i] / cachedDeserFactories[i]. They are registered
    // with the Call lazily, on the first invocation (see createCall()).
    private java.util.Vector cachedSerClasses = new java.util.Vector();
    private java.util.Vector cachedSerQNames = new java.util.Vector();
    private java.util.Vector cachedSerFactories = new java.util.Vector();
    private java.util.Vector cachedDeserFactories = new java.util.Vector();

    // WSDL operation metadata: [0]=createTeams, [1]=getTeamsByStatement, [2]=updateTeams.
    static org.apache.axis.description.OperationDesc [] _operations;

    static {
        _operations = new org.apache.axis.description.OperationDesc[3];
        _initOperationDesc1();
    }

    // Populates _operations with parameter/return/fault descriptors for the
    // three wrapped/literal SOAP 1.1 operations. Every operation declares a
    // single optional IN parameter, returns under element name "rval", and
    // maps the ApiExceptionFault element to ApiException.
    private static void _initOperationDesc1(){
        org.apache.axis.description.OperationDesc oper;
        org.apache.axis.description.ParameterDesc param;

        // _operations[0]: createTeams(Team[]) -> Team[]
        oper = new org.apache.axis.description.OperationDesc();
        oper.setName("createTeams");
        param = new org.apache.axis.description.ParameterDesc(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "teams"), org.apache.axis.description.ParameterDesc.IN, new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "Team"), com.google.api.ads.dfp.axis.v201511.Team[].class, false, false);
        param.setOmittable(true);
        oper.addParameter(param);
        oper.setReturnType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "Team"));
        oper.setReturnClass(com.google.api.ads.dfp.axis.v201511.Team[].class);
        oper.setReturnQName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "rval"));
        oper.setStyle(org.apache.axis.constants.Style.WRAPPED);
        oper.setUse(org.apache.axis.constants.Use.LITERAL);
        oper.addFault(new org.apache.axis.description.FaultDesc(
                      new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "ApiExceptionFault"),
                      "com.google.api.ads.dfp.axis.v201511.ApiException",
                      new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "ApiException"),
                      true
                     ));
        _operations[0] = oper;

        // _operations[1]: getTeamsByStatement(Statement) -> TeamPage
        oper = new org.apache.axis.description.OperationDesc();
        oper.setName("getTeamsByStatement");
        param = new org.apache.axis.description.ParameterDesc(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "filterStatement"), org.apache.axis.description.ParameterDesc.IN, new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "Statement"), com.google.api.ads.dfp.axis.v201511.Statement.class, false, false);
        param.setOmittable(true);
        oper.addParameter(param);
        oper.setReturnType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "TeamPage"));
        oper.setReturnClass(com.google.api.ads.dfp.axis.v201511.TeamPage.class);
        oper.setReturnQName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "rval"));
        oper.setStyle(org.apache.axis.constants.Style.WRAPPED);
        oper.setUse(org.apache.axis.constants.Use.LITERAL);
        oper.addFault(new org.apache.axis.description.FaultDesc(
                      new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "ApiExceptionFault"),
                      "com.google.api.ads.dfp.axis.v201511.ApiException",
                      new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "ApiException"),
                      true
                     ));
        _operations[1] = oper;

        // _operations[2]: updateTeams(Team[]) -> Team[]
        oper = new org.apache.axis.description.OperationDesc();
        oper.setName("updateTeams");
        param = new org.apache.axis.description.ParameterDesc(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "teams"), org.apache.axis.description.ParameterDesc.IN, new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "Team"), com.google.api.ads.dfp.axis.v201511.Team[].class, false, false);
        param.setOmittable(true);
        oper.addParameter(param);
        oper.setReturnType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "Team"));
        oper.setReturnClass(com.google.api.ads.dfp.axis.v201511.Team[].class);
        oper.setReturnQName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "rval"));
        oper.setStyle(org.apache.axis.constants.Style.WRAPPED);
        oper.setUse(org.apache.axis.constants.Use.LITERAL);
        oper.addFault(new org.apache.axis.description.FaultDesc(
                      new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "ApiExceptionFault"),
                      "com.google.api.ads.dfp.axis.v201511.ApiException",
                      new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "ApiException"),
                      true
                     ));
        _operations[2] = oper;
    }

    // Convenience constructor: stub with no endpoint (must be set later).
    public TeamServiceSoapBindingStub() throws org.apache.axis.AxisFault {
         this(null);
    }

    // Stub bound to an explicit endpoint URL.
    public TeamServiceSoapBindingStub(java.net.URL endpointURL, javax.xml.rpc.Service service) throws org.apache.axis.AxisFault {
         this(service);
         super.cachedEndpoint = endpointURL;
    }

    // Main constructor: records the owning JAX-RPC service and queues the
    // XML-type <-> Java-class mappings for every schema type the three
    // operations can carry (registered on first call, see createCall()).
    public TeamServiceSoapBindingStub(javax.xml.rpc.Service service) throws org.apache.axis.AxisFault {
        if (service == null) {
            super.service = new org.apache.axis.client.Service();
        } else {
            super.service = service;
        }
        ((org.apache.axis.client.Service)super.service).setTypeMappingVersion("1.2");
        java.lang.Class cls;
        javax.xml.namespace.QName qName;
        javax.xml.namespace.QName qName2; // declared by the generator but unused here
        java.lang.Class beansf = org.apache.axis.encoding.ser.BeanSerializerFactory.class;
        java.lang.Class beandf = org.apache.axis.encoding.ser.BeanDeserializerFactory.class;
        java.lang.Class enumsf = org.apache.axis.encoding.ser.EnumSerializerFactory.class;
        java.lang.Class enumdf = org.apache.axis.encoding.ser.EnumDeserializerFactory.class;
        // The following factory aliases are emitted by the generator but not
        // used by this particular stub (no array/simple/simple-list types).
        java.lang.Class arraysf = org.apache.axis.encoding.ser.ArraySerializerFactory.class;
        java.lang.Class arraydf = org.apache.axis.encoding.ser.ArrayDeserializerFactory.class;
        java.lang.Class simplesf = org.apache.axis.encoding.ser.SimpleSerializerFactory.class;
        java.lang.Class simpledf = org.apache.axis.encoding.ser.SimpleDeserializerFactory.class;
        java.lang.Class simplelistsf = org.apache.axis.encoding.ser.SimpleListSerializerFactory.class;
        java.lang.Class simplelistdf = org.apache.axis.encoding.ser.SimpleListDeserializerFactory.class;
        // Each group below queues one mapping: schema type QName -> Java class
        // -> bean (complex type) or enum (restriction) factory pair.
            qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "ApiError");
            cachedSerQNames.add(qName);
            cls = com.google.api.ads.dfp.axis.v201511.ApiError.class;
            cachedSerClasses.add(cls);
            cachedSerFactories.add(beansf);
            cachedDeserFactories.add(beandf);

            qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "ApiException");
            cachedSerQNames.add(qName);
            cls = com.google.api.ads.dfp.axis.v201511.ApiException.class;
            cachedSerClasses.add(cls);
            cachedSerFactories.add(beansf);
            cachedDeserFactories.add(beandf);

            qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "ApiVersionError");
            cachedSerQNames.add(qName);
            cls = com.google.api.ads.dfp.axis.v201511.ApiVersionError.class;
            cachedSerClasses.add(cls);
            cachedSerFactories.add(beansf);
            cachedDeserFactories.add(beandf);

            qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "ApiVersionError.Reason");
            cachedSerQNames.add(qName);
            cls = com.google.api.ads.dfp.axis.v201511.ApiVersionErrorReason.class;
            cachedSerClasses.add(cls);
            cachedSerFactories.add(enumsf);
            cachedDeserFactories.add(enumdf);

            qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "ApplicationException");
            cachedSerQNames.add(qName);
            cls = com.google.api.ads.dfp.axis.v201511.ApplicationException.class;
            cachedSerClasses.add(cls);
            cachedSerFactories.add(beansf);
            cachedDeserFactories.add(beandf);

            qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "AuthenticationError");
            cachedSerQNames.add(qName);
            cls = com.google.api.ads.dfp.axis.v201511.AuthenticationError.class;
            cachedSerClasses.add(cls);
            cachedSerFactories.add(beansf);
            cachedDeserFactories.add(beandf);

            qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "AuthenticationError.Reason");
            cachedSerQNames.add(qName);
            cls = com.google.api.ads.dfp.axis.v201511.AuthenticationErrorReason.class;
            cachedSerClasses.add(cls);
            cachedSerFactories.add(enumsf);
            cachedDeserFactories.add(enumdf);

            qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "BooleanValue");
            cachedSerQNames.add(qName);
            cls = com.google.api.ads.dfp.axis.v201511.BooleanValue.class;
            cachedSerClasses.add(cls);
            cachedSerFactories.add(beansf);
            cachedDeserFactories.add(beandf);

            qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "CollectionSizeError");
            cachedSerQNames.add(qName);
            cls = com.google.api.ads.dfp.axis.v201511.CollectionSizeError.class;
            cachedSerClasses.add(cls);
            cachedSerFactories.add(beansf);
            cachedDeserFactories.add(beandf);

            qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "CollectionSizeError.Reason");
            cachedSerQNames.add(qName);
            cls = com.google.api.ads.dfp.axis.v201511.CollectionSizeErrorReason.class;
            cachedSerClasses.add(cls);
            cachedSerFactories.add(enumsf);
            cachedDeserFactories.add(enumdf);

            qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "CommonError");
            cachedSerQNames.add(qName);
            cls = com.google.api.ads.dfp.axis.v201511.CommonError.class;
            cachedSerClasses.add(cls);
            cachedSerFactories.add(beansf);
            cachedDeserFactories.add(beandf);

            qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "CommonError.Reason");
            cachedSerQNames.add(qName);
            cls = com.google.api.ads.dfp.axis.v201511.CommonErrorReason.class;
            cachedSerClasses.add(cls);
            cachedSerFactories.add(enumsf);
            cachedDeserFactories.add(enumdf);

            qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "Date");
            cachedSerQNames.add(qName);
            cls = com.google.api.ads.dfp.axis.v201511.Date.class;
            cachedSerClasses.add(cls);
            cachedSerFactories.add(beansf);
            cachedDeserFactories.add(beandf);

            qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "DateTime");
            cachedSerQNames.add(qName);
            cls = com.google.api.ads.dfp.axis.v201511.DateTime.class;
            cachedSerClasses.add(cls);
            cachedSerFactories.add(beansf);
            cachedDeserFactories.add(beandf);

            qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "DateTimeValue");
            cachedSerQNames.add(qName);
            cls = com.google.api.ads.dfp.axis.v201511.DateTimeValue.class;
            cachedSerClasses.add(cls);
            cachedSerFactories.add(beansf);
            cachedDeserFactories.add(beandf);

            qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "DateValue");
            cachedSerQNames.add(qName);
            cls = com.google.api.ads.dfp.axis.v201511.DateValue.class;
            cachedSerClasses.add(cls);
            cachedSerFactories.add(beansf);
            cachedDeserFactories.add(beandf);

            qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "FeatureError");
            cachedSerQNames.add(qName);
            cls = com.google.api.ads.dfp.axis.v201511.FeatureError.class;
            cachedSerClasses.add(cls);
            cachedSerFactories.add(beansf);
            cachedDeserFactories.add(beandf);

            qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "FeatureError.Reason");
            cachedSerQNames.add(qName);
            cls = com.google.api.ads.dfp.axis.v201511.FeatureErrorReason.class;
            cachedSerClasses.add(cls);
            cachedSerFactories.add(enumsf);
            cachedDeserFactories.add(enumdf);

            qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "InternalApiError");
            cachedSerQNames.add(qName);
            cls = com.google.api.ads.dfp.axis.v201511.InternalApiError.class;
            cachedSerClasses.add(cls);
            cachedSerFactories.add(beansf);
            cachedDeserFactories.add(beandf);

            qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "InternalApiError.Reason");
            cachedSerQNames.add(qName);
            cls = com.google.api.ads.dfp.axis.v201511.InternalApiErrorReason.class;
            cachedSerClasses.add(cls);
            cachedSerFactories.add(enumsf);
            cachedDeserFactories.add(enumdf);

            qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "NotNullError");
            cachedSerQNames.add(qName);
            cls = com.google.api.ads.dfp.axis.v201511.NotNullError.class;
            cachedSerClasses.add(cls);
            cachedSerFactories.add(beansf);
            cachedDeserFactories.add(beandf);

            qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "NotNullError.Reason");
            cachedSerQNames.add(qName);
            cls = com.google.api.ads.dfp.axis.v201511.NotNullErrorReason.class;
            cachedSerClasses.add(cls);
            cachedSerFactories.add(enumsf);
            cachedDeserFactories.add(enumdf);

            qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "NullError");
            cachedSerQNames.add(qName);
            cls = com.google.api.ads.dfp.axis.v201511.NullError.class;
            cachedSerClasses.add(cls);
            cachedSerFactories.add(beansf);
            cachedDeserFactories.add(beandf);

            qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "NullError.Reason");
            cachedSerQNames.add(qName);
            cls = com.google.api.ads.dfp.axis.v201511.NullErrorReason.class;
            cachedSerClasses.add(cls);
            cachedSerFactories.add(enumsf);
            cachedDeserFactories.add(enumdf);

            qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "NumberValue");
            cachedSerQNames.add(qName);
            cls = com.google.api.ads.dfp.axis.v201511.NumberValue.class;
            cachedSerClasses.add(cls);
            cachedSerFactories.add(beansf);
            cachedDeserFactories.add(beandf);

            qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "ObjectValue");
            cachedSerQNames.add(qName);
            cls = com.google.api.ads.dfp.axis.v201511.ObjectValue.class;
            cachedSerClasses.add(cls);
            cachedSerFactories.add(beansf);
            cachedDeserFactories.add(beandf);

            qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "OrderError");
            cachedSerQNames.add(qName);
            cls = com.google.api.ads.dfp.axis.v201511.OrderError.class;
            cachedSerClasses.add(cls);
            cachedSerFactories.add(beansf);
            cachedDeserFactories.add(beandf);

            qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "OrderError.Reason");
            cachedSerQNames.add(qName);
            cls = com.google.api.ads.dfp.axis.v201511.OrderErrorReason.class;
            cachedSerClasses.add(cls);
            cachedSerFactories.add(enumsf);
            cachedDeserFactories.add(enumdf);

            qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "ParseError");
            cachedSerQNames.add(qName);
            cls = com.google.api.ads.dfp.axis.v201511.ParseError.class;
            cachedSerClasses.add(cls);
            cachedSerFactories.add(beansf);
            cachedDeserFactories.add(beandf);

            qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "ParseError.Reason");
            cachedSerQNames.add(qName);
            cls = com.google.api.ads.dfp.axis.v201511.ParseErrorReason.class;
            cachedSerClasses.add(cls);
            cachedSerFactories.add(enumsf);
            cachedDeserFactories.add(enumdf);

            qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "PermissionError");
            cachedSerQNames.add(qName);
            cls = com.google.api.ads.dfp.axis.v201511.PermissionError.class;
            cachedSerClasses.add(cls);
            cachedSerFactories.add(beansf);
            cachedDeserFactories.add(beandf);

            qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "PermissionError.Reason");
            cachedSerQNames.add(qName);
            cls = com.google.api.ads.dfp.axis.v201511.PermissionErrorReason.class;
            cachedSerClasses.add(cls);
            cachedSerFactories.add(enumsf);
            cachedDeserFactories.add(enumdf);

            qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "PublisherQueryLanguageContextError");
            cachedSerQNames.add(qName);
            cls = com.google.api.ads.dfp.axis.v201511.PublisherQueryLanguageContextError.class;
            cachedSerClasses.add(cls);
            cachedSerFactories.add(beansf);
            cachedDeserFactories.add(beandf);

            qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "PublisherQueryLanguageContextError.Reason");
            cachedSerQNames.add(qName);
            cls = com.google.api.ads.dfp.axis.v201511.PublisherQueryLanguageContextErrorReason.class;
            cachedSerClasses.add(cls);
            cachedSerFactories.add(enumsf);
            cachedDeserFactories.add(enumdf);

            qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "PublisherQueryLanguageSyntaxError");
            cachedSerQNames.add(qName);
            cls = com.google.api.ads.dfp.axis.v201511.PublisherQueryLanguageSyntaxError.class;
            cachedSerClasses.add(cls);
            cachedSerFactories.add(beansf);
            cachedDeserFactories.add(beandf);

            qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "PublisherQueryLanguageSyntaxError.Reason");
            cachedSerQNames.add(qName);
            cls = com.google.api.ads.dfp.axis.v201511.PublisherQueryLanguageSyntaxErrorReason.class;
            cachedSerClasses.add(cls);
            cachedSerFactories.add(enumsf);
            cachedDeserFactories.add(enumdf);

            qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "QuotaError");
            cachedSerQNames.add(qName);
            cls = com.google.api.ads.dfp.axis.v201511.QuotaError.class;
            cachedSerClasses.add(cls);
            cachedSerFactories.add(beansf);
            cachedDeserFactories.add(beandf);

            qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "QuotaError.Reason");
            cachedSerQNames.add(qName);
            cls = com.google.api.ads.dfp.axis.v201511.QuotaErrorReason.class;
            cachedSerClasses.add(cls);
            cachedSerFactories.add(enumsf);
            cachedDeserFactories.add(enumdf);

            qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "RequiredCollectionError");
            cachedSerQNames.add(qName);
            cls = com.google.api.ads.dfp.axis.v201511.RequiredCollectionError.class;
            cachedSerClasses.add(cls);
            cachedSerFactories.add(beansf);
            cachedDeserFactories.add(beandf);

            qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "RequiredCollectionError.Reason");
            cachedSerQNames.add(qName);
            cls = com.google.api.ads.dfp.axis.v201511.RequiredCollectionErrorReason.class;
            cachedSerClasses.add(cls);
            cachedSerFactories.add(enumsf);
            cachedDeserFactories.add(enumdf);

            qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "RequiredError");
            cachedSerQNames.add(qName);
            cls = com.google.api.ads.dfp.axis.v201511.RequiredError.class;
            cachedSerClasses.add(cls);
            cachedSerFactories.add(beansf);
            cachedDeserFactories.add(beandf);

            qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "RequiredError.Reason");
            cachedSerQNames.add(qName);
            cls = com.google.api.ads.dfp.axis.v201511.RequiredErrorReason.class;
            cachedSerClasses.add(cls);
            cachedSerFactories.add(enumsf);
            cachedDeserFactories.add(enumdf);

            qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "ServerError");
            cachedSerQNames.add(qName);
            cls = com.google.api.ads.dfp.axis.v201511.ServerError.class;
            cachedSerClasses.add(cls);
            cachedSerFactories.add(beansf);
            cachedDeserFactories.add(beandf);

            qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "ServerError.Reason");
            cachedSerQNames.add(qName);
            cls = com.google.api.ads.dfp.axis.v201511.ServerErrorReason.class;
            cachedSerClasses.add(cls);
            cachedSerFactories.add(enumsf);
            cachedDeserFactories.add(enumdf);

            qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "SetValue");
            cachedSerQNames.add(qName);
            cls = com.google.api.ads.dfp.axis.v201511.SetValue.class;
            cachedSerClasses.add(cls);
            cachedSerFactories.add(beansf);
            cachedDeserFactories.add(beandf);

            qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "SoapRequestHeader");
            cachedSerQNames.add(qName);
            cls = com.google.api.ads.dfp.axis.v201511.SoapRequestHeader.class;
            cachedSerClasses.add(cls);
            cachedSerFactories.add(beansf);
            cachedDeserFactories.add(beandf);

            qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "SoapResponseHeader");
            cachedSerQNames.add(qName);
            cls = com.google.api.ads.dfp.axis.v201511.SoapResponseHeader.class;
            cachedSerClasses.add(cls);
            cachedSerFactories.add(beansf);
            cachedDeserFactories.add(beandf);

            qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "Statement");
            cachedSerQNames.add(qName);
            cls = com.google.api.ads.dfp.axis.v201511.Statement.class;
            cachedSerClasses.add(cls);
            cachedSerFactories.add(beansf);
            cachedDeserFactories.add(beandf);

            qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "StatementError");
            cachedSerQNames.add(qName);
            cls = com.google.api.ads.dfp.axis.v201511.StatementError.class;
            cachedSerClasses.add(cls);
            cachedSerFactories.add(beansf);
            cachedDeserFactories.add(beandf);

            qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "StatementError.Reason");
            cachedSerQNames.add(qName);
            cls = com.google.api.ads.dfp.axis.v201511.StatementErrorReason.class;
            cachedSerClasses.add(cls);
            cachedSerFactories.add(enumsf);
            cachedDeserFactories.add(enumdf);

            qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "String_ValueMapEntry");
            cachedSerQNames.add(qName);
            cls = com.google.api.ads.dfp.axis.v201511.String_ValueMapEntry.class;
            cachedSerClasses.add(cls);
            cachedSerFactories.add(beansf);
            cachedDeserFactories.add(beandf);

            qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "Team");
            cachedSerQNames.add(qName);
            cls = com.google.api.ads.dfp.axis.v201511.Team.class;
            cachedSerClasses.add(cls);
            cachedSerFactories.add(beansf);
            cachedDeserFactories.add(beandf);

            qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "TeamAccessType");
            cachedSerQNames.add(qName);
            cls = com.google.api.ads.dfp.axis.v201511.TeamAccessType.class;
            cachedSerClasses.add(cls);
            cachedSerFactories.add(enumsf);
            cachedDeserFactories.add(enumdf);

            qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "TeamError");
            cachedSerQNames.add(qName);
            cls = com.google.api.ads.dfp.axis.v201511.TeamError.class;
            cachedSerClasses.add(cls);
            cachedSerFactories.add(beansf);
            cachedDeserFactories.add(beandf);

            qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "TeamError.Reason");
            cachedSerQNames.add(qName);
            cls = com.google.api.ads.dfp.axis.v201511.TeamErrorReason.class;
            cachedSerClasses.add(cls);
            cachedSerFactories.add(enumsf);
            cachedDeserFactories.add(enumdf);

            qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "TeamPage");
            cachedSerQNames.add(qName);
            cls = com.google.api.ads.dfp.axis.v201511.TeamPage.class;
            cachedSerClasses.add(cls);
            cachedSerFactories.add(beansf);
            cachedDeserFactories.add(beandf);

            qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "TextValue");
            cachedSerQNames.add(qName);
            cls = com.google.api.ads.dfp.axis.v201511.TextValue.class;
            cachedSerClasses.add(cls);
            cachedSerFactories.add(beansf);
            cachedDeserFactories.add(beandf);

            qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "TypeError");
            cachedSerQNames.add(qName);
            cls = com.google.api.ads.dfp.axis.v201511.TypeError.class;
            cachedSerClasses.add(cls);
            cachedSerFactories.add(beansf);
            cachedDeserFactories.add(beandf);

            qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "UniqueError");
            cachedSerQNames.add(qName);
            cls = com.google.api.ads.dfp.axis.v201511.UniqueError.class;
            cachedSerClasses.add(cls);
            cachedSerFactories.add(beansf);
            cachedDeserFactories.add(beandf);

            qName = new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "Value");
            cachedSerQNames.add(qName);
            cls = com.google.api.ads.dfp.axis.v201511.Value.class;
            cachedSerClasses.add(cls);
            cachedSerFactories.add(beansf);
            cachedDeserFactories.add(beandf);
    }

    // Creates and configures the Axis Call for an invocation, propagating all
    // cached stub settings (session, credentials, endpoint, timeout, port,
    // properties) and, on the very first call only, registering the queued
    // type mappings with the service's TypeMappingRegistry.
    protected org.apache.axis.client.Call createCall() throws java.rmi.RemoteException {
        try {
            org.apache.axis.client.Call _call = super._createCall();
            if (super.maintainSessionSet) {
                _call.setMaintainSession(super.maintainSession);
            }
            if (super.cachedUsername != null) {
                _call.setUsername(super.cachedUsername);
            }
            if (super.cachedPassword != null) {
                _call.setPassword(super.cachedPassword);
            }
            if (super.cachedEndpoint != null) {
                _call.setTargetEndpointAddress(super.cachedEndpoint);
            }
            if (super.cachedTimeout != null) {
                _call.setTimeout(super.cachedTimeout);
            }
            if (super.cachedPortName != null) {
                _call.setPortName(super.cachedPortName);
            }
            java.util.Enumeration keys = super.cachedProperties.keys();
            while (keys.hasMoreElements()) {
                java.lang.String key = (java.lang.String) keys.nextElement();
                _call.setProperty(key, super.cachedProperties.get(key));
            }
            // All the type mapping information is registered
            // when the first call is made.
            // The type mapping information is actually registered in
            // the TypeMappingRegistry of the service, which
            // is the reason why registration is only needed for the first call.
            synchronized (this) {
                if (firstCall()) {
                    // must set encoding style before registering serializers
                    _call.setEncodingStyle(null);
                    for (int i = 0; i < cachedSerFactories.size(); ++i) {
                        java.lang.Class cls = (java.lang.Class) cachedSerClasses.get(i);
                        javax.xml.namespace.QName qName = (javax.xml.namespace.QName) cachedSerQNames.get(i);
                        java.lang.Object x = cachedSerFactories.get(i);
                        if (x instanceof Class) {
                            java.lang.Class sf = (java.lang.Class) cachedSerFactories.get(i);
                            java.lang.Class df = (java.lang.Class) cachedDeserFactories.get(i);
                            _call.registerTypeMapping(cls, qName, sf, df, false);
                        }
                        else if (x instanceof javax.xml.rpc.encoding.SerializerFactory) {
                            org.apache.axis.encoding.SerializerFactory sf = (org.apache.axis.encoding.SerializerFactory) cachedSerFactories.get(i);
                            org.apache.axis.encoding.DeserializerFactory df = (org.apache.axis.encoding.DeserializerFactory) cachedDeserFactories.get(i);
                            _call.registerTypeMapping(cls, qName, sf, df, false);
                        }
                    }
                }
            }
            return _call;
        }
        catch (java.lang.Throwable _t) {
            throw new org.apache.axis.AxisFault("Failure trying to get the Call object", _t);
        }
    }

    // Remote operation: creates the given teams on the server and returns the
    // created Team objects. Faults carrying an ApiException detail are
    // unwrapped and rethrown as ApiException.
    public com.google.api.ads.dfp.axis.v201511.Team[] createTeams(com.google.api.ads.dfp.axis.v201511.Team[] teams) throws java.rmi.RemoteException, com.google.api.ads.dfp.axis.v201511.ApiException {
        if (super.cachedEndpoint == null) {
            throw new org.apache.axis.NoEndPointException();
        }
        org.apache.axis.client.Call _call = createCall();
        _call.setOperation(_operations[0]);
        _call.setUseSOAPAction(true);
        _call.setSOAPActionURI("");
        _call.setEncodingStyle(null);
        _call.setProperty(org.apache.axis.client.Call.SEND_TYPE_ATTR, Boolean.FALSE);
        _call.setProperty(org.apache.axis.AxisEngine.PROP_DOMULTIREFS, Boolean.FALSE);
        _call.setSOAPVersion(org.apache.axis.soap.SOAPConstants.SOAP11_CONSTANTS);
        _call.setOperationName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "createTeams"));

        setRequestHeaders(_call);
        setAttachments(_call);
 try {        java.lang.Object _resp = _call.invoke(new java.lang.Object[] {teams});

        if (_resp instanceof java.rmi.RemoteException) {
            throw (java.rmi.RemoteException)_resp;
        }
        else {
            extractAttachments(_call);
            try {
                return (com.google.api.ads.dfp.axis.v201511.Team[]) _resp;
            } catch (java.lang.Exception _exception) {
                // response needs conversion (e.g. holder/array coercion)
                return (com.google.api.ads.dfp.axis.v201511.Team[]) org.apache.axis.utils.JavaUtils.convert(_resp, com.google.api.ads.dfp.axis.v201511.Team[].class);
            }
        }
  } catch (org.apache.axis.AxisFault axisFaultException) {
    if (axisFaultException.detail != null) {
        if (axisFaultException.detail instanceof java.rmi.RemoteException) {
              throw (java.rmi.RemoteException) axisFaultException.detail;
         }
        if (axisFaultException.detail instanceof com.google.api.ads.dfp.axis.v201511.ApiException) {
              throw (com.google.api.ads.dfp.axis.v201511.ApiException) axisFaultException.detail;
         }
   }
  throw axisFaultException;
}
    }

    // Remote operation: returns the page of teams matching the given PQL
    // filter statement.
    public com.google.api.ads.dfp.axis.v201511.TeamPage getTeamsByStatement(com.google.api.ads.dfp.axis.v201511.Statement filterStatement) throws java.rmi.RemoteException, com.google.api.ads.dfp.axis.v201511.ApiException {
        if (super.cachedEndpoint == null) {
            throw new org.apache.axis.NoEndPointException();
        }
        org.apache.axis.client.Call _call = createCall();
        _call.setOperation(_operations[1]);
        _call.setUseSOAPAction(true);
        _call.setSOAPActionURI("");
        _call.setEncodingStyle(null);
        _call.setProperty(org.apache.axis.client.Call.SEND_TYPE_ATTR, Boolean.FALSE);
        _call.setProperty(org.apache.axis.AxisEngine.PROP_DOMULTIREFS, Boolean.FALSE);
        _call.setSOAPVersion(org.apache.axis.soap.SOAPConstants.SOAP11_CONSTANTS);
        _call.setOperationName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "getTeamsByStatement"));

        setRequestHeaders(_call);
        setAttachments(_call);
 try {        java.lang.Object _resp = _call.invoke(new java.lang.Object[] {filterStatement});

        if (_resp instanceof java.rmi.RemoteException) {
            throw (java.rmi.RemoteException)_resp;
        }
        else {
            extractAttachments(_call);
            try {
                return (com.google.api.ads.dfp.axis.v201511.TeamPage) _resp;
            } catch (java.lang.Exception _exception) {
                // response needs conversion (e.g. holder/array coercion)
                return (com.google.api.ads.dfp.axis.v201511.TeamPage) org.apache.axis.utils.JavaUtils.convert(_resp, com.google.api.ads.dfp.axis.v201511.TeamPage.class);
            }
        }
  } catch (org.apache.axis.AxisFault axisFaultException) {
    if (axisFaultException.detail != null) {
        if (axisFaultException.detail instanceof java.rmi.RemoteException) {
              throw (java.rmi.RemoteException) axisFaultException.detail;
         }
        if (axisFaultException.detail instanceof com.google.api.ads.dfp.axis.v201511.ApiException) {
              throw (com.google.api.ads.dfp.axis.v201511.ApiException) axisFaultException.detail;
         }
   }
  throw axisFaultException;
}
    }

    // Remote operation: updates the given teams on the server and returns the
    // updated Team objects.
    public com.google.api.ads.dfp.axis.v201511.Team[] updateTeams(com.google.api.ads.dfp.axis.v201511.Team[] teams) throws java.rmi.RemoteException, com.google.api.ads.dfp.axis.v201511.ApiException {
        if (super.cachedEndpoint == null) {
            throw new org.apache.axis.NoEndPointException();
        }
        org.apache.axis.client.Call _call = createCall();
        _call.setOperation(_operations[2]);
        _call.setUseSOAPAction(true);
        _call.setSOAPActionURI("");
        _call.setEncodingStyle(null);
        _call.setProperty(org.apache.axis.client.Call.SEND_TYPE_ATTR, Boolean.FALSE);
        _call.setProperty(org.apache.axis.AxisEngine.PROP_DOMULTIREFS, Boolean.FALSE);
        _call.setSOAPVersion(org.apache.axis.soap.SOAPConstants.SOAP11_CONSTANTS);
        _call.setOperationName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201511", "updateTeams"));

        setRequestHeaders(_call);
        setAttachments(_call);
 try {        java.lang.Object _resp = _call.invoke(new java.lang.Object[] {teams});

        if (_resp instanceof java.rmi.RemoteException) {
            throw (java.rmi.RemoteException)_resp;
        }
        else {
            extractAttachments(_call);
            try {
                return (com.google.api.ads.dfp.axis.v201511.Team[]) _resp;
            } catch (java.lang.Exception _exception) {
                // response needs conversion (e.g. holder/array coercion)
                return (com.google.api.ads.dfp.axis.v201511.Team[]) org.apache.axis.utils.JavaUtils.convert(_resp, com.google.api.ads.dfp.axis.v201511.Team[].class);
            }
        }
  } catch (org.apache.axis.AxisFault axisFaultException) {
    if (axisFaultException.detail != null) {
        if (axisFaultException.detail instanceof java.rmi.RemoteException) {
              throw (java.rmi.RemoteException) axisFaultException.detail;
         }
        if (axisFaultException.detail instanceof com.google.api.ads.dfp.axis.v201511.ApiException) {
              throw (com.google.api.ads.dfp.axis.v201511.ApiException) axisFaultException.detail;
         }
   }
  throw axisFaultException;
}
    }

}
apache-2.0
bbrowning/wildfly-swarm
remoting/runtime/src/main/java/org/wildfly/swarm/remoting/runtime/RemotingConfiguration.java
2078
package org.wildfly.swarm.remoting.runtime;

import java.util.ArrayList;
import java.util.List;

import org.jboss.as.controller.PathAddress;
import org.jboss.as.controller.PathElement;
import org.jboss.dmr.ModelNode;
import org.wildfly.swarm.container.runtime.AbstractServerConfiguration;
import org.wildfly.swarm.remoting.RemotingFraction;

import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.ADD;
import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.EXTENSION;
import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.OP;
import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.OP_ADDR;
import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.SUBSYSTEM;

/**
 * Server configuration for the remoting fraction: produces the DMR management
 * operations that install the remoting extension, its subsystem, the endpoint
 * configuration, and an HTTP remoting connector bound to the "default" listener.
 *
 * @author Ken Finnigan
 */
public class RemotingConfiguration extends AbstractServerConfiguration<RemotingFraction> {

    /** Address of the remoting subsystem all child resources hang off. */
    private PathAddress address = PathAddress.pathAddress(PathElement.pathElement(SUBSYSTEM, "remoting"));

    public RemotingConfiguration() {
        super(RemotingFraction.class);
    }

    /** Fraction used when the application supplies none. */
    @Override
    public RemotingFraction defaultFraction() {
        return new RemotingFraction();
    }

    /**
     * Builds the boot operations, in dependency order: extension add,
     * subsystem add, endpoint configuration add, HTTP connector add.
     */
    @Override
    public List<ModelNode> getList(RemotingFraction fraction) {
        final List<ModelNode> operations = new ArrayList<>();

        // Load the remoting extension module before touching the subsystem.
        final ModelNode extensionAdd = new ModelNode();
        extensionAdd.get(OP_ADDR).set(EXTENSION, "org.jboss.as.remoting");
        extensionAdd.get(OP).set(ADD);
        operations.add(extensionAdd);

        operations.add(addOperation(this.address.toModelNode()));
        operations.add(addOperation(this.address.append("configuration", "endpoint").toModelNode()));

        // The HTTP connector additionally references the "default" listener.
        final ModelNode connectorAdd = addOperation(this.address.append("http-connector", "http-remoting-connector").toModelNode());
        connectorAdd.get("connector-ref").set("default");
        operations.add(connectorAdd);

        return operations;
    }

    /** Creates a plain ADD operation targeting the given resource address. */
    private static ModelNode addOperation(ModelNode targetAddress) {
        final ModelNode operation = new ModelNode();
        operation.get(OP_ADDR).set(targetAddress);
        operation.get(OP).set(ADD);
        return operation;
    }
}
apache-2.0
DeinDesign/css
node_modules/bower/node_modules/inquirer/node_modules/cli-color/node_modules/es5-ext/lib/Object/map.js
370
'use strict'; var callable = require('./valid-callable') , forEach = require('./for-each') , call = Function.prototype.call; module.exports = function (obj, cb/*, thisArg*/) { var o = {}, thisArg = arguments[2]; callable(cb); forEach(obj, function (value, key, obj, index) { o[key] = call.call(cb, thisArg, value, key, obj, index); }); return o; };
apache-2.0
mbohlool/client-python
kubernetes/client/models/v1beta2_stateful_set_status.py
11358
# coding: utf-8

"""
    Kubernetes

    No description provided (generated by Swagger Codegen
    https://github.com/swagger-api/swagger-codegen)

    OpenAPI spec version: v1.8.2
    Generated by: https://github.com/swagger-api/swagger-codegen.git
"""

from pprint import pformat

import re


class V1beta2StatefulSetStatus(object):
    """Status of a v1beta2 StatefulSet.

    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    """

    # Maps each attribute name to its declared swagger type.
    swagger_types = {
        'collision_count': 'int',
        'current_replicas': 'int',
        'current_revision': 'str',
        'observed_generation': 'int',
        'ready_replicas': 'int',
        'replicas': 'int',
        'update_revision': 'str',
        'updated_replicas': 'int'
    }

    # Maps each attribute name to its JSON key in the API definition.
    attribute_map = {
        'collision_count': 'collisionCount',
        'current_replicas': 'currentReplicas',
        'current_revision': 'currentRevision',
        'observed_generation': 'observedGeneration',
        'ready_replicas': 'readyReplicas',
        'replicas': 'replicas',
        'update_revision': 'updateRevision',
        'updated_replicas': 'updatedReplicas'
    }

    def __init__(self, collision_count=None, current_replicas=None,
                 current_revision=None, observed_generation=None,
                 ready_replicas=None, replicas=None, update_revision=None,
                 updated_replicas=None):
        """V1beta2StatefulSetStatus - a model defined in Swagger.

        ``replicas`` is required: it is assigned unconditionally and its
        setter rejects ``None``, so omitting it raises ``ValueError``.
        """
        self._collision_count = None
        self._current_replicas = None
        self._current_revision = None
        self._observed_generation = None
        self._ready_replicas = None
        self._replicas = None
        self._update_revision = None
        self._updated_replicas = None
        self.discriminator = None

        # Optional fields are only assigned when supplied; the assignment
        # order mirrors the generated model so validation errors surface at
        # the same point as before.
        if collision_count is not None:
            self.collision_count = collision_count
        if current_replicas is not None:
            self.current_replicas = current_replicas
        if current_revision is not None:
            self.current_revision = current_revision
        if observed_generation is not None:
            self.observed_generation = observed_generation
        if ready_replicas is not None:
            self.ready_replicas = ready_replicas
        self.replicas = replicas
        if update_revision is not None:
            self.update_revision = update_revision
        if updated_replicas is not None:
            self.updated_replicas = updated_replicas

    @property
    def collision_count(self):
        """int: count of hash collisions for the StatefulSet, used by the
        controller as a collision-avoidance mechanism when it needs to create
        the name for the newest ControllerRevision."""
        return self._collision_count

    @collision_count.setter
    def collision_count(self, collision_count):
        self._collision_count = collision_count

    @property
    def current_replicas(self):
        """int: number of Pods created by the StatefulSet controller from the
        StatefulSet version indicated by currentRevision."""
        return self._current_replicas

    @current_replicas.setter
    def current_replicas(self, current_replicas):
        self._current_replicas = current_replicas

    @property
    def current_revision(self):
        """str: if not empty, indicates the version of the StatefulSet used
        to generate Pods in the sequence [0, currentReplicas)."""
        return self._current_revision

    @current_revision.setter
    def current_revision(self, current_revision):
        self._current_revision = current_revision

    @property
    def observed_generation(self):
        """int: most recent generation observed for this StatefulSet; it
        corresponds to the StatefulSet's generation, which is updated on
        mutation by the API Server."""
        return self._observed_generation

    @observed_generation.setter
    def observed_generation(self, observed_generation):
        self._observed_generation = observed_generation

    @property
    def ready_replicas(self):
        """int: number of Pods created by the StatefulSet controller that
        have a Ready Condition."""
        return self._ready_replicas

    @ready_replicas.setter
    def ready_replicas(self, ready_replicas):
        self._ready_replicas = ready_replicas

    @property
    def replicas(self):
        """int: number of Pods created by the StatefulSet controller.
        Required — the setter rejects ``None``."""
        return self._replicas

    @replicas.setter
    def replicas(self, replicas):
        if replicas is None:
            raise ValueError("Invalid value for `replicas`, must not be `None`")
        self._replicas = replicas

    @property
    def update_revision(self):
        """str: if not empty, indicates the version of the StatefulSet used
        to generate Pods in the sequence [replicas-updatedReplicas, replicas)."""
        return self._update_revision

    @update_revision.setter
    def update_revision(self, update_revision):
        self._update_revision = update_revision

    @property
    def updated_replicas(self):
        """int: number of Pods created by the StatefulSet controller from the
        StatefulSet version indicated by updateRevision."""
        return self._updated_replicas

    @updated_replicas.setter
    def updated_replicas(self, updated_replicas):
        self._updated_replicas = updated_replicas

    def to_dict(self):
        """Return the model's properties as a dict, recursing into nested
        models, lists and dicts that expose ``to_dict``."""
        result = {}
        for attr in self.swagger_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = [
                    item.to_dict() if hasattr(item, "to_dict") else item
                    for item in value
                ]
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = {
                    key: val.to_dict() if hasattr(val, "to_dict") else val
                    for key, val in value.items()
                }
            else:
                result[attr] = value
        return result

    def to_str(self):
        """Return the string representation of the model."""
        return pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`."""
        return self.to_str()

    def __eq__(self, other):
        """Two instances are equal when they are the same model type with
        identical attribute values."""
        if not isinstance(other, V1beta2StatefulSetStatus):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Inverse of ``__eq__``."""
        return not self == other
apache-2.0
MatMaul/libstorage
imports/executors/imports_executor_isilon.go
197
// +build libstorage_storage_executor,libstorage_storage_executor_isilon package executors import ( // load the packages _ "github.com/codedellemc/libstorage/drivers/storage/isilon/executor" )
apache-2.0
delacruzjayveejoshua920/ICNG-App-for-Windows-8.1-and-Windows-Phone-8.1-
InfoAppWindows8/ICNG Phone/obj/x86/Debug/Views/MyFiles.g.cs
1343
 #pragma checksum "C:\Users\DonCampos\Source\Workspaces\InfoApp\InfoAppWindows8\ICNG Phone\Views\MyFiles.xaml" "{406ea660-64cf-4c82-b6f0-42d48172a799}" "D8CEC6B28F0449C0A14B4A2FBBE72741" //------------------------------------------------------------------------------ // <auto-generated> // This code was generated by a tool. // // Changes to this file may cause incorrect behavior and will be lost if // the code is regenerated. // </auto-generated> //------------------------------------------------------------------------------ namespace App8.Views { partial class MyFiles : global::Windows.UI.Xaml.Controls.Page, global::Windows.UI.Xaml.Markup.IComponentConnector { [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 4.0.0.0")] [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] public void Connect(int connectionId, object target) { switch(connectionId) { case 1: #line 151 "..\..\..\Views\MyFiles.xaml" ((global::Windows.UI.Xaml.Controls.TextBox)(target)).TextChanged += this.input_TextChanged; #line default #line hidden break; } this._contentLoaded = true; } } }
apache-2.0
abates/gosh
example/appliance.go
1607
package main import ( "fmt" "github.com/abates/gosh" "net" "os" "time" ) type TimeCommand struct{} func (TimeCommand) Exec() error { t := time.Now() fmt.Println(t.Format(time.RFC822)) return nil } func interfaceNames() ([]string, error) { interfaces, err := net.Interfaces() names := make([]string, len(interfaces)) if err != nil { return nil, err } else { for i, netInterface := range interfaces { names[i] = netInterface.Name } } return names, nil } type InterfaceCommand struct{} func (i InterfaceCommand) Completions(substring string) []string { names, err := interfaceNames() if err != nil { fmt.Fprintf(os.Stderr, "Failed to retrieve system interfaces: %v\n", err) } return names } func (i InterfaceCommand) Exec() error { for _, name := range os.Args[1:] { netInterface, err := net.InterfaceByName(name) if err != nil { return err } else { fmt.Printf("Name: %v\n", netInterface.Name) addresses, err := netInterface.Addrs() if err != nil { return err } for _, address := range addresses { fmt.Printf(" %v\n", address) } } } return nil } type InterfacesCommand struct{} func (i InterfacesCommand) Exec() error { names, err := interfaceNames() if err != nil { return err } for _, name := range names { fmt.Fprintf(os.Stdout, "%v\n", name) } return nil } var commands = gosh.CommandMap{ "show": gosh.NewTreeCommand(gosh.CommandMap{ "interface": InterfaceCommand{}, "interfaces": InterfacesCommand{}, "time": TimeCommand{}, }), } func main() { shell := gosh.NewShell(commands) shell.Exec() }
apache-2.0
bingoohuang/westcache
src/test/java/com/github/bingoohuang/westcache/springann/SpringConfig.java
1627
package com.github.bingoohuang.westcache.springann; import com.github.bingoohuang.westcache.spring.WestCacheableEnabled; import com.github.bingoohuang.westcache.spring.WestCacheableScan; import com.github.bingoohuang.westcache.utils.EmbeddedRedis; import com.github.bingoohuang.westcache.utils.Redis; import lombok.val; import org.n3r.eql.eqler.spring.EqlerScan; import org.springframework.aop.framework.autoproxy.DefaultAdvisorAutoProxyCreator; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.ComponentScan; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Primary; import redis.clients.jedis.JedisCommands; @Configuration @ComponentScan @EqlerScan @WestCacheableScan(value = "com.github.bingoohuang.westcache.springann", basePackageClasses = SpringConfig.class) @WestCacheableEnabled public class SpringConfig { @Bean public DefaultAdvisorAutoProxyCreator defaultAdvisorAutoProxyCreator() { val creator = new DefaultAdvisorAutoProxyCreator(); creator.setProxyTargetClass(true); return creator; } @Bean @Primary public JedisCommands thisJedisCommands() { return Redis.createJedisCommands( "127.0.0.1", EmbeddedRedis.port1, 10); } @Bean(name = "that") public JedisCommands thatJedisCommands() { return Redis.createJedisCommands( "127.0.0.1", EmbeddedRedis.port2, 10); } @Bean(name = "cacheFlushScheduledBean") public String cacheFlushScheduled() { return "Every 1 seconds"; } }
apache-2.0
txusballesteros/Android-Clean-Testing
app/src/main/java/com/txusballesteros/testing/data/datasource/model/ImageEntityMapperImpl.java
1796
/*
 * Copyright Txus Ballesteros 2016 (@txusballesteros)
 *
 * This file is part of some open source application.
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 *
 * Contact: Txus Ballesteros <txus.ballesteros@gmail.com>
 */
package com.txusballesteros.testing.data.datasource.model;

import com.txusballesteros.testing.domain.model.Image;

import java.util.ArrayList;
import java.util.List;

import javax.inject.Inject;

/**
 * Maps data-layer {@link ImageEntity} objects to domain {@link Image}
 * objects, copying only the URL.
 */
public class ImageEntityMapperImpl implements ImageEntityMapper {
    @Inject
    public ImageEntityMapperImpl() { }

    /** Maps each entity in order, returning a new list of the same size. */
    @Override
    public List<Image> map(List<ImageEntity> source) {
        final List<Image> images = new ArrayList<>(source.size());
        for (final ImageEntity entity : source) {
            images.add(map(entity));
        }
        return images;
    }

    /** Builds a domain image carrying the entity's URL. */
    @Override
    public Image map(ImageEntity source) {
        return new Image.Builder()
                .setUrl(source.getUrl())
                .build();
    }
}
apache-2.0
xxs/es-shop
src/main/java/com/sishuok/es/common/repository/RepositoryHelper.java
10302
/**
 * Copyright (c) 2005-2012 https://github.com/zhangkaitao
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 */
package com.sishuok.es.common.repository;

import java.util.List;

import javax.persistence.EntityManager;
import javax.persistence.EntityManagerFactory;
import javax.persistence.Query;

import org.springframework.core.annotation.AnnotationUtils;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;
import org.springframework.data.jpa.repository.support.JpaEntityInformation;
import org.springframework.data.jpa.repository.support.JpaEntityInformationSupport;
import org.springframework.orm.jpa.SharedEntityManagerCreator;
import org.springframework.util.Assert;

import com.sishuok.es.common.entity.search.Searchable;
import com.sishuok.es.common.repository.callback.SearchCallback;
import com.sishuok.es.common.repository.support.annotation.EnableQueryCache;

/**
 * Repository helper: executes ad-hoc JPQL queries (list/one/count/batch)
 * against a shared {@link EntityManager}, with optional Hibernate query
 * caching driven by the entity's {@link EnableQueryCache} annotation.
 *
 * <p>User: Zhang Kaitao
 * <p>Date: 13-4-14 下午5:28
 * <p>Version: 1.0
 */
public class RepositoryHelper {

    private static EntityManager entityManager;
    private Class<?> entityClass;
    private boolean enableQueryCache = false;

    /**
     * @param entityClass the entity this helper serves; query caching is
     *                    enabled when the class carries {@link EnableQueryCache}
     */
    public RepositoryHelper(Class<?> entityClass) {
        this.entityClass = entityClass;

        EnableQueryCache enableQueryCacheAnnotation =
                AnnotationUtils.findAnnotation(entityClass, EnableQueryCache.class);
        // Fixed: assign the field directly instead of shadowing it with a local.
        this.enableQueryCache =
                enableQueryCacheAnnotation != null && enableQueryCacheAnnotation.value();
    }

    public static void setEntityManagerFactory(EntityManagerFactory entityManagerFactory) {
        entityManager = SharedEntityManagerCreator.createSharedEntityManager(entityManagerFactory);
    }

    /**
     * @return the shared entity manager
     * @throws IllegalArgumentException if {@link #setEntityManagerFactory} was never called
     */
    public static EntityManager getEntityManager() {
        // Fixed: message previously read "entityManager must null".
        Assert.notNull(entityManager, "entityManager must not be null, please see "
                + "[com.sishuok.es.common.repository.RepositoryHelper#setEntityManagerFactory]");

        return entityManager;
    }

    public static void flush() {
        getEntityManager().flush();
    }

    public static void clear() {
        flush();
        getEntityManager().clear();
    }

    /**
     * Executes a JPQL query with conditions, ordering and paging supplied by
     * the {@link Searchable} and applied through the {@link SearchCallback}.
     *
     * @param ql             base JPQL
     * @param searchable     conditions, paging and sorting
     * @param searchCallback callback that appends conditions and binds values
     * @return the result list
     */
    public <M> List<M> findAll(final String ql, final Searchable searchable, final SearchCallback searchCallback) {
        assertConverted(searchable);

        StringBuilder s = new StringBuilder(ql);
        searchCallback.prepareQL(s, searchable);
        searchCallback.prepareOrder(s, searchable);
        Query query = getEntityManager().createQuery(s.toString());
        applyEnableQueryCache(query);
        searchCallback.setValues(query, searchable);
        searchCallback.setPageable(query, searchable);

        return query.getResultList();
    }

    /**
     * Counts entities matching the {@link Searchable} conditions.
     *
     * @param ql             base JPQL (a count query)
     * @param searchable     conditions
     * @param searchCallback callback that appends conditions and binds values
     * @return the count
     */
    public long count(final String ql, final Searchable searchable, final SearchCallback searchCallback) {
        assertConverted(searchable);

        StringBuilder s = new StringBuilder(ql);
        searchCallback.prepareQL(s, searchable);
        Query query = getEntityManager().createQuery(s.toString());
        applyEnableQueryCache(query);
        searchCallback.setValues(query, searchable);

        return (Long) query.getSingleResult();
    }

    /**
     * Finds a single entity matching the conditions, or {@code null} when
     * nothing matches.
     *
     * @param ql             base JPQL
     * @param searchable     conditions and sorting
     * @param searchCallback callback that appends conditions and binds values
     * @return the first match or {@code null}
     */
    public <M> M findOne(final String ql, final Searchable searchable, final SearchCallback searchCallback) {
        assertConverted(searchable);

        StringBuilder s = new StringBuilder(ql);
        searchCallback.prepareQL(s, searchable);
        searchCallback.prepareOrder(s, searchable);
        Query query = getEntityManager().createQuery(s.toString());
        applyEnableQueryCache(query);
        searchCallback.setValues(query, searchable);
        searchCallback.setPageable(query, searchable);
        query.setMaxResults(1);

        List<M> result = query.getResultList();
        if (result.size() > 0) {
            return result.get(0);
        }
        return null;
    }

    /**
     * @see RepositoryHelper#findAll(String, org.springframework.data.domain.Pageable, Object...)
     */
    public <M> List<M> findAll(final String ql, final Object... params) {
        // Must cast to (Pageable) null, otherwise the varargs overload would
        // recurse into this method.
        return findAll(ql, (Pageable) null, params);
    }

    /**
     * Executes the JPQL with positional parameters; {@code pageable} carries
     * paging/sorting, {@code null} means no paging.
     *
     * @param ql       JPQL
     * @param pageable {@code null} for no paging
     * @param params   positional parameters, bound starting at index 1
     */
    public <M> List<M> findAll(final String ql, final Pageable pageable, final Object... params) {
        Query query = getEntityManager().createQuery(
                ql + prepareOrder(pageable != null ? pageable.getSort() : null));
        applyEnableQueryCache(query);
        setParameters(query, params);
        if (pageable != null) {
            query.setFirstResult(pageable.getOffset());
            query.setMaxResults(pageable.getPageSize());
        }
        return query.getResultList();
    }

    /**
     * Executes the JPQL with positional parameters; {@code sort} carries
     * ordering, {@code null} means unsorted.
     *
     * @param ql     JPQL
     * @param sort   {@code null} for no ordering
     * @param params positional parameters, bound starting at index 1
     */
    public <M> List<M> findAll(final String ql, final Sort sort, final Object... params) {
        Query query = getEntityManager().createQuery(ql + prepareOrder(sort));
        applyEnableQueryCache(query);
        setParameters(query, params);
        return query.getResultList();
    }

    /**
     * Finds a single entity by JPQL and positional parameters, or
     * {@code null} when nothing matches.
     */
    public <M> M findOne(final String ql, final Object... params) {
        List<M> list = findAll(ql, new PageRequest(0, 1), params);
        if (list.size() > 0) {
            return list.get(0);
        }
        return null;
    }

    /**
     * Counts by JPQL and positional parameters.
     */
    public long count(final String ql, final Object... params) {
        // Fixed: use getEntityManager() like every other method so the
        // not-initialized assertion also guards this path.
        Query query = getEntityManager().createQuery(ql);
        applyEnableQueryCache(query);
        setParameters(query, params);
        return (Long) query.getSingleResult();
    }

    /**
     * Executes a bulk statement (insert / update / delete).
     *
     * @return number of affected rows
     */
    public int batchUpdate(final String ql, final Object... params) {
        Query query = getEntityManager().createQuery(ql);
        setParameters(query, params);
        return query.executeUpdate();
    }

    /**
     * Binds positional parameters in order, starting at index 1.
     */
    public void setParameters(Query query, Object[] params) {
        if (params != null) {
            for (int i = 0; i < params.length; i++) {
                query.setParameter(i + 1, params[i]);
            }
        }
    }

    /**
     * Renders an " order by ..." clause from the sort, or "" when unsorted.
     * NOTE(review): relies on Sort.toString() producing "prop: DIR" pairs.
     */
    public String prepareOrder(Sort sort) {
        if (sort == null || !sort.iterator().hasNext()) {
            return "";
        }
        return " order by " + sort.toString().replace(":", " ");
    }

    public <T> JpaEntityInformation<T, ?> getMetadata(Class<T> entityClass) {
        return JpaEntityInformationSupport.getMetadata(entityClass, entityManager);
    }

    public String getEntityName(Class<?> entityClass) {
        return getMetadata(entityClass).getEntityName();
    }

    /** Converts the searchable to this helper's entity type if not yet done. */
    private void assertConverted(Searchable searchable) {
        if (!searchable.isConverted()) {
            searchable.convert(this.entityClass);
        }
    }

    /** Turns on the Hibernate query cache when enabled for the entity. */
    public void applyEnableQueryCache(Query query) {
        if (enableQueryCache) {
            query.setHint("org.hibernate.cacheable", true);
        }
    }
}
apache-2.0
yinkaf/robovm-samples
HelloGL/src/main/java/org/robovm/samples/hellogl/GLES2.java
34443
/* * Copyright (C) 2014 RoboVM AB * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.robovm.samples.hellogl; import java.nio.Buffer; import java.nio.ByteBuffer; import java.nio.IntBuffer; import org.robovm.rt.bro.Bro; import org.robovm.rt.bro.Struct; import org.robovm.rt.bro.annotation.Bridge; import org.robovm.rt.bro.annotation.Library; import org.robovm.rt.bro.ptr.BytePtr; import org.robovm.rt.bro.ptr.IntPtr; @Library("OpenGLES") public class GLES2 { static { Bro.bind(GLES2.class); } public static final int GL_ES_VERSION_2_0 = 1; public static final int GL_DEPTH_BUFFER_BIT = 0x00000100; public static final int GL_STENCIL_BUFFER_BIT = 0x00000400; public static final int GL_COLOR_BUFFER_BIT = 0x00004000; public static final int GL_FALSE = 0; public static final int GL_TRUE = 1; public static final int GL_POINTS = 0x0000; public static final int GL_LINES = 0x0001; public static final int GL_LINE_LOOP = 0x0002; public static final int GL_LINE_STRIP = 0x0003; public static final int GL_TRIANGLES = 0x0004; public static final int GL_TRIANGLE_STRIP = 0x0005; public static final int GL_TRIANGLE_FAN = 0x0006; public static final int GL_ZERO = 0; public static final int GL_ONE = 1; public static final int GL_SRC_COLOR = 0x0300; public static final int GL_ONE_MINUS_SRC_COLOR = 0x0301; public static final int GL_SRC_ALPHA = 0x0302; public static final int GL_ONE_MINUS_SRC_ALPHA = 0x0303; public static final int GL_DST_ALPHA = 0x0304; public static final int 
GL_ONE_MINUS_DST_ALPHA = 0x0305; public static final int GL_DST_COLOR = 0x0306; public static final int GL_ONE_MINUS_DST_COLOR = 0x0307; public static final int GL_SRC_ALPHA_SATURATE = 0x0308; public static final int GL_FUNC_ADD = 0x8006; public static final int GL_BLEND_EQUATION = 0x8009; public static final int GL_BLEND_EQUATION_RGB = 0x8009; public static final int GL_BLEND_EQUATION_ALPHA = 0x883D; public static final int GL_FUNC_SUBTRACT = 0x800A; public static final int GL_FUNC_REVERSE_SUBTRACT = 0x800B; public static final int GL_BLEND_DST_RGB = 0x80C8; public static final int GL_BLEND_SRC_RGB = 0x80C9; public static final int GL_BLEND_DST_ALPHA = 0x80CA; public static final int GL_BLEND_SRC_ALPHA = 0x80CB; public static final int GL_CONSTANT_COLOR = 0x8001; public static final int GL_ONE_MINUS_CONSTANT_COLOR = 0x8002; public static final int GL_CONSTANT_ALPHA = 0x8003; public static final int GL_ONE_MINUS_CONSTANT_ALPHA = 0x8004; public static final int GL_BLEND_COLOR = 0x8005; public static final int GL_ARRAY_BUFFER = 0x8892; public static final int GL_ELEMENT_ARRAY_BUFFER = 0x8893; public static final int GL_ARRAY_BUFFER_BINDING = 0x8894; public static final int GL_ELEMENT_ARRAY_BUFFER_BINDING = 0x8895; public static final int GL_STREAM_DRAW = 0x88E0; public static final int GL_STATIC_DRAW = 0x88E4; public static final int GL_DYNAMIC_DRAW = 0x88E8; public static final int GL_BUFFER_SIZE = 0x8764; public static final int GL_BUFFER_USAGE = 0x8765; public static final int GL_CURRENT_VERTEX_ATTRIB = 0x8626; public static final int GL_FRONT = 0x0404; public static final int GL_BACK = 0x0405; public static final int GL_FRONT_AND_BACK = 0x0408; public static final int GL_TEXTURE_2D = 0x0DE1; public static final int GL_CULL_FACE = 0x0B44; public static final int GL_BLEND = 0x0BE2; public static final int GL_DITHER = 0x0BD0; public static final int GL_STENCIL_TEST = 0x0B90; public static final int GL_DEPTH_TEST = 0x0B71; public static final int GL_SCISSOR_TEST = 
0x0C11; public static final int GL_POLYGON_OFFSET_FILL = 0x8037; public static final int GL_SAMPLE_ALPHA_TO_COVERAGE = 0x809E; public static final int GL_SAMPLE_COVERAGE = 0x80A0; public static final int GL_NO_ERROR = 0; public static final int GL_INVALID_ENUM = 0x0500; public static final int GL_INVALID_VALUE = 0x0501; public static final int GL_INVALID_OPERATION = 0x0502; public static final int GL_OUT_OF_MEMORY = 0x0505; public static final int GL_CW = 0x0900; public static final int GL_CCW = 0x0901; public static final int GL_LINE_WIDTH = 0x0B21; public static final int GL_ALIASED_POINT_SIZE_RANGE = 0x846D; public static final int GL_ALIASED_LINE_WIDTH_RANGE = 0x846E; public static final int GL_CULL_FACE_MODE = 0x0B45; public static final int GL_FRONT_FACE = 0x0B46; public static final int GL_DEPTH_RANGE = 0x0B70; public static final int GL_DEPTH_WRITEMASK = 0x0B72; public static final int GL_DEPTH_CLEAR_VALUE = 0x0B73; public static final int GL_DEPTH_FUNC = 0x0B74; public static final int GL_STENCIL_CLEAR_VALUE = 0x0B91; public static final int GL_STENCIL_FUNC = 0x0B92; public static final int GL_STENCIL_FAIL = 0x0B94; public static final int GL_STENCIL_PASS_DEPTH_FAIL = 0x0B95; public static final int GL_STENCIL_PASS_DEPTH_PASS = 0x0B96; public static final int GL_STENCIL_REF = 0x0B97; public static final int GL_STENCIL_VALUE_MASK = 0x0B93; public static final int GL_STENCIL_WRITEMASK = 0x0B98; public static final int GL_STENCIL_BACK_FUNC = 0x8800; public static final int GL_STENCIL_BACK_FAIL = 0x8801; public static final int GL_STENCIL_BACK_PASS_DEPTH_FAIL = 0x8802; public static final int GL_STENCIL_BACK_PASS_DEPTH_PASS = 0x8803; public static final int GL_STENCIL_BACK_REF = 0x8CA3; public static final int GL_STENCIL_BACK_VALUE_MASK = 0x8CA4; public static final int GL_STENCIL_BACK_WRITEMASK = 0x8CA5; public static final int GL_VIEWPORT = 0x0BA2; public static final int GL_SCISSOR_BOX = 0x0C10; public static final int GL_COLOR_CLEAR_VALUE = 0x0C22; public 
static final int GL_COLOR_WRITEMASK = 0x0C23; public static final int GL_UNPACK_ALIGNMENT = 0x0CF5; public static final int GL_PACK_ALIGNMENT = 0x0D05; public static final int GL_MAX_TEXTURE_SIZE = 0x0D33; public static final int GL_MAX_TEXTURE_UNITS = 0x84E2; public static final int GL_MAX_VIEWPORT_DIMS = 0x0D3A; public static final int GL_SUBPIXEL_BITS = 0x0D50; public static final int GL_RED_BITS = 0x0D52; public static final int GL_GREEN_BITS = 0x0D53; public static final int GL_BLUE_BITS = 0x0D54; public static final int GL_ALPHA_BITS = 0x0D55; public static final int GL_DEPTH_BITS = 0x0D56; public static final int GL_STENCIL_BITS = 0x0D57; public static final int GL_POLYGON_OFFSET_UNITS = 0x2A00; public static final int GL_POLYGON_OFFSET_FACTOR = 0x8038; public static final int GL_TEXTURE_BINDING_2D = 0x8069; public static final int GL_SAMPLE_BUFFERS = 0x80A8; public static final int GL_SAMPLES = 0x80A9; public static final int GL_SAMPLE_COVERAGE_VALUE = 0x80AA; public static final int GL_SAMPLE_COVERAGE_INVERT = 0x80AB; public static final int GL_NUM_COMPRESSED_TEXTURE_FORMATS = 0x86A2; public static final int GL_COMPRESSED_TEXTURE_FORMATS = 0x86A3; public static final int GL_DONT_CARE = 0x1100; public static final int GL_FASTEST = 0x1101; public static final int GL_NICEST = 0x1102; public static final int GL_GENERATE_MIPMAP = 0x8191; public static final int GL_GENERATE_MIPMAP_HINT = 0x8192; public static final int GL_BYTE = 0x1400; public static final int GL_UNSIGNED_BYTE = 0x1401; public static final int GL_SHORT = 0x1402; public static final int GL_UNSIGNED_SHORT = 0x1403; public static final int GL_INT = 0x1404; public static final int GL_UNSIGNED_INT = 0x1405; public static final int GL_FLOAT = 0x1406; public static final int GL_FIXED = 0x140C; public static final int GL_DEPTH_COMPONENT = 0x1902; public static final int GL_ALPHA = 0x1906; public static final int GL_RGB = 0x1907; public static final int GL_RGBA = 0x1908; public static final int 
GL_LUMINANCE = 0x1909; public static final int GL_LUMINANCE_ALPHA = 0x190A; public static final int GL_UNSIGNED_SHORT_4_4_4_4 = 0x8033; public static final int GL_UNSIGNED_SHORT_5_5_5_1 = 0x8034; public static final int GL_UNSIGNED_SHORT_5_6_5 = 0x8363; public static final int GL_FRAGMENT_SHADER = 0x8B30; public static final int GL_VERTEX_SHADER = 0x8B31; public static final int GL_MAX_VERTEX_ATTRIBS = 0x8869; public static final int GL_MAX_VERTEX_UNIFORM_VECTORS = 0x8DFB; public static final int GL_MAX_VARYING_VECTORS = 0x8DFC; public static final int GL_MAX_COMBINED_TEXTURE_IMAGE_UNITS = 0x8B4D; public static final int GL_MAX_VERTEX_TEXTURE_IMAGE_UNITS = 0x8B4C; public static final int GL_MAX_TEXTURE_IMAGE_UNITS = 0x8872; public static final int GL_MAX_FRAGMENT_UNIFORM_VECTORS = 0x8DFD; public static final int GL_SHADER_TYPE = 0x8B4F; public static final int GL_DELETE_STATUS = 0x8B80; public static final int GL_LINK_STATUS = 0x8B82; public static final int GL_VALIDATE_STATUS = 0x8B83; public static final int GL_ATTACHED_SHADERS = 0x8B85; public static final int GL_ACTIVE_UNIFORMS = 0x8B86; public static final int GL_ACTIVE_UNIFORM_MAX_LENGTH = 0x8B87; public static final int GL_ACTIVE_ATTRIBUTES = 0x8B89; public static final int GL_ACTIVE_ATTRIBUTE_MAX_LENGTH = 0x8B8A; public static final int GL_SHADING_LANGUAGE_VERSION = 0x8B8C; public static final int GL_CURRENT_PROGRAM = 0x8B8D; public static final int GL_NEVER = 0x0200; public static final int GL_LESS = 0x0201; public static final int GL_EQUAL = 0x0202; public static final int GL_LEQUAL = 0x0203; public static final int GL_GREATER = 0x0204; public static final int GL_NOTEQUAL = 0x0205; public static final int GL_GEQUAL = 0x0206; public static final int GL_ALWAYS = 0x0207; public static final int GL_KEEP = 0x1E00; public static final int GL_REPLACE = 0x1E01; public static final int GL_INCR = 0x1E02; public static final int GL_DECR = 0x1E03; public static final int GL_INVERT = 0x150A; public static final int 
GL_INCR_WRAP = 0x8507; public static final int GL_DECR_WRAP = 0x8508; public static final int GL_VENDOR = 0x1F00; public static final int GL_RENDERER = 0x1F01; public static final int GL_VERSION = 0x1F02; public static final int GL_EXTENSIONS = 0x1F03; public static final int GL_NEAREST = 0x2600; public static final int GL_LINEAR = 0x2601; public static final int GL_NEAREST_MIPMAP_NEAREST = 0x2700; public static final int GL_LINEAR_MIPMAP_NEAREST = 0x2701; public static final int GL_NEAREST_MIPMAP_LINEAR = 0x2702; public static final int GL_LINEAR_MIPMAP_LINEAR = 0x2703; public static final int GL_TEXTURE_MAG_FILTER = 0x2800; public static final int GL_TEXTURE_MIN_FILTER = 0x2801; public static final int GL_TEXTURE_WRAP_S = 0x2802; public static final int GL_TEXTURE_WRAP_T = 0x2803; public static final int GL_TEXTURE = 0x1702; public static final int GL_TEXTURE_CUBE_MAP = 0x8513; public static final int GL_TEXTURE_BINDING_CUBE_MAP = 0x8514; public static final int GL_TEXTURE_CUBE_MAP_POSITIVE_X = 0x8515; public static final int GL_TEXTURE_CUBE_MAP_NEGATIVE_X = 0x8516; public static final int GL_TEXTURE_CUBE_MAP_POSITIVE_Y = 0x8517; public static final int GL_TEXTURE_CUBE_MAP_NEGATIVE_Y = 0x8518; public static final int GL_TEXTURE_CUBE_MAP_POSITIVE_Z = 0x8519; public static final int GL_TEXTURE_CUBE_MAP_NEGATIVE_Z = 0x851A; public static final int GL_MAX_CUBE_MAP_TEXTURE_SIZE = 0x851C; public static final int GL_TEXTURE0 = 0x84C0; public static final int GL_TEXTURE1 = 0x84C1; public static final int GL_TEXTURE2 = 0x84C2; public static final int GL_TEXTURE3 = 0x84C3; public static final int GL_TEXTURE4 = 0x84C4; public static final int GL_TEXTURE5 = 0x84C5; public static final int GL_TEXTURE6 = 0x84C6; public static final int GL_TEXTURE7 = 0x84C7; public static final int GL_TEXTURE8 = 0x84C8; public static final int GL_TEXTURE9 = 0x84C9; public static final int GL_TEXTURE10 = 0x84CA; public static final int GL_TEXTURE11 = 0x84CB; public static final int GL_TEXTURE12 
= 0x84CC; public static final int GL_TEXTURE13 = 0x84CD; public static final int GL_TEXTURE14 = 0x84CE; public static final int GL_TEXTURE15 = 0x84CF; public static final int GL_TEXTURE16 = 0x84D0; public static final int GL_TEXTURE17 = 0x84D1; public static final int GL_TEXTURE18 = 0x84D2; public static final int GL_TEXTURE19 = 0x84D3; public static final int GL_TEXTURE20 = 0x84D4; public static final int GL_TEXTURE21 = 0x84D5; public static final int GL_TEXTURE22 = 0x84D6; public static final int GL_TEXTURE23 = 0x84D7; public static final int GL_TEXTURE24 = 0x84D8; public static final int GL_TEXTURE25 = 0x84D9; public static final int GL_TEXTURE26 = 0x84DA; public static final int GL_TEXTURE27 = 0x84DB; public static final int GL_TEXTURE28 = 0x84DC; public static final int GL_TEXTURE29 = 0x84DD; public static final int GL_TEXTURE30 = 0x84DE; public static final int GL_TEXTURE31 = 0x84DF; public static final int GL_ACTIVE_TEXTURE = 0x84E0; public static final int GL_REPEAT = 0x2901; public static final int GL_CLAMP_TO_EDGE = 0x812F; public static final int GL_MIRRORED_REPEAT = 0x8370; public static final int GL_FLOAT_VEC2 = 0x8B50; public static final int GL_FLOAT_VEC3 = 0x8B51; public static final int GL_FLOAT_VEC4 = 0x8B52; public static final int GL_INT_VEC2 = 0x8B53; public static final int GL_INT_VEC3 = 0x8B54; public static final int GL_INT_VEC4 = 0x8B55; public static final int GL_BOOL = 0x8B56; public static final int GL_BOOL_VEC2 = 0x8B57; public static final int GL_BOOL_VEC3 = 0x8B58; public static final int GL_BOOL_VEC4 = 0x8B59; public static final int GL_FLOAT_MAT2 = 0x8B5A; public static final int GL_FLOAT_MAT3 = 0x8B5B; public static final int GL_FLOAT_MAT4 = 0x8B5C; public static final int GL_SAMPLER_2D = 0x8B5E; public static final int GL_SAMPLER_CUBE = 0x8B60; public static final int GL_VERTEX_ATTRIB_ARRAY_ENABLED = 0x8622; public static final int GL_VERTEX_ATTRIB_ARRAY_SIZE = 0x8623; public static final int GL_VERTEX_ATTRIB_ARRAY_STRIDE = 
0x8624; public static final int GL_VERTEX_ATTRIB_ARRAY_TYPE = 0x8625; public static final int GL_VERTEX_ATTRIB_ARRAY_NORMALIZED = 0x886A; public static final int GL_VERTEX_ATTRIB_ARRAY_POINTER = 0x8645; public static final int GL_VERTEX_ATTRIB_ARRAY_BUFFER_BINDING = 0x889F; public static final int GL_IMPLEMENTATION_COLOR_READ_TYPE = 0x8B9A; public static final int GL_IMPLEMENTATION_COLOR_READ_FORMAT = 0x8B9B; public static final int GL_COMPILE_STATUS = 0x8B81; public static final int GL_INFO_LOG_LENGTH = 0x8B84; public static final int GL_SHADER_SOURCE_LENGTH = 0x8B88; public static final int GL_SHADER_COMPILER = 0x8DFA; public static final int GL_SHADER_BINARY_FORMATS = 0x8DF8; public static final int GL_NUM_SHADER_BINARY_FORMATS = 0x8DF9; public static final int GL_LOW_FLOAT = 0x8DF0; public static final int GL_MEDIUM_FLOAT = 0x8DF1; public static final int GL_HIGH_FLOAT = 0x8DF2; public static final int GL_LOW_INT = 0x8DF3; public static final int GL_MEDIUM_INT = 0x8DF4; public static final int GL_HIGH_INT = 0x8DF5; public static final int GL_FRAMEBUFFER = 0x8D40; public static final int GL_RENDERBUFFER = 0x8D41; public static final int GL_RGBA4 = 0x8056; public static final int GL_RGB5_A1 = 0x8057; public static final int GL_RGB565 = 0x8D62; public static final int GL_DEPTH_COMPONENT16 = 0x81A5; public static final int GL_STENCIL_INDEX = 0x1901; public static final int GL_STENCIL_INDEX8 = 0x8D48; public static final int GL_RENDERBUFFER_WIDTH = 0x8D42; public static final int GL_RENDERBUFFER_HEIGHT = 0x8D43; public static final int GL_RENDERBUFFER_INTERNAL_FORMAT = 0x8D44; public static final int GL_RENDERBUFFER_RED_SIZE = 0x8D50; public static final int GL_RENDERBUFFER_GREEN_SIZE = 0x8D51; public static final int GL_RENDERBUFFER_BLUE_SIZE = 0x8D52; public static final int GL_RENDERBUFFER_ALPHA_SIZE = 0x8D53; public static final int GL_RENDERBUFFER_DEPTH_SIZE = 0x8D54; public static final int GL_RENDERBUFFER_STENCIL_SIZE = 0x8D55; public static final int 
GL_FRAMEBUFFER_ATTACHMENT_OBJECT_TYPE = 0x8CD0; public static final int GL_FRAMEBUFFER_ATTACHMENT_OBJECT_NAME = 0x8CD1; public static final int GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_LEVEL = 0x8CD2; public static final int GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_CUBE_MAP_FACE = 0x8CD3; public static final int GL_COLOR_ATTACHMENT0 = 0x8CE0; public static final int GL_DEPTH_ATTACHMENT = 0x8D00; public static final int GL_STENCIL_ATTACHMENT = 0x8D20; public static final int GL_NONE = 0; public static final int GL_FRAMEBUFFER_COMPLETE = 0x8CD5; public static final int GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT = 0x8CD6; public static final int GL_FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT = 0x8CD7; public static final int GL_FRAMEBUFFER_INCOMPLETE_DIMENSIONS = 0x8CD9; public static final int GL_FRAMEBUFFER_UNSUPPORTED = 0x8CDD; public static final int GL_FRAMEBUFFER_BINDING = 0x8CA6; public static final int GL_RENDERBUFFER_BINDING = 0x8CA7; public static final int GL_MAX_RENDERBUFFER_SIZE = 0x84E8; public static final int GL_INVALID_FRAMEBUFFER_OPERATION = 0x0506; public static final int GL_VERTEX_PROGRAM_POINT_SIZE = 0x8642; public static final int GL_TEXTURE_MAX_ANISOTROPY_EXT = 0x84FE; public static final int GL_MAX_TEXTURE_MAX_ANISOTROPY_EXT = 0x84FF; // more ergonomic versions of certain methods public static int glGenBuffer() { glGenBuffers(1, intBuf1); return intBuf1.get(0); } public static int glGetProgrami(int program, int pname) { glGetProgramiv(program, pname, intBuf1); return intBuf1.get(0); } public static int glGetShaderi(int shader, int pname) { glGetShaderiv(shader, pname, intBuf1); return intBuf1.get(0); } public static void glShaderSource(int shader, String source) { BytePtr.BytePtrPtr sources = Struct.allocate(BytePtr.BytePtrPtr.class, 1); sources.next(0).set(BytePtr.toBytePtrAsciiZ(source)); glShaderSource(shader, 1, sources, null); } public static String glGetProgramInfoLog(int program) { ByteBuffer bbuf = GLUtil.newBuffer(MAX_LOG_SIZE); glGetProgramInfoLog(program, 
MAX_LOG_SIZE, intBuf1, bbuf); return toString(bbuf, intBuf1.get(0)); } public static String glGetShaderInfoLog(int shader) { ByteBuffer bbuf = GLUtil.newBuffer(MAX_LOG_SIZE); glGetShaderInfoLog(shader, MAX_LOG_SIZE, intBuf1, bbuf); return toString(bbuf, intBuf1.get(0)); } private static String toString(ByteBuffer bbuf, int length) { byte[] data = new byte[length]; bbuf.get(data, 0, length); return new String(data); } private static final int MAX_LOG_SIZE = 8192; private static IntBuffer intBuf1 = GLUtil.newIntBuffer(1); // NOTE: most of the commented out methods below will work as is, but any // methods that use // pointers probably need to be bridged (see the examples above for how that // works) // @Bridge public static native void glActiveTexture (int texture); @Bridge public static native void glAttachShader(int program, int shader); // @Bridge public static native void glBindAttribLocation (int program, int // index, String name); @Bridge public static native void glBindBuffer(int target, int buffer); // @Bridge public static native void glBindFramebuffer (int target, int // framebuffer); // @Bridge public static native void glBindRenderbuffer (int target, int // renderbuffer); // @Bridge public static native void glBindTexture (int target, int // texture); // @Bridge public static native void glBlendColor (float red, float green, // float blue, float alpha); // @Bridge public static native void glBlendEquation ( int mode ); // @Bridge public static native void glBlendEquationSeparate (int modeRGB, // int modeAlpha); // @Bridge public static native void glBlendFunc (int sfactor, int dfactor); // @Bridge public static native void glBlendFuncSeparate (int srcRGB, int // dstRGB, int srcAlpha, int dstAlpha); @Bridge public static native void glBufferData(int target, int size, Buffer data, int usage); // @Bridge public static native void glBufferSubData (int target, int // offset, int size, Buffer data); // @Bridge public static native int glCheckFramebufferStatus 
(int target); @Bridge public static native void glClear(int mask); @Bridge public static native void glClearColor(float red, float green, float blue, float alpha); // @Bridge public static native void glClearDepthf (float depth); // @Bridge public static native void glClearStencil (int s); // @Bridge public static native void glColorMask (boolean red, boolean // green, boolean blue, boolean alpha); @Bridge public static native void glCompileShader(int shader); // @Bridge public static native void glCompressedTexImage2D (int target, int // level, int internalformat, int width, int height, int border, int // imageSize, Buffer data); // @Bridge public static native void glCompressedTexSubImage2D (int target, // int level, int xoffset, int yoffset, int width, int height, int format, // int imageSize, Buffer data); // @Bridge public static native void glCopyTexImage2D (int target, int // level, int internalformat, int x, int y, int width, int height, int // border); // @Bridge public static native void glCopyTexSubImage2D (int target, int // level, int xoffset, int yoffset, int x, int y, int width, int height); @Bridge public static native int glCreateProgram(); @Bridge public static native int glCreateShader(int type); // @Bridge public static native void glCullFace (int mode); // @Bridge public static native void glDeleteBuffers (int n, IntBuffer // buffers); // @Bridge public static native void glDeleteFramebuffers (int n, IntBuffer // framebuffers); // @Bridge public static native void glDeleteProgram (int program); // @Bridge public static native void glDeleteRenderbuffers (int n, IntBuffer // renderbuffers); // @Bridge public static native void glDeleteShader (int shader); // @Bridge public static native void glDeleteTextures (int n, IntBuffer // textures); // @Bridge public static native void glDepthFunc (int func); // @Bridge public static native void glDepthMask (boolean flag); // @Bridge public static native void glDepthRangef (float zNear, float // zFar); // 
@Bridge public static native void glDetachShader (int program, int // shader); // @Bridge public static native void glDisable (int cap); // @Bridge public static native void glDisableVertexAttribArray (int index); @Bridge public static native void glDrawArrays(int mode, int first, int count); // @Bridge public static native void glDrawElements (int mode, int count, // int type, Buffer indices); // @Bridge public static native void glDrawElements (int mode, int count, // int type, int indices); @Bridge public static native void glEnable(int cap); @Bridge public static native void glEnableVertexAttribArray(int index); // @Bridge public static native void glFinish (); // @Bridge public static native void glFlush (); // @Bridge public static native void glFramebufferRenderbuffer (int target, // int attachment, int renderbuffertarget, int renderbuffer); // @Bridge public static native void glFramebufferTexture2D (int target, int // attachment, int textarget, int texture, int level); // @Bridge public static native void glFrontFace (int mode); @Bridge public static native void glGenBuffers(int n, IntBuffer buffers); // @Bridge public static native void glGenerateMipmap (int target); // @Bridge public static native void glGenFramebuffers (int n, IntBuffer // framebuffers); // @Bridge public static native void glGenRenderbuffers (int n, IntBuffer // renderbuffers); // @Bridge public static native void glGenTextures (int n, IntBuffer // textures); // @Bridge public static native String glGetActiveAttrib (int program, int // index, IntBuffer size, Buffer type); // @Bridge public static native String glGetActiveUniform (int program, int // index, IntBuffer size, Buffer type); // @Bridge public static native void glGetAttachedShaders (int program, int // maxcount, Buffer count, IntBuffer shaders); @Bridge public static native int glGetAttribLocation(int program, String name); // @Bridge public static native void glGetBooleanv (int pname, Buffer // params); // @Bridge public 
static native void glGetBufferParameteriv (int target, int // pname, IntBuffer params); // @Bridge public static native int glGetError (); // @Bridge public static native void glGetFloatv (int pname, FloatBuffer // params); // @Bridge public static native void glGetFramebufferAttachmentParameteriv // (int target, int attachment, int pname, IntBuffer params); // @Bridge public static native void glGetIntegerv (int pname, IntBuffer // params); @Bridge public static native void glGetProgramiv(int program, int pname, IntBuffer params); @Bridge public static native void glGetProgramInfoLog(int program, int maxLogLen, IntBuffer length, ByteBuffer logData); // @Bridge public static native void glGetRenderbufferParameteriv (int // target, int pname, IntBuffer params); @Bridge public static native void glGetShaderiv(int shader, int pname, IntBuffer params); @Bridge public static native void glGetShaderInfoLog(int shader, int maxLogLen, IntBuffer length, ByteBuffer logData); // @Bridge public static native void glGetShaderPrecisionFormat (int // shadertype, int precisiontype, IntBuffer range, IntBuffer precision); // @Bridge public static native void glGetShaderSource (int shader, int // bufsize, Buffer length, String source); // @Bridge public static native String glGetString (int name); // @Bridge public static native void glGetTexParameterfv (int target, int // pname, FloatBuffer params); // @Bridge public static native void glGetTexParameteriv (int target, int // pname, IntBuffer params); // @Bridge public static native void glGetUniformfv (int program, int // location, FloatBuffer params); // @Bridge public static native void glGetUniformiv (int program, int // location, IntBuffer params); @Bridge public static native int glGetUniformLocation(int program, String name); // @Bridge public static native void glGetVertexAttribfv (int index, int // pname, FloatBuffer params); // @Bridge public static native void glGetVertexAttribiv (int index, int // pname, IntBuffer 
params); // @Bridge public static native void glGetVertexAttribPointerv (int index, // int pname, Buffer pointer); // @Bridge public static native void glHint (int target, int mode); // @Bridge public static native boolean glIsBuffer (int buffer); // @Bridge public static native boolean glIsEnabled (int cap); // @Bridge public static native boolean glIsFramebuffer (int framebuffer); // @Bridge public static native boolean glIsProgram (int program); // @Bridge public static native boolean glIsRenderbuffer (int renderbuffer); // @Bridge public static native boolean glIsShader (int shader); // @Bridge public static native boolean glIsTexture (int texture); // @Bridge public static native void glLineWidth (float width); @Bridge public static native void glLinkProgram(int program); // @Bridge public static native void glPixelStorei (int pname, int param); // @Bridge public static native void glPolygonOffset (float factor, float // units); // @Bridge public static native void glReadPixels (int x, int y, int width, // int height, int format, int type, Buffer pixels); // @Bridge public static native void glReleaseShaderCompiler (); // @Bridge public static native void glRenderbufferStorage (int target, int // internalformat, int width, int height); // @Bridge public static native void glSampleCoverage (float value, boolean // invert); // @Bridge public static native void glScissor (int x, int y, int width, int // height); // @Bridge public static native void glShaderBinary (int n, IntBuffer // shaders, int binaryformat, Buffer binary, int length); @Bridge public static native void glShaderSource(int shader, int count, BytePtr.BytePtrPtr sources, IntPtr lengths); // @Bridge public static native void glStencilFunc (int func, int ref, int // mask); // @Bridge public static native void glStencilFuncSeparate (int face, int // func, int ref, int mask); // @Bridge public static native void glStencilMask (int mask); // @Bridge public static native void glStencilMaskSeparate (int 
face, int // mask); // @Bridge public static native void glStencilOp (int fail, int zfail, int // zpass); // @Bridge public static native void glStencilOpSeparate (int face, int // fail, int zfail, int zpass); // @Bridge public static native void glTexImage2D (int target, int level, // int internalformat, int width, int height, int border, int format, int // type, Buffer pixels); // @Bridge public static native void glTexParameterf (int target, int pname, // float param); // @Bridge public static native void glTexParameterfv (int target, int // pname, FloatBuffer params); // @Bridge public static native void glTexParameteri (int target, int pname, // int param); // @Bridge public static native void glTexParameteriv (int target, int // pname, IntBuffer params); // @Bridge public static native void glTexSubImage2D (int target, int level, // int xoffset, int yoffset, int width, int height, int format, int type, // Buffer pixels); @Bridge public static native void glUniform1f(int location, float x); // @Bridge public static native void glUniform1fv (int location, int count, // FloatBuffer v); @Bridge public static native void glUniform1i(int location, int x); // @Bridge public static native void glUniform1iv (int location, int count, // IntBuffer v); @Bridge public static native void glUniform2f(int location, float x, float y); // @Bridge public static native void glUniform2fv (int location, int count, // FloatBuffer v); @Bridge public static native void glUniform2i(int location, int x, int y); // @Bridge public static native void glUniform2iv (int location, int count, // IntBuffer v); @Bridge public static native void glUniform3f(int location, float x, float y, float z); // @Bridge public static native void glUniform3fv (int location, int count, // FloatBuffer v); @Bridge public static native void glUniform3i(int location, int x, int y, int z); // @Bridge public static native void glUniform3iv (int location, int count, // IntBuffer v); @Bridge public static native 
void glUniform4f(int location, float x, float y, float z, float w); // @Bridge public static native void glUniform4fv (int location, int count, // FloatBuffer v); @Bridge public static native void glUniform4i(int location, int x, int y, int z, int w); // @Bridge public static native void glUniform4iv (int location, int count, // IntBuffer v); // @Bridge public static native void glUniformMatrix2fv (int location, int // count, boolean transpose, FloatBuffer value); // @Bridge public static native void glUniformMatrix3fv (int location, int // count, boolean transpose, FloatBuffer value); // @Bridge public static native void glUniformMatrix4fv (int location, int // count, boolean transpose, FloatBuffer value); @Bridge public static native void glUseProgram(int program); // @Bridge public static native void glValidateProgram (int program); // @Bridge public static native void glVertexAttrib1f (int indx, float x); // @Bridge public static native void glVertexAttrib1fv (int indx, // FloatBuffer values); // @Bridge public static native void glVertexAttrib2f (int indx, float x, // float y); // @Bridge public static native void glVertexAttrib2fv (int indx, // FloatBuffer values); // @Bridge public static native void glVertexAttrib3f (int indx, float x, // float y, float z); // @Bridge public static native void glVertexAttrib3fv (int indx, // FloatBuffer values); // @Bridge public static native void glVertexAttrib4f (int indx, float x, // float y, float z, float w); // @Bridge public static native void glVertexAttrib4fv (int indx, // FloatBuffer values); // @Bridge public static native void glVertexAttribPointer (int indx, int // size, int type, boolean normalized, int stride, Buffer ptr); @Bridge public static native void glVertexAttribPointer(int indx, int size, int type, boolean normalized, int stride, int ptr); // @Bridge public static native void glViewport (int x, int y, int width, // int height); }
apache-2.0
CalebFenton/computer_craft
biogenerator/generator-control.lua
374
-- Generator control loop: listens on modem channel 7 and toggles the
-- bundled redstone output on the right side based on incoming messages.
-- "enable"  -> clears the bundled output (signal off); any other message
-- asserts the white wire. NOTE(review): this implies the generator is
-- wired active-low (redstone signal halts it) -- confirm against the rig.
local modem = peripheral.wrap("back")
modem.open(7)

while true do
    local event, modemSide, senderChannel, replyChannel, message, distance =
        os.pullEvent("modem_message")
    -- BUGFIX: Lua concatenates strings with "..", not "+". The original
    -- "+" performs arithmetic coercion and errors on non-numeric strings.
    write("Received: " .. message .. "\n")
    if message == "enable" then
        rs.setBundledOutput("right", 0)
    else
        rs.setBundledOutput("right", colors.white)
    end
    -- Brief yield between events to avoid hogging the computer.
    sleep(0.1)
end
apache-2.0
petracvv/cas
core/cas-server-core-authentication/src/main/java/org/apereo/cas/services/DefaultMultifactorAuthenticationProviderBypass.java
11212
package org.apereo.cas.services;

import org.apache.commons.lang3.StringUtils;
import org.apereo.cas.authentication.Authentication;
import org.apereo.cas.authentication.AuthenticationHandler;
import org.apereo.cas.authentication.AuthenticationManager;
import org.apereo.cas.authentication.DefaultAuthenticationBuilder;
import org.apereo.cas.authentication.principal.Principal;
import org.apereo.cas.configuration.model.support.mfa.MultifactorAuthenticationProperties;
import org.apereo.cas.util.CollectionUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;

/**
 * This is {@link DefaultMultifactorAuthenticationProviderBypass}.
 * Evaluates the configured bypass rules (principal attributes, authentication
 * attributes, authentication method/handler names, credential types and the
 * registered service's multifactor policy) to decide whether a multifactor
 * authentication request should be honored or bypassed.
 *
 * @author Misagh Moayyed
 * @since 5.0.0
 */
public class DefaultMultifactorAuthenticationProviderBypass implements MultifactorAuthenticationProviderBypass {
    private static final Logger LOGGER = LoggerFactory.getLogger(DefaultMultifactorAuthenticationProviderBypass.class);
    private static final long serialVersionUID = 3720922341350004543L;

    private final MultifactorAuthenticationProperties.BaseProvider.Bypass bypass;

    public DefaultMultifactorAuthenticationProviderBypass(final MultifactorAuthenticationProperties.BaseProvider.Bypass bypass) {
        this.bypass = bypass;
    }

    /**
     * {@inheritDoc}
     * Returns {@code false} (i.e. bypass) as soon as any rule matches; each
     * matching branch records the bypass decision on the authentication so
     * subsequent requests remember it.
     */
    @Override
    public boolean isAuthenticationRequestHonored(final Authentication authentication,
                                                  final RegisteredService registeredService,
                                                  final MultifactorAuthenticationProvider provider) {
        final Principal principal = authentication.getPrincipal();

        final boolean bypassByPrincipal = locateMatchingAttributeBasedOnPrincipalAttributes(bypass, principal);
        if (bypassByPrincipal) {
            LOGGER.debug("Bypass rules for principal [{}] indicate the request may be ignored", principal.getId());
            updateAuthenticationToRememberBypass(authentication, provider, principal);
            return false;
        }

        final boolean bypassByAuthn = locateMatchingAttributeBasedOnAuthenticationAttributes(bypass, authentication);
        if (bypassByAuthn) {
            LOGGER.debug("Bypass rules for authentication [{}] indicate the request may be ignored", principal.getId());
            updateAuthenticationToRememberBypass(authentication, provider, principal);
            return false;
        }

        final boolean bypassByAuthnMethod = locateMatchingAttributeValue(
                AuthenticationManager.AUTHENTICATION_METHOD_ATTRIBUTE,
                bypass.getAuthenticationMethodName(),
                authentication.getAttributes(), false
        );
        if (bypassByAuthnMethod) {
            LOGGER.debug("Bypass rules for authentication method [{}] indicate the request may be ignored", principal.getId());
            updateAuthenticationToRememberBypass(authentication, provider, principal);
            return false;
        }

        final boolean bypassByHandlerName = locateMatchingAttributeValue(
                AuthenticationHandler.SUCCESSFUL_AUTHENTICATION_HANDLERS,
                bypass.getAuthenticationHandlerName(),
                authentication.getAttributes(), false
        );
        if (bypassByHandlerName) {
            LOGGER.debug("Bypass rules for authentication handlers [{}] indicate the request may be ignored", principal.getId());
            updateAuthenticationToRememberBypass(authentication, provider, principal);
            return false;
        }

        final boolean bypassByCredType = locateMatchingCredentialType(authentication, bypass.getCredentialClassType());
        if (bypassByCredType) {
            LOGGER.debug("Bypass rules for credential types [{}] indicate the request may be ignored", principal.getId());
            updateAuthenticationToRememberBypass(authentication, provider, principal);
            return false;
        }

        final boolean bypassByService = locateMatchingRegisteredServiceForBypass(authentication, registeredService);
        if (bypassByService) {
            LOGGER.debug("Bypass rules for service [{}] indicate the request may be ignored", principal.getId());
            updateAuthenticationToRememberBypass(authentication, provider, principal);
            return false;
        }

        updateAuthenticationToForgetBypass(authentication, provider, principal);
        return true;
    }

    /**
     * Records on the authentication that no bypass applies, so the MFA flow
     * will be enforced.
     */
    private void updateAuthenticationToForgetBypass(final Authentication authentication,
                                                    final MultifactorAuthenticationProvider provider,
                                                    final Principal principal) {
        // BUG FIX: this message and the one in updateAuthenticationToRememberBypass
        // were swapped; the "forget" path means the request may NOT be ignored.
        LOGGER.debug("Bypass rules for principal [{}] indicate the request may NOT be ignored", principal.getId());
        final Authentication newAuthn = DefaultAuthenticationBuilder.newInstance(authentication)
                .addAttribute(AUTHENTICATION_ATTRIBUTE_BYPASS_MFA, Boolean.FALSE)
                .build();
        LOGGER.debug("Updated authentication session to forget bypass for [{}] via [{}]",
                provider.getId(), AUTHENTICATION_ATTRIBUTE_BYPASS_MFA);
        authentication.updateAll(newAuthn);
    }

    /**
     * Records on the authentication that bypass applies for the given provider.
     */
    private void updateAuthenticationToRememberBypass(final Authentication authentication,
                                                      final MultifactorAuthenticationProvider provider,
                                                      final Principal principal) {
        LOGGER.debug("Bypass rules for principal [{}] indicate the request may be ignored", principal.getId());
        final Authentication newAuthn = DefaultAuthenticationBuilder.newInstance(authentication)
                .addAttribute(AUTHENTICATION_ATTRIBUTE_BYPASS_MFA, Boolean.TRUE)
                .addAttribute(AUTHENTICATION_ATTRIBUTE_BYPASS_MFA_PROVIDER, provider.getId())
                .build();
        LOGGER.debug("Updated authentication session to remember bypass for [{}] via [{}]",
                provider.getId(), AUTHENTICATION_ATTRIBUTE_BYPASS_MFA);
        authentication.updateAll(newAuthn);
    }

    /**
     * Locate matching registered service property boolean.
     *
     * @param authentication    the authentication
     * @param registeredService the registered service
     * @return true/false
     */
    protected boolean locateMatchingRegisteredServiceForBypass(final Authentication authentication,
                                                               final RegisteredService registeredService) {
        if (registeredService != null && registeredService.getMultifactorPolicy() != null) {
            return registeredService.getMultifactorPolicy().isBypassEnabled();
        }
        return false;
    }

    /**
     * Locate matching credential type boolean.
     *
     * @param authentication      the authentication
     * @param credentialClassType regex matched against each credential's class name
     * @return the boolean
     */
    protected boolean locateMatchingCredentialType(final Authentication authentication, final String credentialClassType) {
        return StringUtils.isNotBlank(credentialClassType)
                && authentication.getCredentials()
                .stream()
                .anyMatch(e -> e.getCredentialClass().getName().matches(credentialClassType));
    }

    /**
     * Skip bypass and support event based on authentication attributes.
     *
     * @param bypass the bypass
     * @param authn  the authn
     * @return the boolean
     */
    protected boolean locateMatchingAttributeBasedOnAuthenticationAttributes(
            final MultifactorAuthenticationProperties.BaseProvider.Bypass bypass, final Authentication authn) {
        return locateMatchingAttributeValue(bypass.getAuthenticationAttributeName(),
                bypass.getAuthenticationAttributeValue(), authn.getAttributes());
    }

    /**
     * Skip bypass and support event based on principal attributes.
     *
     * @param bypass    the bypass
     * @param principal the principal
     * @return the boolean
     */
    protected boolean locateMatchingAttributeBasedOnPrincipalAttributes(
            final MultifactorAuthenticationProperties.BaseProvider.Bypass bypass, final Principal principal) {
        // BUG FIX: the principal rule must compare against the configured
        // *principal* attribute value; the original passed
        // bypass.getAuthenticationAttributeValue(), so a configured
        // principal-attribute value could never match.
        return locateMatchingAttributeValue(bypass.getPrincipalAttributeName(),
                bypass.getPrincipalAttributeValue(), principal.getAttributes());
    }

    /**
     * Locate matching attribute value boolean.
     *
     * @param attrName   the attr name
     * @param attrValue  the attr value
     * @param attributes the attributes
     * @return true/false
     */
    protected boolean locateMatchingAttributeValue(final String attrName, final String attrValue,
                                                   final Map<String, Object> attributes) {
        return locateMatchingAttributeValue(attrName, attrValue, attributes, true);
    }

    /**
     * Evaluate attribute rules for bypass.
     *
     * @param attrName               regex matched against attribute names
     * @param attrValue              regex matched against attribute values; may be blank
     * @param attributes             the attributes
     * @param matchIfNoValueProvided result to return when a name matches but no value is configured
     * @return true if a matching attribute name/value is found
     */
    protected boolean locateMatchingAttributeValue(final String attrName, final String attrValue,
                                                   final Map<String, Object> attributes,
                                                   final boolean matchIfNoValueProvided) {
        LOGGER.debug("Locating matching attribute [{}] with value [{}] amongst the attribute collection [{}]",
                attrName, attrValue, attributes);
        if (StringUtils.isBlank(attrName)) {
            LOGGER.debug("Failed to match since attribute name is undefined");
            return false;
        }

        final Set<Map.Entry<String, Object>> names = attributes.entrySet()
                .stream()
                .filter(e -> {
                    LOGGER.debug("Attempting to match [{}] against [{}]", attrName, e.getKey());
                    return e.getKey().matches(attrName);
                }).collect(Collectors.toSet());

        LOGGER.debug("Found [{}] attributes relevant for multifactor authentication bypass", names.size());
        if (names.isEmpty()) {
            return false;
        }

        if (StringUtils.isBlank(attrValue)) {
            LOGGER.debug("No attribute value to match is provided; Match result is set to [{}]", matchIfNoValueProvided);
            return matchIfNoValueProvided;
        }

        final Set<Map.Entry<String, Object>> values = names
                .stream()
                .filter(e -> {
                    final Set<Object> valuesCol = CollectionUtils.toCollection(e.getValue());
                    LOGGER.debug("Matching attribute [{}] with values [{}] against [{}]",
                            e.getKey(), valuesCol, attrValue);
                    return valuesCol.stream().anyMatch(v -> v.toString().matches(attrValue));
                }).collect(Collectors.toSet());

        LOGGER.debug("Matching attribute values remaining are [{}]", values);
        return !values.isEmpty();
    }
}
apache-2.0
tayeumi/HFC
HFC/Forms/frmMaps.cs
3329
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Text;
using System.Windows.Forms;
using DevExpress.XtraEditors;
using HFC.Class;
using System.Threading;

namespace HFC.Forms
{
    /// <summary>
    /// Map viewer form. Hosts an embedded web server (MyWebServer) on a
    /// background thread and drives a WebBrowser control to show either the
    /// normal node map or a warning page when a node has lost signal.
    /// </summary>
    public partial class frmMaps : DevExpress.XtraEditors.XtraForm
    {
        public frmMaps()
        {
            InitializeComponent();
            LoadSV(); // start the embedded web server before any navigation
        }

        // Background thread running the embedded HTTP server.
        private Thread serverThread;
        MyWebServer mWebserver = new MyWebServer();

        private void frmMaps_FormClosing(object sender, FormClosingEventArgs e)
        {
            // serverThread = null;
            // Stop the server explicitly; the thread is a background thread
            // and will not keep the process alive.
            mWebserver.Stop();
        }

        /// <summary>Starts MyWebServer on a daemon thread.</summary>
        void LoadSV()
        {
            serverThread = new Thread(new ThreadStart(mWebserver.Start));
            serverThread.IsBackground = true;
            serverThread.Start();
            // this.Text = "Listening...";
        }

        private void frmMaps_Load(object sender, EventArgs e)
        {
            //webControl.Navigate("http://127.0.0.1:100/");
            checknodeDown();
        }

        /// <summary>
        /// Publishes a hard-coded failure marker to the embedded server and
        /// shows the warning page. NOTE(review): appears unused by this form's
        /// event handlers — confirm before removing.
        /// </summary>
        void Nodefail()
        {
            MyWebServer.detail = "<span style=color:red><b>CẢNH BÁO MẤT TÍN HIỆU:</b></span> <br>NODE: ABCXYZ";
            MyWebServer.lat = "10.8133611";
            MyWebServer.lng = "106.6974304";
            webControl.Navigate("http://127.0.0.1:100/nodewarning.html");
            // Busy-wait (pumping the message loop) until the page finishes loading.
            while (webControl.ReadyState != WebBrowserReadyState.Complete)
            {
                Application.DoEvents();
                System.Threading.Thread.Sleep(50);
            }
            webControl.Refresh();
        }

        /// <summary>
        /// Polls device status: any device whose Description contains a comma
        /// and whose Value1 is "0" is treated as down. Shows the warning page
        /// when a failure is found, otherwise the normal node map.
        /// </summary>
        void checknodeDown()
        {
            Class.NW_Device clsnode = new Class.NW_Device();
            DataTable dt = clsnode.NW_Device_GetStatic();
            int checkloi = 0; // 1 = at least one node down
            for (int i = 0; i < dt.Rows.Count; i++)
            {
                if (dt.Rows[i]["Description"].ToString().Length > 3)
                {
                    if (dt.Rows[i]["Description"].ToString().IndexOf(',') > 0)
                    {
                        if (dt.Rows[i]["Value1"].ToString() == "0")
                        {
                            checkloi = 1;
                        }
                    }
                }
            }
            if (checkloi == 0)
            {
                NodeToMaps();
            }
            else
            {
                // Millisecond suffix acts as a cache-buster for the browser.
                webControl.Navigate("http://127.0.0.1:100/nodewarning.html?" + DateTime.Now.Millisecond);
                while (webControl.ReadyState != WebBrowserReadyState.Complete)
                {
                    Application.DoEvents();
                    System.Threading.Thread.Sleep(50);
                }
                webControl.Refresh();
            }
        }

        /// <summary>Navigates to the normal node map page (cache-busted).</summary>
        void NodeToMaps()
        {
            webControl.Navigate("http://127.0.0.1:100/node.html?"
                + DateTime.Now.Millisecond);
        }

        private void timer1_Tick(object sender, EventArgs e)
        {
            // Best-effort periodic refresh; failures (e.g. transient DB errors)
            // are deliberately ignored so the timer keeps running.
            try
            {
                checknodeDown();
            }
            catch { }
        }

        private void frmMaps_FormClosed(object sender, FormClosedEventArgs e)
        {

        }
    }
}
apache-2.0
oMMuCo/HPTT-FT-UGM-Official-Website
protected/views/wall_like/_search.php
1400
<?php
/**
 * Ommu Wall Likes (ommu-wall-likes)
 * Search form partial: renders one text field per searchable attribute of
 * OmmuWallLikes and submits them via GET back to the current route.
 *
 * @var $this WalllikeController
 * @var $model OmmuWallLikes
 * @var $form CActiveForm
 * version: 1.1.0
 *
 * @author Putra Sudaryanto <putra@sudaryanto.id>
 * @copyright Copyright (c) 2015 Ommu Platform (ommu.co)
 * @link https://github.com/oMMu/Ommu-Core
 * @contact (+62)856-299-4114
 *
 */
?>

<?php $form=$this->beginWidget('CActiveForm', array(
	'action'=>Yii::app()->createUrl($this->route),
	'method'=>'get',
)); ?>

	<ul>
		<li>
			<?php echo $model->getAttributeLabel('like_id'); ?><br/>
			<?php echo $form->textField($model,'like_id',array('size'=>11,'maxlength'=>11)); ?>
		</li>

		<li>
			<?php echo $model->getAttributeLabel('wall_id'); ?><br/>
			<?php echo $form->textField($model,'wall_id',array('size'=>11,'maxlength'=>11)); ?>
		</li>

		<li>
			<?php echo $model->getAttributeLabel('user_id'); ?><br/>
			<?php echo $form->textField($model,'user_id',array('size'=>11,'maxlength'=>11)); ?>
		</li>

		<li>
			<?php echo $model->getAttributeLabel('likes_date'); ?><br/>
			<?php echo $form->textField($model,'likes_date'); ?>
		</li>

		<li>
			<?php echo $model->getAttributeLabel('likes_ip'); ?><br/>
			<?php echo $form->textField($model,'likes_ip',array('size'=>20,'maxlength'=>20)); ?>
		</li>

		<li class="submit">
			<?php echo CHtml::submitButton(Yii::t('phrase', 'Search')); ?>
		</li>
	</ul>

<?php $this->endWidget(); ?>
apache-2.0
SonicGD/google-adwords-api-light
Google/Api/Ads/AdWords/v201605/classes/ManagedCustomerService.require.php
3583
<?php
// Generated dependency loader for the AdWords v201605 ManagedCustomerService
// client classes. The require order is preserved as generated; do not reorder
// by hand — later classes may extend/reference earlier ones.
require_once('DateRange.php');
require_once('OrderBy.php');
require_once('Paging.php');
require_once('Predicate.php');
require_once('SoapRequestHeader.php');
require_once('SoapResponseHeader.php');
require_once('ApiError.php');
require_once('ApplicationException.php');
require_once('Selector.php');
require_once('Operation.php');
require_once('Page.php');
require_once('AuthenticationErrorReason.php');
require_once('AuthorizationErrorReason.php');
require_once('ClientTermsErrorReason.php');
require_once('DatabaseErrorReason.php');
require_once('DateErrorReason.php');
require_once('DistinctErrorReason.php');
require_once('IdErrorReason.php');
require_once('InternalApiErrorReason.php');
require_once('NotEmptyErrorReason.php');
require_once('NullErrorReason.php');
require_once('OperationAccessDeniedReason.php');
require_once('Operator.php');
require_once('OperatorErrorReason.php');
require_once('PredicateOperator.php');
require_once('QuotaCheckErrorReason.php');
require_once('RangeErrorReason.php');
require_once('RateExceededErrorReason.php');
require_once('ReadOnlyErrorReason.php');
require_once('RejectedErrorReason.php');
require_once('RequestErrorReason.php');
require_once('RequiredErrorReason.php');
require_once('SelectorErrorReason.php');
require_once('SizeLimitErrorReason.php');
require_once('SortOrder.php');
require_once('StringFormatErrorReason.php');
require_once('StringLengthErrorReason.php');
require_once('ManagedCustomerLabel.php');
require_once('ManagedCustomerLabelOperation.php');
require_once('ManagedCustomerLabelReturnValue.php');
require_once('ManagedCustomerServiceError.php');
require_once('PendingInvitationSelector.php');
require_once('AccountLabel.php');
require_once('ManagedCustomerLink.php');
require_once('LinkOperation.php');
require_once('MoveOperation.php');
require_once('MutateLinkResults.php');
require_once('MutateManagerResults.php');
require_once('ManagedCustomer.php');
require_once('ManagedCustomerOperation.php');
require_once('ManagedCustomerPage.php');
require_once('ManagedCustomerReturnValue.php');
require_once('PendingInvitation.php');
require_once('LinkStatus.php');
require_once('ManagedCustomerServiceErrorReason.php');
require_once('ManagedCustomerServiceGet.php');
require_once('ManagedCustomerServiceGetResponse.php');
require_once('GetPendingInvitations.php');
require_once('GetPendingInvitationsResponse.php');
require_once('ManagedCustomerServiceMutate.php');
require_once('ManagedCustomerServiceMutateResponse.php');
require_once('MutateLabel.php');
require_once('MutateLabelResponse.php');
require_once('MutateLink.php');
require_once('MutateLinkResponse.php');
require_once('MutateManager.php');
require_once('MutateManagerResponse.php');
require_once('AuthenticationError.php');
require_once('AuthorizationError.php');
require_once('ClientTermsError.php');
require_once('DateError.php');
require_once('DistinctError.php');
require_once('IdError.php');
require_once('InternalApiError.php');
require_once('NotEmptyError.php');
require_once('NullError.php');
require_once('OperationAccessDenied.php');
require_once('OperatorError.php');
require_once('QuotaCheckError.php');
require_once('RangeError.php');
require_once('RateExceededError.php');
require_once('ReadOnlyError.php');
require_once('RejectedError.php');
require_once('RequestError.php');
require_once('RequiredError.php');
require_once('SelectorError.php');
require_once('SizeLimitError.php');
require_once('StringFormatError.php');
require_once('StringLengthError.php');
require_once('DatabaseError.php');
require_once('ApiException.php');
apache-2.0
SonicGD/google-adwords-api-light
Google/Api/Ads/AdWords/v201605/classes/FeedMappingServiceMutateResponse.php
803
<?php
/**
 * Generated SOAP response wrapper for FeedMappingService.mutate.
 *
 * @package Google_Api_Ads_AdWords_v201605
 * @subpackage v201605
 */
class FeedMappingServiceMutateResponse
{

    const WSDL_NAMESPACE = "https://adwords.google.com/api/adwords/cm/v201605";
    const XSI_TYPE = "";

    /**
     * @access public
     * @var FeedMappingReturnValue
     */
    public $rval;

    /**
     * Gets the namespace of this class
     * @return string the namespace of this class
     */
    public function getNamespace()
    {
        return self::WSDL_NAMESPACE;
    }

    /**
     * Gets the xsi:type name of this class
     * @return string the xsi:type name of this class
     */
    public function getXsiTypeName()
    {
        return self::XSI_TYPE;
    }

    public function __construct($rval = null)
    {
        $this->rval = $rval;
    }
}
apache-2.0
poisondog/java.commons
src/main/java/poisondog/vfs/http/HttpFileFactory.java
3527
/*
 * Copyright (C) 2016 Adam Huang <poisondog@gmail.com>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package poisondog.vfs.http;

import java.io.FileNotFoundException;
import java.io.InputStream;
import java.io.IOException;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeParseException;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.io.IOUtils;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
import poisondog.net.http.HttpGet;
import poisondog.net.http.HttpHead;
import poisondog.net.http.HttpParameter;
import poisondog.net.URLUtils;
import poisondog.string.ReplacePath;
import poisondog.vfs.IFile;
import poisondog.vfs.IFileFactory;

/**
 * Factory for HTTP-backed {@link IFile} instances. URLs ending in "/" are
 * treated as folders; everything else as data files. Metadata (existence,
 * size, last-modified) is obtained via HEAD requests.
 *
 * @author Adam Huang <poisondog@gmail.com>
 * @since 2016-12-13
 */
public class HttpFileFactory implements IFileFactory {
	private HttpParameter mParameter;
	private HttpGet mGet;
	private HttpHead mHead;

	/**
	 * Constructor without credentials.
	 */
	public HttpFileFactory() {
		this(null, null);
	}

	/**
	 * Constructor with HTTP basic-auth credentials (either may be null).
	 */
	public HttpFileFactory(String username, String password) {
		mGet = new HttpGet();
		mHead = new HttpHead();
		mParameter = new HttpParameter();
		mParameter.setUsername(username);
		mParameter.setPassword(password);
	}

	public void setUsername(String username) {
		mParameter.setUsername(username);
	}

	public void setPassword(String password) {
		mParameter.setPassword(password);
	}

	@Override
	public IFile getFile(String url) {
		// Convention: a trailing slash marks a folder.
		if (url.endsWith("/"))
			return new HttpFolder(this, url);
		else
			return new HttpData(this, url);
	}

	public void setGet(HttpGet get) {
		mGet = get;
	}

	public void setHead(HttpHead head) {
		mHead = head;
	}

	/**
	 * Checks existence via a HEAD request; a 404 surfaces as
	 * FileNotFoundException from HttpHead and maps to false.
	 */
	public boolean exists(String url) throws IOException {
		mParameter.setUrl(url);
		try {
			mHead.execute(mParameter);
		} catch(FileNotFoundException e) {
			return false;
		}
		return true;
	}

	/**
	 * A file is considered hidden when its name starts with a dot.
	 */
	public boolean isHidden(String url) {
		return URLUtils.file(url).startsWith(".");
	}

	public InputStream get(String url) throws IOException {
		mParameter.setUrl(url);
		return mGet.execute(mParameter).getInputStream();
	}

	public long getContentSize(String url) throws IOException {
		mParameter.setUrl(url);
		return Long.parseLong(mHead.execute(mParameter).getHeader("Content-Length"));
	}

	/**
	 * Returns the Last-Modified time in epoch milliseconds, or 0 when the
	 * header is absent or unparseable.
	 */
	public long getLastModifiedTime(String url) throws IOException {
		mParameter.setUrl(url);
		String last = mHead.execute(mParameter).getHeader("Last-Modified");
		if (last == null)
			return 0L;
		// BUG FIX: Last-Modified is an RFC 1123 date string such as
		// "Wed, 21 Oct 2015 07:28:00 GMT"; the original Long.parseLong(last)
		// threw NumberFormatException on every server that sent the header.
		try {
			return ZonedDateTime.parse(last, DateTimeFormatter.RFC_1123_DATE_TIME)
					.toInstant().toEpochMilli();
		} catch (DateTimeParseException e) {
			return 0L;
		}
	}

	/**
	 * Lists a directory by scraping anchor tags from the server's HTML index.
	 * Absolute hrefs are re-rooted onto the given url; relative hrefs are
	 * appended to it.
	 */
	public List<IFile> list(String url) throws IOException {
		Document doc = Jsoup.parse(IOUtils.toString(get(url), "utf8"));
		Elements newsHeadlines = doc.select("a");
		List<IFile> result = new ArrayList<IFile>();
		for (Element e : newsHeadlines) {
			String path = URLUtils.path(e.attr("href"));
			if (path.startsWith("/")) {
				ReplacePath task = new ReplacePath(path);
				result.add(getFile(task.process(url)));
			} else {
				result.add(getFile(url + path));
			}
		}
		return result;
	}
}
apache-2.0
anton-johansson/elasticsearch-shell
src/main/java/com/antonjohansson/elasticsearchshell/AppConfiguration.java
1697
/**
 * Copyright 2017 Anton Johansson
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.antonjohansson.elasticsearchshell;

import static com.antonjohansson.elasticsearchshell.utils.Constants.CONFIGURATION_FILE;

import java.io.File;
import java.util.logging.Logger;

import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.shell.support.logging.HandlerUtils;

import com.antonjohansson.elasticsearchshell.shell.output.Console;

/**
 * Configures various things for the application.
 */
@Configuration
class AppConfiguration
{
    /**
     * Provides the configuration directory ({@code ~/.elasticsearch-shell}),
     * creating it if necessary.
     *
     * @return the configuration directory
     * @throws IllegalStateException if the directory cannot be created
     */
    @Bean
    @Qualifier(CONFIGURATION_FILE)
    public File getConfigurationPath()
    {
        String homePath = System.getProperty("user.home");
        File home = new File(homePath);
        File configurationPath = new File(home, ".elasticsearch-shell");
        // ROBUSTNESS: the original ignored mkdirs()' return value, so a failed
        // creation (permissions, home on read-only FS) surfaced later as an
        // obscure error. mkdirs() also returns false when the directory already
        // exists, hence the isDirectory() check.
        if (!configurationPath.mkdirs() && !configurationPath.isDirectory())
        {
            throw new IllegalStateException("Could not create configuration directory: " + configurationPath);
        }
        return configurationPath;
    }

    /**
     * Provides the shell console logger.
     *
     * @return the logger for {@link Console}
     */
    @Bean
    public Logger getLogger()
    {
        return HandlerUtils.getLogger(Console.class);
    }
}
apache-2.0
NumericTechnology/Platanum.Net
KPComponents/KPComponents/KPJqGrid/KPItemModel.cs
2844
/*
 * Copyright 2011-2015 Numeric Technology
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
using KPComponents.Generic;
using KPEnumerator.KPComponents;
using KPGlobalization;
using System;
using System.ComponentModel;
using System.Web.UI;

namespace KPComponents
{
    /// <summary>
    /// Column model item for the KPJqGrid control. All state is kept in
    /// ViewState (inherited from StateManagedItem) so it survives postbacks.
    /// Defaults set by the constructor: Editable=false, Sortable=true, Width=100.
    /// <para>Authors: Juliano Tiago Rinaldi and
    /// Tiago Antonio Jacobi</para>
    /// </summary>
    [Serializable]
    public class KPItemModel : StateManagedItem
    {
        public KPItemModel()
        {
            Editable = false;
            Sortable = true;
            Width = 100;
        }

        #region Properties

        /// <summary>Column width in pixels; defaults to 100 when unset.</summary>
        public int Width
        {
            get
            {
                object o = ViewState["Width"];
                return o == null ? 100 : (int)o;
            }
            set { ViewState["Width"] = value; }
        }

        /// <summary>
        /// Whether the column is sortable. NOTE(review): the getter falls back
        /// to false when ViewState is empty, while the constructor default is
        /// true — the constructor normally masks this; confirm intent before
        /// relying on the getter default.
        /// </summary>
        public bool Sortable
        {
            get
            {
                object o = ViewState["Sortable"];
                return o == null ? false : (bool)o;
            }
            set { ViewState["Sortable"] = value; }
        }

        // TODO: Will not be implemented at this time
        // http://www.trirand.com/jqgridwiki/doku.php?id=wiki:common_rules
        [Browsable(false)]
        public bool Editable
        {
            get
            {
                object o = ViewState["Editable"];
                return o == null ? false : (bool)o;
            }
            set { ViewState["Editable"] = value; }
        }

        /// <summary>Name of the bound data field.</summary>
        public string FieldName
        {
            get
            {
                object o = ViewState["FieldName"];
                return o == null ? null : (String)o;
            }
            set { ViewState["FieldName"] = value; }
        }

        /// <summary>
        /// Column header caption. The setter stores the globalized (translated)
        /// string, so the getter returns the translated value.
        /// </summary>
        public string HeaderName
        {
            get
            {
                object o = ViewState["HeaderName"];
                return o == null ? null : (String)o;
            }
            set { ViewState["HeaderName"] = KPGlobalizationLanguage.GetString(value); }
        }

        /// <summary>Whether the column is rendered; defaults to true.</summary>
        public bool Visible
        {
            get
            {
                object o = ViewState["Visible"];
                return o == null ? true : (bool)o;
            }
            set { ViewState["Visible"] = value; }
        }

        #endregion
    }
}
apache-2.0
844348677/IBack
StreamExer/src/main/scala/StreamingExercise.scala
809
import javax.sql.ConnectionPoolDataSource

import org.apache.spark.SparkConf
import org.apache.spark.streaming.kafka.KafkaUtils
import org.apache.spark.streaming.{Seconds, StreamingContext}

/**
 * Spark Streaming word-count exercise: reads lines from a local socket
 * (nc -lk 9999), counts words per 5-second batch, and additionally counts
 * words over a sliding 30-second window evaluated every 10 seconds.
 *
 * Created by hdd on 4/29/16.
 */
object StreamingExercise {

  def main(args: Array[String]) {
    // Two local threads: one for the socket receiver, one for processing.
    val conf = new SparkConf().setMaster("local[2]").setAppName("NetworkWordCount")
    val ssc = new StreamingContext(conf, Seconds(5))

    val lines = ssc.socketTextStream("localhost", 9999)
    val words = lines.flatMap(_.split(" "))
    val pairs = words.map(word => (word, 1))

    // Per-batch counts.
    val wordCounts = pairs.reduceByKey(_ + _)
    wordCounts.print()

    // BUG FIX: the windowed stream was named "windowedWordConts" (typo) and
    // had no output operation, so Spark's lazy evaluation never materialized
    // it — the windowed computation was dead code.
    val windowedWordCounts =
      pairs.reduceByKeyAndWindow((a: Int, b: Int) => a + b, Seconds(30), Seconds(10))
    windowedWordCounts.print()

    //KafkaUtils
    ssc.start()
    ssc.awaitTermination()
  }
}
apache-2.0
HuangLS/neo4j
community/lucene-index/src/test/java/org/neo4j/index/impl/lucene/LuceneDataSourceTest.java
12563
/*
 * Copyright (c) 2002-2018 "Neo Technology,"
 * Network Engine for Objects in Lund AB [http://neotechnology.com]
 *
 * This file is part of Neo4j.
 *
 * Neo4j is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */
package org.neo4j.index.impl.lucene;

import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterAccessor;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;

import java.io.IOException;
import java.util.Map;

import org.neo4j.graphdb.Node;
import org.neo4j.graphdb.factory.GraphDatabaseSettings;
import org.neo4j.graphdb.index.IndexManager;
import org.neo4j.helpers.Exceptions;
import org.neo4j.helpers.collection.MapUtil;
import org.neo4j.kernel.DefaultFileSystemAbstraction;
import org.neo4j.kernel.configuration.Config;
import org.neo4j.kernel.impl.index.IndexConfigStore;
import org.neo4j.kernel.impl.index.IndexEntityType;
import org.neo4j.kernel.lifecycle.LifeRule;
import org.neo4j.test.TargetDirectory;
import org.neo4j.test.TargetDirectory.TestDirectory;

import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNotSame;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;

/**
 * Tests for {@link LuceneDataSource}: read-only mode behavior and the
 * LRU caching/eviction of index writers and searchers.
 */
public class LuceneDataSourceTest
{
    @Rule
    public final LifeRule life = new LifeRule( true );
    @Rule
    public final TestDirectory directory = TargetDirectory.testDirForTest( getClass() );
    @Rule
    public final ExpectedException expectedException = ExpectedException.none();

    private IndexConfigStore indexStore;
    private LuceneDataSource dataSource;

    @Before
    public void setUp()
    {
        indexStore = new IndexConfigStore( directory.directory(), new DefaultFileSystemAbstraction() );
        addIndex( "foo" );
    }

    @Test
    public void doNotTryToCommitWritersOnForceInReadOnlyMode() throws Throwable
    {
        IndexIdentifier indexIdentifier = identifier( "foo" );
        prepareIndexesByIdentifiers( indexIdentifier );
        stopDataSource();

        Config readOnlyConfig = new Config( readOnlyConfig(), GraphDatabaseSettings.class );
        LuceneDataSource readOnlyDataSource = life.add( new LuceneDataSource( directory.graphDbDir(),
                readOnlyConfig, indexStore, new DefaultFileSystemAbstraction() ) );
        assertNotNull( readOnlyDataSource.getIndexSearcher( indexIdentifier ) );

        readOnlyDataSource.force();
    }

    @Test
    public void notAllowIndexDeletionInReadOnlyMode() throws IOException
    {
        IndexIdentifier indexIdentifier = identifier( "foo" );
        prepareIndexesByIdentifiers( indexIdentifier );
        stopDataSource();

        Config readOnlyConfig = new Config( readOnlyConfig(), GraphDatabaseSettings.class );
        dataSource = life.add( new LuceneDataSource( directory.graphDbDir(), readOnlyConfig, indexStore,
                new DefaultFileSystemAbstraction() ) );
        expectedException.expect( IllegalStateException.class );
        expectedException.expectMessage( "Index deletion in read only mode is not supported." );
        dataSource.deleteIndex( indexIdentifier, false );
    }

    @Test
    public void useReadOnlyIndexSearcherInReadOnlyMode() throws IOException
    {
        IndexIdentifier indexIdentifier = identifier( "foo" );
        prepareIndexesByIdentifiers( indexIdentifier );
        stopDataSource();

        Config readOnlyConfig = new Config( readOnlyConfig(), GraphDatabaseSettings.class );
        dataSource = life.add( new LuceneDataSource( directory.graphDbDir(), readOnlyConfig, indexStore,
                new DefaultFileSystemAbstraction() ) );

        IndexReference indexSearcher = dataSource.getIndexSearcher( indexIdentifier );
        assertTrue( "Read only index reference should be used in read only mode.",
                ReadOnlyIndexReference.class.isInstance( indexSearcher ) );
    }

    @Test
    public void refreshReadOnlyIndexSearcherInReadOnlyMode() throws IOException
    {
        IndexIdentifier indexIdentifier = identifier( "foo" );
        prepareIndexesByIdentifiers( indexIdentifier );
        stopDataSource();

        Config readOnlyConfig = new Config( readOnlyConfig(), GraphDatabaseSettings.class );
        dataSource = life.add( new LuceneDataSource( directory.graphDbDir(), readOnlyConfig, indexStore,
                new DefaultFileSystemAbstraction() ) );

        // Repeated lookups in read-only mode must keep returning the same
        // (never-refreshed) reference.
        IndexReference indexSearcher = dataSource.getIndexSearcher( indexIdentifier );
        IndexReference indexSearcher2 = dataSource.getIndexSearcher( indexIdentifier );
        IndexReference indexSearcher3 = dataSource.getIndexSearcher( indexIdentifier );
        IndexReference indexSearcher4 = dataSource.getIndexSearcher( indexIdentifier );
        assertSame( "Refreshed read only searcher should be the same.", indexSearcher, indexSearcher2 );
        assertSame( "Refreshed read only searcher should be the same.", indexSearcher2, indexSearcher3 );
        assertSame( "Refreshed read only searcher should be the same.", indexSearcher3, indexSearcher4 );
    }

    @Test
    public void testShouldReturnIndexWriterFromLRUCache() throws Throwable
    {
        Config config = new Config( config(), GraphDatabaseSettings.class );
        dataSource = life.add( new LuceneDataSource( directory.graphDbDir(), config, indexStore,
                new DefaultFileSystemAbstraction() ) );
        IndexIdentifier identifier = identifier( "foo" );
        IndexWriter writer = dataSource.getIndexSearcher( identifier ).getWriter();
        assertSame( writer, dataSource.getIndexSearcher( identifier ).getWriter() );
    }

    @Test
    public void testShouldReturnIndexSearcherFromLRUCache() throws Throwable
    {
        Config config = new Config( config(), GraphDatabaseSettings.class );
        dataSource = life.add( new LuceneDataSource( directory.graphDbDir(), config, indexStore,
                new DefaultFileSystemAbstraction() ) );
        IndexIdentifier identifier = identifier( "foo" );
        IndexReference searcher = dataSource.getIndexSearcher( identifier );
        assertSame( searcher, dataSource.getIndexSearcher( identifier ) );
        searcher.close();
    }

    @Test
    public void testClosesOldestIndexWriterWhenCacheSizeIsExceeded() throws Throwable
    {
        addIndex( "bar" );
        addIndex( "baz" );
        Map<String, String> configMap = config();
        configMap.put( GraphDatabaseSettings.lucene_searcher_cache_size.name(), "2" );
        Config config = new Config( configMap, GraphDatabaseSettings.class );
        dataSource = life.add( new LuceneDataSource( directory.graphDbDir(), config, indexStore,
                new DefaultFileSystemAbstraction() ) );
        IndexIdentifier fooIdentifier = identifier( "foo" );
        IndexIdentifier barIdentifier = identifier( "bar" );
        IndexIdentifier bazIdentifier = identifier( "baz" );
        IndexWriter fooIndexWriter = dataSource.getIndexSearcher( fooIdentifier ).getWriter();
        dataSource.getIndexSearcher( barIdentifier );
        assertFalse( IndexWriterAccessor.isClosed( fooIndexWriter ) );
        // Third distinct index exceeds the cache size of 2 and evicts foo.
        dataSource.getIndexSearcher( bazIdentifier );
        assertTrue( IndexWriterAccessor.isClosed( fooIndexWriter ) );
    }

    @Test
    public void testClosesOldestIndexSearcherWhenCacheSizeIsExceeded() throws Throwable
    {
        addIndex( "bar" );
        addIndex( "baz" );
        Map<String, String> configMap = config();
        configMap.put( GraphDatabaseSettings.lucene_searcher_cache_size.name(), "2" );
        Config config = new Config( configMap, GraphDatabaseSettings.class );
        dataSource = life.add( new LuceneDataSource( directory.graphDbDir(), config, indexStore,
                new DefaultFileSystemAbstraction() ) );
        IndexIdentifier fooIdentifier = identifier( "foo" );
        IndexIdentifier barIdentifier = identifier( "bar" );
        IndexIdentifier bazIdentifier = identifier( "baz" );
        IndexReference fooSearcher = dataSource.getIndexSearcher( fooIdentifier );
        IndexReference barSearcher = dataSource.getIndexSearcher( barIdentifier );
        assertFalse( fooSearcher.isClosed() );
        IndexReference bazSearcher = dataSource.getIndexSearcher( bazIdentifier );
        assertTrue( fooSearcher.isClosed() );
        barSearcher.close();
        bazSearcher.close();
    }

    @Test
    public void testRecreatesSearcherWhenRequestedAgain() throws Throwable
    {
        addIndex( "bar" );
        addIndex( "baz" );
        Map<String, String> configMap = config();
        configMap.put( GraphDatabaseSettings.lucene_searcher_cache_size.name(), "2" );
        Config config = new Config( configMap, GraphDatabaseSettings.class );
        dataSource = life.add( new LuceneDataSource( directory.graphDbDir(), config, indexStore,
                new DefaultFileSystemAbstraction() ) );
        IndexIdentifier fooIdentifier = identifier( "foo" );
        IndexIdentifier barIdentifier = identifier( "bar" );
        IndexIdentifier bazIdentifier = identifier( "baz" );
        IndexReference oldFooSearcher = dataSource.getIndexSearcher( fooIdentifier );
        IndexReference barSearcher = dataSource.getIndexSearcher( barIdentifier );
        IndexReference bazSearcher = dataSource.getIndexSearcher( bazIdentifier );
        // BUG FIX: the original requested bazIdentifier here, comparing foo's
        // searcher against baz's — trivially not-same, so the test never
        // verified that foo's searcher is recreated after cache eviction.
        // (The sibling writer test below correctly re-requests fooIdentifier.)
        IndexReference newFooSearcher = dataSource.getIndexSearcher( fooIdentifier );
        assertNotSame( oldFooSearcher, newFooSearcher );
        assertFalse( newFooSearcher.isClosed() );
        oldFooSearcher.close();
        barSearcher.close();
        bazSearcher.close();
        newFooSearcher.close();
    }

    @Test
    public void testRecreatesWriterWhenRequestedAgainAfterCacheEviction() throws Throwable
    {
        addIndex( "bar" );
        addIndex( "baz" );
        Map<String, String> configMap = config();
        configMap.put( GraphDatabaseSettings.lucene_searcher_cache_size.name(), "2" );
        Config config = new Config( configMap, GraphDatabaseSettings.class );
        dataSource = life.add( new LuceneDataSource( directory.graphDbDir(), config, indexStore,
                new DefaultFileSystemAbstraction() ) );
        IndexIdentifier fooIdentifier = identifier( "foo" );
        IndexIdentifier barIdentifier = identifier( "bar" );
        IndexIdentifier bazIdentifier = identifier( "baz" );
        IndexWriter oldFooIndexWriter = dataSource.getIndexSearcher( fooIdentifier ).getWriter();
        dataSource.getIndexSearcher( barIdentifier );
        dataSource.getIndexSearcher( bazIdentifier );
        IndexWriter newFooIndexWriter = dataSource.getIndexSearcher( fooIdentifier ).getWriter();
        assertNotSame( oldFooIndexWriter, newFooIndexWriter );
        assertFalse( IndexWriterAccessor.isClosed( newFooIndexWriter ) );
    }

    private Map<String, String> config()
    {
        return MapUtil.stringMap();
    }

    /** Opens a data source, touches the given index and forces it to disk. */
    private void prepareIndexesByIdentifiers( IndexIdentifier indexIdentifier )
    {
        Config config = new Config( config(), GraphDatabaseSettings.class );
        dataSource = life.add( new LuceneDataSource( directory.graphDbDir(), config, indexStore,
                new DefaultFileSystemAbstraction() ) );
        dataSource.getIndexSearcher( indexIdentifier );
        dataSource.force();
    }

    private Map<String, String> readOnlyConfig()
    {
        Map<String,String> config = config();
        config.put( GraphDatabaseSettings.read_only.name(), "true" );
        return config;
    }

    private void addIndex( String name )
    {
        indexStore.set( Node.class, name, MapUtil.stringMap( IndexManager.PROVIDER, "lucene", "type", "fulltext" ) );
    }

    private IndexIdentifier identifier( String name )
    {
        return new IndexIdentifier( IndexEntityType.Node, name );
    }

    private void stopDataSource() throws IOException
    {
        try
        {
            dataSource.stop();
            dataSource.shutdown();
        }
        catch ( Throwable e )
        {
            throw Exceptions.launderedException( IOException.class, e );
        }
    }
}
apache-2.0
schmittjoh/php-stubs
res/php/filesystem/functions/filectime.php
248
<?php

/**
 * Gets inode change time of file.
 *
 * Stub declaration mirroring PHP's built-in filectime(); the body is
 * intentionally empty (used for IDE/static-analysis purposes only).
 *
 * @phpstub
 *
 * @param string $filename Path of the file to inspect.
 *
 * @return int Returns the time the file was last changed, as a Unix timestamp.
 */
function filectime($filename)
{
}
apache-2.0
SH4DY/tripitude
backend/src/main/java/ac/tuwien/ase08/tripitude/service/interfaces/IRouteService.java
216
package ac.tuwien.ase08.tripitude.service.interfaces;

import ac.tuwien.ase08.tripitude.entity.Route;

/**
 * Service-layer contract for {@link Route} entities. Inherits the generic
 * CRUD operations from {@code IGenericService} keyed by {@code Long}.
 */
public interface IRouteService extends IGenericService<Route, Long> {

    /**
     * Loads the route identified by the given key.
     * NOTE(review): "full" presumably means the route is returned with its
     * associations eagerly loaded — confirm against the implementation.
     *
     * @param key primary key of the route
     * @return the resolved route
     */
    public Route findFullRoute(Long key);
}
apache-2.0
aws/aws-sdk-java
aws-java-sdk-simpleworkflow/src/main/java/com/amazonaws/services/simpleworkflow/model/transform/RegisterDomainRequestProtocolMarshaller.java
2708
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.simpleworkflow.model.transform;

import javax.annotation.Generated;

import com.amazonaws.SdkClientException;
import com.amazonaws.Request;
import com.amazonaws.http.HttpMethodName;
import com.amazonaws.services.simpleworkflow.model.*;
import com.amazonaws.transform.Marshaller;

import com.amazonaws.protocol.*;
import com.amazonaws.protocol.Protocol;
import com.amazonaws.annotation.SdkInternalApi;

/**
 * RegisterDomainRequest Marshaller.
 *
 * Code-generated protocol marshaller: converts a {@link RegisterDomainRequest}
 * into an HTTP {@link Request} using the AWS JSON protocol.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
@SdkInternalApi
public class RegisterDomainRequestProtocolMarshaller implements Marshaller<Request<RegisterDomainRequest>, RegisterDomainRequest> {

    // Static wire-level description of the operation: JSON protocol, POST to "/",
    // dispatched via the X-Amz-Target-style operation identifier below.
    private static final OperationInfo SDK_OPERATION_BINDING = OperationInfo.builder().protocol(Protocol.AWS_JSON).requestUri("/")
            .httpMethodName(HttpMethodName.POST).hasExplicitPayloadMember(false).hasPayloadMembers(true)
            .operationIdentifier("SimpleWorkflowService.RegisterDomain").serviceName("AmazonSimpleWorkflow").build();

    private final com.amazonaws.protocol.json.SdkJsonProtocolFactory protocolFactory;

    public RegisterDomainRequestProtocolMarshaller(com.amazonaws.protocol.json.SdkJsonProtocolFactory protocolFactory) {
        this.protocolFactory = protocolFactory;
    }

    /**
     * Marshalls the request into an HTTP request.
     *
     * @param registerDomainRequest the model object to marshall; must not be null
     * @return the marshalled HTTP request
     * @throws SdkClientException if the argument is null or marshalling fails
     */
    public Request<RegisterDomainRequest> marshall(RegisterDomainRequest registerDomainRequest) {

        if (registerDomainRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }

        try {
            // Delegate the field-by-field serialization to the generated member
            // marshaller; start/finish bracket the JSON document.
            final ProtocolRequestMarshaller<RegisterDomainRequest> protocolMarshaller = protocolFactory.createProtocolMarshaller(SDK_OPERATION_BINDING,
                    registerDomainRequest);

            protocolMarshaller.startMarshalling();
            RegisterDomainRequestMarshaller.getInstance().marshall(registerDomainRequest, protocolMarshaller);
            return protocolMarshaller.finishMarshalling();
        } catch (Exception e) {
            // Wrap any failure, preserving the cause for diagnosis.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
apache-2.0
aws/aws-sdk-java
aws-java-sdk-comprehend/src/main/java/com/amazonaws/services/comprehend/model/DescribeTopicsDetectionJobRequest.java
3696
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.comprehend.model;

import java.io.Serializable;
import javax.annotation.Generated;

import com.amazonaws.AmazonWebServiceRequest;

/**
 * Code-generated request object for the DescribeTopicsDetectionJob API.
 * Carries a single member, the job identifier.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/comprehend-2017-11-27/DescribeTopicsDetectionJob"
 *      target="_top">AWS API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DescribeTopicsDetectionJobRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /**
     * <p>
     * The identifier assigned by the user to the detection job.
     * </p>
     */
    private String jobId;

    /**
     * <p>
     * The identifier assigned by the user to the detection job.
     * </p>
     * 
     * @param jobId
     *        The identifier assigned by the user to the detection job.
     */
    public void setJobId(String jobId) {
        this.jobId = jobId;
    }

    /**
     * <p>
     * The identifier assigned by the user to the detection job.
     * </p>
     * 
     * @return The identifier assigned by the user to the detection job.
     */
    public String getJobId() {
        return this.jobId;
    }

    /**
     * <p>
     * The identifier assigned by the user to the detection job.
     * </p>
     * 
     * @param jobId
     *        The identifier assigned by the user to the detection job.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DescribeTopicsDetectionJobRequest withJobId(String jobId) {
        setJobId(jobId);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getJobId() != null)
            sb.append("JobId: ").append(getJobId());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;

        if (obj instanceof DescribeTopicsDetectionJobRequest == false)
            return false;
        DescribeTopicsDetectionJobRequest other = (DescribeTopicsDetectionJobRequest) obj;
        // XOR detects "exactly one side null"; only then compare values.
        if (other.getJobId() == null ^ this.getJobId() == null)
            return false;
        if (other.getJobId() != null && other.getJobId().equals(this.getJobId()) == false)
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;

        hashCode = prime * hashCode + ((getJobId() == null) ? 0 : getJobId().hashCode());
        return hashCode;
    }

    @Override
    public DescribeTopicsDetectionJobRequest clone() {
        return (DescribeTopicsDetectionJobRequest) super.clone();
    }

}
apache-2.0
cretz/mlbdash
src/org/mlbdash/client/gdx/masterscore/MasterScoreboardReader.java
850
/*
 * Copyright 2011 Chad Retz
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package org.mlbdash.client.gdx.masterscore;

import name.pehl.piriti.xml.client.XmlReader;

import org.mlbdash.shared.gdx.masterscore.MasterScoreboard;

/**
 * Piriti XML reader binding for {@link MasterScoreboard}. The concrete
 * implementation is generated by the Piriti framework; no methods are
 * declared here beyond those inherited from {@link XmlReader}.
 */
public interface MasterScoreboardReader extends XmlReader<MasterScoreboard> {
}
apache-2.0
kite9-org/k9
src/main/resources/static/public/behaviours/containers/child/containers-child.js
1280
import { hasLastSelected, createUniqueId } from '/public/bundles/api.js';
import { getMainSvg } from '/public/bundles/screen.js';
import { getBeforeId } from '/public/bundles/ordering.js';

// Default element source: every currently-selected element carrying a
// k9-child attribute in the main SVG document.
function defaultChildSelector() {
	return getMainSvg().querySelectorAll("[k9-child].selected");
}

/**
 * Adds a "child" option into the context menu.
 *
 * @param command  command stack the insert steps are pushed onto
 * @param selector optional element source; defaults to defaultChildSelector
 * @returns a context-menu callback (event, contextMenu) => void
 */
export function initChildContextMenuCallback(command, selector) {

	if (selector == null) {
		selector = defaultChildSelector;
	}

	// URI of the child template declared on the element, or null.
	const childUriOf = (el) => el.getAttribute("k9-child");

	// Builds the InsertUrl command step for one element.
	const insertStepFor = (el, uri) => ({
		"type": 'InsertUrl',
		"fragmentId": el.getAttribute('id'),
		"uriStr": uri,
		"deep" : true,
		"newId": createUniqueId()
	});

	/**
	 * Provides a link option for the context menu
	 */
	return (event, contextMenu) => {
		const picked = hasLastSelected(selector());

		if (picked.length > 0) {
			contextMenu.addControl(event, "/public/behaviours/containers/child/add.svg",
				"Add Child",
				() => {
					picked.forEach(el => {
						const uri = childUriOf(el);
						if (uri != null) {
							command.push(insertStepFor(el, uri));
						}
					});
					command.perform();
					contextMenu.destroy();
				});
		}
	};
}
apache-2.0
metatron-app/metatron-discovery
discovery-server/src/main/java/app/metatron/discovery/domain/workbook/configurations/board/WidgetRelation.java
2268
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package app.metatron.discovery.domain.workbook.configurations.board; import com.google.common.collect.Lists; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import org.apache.commons.collections.CollectionUtils; import java.io.Serializable; import java.util.List; import java.util.Map; /** * Created by kyungtaak on 2016. 9. 20.. */ public class WidgetRelation implements Serializable { String ref; List<WidgetRelation> children; public WidgetRelation() { } @JsonCreator public WidgetRelation(@JsonProperty(value = "ref") String ref, @JsonProperty(value = "pageRef") String pageRef, @JsonProperty("children") List<WidgetRelation> children) { this.ref = ref; if(this.ref == null) { this.ref = pageRef; } this.children = children; } public WidgetRelation(String ref, WidgetRelation... children) { this(ref, null, children.length == 0 ? null : Lists.newArrayList(children)); } public void replaceId(Map<String, String> idMap) { if (idMap.containsKey(ref)) { ref = idMap.get(ref); } if (CollectionUtils.isEmpty(children)) { return; } for (WidgetRelation relation : children) { relation.replaceId(idMap); } } public void addChild(String pageRef, WidgetRelation... 
children) { if (children == null) { this.children = Lists.newArrayList(); } this.children.add(new WidgetRelation(pageRef, children)); } public String getRef() { return ref; } public String getPageRef() { return ref; } public List<WidgetRelation> getChildren() { return children; } }
apache-2.0
eliasgranderubio/dagda
tests/cli/command/test_vuln_cli_parser.py
9975
#
# Licensed to Dagda under one or more contributor
# license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership.  Dagda licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.
#

"""Unit tests for the ``vuln`` CLI sub-command parser.

Each ``test_not_only_*`` / ``test_bad_*`` case feeds a synthetic argument
bundle to ``VulnCLIParser.verify_args`` and asserts the distinct numeric
error status it returns (0 means the combination is valid).
"""

import sys
import unittest

from cli.command.vuln_cli_parser import VulnCLIParser
from cli.command.vuln_cli_parser import DagdaVulnParser
from cli.command.vuln_cli_parser import vuln_parser_text


# -- Test suite

class VulnDBCliParserTestSuite(unittest.TestCase):
    """Exercises argument validation and getters of VulnCLIParser."""

    # -- verify_args: mutually-exclusive / malformed argument combinations

    def test_empty_args(self):
        # No option at all -> status 1.
        empty_args = generate_args(False, False, None, None, None, None, None, None, None, None, None, None,
                                   None, None)
        status = VulnCLIParser.verify_args(empty_args)
        self.assertEqual(status, 1)

    def test_not_only_init(self):
        # --init combined with another option -> status 2.
        args = generate_args(True, False, None, None, 12345, None, None, None, None, None, None, None,
                             None, None)
        status = VulnCLIParser.verify_args(args)
        self.assertEqual(status, 2)

    def test_not_only_init_status(self):
        args = generate_args(False, True, None, None, 12345, None, None, None, None, None, None, None,
                             None, None)
        status = VulnCLIParser.verify_args(args)
        self.assertEqual(status, 3)

    def test_not_only_cve(self):
        args = generate_args(False, False, 'CVE-2002-1562', None, 12345, None, None, None, None, None, None, None,
                             None, None)
        status = VulnCLIParser.verify_args(args)
        self.assertEqual(status, 4)

    def test_bad_cve(self):
        # Malformed CVE identifier -> status 5.
        args = generate_args(False, False, 'CVE-62', None, None, None, None, None, None, None, None, None,
                             None, None)
        status = VulnCLIParser.verify_args(args)
        self.assertEqual(status, 5)

    def test_not_only_cveinfo(self):
        args = generate_args(False, False, None, 'CVE-2002-1562', None, 12345, None, None, None, None, None, None,
                             None, None)
        status = VulnCLIParser.verify_args(args)
        self.assertEqual(status, 6)

    def test_bad_cveinfo(self):
        args = generate_args(False, False, None, 'CVE-62', None, None, None, None, None, None, None, None,
                             None, None)
        status = VulnCLIParser.verify_args(args)
        self.assertEqual(status, 7)

    def test_not_only_bid(self):
        args = generate_args(False, False, None, None, 12345, None, 'openldap', None, None, None, None, None,
                             None, None)
        status = VulnCLIParser.verify_args(args)
        self.assertEqual(status, 8)

    def test_bad_bid(self):
        # Negative BID -> status 9.
        args = generate_args(False, False, None, None, -12345, None, None, None, None, None, None, None,
                             None, None)
        status = VulnCLIParser.verify_args(args)
        self.assertEqual(status, 9)

    def test_not_only_bid_info(self):
        args = generate_args(False, False, None, None, None, 12345, 'openldap', None, None, None, None, None,
                             None, None)
        status = VulnCLIParser.verify_args(args)
        self.assertEqual(status, 10)

    def test_bad_bid_info(self):
        args = generate_args(False, False, None, None, None, -12345, None, None, None, None, None, None,
                             None, None)
        status = VulnCLIParser.verify_args(args)
        self.assertEqual(status, 11)

    def test_not_only_exploit_db(self):
        args = generate_args(False, False, None, None, None, None, 12345, None, 'openldap', None, None, None,
                             None, None)
        status = VulnCLIParser.verify_args(args)
        self.assertEqual(status, 12)

    def test_bad_exploit_db(self):
        args = generate_args(False, False, None, None, None, None, -12345, None, None, None, None, None,
                             None, None)
        status = VulnCLIParser.verify_args(args)
        self.assertEqual(status, 13)

    def test_not_only_exploit_db_info(self):
        args = generate_args(False, False, None, None, None, None, None, 12345, 'openldap', None, None, None,
                             None, None)
        status = VulnCLIParser.verify_args(args)
        self.assertEqual(status, 14)

    def test_bad_exploit_db_info(self):
        args = generate_args(False, False, None, None, None, None, None, -12345, None, None, None, None,
                             None, None)
        status = VulnCLIParser.verify_args(args)
        self.assertEqual(status, 15)

    def test_only_product_version(self):
        # A version without its product -> status 16.
        args = generate_args(False, False, None, None, None, None, None, None, None, '2.30', None, None,
                             None, None)
        status = VulnCLIParser.verify_args(args)
        self.assertEqual(status, 16)

    def test_not_only_rhba(self):
        args = generate_args(False, False, None, None, None, None, None, None, 'openldap', None, 'RHBA-2012:002', None,
                             None, None)
        status = VulnCLIParser.verify_args(args)
        self.assertEqual(status, 17)

    def test_bad_rhba(self):
        args = generate_args(False, False, None, None, None, None, None, None, None, None, 'RHBA--2012:002', None,
                             None, None)
        status = VulnCLIParser.verify_args(args)
        self.assertEqual(status, 18)

    def test_not_only_rhba_info(self):
        args = generate_args(False, False, None, None, None, None, None, None, 'openldap', None, None, 'RHBA-2012:002',
                             None, None)
        status = VulnCLIParser.verify_args(args)
        self.assertEqual(status, 19)

    def test_bad_rhba_info(self):
        args = generate_args(False, False, None, None, None, None, None, None, None, None, None, 'RHBA--2012:002',
                             None, None)
        status = VulnCLIParser.verify_args(args)
        self.assertEqual(status, 20)

    def test_not_only_rhsa(self):
        args = generate_args(False, False, None, None, None, None, None, None, 'openldap', None, None, None,
                             'RHSA-2012:002', None)
        status = VulnCLIParser.verify_args(args)
        self.assertEqual(status, 21)

    def test_bad_rhsa(self):
        args = generate_args(False, False, None, None, None, None, None, None, None, None, None, None,
                             'RHSA-20122:002', None)
        status = VulnCLIParser.verify_args(args)
        self.assertEqual(status, 22)

    def test_not_only_rhsa_info(self):
        args = generate_args(False, False, None, None, None, None, None, None, 'openldap', None, None, None,
                             None, 'RHSA-2012:002')
        status = VulnCLIParser.verify_args(args)
        self.assertEqual(status, 23)

    def test_bad_rhsa_info(self):
        args = generate_args(False, False, None, None, None, None, None, None, None, None, None, None,
                             None, 'RHSA-201222:002')
        status = VulnCLIParser.verify_args(args)
        self.assertEqual(status, 24)

    def test_ok(self):
        # product + product_version is the valid combination -> status 0.
        args = generate_args(False, False, None, None, None, None, None, None, 'openldap', '2.2.20', None, None,
                             None, None)
        status = VulnCLIParser.verify_args(args)
        self.assertEqual(status, 0)

    # -- getters: parse real argv and read each accessor back

    def test_get_cve(self):
        sys.argv = ['dagda.py', 'vuln', '--cve', 'CVE-2002-2002']
        parsed_args = VulnCLIParser()
        self.assertEqual(parsed_args.get_cve(), 'CVE-2002-2002')

    def test_get_cve_info(self):
        sys.argv = ['dagda.py', 'vuln', '--cve_info', 'CVE-2002-2002']
        parsed_args = VulnCLIParser()
        self.assertEqual(parsed_args.get_cve_info(), 'CVE-2002-2002')

    def test_get_bid(self):
        sys.argv = ['dagda.py', 'vuln', '--bid', '15']
        parsed_args = VulnCLIParser()
        self.assertEqual(parsed_args.get_bid(), 15)

    def test_get_bid_info(self):
        sys.argv = ['dagda.py', 'vuln', '--bid_info', '15']
        parsed_args = VulnCLIParser()
        self.assertEqual(parsed_args.get_bid_info(), 15)

    def test_get_exploit_db_id(self):
        sys.argv = ['dagda.py', 'vuln', '--exploit_db', '15']
        parsed_args = VulnCLIParser()
        self.assertEqual(parsed_args.get_exploit_db_id(), 15)

    def test_get_exploit_db_info_id(self):
        sys.argv = ['dagda.py', 'vuln', '--exploit_db_info', '15']
        parsed_args = VulnCLIParser()
        self.assertEqual(parsed_args.get_exploit_db_info_id(), 15)

    def test_check_full_happy_path(self):
        sys.argv = ['dagda.py', 'vuln', '--product', 'openldap', '--product_version', '2.2.20']
        parsed_args = VulnCLIParser()
        self.assertEqual(parsed_args.get_product(), 'openldap')
        self.assertEqual(parsed_args.get_product_version(), '2.2.20')

    # -- exit-code behaviour

    def test_check_exit_1(self):
        # Constructing the parser with no options must exit with code 1.
        sys.argv = ['dagda.py', 'vuln']
        with self.assertRaises(SystemExit) as cm:
            VulnCLIParser()
        self.assertEqual(cm.exception.code, 1)

    def test_DagdaVulnParser_exit_2(self):
        # argparse's error() exits with code 2.
        with self.assertRaises(SystemExit) as cm:
            DagdaVulnParser().error("fail")
        self.assertEqual(cm.exception.code, 2)

    def test_DagdaVulnParser_format_help(self):
        self.assertEqual(DagdaVulnParser().format_help(), vuln_parser_text)


# -- Util methods

def generate_args(init, init_status, cve, cve_info, bid, bid_info, exploit_db, exploit_db_info, product,
                  product_version, rhba, rhba_info, rhsa, rhsa_info):
    """Builds an attribute-style argument bundle mimicking parsed CLI args."""
    return AttrDict([('init', init), ('init_status', init_status), ('cve', cve), ('cve_info', cve_info),
                     ('bid', bid), ('bid_info', bid_info), ('exploit_db', exploit_db),
                     ('exploit_db_info', exploit_db_info), ('product', product),
                     ('product_version', product_version), ('rhba', rhba), ('rhba_info', rhba_info),
                     ('rhsa', rhsa), ('rhsa_info', rhsa_info)])


# -- Util classes

class AttrDict(dict):
    """Dict whose keys are also readable as attributes (args.cve etc.)."""

    def __init__(self, *args, **kwargs):
        super(AttrDict, self).__init__(*args, **kwargs)
        # Aliasing __dict__ to self makes every key an attribute.
        self.__dict__ = self


if __name__ == '__main__':
    unittest.main()
apache-2.0
vespa-engine/vespa
controller-server/src/test/java/com/yahoo/vespa/hosted/controller/restapi/ApplicationRequestToDiscFilterRequestWrapper.java
4662
// Copyright Yahoo. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
package com.yahoo.vespa.hosted.controller.restapi;

import com.yahoo.application.container.handler.Request;
import com.yahoo.jdisc.http.HttpRequest;
import com.yahoo.jdisc.http.filter.DiscFilterRequest;

import java.net.URI;
import java.security.Principal;
import java.security.cert.X509Certificate;
import java.util.Collections;
import java.util.Enumeration;
import java.util.List;
import java.util.Map;

import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.doNothing;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;

/**
 * Wraps an {@link Request} into a {@link DiscFilterRequest}. Only a few methods are supported.
 * Changes are not propagated; updated request instance must be retrieved through {@link #getUpdatedRequest()}.
 *
 * @author bjorncs
 */
public class ApplicationRequestToDiscFilterRequestWrapper extends DiscFilterRequest {

    private final Request request;
    private final List<X509Certificate> clientCertificateChain;
    // Mutable: setUserPrincipal replaces it; read back in getUpdatedRequest().
    private Principal userPrincipal;

    public ApplicationRequestToDiscFilterRequestWrapper(Request request) {
        this(request, Collections.emptyList());
    }

    public ApplicationRequestToDiscFilterRequestWrapper(Request request, List<X509Certificate> clientCertificateChain) {
        super(createDummyHttpRequest(request));
        this.request = request;
        this.userPrincipal = request.getUserPrincipal().orElse(null);
        this.clientCertificateChain = clientCertificateChain;
    }

    /**
     * Builds a minimal {@link HttpRequest} mock for the superclass constructor.
     * Only getUri(), copyHeaders() and parameters() are stubbed; any other
     * method throws UnsupportedOperationException (the mock's default answer).
     */
    private static HttpRequest createDummyHttpRequest(Request req) {
        HttpRequest dummy = mock(HttpRequest.class, invocation -> {
            throw new UnsupportedOperationException();
        });
        doReturn(URI.create(req.getUri()).normalize()).when(dummy).getUri();
        doNothing().when(dummy).copyHeaders(any());
        doReturn(Map.of()).when(dummy).parameters();
        return dummy;
    }

    /**
     * Creates a fresh {@link Request} reflecting mutations applied through this
     * wrapper (currently only the user principal); headers and attributes are
     * copied over from the original request.
     */
    public Request getUpdatedRequest() {
        Request updatedRequest = new Request(this.request.getUri(), this.request.getBody(), this.request.getMethod(), this.userPrincipal);
        this.request.getHeaders().forEach(updatedRequest.getHeaders()::put);
        updatedRequest.getAttributes().putAll(this.request.getAttributes());
        return updatedRequest;
    }

    @Override
    public String getMethod() {
        return request.getMethod().name();
    }

    @Override
    @Deprecated
    public void setUri(URI uri) {
        throw new UnsupportedOperationException();
    }

    @Override
    public String getParameter(String name) {
        throw new UnsupportedOperationException();
    }

    @Override
    public Enumeration<String> getParameterNames() {
        throw new UnsupportedOperationException();
    }

    @Override
    public void addHeader(String name, String value) {
        throw new UnsupportedOperationException();
    }

    @Override
    public String getHeader(String name) {
        return request.getHeaders().getFirst(name);
    }

    @Override
    public Enumeration<String> getHeaderNames() {
        throw new UnsupportedOperationException();
    }

    @Override
    public List<String> getHeaderNamesAsList() {
        throw new UnsupportedOperationException();
    }

    @Override
    public Enumeration<String> getHeaders(String name) {
        throw new UnsupportedOperationException();
    }

    @Override
    public List<String> getHeadersAsList(String name) {
        throw new UnsupportedOperationException();
    }

    @Override
    public void removeHeaders(String name) {
        throw new UnsupportedOperationException();
    }

    @Override
    public void setHeaders(String name, String value) {
        throw new UnsupportedOperationException();
    }

    @Override
    public void setHeaders(String name, List<String> values) {
        throw new UnsupportedOperationException();
    }

    @Override
    public Principal getUserPrincipal() {
        return this.userPrincipal;
    }

    @Override
    public void setUserPrincipal(Principal principal) {
        this.userPrincipal = principal;
    }

    @Override
    public List<X509Certificate> getClientCertificateChain() {
        return clientCertificateChain;
    }

    @Override
    public void clearCookies() {
        throw new UnsupportedOperationException();
    }

    @Override
    public Object getAttribute(String name) {
        return request.getAttributes().get(name);
    }

    @Override
    public void setAttribute(String name, Object value) {
        request.getAttributes().put(name, value);
    }
}
apache-2.0
artem-aliev/tinkerpop
gremlin-core/src/main/java/org/apache/tinkerpop/gremlin/process/traversal/util/DefaultTraversal.java
14336
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.tinkerpop.gremlin.process.traversal.util;

import org.apache.tinkerpop.gremlin.process.traversal.Bytecode;
import org.apache.tinkerpop.gremlin.process.traversal.Step;
import org.apache.tinkerpop.gremlin.process.traversal.Traversal;
import org.apache.tinkerpop.gremlin.process.traversal.TraversalSideEffects;
import org.apache.tinkerpop.gremlin.process.traversal.TraversalSource;
import org.apache.tinkerpop.gremlin.process.traversal.TraversalStrategies;
import org.apache.tinkerpop.gremlin.process.traversal.Traverser;
import org.apache.tinkerpop.gremlin.process.traversal.TraverserGenerator;
import org.apache.tinkerpop.gremlin.process.traversal.step.TraversalParent;
import org.apache.tinkerpop.gremlin.process.traversal.step.util.EmptyStep;
import org.apache.tinkerpop.gremlin.process.traversal.traverser.TraverserRequirement;
import org.apache.tinkerpop.gremlin.process.traversal.traverser.util.DefaultTraverserGeneratorFactory;
import org.apache.tinkerpop.gremlin.process.traversal.traverser.util.EmptyTraverser;
import org.apache.tinkerpop.gremlin.structure.Graph;
import org.apache.tinkerpop.gremlin.structure.util.StringFactory;
import org.apache.tinkerpop.gremlin.structure.util.empty.EmptyGraph;

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.NoSuchElementException;
import java.util.Optional;
import java.util.Set;

/**
 * Standard {@link Traversal.Admin} implementation: a mutable list of {@link Step}s that is
 * "locked" (made immutable) once strategies have been applied, after which traversers can be
 * pulled from the final end step.
 *
 * @author Marko A. Rodriguez (http://markorodriguez.com)
 */
public class DefaultTraversal<S, E> implements Traversal.Admin<S, E> {

    // Last traverser pulled from the end step; its bulk is drained one unit at a time by next().
    private Traverser.Admin<E> lastTraverser = EmptyTraverser.instance();
    // Cached end step, refreshed in applyStrategies()/clone() so iteration avoids repeated lookups.
    private Step<?, E> finalEndStep = EmptyStep.instance();
    private final StepPosition stepPosition = new StepPosition();
    protected transient Graph graph;
    protected List<Step> steps = new ArrayList<>();
    // steps will be repeatedly retrieved from this traversal so wrap them once in an immutable list that can be reused
    protected List<Step> unmodifiableSteps = Collections.unmodifiableList(steps);
    protected TraversalParent parent = EmptyStep.instance();
    protected TraversalSideEffects sideEffects = new DefaultTraversalSideEffects();
    protected TraversalStrategies strategies;
    protected transient TraverserGenerator generator;
    // Lazily computed in getTraverserRequirements(); null means "not computed yet".
    protected Set<TraverserRequirement> requirements;
    // Once true, structural mutation (addStep/removeStep/applyStrategies) throws.
    protected boolean locked = false;
    protected Bytecode bytecode; // TODO: perhaps make transient until 3.3.0?

    // All public constructors funnel through here.
    private DefaultTraversal(final Graph graph, final TraversalStrategies traversalStrategies, final Bytecode bytecode) {
        this.graph = graph;
        this.strategies = traversalStrategies;
        this.bytecode = bytecode;
    }

    public DefaultTraversal(final Graph graph) {
        this(graph, TraversalStrategies.GlobalCache.getStrategies(graph.getClass()), new Bytecode());
    }

    public DefaultTraversal(final TraversalSource traversalSource) {
        this(traversalSource.getGraph(), traversalSource.getStrategies(), traversalSource.getBytecode());
    }

    // Copies the given traversal's steps into this traversal, but takes graph/strategies from the source.
    public DefaultTraversal(final TraversalSource traversalSource, final DefaultTraversal.Admin<S,E> traversal) {
        this(traversalSource.getGraph(), traversalSource.getStrategies(), traversal.getBytecode());
        steps.addAll(traversal.getSteps());
    }

    // TODO: clean up unused or redundant constructors

    public DefaultTraversal() {
        this(EmptyGraph.instance(), TraversalStrategies.GlobalCache.getStrategies(EmptyGraph.class), new Bytecode());
    }

    public DefaultTraversal(final Bytecode bytecode) {
        this(EmptyGraph.instance(), TraversalStrategies.GlobalCache.getStrategies(EmptyGraph.class), bytecode);
    }

    public Bytecode getBytecode() {
        return this.bytecode;
    }

    @Override
    public Traversal.Admin<S, E> asAdmin() {
        return this;
    }

    @Override
    public TraverserGenerator getTraverserGenerator() {
        // Root traversals build their own generator from requirements; child traversals
        // delegate to the root traversal's generator.
        if (null == this.generator)
            this.generator = (this.parent instanceof EmptyStep) ?
                    DefaultTraverserGeneratorFactory.instance().getTraverserGenerator(this.getTraverserRequirements()) :
                    TraversalHelper.getRootTraversal(this).getTraverserGenerator();
        return this.generator;
    }

    /**
     * Re-ids the steps, runs all strategies over this traversal and (recursively) over all
     * global/local child traversals, then locks this traversal against further mutation.
     *
     * @throws IllegalStateException if the traversal is already locked
     */
    @Override
    public void applyStrategies() throws IllegalStateException {
        if (this.locked) throw Traversal.Exceptions.traversalIsLocked();
        TraversalHelper.reIdSteps(this.stepPosition, this);
        this.strategies.applyStrategies(this);
        boolean hasGraph = null != this.graph;
        for (int i = 0, j = this.steps.size(); i < j; i++) { // "foreach" can lead to ConcurrentModificationExceptions
            final Step step = this.steps.get(i);
            if (step instanceof TraversalParent) {
                // Children inherit this traversal's strategies, side-effects and graph before
                // having their own strategies applied.
                for (final Traversal.Admin<?, ?> globalChild : ((TraversalParent) step).getGlobalChildren()) {
                    globalChild.setStrategies(this.strategies);
                    globalChild.setSideEffects(this.sideEffects);
                    if (hasGraph) globalChild.setGraph(this.graph);
                    globalChild.applyStrategies();
                }
                for (final Traversal.Admin<?, ?> localChild : ((TraversalParent) step).getLocalChildren()) {
                    localChild.setStrategies(this.strategies);
                    localChild.setSideEffects(this.sideEffects);
                    if (hasGraph) localChild.setGraph(this.graph);
                    localChild.applyStrategies();
                }
            }
        }
        this.finalEndStep = this.getEndStep();
        // finalize requirements
        if (this.getParent() instanceof EmptyStep) {
            // Drop any pre-strategy computation so requirements reflect the final step list.
            this.requirements = null;
            this.getTraverserRequirements();
        }
        this.locked = true;
    }

    @Override
    public Set<TraverserRequirement> getTraverserRequirements() {
        if (null == this.requirements) {
            // if (!this.locked) this.applyStrategies();
            this.requirements = new HashSet<>();
            for (final Step<?, ?> step : this.getSteps()) {
                this.requirements.addAll(step.getRequirements());
            }
            // Labeled steps imply path tracking even if no step declared it.
            if (!this.requirements.contains(TraverserRequirement.LABELED_PATH) && TraversalHelper.hasLabels(this))
                this.requirements.add(TraverserRequirement.LABELED_PATH);
            if (!this.getSideEffects().keys().isEmpty())
                this.requirements.add(TraverserRequirement.SIDE_EFFECTS);
            if (null != this.getSideEffects().getSackInitialValue())
                this.requirements.add(TraverserRequirement.SACK);
            // ONE_BULK supersedes BULK: the two are mutually exclusive.
            if (this.requirements.contains(TraverserRequirement.ONE_BULK))
                this.requirements.remove(TraverserRequirement.BULK);
            this.requirements = Collections.unmodifiableSet(this.requirements);
        }
        return this.requirements;
    }

    @Override
    public List<Step> getSteps() {
        return this.unmodifiableSteps;
    }

    /**
     * Returns the next traverser with its full bulk intact, either the buffered
     * {@code lastTraverser} (if it still has bulk) or a fresh one from the end step.
     */
    @Override
    public Traverser.Admin<E> nextTraverser() {
        try {
            if (!this.locked) this.applyStrategies();
            if (this.lastTraverser.bulk() > 0L) {
                final Traverser.Admin<E> temp = this.lastTraverser;
                this.lastTraverser = EmptyTraverser.instance();
                return temp;
            } else {
                return this.finalEndStep.next();
            }
        } catch (final FastNoSuchElementException e) {
            // Root traversals surface the standard exception; children rethrow the fast one.
            throw this.parent instanceof EmptyStep ? new NoSuchElementException() : e;
        }
    }

    @Override
    public boolean hasNext() {
        if (!this.locked) this.applyStrategies();
        return this.lastTraverser.bulk() > 0L || this.finalEndStep.hasNext();
    }

    /**
     * Iterator-style access: drains the buffered traverser's bulk one unit per call,
     * pulling a new traverser from the end step when the bulk is exhausted.
     */
    @Override
    public E next() {
        try {
            if (!this.locked) this.applyStrategies();
            if (this.lastTraverser.bulk() == 0L)
                this.lastTraverser = this.finalEndStep.next();
            this.lastTraverser.setBulk(this.lastTraverser.bulk() - 1L);
            return this.lastTraverser.get();
        } catch (final FastNoSuchElementException e) {
            throw this.parent instanceof EmptyStep ? new NoSuchElementException() : e;
        }
    }

    @Override
    public void reset() {
        this.steps.forEach(Step::reset);
        this.finalEndStep.reset();
        this.lastTraverser = EmptyTraverser.instance();
    }

    @Override
    public void addStart(final Traverser.Admin<S> start) {
        if (!this.locked) this.applyStrategies();
        if (!this.steps.isEmpty()) this.steps.get(0).addStart(start);
    }

    @Override
    public void addStarts(final Iterator<Traverser.Admin<S>> starts) {
        if (!this.locked) this.applyStrategies();
        if (!this.steps.isEmpty()) this.steps.get(0).addStarts(starts);
    }

    @Override
    public String toString() {
        return StringFactory.traversalString(this);
    }

    @Override
    public Step<S, ?> getStartStep() {
        return this.steps.isEmpty() ? EmptyStep.instance() : this.steps.get(0);
    }

    @Override
    public Step<?, E> getEndStep() {
        return this.steps.isEmpty() ? EmptyStep.instance() : this.steps.get(this.steps.size() - 1);
    }

    /**
     * Deep-clones the step list (re-linking previous/next pointers between the cloned steps)
     * and the side-effects/bytecode; strategies are shared with the original.
     */
    @Override
    public DefaultTraversal<S, E> clone() {
        try {
            final DefaultTraversal<S, E> clone = (DefaultTraversal<S, E>) super.clone();
            clone.lastTraverser = EmptyTraverser.instance();
            clone.steps = new ArrayList<>();
            clone.unmodifiableSteps = Collections.unmodifiableList(clone.steps);
            clone.sideEffects = this.sideEffects.clone();
            clone.strategies = this.strategies;
            clone.bytecode = this.bytecode.clone();
            for (final Step<?, ?> step : this.steps) {
                final Step<?, ?> clonedStep = step.clone();
                clonedStep.setTraversal(clone);
                // Wire the cloned step to the tail of the cloned chain.
                final Step previousStep = clone.steps.isEmpty() ? EmptyStep.instance() : clone.steps.get(clone.steps.size() - 1);
                clonedStep.setPreviousStep(previousStep);
                previousStep.setNextStep(clonedStep);
                clone.steps.add(clonedStep);
            }
            clone.finalEndStep = clone.getEndStep();
            return clone;
        } catch (final CloneNotSupportedException e) {
            throw new IllegalStateException(e.getMessage(), e);
        }
    }

    @Override
    public boolean isLocked() {
        return this.locked;
    }

    @Override
    public void setSideEffects(final TraversalSideEffects sideEffects) {
        this.sideEffects = sideEffects;
    }

    @Override
    public TraversalSideEffects getSideEffects() {
        return this.sideEffects;
    }

    @Override
    public void setStrategies(final TraversalStrategies strategies) {
        this.strategies = strategies;
    }

    @Override
    public TraversalStrategies getStrategies() {
        return this.strategies;
    }

    /**
     * Inserts the step at the given index and fixes up the previous/next links of the
     * neighboring steps.
     *
     * @throws IllegalStateException if the traversal is locked
     */
    @Override
    public <S2, E2> Traversal.Admin<S2, E2> addStep(final int index, final Step<?, ?> step) throws IllegalStateException {
        if (this.locked) throw Exceptions.traversalIsLocked();
        step.setId(this.stepPosition.nextXId());
        this.steps.add(index, step);
        final Step previousStep = this.steps.size() > 0 && index != 0 ? steps.get(index - 1) : null;
        final Step nextStep = this.steps.size() > index + 1 ? steps.get(index + 1) : null;
        step.setPreviousStep(null != previousStep ? previousStep : EmptyStep.instance());
        step.setNextStep(null != nextStep ? nextStep : EmptyStep.instance());
        if (null != previousStep) previousStep.setNextStep(step);
        if (null != nextStep) nextStep.setPreviousStep(step);
        step.setTraversal(this);
        return (Traversal.Admin<S2, E2>) this;
    }

    /**
     * Removes the step at the given index and re-links its former neighbors to each other.
     *
     * @throws IllegalStateException if the traversal is locked
     */
    @Override
    public <S2, E2> Traversal.Admin<S2, E2> removeStep(final int index) throws IllegalStateException {
        if (this.locked) throw Exceptions.traversalIsLocked();
        final Step previousStep = this.steps.size() > 0 && index != 0 ? steps.get(index - 1) : null;
        final Step nextStep = this.steps.size() > index + 1 ? steps.get(index + 1) : null;
        //this.steps.get(index).setTraversal(EmptyTraversal.instance());
        this.steps.remove(index);
        if (null != previousStep) previousStep.setNextStep(null == nextStep ? EmptyStep.instance() : nextStep);
        if (null != nextStep) nextStep.setPreviousStep(null == previousStep ? EmptyStep.instance() : previousStep);
        return (Traversal.Admin<S2, E2>) this;
    }

    @Override
    public void setParent(final TraversalParent step) {
        this.parent = step;
    }

    @Override
    public TraversalParent getParent() {
        return this.parent;
    }

    @Override
    public Optional<Graph> getGraph() {
        return Optional.ofNullable(this.graph);
    }

    @Override
    public void setGraph(final Graph graph) {
        this.graph = graph;
    }

    @Override
    public boolean equals(final Object other) {
        // Exact-class check, then delegate to the Traversal.Admin equals overload.
        return other != null && other.getClass().equals(this.getClass()) && this.equals(((Traversal.Admin) other));
    }

    @Override
    public int hashCode() {
        // Position-sensitive combination of the step hash codes.
        int index = 0;
        int result = this.getClass().hashCode();
        for (final Step step : this.asAdmin().getSteps()) {
            result ^= Integer.rotateLeft(step.hashCode(), index++);
        }
        return result;
    }
}
apache-2.0
sbrossie/killbill
api/src/main/java/org/killbill/billing/entitlement/EntitlementInternalApi.java
2152
/*
 * Copyright 2010-2013 Ning, Inc.
 * Copyright 2014-2016 Groupon, Inc
 * Copyright 2014-2016 The Billing Project, LLC
 *
 * The Billing Project licenses this file to you under the Apache License, version 2.0
 * (the "License"); you may not use this file except in compliance with the
 * License.  You may obtain a copy of the License at:
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */

package org.killbill.billing.entitlement;

import java.util.UUID;

import org.joda.time.LocalDate;
import org.killbill.billing.callcontext.InternalCallContext;
import org.killbill.billing.callcontext.InternalTenantContext;
import org.killbill.billing.catalog.api.BillingActionPolicy;
import org.killbill.billing.entitlement.api.Entitlement;
import org.killbill.billing.entitlement.api.EntitlementApiException;
import org.killbill.billing.payment.api.PluginProperty;

/**
 * Internal (tenant-scoped) entry points for reading and mutating entitlements.
 */
public interface EntitlementInternalApi {

    /**
     * Retrieves all entitlements for the account associated with the given tenant context.
     *
     * @param context the internal tenant context
     * @return the account's entitlements
     * @throws EntitlementApiException on lookup failure
     */
    AccountEntitlements getAllEntitlementsForAccount(InternalTenantContext context) throws EntitlementApiException;

    /**
     * Retrieves a single entitlement by its id.
     *
     * @param uuid          the entitlement id
     * @param tenantContext the internal tenant context
     * @return the matching entitlement
     * @throws EntitlementApiException if no such entitlement exists or lookup fails
     */
    Entitlement getEntitlementForId(final UUID uuid, final InternalTenantContext tenantContext) throws EntitlementApiException;

    /**
     * Retrieves a single entitlement by its external key.
     *
     * @param externalKey   the entitlement external key
     * @param tenantContext the internal tenant context
     * @return the matching entitlement
     * @throws EntitlementApiException if no such entitlement exists or lookup fails
     */
    Entitlement getEntitlementForExternalKey(final String externalKey, final InternalTenantContext tenantContext) throws EntitlementApiException;

    /**
     * Pauses the bundle identified by {@code bundleId}.
     *
     * @param bundleId      the bundle to pause
     * @param effectiveDate when the pause takes effect
     * @param properties    plugin-specific properties
     * @param context       the internal call context
     * @throws EntitlementApiException if the pause cannot be applied
     */
    void pause(UUID bundleId, LocalDate effectiveDate, Iterable<PluginProperty> properties, InternalCallContext context) throws EntitlementApiException;

    /**
     * Resumes a previously paused bundle.
     *
     * @param bundleId           the bundle to resume
     * @param localEffectiveDate when the resume takes effect
     * @param properties         plugin-specific properties
     * @param context            the internal call context
     * @throws EntitlementApiException if the resume cannot be applied
     */
    void resume(UUID bundleId, LocalDate localEffectiveDate, Iterable<PluginProperty> properties, InternalCallContext context) throws EntitlementApiException;

    /**
     * Cancels the given entitlements.
     *
     * @param entitlements  the entitlements to cancel
     * @param effectiveDate when the cancellation takes effect
     * @param billingPolicy the billing policy governing the cancellation
     * @param properties    plugin-specific properties
     * @param context       the internal call context
     * @throws EntitlementApiException if the cancellation cannot be applied
     */
    void cancel(Iterable<Entitlement> entitlements, LocalDate effectiveDate, BillingActionPolicy billingPolicy, Iterable<PluginProperty> properties, InternalCallContext context) throws EntitlementApiException;
}
apache-2.0
xlui/KinectProject
AndroidClient/app/src/main/java/com/totoro/xkf/androidclient/presenter/HistoryHandStatePresenter.java
2375
package com.totoro.xkf.androidclient.presenter;

import com.totoro.xkf.androidclient.base.BasePresenter;
import com.totoro.xkf.androidclient.util.HttpUtils;
import com.totoro.xkf.androidclient.util.PreferenceUtils;
import com.totoro.xkf.androidclient.util.StateSelector;
import com.totoro.xkf.androidclient.view.HistoryHandStateActivity;

import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import okhttp3.Call;
import okhttp3.Callback;
import okhttp3.Response;

/**
 * Presenter that fetches the history of hand states from the backend and pushes
 * the formatted entries into {@link HistoryHandStateActivity}'s list view.
 */
public class HistoryHandStatePresenter extends BasePresenter<HistoryHandStateActivity> {
    public HistoryHandStatePresenter(HistoryHandStateActivity mView) {
        super(mView);
    }

    /**
     * Asynchronously requests the history for the stored auth token, parses the JSON
     * array response into one display string per entry, and updates the view.
     *
     * Expected response shape (inferred from the parsing below): a JSON array of
     * objects with "state" (string), "danger" (boolean) and "date" (string) fields
     * — TODO confirm against the server API.
     */
    public void updateHistoryData() {
        HttpUtils.getHistoryState(PreferenceUtils.getToken(), new Callback() {
            @Override
            public void onFailure(Call call, IOException e) {
                // NOTE(review): network failures are silently swallowed here, so the
                // user gets no feedback when the request fails — consider surfacing
                // an error through the view.
            }

            @Override
            public void onResponse(Call call, Response response) throws IOException {
                // NOTE(review): response.body() can be null for some responses;
                // verify this endpoint always returns a body.
                String json = response.body().string();
                final List<String> history = new ArrayList<>();
                try {
                    JSONArray jsonArray = new JSONArray(json);
                    for (int i = 0; i < jsonArray.length(); i++) {
                        JSONObject jsonObject = jsonArray.getJSONObject(i);
                        String state = jsonObject.optString("state");
                        // Map the raw state code to a human-readable description.
                        String stateInfo = StateSelector.getState(state);
                        boolean isDanger = jsonObject.optBoolean("danger");
                        // "安全" = "safe" (default), "危险" = "dangerous".
                        String danger = "安全";
                        if (isDanger) {
                            danger = "危险";
                        }
                        String date = jsonObject.optString("date");
                        // One three-line display entry: state / safety / date.
                        String info = stateInfo + "\n" + danger + "\n" + date;
                        history.add(info);
                    }
                } catch (JSONException e) {
                    // Malformed payload: log and fall through with whatever parsed so far.
                    e.printStackTrace();
                }
                // Only touch the view when there is something to show.
                // NOTE(review): okhttp callbacks run on a background thread — confirm
                // updateListView handles posting to the UI thread itself.
                if (!history.isEmpty()) {
                    String[] historyData = history.toArray(new String[history.size()]);
                    mView.updateListView(historyData);
                }
            }
        });
    }
}
apache-2.0
freme-project/pipelines
src/test/java/eu/freme/eservices/pipelines/TestRequestFactory.java
4447
/**
 * Copyright (C) 2015 Agro-Know, Deutsches Forschungszentrum für Künstliche Intelligenz, iMinds,
 * Institut für Angewandte Informatik e. V. an der Universität Leipzig,
 * Istituto Superiore Mario Boella, Tilde, Vistatec, WRIPL (http://freme-project.eu)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package eu.freme.eservices.pipelines;

import com.google.gson.Gson;
import com.google.gson.JsonSyntaxException;
import eu.freme.eservices.pipelines.requests.RequestFactory;
import eu.freme.eservices.pipelines.requests.SerializedRequest;
import eu.freme.eservices.pipelines.serialization.Pipeline;
import eu.freme.eservices.pipelines.serialization.Serializer;
import org.junit.Test;

import java.util.Collections;
import java.util.List;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;

/**
 * Serialization round-trip tests for pipeline requests and templates.
 *
 * @author Gerald Haesendonck
 */
public class TestRequestFactory {

	/** Builds the single-request pipeline template that the round-trip tests use. */
	private static Pipeline newSpotlightPipeline() {
		List<SerializedRequest> requests = Collections.singletonList(RequestFactory.createEntitySpotlight("nl"));
		return new Pipeline(1, 1, "label", "description", false, "Edgar Allan Poe", "PUBLIC", requests);
	}

	@Test
	public void testWrongRequestInJSON() {
		// The method name is deliberately misspelled: POST = POS!
		final String malformedJson = "[\n" +
				"  {\n" +
				"    \"method\": \"POS\",\n" +
				"    \"endpoint\": \"http://api.freme-project.eu/current/e-entity/dbpedia-spotlight/documents\",\n" +
				"    \"parameters\": {\n" +
				"      \"language\": \"en\",\n" +
				"      \"prefix\": \"http://freme-project.eu/\"\n" +
				"    },\n" +
				"    \"headers\": {\n" +
				"      \"content-method\": \"text/plain\",\n" +
				"      \"accept\": \"text/turtle\"\n" +
				"    },\n" +
				"    \"body\": \"This summer there is the Zomerbar in Antwerp, one of the most beautiful cities in Belgium.\"\n" +
				"  }]";
		try {
			Serializer.fromJson(malformedJson);
			fail("A JsonSyntaxException is expected, but all went well (which is wrong!)"); // should throw exception
		} catch (JsonSyntaxException e) {
			// Deserialization rejected the bogus method name, as it should.
			System.out.println("Test succeeded. Error msg: " + e.getMessage());
		}
	}

	@Test
	public void testPipelineMemberCheckValid() {
		// A valid template must survive a serialize/deserialize round trip unchanged.
		Pipeline original = newSpotlightPipeline();
		String json = new Gson().toJson(original);
		Pipeline roundTripped = Serializer.templateFromJson(json);
		assertEquals(original, roundTripped);
	}

	@Test
	public void testPipelineMemberCheckValidInvalid() {
		// Corrupt a required member name and expect deserialization to fail.
		String json = new Gson().toJson(newSpotlightPipeline());
		String corruptedJson = json.replace("label", "labour");
		try {
			Serializer.templateFromJson(corruptedJson);
			fail("A JsonSyntaxException is expected, but all went well (which is wrong!)");
		} catch (JsonSyntaxException e) {
			// Deserialization rejected the unknown member, as it should.
			System.out.println("Test succeeded. Error msg: " + e.getMessage());
		}
	}

//	@Test	// TODO: not valid anymore. Write new test.
//	public void testPipelineMemberCheckValidInvalidRequest() {
//		Gson gson = new Gson();
//		String serializedReqs = Serializer.toJson(RequestFactory.createEntitySpotlight("nl"));
//		String invalidSerializedReqs = serializedReqs.replace("parameters", "barometers");
//		Pipeline pipeline = new Pipeline(1, "label", "description", false, "Edgar Allan Poe", "PUBLIC", invalidSerializedReqs);
//		String pipelineStr = gson.toJson(pipeline);
//		try {
//			Serializer.templateFromJson(pipelineStr);
//			fail("A JsonSyntaxException is expected, but all went well (which is wrong!)");
//		} catch (JsonSyntaxException e) {
//			// very good!
//			System.out.println("Test succeeded. Error msg: " + e.getMessage());
//		}
//	}
}
apache-2.0
koyhoge/parse2ncmb
lib/converter.js
2484
"use strict" var Converter = module.exports = (function() { var keyMap = { createdAt: 'parseCreateAt' , updatedAt: 'parseUpdateAt' , objectId: 'parseObjectId' }; function Converter(ncmb, type, name) { this.__proto__.ncmb = ncmb; this._type = type; this._name = name; } Converter.prototype.convert = function(obj) { switch (this._type) { case 'installation': return this.convInstallation(obj); break; case 'user': return this.convUser(obj); break; case 'role': case 'product': case 'join': return this.convDummy(obj); break; default: return this.convObject(obj); break; } }; Converter.prototype.objCopy = function(obj, map) { let attrs = {}; Object.keys(obj).forEach(function(key) { if (map[key] == undefined) { attrs[key] = obj[key]; } else { attrs[map[key]] = obj[key]; } }); return attrs; }; Converter.prototype.convInstallation = function(obj) { let map = keyMap; // add key depends on installations map['appName'] = 'applicationName'; let attrs = this.objCopy(obj, map); let NCMBInstallationEx = require('./installation_ex'); let installation = new NCMBInstallationEx(this.ncmb); return new Promise(function(resolve, reject) { installation .register(attrs) .then(function() { resolve(); }) .catch(function(err) { reject(err); }); }); } Converter.prototype.convUser = function(obj) { let map = keyMap; // add keys depends on user map['username'] = 'userName'; map['email'] = 'mailAddress'; map['emailVerified'] = 'parseEmailVerified'; map['sessionToken'] = 'parseSessionToken'; let attrs = this.objCopy(obj, map); // override password as dummy attrs['password'] = 'ncmb_dummy_password'; let user = new this.ncmb.User(attrs); return new Promise(function(resolve, reject) { user.save() .then(function() { resolve(); }) .catch(function(err) { reject(err); }); }); } Converter.prototype.convObject = function(obj) { let attrs = this.objCopy(obj, keyMap); let klass = this.ncmb.DataStore(this._name); let object = new klass(attrs); return new Promise(function(resolve, reject) { object.save() 
.then(function() { resolve(); }) .catch(function(err) { reject(err); }); }); } Converter.prototype.convDummy = function(obj) { return new Promise(function(resolve, reject) { resolve(); }); } return Converter; })();
apache-2.0
ligato/cn-infra
health/probe/prometheus_probe.go
4585
// Copyright (c) 2017 Cisco and/or its affiliates.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at:
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package probe

import (
	"github.com/prometheus/client_golang/prometheus"
	"github.com/prometheus/client_golang/prometheus/promhttp"

	"go.ligato.io/cn-infra/v2/health/statuscheck/model/status"
)

const (
	// DefaultHealthPath default Prometheus health metrics URL
	DefaultHealthPath = "/health"
	// Namespace namespace to use for Prometheus health metrics
	Namespace = ""
	// Subsystem subsystem to use for Prometheus health metrics
	Subsystem = ""
	// ServiceLabel label for service field
	ServiceLabel = "service"
	// DependencyLabel label for dependency field
	DependencyLabel = "dependency"
	// BuildVersionLabel label for build version field
	BuildVersionLabel = "build_version"
	// BuildDateLabel label for build date field
	BuildDateLabel = "build_date"

	// ServiceHealthName name of service health metric
	ServiceHealthName = "service_health"

	// ServiceHealthHelp help text for service health metric
	// Adapt Ligato status code for now.
	// TODO: Consolidate with that from the "Common Container Telemetry" proposal.
	// ServiceHealthHelp string = "The health of the ServiceLabel 0 = INIT, 1 = UP, 2 = DOWN, 3 = OUTAGE"
	ServiceHealthHelp = "The health of the ServiceLabel 0 = INIT, 1 = OK, 2 = ERROR"

	// DependencyHealthName name of dependency health metric
	DependencyHealthName = "service_dependency_health"

	// DependencyHealthHelp help text for dependency health metric
	// Adapt Ligato status code for now.
	// TODO: Consolidate with that from the "Common Container Telemetry" proposal.
	// DependencyHealthHelp string = "The health of the DependencyLabel 0 = INIT, 1 = UP, 2 = DOWN, 3 = OUTAGE"
	DependencyHealthHelp = "The health of the DependencyLabel 0 = INIT, 1 = OK, 2 = ERROR"

	// ServiceInfoName name of service info metric
	ServiceInfoName = "service_info"
	// ServiceInfoHelp help text for service info metric
	ServiceInfoHelp = "Build info for the service. Value is always 1, build info is in the tags."
)

// registerPrometheusProbe creates the health registry at DefaultHealthPath and
// registers three gauge families on it: overall service health, a constant
// service-info gauge carrying build version/date as labels, and one dependency
// health gauge per plugin known to the status check at registration time.
// NOTE(review): the plugin set is snapshotted here; plugins registered later
// would not get a gauge — confirm that is intended.
func (p *Plugin) registerPrometheusProbe() error {
	err := p.Prometheus.NewRegistry(DefaultHealthPath, promhttp.HandlerOpts{})
	if err != nil {
		return err
	}

	p.Prometheus.RegisterGaugeFunc(DefaultHealthPath, Namespace, Subsystem,
		ServiceHealthName, ServiceHealthHelp,
		prometheus.Labels{
			ServiceLabel: p.getServiceLabel(),
		},
		p.getServiceHealth,
	)

	agentStatus := p.StatusCheck.GetAgentStatus()
	p.Prometheus.RegisterGaugeFunc(DefaultHealthPath, Namespace, Subsystem,
		ServiceInfoName, ServiceInfoHelp,
		prometheus.Labels{
			ServiceLabel:      p.getServiceLabel(),
			BuildVersionLabel: agentStatus.BuildVersion,
			BuildDateLabel:    agentStatus.BuildDate,
		},
		// Info-style metric: the value is constant, the data lives in the labels.
		func() float64 { return 1 },
	)

	allPluginStatusMap := p.StatusCheck.GetAllPluginStatus()
	for k, v := range allPluginStatusMap {
		p.Log.Infof("k=%v, v=%v, state=%v", k, v, v.State)
		p.Prometheus.RegisterGaugeFunc(DefaultHealthPath, Namespace, Subsystem,
			DependencyHealthName, DependencyHealthHelp,
			prometheus.Labels{
				ServiceLabel:    p.getServiceLabel(),
				DependencyLabel: k,
			},
			p.getDependencyHealth(k, v),
		)
	}

	return nil
}

// getServiceHealth returns agent health status
func (p *Plugin) getServiceHealth() float64 {
	agentStatus := p.StatusCheck.GetAgentStatus()
	// Adapt Ligato status code for now.
	// TODO: Consolidate with that from the "Common Container Telemetry" proposal.
	health := float64(agentStatus.State)
	p.Log.Infof("ServiceHealth: %v", health)
	return health
}

// getDependencyHealth returns plugin health status
// The returned closure keeps the *status.PluginStatus pointer, so it re-reads
// pluginStatus.State on every scrape rather than freezing the value seen here.
func (p *Plugin) getDependencyHealth(pluginName string, pluginStatus *status.PluginStatus) func() float64 {
	p.Log.Infof("DependencyHealth for plugin %v: %v", pluginName, float64(pluginStatus.State))

	return func() float64 {
		health := float64(pluginStatus.State)
		p.Log.Infof("Dependency Health %v: %v", pluginName, health)
		return health
	}
}

// getServiceLabel returns the agent label if a ServiceLabel provider is wired
// in, or the empty string otherwise.
func (p *Plugin) getServiceLabel() string {
	if p.ServiceLabel != nil {
		return p.ServiceLabel.GetAgentLabel()
	}
	return ""
}
apache-2.0
IdentityModel/IdentityModel2
src/Client/Messages/ParameterReplaceBehavior.cs
532
namespace IdentityModel.Client
{
    /// <summary>
    /// Specifies how parameters in the collection get replaced (or not) when another
    /// parameter with the same key is added.
    /// </summary>
    public enum ParameterReplaceBehavior
    {
        /// <summary>
        /// Allow multiple parameters with the same key; nothing is replaced.
        /// </summary>
        None,

        /// <summary>
        /// Replace a single parameter with the same key
        /// </summary>
        Single,

        /// <summary>
        /// Replace all parameters with same key
        /// </summary>
        All
    }
}
apache-2.0
jskeet/nodatime
src/NodaTime/CalendarSystem.cs
48813
// Copyright 2010 The Noda Time Authors. All rights reserved. // Use of this source code is governed by the Apache License 2.0, // as found in the LICENSE.txt file. using NodaTime.Annotations; using NodaTime.Calendars; using NodaTime.Utility; using System; using System.Collections.Generic; using System.Diagnostics; using System.Diagnostics.CodeAnalysis; using System.Globalization; namespace NodaTime { /// <summary> /// A calendar system maps the non-calendar-specific "local time line" to human concepts /// such as years, months and days. /// </summary> /// <remarks> /// <para> /// Many developers will never need to touch this class, other than to potentially ask a calendar /// how many days are in a particular year/month and the like. Noda Time defaults to using the ISO-8601 /// calendar anywhere that a calendar system is required but hasn't been explicitly specified. /// </para> /// <para> /// If you need to obtain a <see cref="CalendarSystem" /> instance, use one of the static properties or methods in this /// class, such as the <see cref="Iso" /> property or the <see cref="GetHebrewCalendar(HebrewMonthNumbering)" /> method. /// </para> /// <para>Although this class is currently sealed (as of Noda Time 1.2), in the future this decision may /// be reversed. In any case, there is no current intention for third-party developers to be able to implement /// their own calendar systems (for various reasons). If you require a calendar system which is not /// currently supported, please file a feature request and we'll see what we can do. /// </para> /// </remarks> /// <threadsafety> /// All calendar implementations are immutable and thread-safe. See the thread safety /// section of the user guide for more information. /// </threadsafety> [Immutable] public sealed class CalendarSystem { // IDs and names are separated out (usually with the ID either being the same as the name, // or the base ID being the same as a name and then other IDs being formed from it.) 
The // differentiation is only present for clarity. private const string GregorianName = "Gregorian"; private const string GregorianId = GregorianName; private const string IsoName = "ISO"; private const string IsoId = IsoName; private const string CopticName = "Coptic"; private const string CopticId = CopticName; private const string BadiName = "Badi"; private const string BadiId = BadiName; private const string JulianName = "Julian"; private const string JulianId = JulianName; private const string IslamicName = "Hijri"; private const string IslamicIdBase = IslamicName; // Not part of IslamicCalendars as we want to be able to call it without triggering type initialization. internal static string GetIslamicId(IslamicLeapYearPattern leapYearPattern, IslamicEpoch epoch) { return string.Format(CultureInfo.InvariantCulture, "{0} {1}-{2}", IslamicIdBase, epoch, leapYearPattern); } private const string PersianName = "Persian"; private const string PersianIdBase = PersianName; private const string PersianSimpleId = PersianIdBase + " Simple"; private const string PersianAstronomicalId = PersianIdBase + " Algorithmic"; private const string PersianArithmeticId = PersianIdBase + " Arithmetic"; private const string HebrewName = "Hebrew"; private const string HebrewIdBase = HebrewName; private const string HebrewCivilId = HebrewIdBase + " Civil"; private const string HebrewScripturalId = HebrewIdBase + " Scriptural"; private const string UmAlQuraName = "Um Al Qura"; private const string UmAlQuraId = UmAlQuraName; // While we could implement some of these as auto-props, it probably adds more confusion than convenience. 
private static readonly CalendarSystem IsoCalendarSystem;
// Cache of calendar systems constructed so far, indexed by ordinal. Entries are
// written by the CalendarSystem constructor itself; see ForOrdinal for the
// cross-thread visibility caveat.
private static readonly CalendarSystem[] CalendarByOrdinal = new CalendarSystem[(int) CalendarOrdinal.Size];

static CalendarSystem()
{
    var gregorianCalculator = new GregorianYearMonthDayCalculator();
    var gregorianEraCalculator = new GJEraCalculator(gregorianCalculator);
    IsoCalendarSystem = new CalendarSystem(CalendarOrdinal.Iso, IsoId, IsoName, gregorianCalculator, gregorianEraCalculator);
}

#region Public factory members for calendars

/// <summary>
/// Fetches a calendar system by its unique identifier. This provides full round-tripping of a calendar
/// system. It is not guaranteed that calling this method twice with the same identifier will return
/// identical references, but the referenced objects will be equal.
/// </summary>
/// <param name="id">The ID of the calendar system. This is case-sensitive.</param>
/// <returns>The calendar system with the given ID.</returns>
/// <seealso cref="Id"/>
/// <exception cref="KeyNotFoundException">No calendar system for the specified ID can be found.</exception>
/// <exception cref="NotSupportedException">The calendar system with the specified ID is known, but not supported on this platform.</exception>
public static CalendarSystem ForId(string id)
{
    Preconditions.CheckNotNull(id, nameof(id));
    if (!IdToFactoryMap.TryGetValue(id, out Func<CalendarSystem> factory))
    {
        throw new KeyNotFoundException($"No calendar system for ID {id} exists");
    }
    return factory();
}

/// <summary>
/// Fetches a calendar system by its ordinal value, constructing it if necessary.
/// </summary>
internal static CalendarSystem ForOrdinal([Trusted] CalendarOrdinal ordinal)
{
    Preconditions.DebugCheckArgument(ordinal >= 0 && ordinal < CalendarOrdinal.Size, nameof(ordinal),
        "Unknown ordinal value {0}", ordinal);
    // Avoid an array lookup for the overwhelmingly common case.
    if (ordinal == CalendarOrdinal.Iso)
    {
        return IsoCalendarSystem;
    }
    CalendarSystem calendar = CalendarByOrdinal[(int) ordinal];
    if (calendar != null)
    {
        return calendar;
    }
    // Not found in the array. This can happen if the calendar system was initialized in
    // a different thread, and the write to the array isn't visible in this thread yet.
    // A simple switch will do the right thing. This is separated out (directly below) to allow
    // it to be tested separately. (It may also help this method be inlined...) The return
    // statement below is unlikely to ever be hit by code coverage, as it's handling a very
    // unusual and hard-to-provoke situation.
    return ForOrdinalUncached(ordinal);
}

/// <summary>
/// Returns the IDs of all calendar systems available within Noda Time. The order of the keys is not guaranteed.
/// </summary>
/// <value>The IDs of all calendar systems available within Noda Time.</value>
public static IEnumerable<string> Ids => IdToFactoryMap.Keys;

// Maps every supported calendar ID (see the Id property docs) to a factory for the
// corresponding calendar system. Lookups go through ForId; keys feed the Ids property.
private static readonly Dictionary<string, Func<CalendarSystem>> IdToFactoryMap =
    new Dictionary<string, Func<CalendarSystem>>
{
    {IsoId, () => Iso},
    {PersianSimpleId, () => PersianSimple},
    {PersianArithmeticId, () => PersianArithmetic},
    {PersianAstronomicalId, () => PersianAstronomical},
    {HebrewCivilId, () => GetHebrewCalendar(HebrewMonthNumbering.Civil)},
    {HebrewScripturalId, () => GetHebrewCalendar(HebrewMonthNumbering.Scriptural)},
    {GregorianId, () => Gregorian},
    {CopticId, () => Coptic},
    {BadiId, () => Badi},
    {JulianId, () => Julian},
    {UmAlQuraId, () => UmAlQura},
    {GetIslamicId(IslamicLeapYearPattern.Indian, IslamicEpoch.Civil), () => GetIslamicCalendar(IslamicLeapYearPattern.Indian, IslamicEpoch.Civil)},
    {GetIslamicId(IslamicLeapYearPattern.Base15, IslamicEpoch.Civil), () => GetIslamicCalendar(IslamicLeapYearPattern.Base15, IslamicEpoch.Civil)},
    {GetIslamicId(IslamicLeapYearPattern.Base16, IslamicEpoch.Civil), () => GetIslamicCalendar(IslamicLeapYearPattern.Base16, IslamicEpoch.Civil)},
    {GetIslamicId(IslamicLeapYearPattern.HabashAlHasib, IslamicEpoch.Civil), () => GetIslamicCalendar(IslamicLeapYearPattern.HabashAlHasib, IslamicEpoch.Civil)},
    {GetIslamicId(IslamicLeapYearPattern.Indian, IslamicEpoch.Astronomical), () => GetIslamicCalendar(IslamicLeapYearPattern.Indian, IslamicEpoch.Astronomical)},
    {GetIslamicId(IslamicLeapYearPattern.Base15, IslamicEpoch.Astronomical), () => GetIslamicCalendar(IslamicLeapYearPattern.Base15, IslamicEpoch.Astronomical)},
    {GetIslamicId(IslamicLeapYearPattern.Base16, IslamicEpoch.Astronomical), () => GetIslamicCalendar(IslamicLeapYearPattern.Base16, IslamicEpoch.Astronomical)},
    {GetIslamicId(IslamicLeapYearPattern.HabashAlHasib, IslamicEpoch.Astronomical), () => GetIslamicCalendar(IslamicLeapYearPattern.HabashAlHasib, IslamicEpoch.Astronomical)},
};

/// <summary>
/// Returns a calendar system that follows the rules of the ISO-8601 standard,
/// which is compatible with Gregorian for all modern dates.
/// </summary>
/// <remarks>
/// As of Noda Time 2.0, this calendar system is equivalent to <see cref="Gregorian"/>.
/// The only areas in which the calendars differed were around centuries, and the members
/// relating to those differences were removed in Noda Time 2.0.
/// The distinction between Gregorian and ISO has been maintained for the sake of simplicity, compatibility
/// and consistency.
/// </remarks>
/// <value>The ISO calendar system.</value>
public static CalendarSystem Iso => IsoCalendarSystem;

/// <summary>
/// Returns a Hebrew calendar, as described at http://en.wikipedia.org/wiki/Hebrew_calendar. This is a
/// purely mathematical calculator, applied proleptically to the period where the real calendar was observational.
/// </summary>
/// <remarks>
/// <para>Please note that in version 1.3.0 of Noda Time, support for the Hebrew calendar is somewhat experimental,
/// particularly in terms of calculations involving adding or subtracting years.
/// Additionally, text formatting
/// and parsing using month names is not currently supported, due to the challenges of handling leap months.
/// It is hoped that this will be improved in future versions.</para>
/// <para>The implementation for this was taken from http://www.cs.tau.ac.il/~nachum/calendar-book/papers/calendar.ps,
/// which is a public domain algorithm presumably equivalent to that given in the Calendrical Calculations book
/// by the same authors (Nachum Dershowitz and Edward Reingold).
/// </para>
/// </remarks>
/// <param name="monthNumbering">The month numbering system to use</param>
/// <returns>A Hebrew calendar system for the given month numbering.</returns>
public static CalendarSystem GetHebrewCalendar(HebrewMonthNumbering monthNumbering)
{
    // Valid numberings are Civil (1) and Scriptural (2); the array is indexed from 0.
    Preconditions.CheckArgumentRange(nameof(monthNumbering), (int) monthNumbering, 1, 2);
    return HebrewCalendars.ByMonthNumbering[((int) monthNumbering) - 1];
}

/// <summary>
/// Returns the Badíʿ (meaning "wondrous" or "unique") calendar, as described at https://en.wikipedia.org/wiki/Badi_calendar.
/// This is a purely solar calendar with years starting at the vernal equinox.
/// </summary>
/// <remarks>
/// <para>The Badíʿ calendar was developed and defined by the founders of the Bahá'í Faith in the mid to late
/// 1800's A.D. The first year in the calendar coincides with 1844 A.D. Years are labeled "B.E." for Bahá'í Era.</para>
/// <para>A year consists of 19 months, each with 19 days. Each day starts at sunset. Years are grouped into sets
/// of 19 "Unities" (Váḥid) and 19 Unities make up 1 "All Things" (Kull-i-Shay’).</para>
/// <para>A period of days (usually 4 or 5, called Ayyám-i-Há) occurs between the 18th and 19th months. The length of this
/// period of intercalary days is solely determined by the date of the following vernal equinox. The vernal equinox is
/// a momentary point in time, so the "date" of the equinox is determined by the date (beginning
/// at sunset) in effect in Tehran, Iran at the moment of the equinox.</para>
/// <para>In this Noda Time implementation, days start at midnight and lookup tables are used to determine vernal equinox dates.
/// Ayyám-i-Há is internally modelled as extra days added to the 18th month. As a result, a few functions will
/// not work as expected for Ayyám-i-Há, such as EndOfMonth.</para>
/// </remarks>
/// <returns>The Badíʿ calendar system.</returns>
public static CalendarSystem Badi => MiscellaneousCalendars.Badi;

/// <summary>
/// Returns an Islamic, or Hijri, calendar system.
/// </summary>
/// <remarks>
/// <para>
/// This returns a tabular calendar, rather than one based on lunar observation. This calendar is a
/// lunar calendar with 12 months, each of 29 or 30 days, resulting in a year of 354 days (or 355 on a leap
/// year).
/// </para>
/// <para>
/// Year 1 in the Islamic calendar began on July 15th or 16th, 622 CE (Julian), thus
/// Islamic years do not begin at the same time as Julian years. This calendar
/// is not proleptic, as it does not allow dates before the first Islamic year.
/// </para>
/// <para>
/// There are two basic forms of the Islamic calendar, the tabular and the
/// observed. The observed form cannot easily be used by computers as it
/// relies on human observation of the new moon. The tabular calendar, implemented here, is an
/// arithmetic approximation of the observed form that follows relatively simple rules.
/// </para>
/// <para>You should choose an epoch based on which external system you wish
/// to be compatible with. The epoch beginning on July 16th is the more common
/// one for the tabular calendar, so using <see cref="IslamicEpoch.Civil" />
/// would usually be a logical choice. However, Windows uses July 15th, so
/// if you need to be compatible with other Windows systems, you may wish to use
/// <see cref="IslamicEpoch.Astronomical" />. The fact that the Islamic calendar
/// traditionally starts at dusk, a Julian day traditionally starts at noon,
/// and all calendar systems in Noda Time start their days at midnight adds
/// somewhat inevitable confusion to the mix, unfortunately.</para>
/// <para>
/// The tabular form of the calendar defines 12 months of alternately
/// 30 and 29 days. The last month is extended to 30 days in a leap year.
/// Leap years occur according to a 30 year cycle. There are four recognised
/// patterns of leap years in the 30 year cycle:
/// </para>
/// <list type="table">
/// <listheader><term>Origin</term><description>Leap years</description></listheader>
/// <item><term>Kūshyār ibn Labbān</term><description>2, 5, 7, 10, 13, 15, 18, 21, 24, 26, 29</description></item>
/// <item><term>al-Fazārī</term><description>2, 5, 7, 10, 13, 16, 18, 21, 24, 26, 29</description></item>
/// <item><term>Fātimid (also known as Misri or Bohra)</term><description>2, 5, 8, 10, 13, 16, 19, 21, 24, 27, 29</description></item>
/// <item><term>Habash al-Hasib</term><description>2, 5, 8, 11, 13, 16, 19, 21, 24, 27, 30</description></item>
/// </list>
/// <para>
/// The leap year pattern to use is determined from the first parameter to this factory method.
/// The second parameter determines which epoch is used - the "astronomical" or "Thursday" epoch
/// (July 15th 622CE) or the "civil" or "Friday" epoch (July 16th 622CE).
/// </para>
/// <para>
/// This implementation defines a day as midnight to midnight exactly as per
/// the ISO calendar. This correct start of day is at sunset on the previous
/// day, however this cannot readily be modelled and has been ignored.
/// </para>
/// </remarks>
/// <param name="leapYearPattern">The pattern of years in the 30-year cycle to consider as leap years</param>
/// <param name="epoch">The kind of epoch to use (astronomical or civil)</param>
/// <returns>A suitable Islamic calendar reference; the same reference may be returned by several
/// calls as the object is immutable and thread-safe.</returns>
public static CalendarSystem GetIslamicCalendar(IslamicLeapYearPattern leapYearPattern, IslamicEpoch epoch)
{
    Preconditions.CheckArgumentRange(nameof(leapYearPattern), (int) leapYearPattern, 1, 4);
    Preconditions.CheckArgumentRange(nameof(epoch), (int) epoch, 1, 2);
    // NOTE(review): "Patter" (not "Pattern") is the actual identifier declared in
    // IslamicCalendars — do not "correct" it here without renaming the declaration.
    return IslamicCalendars.ByLeapYearPatterAndEpoch[(int) leapYearPattern - 1, (int) epoch - 1];
}
#endregion

// Other fields back read-only automatic properties.
private readonly EraCalculator eraCalculator;

// Convenience constructor for calendars with a single era.
private CalendarSystem(CalendarOrdinal ordinal, string id, string name, YearMonthDayCalculator yearMonthDayCalculator, Era singleEra)
    : this(ordinal, id, name, yearMonthDayCalculator, new SingleEraCalculator(singleEra, yearMonthDayCalculator))
{
}

private CalendarSystem(CalendarOrdinal ordinal, string id, string name, YearMonthDayCalculator yearMonthDayCalculator, EraCalculator eraCalculator)
{
    this.Ordinal = ordinal;
    this.Id = id;
    this.Name = name;
    this.YearMonthDayCalculator = yearMonthDayCalculator;
    this.MinYear = yearMonthDayCalculator.MinYear;
    this.MaxYear = yearMonthDayCalculator.MaxYear;
    // Day-number bounds are derived from the year bounds: first day of MinYear
    // through the day before the first day of (MaxYear + 1).
    this.MinDays = yearMonthDayCalculator.GetStartOfYearInDays(MinYear);
    this.MaxDays = yearMonthDayCalculator.GetStartOfYearInDays(MaxYear + 1) - 1;
    // We trust the construction code not to mutate the array...
    this.eraCalculator = eraCalculator;
    CalendarByOrdinal[(int) ordinal] = this;
}

/// <summary>
/// Returns the unique identifier for this calendar system. This provides full round-trip capability
/// using <see cref="ForId" /> to retrieve the calendar system from the identifier.
/// </summary>
/// <remarks>
/// <para>
/// A unique ID for a calendar is required when serializing types which include a <see cref="CalendarSystem"/>.
/// As of 2 Nov 2012 (ISO calendar) there are no ISO or RFC standards for naming a calendar system. As such,
/// the identifiers provided here are specific to Noda Time, and are not guaranteed to interoperate with any other
/// date and time API.
/// </para>
/// <list type="table">
/// <listheader>
/// <term>Calendar ID</term>
/// <description>Equivalent factory method or property</description>
/// </listheader>
/// <item><term>ISO</term><description><see cref="CalendarSystem.Iso"/></description></item>
/// <item><term>Gregorian</term><description><see cref="CalendarSystem.Gregorian"/></description></item>
/// <item><term>Coptic</term><description><see cref="CalendarSystem.Coptic"/></description></item>
/// <item><term>Badíʿ</term><description><see cref="CalendarSystem.Badi"/></description></item>
/// <item><term>Julian</term><description><see cref="CalendarSystem.Julian"/></description></item>
/// <item><term>Hijri Civil-Indian</term><description><see cref="CalendarSystem.GetIslamicCalendar"/>(IslamicLeapYearPattern.Indian, IslamicEpoch.Civil)</description></item>
/// <item><term>Hijri Civil-Base15</term><description><see cref="CalendarSystem.GetIslamicCalendar"/>(IslamicLeapYearPattern.Base15, IslamicEpoch.Civil)</description></item>
/// <item><term>Hijri Civil-Base16</term><description><see cref="CalendarSystem.GetIslamicCalendar"/>(IslamicLeapYearPattern.Base16, IslamicEpoch.Civil)</description></item>
/// <item><term>Hijri Civil-HabashAlHasib</term><description><see cref="CalendarSystem.GetIslamicCalendar"/>(IslamicLeapYearPattern.HabashAlHasib, IslamicEpoch.Civil)</description></item>
/// <item><term>Hijri Astronomical-Indian</term><description><see cref="CalendarSystem.GetIslamicCalendar"/>(IslamicLeapYearPattern.Indian, IslamicEpoch.Astronomical)</description></item>
/// <item><term>Hijri Astronomical-Base15</term><description><see cref="CalendarSystem.GetIslamicCalendar"/>(IslamicLeapYearPattern.Base15, IslamicEpoch.Astronomical)</description></item>
/// <item><term>Hijri Astronomical-Base16</term><description><see cref="CalendarSystem.GetIslamicCalendar"/>(IslamicLeapYearPattern.Base16, IslamicEpoch.Astronomical)</description></item>
/// <item><term>Hijri Astronomical-HabashAlHasib</term><description><see cref="CalendarSystem.GetIslamicCalendar"/>(IslamicLeapYearPattern.HabashAlHasib, IslamicEpoch.Astronomical)</description></item>
/// <item><term>Persian Simple</term><description><see cref="CalendarSystem.PersianSimple"/></description></item>
/// <item><term>Persian Arithmetic</term><description><see cref="CalendarSystem.PersianArithmetic"/></description></item>
/// <item><term>Persian Astronomical</term><description><see cref="CalendarSystem.PersianAstronomical"/></description></item>
/// <item><term>Um Al Qura</term><description><see cref="CalendarSystem.UmAlQura"/>()</description></item>
/// <item><term>Hebrew Civil</term><description><see cref="CalendarSystem.HebrewCivil"/></description></item>
/// <item><term>Hebrew Scriptural</term><description><see cref="CalendarSystem.HebrewScriptural"/></description></item>
/// </list>
/// </remarks>
/// <value>The unique identifier for this calendar system.</value>
public string Id { get; }

/// <summary>
/// Returns the name of this calendar system. Each kind of calendar system has a unique name, but this
/// does not usually provide enough information for round-tripping. (For example, the name of an
/// Islamic calendar system does not indicate which kind of leap cycle it uses.)
/// </summary>
/// <value>The name of this calendar system.</value>
public string Name { get; }

/// <summary>
/// Gets the minimum valid year (inclusive) within this calendar.
/// </summary>
/// <value>The minimum valid year (inclusive) within this calendar.</value>
public int MinYear { get; }

/// <summary>
/// Gets the maximum valid year (inclusive) within this calendar.
/// </summary>
/// <value>The maximum valid year (inclusive) within this calendar.</value>
public int MaxYear { get; }

/// <summary>
/// Returns the minimum day number this calendar can handle.
/// </summary>
internal int MinDays { get; }

/// <summary>
/// Returns the maximum day number (inclusive) this calendar can handle.
/// </summary>
internal int MaxDays { get; }

/// <summary>
/// Returns the ordinal value of this calendar.
/// </summary>
internal CalendarOrdinal Ordinal { get; }

#region Era-based members

/// <summary>
/// Gets a read-only list of eras used in this calendar system.
/// </summary>
/// <value>A read-only list of eras used in this calendar system.</value>
public IList<Era> Eras => eraCalculator.Eras;

/// <summary>
/// Returns the "absolute year" (the one used throughout most of the API, without respect to eras)
/// from a year-of-era and an era.
/// </summary>
/// <remarks>
/// For example, in the Gregorian and Julian calendar systems, the BCE era starts at year 1, which is
/// equivalent to an "absolute year" of 0 (then BCE year 2 has an absolute year of -1, and so on). The absolute
/// year is the year that is used throughout the API; year-of-era is typically used primarily when formatting
/// and parsing date values to and from text.
/// </remarks>
/// <param name="yearOfEra">The year within the era.</param>
/// <param name="era">The era in which to consider the year</param>
/// <returns>The absolute year represented by the specified year of era.</returns>
/// <exception cref="ArgumentOutOfRangeException"><paramref name="yearOfEra"/> is out of the range of years for the given era.</exception>
/// <exception cref="ArgumentException"><paramref name="era"/> is not an era used in this calendar.</exception>
public int GetAbsoluteYear(int yearOfEra, Era era) => eraCalculator.GetAbsoluteYear(yearOfEra, era);

/// <summary>
/// Returns the maximum valid year-of-era in the given era.
/// </summary>
/// <remarks>Note that depending on the calendar system, it's possible that only
/// part of the returned year falls within the given era. It is also possible that
/// the returned value represents the earliest year of the era rather than the latest
/// year. (See the BC era in the Gregorian calendar, for example.)</remarks>
/// <param name="era">The era in which to find the greatest year</param>
/// <returns>The maximum valid year in the given era.</returns>
/// <exception cref="ArgumentException"><paramref name="era"/> is not an era used in this calendar.</exception>
public int GetMaxYearOfEra(Era era) => eraCalculator.GetMaxYearOfEra(era);

/// <summary>
/// Returns the minimum valid year-of-era in the given era.
/// </summary>
/// <remarks>Note that depending on the calendar system, it's possible that only
/// part of the returned year falls within the given era. It is also possible that
/// the returned value represents the latest year of the era rather than the earliest
/// year.
/// (See the BC era in the Gregorian calendar, for example.)</remarks>
/// <param name="era">The era in which to find the greatest year</param>
/// <returns>The minimum valid year in the given era.</returns>
/// <exception cref="ArgumentException"><paramref name="era"/> is not an era used in this calendar.</exception>
public int GetMinYearOfEra(Era era) => eraCalculator.GetMinYearOfEra(era);
#endregion

internal YearMonthDayCalculator YearMonthDayCalculator { get; }

// Converts a day count (days since the Unix epoch) into a year/month/day value
// tagged with this calendar's ordinal. Input is range-checked against MinDays/MaxDays.
internal YearMonthDayCalendar GetYearMonthDayCalendarFromDaysSinceEpoch(int daysSinceEpoch)
{
    Preconditions.CheckArgumentRange(nameof(daysSinceEpoch), daysSinceEpoch, MinDays, MaxDays);
    return YearMonthDayCalculator.GetYearMonthDay(daysSinceEpoch).WithCalendarOrdinal(Ordinal);
}

#region object overrides

/// <summary>
/// Converts this calendar system to text by simply returning its unique ID.
/// </summary>
/// <returns>The ID of this calendar system.</returns>
public override string ToString() => Id;
#endregion

/// <summary>
/// Returns the number of days since the Unix epoch (1970-01-01 ISO) for the given date.
/// </summary>
internal int GetDaysSinceEpoch([Trusted] YearMonthDay yearMonthDay)
{
    DebugValidateYearMonthDay(yearMonthDay);
    return YearMonthDayCalculator.GetDaysSinceEpoch(yearMonthDay);
}

/// <summary>
/// Returns the IsoDayOfWeek corresponding to the day of week for the given year, month and day.
/// </summary>
/// <param name="yearMonthDay">The year, month and day to use to find the day of the week</param>
/// <returns>The day of the week as an IsoDayOfWeek</returns>
internal IsoDayOfWeek GetDayOfWeek([Trusted] YearMonthDay yearMonthDay)
{
    DebugValidateYearMonthDay(yearMonthDay);
    int daysSinceEpoch = YearMonthDayCalculator.GetDaysSinceEpoch(yearMonthDay);
    // Epoch day 0 (1970-01-01) maps to 4 (Thursday in IsoDayOfWeek numbering).
    // The split at -3 keeps the left operand of % non-negative on each side,
    // so the result always lands in the 1..7 range even for pre-epoch dates.
    int numericDayOfWeek = unchecked(daysSinceEpoch >= -3 ? 1 + ((daysSinceEpoch + 3) % 7) : 7 + ((daysSinceEpoch + 4) % 7));
    return (IsoDayOfWeek) numericDayOfWeek;
}

/// <summary>
/// Returns the number of days in the given year.
/// </summary>
/// <param name="year">The year to determine the number of days in</param>
/// <exception cref="ArgumentOutOfRangeException">The given year is invalid for this calendar.</exception>
/// <returns>The number of days in the given year.</returns>
public int GetDaysInYear(int year)
{
    Preconditions.CheckArgumentRange(nameof(year), year, MinYear, MaxYear);
    return YearMonthDayCalculator.GetDaysInYear(year);
}

/// <summary>
/// Returns the number of days in the given month within the given year.
/// </summary>
/// <param name="year">The year in which to consider the month</param>
/// <param name="month">The month to determine the number of days in</param>
/// <exception cref="ArgumentOutOfRangeException">The given year / month combination
/// is invalid for this calendar.</exception>
/// <returns>The number of days in the given month and year.</returns>
public int GetDaysInMonth(int year, int month)
{
    // Simplest way to validate the year and month. Assume it's quick enough to validate the day...
    ValidateYearMonthDay(year, month, 1);
    return YearMonthDayCalculator.GetDaysInMonth(year, month);
}

/// <summary>
/// Returns whether or not the given year is a leap year in this calendar.
/// </summary>
/// <param name="year">The year to consider.</param>
/// <exception cref="ArgumentOutOfRangeException">The given year is invalid for this calendar.
/// Note that some implementations may return a value rather than throw this exception. Failure to throw an
/// exception should not be treated as an indication that the year is valid.</exception>
/// <returns>True if the given year is a leap year; false otherwise.</returns>
public bool IsLeapYear(int year)
{
    Preconditions.CheckArgumentRange(nameof(year), year, MinYear, MaxYear);
    return YearMonthDayCalculator.IsLeapYear(year);
}

/// <summary>
/// Returns the maximum valid month (inclusive) within this calendar in the given year.
/// </summary>
/// <remarks>
/// It is assumed that in all calendars, every month between 1 and this month
/// number is valid for the given year. This does not necessarily mean that the first month of the year
/// is 1, however. (See the Hebrew calendar system using the scriptural month numbering system for example.)
/// </remarks>
/// <param name="year">The year to consider.</param>
/// <exception cref="ArgumentOutOfRangeException">The given year is invalid for this calendar.
/// Note that some implementations may return a month rather than throw this exception (for example, if all
/// years have the same number of months in this calendar system). Failure to throw an exception should not be
/// treated as an indication that the year is valid.</exception>
/// <returns>The maximum month number within the given year.</returns>
public int GetMonthsInYear(int year)
{
    Preconditions.CheckArgumentRange(nameof(year), year, MinYear, MaxYear);
    return YearMonthDayCalculator.GetMonthsInYear(year);
}

// Validates the given components against this calendar's rules, delegating to the
// year/month/day calculator (which throws for invalid combinations).
internal void ValidateYearMonthDay(int year, int month, int day)
{
    YearMonthDayCalculator.ValidateYearMonthDay(year, month, day);
}

// Compares two dates within this calendar (delegates to the year/month/day calculator).
// Inputs are [Trusted]: only validated in debug builds.
internal int Compare([Trusted] YearMonthDay lhs, [Trusted] YearMonthDay rhs)
{
    DebugValidateYearMonthDay(lhs);
    DebugValidateYearMonthDay(rhs);
    return YearMonthDayCalculator.Compare(lhs, rhs);
}

#region "Getter" methods which used to be DateTimeField

internal int GetDayOfYear([Trusted] YearMonthDay yearMonthDay)
{
    DebugValidateYearMonthDay(yearMonthDay);
    return YearMonthDayCalculator.GetDayOfYear(yearMonthDay);
}

internal int GetYearOfEra([Trusted] int absoluteYear)
{
    Preconditions.DebugCheckArgumentRange(nameof(absoluteYear), absoluteYear, MinYear, MaxYear);
    return eraCalculator.GetYearOfEra(absoluteYear);
}

internal Era GetEra([Trusted] int absoluteYear)
{
    Preconditions.DebugCheckArgumentRange(nameof(absoluteYear), absoluteYear, MinYear, MaxYear);
    return eraCalculator.GetEra(absoluteYear);
}

/// <summary>
/// In debug configurations only, this method
/// calls <see cref="ValidateYearMonthDay"/>
/// with the components of the given YearMonthDay, ensuring that it's valid in the
/// current calendar.
/// </summary>
/// <param name="yearMonthDay">The value to validate.</param>
[Conditional("DEBUG")]
[ExcludeFromCodeCoverage]
internal void DebugValidateYearMonthDay(YearMonthDay yearMonthDay)
{
    // Avoid the line even being compiled in a release build...
#if DEBUG
    ValidateYearMonthDay(yearMonthDay.Year, yearMonthDay.Month, yearMonthDay.Day);
#endif
}
#endregion

/// <summary>
/// Returns a Gregorian calendar system.
/// </summary>
/// <remarks>
/// <para>
/// The Gregorian calendar system defines every
/// fourth year as leap, unless the year is divisible by 100 and not by 400.
/// This improves upon the Julian calendar leap year rule.
/// </para>
/// <para>
/// Although the Gregorian calendar did not exist before 1582 CE, this
/// calendar system assumes it did, thus it is proleptic. This implementation also
/// fixes the start of the year at January 1.
/// </para>
/// </remarks>
/// <value>A Gregorian calendar system.</value>
public static CalendarSystem Gregorian => GregorianJulianCalendars.Gregorian;

/// <summary>
/// Returns a pure proleptic Julian calendar system, which defines every
/// fourth year as a leap year. This implementation follows the leap year rule
/// strictly, even for dates before 8 CE, where leap years were actually
/// irregular.
/// </summary>
/// <remarks>
/// Although the Julian calendar did not exist before 45 BCE, this calendar
/// assumes it did, thus it is proleptic. This implementation also fixes the
/// start of the year at January 1.
/// </remarks>
/// <value>A suitable Julian calendar reference; the same reference may be returned by several
/// calls as the object is immutable and thread-safe.</value>
public static CalendarSystem Julian => GregorianJulianCalendars.Julian;

/// <summary>
/// Returns a Coptic calendar system, which defines every fourth year as
/// leap, much like the Julian calendar. The year is broken down into 12 months,
/// each 30 days in length. An extra period at the end of the year is either 5
/// or 6 days in length. In this implementation, it is considered a 13th month.
/// </summary>
/// <remarks>
/// <para>
/// Year 1 in the Coptic calendar began on August 29, 284 CE (Julian), thus
/// Coptic years do not begin at the same time as Julian years. This calendar
/// is not proleptic, as it does not allow dates before the first Coptic year.
/// </para>
/// <para>
/// This implementation defines a day as midnight to midnight exactly as per
/// the ISO calendar. Some references indicate that a Coptic day starts at
/// sunset on the previous ISO day, but this has not been confirmed and is not
/// implemented.
/// </para>
/// </remarks>
/// <value>A suitable Coptic calendar reference; the same reference may be returned by several
/// calls as the object is immutable and thread-safe.</value>
public static CalendarSystem Coptic => MiscellaneousCalendars.Coptic;

/// <summary>
/// Returns an Islamic calendar system equivalent to the one used by the BCL HijriCalendar.
/// </summary>
/// <remarks>
/// This uses the <see cref="IslamicLeapYearPattern.Base16"/> leap year pattern and the
/// <see cref="IslamicEpoch.Astronomical"/> epoch. This is equivalent to HijriCalendar
/// when the <c>HijriCalendar.HijriAdjustment</c> is 0.
/// </remarks>
/// <seealso cref="CalendarSystem.GetIslamicCalendar"/>
/// <value>An Islamic calendar system equivalent to the one used by the BCL.</value>
public static CalendarSystem IslamicBcl => GetIslamicCalendar(IslamicLeapYearPattern.Base16, IslamicEpoch.Astronomical);

/// <summary>
/// Returns a Persian (also known as Solar Hijri) calendar system implementing the behaviour of the
/// BCL <c>PersianCalendar</c> before .NET 4.6, and the sole Persian calendar in Noda Time 1.3.
/// </summary>
/// <remarks>
/// This implementation uses a simple 33-year leap cycle, where years 1, 5, 9, 13, 17, 22, 26, and 30
/// in each cycle are leap years.
/// </remarks>
/// <value>A Persian calendar system using a simple 33-year leap cycle.</value>
public static CalendarSystem PersianSimple => PersianCalendars.Simple;

/// <summary>
/// Returns a Persian (also known as Solar Hijri) calendar system implementing the behaviour of the
/// BCL <c>PersianCalendar</c> from .NET 4.6 onwards (and Windows 10), and the astronomical
/// system described in Wikipedia and Calendrical Calculations.
/// </summary>
/// <remarks>
/// This implementation uses data derived from the .NET 4.6 implementation (with the data built into Noda Time, so there's
/// no BCL dependency) for simplicity; the actual implementation involves computing the time of noon in Iran, and
/// is complex.
/// </remarks>
/// <value>A Persian calendar system using astronomical calculations to determine leap years.</value>
public static CalendarSystem PersianArithmetic => PersianCalendars.Arithmetic;

/// <summary>
/// Returns a Persian (also known as Solar Hijri) calendar system implementing the behaviour
/// proposed by Ahmad Birashk with nested cycles of years determining which years are leap years.
/// </summary>
/// <remarks>
/// This calendar is also known as the algorithmic Solar Hijri calendar.
/// </remarks>
/// <value>A Persian calendar system using cycles-within-cycles of years to determine leap years.</value>
public static CalendarSystem PersianAstronomical => PersianCalendars.Astronomical;

/// <summary>
/// Returns a Hebrew calendar system using the civil month numbering,
/// equivalent to the one used by the BCL HebrewCalendar.
/// </summary>
/// <seealso cref="CalendarSystem.GetHebrewCalendar"/>
/// <value>A Hebrew calendar system using the civil month numbering, equivalent to the one used by the
/// BCL.</value>
public static CalendarSystem HebrewCivil => GetHebrewCalendar(HebrewMonthNumbering.Civil);

/// <summary>
/// Returns a Hebrew calendar system using the scriptural month numbering.
/// </summary>
/// <seealso cref="CalendarSystem.GetHebrewCalendar"/>
/// <value>A Hebrew calendar system using the scriptural month numbering.</value>
public static CalendarSystem HebrewScriptural => GetHebrewCalendar(HebrewMonthNumbering.Scriptural);

/// <summary>
/// Returns an Um Al Qura calendar system - an Islamic calendar system primarily used by
/// Saudi Arabia.
/// </summary>
/// <remarks>
/// This is a tabular calendar, relying on pregenerated data.
/// </remarks>
/// <value>A calendar system for the Um Al Qura calendar.</value>
public static CalendarSystem UmAlQura => MiscellaneousCalendars.UmAlQura;

// Fallback used by ForOrdinal when the CalendarByOrdinal cache entry isn't visible yet;
// forces construction via the relevant lazily-initialized holder/factory.
// TODO: Move this after fixing https://github.com/nodatime/nodatime/issues/1269
[VisibleForTesting]
internal static CalendarSystem ForOrdinalUncached([Trusted] CalendarOrdinal ordinal) =>
    ordinal switch
    {
        // This entry is really just for completeness. We'd never get called with this.
        CalendarOrdinal.Iso => Iso,
        CalendarOrdinal.Gregorian => Gregorian,
        CalendarOrdinal.Julian => Julian,
        CalendarOrdinal.Coptic => Coptic,
        CalendarOrdinal.Badi => Badi,
        CalendarOrdinal.HebrewCivil => HebrewCivil,
        CalendarOrdinal.HebrewScriptural => HebrewScriptural,
        CalendarOrdinal.PersianSimple => PersianSimple,
        CalendarOrdinal.PersianArithmetic => PersianArithmetic,
        CalendarOrdinal.PersianAstronomical => PersianAstronomical,
        CalendarOrdinal.IslamicAstronomicalBase15 => GetIslamicCalendar(IslamicLeapYearPattern.Base15, IslamicEpoch.Astronomical),
        CalendarOrdinal.IslamicAstronomicalBase16 => GetIslamicCalendar(IslamicLeapYearPattern.Base16, IslamicEpoch.Astronomical),
        CalendarOrdinal.IslamicAstronomicalIndian => GetIslamicCalendar(IslamicLeapYearPattern.Indian, IslamicEpoch.Astronomical),
        CalendarOrdinal.IslamicAstronomicalHabashAlHasib => GetIslamicCalendar(IslamicLeapYearPattern.HabashAlHasib, IslamicEpoch.Astronomical),
        CalendarOrdinal.IslamicCivilBase15 => GetIslamicCalendar(IslamicLeapYearPattern.Base15, IslamicEpoch.Civil),
        CalendarOrdinal.IslamicCivilBase16 => GetIslamicCalendar(IslamicLeapYearPattern.Base16, IslamicEpoch.Civil),
        CalendarOrdinal.IslamicCivilIndian => GetIslamicCalendar(IslamicLeapYearPattern.Indian, IslamicEpoch.Civil),
        CalendarOrdinal.IslamicCivilHabashAlHasib => GetIslamicCalendar(IslamicLeapYearPattern.HabashAlHasib, IslamicEpoch.Civil),
        CalendarOrdinal.UmAlQura => UmAlQura,
        _ => throw new InvalidOperationException($"Bug in Noda Time: calendar ordinal {ordinal} missing from switch in CalendarSystem.ForOrdinal.")
    };

// "Holder" classes for lazy initialization of calendar systems
// NOTE(review): this declaration is truncated at the end of the visible chunk;
// the remainder of PersianCalendars (and any further holders) lies outside this view.
private static class PersianCalendars
{
    internal static readonly CalendarSystem Simple =
        new CalendarSystem(CalendarOrdinal.PersianSimple, PersianSimpleId, PersianName, new PersianYearMonthDayCalculator.Simple(), Era.AnnoPersico);
    internal static readonly CalendarSystem Arithmetic =
        new CalendarSystem(CalendarOrdinal.PersianArithmetic, PersianArithmeticId,
PersianName, new PersianYearMonthDayCalculator.Arithmetic(), Era.AnnoPersico); internal static readonly CalendarSystem Astronomical = new CalendarSystem(CalendarOrdinal.PersianAstronomical, PersianAstronomicalId, PersianName, new PersianYearMonthDayCalculator.Astronomical(), Era.AnnoPersico); // Static constructor to enforce laziness. static PersianCalendars() {} } /// <summary> /// Specifically the calendars implemented by IslamicYearMonthDayCalculator, as opposed to all /// Islam-based calendars (which would include UmAlQura and Persian, for example). /// </summary> private static class IslamicCalendars { internal static readonly CalendarSystem[,] ByLeapYearPatterAndEpoch; static IslamicCalendars() { ByLeapYearPatterAndEpoch = new CalendarSystem[4, 2]; for (int i = 1; i <= 4; i++) { for (int j = 1; j <= 2; j++) { var leapYearPattern = (IslamicLeapYearPattern) i; var epoch = (IslamicEpoch) j; var calculator = new IslamicYearMonthDayCalculator((IslamicLeapYearPattern) i, (IslamicEpoch) j); CalendarOrdinal ordinal = CalendarOrdinal.IslamicAstronomicalBase15 + (i - 1) + (j - 1) * 4; ByLeapYearPatterAndEpoch[i - 1, j - 1] = new CalendarSystem(ordinal, GetIslamicId(leapYearPattern, epoch), IslamicName, calculator, Era.AnnoHegirae); } } } } /// <summary> /// Odds and ends, with an assumption that it's not *that* painful to initialize UmAlQura if you only /// need Coptic, for example. 
/// </summary> private static class MiscellaneousCalendars { internal static readonly CalendarSystem Coptic = new CalendarSystem(CalendarOrdinal.Coptic, CopticId, CopticName, new CopticYearMonthDayCalculator(), Era.AnnoMartyrum); internal static readonly CalendarSystem UmAlQura = new CalendarSystem(CalendarOrdinal.UmAlQura, UmAlQuraId, UmAlQuraName, new UmAlQuraYearMonthDayCalculator(), Era.AnnoHegirae); internal static readonly CalendarSystem Badi = new CalendarSystem(CalendarOrdinal.Badi, BadiId, BadiName, new BadiYearMonthDayCalculator(), Era.Bahai); // Static constructor to enforce laziness. This used to be important to avoid a Heisenbug. // I don't believe it's strictly required now, but it does no harm and I don't want to go // through the pain I went through before. Besides, very few users will actually want these // calendars, so making this fully lazy avoids unnecessary initialization. static MiscellaneousCalendars() { } } private static class GregorianJulianCalendars { internal static readonly CalendarSystem Gregorian; internal static readonly CalendarSystem Julian; static GregorianJulianCalendars() { var julianCalculator = new JulianYearMonthDayCalculator(); Julian = new CalendarSystem(CalendarOrdinal.Julian, JulianId, JulianName, julianCalculator, new GJEraCalculator(julianCalculator)); Gregorian = new CalendarSystem(CalendarOrdinal.Gregorian, GregorianId, GregorianName, IsoCalendarSystem.YearMonthDayCalculator, IsoCalendarSystem.eraCalculator); } } private static class HebrewCalendars { internal static readonly CalendarSystem[] ByMonthNumbering = { new CalendarSystem(CalendarOrdinal.HebrewCivil, HebrewCivilId, HebrewName, new HebrewYearMonthDayCalculator(HebrewMonthNumbering.Civil), Era.AnnoMundi), new CalendarSystem(CalendarOrdinal.HebrewScriptural, HebrewScripturalId, HebrewName, new HebrewYearMonthDayCalculator(HebrewMonthNumbering.Scriptural), Era.AnnoMundi) }; // Static constructor to enforce laziness. static HebrewCalendars() { } } } }
apache-2.0
MICRORISC/iqrfsdk
libs/simply/simply-iqrf-dpa-v22x-examples/src/main/java/com/microrisc/simply/iqrf/dpa/v22x/examples/std_per/io/SetAllPinsIn.java
3533
/* * Copyright 2014 MICRORISC s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.microrisc.simply.iqrf.dpa.v22x.examples.std_per.io; import com.microrisc.simply.CallRequestProcessingState; import com.microrisc.simply.Simply; import com.microrisc.simply.SimplyException; import com.microrisc.simply.Network; import com.microrisc.simply.Node; import com.microrisc.simply.errors.CallRequestProcessingError; import com.microrisc.simply.iqrf.dpa.v22x.DPA_SimplyFactory; import com.microrisc.simply.iqrf.dpa.v22x.devices.IO; import com.microrisc.simply.iqrf.dpa.v22x.types.IO_DirectionSettings; import com.microrisc.simply.iqrf.types.VoidType; import java.io.File; /** * Example of using IO Peripheral - synchronous version. 
 *
 * @author Michal Konopa
 * @author Rostislav Spinar
 */
public class SetAllPinsIn {
    // Shared Simply runtime; held statically so printMessageAndExit can tear it down.
    private static Simply simply = null;

    // Prints the specified message, destroys the Simply runtime (if created) and
    // terminates the JVM with a nonzero exit status.
    private static void printMessageAndExit(String message) {
        System.out.println(message);
        if ( simply != null) {
            simply.destroy();
        }
        System.exit(1);
    }

    public static void main(String[] args) {
        // Creating Simply instance from the configuration file.
        try {
            simply = DPA_SimplyFactory.getSimply("config" + File.separator + "Simply.properties");
        } catch ( SimplyException ex ) {
            System.err.println("Error while creating Simply: " + ex.getMessage());
            return;
        }

        // Getting network 1.
        Network network1 = simply.getNetwork("1", Network.class);
        if ( network1 == null ) {
            printMessageAndExit("Network 1 doesn't exist");
        }

        // Getting node 1 of that network.
        Node node1 = network1.getNode("1");
        if ( node1 == null ) {
            printMessageAndExit("Node 1 doesn't exist");
        }

        // Getting the IO peripheral device object of node 1.
        IO io = node1.getDeviceObject(IO.class);
        if ( io == null ) {
            printMessageAndExit("IO not present or enabled at node 1");
        }

        // Set all pins IN. Each entry looks like (port, mask, value); presumably the
        // mask selects the pins on that port and the value switches them to input —
        // TODO confirm against the IO_DirectionSettings documentation.
        IO_DirectionSettings[] dirSettings = new IO_DirectionSettings[] {
            new IO_DirectionSettings(0x00, 0x21, 0x21),
            new IO_DirectionSettings(0x01, 0x10, 0x10),
            new IO_DirectionSettings(0x02, 0xFC, 0xFC)
        };

        // Synchronous call: a null result means the request did not complete normally,
        // so inspect the processing state of the last call to find out why.
        VoidType result = io.setDirection(dirSettings);
        if (result == null) {
            CallRequestProcessingState procState = io.getCallRequestProcessingStateOfLastCall();
            if ( procState == CallRequestProcessingState.ERROR ) {
                CallRequestProcessingError error = io.getCallRequestProcessingErrorOfLastCall();
                printMessageAndExit("Setting IO direction failed: " + error);
            } else {
                printMessageAndExit("Setting IO direction hasn't been processed yet: " + procState);
            }
        }

        // End of working with Simply: release its resources.
        simply.destroy();
    }
}
apache-2.0
conversationai/perspective-hacks
view_with_slider/src/testdata/sql_query_maker.ts
3176
/* Copyright 2017 Google Inc. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ /* A little tools to construct compound queries to get examples from bigquery defined by various score ranges. ts-node sql_query_maker.ts */ interface ThresholdSpec { // The bottom and top end of the score range of comments to get. // null indicates end of the range. from: number|null; to: number|null; // The max number of comments to get. limit: number|null; } function thersholdsBucketSubquery( attributesToSelect: string[], attribute: string, thresholds: ThresholdSpec): string { const conditions = []; if (thresholds.from) { conditions.push(`${attribute} >= ${thresholds.from}`); } if (thresholds.to) { conditions.push( `${attribute} < ${thresholds.to}`); } return ` (SELECT id, ${attributesToSelect.join(', ')}, content FROM \`wikidetox-viz.scored_conversations.wikiconvs\` WHERE ${conditions.join(' AND ')} LIMIT ${thresholds.limit}) `; } function buildQuery(allAttributes: string[], thresholdAttributes: string[], bucketThresholds: ThresholdSpec[]) { const subqueries = []; for (const attribute of thresholdAttributes) { for (const thresholds of bucketThresholds) { subqueries.push(thersholdsBucketSubquery( allAttributes, attribute, thresholds)); } } return subqueries; } function main() { const allAttributes = [ // 'TOXICITY', 'SEVERE_TOXICITY', // 'TOXICITY_OBSCENE', // 'TOXICITY_THREAT', 'TOXICITY_INSULT', // 'TOXICITY_IDENTITY_HATE', // 'SEXUALLY_EXPLICIT', ]; const bucketThresholds: ThresholdSpec[] = [ { from: null, to: 0.1, 
limit: 5000 }, { from: 0.1, to: 0.8, limit: 200 }, { from: 0.8, to: null, limit: 50 }]; const q1s = buildQuery(allAttributes, ['TOXICITY_INSULT'], bucketThresholds); const q2s = buildQuery(allAttributes, ['SEVERE_TOXICITY'], [{ from: 0.9, to: null, limit: 20 }]); const allQs = q1s.concat(q2s); console.log(`SELECT id, ${allAttributes.join(', ')}, content FROM ( ${allQs.join('\n UNION ALL \n')}) AS t GROUP BY id, ${allAttributes.join(', ')}, content`); } main(); // (`SELECT id, ${attributesToSelect.join(', ')}, content FROM `wikidetox-viz.scored_conversations.wikiconvs` // WHERE TOXICITY_OBSCENE >= 0.0 AND TOXICITY_OBSCENE < 0.2 LIMIT 30)` // function transformObj(inputObj: InputObj): OutputObj { // return { // text: inputObj.text, // scores: {TOXICITY: inputObj.score}, // date: new Date((new Date()).valueOf() + (Math.random() * 1000 * 60 * 60 * 4)).toLocaleString() // }; // } // console.log(JSON.stringify(CLIMATE.map(transformObj), null, 2));
apache-2.0
daviddenton/q-ext
examples/index.js
1011
'use strict';

const q = require('q');
const qExt = require('../lib');

// Pass an object of named promises to allSettled(): the outcomes are grouped
// by result (fulfilled vs. rejected), and spread() hands them to a node-like
// callback as (errorsByName, resultsByName).
// NOTE: unlike traditional node callbacks, errorsByName is NEVER undefined —
// when nothing failed it is simply an empty object.
qExt.allSettled({
    aSuccessfulPromise: q.resolve('result'),
    anotherSuccessfulPromise: q.resolve('anotherResult'),
    anUnsuccessfulPromise: q.reject('error'),
    anotherUnsuccessfulPromise: q.reject('anotherError')
}).spread((errorsByName, successesByName) => {
    console.log('errors:', errorsByName);
    console.log('successes:', successesByName);
}).done();

// Passing an array of promises instead defers to the traditional form.
qExt.allSettled([q.resolve('result'), q.reject('error')]).spread((success, error) => {
    console.log('success:', success);
    console.log('error:', error);
}).done();
apache-2.0
cnevinc/noFragment
app/src/androidTest/java/com/nevinchen/nofragment/ApplicationTest.java
355
package com.nevinchen.nofragment;

import android.app.Application;
import android.test.ApplicationTestCase;

/**
 * Default instrumentation-test scaffold generated with the project; exercises the
 * stock {@link Application} class and defines no test methods of its own.
 *
 * <a href="http://d.android.com/tools/testing/testing_android.html">Testing Fundamentals</a>
 */
public class ApplicationTest extends ApplicationTestCase<Application> {
    public ApplicationTest() {
        // Tell the harness which Application class to instantiate for the test.
        super(Application.class);
    }
}
apache-2.0
kerryjiang/SuperSocket
test/SuperSocket.Tests/IHostConfigurator.cs
911
using System.IO;
using System.Net.Sockets;
using System.Text;
using System.Threading.Tasks;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Hosting;
using SuperSocket;
using SuperSocket.Channel;
using SuperSocket.Client;
using SuperSocket.ProtoBase;

namespace SuperSocket.Tests
{
    /// <summary>
    /// Abstraction that lets one test suite run against different host/transport
    /// configurations: each implementation configures the server host and builds
    /// matching client-side sockets, streams and readers.
    /// </summary>
    public interface IHostConfigurator
    {
        /// <summary>Applies this configuration to the server host builder.</summary>
        void Configure(ISuperSocketHostBuilder hostBuilder);

        /// <summary>
        /// Awaited by tests between operations; NOTE(review): presumably gives the
        /// transport time to keep message ordering — confirm against implementations.
        /// </summary>
        ValueTask KeepSequence();

        /// <summary>Creates a client socket suitable for this configuration.</summary>
        Socket CreateClient();

        /// <summary>Wraps the connected socket in the transport's stream (plain, TLS, ...).</summary>
        ValueTask<Stream> GetClientStream(Socket socket);

        /// <summary>Creates a text reader over the client stream using the given encoding.</summary>
        TextReader GetStreamReader(Stream stream, Encoding encoding);

        /// <summary>URI scheme used for WebSocket connections (e.g. "ws"/"wss" — TODO confirm).</summary>
        string WebSocketSchema { get; }

        /// <summary>Whether this configuration uses a TLS-secured transport.</summary>
        bool IsSecure { get; }

        /// <summary>Listener options the configured server is expected to use.</summary>
        ListenOptions Listener { get; }

        /// <summary>Builds and configures an easy client around the given pipeline filter.</summary>
        IEasyClient<TPackageInfo> ConfigureEasyClient<TPackageInfo>(IPipelineFilter<TPackageInfo> pipelineFilter, ChannelOptions options)
            where TPackageInfo : class;
    }
}
apache-2.0
bbxyard/bbxyard
yard/skills/66-java/spring-boot-study/spring-boot-rabbit-mq/src/main/java/com/bbxyard/sfb/rabbitmq/handler/hallo/HalloReceiver.java
425
package com.bbxyard.sfb.rabbitmq.handler.hallo;

import org.springframework.amqp.rabbit.annotation.RabbitHandler;
import org.springframework.amqp.rabbit.annotation.RabbitListener;
import org.springframework.stereotype.Component;

/**
 * RabbitMQ consumer bound to the "hallo" queue; Spring AMQP dispatches each
 * incoming String payload to {@link #process(String)}.
 */
@Component
@RabbitListener(queues = "hallo")
public class HalloReceiver {

    /**
     * Handles one message from the "hallo" queue by echoing it to stdout.
     *
     * @param msg the message payload, converted to a String by Spring AMQP
     */
    @RabbitHandler
    public void process(String msg) {
        System.out.println("Consumer Recv: " + msg);
    }
}
apache-2.0
hopecee/texsts
jdo/general/src/java/org/datanucleus/samples/models/hashsetcollection/OtherDetail.java
1318
/**********************************************************************
Copyright (c) 2005 Maciej Wegorkiewicz and others. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

Contributors:
    ...
**********************************************************************/
package org.datanucleus.samples.models.hashsetcollection;

import java.io.Serializable;

/**
 * Detail element owned by a {@link Master}, part of the hashset collection
 * sample model.
 *
 * @version $Revision: 1.1 $
 */
public class OtherDetail implements Serializable
{
    private String id;

    private Master master;

    /**
     * Default constructor.
     */
    public OtherDetail()
    {
    }

    /**
     * Accessor for the identifier.
     */
    public String getId()
    {
        return id;
    }

    /**
     * Mutator for the identifier.
     */
    public void setId(String id)
    {
        this.id = id;
    }

    /**
     * Accessor for the owning master.
     */
    public Master getMaster()
    {
        return master;
    }

    /**
     * Mutator for the owning master.
     */
    public void setMaster(Master master)
    {
        this.master = master;
    }
}
apache-2.0
jentfoo/aws-sdk-java
aws-java-sdk-mediaconvert/src/main/java/com/amazonaws/services/mediaconvert/model/H265TemporalAdaptiveQuantization.java
1980
/*
 * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.mediaconvert.model;

import javax.annotation.Generated;

/**
 * Adjust quantization within each frame based on temporal variation of content complexity.
 *
 * NOTE: this file is code-generated (see the annotation below); manual edits will be
 * lost on the next regeneration.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public enum H265TemporalAdaptiveQuantization {

    DISABLED("DISABLED"),
    ENABLED("ENABLED");

    // The wire value sent to / received from the MediaConvert API.
    private String value;

    private H265TemporalAdaptiveQuantization(String value) {
        this.value = value;
    }

    @Override
    public String toString() {
        return this.value;
    }

    /**
     * Use this in place of valueOf. Unlike valueOf, this matches on the API wire value
     * (via toString()) and rejects null/empty input with IllegalArgumentException.
     *
     * @param value
     *        real value
     * @return H265TemporalAdaptiveQuantization corresponding to the value
     *
     * @throws IllegalArgumentException
     *         If the specified value does not map to one of the known values in this enum.
     */
    public static H265TemporalAdaptiveQuantization fromValue(String value) {
        if (value == null || "".equals(value)) {
            throw new IllegalArgumentException("Value cannot be null or empty!");
        }

        // Linear scan is fine here: the enum has only two constants.
        for (H265TemporalAdaptiveQuantization enumEntry : H265TemporalAdaptiveQuantization.values()) {
            if (enumEntry.toString().equals(value)) {
                return enumEntry;
            }
        }

        throw new IllegalArgumentException("Cannot create enum from " + value + " value!");
    }
}
apache-2.0
deib-polimi/tower4clouds
model/src/main/java/it/polimi/tower4clouds/model/ontology/Component.java
970
/**
 * Copyright (C) 2014 Politecnico di Milano (marco.miglierina@polimi.it)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package it.polimi.tower4clouds.model.ontology;

/**
 * Ontology resource describing a monitored software component.
 */
public class Component extends Resource {

    /** Default constructor. */
    public Component() {
    }

    /**
     * Builds a component with the given type and identifier.
     *
     * @param type the component type
     * @param id the component identifier
     */
    public Component(String type, String id) {
        super(type, id);
    }

    @Override
    public String toString() {
        // %s renders null values as "null", exactly like string concatenation would.
        return String.format("Component [clazz=%s, type=%s, id=%s]",
                getClazz(), getType(), getId());
    }
}
apache-2.0
botelhojp/apache-jmeter-2.10
test/src/org/apache/jmeter/gui/util/TestMenuFactory.java
2099
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package org.apache.jmeter.gui.util;

import org.apache.jmeter.junit.JMeterTestCase;

/**
 * Sanity checks that every MenuFactory registry has been populated.
 */
public final class TestMenuFactory extends JMeterTestCase {

    public TestMenuFactory() {
        super();
    }

    public TestMenuFactory(String name) {
        super(name);
    }

    /** Fails the test when the named registry reports zero entries. */
    private static void check(String registry, int size) throws Exception {
        assertFalse("The number of " + registry + " should not be 0", size == 0);
    }

    /** Verifies each registry in turn. */
    public void testMenu() throws Exception {
        check("menumap", MenuFactory.menuMap_size());
        check("assertions", MenuFactory.assertions_size());
        check("configElements", MenuFactory.configElements_size());
        check("controllers", MenuFactory.controllers_size());
        check("listeners", MenuFactory.listeners_size());
        check("nonTestElements", MenuFactory.nonTestElements_size());
        check("postProcessors", MenuFactory.postProcessors_size());
        check("preProcessors", MenuFactory.preProcessors_size());
        check("samplers", MenuFactory.samplers_size());
        check("timers", MenuFactory.timers_size());
        check("elementstoskip", MenuFactory.elementsToSkip_size());
    }
}
apache-2.0
brahmaroutu/test-infra
prow/plugins/jira/jira_test.go
10860
/*
Copyright 2020 The Kubernetes Authors.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package jira

import (
	"fmt"
	"testing"

	"github.com/andygrunwald/go-jira"
	"github.com/google/go-cmp/cmp"
	"github.com/sirupsen/logrus"

	"k8s.io/test-infra/prow/github"
	jiraclient "k8s.io/test-infra/prow/jira"
)

// fakeJiraClient is an in-memory test double for the Jira client: issues and
// remote links are served from the fields below, and every link created via
// AddRemoteLink is recorded in newLinks for the assertions.
type fakeJiraClient struct {
	existingIssues []jira.Issue
	existingLinks  map[string][]jira.RemoteLink
	newLinks       []jira.RemoteLink
}

// GetIssue returns the pre-seeded issue with the given id, or a NotFound error.
func (f *fakeJiraClient) GetIssue(id string) (*jira.Issue, error) {
	for _, existingIssue := range f.existingIssues {
		if existingIssue.ID == id {
			return &existingIssue, nil
		}
	}
	return nil, jiraclient.NewNotFoundError(fmt.Errorf("No issue %s found", id))
}

func (f *fakeJiraClient) GetRemoteLinks(id string) ([]jira.RemoteLink, error) {
	return f.existingLinks[id], nil
}

// AddRemoteLink records the link, but only for issues that actually exist.
func (f *fakeJiraClient) AddRemoteLink(id string, link *jira.RemoteLink) error {
	if _, err := f.GetIssue(id); err != nil {
		return err
	}
	f.newLinks = append(f.newLinks, *link)
	return nil
}

func (f *fakeJiraClient) JiraClient() *jira.Client {
	panic("not implemented")
}

const fakeJiraUrl = "https://my-jira.com"

func (f *fakeJiraClient) JiraURL() string {
	return fakeJiraUrl
}

// fakeGitHubClient records comment edits keyed by "org/repo:commentID".
type fakeGitHubClient struct {
	editedComments map[string]string
}

func (f *fakeGitHubClient) EditComment(org, repo string, id int, body string) error {
	if f.editedComments == nil {
		f.editedComments = map[string]string{}
	}
	f.editedComments[fmt.Sprintf("%s/%s:%d", org, repo, id)] = body
	return nil
}

func (f *fakeGitHubClient) GetIssue(org, repo string, number int) (*github.Issue, error) {
	return nil, nil
}

func (f *fakeGitHubClient) EditIssue(org, repo string, number int, issue *github.Issue) (*github.Issue, error) {
	return nil, nil
}

// TestHandle is table-driven coverage for handle(): it checks which remote
// links get created on the Jira side and which GitHub comments get rewritten
// with clickable issue links.
func TestHandle(t *testing.T) {
	t.Parallel()
	testCases := []struct {
		name                   string
		event                  github.GenericCommentEvent
		existingIssues         []jira.Issue
		existingLinks          map[string][]jira.RemoteLink
		expectedNewLinks       []jira.RemoteLink
		expectedCommentUpdates map[string]string
	}{
		{
			name: "No issue referenced, nothing to do",
		},
		{
			name: "Link is created based on body",
			event: github.GenericCommentEvent{
				CommentID:  intPtr(1),
				HTMLURL:    "https://github.com/org/repo/issues/3",
				IssueTitle: "Some issue",
				Body:       "Some text and also ABC-123",
				Repo:       github.Repo{FullName: "org/repo", Owner: github.User{Login: "org"}, Name: "repo"},
				Number:     3,
			},
			existingIssues: []jira.Issue{{ID: "ABC-123"}},
			expectedNewLinks: []jira.RemoteLink{{Object: &jira.RemoteLinkObject{
				URL:   "https://github.com/org/repo/issues/3",
				Title: "org/repo#3: Some issue",
				Icon: &jira.RemoteLinkIcon{
					Url16x16: "https://github.com/favicon.ico",
					Title:    "GitHub",
				},
			},
			}},
			expectedCommentUpdates: map[string]string{"org/repo:1": "Some text and also [ABC-123](https://my-jira.com/browse/ABC-123)"},
		},
		{
			// A raw pasted URL is already clickable, so no comment edit is expected.
			name: "Link is created based on body with pasted link",
			event: github.GenericCommentEvent{
				CommentID:  intPtr(1),
				HTMLURL:    "https://github.com/org/repo/issues/3",
				IssueTitle: "Some issue",
				Body:       "Some text and also https://my-jira.com/browse/ABC-123",
				Repo:       github.Repo{FullName: "org/repo", Owner: github.User{Login: "org"}, Name: "repo"},
				Number:     3,
			},
			existingIssues: []jira.Issue{{ID: "ABC-123"}},
			expectedNewLinks: []jira.RemoteLink{{Object: &jira.RemoteLinkObject{
				URL:   "https://github.com/org/repo/issues/3",
				Title: "org/repo#3: Some issue",
				Icon: &jira.RemoteLinkIcon{
					Url16x16: "https://github.com/favicon.ico",
					Title:    "GitHub",
				},
			},
			}},
		},
		{
			name: "Link is created based on body and issuecomment suffix is removed from url",
			event: github.GenericCommentEvent{
				CommentID:  intPtr(1),
				HTMLURL:    "https://github.com/org/repo/issues/3#issuecomment-705743977",
				IssueTitle: "Some issue",
				Body:       "Some text and also ABC-123",
				Repo:       github.Repo{FullName: "org/repo", Owner: github.User{Login: "org"}, Name: "repo"},
				Number:     3,
			},
			existingIssues: []jira.Issue{{ID: "ABC-123"}},
			expectedNewLinks: []jira.RemoteLink{{Object: &jira.RemoteLinkObject{
				URL:   "https://github.com/org/repo/issues/3",
				Title: "org/repo#3: Some issue",
				Icon: &jira.RemoteLinkIcon{
					Url16x16: "https://github.com/favicon.ico",
					Title:    "GitHub",
				},
			},
			}},
			expectedCommentUpdates: map[string]string{"org/repo:1": "Some text and also [ABC-123](https://my-jira.com/browse/ABC-123)"},
		},
		{
			name: "Link is created based on title",
			event: github.GenericCommentEvent{
				HTMLURL:    "https://github.com/org/repo/issues/3",
				IssueTitle: "ABC-123: Some issue",
				Body:       "Some text",
				Repo:       github.Repo{FullName: "org/repo"},
				Number:     3,
			},
			existingIssues: []jira.Issue{{ID: "ABC-123"}},
			expectedNewLinks: []jira.RemoteLink{{Object: &jira.RemoteLinkObject{
				URL:   "https://github.com/org/repo/issues/3",
				Title: "org/repo#3: ABC-123: Some issue",
				Icon: &jira.RemoteLinkIcon{
					Url16x16: "https://github.com/favicon.ico",
					Title:    "GitHub",
				},
			},
			}},
		},
		{
			name: "Multiple references for issue, one link is created",
			event: github.GenericCommentEvent{
				CommentID:  intPtr(1),
				HTMLURL:    "https://github.com/org/repo/issues/3",
				IssueTitle: "Some issue",
				Body:       "Some text and also ABC-123 and again ABC-123",
				Repo:       github.Repo{FullName: "org/repo", Owner: github.User{Login: "org"}, Name: "repo"},
				Number:     3,
			},
			existingIssues: []jira.Issue{{ID: "ABC-123"}},
			expectedNewLinks: []jira.RemoteLink{{Object: &jira.RemoteLinkObject{
				URL:   "https://github.com/org/repo/issues/3",
				Title: "org/repo#3: Some issue",
				Icon: &jira.RemoteLinkIcon{
					Url16x16: "https://github.com/favicon.ico",
					Title:    "GitHub",
				},
			},
			}},
			expectedCommentUpdates: map[string]string{"org/repo:1": "Some text and also [ABC-123](https://my-jira.com/browse/ABC-123) and again [ABC-123](https://my-jira.com/browse/ABC-123)"},
		},
		{
			name: "Referenced issue doesn't exist, nothing to do",
			event: github.GenericCommentEvent{
				HTMLURL:    "https://github.com/org/repo/issues/3#issuecomment-705743977",
				IssueTitle: "Some issue",
				Body:       "Some text and also ABC-123",
				Repo:       github.Repo{FullName: "org/repo"},
				Number:     3,
			},
		},
		{
			name: "Link already exists, nothing to do",
			event: github.GenericCommentEvent{
				HTMLURL:    "https://github.com/org/repo/issues/3",
				IssueTitle: "Some issue",
				Body:       "Some text and also [ABC-123](https://my-jira.com/browse/ABC-123)",
				Repo:       github.Repo{FullName: "org/repo"},
				Number:     3,
			},
			existingIssues: []jira.Issue{{ID: "ABC-123"}},
			existingLinks:  map[string][]jira.RemoteLink{"ABC-123": {{Object: &jira.RemoteLinkObject{URL: "https://github.com/org/repo/issues/3"}}}},
		},
	}

	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			jiraClient := &fakeJiraClient{
				existingIssues: tc.existingIssues,
				existingLinks:  tc.existingLinks,
			}
			githubClient := &fakeGitHubClient{}

			if err := handle(jiraClient, githubClient, logrus.NewEntry(logrus.New()), &tc.event); err != nil {
				t.Fatalf("handle failed: %v", err)
			}

			if diff := cmp.Diff(jiraClient.newLinks, tc.expectedNewLinks); diff != "" {
				t.Errorf("new links differs from expected new links: %s", diff)
			}

			if diff := cmp.Diff(githubClient.editedComments, tc.expectedCommentUpdates); diff != "" {
				t.Errorf("comment updates differ from expected: %s", diff)
			}
		})
	}
}

// intPtr is a small helper for taking the address of an int literal.
func intPtr(i int) *int {
	return &i
}

// TestInsertLinksIntoComment verifies that bare issue references are rewritten
// into markdown links, while already-linked and pasted-URL references are left
// untouched.
func TestInsertLinksIntoComment(t *testing.T) {
	t.Parallel()

	const issueName = "ABC-123"

	testCases := []struct {
		name     string
		body     string
		expected string
	}{
		{
			name: "Multiline body starting with issue name",
			body: `ABC-123: Fix problems:
* First problem
* Second problem`,
			expected: `[ABC-123](https://my-jira.com/browse/ABC-123): Fix problems:
* First problem
* Second problem`,
		},
		{
			name: "Multiline body starting with already replaced issue name",
			body: `[ABC-123](https://my-jira.com/browse/ABC-123): Fix problems:
* First problem
* Second problem`,
			expected: `[ABC-123](https://my-jira.com/browse/ABC-123): Fix problems:
* First problem
* Second problem`,
		},
		{
			name: "Multiline body with multiple occurrence in the middle",
			body: `This change:
* Does stuff related to ABC-123
* And even more stuff related to ABC-123
* But also something else`,
			expected: `This change:
* Does stuff related to [ABC-123](https://my-jira.com/browse/ABC-123)
* And even more stuff related to [ABC-123](https://my-jira.com/browse/ABC-123)
* But also something else`,
		},
		{
			name: "Multiline body with multiple occurrence in the middle, some already replaced",
			body: `This change:
* Does stuff related to [ABC-123](https://my-jira.com/browse/ABC-123)
* And even more stuff related to ABC-123
* But also something else`,
			expected: `This change:
* Does stuff related to [ABC-123](https://my-jira.com/browse/ABC-123)
* And even more stuff related to [ABC-123](https://my-jira.com/browse/ABC-123)
* But also something else`,
		},
		{
			name: "Multiline body with issue name at the end",
			body: `This change:
is very important because of ABC-123`,
			expected: `This change:
is very important because of [ABC-123](https://my-jira.com/browse/ABC-123)`,
		},
		{
			name: "Multiline body with already replaced issue name at the end",
			body: `This change:
is very important because of [ABC-123](https://my-jira.com/browse/ABC-123)`,
			expected: `This change:
is very important because of [ABC-123](https://my-jira.com/browse/ABC-123)`,
		},
		{
			name:     "Pasted links are not replaced, as they are already clickable",
			body:     "https://my-jira.com/browse/ABC-123",
			expected: "https://my-jira.com/browse/ABC-123",
		},
	}

	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			if diff := cmp.Diff(insertLinksIntoComment(tc.body, []string{issueName}, fakeJiraUrl), tc.expected); diff != "" {
				t.Errorf("actual result differs from expected result: %s", diff)
			}
		})
	}
}
apache-2.0
zhaotongxue/coolweather
app/src/androidTest/java/zhao/coolweather/util/Utility.java
4300
package zhao.coolweather.util; import android.content.Context; import android.content.SharedPreferences; import android.preference.PreferenceManager; import android.text.TextUtils; import org.json.JSONException; import org.json.JSONObject; import java.text.SimpleDateFormat; import java.util.Date; import java.util.Locale; import zhao.coolweather.CoolWeatherDB; import zhao.coolweather.model.City; import zhao.coolweather.model.County; import zhao.coolweather.model.Province; /** * Created by zhao on 2015/6/30. * 处理网络 */ public class Utility { public synchronized static boolean HandleProvinceResponse(CoolWeatherDB coolWeatherDB,String response){ if(!TextUtils.isEmpty(response)){ String[] allProvince=response.split(","); if(allProvince!=null && allProvince.length>0){ for(String e : allProvince){ String[] strings=e.split("\\|"); Province province=new Province(); province.setProvinceName(strings[1]); province.setProvinceCode(strings[0]); coolWeatherDB.saveProvince(province); } return true; } } return false; } public synchronized static boolean HandleCityReponse(CoolWeatherDB coolWeatherDB,String response,int provinceId){ if(!TextUtils.isEmpty(response)){ String[] allCity=response.split(","); if(allCity!=null && allCity.length>0){ for(String e : allCity){ String[] strings=e.split("\\|"); City city=new City(); city.setCityCode(strings[0]); city.setCityName(strings[1]); city.setProvinceId(provinceId); coolWeatherDB.saveCity(city); } return true; } } return false; } public synchronized static boolean HandleCounty(CoolWeatherDB coolWeatherDB,String response,int countyId){ if(!TextUtils.isEmpty(response)){ String[] allCounty=response.split(","); if(allCounty!=null && allCounty.length>0){ for(String e : allCounty){ String[] strings=e.split("\\|"); County county=new County(); county.setCountyCode(strings[0]); county.setCountyName(strings[1]); county.setCityId(countyId); coolWeatherDB.saveCounty(county); } return true; } } return false; } /* 将返回的json解析并保存 */ public static void 
handleJSONWeatherResponse(Context context, String json){ try { JSONObject jsonObject=new JSONObject(json); JSONObject weather=jsonObject.getJSONObject("weatherinfo"); String weatherCode=weather.getString("cityid"); String temp1=weather.getString("temp1"); String cityName=weather.getString("cityname"); String temp2=weather.getString("temp2"); String weatherDesp=weather.getString("weather"); String publishTime=weather.getString("ptime"); saveWeather(context,cityName,weatherCode,temp1,temp2,weatherDesp,publishTime); } catch (JSONException e) { e.printStackTrace(); } } /* 存储到sharedpreference中 */ private static void saveWeather(Context context, String cityName, String weatherCode, String temp1, String temp2, String weatherDesp, String publishTime) { SimpleDateFormat sdf=new SimpleDateFormat("yyyy年M月d日", Locale.CHINA); SharedPreferences.Editor editor= PreferenceManager.getDefaultSharedPreferences(context).edit(); editor.putBoolean("city_selected",true); editor.putString("city_name", cityName); editor.putString("weather_code", weatherCode); editor.putString("temp1", temp1); editor.putString("temp2", temp2); editor.putString("publish_time", publishTime); editor.putString("weather_desp",weatherDesp); editor.putString("current_time",sdf.format(new Date())); editor.commit(); } }
apache-2.0
roguexz/rogue.io
modules/framework-model/src/main/java/rogue/app/framework/persistence/EntityImplementationFor.java
1031
/*
 * Copyright 2013, Rogue.IO
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package rogue.app.framework.persistence;

import rogue.app.framework.model.PersistentObject;

// Explicit imports instead of the previous java.lang.annotation.* wildcard.
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

/**
 * Marks the annotated class as the persistent implementation of a
 * {@link rogue.app.framework.model.AppObject} type.
 *
 * <p>Applicable to types only; retained at runtime so that frameworks can
 * discover the implementation mapping reflectively.</p>
 */
@Documented
@Target({ElementType.TYPE})
@Retention(RetentionPolicy.RUNTIME)
public @interface EntityImplementationFor {
    /**
     * The persistent object type that the annotated class implements.
     */
    Class<? extends PersistentObject> value();
}
apache-2.0
jphp-compiler/jphp
exts/jphp-zend-ext/src/main/java/org/develnext/jphp/zend/ext/ZendExtension.java
987
package org.develnext.jphp.zend.ext; import org.develnext.jphp.zend.ext.crypto.csprng.CSPRNGExtenstion; import org.develnext.jphp.zend.ext.standard.BCMathExtension; import org.develnext.jphp.zend.ext.standard.CTypeExtension; import org.develnext.jphp.zend.ext.standard.DateExtension; import org.develnext.jphp.zend.ext.standard.StandardExtension; import php.runtime.env.CompileScope; import php.runtime.ext.support.Extension; public class ZendExtension extends Extension { @Override public String[] getRequiredExtensions() { return new String[] { StandardExtension.class.getName(), BCMathExtension.class.getName(), CTypeExtension.class.getName(), DateExtension.class.getName(), CSPRNGExtenstion.class.getName() }; } @Override public void onRegister(CompileScope scope) { } @Override public Status getStatus() { return Status.ZEND_LEGACY; } }
apache-2.0
Fabric3/spring-samples
apps/bigbank/bigbank-loan/src/main/java/org/fabric3/samples/bigbank/services/credit/impl/CreditServiceImpl.java
1777
/*
 * See the NOTICE file distributed with this work for information
 * regarding copyright ownership. This file is licensed
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.fabric3.samples.bigbank.services.credit.impl;

import org.fabric3.samples.bigbank.services.credit.CreditScore;
import org.fabric3.samples.bigbank.services.credit.CreditService;

/**
 * CreditService implementation that returns a canned credit score from the
 * fictitious credit bureau. Every lookup and its result are recorded with the
 * AuditService.
 */
public class CreditServiceImpl implements CreditService {
    private AuditService auditService;

    public void setAuditService(AuditService auditService) {
        this.auditService = auditService;
    }

    public CreditScore score(String ssn) {
        System.out.println("CreditService: Calculating credit score");
        auditService.recordCheck(ssn);
        // The canned score is keyed off the SSN prefix.
        final int points;
        if (ssn.startsWith("11")) {
            points = 300;
        } else if (ssn.startsWith("22")) {
            points = 700;
        } else {
            points = 760;
        }
        CreditScore result = new CreditScore(ssn, points);
        auditService.recordResult(ssn, result);
        return result;
    }
}
apache-2.0
wozniakm83/java_pft
addressbook-web-tests/src/test/java/pl/pft/addressbook/model/ContactData.java
6604
package pl.pft.addressbook.model; import com.google.gson.annotations.Expose; import com.thoughtworks.xstream.annotations.XStreamAlias; import com.thoughtworks.xstream.annotations.XStreamOmitField; import org.hibernate.annotations.Type; import javax.persistence.*; import java.io.File; @XStreamAlias("contact") @Entity @Table(name = "addressbook") public class ContactData { @XStreamOmitField @Id @Column(name = "id") private int id = Integer.MAX_VALUE; @Expose @Column(name = "firstname") private String firstname; @Expose @Column(name = "lastname") private String lastname; @Expose @Column(name = "address") @Type(type = "text") private String address; @Expose @Column(name = "email") @Type(type = "text") private String email; @Expose @Column(name = "email2") @Type(type = "text") private String email2; @Expose @Column(name = "email3") @Type(type = "text") private String email3; @Transient private String allEmails; @Expose @Column(name = "home") @Type(type = "text") private String homePhone; @Expose @Column(name = "mobile") @Type(type = "text") private String mobilePhone; @Expose @Column(name = "work") @Type(type = "text") private String workPhone; @Transient private String allPhones; @Expose @Transient private String group; @Column(name = "photo") @Type(type = "text") private String photo; public ContactData withId(int id) { this.id = id; return this; } public ContactData withFirstname(String firstname) { this.firstname = firstname; return this; } public ContactData withLastname(String lastname) { this.lastname = lastname; return this; } public ContactData withAddress(String address) { this.address = address; return this; } public ContactData withEmail(String email) { this.email = email; return this; } public ContactData withEmail2(String email2) { this.email2 = email2; return this; } public ContactData withEmail3(String email3) { this.email3 = email3; return this; } public ContactData withAllEmails(String allEmails) { this.allEmails = allEmails; return this; } public 
ContactData withHomePhone(String homePhone) { this.homePhone = homePhone; return this; } public ContactData withMobilePhone(String mobilePhone) { this.mobilePhone = mobilePhone; return this; } public ContactData withWorkPhone(String workPhone) { this.workPhone = workPhone; return this; } public ContactData withAllPhones(String allPhones) { this.allPhones = allPhones; return this; } public ContactData withGroup(String group) { this.group = group; return this; } public ContactData withPhoto(File photo) { this.photo = photo.getPath(); return this; } public int getId() { return id; } public String getFirstname() { return firstname; } public String getLastname() { return lastname; } public String getAddress() { return address; } public String getEmail() { return email; } public String getEmail2() { return email2; } public String getEmail3() { return email3; } public String getAllEmails() { return allEmails; } public String getHomePhone() { return homePhone; } public String getMobilePhone() { return mobilePhone; } public String getWorkPhone() { return workPhone; } public String getAllPhones() { return allPhones; } public String getGroup() { return group; } public File getPhoto() { if (photo == null) { return null; } else { return new File(photo); } } @Override public String toString() { return "ContactData{" + "id=" + id + ", firstname='" + firstname + '\'' + ", lastname='" + lastname + '\'' + ", address='" + address + '\'' + ", email='" + email + '\'' + ", email2='" + email2 + '\'' + ", email3='" + email3 + '\'' + ", homePhone='" + homePhone + '\'' + ", mobilePhone='" + mobilePhone + '\'' + ", workPhone='" + workPhone + '\'' + ", group='" + group + '\'' + '}'; } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; ContactData that = (ContactData) o; if (id != that.id) return false; if (firstname != null ? 
!firstname.equals(that.firstname) : that.firstname != null) return false; if (lastname != null ? !lastname.equals(that.lastname) : that.lastname != null) return false; if (address != null ? !address.equals(that.address) : that.address != null) return false; if (email != null ? !email.equals(that.email) : that.email != null) return false; if (email2 != null ? !email2.equals(that.email2) : that.email2 != null) return false; if (email3 != null ? !email3.equals(that.email3) : that.email3 != null) return false; if (homePhone != null ? !homePhone.equals(that.homePhone) : that.homePhone != null) return false; if (mobilePhone != null ? !mobilePhone.equals(that.mobilePhone) : that.mobilePhone != null) return false; return workPhone != null ? workPhone.equals(that.workPhone) : that.workPhone == null; } @Override public int hashCode() { int result = id; result = 31 * result + (firstname != null ? firstname.hashCode() : 0); result = 31 * result + (lastname != null ? lastname.hashCode() : 0); result = 31 * result + (address != null ? address.hashCode() : 0); result = 31 * result + (email != null ? email.hashCode() : 0); result = 31 * result + (email2 != null ? email2.hashCode() : 0); result = 31 * result + (email3 != null ? email3.hashCode() : 0); result = 31 * result + (homePhone != null ? homePhone.hashCode() : 0); result = 31 * result + (mobilePhone != null ? mobilePhone.hashCode() : 0); result = 31 * result + (workPhone != null ? workPhone.hashCode() : 0); return result; } }
apache-2.0
nathanielbecker/business-contacter-django-app
myproject/cookie_app/migrations/0001_initial.py
878
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ] operations = [ migrations.CreateModel( name='barebones_CRUD', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('name', models.CharField(max_length=255)), ('created', models.DateTimeField(auto_now_add=True)), ('priority', models.IntegerField(default=0)), ('difficulty', models.IntegerField(default=0)), ('durr', models.IntegerField(default=0)), ('done', models.BooleanField(default=False)), ], options={ }, bases=(models.Model,), ), ]
apache-2.0
emcvipr/controller-client-java
models/src/main/java/com/emc/storageos/model/vpool/VirtualPoolChangeRep.java
2693
/*
 * Copyright (c) 2008-2013 EMC Corporation
 * All Rights Reserved
 */
package com.emc.storageos.model.vpool;

import java.net.URI;
import java.util.ArrayList;
import java.util.List;

import javax.xml.bind.annotation.XmlElement;

import com.emc.storageos.model.RestLinkRep;
import com.emc.storageos.model.StringHashMapEntry;

/**
 * Named virtual-pool resource representation extended with fields that state
 * whether a virtual pool change is permitted, the reason when it is not, and
 * the set of change operations that are allowed.
 */
public class VirtualPoolChangeRep extends NamedRelatedVirtualPoolRep {

    private Boolean allowed;
    private String notAllowedReason;
    private List<StringHashMapEntry> allowedChangeOperations;

    public VirtualPoolChangeRep() {
    }

    public VirtualPoolChangeRep(URI id, RestLinkRep selfLink, String name,
            String virtualPoolType, String notAllowedReason,
            List<VirtualPoolChangeOperationEnum> allowedChangeOperationEnums) {
        super(id, selfLink, name, virtualPoolType);
        // The change is considered allowed exactly when at least one operation
        // is permitted.
        this.allowed = allowedChangeOperationEnums != null
                && !allowedChangeOperationEnums.isEmpty();
        this.notAllowedReason = notAllowedReason;
        if (allowedChangeOperationEnums != null) {
            List<StringHashMapEntry> entries = getAllowedChangeOperations();
            for (VirtualPoolChangeOperationEnum op : allowedChangeOperationEnums) {
                entries.add(new StringHashMapEntry(op.name(), op.toString()));
            }
        }
    }

    /**
     * Specifies whether or not a virtual pool change is allowed.
     */
    @XmlElement(name = "allowed")
    public Boolean getAllowed() {
        return allowed;
    }

    public void setAllowed(Boolean allowed) {
        this.allowed = allowed;
    }

    /**
     * When not allowed, the reason the virtual pool change is not allowed.
     */
    @XmlElement(name = "not_allowed_reason")
    public String getNotAllowedReason() {
        return notAllowedReason;
    }

    public void setNotAllowedReason(String notAllowedReason) {
        this.notAllowedReason = notAllowedReason;
    }

    /**
     * Returns the list of allowed change operations, lazily created so that
     * callers (including the constructor) can append to it directly.
     *
     * @return the live, never-null list of allowed change operations
     */
    @XmlElement(name = "allowed_change_operation")
    public List<StringHashMapEntry> getAllowedChangeOperations() {
        if (allowedChangeOperations == null) {
            allowedChangeOperations = new ArrayList<StringHashMapEntry>();
        }
        return allowedChangeOperations;
    }

    public void setAllowedChangeOperations(List<StringHashMapEntry> allowedOperations) {
        this.allowedChangeOperations = allowedOperations;
    }
}
apache-2.0
P7h/ScalaPlayground
Atomic Scala/atomic-scala-solutions/09_Methods/Starter-2.scala
200
// Solution Code for Exercise 2
// From "Methods" atom

// The starter file referenced getSquareDouble without defining it, so it did
// not compile; the exercise solution is to supply the method (and real
// assertion messages) below.

// Returns the square of its argument.
def getSquareDouble(x: Double): Double = x * x

val sd1 = getSquareDouble(1.2)
assert(1.44 == sd1, s"Expected getSquareDouble(1.2) == 1.44 but got $sd1")

val sd2 = getSquareDouble(5.7)
assert(32.49 == sd2, s"Expected getSquareDouble(5.7) == 32.49 but got $sd2")
apache-2.0
google/earthengine-community
samples/javascript/apidocs/ee-array-multiply.js
1126
/** * Copyright 2021 The Google Earth Engine Community Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // [START earthengine__apidocs__ee_array_multiply] print(ee.Array([1]).multiply(0)); // [0] print(ee.Array([1]).multiply(1)); // [1] print(ee.Array([1]).multiply([0])); // [0] print(ee.Array([1]).multiply([1])); // [1] // [-1,8,-2,4.8] print(ee.Array([1, -2, 2, 4]).multiply([-1, -4, -1, 1.2])); // [-1,2,-2,-3] print(ee.Array([1, -2, 2, 3]).multiply(-1)); var empty = ee.Array([], ee.PixelType.int8()); print(empty.multiply(empty)); // [] // [END earthengine__apidocs__ee_array_multiply]
apache-2.0
sslavic/kafka
streams/src/main/java/org/apache/kafka/streams/kstream/TimeWindowedKStream.java
45367
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.kafka.streams.kstream; import org.apache.kafka.common.utils.Bytes; import org.apache.kafka.streams.KafkaStreams; import org.apache.kafka.streams.KeyValue; import org.apache.kafka.streams.StoreQueryParameters; import org.apache.kafka.streams.StreamsConfig; import org.apache.kafka.streams.Topology; import org.apache.kafka.streams.state.WindowStore; import java.time.Duration; /** * {@code TimeWindowedKStream} is an abstraction of a <i>windowed</i> record stream of {@link KeyValue} pairs. * It is an intermediate representation after a grouping and windowing of a {@link KStream} before an aggregation is * applied to the new (partitioned) windows resulting in a windowed {@link KTable} (a <emph>windowed</emph> * {@code KTable} is a {@link KTable} with key type {@link Windowed Windowed<K>}). * <p> * The specified {@code windows} define either hopping time windows that can be overlapping or tumbling (c.f. * {@link TimeWindows}) or they define landmark windows (c.f. {@link UnlimitedWindows}). 
* <p> * The result is written into a local {@link WindowStore} (which is basically an ever-updating * materialized view) that can be queried using the name provided in the {@link Materialized} instance. * Furthermore, updates to the store are sent downstream into a windowed {@link KTable} changelog stream, where * "windowed" implies that the {@link KTable} key is a combined key of the original record key and a window ID. * New events are added to {@link TimeWindows} until their grace period ends (see {@link TimeWindows#grace(Duration)}). * <p> * A {@code TimeWindowedKStream} must be obtained from a {@link KGroupedStream} via * {@link KGroupedStream#windowedBy(Windows)}. * * @param <K> Type of keys * @param <V> Type of values * @see KStream * @see KGroupedStream */ public interface TimeWindowedKStream<K, V> { /** * Count the number of records in this stream by the grouped key and defined windows. * Records with {@code null} key or value are ignored. * <p> * The result is written into a local {@link WindowStore} (which is basically an ever-updating materialized view). * The default key serde from the config will be used for serializing the result. * If a different serde is required then you should use {@link #count(Materialized)}. * Furthermore, updates to the store are sent downstream into a {@link KTable} changelog stream. * Not all updates might get sent downstream, as an internal cache is used to deduplicate consecutive updates to * the same window and key. * The rate of propagated updates depends on your input data rate, the number of distinct keys, the number of * parallel running Kafka Streams instances, and the {@link StreamsConfig configuration} parameters for * {@link StreamsConfig#CACHE_MAX_BYTES_BUFFERING_CONFIG cache size}, and * {@link StreamsConfig#COMMIT_INTERVAL_MS_CONFIG commit intervall}. * <p> * For failure and recovery the store will be backed by an internal changelog topic that will be created in Kafka. 
* The changelog topic will be named "${applicationId}-${internalStoreName}-changelog", where "applicationId" is * user-specified in {@link StreamsConfig StreamsConfig} via parameter * {@link StreamsConfig#APPLICATION_ID_CONFIG APPLICATION_ID_CONFIG}, "internalStoreName" is an internal name * and "-changelog" is a fixed suffix. * Note that the internal store name may not be queriable through Interactive Queries. * <p> * You can retrieve all generated internal topic names via {@link Topology#describe()}. * * @return a windowed {@link KTable} that contains "update" records with unmodified keys and {@link Long} values * that represent the latest (rolling) count (i.e., number of records) for each key within a window */ KTable<Windowed<K>, Long> count(); /** * Count the number of records in this stream by the grouped key and defined windows. * Records with {@code null} key or value are ignored. * <p> * The result is written into a local {@link WindowStore} (which is basically an ever-updating materialized view). * The default key serde from the config will be used for serializing the result. * If a different serde is required then you should use {@link #count(Named, Materialized)}. * Furthermore, updates to the store are sent downstream into a {@link KTable} changelog stream. * Not all updates might get sent downstream, as an internal cache is used to deduplicate consecutive updates to * the same window and key. * The rate of propagated updates depends on your input data rate, the number of distinct keys, the number of * parallel running Kafka Streams instances, and the {@link StreamsConfig configuration} parameters for * {@link StreamsConfig#CACHE_MAX_BYTES_BUFFERING_CONFIG cache size}, and * {@link StreamsConfig#COMMIT_INTERVAL_MS_CONFIG commit intervall}. * <p> * For failure and recovery the store will be backed by an internal changelog topic that will be created in Kafka. 
* The changelog topic will be named "${applicationId}-${internalStoreName}-changelog", where "applicationId" is * user-specified in {@link StreamsConfig StreamsConfig} via parameter * {@link StreamsConfig#APPLICATION_ID_CONFIG APPLICATION_ID_CONFIG}, "internalStoreName" is an internal name * and "-changelog" is a fixed suffix. * Note that the internal store name may not be queriable through Interactive Queries. * <p> * You can retrieve all generated internal topic names via {@link Topology#describe()}. * * @param named a {@link Named} config used to name the processor in the topology. Cannot be {@code null}. * @return a windowed {@link KTable} that contains "update" records with unmodified keys and {@link Long} values * that represent the latest (rolling) count (i.e., number of records) for each key within a window */ KTable<Windowed<K>, Long> count(final Named named); /** * Count the number of records in this stream by the grouped key and defined windows. * Records with {@code null} key or value are ignored. * <p> * The result is written into a local {@link WindowStore} (which is basically an ever-updating materialized view) * that can be queried using the name provided with {@link Materialized}. * Furthermore, updates to the store are sent downstream into a {@link KTable} changelog stream. * <p> * Not all updates might get sent downstream, as an internal cache will be used to deduplicate consecutive updates * to the same window and key if caching is enabled on the {@link Materialized} instance. * When caching is enabled the rate of propagated updates depends on your input data rate, the number of distinct * keys, the number of parallel running Kafka Streams instances, and the {@link StreamsConfig configuration} * parameters for {@link StreamsConfig#CACHE_MAX_BYTES_BUFFERING_CONFIG cache size}, and * {@link StreamsConfig#COMMIT_INTERVAL_MS_CONFIG commit intervall}. 
* <p>
* To query the local {@link ReadOnlyWindowStore} it must be obtained via
* {@link KafkaStreams#store(StoreQueryParameters) KafkaStreams#store(...)}:
* <pre>{@code
* KafkaStreams streams = ... // counting words
* Store queryableStoreName = ... // the queryableStoreName should be the name of the store as defined by the Materialized instance
* ReadOnlyWindowStore<K, ValueAndTimestamp<Long>> localWindowStore = streams.store(queryableStoreName, QueryableStoreTypes.<K, ValueAndTimestamp<Long>>timestampedWindowStore());
*
* K key = "some-word";
* long fromTime = ...;
* long toTime = ...;
* WindowStoreIterator<ValueAndTimestamp<Long>> countForWordsForWindows = localWindowStore.fetch(key, fromTime, toTime); // key must be local (application state is shared over all running Kafka Streams instances)
* }</pre>
* For non-local keys, a custom RPC mechanism must be implemented using {@link KafkaStreams#allMetadata()} to
* query the value of the key on a parallel running instance of your Kafka Streams application.
* <p>
* For failure and recovery the store will be backed by an internal changelog topic that will be created in Kafka.
* Therefore, the store name defined by the Materialized instance must be a valid Kafka topic name and cannot
* contain characters other than ASCII alphanumerics, '.', '_' and '-'.
* The changelog topic will be named "${applicationId}-${storeName}-changelog", where "applicationId" is
* user-specified in {@link StreamsConfig} via parameter
* {@link StreamsConfig#APPLICATION_ID_CONFIG APPLICATION_ID_CONFIG}, "storeName" is the provided store name defined
* in {@code Materialized}, and "-changelog" is a fixed suffix.
* <p>
* You can retrieve all generated internal topic names via {@link Topology#describe()}.
*
* @param materialized an instance of {@link Materialized} used to materialize a state store. Cannot be {@code null}.
*                     Note: the valueSerde will be automatically set to
*                     {@link org.apache.kafka.common.serialization.Serdes#Long() Serdes#Long()}
*                     if there is no valueSerde provided
* @return a windowed {@link KTable} that contains "update" records with unmodified keys and {@link Long} values
*         that represent the latest (rolling) count (i.e., number of records) for each key within a window
*/
KTable<Windowed<K>, Long> count(final Materialized<K, Long, WindowStore<Bytes, byte[]>> materialized);

/**
 * Count the number of records in this stream by the grouped key and defined windows.
 * Records with {@code null} key or value are ignored.
 * <p>
 * The result is written into a local {@link WindowStore} (which is basically an ever-updating materialized view)
 * that can be queried using the name provided with {@link Materialized}.
 * Furthermore, updates to the store are sent downstream into a {@link KTable} changelog stream.
 * <p>
 * Not all updates might get sent downstream, as an internal cache will be used to deduplicate consecutive updates
 * to the same window and key if caching is enabled on the {@link Materialized} instance.
 * When caching is enabled the rate of propagated updates depends on your input data rate, the number of distinct
 * keys, the number of parallel running Kafka Streams instances, and the {@link StreamsConfig configuration}
 * parameters for {@link StreamsConfig#CACHE_MAX_BYTES_BUFFERING_CONFIG cache size}, and
 * {@link StreamsConfig#COMMIT_INTERVAL_MS_CONFIG commit interval}.
 * <p>
 * To query the local {@link ReadOnlyWindowStore} it must be obtained via
 * {@link KafkaStreams#store(StoreQueryParameters) KafkaStreams#store(...)}:
 * <pre>{@code
 * KafkaStreams streams = ... // counting words
 * Store queryableStoreName = ... // the queryableStoreName should be the name of the store as defined by the Materialized instance
 * ReadOnlyWindowStore<K, ValueAndTimestamp<Long>> localWindowStore = streams.store(queryableStoreName, QueryableStoreTypes.<K, ValueAndTimestamp<Long>>timestampedWindowStore());
 *
 * K key = "some-word";
 * long fromTime = ...;
 * long toTime = ...;
 * WindowStoreIterator<ValueAndTimestamp<Long>> countForWordsForWindows = localWindowStore.fetch(key, fromTime, toTime); // key must be local (application state is shared over all running Kafka Streams instances)
 * }</pre>
 * For non-local keys, a custom RPC mechanism must be implemented using {@link KafkaStreams#allMetadata()} to
 * query the value of the key on a parallel running instance of your Kafka Streams application.
 * <p>
 * For failure and recovery the store will be backed by an internal changelog topic that will be created in Kafka.
 * Therefore, the store name defined by the Materialized instance must be a valid Kafka topic name and cannot
 * contain characters other than ASCII alphanumerics, '.', '_' and '-'.
 * The changelog topic will be named "${applicationId}-${storeName}-changelog", where "applicationId" is
 * user-specified in {@link StreamsConfig} via parameter
 * {@link StreamsConfig#APPLICATION_ID_CONFIG APPLICATION_ID_CONFIG}, "storeName" is the provided store name defined
 * in {@code Materialized}, and "-changelog" is a fixed suffix.
 * <p>
 * You can retrieve all generated internal topic names via {@link Topology#describe()}.
 *
 * @param named a {@link Named} config used to name the processor in the topology. Cannot be {@code null}.
 * @param materialized an instance of {@link Materialized} used to materialize a state store. Cannot be {@code null}.
 *                     Note: the valueSerde will be automatically set to
 *                     {@link org.apache.kafka.common.serialization.Serdes#Long() Serdes#Long()}
 *                     if there is no valueSerde provided
 * @return a windowed {@link KTable} that contains "update" records with unmodified keys and {@link Long} values
 *         that represent the latest (rolling) count (i.e., number of records) for each key within a window
 */
KTable<Windowed<K>, Long> count(final Named named, final Materialized<K, Long, WindowStore<Bytes, byte[]>> materialized);

/**
 * Aggregate the values of records in this stream by the grouped key and defined windows.
 * Records with {@code null} key or value are ignored.
 * Aggregating is a generalization of {@link #reduce(Reducer) combining via reduce(...)} as it, for example,
 * allows the result to have a different type than the input values.
 * The result is written into a local {@link WindowStore} (which is basically an ever-updating materialized view).
 * Furthermore, updates to the store are sent downstream into a {@link KTable} changelog stream.
 * <p>
 * The specified {@link Initializer} is applied directly before the first input record (per key) in each window is
 * processed to provide an initial intermediate aggregation result that is used to process the first record for
 * the window (per key).
 * The specified {@link Aggregator} is applied for each input record and computes a new aggregate using the current
 * aggregate (or for the very first record using the intermediate aggregation result provided via the
 * {@link Initializer}) and the record's value.
 * Thus, {@code aggregate()} can be used to compute aggregate functions like count (c.f. {@link #count()}).
 * <p>
 * The default key and value serde from the config will be used for serializing the result.
 * If a different serde is required then you should use {@link #aggregate(Initializer, Aggregator, Materialized)}.
 * Not all updates might get sent downstream, as an internal cache is used to deduplicate consecutive updates to
 * the same window and key.
 * The rate of propagated updates depends on your input data rate, the number of distinct keys, the number of
 * parallel running Kafka Streams instances, and the {@link StreamsConfig configuration} parameters for
 * {@link StreamsConfig#CACHE_MAX_BYTES_BUFFERING_CONFIG cache size}, and
 * {@link StreamsConfig#COMMIT_INTERVAL_MS_CONFIG commit interval}.
 * <p>
 * For failure and recovery the store will be backed by an internal changelog topic that will be created in Kafka.
 * The changelog topic will be named "${applicationId}-${internalStoreName}-changelog", where "applicationId" is
 * user-specified in {@link StreamsConfig} via parameter
 * {@link StreamsConfig#APPLICATION_ID_CONFIG APPLICATION_ID_CONFIG}, "internalStoreName" is an internal name
 * and "-changelog" is a fixed suffix.
 * Note that the internal store name may not be queryable through Interactive Queries.
 * <p>
 * You can retrieve all generated internal topic names via {@link Topology#describe()}.
 *
 * @param initializer an {@link Initializer} that computes an initial intermediate aggregation result. Cannot be {@code null}.
 * @param aggregator an {@link Aggregator} that computes a new aggregate result. Cannot be {@code null}.
 * @param <VR> the value type of the resulting {@link KTable}
 * @return a windowed {@link KTable} that contains "update" records with unmodified keys, and values that represent
 *         the latest (rolling) aggregate for each key within a window
 */
<VR> KTable<Windowed<K>, VR> aggregate(final Initializer<VR> initializer,
                                       final Aggregator<? super K, ? super V, VR> aggregator);

/**
 * Aggregate the values of records in this stream by the grouped key and defined windows.
 * Records with {@code null} key or value are ignored.
 * Aggregating is a generalization of {@link #reduce(Reducer) combining via reduce(...)} as it, for example,
 * allows the result to have a different type than the input values.
 * The result is written into a local {@link WindowStore} (which is basically an ever-updating materialized view).
 * Furthermore, updates to the store are sent downstream into a {@link KTable} changelog stream.
 * <p>
 * The specified {@link Initializer} is applied directly before the first input record (per key) in each window is
 * processed to provide an initial intermediate aggregation result that is used to process the first record for
 * the window (per key).
 * The specified {@link Aggregator} is applied for each input record and computes a new aggregate using the current
 * aggregate (or for the very first record using the intermediate aggregation result provided via the
 * {@link Initializer}) and the record's value.
 * Thus, {@code aggregate()} can be used to compute aggregate functions like count (c.f. {@link #count()}).
 * <p>
 * The default key and value serde from the config will be used for serializing the result.
 * If a different serde is required then you should use
 * {@link #aggregate(Initializer, Aggregator, Named, Materialized)}.
 * Not all updates might get sent downstream, as an internal cache is used to deduplicate consecutive updates to
 * the same window and key.
 * The rate of propagated updates depends on your input data rate, the number of distinct
 * keys, the number of parallel running Kafka Streams instances, and the {@link StreamsConfig configuration}
 * parameters for {@link StreamsConfig#CACHE_MAX_BYTES_BUFFERING_CONFIG cache size}, and
 * {@link StreamsConfig#COMMIT_INTERVAL_MS_CONFIG commit interval}.
 * <p>
 * For failure and recovery the store will be backed by an internal changelog topic that will be created in Kafka.
 * The changelog topic will be named "${applicationId}-${internalStoreName}-changelog", where "applicationId" is
 * user-specified in {@link StreamsConfig} via parameter
 * {@link StreamsConfig#APPLICATION_ID_CONFIG APPLICATION_ID_CONFIG}, "internalStoreName" is an internal name
 * and "-changelog" is a fixed suffix.
 * Note that the internal store name may not be queryable through Interactive Queries.
 * <p>
 * You can retrieve all generated internal topic names via {@link Topology#describe()}.
 *
 * @param initializer an {@link Initializer} that computes an initial intermediate aggregation result. Cannot be {@code null}.
 * @param aggregator an {@link Aggregator} that computes a new aggregate result. Cannot be {@code null}.
 * @param named a {@link Named} config used to name the processor in the topology. Cannot be {@code null}.
 * @param <VR> the value type of the resulting {@link KTable}
 * @return a windowed {@link KTable} that contains "update" records with unmodified keys, and values that represent
 *         the latest (rolling) aggregate for each key within a window
 */
<VR> KTable<Windowed<K>, VR> aggregate(final Initializer<VR> initializer,
                                       final Aggregator<? super K, ? super V, VR> aggregator,
                                       final Named named);

/**
 * Aggregate the values of records in this stream by the grouped key and defined windows.
 * Records with {@code null} key or value are ignored.
 * Aggregating is a generalization of {@link #reduce(Reducer) combining via reduce(...)} as it, for example,
 * allows the result to have a different type than the input values.
 * The result is written into a local {@link WindowStore} (which is basically an ever-updating materialized view)
 * that can be queried using the store name as provided with {@link Materialized}.
 * Furthermore, updates to the store are sent downstream into a {@link KTable} changelog stream.
 * <p>
 * The specified {@link Initializer} is applied directly before the first input record (per key) in each window is
 * processed to provide an initial intermediate aggregation result that is used to process the first record for
 * the window (per key).
 * The specified {@link Aggregator} is applied for each input record and computes a new aggregate using the current
 * aggregate (or for the very first record using the intermediate aggregation result provided via the
 * {@link Initializer}) and the record's value.
 * Thus, {@code aggregate()} can be used to compute aggregate functions like count (c.f. {@link #count()}).
 * <p>
 * Not all updates might get sent downstream, as an internal cache is used to deduplicate consecutive updates to
 * the same window and key if caching is enabled on the {@link Materialized} instance.
 * When caching is enabled the rate of propagated updates depends on your input data rate, the number of distinct
 * keys, the number of parallel running Kafka Streams instances, and the {@link StreamsConfig configuration}
 * parameters for {@link StreamsConfig#CACHE_MAX_BYTES_BUFFERING_CONFIG cache size}, and
 * {@link StreamsConfig#COMMIT_INTERVAL_MS_CONFIG commit interval}.
 * <p>
 * To query the local {@link ReadOnlyWindowStore} it must be obtained via
 * {@link KafkaStreams#store(StoreQueryParameters) KafkaStreams#store(...)}:
 * <pre>{@code
 * KafkaStreams streams = ... // counting words
 * Store queryableStoreName = ... // the queryableStoreName should be the name of the store as defined by the Materialized instance
 * ReadOnlyWindowStore<K, ValueAndTimestamp<VR>> localWindowStore = streams.store(queryableStoreName, QueryableStoreTypes.<K, ValueAndTimestamp<VR>>timestampedWindowStore());
 *
 * K key = "some-word";
 * long fromTime = ...;
 * long toTime = ...;
 * WindowStoreIterator<ValueAndTimestamp<VR>> aggregateStore = localWindowStore.fetch(key, fromTime, toTime); // key must be local (application state is shared over all running Kafka Streams instances)
 * }</pre>
 * For non-local keys, a custom RPC mechanism must be implemented using {@link KafkaStreams#allMetadata()} to
 * query the value of the key on a parallel running instance of your Kafka Streams application.
 * <p>
 * For failure and recovery the store will be backed by an internal changelog topic that will be created in Kafka.
 * Therefore, the store name defined by the {@link Materialized} instance must be a valid Kafka topic name and
 * cannot contain characters other than ASCII alphanumerics, '.', '_' and '-'.
 * The changelog topic will be named "${applicationId}-${storeName}-changelog", where "applicationId" is
 * user-specified in {@link StreamsConfig} via parameter
 * {@link StreamsConfig#APPLICATION_ID_CONFIG APPLICATION_ID_CONFIG}, "storeName" is the
 * provided store name defined in {@link Materialized}, and "-changelog" is a fixed suffix.
 * <p>
 * You can retrieve all generated internal topic names via {@link Topology#describe()}.
 *
 * @param initializer an {@link Initializer} that computes an initial intermediate aggregation result. Cannot be {@code null}.
 * @param aggregator an {@link Aggregator} that computes a new aggregate result. Cannot be {@code null}.
 * @param materialized a {@link Materialized} config used to materialize a state store. Cannot be {@code null}.
 * @param <VR> the value type of the resulting {@link KTable}
 * @return a windowed {@link KTable} that contains "update" records with unmodified keys, and values that represent
 *         the latest (rolling) aggregate for each key within a window
 */
<VR> KTable<Windowed<K>, VR> aggregate(final Initializer<VR> initializer,
                                       final Aggregator<? super K, ? super V, VR> aggregator,
                                       final Materialized<K, VR, WindowStore<Bytes, byte[]>> materialized);

/**
 * Aggregate the values of records in this stream by the grouped key and defined windows.
 * Records with {@code null} key or value are ignored.
 * Aggregating is a generalization of {@link #reduce(Reducer) combining via reduce(...)} as it, for example,
 * allows the result to have a different type than the input values.
 * The result is written into a local {@link WindowStore} (which is basically an ever-updating materialized view)
 * that can be queried using the store name as provided with {@link Materialized}.
 * Furthermore, updates to the store are sent downstream into a {@link KTable} changelog stream.
 * <p>
 * The specified {@link Initializer} is applied directly before the first input record (per key) in each window is
 * processed to provide an initial intermediate aggregation result that is used to process the first record for
 * the window (per key).
 * The specified {@link Aggregator} is applied for each input record and computes a new aggregate using the current
 * aggregate (or for the very first record using the intermediate aggregation result provided via the
 * {@link Initializer}) and the record's value.
 * Thus, {@code aggregate()} can be used to compute aggregate functions like count (c.f. {@link #count()}).
 * <p>
 * Not all updates might get sent downstream, as an internal cache will be used to deduplicate consecutive updates
 * to the same window and key if caching is enabled on the {@link Materialized} instance.
 * When caching is enabled the rate of propagated updates depends on your input data rate, the number of distinct
 * keys, the number of parallel running Kafka Streams instances, and the {@link StreamsConfig configuration}
 * parameters for {@link StreamsConfig#CACHE_MAX_BYTES_BUFFERING_CONFIG cache size}, and
 * {@link StreamsConfig#COMMIT_INTERVAL_MS_CONFIG commit interval}.
 * <p>
 * To query the local {@link ReadOnlyWindowStore} it must be obtained via
 * {@link KafkaStreams#store(StoreQueryParameters) KafkaStreams#store(...)}:
 * <pre>{@code
 * KafkaStreams streams = ... // counting words
 * Store queryableStoreName = ... // the queryableStoreName should be the name of the store as defined by the Materialized instance
 * ReadOnlyWindowStore<K, ValueAndTimestamp<VR>> localWindowStore = streams.store(queryableStoreName, QueryableStoreTypes.<K, ValueAndTimestamp<VR>>timestampedWindowStore());
 *
 * K key = "some-word";
 * long fromTime = ...;
 * long toTime = ...;
 * WindowStoreIterator<ValueAndTimestamp<VR>> aggregateStore = localWindowStore.fetch(key, fromTime, toTime); // key must be local (application state is shared over all running Kafka Streams instances)
 * }</pre>
 * For non-local keys, a custom RPC mechanism must be implemented using {@link KafkaStreams#allMetadata()} to
 * query the value of the key on a parallel running instance of your Kafka Streams application.
 * <p>
 * For failure and recovery the store will be backed by an internal changelog topic that will be created in Kafka.
 * Therefore, the store name defined by the {@link Materialized} instance must be a valid Kafka topic name and
 * cannot contain characters other than ASCII alphanumerics, '.', '_' and '-'.
 * The changelog topic will be named "${applicationId}-${storeName}-changelog", where "applicationId" is
 * user-specified in {@link StreamsConfig} via parameter
 * {@link StreamsConfig#APPLICATION_ID_CONFIG APPLICATION_ID_CONFIG}, "storeName" is the
 * provided store name defined in {@link Materialized}, and "-changelog" is a fixed suffix.
 * <p>
 * You can retrieve all generated internal topic names via {@link Topology#describe()}.
 *
 * @param initializer an {@link Initializer} that computes an initial intermediate aggregation result. Cannot be {@code null}.
 * @param aggregator an {@link Aggregator} that computes a new aggregate result. Cannot be {@code null}.
 * @param named a {@link Named} config used to name the processor in the topology. Cannot be {@code null}.
 * @param materialized a {@link Materialized} config used to materialize a state store. Cannot be {@code null}.
 * @param <VR> the value type of the resulting {@link KTable}
 * @return a windowed {@link KTable} that contains "update" records with unmodified keys, and values that represent
 *         the latest (rolling) aggregate for each key within a window
 */
<VR> KTable<Windowed<K>, VR> aggregate(final Initializer<VR> initializer,
                                       final Aggregator<? super K, ? super V, VR> aggregator,
                                       final Named named,
                                       final Materialized<K, VR, WindowStore<Bytes, byte[]>> materialized);

/**
 * Combine the values of records in this stream by the grouped key and defined windows.
 * Records with {@code null} key or value are ignored.
 * Combining implies that the type of the aggregate result is the same as the type of the input value
 * (c.f. {@link #aggregate(Initializer, Aggregator)}).
 * <p>
 * The result is written into a local {@link WindowStore} (which is basically an ever-updating materialized view).
 * Furthermore, updates to the store are sent downstream into a {@link KTable} changelog stream.
 * The default key and value serde from the config will be used for serializing the result.
 * If a different serde is required then you should use {@link #reduce(Reducer, Materialized)}.
 * <p>
 * The value of the first record per window initializes the aggregation result.
 * The specified {@link Reducer} is applied for each additional input record per window and computes a new
 * aggregate using the current aggregate (first argument) and the record's value (second argument):
 * <pre>{@code
 * // At the example of a Reducer<Long>
 * new Reducer<Long>() {
 *   public Long apply(Long aggValue, Long currValue) {
 *     return aggValue + currValue;
 *   }
 * }
 * }</pre>
 * Thus, {@code reduce()} can be used to compute aggregate functions like sum, min, or max.
 * <p>
 * Not all updates might get sent downstream, as an internal cache is used to deduplicate consecutive updates to
 * the same window and key.
 * The rate of propagated updates depends on your input data rate, the number of distinct keys, the number of
 * parallel running Kafka Streams instances, and the {@link StreamsConfig configuration} parameters for
 * {@link StreamsConfig#CACHE_MAX_BYTES_BUFFERING_CONFIG cache size}, and
 * {@link StreamsConfig#COMMIT_INTERVAL_MS_CONFIG commit interval}.
 * <p>
 * For failure and recovery the store will be backed by an internal changelog topic that will be created in Kafka.
 * The changelog topic will be named "${applicationId}-${internalStoreName}-changelog", where "applicationId" is
 * user-specified in {@link StreamsConfig} via parameter
 * {@link StreamsConfig#APPLICATION_ID_CONFIG APPLICATION_ID_CONFIG}, "internalStoreName" is an internal name
 * and "-changelog" is a fixed suffix.
 * <p>
 * You can retrieve all generated internal topic names via {@link Topology#describe()}.
 *
 * @param reducer a {@link Reducer} that computes a new aggregate result. Cannot be {@code null}.
 * @return a windowed {@link KTable} that contains "update" records with unmodified keys, and values that represent
 *         the latest (rolling) aggregate for each key within a window
 */
KTable<Windowed<K>, V> reduce(final Reducer<V> reducer);

/**
 * Combine the values of records in this stream by the grouped key and defined windows.
 * Records with {@code null} key or value are ignored.
 * Combining implies that the type of the aggregate result is the same as the type of the input value.
 * <p>
 * The result is written into a local {@link WindowStore} (which is basically an ever-updating materialized view).
 * Furthermore, updates to the store are sent downstream into a {@link KTable} changelog stream.
 * The default key and value serde from the config will be used for serializing the result.
 * If a different serde is required then you should use {@link #reduce(Reducer, Named, Materialized)}.
 * <p>
 * The value of the first record per window initializes the aggregation result.
 * The specified {@link Reducer} is applied for each additional input record per window and computes a new
 * aggregate using the current aggregate (first argument) and the record's value (second argument):
 * <pre>{@code
 * // At the example of a Reducer<Long>
 * new Reducer<Long>() {
 *   public Long apply(Long aggValue, Long currValue) {
 *     return aggValue + currValue;
 *   }
 * }
 * }</pre>
 * Thus, {@code reduce()} can be used to compute aggregate functions like sum, min, or max.
 * <p>
 * Not all updates might get sent downstream, as an internal cache is used to deduplicate consecutive updates to
 * the same window and key.
 * The rate of propagated updates depends on your input data rate, the number of distinct keys, the number of
 * parallel running Kafka Streams instances, and the {@link StreamsConfig configuration} parameters for
 * {@link StreamsConfig#CACHE_MAX_BYTES_BUFFERING_CONFIG cache size}, and
 * {@link StreamsConfig#COMMIT_INTERVAL_MS_CONFIG commit interval}.
 * <p>
 * For failure and recovery the store will be backed by an internal changelog topic that will be created in Kafka.
 * The changelog topic will be named "${applicationId}-${internalStoreName}-changelog", where "applicationId" is
 * user-specified in {@link StreamsConfig} via parameter
 * {@link StreamsConfig#APPLICATION_ID_CONFIG APPLICATION_ID_CONFIG}, "internalStoreName" is an internal name
 * and "-changelog" is a fixed suffix.
 * <p>
 * You can retrieve all generated internal topic names via {@link Topology#describe()}.
 *
 * @param reducer a {@link Reducer} that computes a new aggregate result. Cannot be {@code null}.
 * @param named a {@link Named} config used to name the processor in the topology. Cannot be {@code null}.
 * @return a windowed {@link KTable} that contains "update" records with unmodified keys, and values that represent
 *         the latest (rolling) aggregate for each key within a window
 */
KTable<Windowed<K>, V> reduce(final Reducer<V> reducer, final Named named);

/**
 * Combine the values of records in this stream by the grouped key and defined windows.
 * Records with {@code null} key or value are ignored.
 * Combining implies that the type of the aggregate result is the same as the type of the input value.
 * <p>
 * The result is written into a local {@link WindowStore} (which is basically an ever-updating materialized view)
 * that can be queried using the store name as provided with {@link Materialized}.
 * Furthermore, updates to the store are sent downstream into a {@link KTable} changelog stream.
 * <p>
 * The value of the first record per window initializes the aggregation result.
 * The specified {@link Reducer} is applied for each additional input record per window and computes a new
 * aggregate using the current aggregate (first argument) and the record's value (second argument):
 * <pre>{@code
 * // At the example of a Reducer<Long>
 * new Reducer<Long>() {
 *   public Long apply(Long aggValue, Long currValue) {
 *     return aggValue + currValue;
 *   }
 * }
 * }</pre>
 * Thus, {@code reduce()} can be used to compute aggregate functions like sum, min, or max.
 * <p>
 * Not all updates might get sent downstream, as an internal cache will be used to deduplicate consecutive updates
 * to the same window and key if caching is enabled on the {@link Materialized} instance.
 * When caching is enabled the rate of propagated updates depends on your input data rate, the number of distinct
 * keys, the number of parallel running Kafka Streams instances, and the {@link StreamsConfig configuration}
 * parameters for {@link StreamsConfig#CACHE_MAX_BYTES_BUFFERING_CONFIG cache size}, and
 * {@link StreamsConfig#COMMIT_INTERVAL_MS_CONFIG commit interval}.
 * <p>
 * To query the local {@link ReadOnlyWindowStore} it must be obtained via
 * {@link KafkaStreams#store(StoreQueryParameters) KafkaStreams#store(...)}:
 * <pre>{@code
 * KafkaStreams streams = ... // counting words
 * Store queryableStoreName = ... // the queryableStoreName should be the name of the store as defined by the Materialized instance
 * ReadOnlyWindowStore<K, ValueAndTimestamp<V>> localWindowStore = streams.store(queryableStoreName, QueryableStoreTypes.<K, ValueAndTimestamp<V>>timestampedWindowStore());
 *
 * K key = "some-word";
 * long fromTime = ...;
 * long toTime = ...;
 * WindowStoreIterator<ValueAndTimestamp<V>> reduceStore = localWindowStore.fetch(key, fromTime, toTime); // key must be local (application state is shared over all running Kafka Streams instances)
 * }</pre>
 * For non-local keys, a custom RPC mechanism must be implemented using {@link KafkaStreams#allMetadata()} to
 * query the value of the key on a parallel running instance of your Kafka Streams application.
 * <p>
 * For failure and recovery the store will be backed by an internal changelog topic that will be created in Kafka.
 * Therefore, the store name defined by the Materialized instance must be a valid Kafka topic name and cannot
 * contain characters other than ASCII alphanumerics, '.', '_' and '-'.
 * The changelog topic will be named "${applicationId}-${storeName}-changelog", where "applicationId" is
 * user-specified in {@link StreamsConfig} via parameter
 * {@link StreamsConfig#APPLICATION_ID_CONFIG APPLICATION_ID_CONFIG}, "storeName" is the provided store name defined
 * in {@code Materialized}, and "-changelog" is a fixed suffix.
 * <p>
 * You can retrieve all generated internal topic names via {@link Topology#describe()}.
 *
 * @param reducer a {@link Reducer} that computes a new aggregate result. Cannot be {@code null}.
 * @param materialized a {@link Materialized} config used to materialize a state store. Cannot be {@code null}.
 * @return a windowed {@link KTable} that contains "update" records with unmodified keys, and values that represent
 *         the latest (rolling) aggregate for each key within a window
 */
KTable<Windowed<K>, V> reduce(final Reducer<V> reducer,
                              final Materialized<K, V, WindowStore<Bytes, byte[]>> materialized);

/**
 * Combine the values of records in this stream by the grouped key and defined windows.
 * Records with {@code null} key or value are ignored.
 * Combining implies that the type of the aggregate result is the same as the type of the input value.
 * <p>
 * The result is written into a local {@link WindowStore} (which is basically an ever-updating materialized view)
 * that can be queried using the store name as provided with {@link Materialized}.
 * Furthermore, updates to the store are sent downstream into a {@link KTable} changelog stream.
 * <p>
 * The value of the first record per window initializes the aggregation result.
 * The specified {@link Reducer} is applied for each additional input record per window and computes a new
 * aggregate using the current aggregate (first argument) and the record's value (second argument):
 * <pre>{@code
 * // At the example of a Reducer<Long>
 * new Reducer<Long>() {
 *   public Long apply(Long aggValue, Long currValue) {
 *     return aggValue + currValue;
 *   }
 * }
 * }</pre>
 * Thus, {@code reduce()} can be used to compute aggregate functions like sum, min, or max.
 * <p>
 * Not all updates might get sent downstream, as an internal cache will be used to deduplicate consecutive updates
 * to the same window and key if caching is enabled on the {@link Materialized} instance.
 * When caching is enabled the rate of propagated updates depends on your input data rate, the number of distinct
 * keys, the number of parallel running Kafka Streams instances, and the {@link StreamsConfig configuration}
 * parameters for {@link StreamsConfig#CACHE_MAX_BYTES_BUFFERING_CONFIG cache size}, and
 * {@link StreamsConfig#COMMIT_INTERVAL_MS_CONFIG commit interval}.
 * <p>
 * To query the local {@link ReadOnlyWindowStore} it must be obtained via
 * {@link KafkaStreams#store(StoreQueryParameters) KafkaStreams#store(...)}:
 * <pre>{@code
 * KafkaStreams streams = ... // counting words
 * Store queryableStoreName = ... // the queryableStoreName should be the name of the store as defined by the Materialized instance
 * ReadOnlyWindowStore<K, ValueAndTimestamp<V>> localWindowStore = streams.store(queryableStoreName, QueryableStoreTypes.<K, ValueAndTimestamp<V>>timestampedWindowStore());
 *
 * K key = "some-word";
 * long fromTime = ...;
 * long toTime = ...;
 * WindowStoreIterator<ValueAndTimestamp<V>> reduceStore = localWindowStore.fetch(key, fromTime, toTime); // key must be local (application state is shared over all running Kafka Streams instances)
 * }</pre>
 * For non-local keys, a custom RPC mechanism must be implemented using {@link KafkaStreams#allMetadata()} to
 * query the value of the key on a parallel running instance of your Kafka Streams application.
 * <p>
 * For failure and recovery the store will be backed by an internal changelog topic that will be created in Kafka.
 * Therefore, the store name defined by the Materialized instance must be a valid Kafka topic name and cannot
 * contain characters other than ASCII alphanumerics, '.', '_' and '-'.
 * The changelog topic will be named "${applicationId}-${storeName}-changelog", where "applicationId" is
 * user-specified in {@link StreamsConfig} via parameter
 * {@link StreamsConfig#APPLICATION_ID_CONFIG APPLICATION_ID_CONFIG}, "storeName" is the provided store name defined
 * in {@link Materialized}, and "-changelog" is a fixed suffix.
 * <p>
 * You can retrieve all generated internal topic names via {@link Topology#describe()}.
 *
 * @param reducer a {@link Reducer} that computes a new aggregate result. Cannot be {@code null}.
 * @param named a {@link Named} config used to name the processor in the topology. Cannot be {@code null}.
 * @param materialized a {@link Materialized} config used to materialize a state store. Cannot be {@code null}.
 * @return a windowed {@link KTable} that contains "update" records with unmodified keys, and values that represent
 *         the latest (rolling) aggregate for each key within a window
 */
KTable<Windowed<K>, V> reduce(final Reducer<V> reducer,
                              final Named named,
                              final Materialized<K, V, WindowStore<Bytes, byte[]>> materialized);
}
apache-2.0
bqxu/JMethod
service/angular/src/main/webapp/asset/angular/gruntfile.js
6693
/**
 * author: iMethod
 * create_at: 15/9/30.
 * desc: Grunt build configuration for the angular front-end: fetches bower
 *       components, concatenates/uglifies JS, compiles LESS, and copies the
 *       build output into the cordova and API targets.
 * note:
 * 1.
 */
// Wrapper function: Grunt entry point, receives the grunt instance.
module.exports = function (grunt) {
    // Configure the project
    grunt.initConfig({
        // Task configuration
        pkg: grunt.file.readJSON('package.json'),
        // Install bower components into angular/app/lib, grouped by component.
        bower: {
            install: {
                options: {
                    targetDir: 'angular/app/lib',
                    layout: 'byComponent',
                    install: true,
                    verbose: false,
                    cleanTargetDir: true,
                    cleanBowerDir: true,
                    bowerOptions: {}
                }
            }
        },
        // Concatenate application and third-party JS/CSS into single bundles.
        concat: {
            options: {
                separator: '\n'
            },
            app_js: {
                // NOTE(review): order matters here — base/components first, app config last.
                src: [
                    "angular/app/00-components/base.js",
                    "angular/app/00-components/*/*.js",
                    "angular/app/01-models/**/*.js",
                    "angular/app/02-services/*.js",
                    "angular/app/02-services/**/*.js",
                    "angular/app/03-views/*.js",
                    "angular/app/03-views/**/*.js",
                    "angular/app/04-configs/component.js",
                    "angular/app/04-configs/router.js",
                    "angular/app/04-configs/app.js"
                ],
                dest: "angular/app/dist.js"
            },
            plugin_js: {
                src: [
                    "angular/app/lib/html5-boilerplate/dist/js/vendor/modernizr-2.8.3.min.js",
                    "angular/app/lib/jquery/jquery.js",
                    "angular/app/bower_components/bootstrap/dist/js/bootstrap.js",
                    "angular/app/lib/angular/angular.js",
                    "angular/app/lib/angular-loader/angular-loader.js",
                    "angular/app/lib/angular-mocks/angular-mocks.js",
                    "angular/app/lib/angular-route/angular-route.js",
                    "angular/app/lib/angular-ui-router/angular-ui-router.js"
                ],
                dest: "angular/app/plugins.js"
            },
            app_css: {
                src: ["angular/app/03-views/*/*.css"],
                dest: "angular/app/dist.css"
            },
            plugin_css: {
                src: [
                    "angular/app/bower_components/bootstrap/dist/css/bootstrap.css",
                    "angular/app/bower_components/bootstrap/dist/css/bootstrap-theme.css"
                ],
                dest: "angular/app/plugins.css"
            }
        },
        // Compile LESS entry points to CSS.
        less: {
            dist: {
                files: {
                    'angular/app/less.css': 'angular/app/05-style/main.less',
                    'angular/app/merge.css': 'angular/app/05-style/merge.less'
                }
            }
        },
        // Minify the concatenated app bundle.
        uglify: {
            options: {
                banner: '/*! power by iMethod */\n',
                beautify: true,
                mangle: true // mangle variable names
            },
            built: {
                src: ["angular/app/dist.js"],
                dest: "angular/app/dist.min.js"
            }
        },
        // Remove generated bundles and previously copied build output.
        clean: {
            js: ["angular/app/dist.js", "angular/app/plugins.js", "angular/app/dist.min.js"],
            css: ["angular/app/dist.css", "angular/app/plugins.css", "angular/app/less.css"],
            api: ["API/src/main/resources/static/app"],
            cordova: ["cordova/www/index.html", "cordova/www/*.js", "cordova/www/*.css", "cordova/www/app"]
        },
        // Copy the build artifacts into the cordova www/ and the API static resources.
        copy: {
            main: {
                files: [
                    {expand: true, cwd: "angular/app", src: ['index.html'], dest: "cordova/www"},
                    {expand: true, cwd: "angular/app", src: ['index.html'], dest: "API/src/main/resources/static"}
                ]
            },
            main_js: {
                files: [
                    {expand: true, cwd: "angular/app", src: ['dist.min.js'], dest: "cordova/www"},
                    {expand: true, cwd: "angular/app", src: ['dist.min.js'], dest: "API/src/main/resources/static"}
                ]
            },
            plugin_js: {
                files: [
                    {expand: true, cwd: "angular/app", src: ['plugins.js'], dest: "cordova/www"},
                    {expand: true, cwd: "angular/app", src: ['plugins.js'], dest: "API/src/main/resources/static"}
                ]
            },
            main_css: {
                files: [
                    {expand: true, cwd: "angular/app", src: ['dist.css'], dest: "cordova/www"},
                    {expand: true, cwd: "angular/app", src: ['less.css'], dest: "cordova/www"},
                    {expand: true, cwd: "angular/app", src: ['dist.css'], dest: "API/src/main/resources/static"},
                    {expand: true, cwd: "angular/app", src: ['less.css'], dest: "API/src/main/resources/static"}
                ]
            },
            plugin_css: {
                files: [
                    {expand: true, cwd: "angular/app", src: ['plugins.css'], dest: "cordova/www"},
                    {expand: true, cwd: "angular/app", src: ['plugins.css'], dest: "API/src/main/resources/static"}
                ]
            },
            html: {
                files: [
                    {expand: true, cwd: "angular/app/03-views", src: ['**/*.html'], dest: "cordova/www/app/03-views"},
                    {
                        expand: true,
                        cwd: "angular/app/03-views",
                        src: ['**/*.html'],
                        dest: "API/src/main/resources/static/app/03-views"
                    }
                ]
            }
        },
        // Re-run the full build pipeline whenever styles or scripts change.
        watch: {
            css: {
                files: ['angular/app/05-style/**/*.less', 'angular/app/03-views/**/*.js', 'angular/app/02-services/**/*.js'],
                tasks: ['clean', 'concat', 'uglify', 'less', 'copy'],
                options: {
                    debounceDelay: 10000,
                    interval: 1000,
                    interrupt: true,
                    reload: true
                }
            }
        }
    });
    // NOTE(review): grunt-contrib-jshint and grunt-contrib-cssmin are loaded
    // below but have no task configuration above — presumably unused; confirm.
    grunt.loadNpmTasks('grunt-contrib-uglify');
    grunt.loadNpmTasks('grunt-contrib-jshint');
    grunt.loadNpmTasks('grunt-contrib-cssmin');
    grunt.loadNpmTasks('grunt-bower-task');
    grunt.loadNpmTasks('grunt-contrib-clean');
    grunt.loadNpmTasks('grunt-contrib-concat');
    grunt.loadNpmTasks('grunt-contrib-copy');
    // Watch files for changes
    grunt.loadNpmTasks('grunt-contrib-watch');
    grunt.loadNpmTasks('grunt-contrib-less');
    grunt.registerTask('default', ['bower', 'clean', 'concat', 'uglify', 'less', 'copy', 'watch']);
};
apache-2.0
googleapis/java-secretmanager
proto-google-cloud-secretmanager-v1/src/main/java/com/google/cloud/secretmanager/v1/ServiceProto.java
24401
/* * Copyright 2020 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/secretmanager/v1/service.proto package com.google.cloud.secretmanager.v1; public final class ServiceProto { private ServiceProto() {} public static void registerAllExtensions(com.google.protobuf.ExtensionRegistryLite registry) {} public static void registerAllExtensions(com.google.protobuf.ExtensionRegistry registry) { registerAllExtensions((com.google.protobuf.ExtensionRegistryLite) registry); } static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_secretmanager_v1_ListSecretsRequest_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_secretmanager_v1_ListSecretsRequest_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_secretmanager_v1_ListSecretsResponse_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_secretmanager_v1_ListSecretsResponse_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_secretmanager_v1_CreateSecretRequest_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_secretmanager_v1_CreateSecretRequest_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor 
internal_static_google_cloud_secretmanager_v1_AddSecretVersionRequest_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_secretmanager_v1_AddSecretVersionRequest_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_secretmanager_v1_GetSecretRequest_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_secretmanager_v1_GetSecretRequest_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_secretmanager_v1_ListSecretVersionsRequest_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_secretmanager_v1_ListSecretVersionsRequest_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_secretmanager_v1_ListSecretVersionsResponse_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_secretmanager_v1_ListSecretVersionsResponse_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_secretmanager_v1_GetSecretVersionRequest_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_secretmanager_v1_GetSecretVersionRequest_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_secretmanager_v1_UpdateSecretRequest_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_secretmanager_v1_UpdateSecretRequest_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_secretmanager_v1_AccessSecretVersionRequest_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable 
internal_static_google_cloud_secretmanager_v1_AccessSecretVersionRequest_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_secretmanager_v1_AccessSecretVersionResponse_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_secretmanager_v1_AccessSecretVersionResponse_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_secretmanager_v1_DeleteSecretRequest_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_secretmanager_v1_DeleteSecretRequest_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_secretmanager_v1_DisableSecretVersionRequest_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_secretmanager_v1_DisableSecretVersionRequest_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_secretmanager_v1_EnableSecretVersionRequest_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_secretmanager_v1_EnableSecretVersionRequest_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_cloud_secretmanager_v1_DestroySecretVersionRequest_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_cloud_secretmanager_v1_DestroySecretVersionRequest_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } private static com.google.protobuf.Descriptors.FileDescriptor descriptor; static { java.lang.String[] descriptorData = { "\n+google/cloud/secretmanager/v1/service." 
+ "proto\022\035google.cloud.secretmanager.v1\032\034go" + "ogle/api/annotations.proto\032\027google/api/c" + "lient.proto\032\037google/api/field_behavior.p" + "roto\032\031google/api/resource.proto\032-google/" + "cloud/secretmanager/v1/resources.proto\032\036" + "google/iam/v1/iam_policy.proto\032\032google/i" + "am/v1/policy.proto\032\033google/protobuf/empt" + "y.proto\032 google/protobuf/field_mask.prot" + "o\"\237\001\n\022ListSecretsRequest\022C\n\006parent\030\001 \001(\t" + "B3\340A\002\372A-\n+cloudresourcemanager.googleapi" + "s.com/Project\022\026\n\tpage_size\030\002 \001(\005B\003\340A\001\022\027\n" + "\npage_token\030\003 \001(\tB\003\340A\001\022\023\n\006filter\030\004 \001(\tB\003" + "\340A\001\"z\n\023ListSecretsResponse\0226\n\007secrets\030\001 " + "\003(\0132%.google.cloud.secretmanager.v1.Secr" + "et\022\027\n\017next_page_token\030\002 \001(\t\022\022\n\ntotal_siz" + "e\030\003 \001(\005\"\256\001\n\023CreateSecretRequest\022C\n\006paren" + "t\030\001 \001(\tB3\340A\002\372A-\n+cloudresourcemanager.go" + "ogleapis.com/Project\022\026\n\tsecret_id\030\002 \001(\tB" + "\003\340A\002\022:\n\006secret\030\003 \001(\0132%.google.cloud.secr" + "etmanager.v1.SecretB\003\340A\002\"\232\001\n\027AddSecretVe" + "rsionRequest\022;\n\006parent\030\001 \001(\tB+\340A\002\372A%\n#se" + "cretmanager.googleapis.com/Secret\022B\n\007pay" + "load\030\002 \001(\0132,.google.cloud.secretmanager." + "v1.SecretPayloadB\003\340A\002\"M\n\020GetSecretReques" + "t\0229\n\004name\030\001 \001(\tB+\340A\002\372A%\n#secretmanager.g" + "oogleapis.com/Secret\"\236\001\n\031ListSecretVersi" + "onsRequest\022;\n\006parent\030\001 \001(\tB+\340A\002\372A%\n#secr" + "etmanager.googleapis.com/Secret\022\026\n\tpage_" + "size\030\002 \001(\005B\003\340A\001\022\027\n\npage_token\030\003 \001(\tB\003\340A\001" + "\022\023\n\006filter\030\004 \001(\tB\003\340A\001\"\211\001\n\032ListSecretVers" + "ionsResponse\022>\n\010versions\030\001 \003(\0132,.google." 
+ "cloud.secretmanager.v1.SecretVersion\022\027\n\017" + "next_page_token\030\002 \001(\t\022\022\n\ntotal_size\030\003 \001(" + "\005\"[\n\027GetSecretVersionRequest\022@\n\004name\030\001 \001" + "(\tB2\340A\002\372A,\n*secretmanager.googleapis.com" + "/SecretVersion\"\207\001\n\023UpdateSecretRequest\022:" + "\n\006secret\030\001 \001(\0132%.google.cloud.secretmana" + "ger.v1.SecretB\003\340A\002\0224\n\013update_mask\030\002 \001(\0132" + "\032.google.protobuf.FieldMaskB\003\340A\002\"^\n\032Acce" + "ssSecretVersionRequest\022@\n\004name\030\001 \001(\tB2\340A" + "\002\372A,\n*secretmanager.googleapis.com/Secre" + "tVersion\"\233\001\n\033AccessSecretVersionResponse" + "\022=\n\004name\030\001 \001(\tB/\372A,\n*secretmanager.googl" + "eapis.com/SecretVersion\022=\n\007payload\030\002 \001(\013" + "2,.google.cloud.secretmanager.v1.SecretP" + "ayload\"c\n\023DeleteSecretRequest\0229\n\004name\030\001 " + "\001(\tB+\340A\002\372A%\n#secretmanager.googleapis.co" + "m/Secret\022\021\n\004etag\030\002 \001(\tB\003\340A\001\"r\n\033DisableSe" + "cretVersionRequest\022@\n\004name\030\001 \001(\tB2\340A\002\372A," + "\n*secretmanager.googleapis.com/SecretVer" + "sion\022\021\n\004etag\030\002 \001(\tB\003\340A\001\"q\n\032EnableSecretV" + "ersionRequest\022@\n\004name\030\001 \001(\tB2\340A\002\372A,\n*sec" + "retmanager.googleapis.com/SecretVersion\022" + "\021\n\004etag\030\002 \001(\tB\003\340A\001\"r\n\033DestroySecretVersi" + "onRequest\022@\n\004name\030\001 \001(\tB2\340A\002\372A,\n*secretm" + "anager.googleapis.com/SecretVersion\022\021\n\004e" + "tag\030\002 \001(\tB\003\340A\0012\317\025\n\024SecretManagerService\022" + "\246\001\n\013ListSecrets\0221.google.cloud.secretman" + "ager.v1.ListSecretsRequest\0322.google.clou" + "d.secretmanager.v1.ListSecretsResponse\"0" + "\202\323\344\223\002!\022\037/v1/{parent=projects/*}/secrets\332" + "A\006parent\022\264\001\n\014CreateSecret\0222.google.cloud" + ".secretmanager.v1.CreateSecretRequest\032%." 
+ "google.cloud.secretmanager.v1.Secret\"I\202\323" + "\344\223\002)\"\037/v1/{parent=projects/*}/secrets:\006s" + "ecret\332A\027parent,secret_id,secret\022\302\001\n\020AddS" + "ecretVersion\0226.google.cloud.secretmanage" + "r.v1.AddSecretVersionRequest\032,.google.cl" + "oud.secretmanager.v1.SecretVersion\"H\202\323\344\223" + "\0021\",/v1/{parent=projects/*/secrets/*}:ad" + "dVersion:\001*\332A\016parent,payload\022\223\001\n\tGetSecr" + "et\022/.google.cloud.secretmanager.v1.GetSe" + "cretRequest\032%.google.cloud.secretmanager" + ".v1.Secret\".\202\323\344\223\002!\022\037/v1/{name=projects/*" + "/secrets/*}\332A\004name\022\266\001\n\014UpdateSecret\0222.go" + "ogle.cloud.secretmanager.v1.UpdateSecret" + "Request\032%.google.cloud.secretmanager.v1." + "Secret\"K\202\323\344\223\00202&/v1/{secret.name=project" + "s/*/secrets/*}:\006secret\332A\022secret,update_m" + "ask\022\212\001\n\014DeleteSecret\0222.google.cloud.secr" + "etmanager.v1.DeleteSecretRequest\032\026.googl" + "e.protobuf.Empty\".\202\323\344\223\002!*\037/v1/{name=proj" + "ects/*/secrets/*}\332A\004name\022\306\001\n\022ListSecretV" + "ersions\0228.google.cloud.secretmanager.v1." + "ListSecretVersionsRequest\0329.google.cloud" + ".secretmanager.v1.ListSecretVersionsResp" + "onse\";\202\323\344\223\002,\022*/v1/{parent=projects/*/sec" + "rets/*}/versions\332A\006parent\022\263\001\n\020GetSecretV" + "ersion\0226.google.cloud.secretmanager.v1.G" + "etSecretVersionRequest\032,.google.cloud.se" + "cretmanager.v1.SecretVersion\"9\202\323\344\223\002,\022*/v" + "1/{name=projects/*/secrets/*/versions/*}" + "\332A\004name\022\316\001\n\023AccessSecretVersion\0229.google" + ".cloud.secretmanager.v1.AccessSecretVers" + "ionRequest\032:.google.cloud.secretmanager." 
+ "v1.AccessSecretVersionResponse\"@\202\323\344\223\0023\0221" + "/v1/{name=projects/*/secrets/*/versions/" + "*}:access\332A\004name\022\306\001\n\024DisableSecretVersio" + "n\022:.google.cloud.secretmanager.v1.Disabl" + "eSecretVersionRequest\032,.google.cloud.sec" + "retmanager.v1.SecretVersion\"D\202\323\344\223\0027\"2/v1" + "/{name=projects/*/secrets/*/versions/*}:" + "disable:\001*\332A\004name\022\303\001\n\023EnableSecretVersio" + "n\0229.google.cloud.secretmanager.v1.Enable" + "SecretVersionRequest\032,.google.cloud.secr" + "etmanager.v1.SecretVersion\"C\202\323\344\223\0026\"1/v1/" + "{name=projects/*/secrets/*/versions/*}:e" + "nable:\001*\332A\004name\022\306\001\n\024DestroySecretVersion" + "\022:.google.cloud.secretmanager.v1.Destroy" + "SecretVersionRequest\032,.google.cloud.secr" + "etmanager.v1.SecretVersion\"D\202\323\344\223\0027\"2/v1/" + "{name=projects/*/secrets/*/versions/*}:d" + "estroy:\001*\332A\004name\022\206\001\n\014SetIamPolicy\022\".goog" + "le.iam.v1.SetIamPolicyRequest\032\025.google.i" + "am.v1.Policy\";\202\323\344\223\0025\"0/v1/{resource=proj" + "ects/*/secrets/*}:setIamPolicy:\001*\022\203\001\n\014Ge" + "tIamPolicy\022\".google.iam.v1.GetIamPolicyR" + "equest\032\025.google.iam.v1.Policy\"8\202\323\344\223\0022\0220/" + "v1/{resource=projects/*/secrets/*}:getIa" + "mPolicy\022\254\001\n\022TestIamPermissions\022(.google." + "iam.v1.TestIamPermissionsRequest\032).googl" + "e.iam.v1.TestIamPermissionsResponse\"A\202\323\344" + "\223\002;\"6/v1/{resource=projects/*/secrets/*}" + ":testIamPermissions:\001*\032P\312A\034secretmanager" + ".googleapis.com\322A.https://www.googleapis" + ".com/auth/cloud-platformB\353\001\n!com.google." 
+ "cloud.secretmanager.v1B\014ServiceProtoP\001ZJ" + "google.golang.org/genproto/googleapis/cl" + "oud/secretmanager/v1;secretmanager\370\001\001\242\002\003" + "GSM\252\002\035Google.Cloud.SecretManager.V1\312\002\035Go" + "ogle\\Cloud\\SecretManager\\V1\352\002 Google::Cl" + "oud::SecretManager::V1b\006proto3" }; descriptor = com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom( descriptorData, new com.google.protobuf.Descriptors.FileDescriptor[] { com.google.api.AnnotationsProto.getDescriptor(), com.google.api.ClientProto.getDescriptor(), com.google.api.FieldBehaviorProto.getDescriptor(), com.google.api.ResourceProto.getDescriptor(), com.google.cloud.secretmanager.v1.ResourcesProto.getDescriptor(), com.google.iam.v1.IamPolicyProto.getDescriptor(), com.google.iam.v1.PolicyProto.getDescriptor(), com.google.protobuf.EmptyProto.getDescriptor(), com.google.protobuf.FieldMaskProto.getDescriptor(), }); internal_static_google_cloud_secretmanager_v1_ListSecretsRequest_descriptor = getDescriptor().getMessageTypes().get(0); internal_static_google_cloud_secretmanager_v1_ListSecretsRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_secretmanager_v1_ListSecretsRequest_descriptor, new java.lang.String[] { "Parent", "PageSize", "PageToken", "Filter", }); internal_static_google_cloud_secretmanager_v1_ListSecretsResponse_descriptor = getDescriptor().getMessageTypes().get(1); internal_static_google_cloud_secretmanager_v1_ListSecretsResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_secretmanager_v1_ListSecretsResponse_descriptor, new java.lang.String[] { "Secrets", "NextPageToken", "TotalSize", }); internal_static_google_cloud_secretmanager_v1_CreateSecretRequest_descriptor = getDescriptor().getMessageTypes().get(2); internal_static_google_cloud_secretmanager_v1_CreateSecretRequest_fieldAccessorTable = new 
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_secretmanager_v1_CreateSecretRequest_descriptor, new java.lang.String[] { "Parent", "SecretId", "Secret", }); internal_static_google_cloud_secretmanager_v1_AddSecretVersionRequest_descriptor = getDescriptor().getMessageTypes().get(3); internal_static_google_cloud_secretmanager_v1_AddSecretVersionRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_secretmanager_v1_AddSecretVersionRequest_descriptor, new java.lang.String[] { "Parent", "Payload", }); internal_static_google_cloud_secretmanager_v1_GetSecretRequest_descriptor = getDescriptor().getMessageTypes().get(4); internal_static_google_cloud_secretmanager_v1_GetSecretRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_secretmanager_v1_GetSecretRequest_descriptor, new java.lang.String[] { "Name", }); internal_static_google_cloud_secretmanager_v1_ListSecretVersionsRequest_descriptor = getDescriptor().getMessageTypes().get(5); internal_static_google_cloud_secretmanager_v1_ListSecretVersionsRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_secretmanager_v1_ListSecretVersionsRequest_descriptor, new java.lang.String[] { "Parent", "PageSize", "PageToken", "Filter", }); internal_static_google_cloud_secretmanager_v1_ListSecretVersionsResponse_descriptor = getDescriptor().getMessageTypes().get(6); internal_static_google_cloud_secretmanager_v1_ListSecretVersionsResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_secretmanager_v1_ListSecretVersionsResponse_descriptor, new java.lang.String[] { "Versions", "NextPageToken", "TotalSize", }); internal_static_google_cloud_secretmanager_v1_GetSecretVersionRequest_descriptor = getDescriptor().getMessageTypes().get(7); 
internal_static_google_cloud_secretmanager_v1_GetSecretVersionRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_secretmanager_v1_GetSecretVersionRequest_descriptor, new java.lang.String[] { "Name", }); internal_static_google_cloud_secretmanager_v1_UpdateSecretRequest_descriptor = getDescriptor().getMessageTypes().get(8); internal_static_google_cloud_secretmanager_v1_UpdateSecretRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_secretmanager_v1_UpdateSecretRequest_descriptor, new java.lang.String[] { "Secret", "UpdateMask", }); internal_static_google_cloud_secretmanager_v1_AccessSecretVersionRequest_descriptor = getDescriptor().getMessageTypes().get(9); internal_static_google_cloud_secretmanager_v1_AccessSecretVersionRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_secretmanager_v1_AccessSecretVersionRequest_descriptor, new java.lang.String[] { "Name", }); internal_static_google_cloud_secretmanager_v1_AccessSecretVersionResponse_descriptor = getDescriptor().getMessageTypes().get(10); internal_static_google_cloud_secretmanager_v1_AccessSecretVersionResponse_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_secretmanager_v1_AccessSecretVersionResponse_descriptor, new java.lang.String[] { "Name", "Payload", }); internal_static_google_cloud_secretmanager_v1_DeleteSecretRequest_descriptor = getDescriptor().getMessageTypes().get(11); internal_static_google_cloud_secretmanager_v1_DeleteSecretRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_secretmanager_v1_DeleteSecretRequest_descriptor, new java.lang.String[] { "Name", "Etag", }); internal_static_google_cloud_secretmanager_v1_DisableSecretVersionRequest_descriptor = 
getDescriptor().getMessageTypes().get(12); internal_static_google_cloud_secretmanager_v1_DisableSecretVersionRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_secretmanager_v1_DisableSecretVersionRequest_descriptor, new java.lang.String[] { "Name", "Etag", }); internal_static_google_cloud_secretmanager_v1_EnableSecretVersionRequest_descriptor = getDescriptor().getMessageTypes().get(13); internal_static_google_cloud_secretmanager_v1_EnableSecretVersionRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_secretmanager_v1_EnableSecretVersionRequest_descriptor, new java.lang.String[] { "Name", "Etag", }); internal_static_google_cloud_secretmanager_v1_DestroySecretVersionRequest_descriptor = getDescriptor().getMessageTypes().get(14); internal_static_google_cloud_secretmanager_v1_DestroySecretVersionRequest_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_cloud_secretmanager_v1_DestroySecretVersionRequest_descriptor, new java.lang.String[] { "Name", "Etag", }); com.google.protobuf.ExtensionRegistry registry = com.google.protobuf.ExtensionRegistry.newInstance(); registry.add(com.google.api.ClientProto.defaultHost); registry.add(com.google.api.FieldBehaviorProto.fieldBehavior); registry.add(com.google.api.AnnotationsProto.http); registry.add(com.google.api.ClientProto.methodSignature); registry.add(com.google.api.ClientProto.oauthScopes); registry.add(com.google.api.ResourceProto.resourceReference); com.google.protobuf.Descriptors.FileDescriptor.internalUpdateFileDescriptor( descriptor, registry); com.google.api.AnnotationsProto.getDescriptor(); com.google.api.ClientProto.getDescriptor(); com.google.api.FieldBehaviorProto.getDescriptor(); com.google.api.ResourceProto.getDescriptor(); com.google.cloud.secretmanager.v1.ResourcesProto.getDescriptor(); 
com.google.iam.v1.IamPolicyProto.getDescriptor(); com.google.iam.v1.PolicyProto.getDescriptor(); com.google.protobuf.EmptyProto.getDescriptor(); com.google.protobuf.FieldMaskProto.getDescriptor(); } // @@protoc_insertion_point(outer_class_scope) }
apache-2.0
richkadel/flip.tv
websites/search/src/main/java/com/appeligo/search/actions/BitSetFacetHitCounter.java
3054
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package com.appeligo.search.actions; import java.io.IOException; import java.util.ArrayList; import java.util.BitSet; import java.util.Collection; import java.util.Collections; import java.util.List; import org.apache.lucene.index.IndexReader; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import org.apache.lucene.search.QueryFilter; public class BitSetFacetHitCounter { private Query baseQuery; private String baseQueryString; private Collection<HitCount> subQueries; private IndexSearcher searcher; private BitSet baseBitSet; public BitSetFacetHitCounter() { super(); } public void setBaseQuery(Query baseQuery, String baseQueryString) { this.baseQuery = baseQuery; this.baseQueryString = baseQueryString; baseBitSet = null; } public void setSubQueries(Collection<HitCount> subQueries) { this.subQueries = subQueries; } public void setSearcher(IndexSearcher searcher) { this.searcher = searcher; baseBitSet = null; } public BitSet getBaseBitSet() throws IOException { if (baseBitSet == null) { if (baseQuery != null && searcher != null) { IndexReader reader = searcher.getIndexReader(); QueryFilter baseQueryFilter = new QueryFilter(baseQuery); baseBitSet = 
baseQueryFilter.bits(reader); } } return baseBitSet; } public Collection<HitCount> getFacetHitCounts(boolean sortByCount) throws IOException { List<HitCount> facetCounts = new ArrayList<HitCount>(); IndexReader reader = searcher.getIndexReader(); for (HitCount hitCount : subQueries) { QueryFilter filter = new QueryFilter(hitCount.getQuery()); BitSet filterBitSet = filter.bits(reader); BitSet baseBits = getBaseBitSet(); if (baseBits != null) { facetCounts.add(new HitCount(hitCount.getLabel(), hitCount.getQuery(), baseQueryString + " AND " + hitCount.getQueryString(), getFacetHitCount(baseBitSet, filterBitSet))); } } if (sortByCount) { Collections.sort(facetCounts); } return facetCounts; } private int getFacetHitCount(BitSet baseBitSet, BitSet filterBitSet) { filterBitSet.and(baseBitSet); return filterBitSet.cardinality(); } }
apache-2.0
TOMOTON/gwt-remote-eventbus
gwtx.event.remote/src/gwtx/event/remote/shared/SourceId.java
1687
/** * Licensed to TOMOTON nv under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. TOMOTON nv licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package gwtx.event.remote.shared; import java.io.Serializable; /** * An object which identifies a remote event bus client source. * * @author Dann Martens */ public class SourceId implements Serializable { /* Managed UID. */ private static final long serialVersionUID = 1L; private int id; @SuppressWarnings("unused") private SourceId() { //? Mandatory bean constructor. } public SourceId(int id) { this.id = id; } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + id; return result; } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; SourceId other = (SourceId) obj; if (id != other.id) return false; return true; } public String asString() { return String.valueOf(id); } }
apache-2.0
samrussell/ryu
ryu/ofproto/ofproto_v1_3.py
45611
# Copyright (C) 2012 Nippon Telegraph and Telephone Corporation. # Copyright (C) 2012 Isaku Yamahata <yamahata at valinux co jp> # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. from struct import calcsize # struct ofp_header OFP_HEADER_PACK_STR = '!BBHI' OFP_HEADER_SIZE = 8 assert calcsize(OFP_HEADER_PACK_STR) == OFP_HEADER_SIZE # enum ofp_type OFPT_HELLO = 0 # Symmetric message OFPT_ERROR = 1 # Symmetric message OFPT_ECHO_REQUEST = 2 # Symmetric message OFPT_ECHO_REPLY = 3 # Symmetric message OFPT_EXPERIMENTER = 4 # Symmetric message OFPT_FEATURES_REQUEST = 5 # Controller/switch message OFPT_FEATURES_REPLY = 6 # Controller/switch message OFPT_GET_CONFIG_REQUEST = 7 # Controller/switch message OFPT_GET_CONFIG_REPLY = 8 # Controller/switch message OFPT_SET_CONFIG = 9 # Controller/switch message OFPT_PACKET_IN = 10 # Async message OFPT_FLOW_REMOVED = 11 # Async message OFPT_PORT_STATUS = 12 # Async message OFPT_PACKET_OUT = 13 # Controller/switch message OFPT_FLOW_MOD = 14 # Controller/switch message OFPT_GROUP_MOD = 15 # Controller/switch message OFPT_PORT_MOD = 16 # Controller/switch message OFPT_TABLE_MOD = 17 # Controller/switch message OFPT_MULTIPART_REQUEST = 18 # Controller/switch message OFPT_MULTIPART_REPLY = 19 # Controller/switch message OFPT_BARRIER_REQUEST = 20 # Controller/switch message OFPT_BARRIER_REPLY = 21 # Controller/switch message OFPT_QUEUE_GET_CONFIG_REQUEST = 22 # Controller/switch message OFPT_QUEUE_GET_CONFIG_REPLY = 23 # Controller/switch message 
OFPT_ROLE_REQUEST = 24 # Controller/switch message OFPT_ROLE_REPLY = 25 # Controller/switch message OFPT_GET_ASYNC_REQUEST = 26 # Controller/switch message OFPT_GET_ASYNC_REPLY = 27 # Controller/switch message OFPT_SET_ASYNC = 28 # Controller/switch message OFPT_METER_MOD = 29 # Controller/switch message # struct ofp_port OFP_MAX_PORT_NAME_LEN = 16 OFP_ETH_ALEN = 6 OFP_ETH_ALEN_STR = str(OFP_ETH_ALEN) _OFP_PORT_PACK_STR = 'I4x' + OFP_ETH_ALEN_STR + 's' + '2x' + \ str(OFP_MAX_PORT_NAME_LEN) + 's' + 'IIIIIIII' OFP_PORT_PACK_STR = '!' + _OFP_PORT_PACK_STR OFP_PORT_SIZE = 64 assert calcsize(OFP_PORT_PACK_STR) == OFP_PORT_SIZE # enum ofp_port_config OFPPC_PORT_DOWN = 1 << 0 # Port is administratively down. OFPPC_NO_RECV = 1 << 2 # Drop all packets recieved by port. OFPPC_NO_FWD = 1 << 5 # Drop packets forwarded to port. OFPPC_NO_PACKET_IN = 1 << 6 # Do not send packet-in msgs for port. # enum ofp_port_state OFPPS_LINK_DOWN = 1 << 0 # No physical link present. OFPPS_BLOCKED = 1 << 1 # Port is blocked. OFPPS_LIVE = 1 << 2 # Live for Fast Failover Group. # enum ofp_port_no OFPP_MAX = 0xffffff00 OFPP_IN_PORT = 0xfffffff8 # Send the packet out the input port. This # virtual port must be explicitly used # in order to send back out of the input # port. OFPP_TABLE = 0xfffffff9 # Perform actions in flow table. # NB: This can only be the destination # port for packet-out messages. OFPP_NORMAL = 0xfffffffa # Process with normal L2/L3 switching. OFPP_FLOOD = 0xfffffffb # All physical ports except input port and # those disabled by STP. OFPP_ALL = 0xfffffffc # All physical ports except input port. OFPP_CONTROLLER = 0xfffffffd # Send to controller. OFPP_LOCAL = 0xfffffffe # Local openflow "port". OFPP_ANY = 0xffffffff # Not associated with a physical port. # All ones is used to indicate all queues in a port (for stats retrieval). OFPQ_ALL = 0xffffffff # enum ofp_port_features OFPPF_10MB_HD = 1 << 0 # 10 Mb half-duplex rate support. 
OFPPF_10MB_FD = 1 << 1 # 10 Mb full-duplex rate support. OFPPF_100MB_HD = 1 << 2 # 100 Mb half-duplex rate support. OFPPF_100MB_FD = 1 << 3 # 100 Mb full-duplex rate support. OFPPF_1GB_HD = 1 << 4 # 1 Gb half-duplex rate support. OFPPF_1GB_FD = 1 << 5 # 1 Gb full-duplex rate support. OFPPF_10GB_FD = 1 << 6 # 10 Gb full-duplex rate support. OFPPF_40GB_FD = 1 << 7 # 40 Gb full-duplex rate support. OFPPF_100GB_FD = 1 << 8 # 100 Gb full-duplex rate support. OFPPF_1TB_FD = 1 << 9 # 1 Tb full-duplex rate support. OFPPF_OTHER = 1 << 10 # Other rate, not in the list. OFPPF_COPPER = 1 << 11 # Copper medium. OFPPF_FIBER = 1 << 12 # Fiber medium. OFPPF_AUTONEG = 1 << 13 # Auto-negotiation. OFPPF_PAUSE = 1 << 14 # Pause. OFPPF_PAUSE_ASYM = 1 << 15 # Asymmetric pause. # struct ofp_packet_queue OFP_PACKET_QUEUE_PACK_STR = '!IIH6x' OFP_PACKET_QUEUE_SIZE = 16 assert calcsize(OFP_PACKET_QUEUE_PACK_STR) == OFP_PACKET_QUEUE_SIZE # enum ofp_queue_properties OFPQT_MIN_RATE = 1 # Minimum datarate guaranteed. OFPQT_MAX_RATE = 2 # Maximum datarate. OFPQT_EXPERIMENTER = 0xffff # Experimenter defined property. 
# struct ofp_queue_prop_header OFP_QUEUE_PROP_HEADER_PACK_STR = '!HH4x' OFP_QUEUE_PROP_HEADER_SIZE = 8 assert calcsize(OFP_QUEUE_PROP_HEADER_PACK_STR) == OFP_QUEUE_PROP_HEADER_SIZE # struct ofp_queue_prop_min_rate OFP_QUEUE_PROP_MIN_RATE_PACK_STR = '!H6x' OFP_QUEUE_PROP_MIN_RATE_SIZE = 16 assert (calcsize(OFP_QUEUE_PROP_MIN_RATE_PACK_STR) + OFP_QUEUE_PROP_HEADER_SIZE) == OFP_QUEUE_PROP_MIN_RATE_SIZE # struct ofp_queue_prop_max_rate OFP_QUEUE_PROP_MAX_RATE_PACK_STR = '!H6x' OFP_QUEUE_PROP_MAX_RATE_SIZE = 16 assert (calcsize(OFP_QUEUE_PROP_MAX_RATE_PACK_STR) + OFP_QUEUE_PROP_HEADER_SIZE) == OFP_QUEUE_PROP_MAX_RATE_SIZE # struct ofp_queue_prop_experimenter OFP_QUEUE_PROP_EXPERIMENTER_PACK_STR = '!I4x' OFP_QUEUE_PROP_EXPERIMENTER_SIZE = 16 assert (calcsize(OFP_QUEUE_PROP_EXPERIMENTER_PACK_STR) + OFP_QUEUE_PROP_HEADER_SIZE) == OFP_QUEUE_PROP_EXPERIMENTER_SIZE # struct ofp_match _OFP_MATCH_PACK_STR = 'HHBBBB' OFP_MATCH_PACK_STR = '!' + _OFP_MATCH_PACK_STR OFP_MATCH_SIZE = 8 assert calcsize(OFP_MATCH_PACK_STR) == OFP_MATCH_SIZE # enum ofp_match_type OFPMT_STANDARD = 0 # Deprecated OFPMT_OXM = 1 # OpenFlow Extensible Match # enum ofp_oxm_class OFPXMC_NXM_0 = 0x0000 # Backward compatibility with NXM OFPXMC_NXM_1 = 0x0001 # Backward compatibility with NXM OFPXMC_OPENFLOW_BASIC = 0x8000 # Basic class for OpenFlow OFPXMC_EXPERIMENTER = 0xFFFF # Experimenter class # enmu oxm_ofb_match_fields OFPXMT_OFB_IN_PORT = 0 # Switch input port. OFPXMT_OFB_IN_PHY_PORT = 1 # Switch physical input port. OFPXMT_OFB_METADATA = 2 # Metadata passed between tables. OFPXMT_OFB_ETH_DST = 3 # Ethernet destination address. OFPXMT_OFB_ETH_SRC = 4 # Ethernet source address. OFPXMT_OFB_ETH_TYPE = 5 # Ethernet frame type. OFPXMT_OFB_VLAN_VID = 6 # VLAN id. OFPXMT_OFB_VLAN_PCP = 7 # VLAN priority. OFPXMT_OFB_IP_DSCP = 8 # IP DSCP (6 bits in ToS field). OFPXMT_OFB_IP_ECN = 9 # IP ECN (2 bits in ToS field). OFPXMT_OFB_IP_PROTO = 10 # IP protocol. OFPXMT_OFB_IPV4_SRC = 11 # IPv4 source address. 
OFPXMT_OFB_IPV4_DST = 12 # IPv4 destination address. OFPXMT_OFB_TCP_SRC = 13 # TCP source port. OFPXMT_OFB_TCP_DST = 14 # TCP destination port. OFPXMT_OFB_UDP_SRC = 15 # UDP source port. OFPXMT_OFB_UDP_DST = 16 # UDP destination port. OFPXMT_OFB_SCTP_SRC = 17 # SCTP source port. OFPXMT_OFB_SCTP_DST = 18 # SCTP destination port. OFPXMT_OFB_ICMPV4_TYPE = 19 # ICMP type. OFPXMT_OFB_ICMPV4_CODE = 20 # ICMP code. OFPXMT_OFB_ARP_OP = 21 # ARP opcode. OFPXMT_OFB_ARP_SPA = 22 # ARP source IPv4 address. OFPXMT_OFB_ARP_TPA = 23 # ARP target IPv4 address. OFPXMT_OFB_ARP_SHA = 24 # ARP source hardware address. OFPXMT_OFB_ARP_THA = 25 # ARP target hardware address. OFPXMT_OFB_IPV6_SRC = 26 # IPv6 source address. OFPXMT_OFB_IPV6_DST = 27 # IPv6 destination address. OFPXMT_OFB_IPV6_FLABEL = 28 # IPv6 Flow Label OFPXMT_OFB_ICMPV6_TYPE = 29 # ICMPv6 type. OFPXMT_OFB_ICMPV6_CODE = 30 # ICMPv6 code. OFPXMT_OFB_IPV6_ND_TARGET = 31 # Target address for ND. OFPXMT_OFB_IPV6_ND_SLL = 32 # Source link-layer for ND. OFPXMT_OFB_IPV6_ND_TLL = 33 # Target link-layer for ND. OFPXMT_OFB_MPLS_LABEL = 34 # MPLS label. OFPXMT_OFB_MPLS_TC = 35 # MPLS TC. OFPXMT_OFB_MPLS_BOS = 36 # MPLS BoS bit. OFPXMT_OFB_PBB_ISID = 37 # PBB I-SID. OFPXMT_OFB_TUNNEL_ID = 38 # Logical Port Metadata. OFPXMT_OFB_IPV6_EXTHDR = 39 # IPv6 Extension Header pseudo-field # enum ofp_vlan_id OFPVID_PRESENT = 0x1000 # bit that indicate that a VLAN id is set. OFPVID_NONE = 0x0000 # No VLAN id was set. # enum ofp_ipv6exthdr_flags OFPIEH_NONEXT = 1 << 0 # "No next header" encountered. OFPIEH_ESP = 1 << 1 # Encrypted Sec Payload header present. OFPIEH_AUTH = 1 << 2 # Authentication header present. OFPIEH_DEST = 1 << 3 # 1 or 2 dest headers present. OFPIEH_FRAG = 1 << 4 # Fragment header present. OFPIEH_ROUTER = 1 << 5 # Router header present. OFPIEH_HOP = 1 << 6 # Hop-by-hop header present. OFPIEH_UNREP = 1 << 7 # Unexpected repeats encountered. OFPIEH_UNSEQ = 1 << 8 # Unexpected sequencing encountered. 
# ofp_oxm_experimenter_header OFP_OXM_EXPERIMENTER_HEADER_PACK_STR = '!II' OFP_OXM_EXPERIMENTER_HEADER_SIZE = 8 assert (calcsize(OFP_OXM_EXPERIMENTER_HEADER_PACK_STR) == OFP_OXM_EXPERIMENTER_HEADER_SIZE) # enum ofp_instruction_type OFPIT_GOTO_TABLE = 1 # Setup the next table in the lookup pipeline. OFPIT_WRITE_METADATA = 2 # Setup the metadata field for use later in # pipeline. OFPIT_WRITE_ACTIONS = 3 # Write the action(s) onto the datapath # action set OFPIT_APPLY_ACTIONS = 4 # Applies the action(s) immediately OFPIT_CLEAR_ACTIONS = 5 # Clears all actions from the datapath action # set OFPIT_METER = 6 # Apply meter (rate limiter) OFPIT_EXPERIMENTER = 0xFFFF # Experimenter instruction # struct ofp_instruction_goto_table OFP_INSTRUCTION_GOTO_TABLE_PACK_STR = '!HHB3x' OFP_INSTRUCTION_GOTO_TABLE_SIZE = 8 assert (calcsize(OFP_INSTRUCTION_GOTO_TABLE_PACK_STR) == OFP_INSTRUCTION_GOTO_TABLE_SIZE) # struct ofp_instruction_write_metadata OFP_INSTRUCTION_WRITE_METADATA_PACK_STR = '!HH4xQQ' OFP_INSTRUCTION_WRITE_METADATA_SIZE = 24 assert (calcsize(OFP_INSTRUCTION_WRITE_METADATA_PACK_STR) == OFP_INSTRUCTION_WRITE_METADATA_SIZE) # struct ofp_instruction_actions OFP_INSTRUCTION_ACTIONS_PACK_STR = '!HH4x' OFP_INSTRUCTION_ACTIONS_SIZE = 8 assert (calcsize(OFP_INSTRUCTION_ACTIONS_PACK_STR) == OFP_INSTRUCTION_ACTIONS_SIZE) # struct ofp_instruction_meter OFP_INSTRUCTION_METER_PACK_STR = '!HHI' OFP_INSTRUCTION_METER_SIZE = 8 assert calcsize(OFP_INSTRUCTION_METER_PACK_STR) == OFP_INSTRUCTION_METER_SIZE # enum ofp_action_type OFPAT_OUTPUT = 0 # Output to switch port. OFPAT_COPY_TTL_OUT = 11 # Copy TTL "outwards" -- from # next-to-outermost to outermost OFPAT_COPY_TTL_IN = 12 # Copy TTL "inwards" -- from outermost to # next-to-outermost OFPAT_SET_MPLS_TTL = 15 # MPLS TTL. 
OFPAT_DEC_MPLS_TTL = 16 # Decrement MPLS TTL OFPAT_PUSH_VLAN = 17 # Push a new VLAN tag OFPAT_POP_VLAN = 18 # Pop the outer VLAN tag OFPAT_PUSH_MPLS = 19 # Push a new MPLS tag OFPAT_POP_MPLS = 20 # Pop the outer MPLS tag OFPAT_SET_QUEUE = 21 # Set queue id when outputting to a port OFPAT_GROUP = 22 # Apply group OFPAT_SET_NW_TTL = 23 # IP TTL. OFPAT_DEC_NW_TTL = 24 # Decrement IP TTL. OFPAT_SET_FIELD = 25 # Set a header field using OXM TLV format. OFPAT_PUSH_PBB = 26 # Push a new PBB service tag (I-TAG) OFPAT_POP_PBB = 27 # Pop the outer PBB service tag (I-TAG) OFPAT_EXPERIMENTER = 0xffff # struct ofp_action_header OFP_ACTION_HEADER_PACK_STR = '!HH4x' OFP_ACTION_HEADER_SIZE = 8 assert calcsize(OFP_ACTION_HEADER_PACK_STR) == OFP_ACTION_HEADER_SIZE # struct ofp_action_output OFP_ACTION_OUTPUT_PACK_STR = '!HHIH6x' OFP_ACTION_OUTPUT_SIZE = 16 assert calcsize(OFP_ACTION_OUTPUT_PACK_STR) == OFP_ACTION_OUTPUT_SIZE # enum ofp_controller_max_len OFPCML_MAX = 0xffe5 # maximum max_len value which can be used to # request a specific byte length. OFPCML_NO_BUFFER = 0xffff # indicates that no buffering should be # applied and the whole packet is to be # sent to the controller. 
# struct ofp_action_group OFP_ACTION_GROUP_PACK_STR = '!HHI' OFP_ACTION_GROUP_SIZE = 8 assert calcsize(OFP_ACTION_GROUP_PACK_STR) == OFP_ACTION_GROUP_SIZE # struct ofp_action_set_queue OFP_ACTION_SET_QUEUE_PACK_STR = '!HHI' OFP_ACTION_SET_QUEUE_SIZE = 8 assert calcsize(OFP_ACTION_SET_QUEUE_PACK_STR) == OFP_ACTION_SET_QUEUE_SIZE # struct ofp_action_mpls_ttl OFP_ACTION_MPLS_TTL_PACK_STR = '!HHB3x' OFP_ACTION_MPLS_TTL_SIZE = 8 assert calcsize(OFP_ACTION_MPLS_TTL_PACK_STR) == OFP_ACTION_MPLS_TTL_SIZE # struct ofp_action_nw_ttl OFP_ACTION_NW_TTL_PACK_STR = '!HHB3x' OFP_ACTION_NW_TTL_SIZE = 8 assert calcsize(OFP_ACTION_NW_TTL_PACK_STR) == OFP_ACTION_NW_TTL_SIZE # struct ofp_action_push OFP_ACTION_PUSH_PACK_STR = '!HHH2x' OFP_ACTION_PUSH_SIZE = 8 assert calcsize(OFP_ACTION_PUSH_PACK_STR) == OFP_ACTION_PUSH_SIZE # struct ofp_action_pop_mpls OFP_ACTION_POP_MPLS_PACK_STR = '!HHH2x' OFP_ACTION_POP_MPLS_SIZE = 8 assert calcsize(OFP_ACTION_POP_MPLS_PACK_STR) == OFP_ACTION_POP_MPLS_SIZE # struct ofp_action_set_field OFP_ACTION_SET_FIELD_PACK_STR = '!HH4x' OFP_ACTION_SET_FIELD_SIZE = 8 assert calcsize(OFP_ACTION_SET_FIELD_PACK_STR) == OFP_ACTION_SET_FIELD_SIZE # struct ofp_action_experimenter_header OFP_ACTION_EXPERIMENTER_HEADER_PACK_STR = '!HHI' OFP_ACTION_EXPERIMENTER_HEADER_SIZE = 8 assert (calcsize(OFP_ACTION_EXPERIMENTER_HEADER_PACK_STR) == OFP_ACTION_EXPERIMENTER_HEADER_SIZE) # ofp_switch_features OFP_SWITCH_FEATURES_PACK_STR = '!QIBB2xII' OFP_SWITCH_FEATURES_SIZE = 32 assert (calcsize(OFP_SWITCH_FEATURES_PACK_STR) + OFP_HEADER_SIZE == OFP_SWITCH_FEATURES_SIZE) # enum ofp_capabilities OFPC_FLOW_STATS = 1 << 0 # Flow statistics. OFPC_TABLE_STATS = 1 << 1 # Table statistics. OFPC_PORT_STATS = 1 << 2 # Port statistics. OFPC_GROUP_STATS = 1 << 3 # 802.1d spanning tree. OFPC_IP_REASM = 1 << 5 # Can reassemble IP fragments. OFPC_QUEUE_STATS = 1 << 6 # Queue statistics. OFPC_PORT_BLOCKED = 1 << 8 # Match IP addresses in ARP pkts. 
# struct ofp_switch_config OFP_SWITCH_CONFIG_PACK_STR = '!HH' OFP_SWITCH_CONFIG_SIZE = 12 assert (calcsize(OFP_SWITCH_CONFIG_PACK_STR) + OFP_HEADER_SIZE == OFP_SWITCH_CONFIG_SIZE) # enum ofp_config_flags OFPC_FRAG_NORMAL = 0 # No special handling for fragments. OFPC_FRAG_DROP = 1 # Drop fragments. OFPC_FRAG_REASM = 2 # Reassemble (only if OFPC_IP_REASM set). OFPC_FRAG_MASK = 3 # enum ofp_table OFPTT_MAX = 0xfe OFPTT_ALL = 0xff # struct ofp_table_mod OFP_TABLE_MOD_PACK_STR = '!B3xI' OFP_TABLE_MOD_SIZE = 16 assert (calcsize(OFP_TABLE_MOD_PACK_STR) + OFP_HEADER_SIZE == OFP_TABLE_MOD_SIZE) _OFP_FLOW_MOD_PACK_STR0 = 'QQBBHHHIIIH2x' OFP_FLOW_MOD_PACK_STR = '!' + _OFP_FLOW_MOD_PACK_STR0 + _OFP_MATCH_PACK_STR OFP_FLOW_MOD_PACK_STR0 = '!' + _OFP_FLOW_MOD_PACK_STR0 OFP_FLOW_MOD_SIZE = 56 assert (calcsize(OFP_FLOW_MOD_PACK_STR) + OFP_HEADER_SIZE == OFP_FLOW_MOD_SIZE) # enum ofp_flow_mod_command OFPFC_ADD = 0 # New flow. OFPFC_MODIFY = 1 # Modify all matching flows. OFPFC_MODIFY_STRICT = 2 # Modify entry strictly matching wildcards OFPFC_DELETE = 3 # Delete all matching flows. OFPFC_DELETE_STRICT = 4 # Strictly match wildcards and priority. # enum ofp_flow_mod_flags OFPFF_SEND_FLOW_REM = 1 << 0 # Send flow removed message when flow # expires or is deleted. OFPFF_CHECK_OVERLAP = 1 << 1 # Check for overlapping entries first. OFPFF_RESET_COUNT = 1 << 2 # Reset flow packet and byte counts. OFPFF_NO_PKT_COUNTS = 1 << 3 # Don't keep track of packet count. OFPFF_NO_BYT_COUNTS = 1 << 4 # Don't keep track of byte count. # struct ofp_group_mod OFP_GROUP_MOD_PACK_STR = '!HBxI' OFP_GROUP_MOD_SIZE = 16 assert (calcsize(OFP_GROUP_MOD_PACK_STR) + OFP_HEADER_SIZE == OFP_GROUP_MOD_SIZE) # enum ofp_group_mod_command OFPGC_ADD = 0 # New group. OFPGC_MODIFY = 1 # Modify all matching groups. OFPGC_DELETE = 2 # Delete all matching groups. # enum ofp_group OFPG_MAX = 0xffffff00 # Last usable group number. #Fake groups OFPG_ALL = 0xfffffffc # Represents all groups for group delete commands. 
OFPG_ANY = 0xffffffff # Wildcard group used only for flow stats requests. # Selects all flows regardless of group # (including flows with no group). # enum ofp_group_type OFPGT_ALL = 0 # All (multicast/broadcast) group. OFPGT_SELECT = 1 # Select group. OFPGT_INDIRECT = 2 # Indirect group. OFPGT_FF = 3 # Fast failover group. # struct ofp_bucket OFP_BUCKET_PACK_STR = '!HHII4x' OFP_BUCKET_SIZE = 16 assert calcsize(OFP_BUCKET_PACK_STR) == OFP_BUCKET_SIZE # struct ofp_port_mod OFP_PORT_MOD_PACK_STR = '!I4x' + OFP_ETH_ALEN_STR + 's2xIII4x' OFP_PORT_MOD_SIZE = 40 assert (calcsize(OFP_PORT_MOD_PACK_STR) + OFP_HEADER_SIZE == OFP_PORT_MOD_SIZE) # struct ofp_meter_mod OFP_METER_MOD_PACK_STR = '!HHI' OFP_METER_MOD_SIZE = 16 assert (calcsize(OFP_METER_MOD_PACK_STR) + OFP_HEADER_SIZE == OFP_METER_MOD_SIZE) # enum ofp_meter OFPM_MAX = 0xffff0000 OFPM_SLOWPATH = 0xfffffffd # Meter for slow datapath, if any. OFPM_CONTROLLER = 0xfffffffe # Meter for controller connection. OFPM_ALL = 0xffffffff # Represents all meters for stat requests commands. # enum ofp_meter_mod_command OFPMC_ADD = 0 # New meter. OFPMC_MODIFY = 1 # Modify specified meter. OFPMC_DELETE = 2 # Delete specified meter. # enum ofp_meter_flags OFPMF_KBPS = 1 << 0 # Rate value in kb/s (kilo-bit per second). OFPMF_PKTPS = 1 << 1 # Rate value in packet/sec. OFPMF_BURST = 1 << 2 # Do burst size. OFPMF_STATS = 1 << 3 # Collect statistics. # struct ofp_meter_band_header OFP_METER_BAND_HEADER_PACK_STR = '!HHII' OFP_METER_BAND_HEADER_SIZE = 12 assert (calcsize(OFP_METER_BAND_HEADER_PACK_STR) == OFP_METER_BAND_HEADER_SIZE) # enum ofp_meter_band_type OFPMBT_DROP = 1 # Drop packet. OFPMBT_DSCP_REMARK = 2 # Remark DSCP in the IP header. OFPMBT_EXPERIMENTER = 0xFFFF # Experimenter meter band. 
# struct ofp_meter_band_drop
OFP_METER_BAND_DROP_PACK_STR = '!HHII4x'
OFP_METER_BAND_DROP_SIZE = 16
assert (calcsize(OFP_METER_BAND_DROP_PACK_STR) ==
        OFP_METER_BAND_DROP_SIZE)

# struct ofp_meter_band_dscp_remark
OFP_METER_BAND_DSCP_REMARK_PACK_STR = '!HHIIB3x'
OFP_METER_BAND_DSCP_REMARK_SIZE = 16
assert (calcsize(OFP_METER_BAND_DSCP_REMARK_PACK_STR) ==
        OFP_METER_BAND_DSCP_REMARK_SIZE)

# struct ofp_meter_band_experimenter
OFP_METER_BAND_EXPERIMENTER_PACK_STR = '!HHIII'
OFP_METER_BAND_EXPERIMENTER_SIZE = 16
assert (calcsize(OFP_METER_BAND_EXPERIMENTER_PACK_STR) ==
        OFP_METER_BAND_EXPERIMENTER_SIZE)

# struct ofp_multipart_request
OFP_MULTIPART_REQUEST_PACK_STR = '!HH4x'
OFP_MULTIPART_REQUEST_SIZE = 16
assert (calcsize(OFP_MULTIPART_REQUEST_PACK_STR) + OFP_HEADER_SIZE ==
        OFP_MULTIPART_REQUEST_SIZE)

# enum ofp_multipart_request_flags
OFPMPF_REQ_MORE = 1 << 0     # More requests to follow.

# struct ofp_multipart_reply
OFP_MULTIPART_REPLY_PACK_STR = '!HH4x'
OFP_MULTIPART_REPLY_SIZE = 16
assert (calcsize(OFP_MULTIPART_REPLY_PACK_STR) + OFP_HEADER_SIZE ==
        OFP_MULTIPART_REPLY_SIZE)

# enum ofp_multipart_reply_flags
OFPMPF_REPLY_MORE = 1 << 0   # More replies to follow.

# enum ofp_multipart_types
OFPMP_DESC = 0
OFPMP_FLOW = 1
OFPMP_AGGREGATE = 2
OFPMP_TABLE = 3
OFPMP_PORT_STATS = 4
OFPMP_QUEUE = 5
OFPMP_GROUP = 6
OFPMP_GROUP_DESC = 7
OFPMP_GROUP_FEATURES = 8
OFPMP_METER = 9
OFPMP_METER_CONFIG = 10
OFPMP_METER_FEATURES = 11
OFPMP_TABLE_FEATURES = 12
OFPMP_PORT_DESC = 13
OFPMP_EXPERIMENTER = 0xffff

# struct ofp_desc
DESC_STR_LEN = 256
DESC_STR_LEN_STR = str(DESC_STR_LEN)
SERIAL_NUM_LEN = 32
SERIAL_NUM_LEN_STR = str(SERIAL_NUM_LEN)
OFP_DESC_PACK_STR = '!' + \
    DESC_STR_LEN_STR + 's' + \
    DESC_STR_LEN_STR + 's' + \
    DESC_STR_LEN_STR + 's' + \
    SERIAL_NUM_LEN_STR + 's' + \
    DESC_STR_LEN_STR + 's'
OFP_DESC_SIZE = 1056
assert calcsize(OFP_DESC_PACK_STR) == OFP_DESC_SIZE

# struct ofp_flow_stats_request
_OFP_FLOW_STATS_REQUEST_0_PACK_STR = 'B3xII4xQQ'
OFP_FLOW_STATS_REQUEST_0_PACK_STR = '!' + _OFP_FLOW_STATS_REQUEST_0_PACK_STR
OFP_FLOW_STATS_REQUEST_0_SIZE = 32
assert (calcsize(OFP_FLOW_STATS_REQUEST_0_PACK_STR) ==
        OFP_FLOW_STATS_REQUEST_0_SIZE)
OFP_FLOW_STATS_REQUEST_PACK_STR = (OFP_FLOW_STATS_REQUEST_0_PACK_STR +
                                   _OFP_MATCH_PACK_STR)
OFP_FLOW_STATS_REQUEST_SIZE = 40
assert (calcsize(OFP_FLOW_STATS_REQUEST_PACK_STR) ==
        OFP_FLOW_STATS_REQUEST_SIZE)

# struct ofp_flow_stats
_OFP_FLOW_STATS_0_PACK_STR = 'HBxIIHHHH4xQQQ'
OFP_FLOW_STATS_0_PACK_STR = '!' + _OFP_FLOW_STATS_0_PACK_STR
OFP_FLOW_STATS_0_SIZE = 48
assert calcsize(OFP_FLOW_STATS_0_PACK_STR) == OFP_FLOW_STATS_0_SIZE
OFP_FLOW_STATS_PACK_STR = (OFP_FLOW_STATS_0_PACK_STR +
                           _OFP_MATCH_PACK_STR)
OFP_FLOW_STATS_SIZE = 56
assert calcsize(OFP_FLOW_STATS_PACK_STR) == OFP_FLOW_STATS_SIZE

# struct ofp_aggregate_stats_request
_OFP_AGGREGATE_STATS_REQUEST_0_PACK_STR = 'B3xII4xQQ'
OFP_AGGREGATE_STATS_REQUEST_0_PACK_STR = '!' + \
    _OFP_AGGREGATE_STATS_REQUEST_0_PACK_STR
OFP_AGGREGATE_STATS_REQUEST_0_SIZE = 32
assert (calcsize(OFP_AGGREGATE_STATS_REQUEST_0_PACK_STR) ==
        OFP_AGGREGATE_STATS_REQUEST_0_SIZE)
OFP_AGGREGATE_STATS_REQUEST_PACK_STR = \
    OFP_AGGREGATE_STATS_REQUEST_0_PACK_STR + _OFP_MATCH_PACK_STR
OFP_AGGREGATE_STATS_REQUEST_SIZE = 40
assert (calcsize(OFP_AGGREGATE_STATS_REQUEST_PACK_STR) ==
        OFP_AGGREGATE_STATS_REQUEST_SIZE)

# struct ofp_aggregate_stats_request
# NOTE(review): this rebinds OFP_AGGREGATE_STATS_REQUEST_PACK_STR and
# OFP_AGGREGATE_STATS_REQUEST_SIZE to the same values as above; the
# redefinition is redundant but harmless.
OFP_AGGREGATE_STATS_REQUEST_PACK_STR = '!B3xII4xQQ' + _OFP_MATCH_PACK_STR
OFP_AGGREGATE_STATS_REQUEST_SIZE = 40
assert (calcsize(OFP_AGGREGATE_STATS_REQUEST_PACK_STR) ==
        OFP_AGGREGATE_STATS_REQUEST_SIZE)

# struct ofp_aggregate_stats_reply
OFP_AGGREGATE_STATS_REPLY_PACK_STR = '!QQI4x'
OFP_AGGREGATE_STATS_REPLY_SIZE = 24
assert (calcsize(OFP_AGGREGATE_STATS_REPLY_PACK_STR) ==
        OFP_AGGREGATE_STATS_REPLY_SIZE)

# struct ofp_table_stats
OFP_TABLE_STATS_PACK_STR = '!B3xIQQ'
OFP_TABLE_STATS_SIZE = 24
assert calcsize(OFP_TABLE_STATS_PACK_STR) == OFP_TABLE_STATS_SIZE

# struct ofp_table_features
OFP_MAX_TABLE_NAME_LEN = 32
OFP_MAX_TABLE_NAME_LEN_STR = str(OFP_MAX_TABLE_NAME_LEN)
OFP_TABLE_FEATURES_PACK_STR = '!HB5x' + OFP_MAX_TABLE_NAME_LEN_STR + \
    'c' + 'QQII'
OFP_TABLE_FEATURES_SIZE = 64
assert (calcsize(OFP_TABLE_FEATURES_PACK_STR) ==
        OFP_TABLE_FEATURES_SIZE)

# enum ofp_table_feature_prop_type
OFPTFPT_INSTRUCTIONS = 0
OFPTFPT_INSTRUCTIONS_MISS = 1
OFPTFPT_NEXT_TABLES = 2
OFPTFPT_NEXT_TABLES_MISS = 3
OFPTFPT_WRITE_ACTIONS = 4
OFPTFPT_WRITE_ACTIONS_MISS = 5
OFPTFPT_APPLY_ACTIONS = 6
OFPTFPT_APPLY_ACTIONS_MISS = 7
OFPTFPT_MATCH = 8
OFPTFPT_WILDCARDS = 10
OFPTFPT_WRITE_SETFIELD = 12
OFPTFPT_WRITE_SETFIELD_MISS = 13
OFPTFPT_APPLY_SETFIELD = 14
OFPTFPT_APPLY_SETFIELD_MISS = 15
OFPTFPT_EXPERIMENTER = 0xFFFE
OFPTFPT_EXPERIMENTER_MISS = 0xFFFF

# struct ofp_table_feature_prop_instructions
OFP_TABLE_FEATURE_PROP_INSTRUCTIONS_PACK_STR = '!HH'
OFP_TABLE_FEATURE_PROP_INSTRUCTIONS_SIZE = 4
assert (calcsize(OFP_TABLE_FEATURE_PROP_INSTRUCTIONS_PACK_STR) ==
        OFP_TABLE_FEATURE_PROP_INSTRUCTIONS_SIZE)

# struct ofp_table_feature_prop_next_tables
OFP_TABLE_FEATURE_PROP_NEXT_TABLES_PACK_STR = '!HH'
OFP_TABLE_FEATURE_PROP_NEXT_TABLES_SIZE = 4
assert (calcsize(OFP_TABLE_FEATURE_PROP_NEXT_TABLES_PACK_STR) ==
        OFP_TABLE_FEATURE_PROP_NEXT_TABLES_SIZE)

# struct ofp_table_feature_prop_actions
OFP_TABLE_FEATURE_PROP_ACTIONS_PACK_STR = '!HH'
OFP_TABLE_FEATURE_PROP_ACTIONS_SIZE = 4
assert (calcsize(OFP_TABLE_FEATURE_PROP_ACTIONS_PACK_STR) ==
        OFP_TABLE_FEATURE_PROP_ACTIONS_SIZE)

# struct ofp_table_feature_prop_oxm
OFP_TABLE_FEATURE_PROP_OXM_PACK_STR = '!HH'
OFP_TABLE_FEATURE_PROP_OXM_SIZE = 4
assert (calcsize(OFP_TABLE_FEATURE_PROP_OXM_PACK_STR) ==
        OFP_TABLE_FEATURE_PROP_OXM_SIZE)

# struct ofp_port_stats_request
OFP_PORT_STATS_REQUEST_PACK_STR = '!I4x'
OFP_PORT_STATS_REQUEST_SIZE = 8
assert (calcsize(OFP_PORT_STATS_REQUEST_PACK_STR) ==
        OFP_PORT_STATS_REQUEST_SIZE)

# struct ofp_port_stats
OFP_PORT_STATS_PACK_STR = '!I4xQQQQQQQQQQQQII'
OFP_PORT_STATS_SIZE = 112
assert calcsize(OFP_PORT_STATS_PACK_STR) == OFP_PORT_STATS_SIZE

# struct ofp_queue_stats_request
OFP_QUEUE_STATS_REQUEST_PACK_STR = '!II'
OFP_QUEUE_STATS_REQUEST_SIZE = 8
assert (calcsize(OFP_QUEUE_STATS_REQUEST_PACK_STR) ==
        OFP_QUEUE_STATS_REQUEST_SIZE)

# struct ofp_queue_stats
OFP_QUEUE_STATS_PACK_STR = '!IIQQQII'
OFP_QUEUE_STATS_SIZE = 40
assert calcsize(OFP_QUEUE_STATS_PACK_STR) == OFP_QUEUE_STATS_SIZE

# struct ofp_group_stats_request
OFP_GROUP_STATS_REQUEST_PACK_STR = '!I4x'
OFP_GROUP_STATS_REQUEST_SIZE = 8
assert (calcsize(OFP_GROUP_STATS_REQUEST_PACK_STR) ==
        OFP_GROUP_STATS_REQUEST_SIZE)

# struct ofp_group_stats
OFP_GROUP_STATS_PACK_STR = '!H2xII4xQQII'
OFP_GROUP_STATS_SIZE = 40
assert calcsize(OFP_GROUP_STATS_PACK_STR) == OFP_GROUP_STATS_SIZE

# struct ofp_bucket_counter
OFP_BUCKET_COUNTER_PACK_STR = '!QQ'
OFP_BUCKET_COUNTER_SIZE = 16
assert calcsize(OFP_BUCKET_COUNTER_PACK_STR) == OFP_BUCKET_COUNTER_SIZE

# struct ofp_group_desc_stats
OFP_GROUP_DESC_STATS_PACK_STR = '!HBxI'
OFP_GROUP_DESC_STATS_SIZE = 8
assert calcsize(OFP_GROUP_DESC_STATS_PACK_STR) == OFP_GROUP_DESC_STATS_SIZE

# struct ofp_group_features
OFP_GROUP_FEATURES_PACK_STR = '!II4I4I'
OFP_GROUP_FEATURES_SIZE = 40
assert calcsize(OFP_GROUP_FEATURES_PACK_STR) == OFP_GROUP_FEATURES_SIZE

# enum ofp_group_capabilities
OFPGFC_SELECT_WEIGHT = 1 << 0    # Support weight for select groups.
OFPGFC_SELECT_LIVENESS = 1 << 1  # Support liveness for select groups.
OFPGFC_CHAINING = 1 << 2         # Support chaining groups.
OFPGFC_CHAINING_CHECKS = 1 << 3  # Check chaining for loops and delete

# struct ofp_meter_multipart_request
OFP_METER_MULTIPART_REQUEST_PACK_STR = '!I4x'
OFP_METER_MULTIPART_REQUEST_SIZE = 8
assert (calcsize(OFP_METER_MULTIPART_REQUEST_PACK_STR) ==
        OFP_METER_MULTIPART_REQUEST_SIZE)

# struct ofp_meter_stats
OFP_METER_STATS_PACK_STR = '!IH6xIQQII'
OFP_METER_STATS_SIZE = 40
assert calcsize(OFP_METER_STATS_PACK_STR) == OFP_METER_STATS_SIZE

# struct ofp_meter_band_stats
OFP_METER_BAND_STATS_PACK_STR = '!QQ'
OFP_METER_BAND_STATS_SIZE = 16
assert (calcsize(OFP_METER_BAND_STATS_PACK_STR) ==
        OFP_METER_BAND_STATS_SIZE)

# struct ofp_meter_config
OFP_METER_CONFIG_PACK_STR = '!HHI'
OFP_METER_CONFIG_SIZE = 8
assert calcsize(OFP_METER_CONFIG_PACK_STR) == OFP_METER_CONFIG_SIZE

# struct ofp_meter_features
OFP_METER_FEATURES_PACK_STR = '!IIIBB2x'
OFP_METER_FEATURES_SIZE = 16
assert (calcsize(OFP_METER_FEATURES_PACK_STR) ==
        OFP_METER_FEATURES_SIZE)

# struct ofp_experimenter_multipart_header
OFP_EXPERIMENTER_MULTIPART_HEADER_PACK_STR = '!II'
OFP_EXPERIMENTER_MULTIPART_HEADER_SIZE = 8
assert (calcsize(OFP_EXPERIMENTER_MULTIPART_HEADER_PACK_STR) ==
        OFP_EXPERIMENTER_MULTIPART_HEADER_SIZE)

# struct ofp_queue_get_config_request
OFP_QUEUE_GET_CONFIG_REQUEST_PACK_STR = '!I4x'
OFP_QUEUE_GET_CONFIG_REQUEST_SIZE = 16
assert (calcsize(OFP_QUEUE_GET_CONFIG_REQUEST_PACK_STR) +
        OFP_HEADER_SIZE) == OFP_QUEUE_GET_CONFIG_REQUEST_SIZE

# struct ofp_queue_get_config_reply
OFP_QUEUE_GET_CONFIG_REPLY_PACK_STR = '!I4x'
OFP_QUEUE_GET_CONFIG_REPLY_SIZE = 16
assert (calcsize(OFP_QUEUE_GET_CONFIG_REPLY_PACK_STR) +
        OFP_HEADER_SIZE) == OFP_QUEUE_GET_CONFIG_REPLY_SIZE

# struct ofp_packet_out
OFP_PACKET_OUT_PACK_STR = '!IIH6x'
OFP_PACKET_OUT_SIZE = 24
assert (calcsize(OFP_PACKET_OUT_PACK_STR) + OFP_HEADER_SIZE ==
        OFP_PACKET_OUT_SIZE)

# struct ofp_role_request
OFP_ROLE_REQUEST_PACK_STR = '!I4xQ'
OFP_ROLE_REQUEST_SIZE = 24
assert (calcsize(OFP_ROLE_REQUEST_PACK_STR) + OFP_HEADER_SIZE ==
        OFP_ROLE_REQUEST_SIZE)

# enum ofp_controller_role
OFPCR_ROLE_NOCHANGE = 0      # Don't change current role.
OFPCR_ROLE_EQUAL = 1         # Default role, full access.
OFPCR_ROLE_MASTER = 2        # Full access, at most one master.
OFPCR_ROLE_SLAVE = 3         # Read-only access.

# struct ofp_async_config
OFP_ASYNC_CONFIG_PACK_STR = '!2I2I2I'
OFP_ASYNC_CONFIG_SIZE = 32
assert (calcsize(OFP_ASYNC_CONFIG_PACK_STR) + OFP_HEADER_SIZE ==
        OFP_ASYNC_CONFIG_SIZE)

# struct ofp_packet_in
OFP_PACKET_IN_PACK_STR = '!IHBBQ'
OFP_PACKET_IN_SIZE = 32
OFP_PACKET_IN_DATA_OFFSET = 18
assert (calcsize(OFP_PACKET_IN_PACK_STR) + OFP_MATCH_SIZE +
        OFP_HEADER_SIZE == OFP_PACKET_IN_SIZE)

# enum ofp_packet_in_reason
OFPR_NO_MATCH = 0            # No matching flow.
OFPR_ACTION = 1              # Action explicitly output to controller.
OFPR_INVALID_TTL = 2         # Packet has invalid TTL.

# struct ofp_flow_removed
_OFP_FLOW_REMOVED_PACK_STR0 = 'QHBBIIHHQQ'
OFP_FLOW_REMOVED_PACK_STR = '!' + _OFP_FLOW_REMOVED_PACK_STR0 + \
    _OFP_MATCH_PACK_STR
OFP_FLOW_REMOVED_PACK_STR0 = '!' + _OFP_FLOW_REMOVED_PACK_STR0
OFP_FLOW_REMOVED_SIZE = 56
assert (calcsize(OFP_FLOW_REMOVED_PACK_STR) + OFP_HEADER_SIZE ==
        OFP_FLOW_REMOVED_SIZE)

# enum ofp_flow_removed_reason
OFPRR_IDLE_TIMEOUT = 0       # Flow idle time exceeded idle_timeout.
OFPRR_HARD_TIMEOUT = 1       # Time exceeded hard_timeout.
OFPRR_DELETE = 2             # Evicted by a DELETE flow mod.
OFPRR_GROUP_DELETE = 3       # Group was removed.

# struct ofp_port_status
OFP_PORT_STATUS_PACK_STR = '!B7x' + _OFP_PORT_PACK_STR
OFP_PORT_STATUS_DESC_OFFSET = OFP_HEADER_SIZE + 8
OFP_PORT_STATUS_SIZE = 80
assert (calcsize(OFP_PORT_STATUS_PACK_STR) + OFP_HEADER_SIZE ==
        OFP_PORT_STATUS_SIZE)

# enum ofp_port_reason
OFPPR_ADD = 0                # The port was added.
OFPPR_DELETE = 1             # The port was removed.
OFPPR_MODIFY = 2             # Some attribute of the port has changed.

# struct ofp_error_msg
OFP_ERROR_MSG_PACK_STR = '!HH'
OFP_ERROR_MSG_SIZE = 12
assert (calcsize(OFP_ERROR_MSG_PACK_STR) + OFP_HEADER_SIZE ==
        OFP_ERROR_MSG_SIZE)

# enum ofp_error_type
OFPET_HELLO_FAILED = 0       # Hello protocol failed.
OFPET_BAD_REQUEST = 1 # Request was not understood. OFPET_BAD_ACTION = 2 # Error in action description. OFPET_BAD_INSTRUCTION = 3 # Error in instruction list. OFPET_BAD_MATCH = 4 # Error in match. OFPET_FLOW_MOD_FAILED = 5 # Problem modifying flow entry. OFPET_GROUP_MOD_FAILED = 6 # Problem modifying group entry. OFPET_PORT_MOD_FAILED = 7 # OFPT_PORT_MOD failed. OFPET_TABLE_MOD_FAILED = 8 # Table mod request failed. OFPET_QUEUE_OP_FAILED = 9 # Queue operation failed. OFPET_SWITCH_CONFIG_FAILED = 10 # Switch config request failed. OFPET_ROLE_REQUEST_FAILED = 11 # Controller Role request failed. OFPET_METER_MOD_FAILED = 12 # Error in meter. OFPET_TABLE_FEATURES_FAILED = 13 # Setting table features failed. OFPET_EXPERIMENTER = 0xffff # Experimenter error messages. # enum ofp_hello_failed_code OFPHFC_INCOMPATIBLE = 0 # No compatible version. OFPHFC_EPERM = 1 # Permissions error. # enum ofp_bad_request_code OFPBRC_BAD_VERSION = 0 # ofp_header.version not supported. OFPBRC_BAD_TYPE = 1 # ofp_header.type not supported. OFPBRC_BAD_MULTIPART = 2 # ofp_stats_msg.type not supported. OFPBRC_BAD_EXPERIMENTER = 3 # Experimenter id not supported # (in ofp_experimenter_header # or ofp_stats_request or # ofp_stats_reply). OFPBRC_BAD_EXP_TYPE = 4 # Experimenter type not supported. OFPBRC_EPERM = 5 # Permissions error. OFPBRC_BAD_LEN = 6 # Wrong request length for type. OFPBRC_BUFFER_EMPTY = 7 # Specified buffer has already been used. OFPBRC_BUFFER_UNKNOWN = 8 # Specified buffer does not exist. OFPBRC_BAD_TABLE_ID = 9 # Specified table-id invalid or does not exist. OFPBRC_IS_SLAVE = 10 # Denied because controller is slave. OFPBRC_BAD_PORT = 11 # Invalid port. OFPBRC_BAD_PACKET = 12 # Invalid packet in packet-out OFPBRC_MULTIPART_BUFFER_OVERFLOW = 13 # ofp_multipart_request # overflowed the assigned buffer. # enum ofp_bad_action_code OFPBAC_BAD_TYPE = 0 # Unknown action type. OFPBAC_BAD_LEN = 1 # Length problem in actions. 
OFPBAC_BAD_EXPERIMENTER = 2 # Unknown experimenter id specified. OFPBAC_BAD_EXP_TYPE = 3 # Unknown action type for experimenter id. OFPBAC_BAD_OUT_PORT = 4 # Problem validating output action. OFPBAC_BAD_ARGUMENT = 5 # Bad action argument. OFPBAC_EPERM = 6 # Permissions error. OFPBAC_TOO_MANY = 7 # Can't handle this many actions. OFPBAC_BAD_QUEUE = 8 # Problem validating output queue. OFPBAC_BAD_OUT_GROUP = 9 # Invalid group id in forward action. OFPBAC_MATCH_INCONSISTENT = 10 # Action can't apply for this match, # or Set-Field missing prerequisite. OFPBAC_UNSUPPORTED_ORDER = 11 # Action order is unsupported for # the action list in an Apply-Actions # instruction OFPBAC_BAD_TAG = 12 # Actions uses an unsupported tag/encap. OFPBAC_BAD_SET_TYPE = 13 # Unsupported type in SET_FIELD action. OFPBAC_BAD_SET_LEN = 14 # Length problem in SET_FIELD action. OFPBAC_BAD_SET_ARGUMENT = 15 # Bad arguement in SET_FIELD action. # enum ofp_bad_instruction_code OFPBIC_UNKNOWN_INST = 0 # Unknown instruction. OFPBIC_UNSUP_INST = 1 # Switch or table does not support # the instruction. OFPBIC_BAD_TABLE_ID = 2 # Invalid Table-Id specified OFPBIC_UNSUP_METADATA = 3 # Metadata value unsupported by datapath. OFPBIC_UNSUP_METADATA_MASK = 4 # Metadata mask value unsupported by # datapath. OFPBIC_BAD_EXPERIMENTER = 5 # Unknown experimenter id specified. OFPBIC_BAD_EXP_TYPE = 6 # Unknown instruction for experimenter id. OFPBIC_BAD_EXP_LEN = 7 # Length problem in instrucitons. OFPBIC_EPERM = 8 # Permissions error. # enum ofp_bad_match_code OFPBMC_BAD_TYPE = 0 # Unsupported match type apecified by # the match. OFPBMC_BAD_LEN = 1 # Length problem in math. OFPBMC_BAD_TAG = 2 # Match uses an unsupported tag/encap. OFPBMC_BAD_DL_ADDR_MASK = 3 # Unsupported datalink addr mask - # switch does not support arbitrary # datalink address mask. OFPBMC_BAD_NW_ADDR_MASK = 4 # Unsupported network addr mask - # switch does not support arbitrary # network addres mask. 
OFPBMC_BAD_WILDCARDS = 5 # Unsupported combination of fields # masked or omitted in the match. OFPBMC_BAD_FIELD = 6 # Unsupported field type in the match. OFPBMC_BAD_VALUE = 7 # Unsupported value in a match field. OFPBMC_BAD_MASK = 8 # Unsupported mask specified in the # match. OFPBMC_BAD_PREREQ = 9 # A prerequisite was not met. OFPBMC_DUP_FIELD = 10 # A field type was duplicated. OFPBMC_EPERM = 11 # Permissions error. # enum ofp_flow_mod_failed_code OFPFMFC_UNKNOWN = 0 # Unspecified error. OFPFMFC_TABLES_FULL = 1 # Flow not added because table was full. OFPFMFC_BAD_TABLE_ID = 2 # Table does not exist OFPFMFC_OVERLAP = 3 # Attempted to add overlapping flow # with CHECK_OVERLAP flag set. OFPFMFC_EPERM = 4 # Permissions error. OFPFMFC_BAD_TIMEOUT = 5 # Flow not added because of # unsupported idle/hard timeout. OFPFMFC_BAD_COMMAND = 6 # Unsupported or unknown command. OFPFMFC_BAD_FLAGS = 7 # Unsupported or unknown flags. # enum ofp_group_mod_failed_code OFPGMFC_GROUP_EXISTS = 0 OFPGMFC_INVALID_GROUP = 1 OFPGMFC_WEIGHT_UNSUPPORTED = 2 # Switch does not support unequal load # sharing with select groups. OFPGMFC_OUT_OF_GROUPS = 3 # The group table is full. OFPGMFC_OUT_OF_BUCKETS = 4 # The maximum number of action buckets # for a group has been exceeded. OFPGMFC_CHAINING_UNSUPPORTED = 5 # Switch does not support groups that # forward to groups. OFPGMFC_WATCH_UNSUPPORTED = 6 # This group cannot watch the # watch_port or watch_group specified. OFPGMFC_LOOP = 7 # Group entry would cause a loop. OFPGMFC_UNKNOWN_GROUP = 8 # Group not modified because a group # MODIFY attempted to modify a # non-existent group. OFPGMFC_CHAINED_GROUP = 9 # Group not deleted because another # group is forwarding to it. OFPGMFC_BAD_TYPE = 10 # Unsupported or unknown group type. OFPGMFC_BAD_COMMAND = 11 # Unsupported or unknown command. OFPGMFC_BAD_BUCKET = 12 # Error in bucket. OFPGMFC_BAD_WATCH = 13 # Error in watch port/group. OFPGMFC_EPERM = 14 # Permissions error. 
# enum ofp_port_mod_failed_code OFPPMFC_BAD_PORT = 0 # Specified port does not exist. OFPPMFC_BAD_HW_ADDR = 1 # Specified hardware address does not # match the port number. OFPPMFC_BAD_CONFIG = 2 # Specified config is invalid. OFPPMFC_BAD_ADVERTISE = 3 # Specified advertise is invalid. OFPPMFC_EPERM = 4 # Permissions error. # enum ofp_table_mod_failed_code OFPTMFC_BAD_TABLE = 0 # Specified table does not exist. OFPTMFC_BAD_CONFIG = 1 # Specified config is invalid. OFPTMFC_EPERM = 2 # Permissions error # enum ofp_queue_op_failed_code OFPQOFC_BAD_PORT = 0 # Invalid port (or port does not exist). OFPQOFC_BAD_QUEUE = 1 # Queue does not exist. OFPQOFC_EPERM = 2 # Permissions error. # enum ofp_switch_config_failed_code OFPSCFC_BAD_FLAGS = 0 # Specified flags is invalid. OFPSCFC_BAD_LEN = 1 # Specified len is invalid. OFPQCFC_EPERM = 2 # Permissions error. # enum ofp_role_request_failed_code OFPRRFC_STALE = 0 # Stale Message: old generation_id. OFPRRFC_UNSUP = 1 # Controller role change unsupported. OFPRRFC_BAD_ROLE = 2 # Invalid role. # enum ofp_meter_mod_failed_code OFPMMFC_UNKNOWN = 0 # Unspecified error. OFPMMFC_METER_EXISTS = 1 # Meter not added because a Meter ADD # attempted to replace an existing Meter. OFPMMFC_INVALID_METER = 2 # Meter not added because Meter specified # is invalid. OFPMMFC_UNKNOWN_METER = 3 # Meter not modified because a Meter # MODIFY attempted to modify a non-existent # Meter. OFPMMFC_BAD_COMMAND = 4 # Unsupported or unknown command. OFPMMFC_BAD_FLAGS = 5 # Flag configuration unsupported. OFPMMFC_BAD_RATE = 6 # Rate unsupported. OFPMMFC_BAD_BURST = 7 # Burst size unsupported. OFPMMFC_BAD_BAND = 8 # Band unsupported. OFPMMFC_BAD_BAND_VALUE = 9 # Band value unsupported. OFPMMFC_OUT_OF_METERS = 10 # No more meters availabile. OFPMMFC_OUT_OF_BANDS = 11 # The maximum number of properties # for a meter has been exceeded. # enum ofp_table_features_failed_code OFPTFFC_BAD_TABLE = 0 # Specified table does not exist. 
OFPTFFC_BAD_METADATA = 1 # Invalid metadata mask. OFPTFFC_BAD_TYPE = 2 # Unknown property type. OFPTFFC_BAD_LEN = 3 # Length problem in properties. OFPTFFC_BAD_ARGUMENT = 4 # Unsupported property value. OFPTFFC_EPERM = 5 # Permissions error. # struct ofp_error_experimenter_msg OFP_ERROR_EXPERIMENTER_MSG_PACK_STR = '!HHI' OFP_ERROR_EXPERIMENTER_MSG_SIZE = 16 assert (calcsize(OFP_ERROR_EXPERIMENTER_MSG_PACK_STR) + OFP_HEADER_SIZE) == OFP_ERROR_EXPERIMENTER_MSG_SIZE # struct ofp_experimenter_header OFP_EXPERIMENTER_HEADER_PACK_STR = '!II' OFP_EXPERIMENTER_HEADER_SIZE = 16 assert (calcsize(OFP_EXPERIMENTER_HEADER_PACK_STR) + OFP_HEADER_SIZE == OFP_EXPERIMENTER_HEADER_SIZE) # struct ofp_hello OFP_HELLO_HEADER_SIZE = 8 # struct ofp_hello_elem_header OFP_HELLO_ELEM_HEADER_PACK_STR = '!HH' OFP_HELLO_ELEM_HEADER_SIZE = 4 assert (calcsize(OFP_HELLO_ELEM_HEADER_PACK_STR) == OFP_HELLO_ELEM_HEADER_SIZE) # enum ofp_hello_elem_type OFPHET_VERSIONBITMAP = 1 # struct ofp_hello_elem_versionbitmap OFP_HELLO_ELEM_VERSIONBITMAP_HEADER_PACK_STR = '!HH' OFP_HELLO_ELEM_VERSIONBITMAP_HEADER_SIZE = 4 assert (calcsize(OFP_HELLO_ELEM_VERSIONBITMAP_HEADER_PACK_STR) == OFP_HELLO_ELEM_VERSIONBITMAP_HEADER_SIZE) # OXM def _oxm_tlv_header(class_, field, hasmask, length): return (class_ << 16) | (field << 9) | (hasmask << 8) | length def oxm_tlv_header(field, length): return _oxm_tlv_header(OFPXMC_OPENFLOW_BASIC, field, 0, length) def oxm_tlv_header_w(field, length): return _oxm_tlv_header(OFPXMC_OPENFLOW_BASIC, field, 1, length * 2) def oxm_tlv_header_extract_hasmask(header): return (header >> 8) & 1 def oxm_tlv_header_extract_length(header): if oxm_tlv_header_extract_hasmask(header): length = (header & 0xff) / 2 else: length = header & 0xff return length OXM_OF_IN_PORT = oxm_tlv_header(OFPXMT_OFB_IN_PORT, 4) OXM_OF_IN_PHY_PORT = oxm_tlv_header(OFPXMT_OFB_IN_PHY_PORT, 4) OXM_OF_METADATA = oxm_tlv_header(OFPXMT_OFB_METADATA, 8) OXM_OF_METADATA_W = oxm_tlv_header_w(OFPXMT_OFB_METADATA, 8) 
# OXM headers for the OFPXMC_OPENFLOW_BASIC match fields.  The second
# argument is the field length in bytes; *_W variants are the masked
# forms (value + mask).

# Ethernet / VLAN
OXM_OF_ETH_DST = oxm_tlv_header(OFPXMT_OFB_ETH_DST, 6)
OXM_OF_ETH_DST_W = oxm_tlv_header_w(OFPXMT_OFB_ETH_DST, 6)
OXM_OF_ETH_SRC = oxm_tlv_header(OFPXMT_OFB_ETH_SRC, 6)
OXM_OF_ETH_SRC_W = oxm_tlv_header_w(OFPXMT_OFB_ETH_SRC, 6)
OXM_OF_ETH_TYPE = oxm_tlv_header(OFPXMT_OFB_ETH_TYPE, 2)
OXM_OF_VLAN_VID = oxm_tlv_header(OFPXMT_OFB_VLAN_VID, 2)
OXM_OF_VLAN_VID_W = oxm_tlv_header_w(OFPXMT_OFB_VLAN_VID, 2)
OXM_OF_VLAN_PCP = oxm_tlv_header(OFPXMT_OFB_VLAN_PCP, 1)

# IPv4 / L4
OXM_OF_IP_DSCP = oxm_tlv_header(OFPXMT_OFB_IP_DSCP, 1)
OXM_OF_IP_ECN = oxm_tlv_header(OFPXMT_OFB_IP_ECN, 1)
OXM_OF_IP_PROTO = oxm_tlv_header(OFPXMT_OFB_IP_PROTO, 1)
OXM_OF_IPV4_SRC = oxm_tlv_header(OFPXMT_OFB_IPV4_SRC, 4)
OXM_OF_IPV4_SRC_W = oxm_tlv_header_w(OFPXMT_OFB_IPV4_SRC, 4)
OXM_OF_IPV4_DST = oxm_tlv_header(OFPXMT_OFB_IPV4_DST, 4)
OXM_OF_IPV4_DST_W = oxm_tlv_header_w(OFPXMT_OFB_IPV4_DST, 4)
OXM_OF_TCP_SRC = oxm_tlv_header(OFPXMT_OFB_TCP_SRC, 2)
OXM_OF_TCP_DST = oxm_tlv_header(OFPXMT_OFB_TCP_DST, 2)
OXM_OF_UDP_SRC = oxm_tlv_header(OFPXMT_OFB_UDP_SRC, 2)
OXM_OF_UDP_DST = oxm_tlv_header(OFPXMT_OFB_UDP_DST, 2)
OXM_OF_SCTP_SRC = oxm_tlv_header(OFPXMT_OFB_SCTP_SRC, 2)
OXM_OF_SCTP_DST = oxm_tlv_header(OFPXMT_OFB_SCTP_DST, 2)
OXM_OF_ICMPV4_TYPE = oxm_tlv_header(OFPXMT_OFB_ICMPV4_TYPE, 1)
OXM_OF_ICMPV4_CODE = oxm_tlv_header(OFPXMT_OFB_ICMPV4_CODE, 1)

# ARP
OXM_OF_ARP_OP = oxm_tlv_header(OFPXMT_OFB_ARP_OP, 2)
OXM_OF_ARP_SPA = oxm_tlv_header(OFPXMT_OFB_ARP_SPA, 4)
OXM_OF_ARP_SPA_W = oxm_tlv_header_w(OFPXMT_OFB_ARP_SPA, 4)
OXM_OF_ARP_TPA = oxm_tlv_header(OFPXMT_OFB_ARP_TPA, 4)
OXM_OF_ARP_TPA_W = oxm_tlv_header_w(OFPXMT_OFB_ARP_TPA, 4)
OXM_OF_ARP_SHA = oxm_tlv_header(OFPXMT_OFB_ARP_SHA, 6)
OXM_OF_ARP_SHA_W = oxm_tlv_header_w(OFPXMT_OFB_ARP_SHA, 6)
OXM_OF_ARP_THA = oxm_tlv_header(OFPXMT_OFB_ARP_THA, 6)
OXM_OF_ARP_THA_W = oxm_tlv_header_w(OFPXMT_OFB_ARP_THA, 6)

# IPv6 / ICMPv6 / Neighbor Discovery
OXM_OF_IPV6_SRC = oxm_tlv_header(OFPXMT_OFB_IPV6_SRC, 16)
OXM_OF_IPV6_SRC_W = oxm_tlv_header_w(OFPXMT_OFB_IPV6_SRC, 16)
OXM_OF_IPV6_DST = oxm_tlv_header(OFPXMT_OFB_IPV6_DST, 16)
OXM_OF_IPV6_DST_W = oxm_tlv_header_w(OFPXMT_OFB_IPV6_DST, 16)
OXM_OF_IPV6_FLABEL = oxm_tlv_header(OFPXMT_OFB_IPV6_FLABEL, 4)
OXM_OF_IPV6_FLABEL_W = oxm_tlv_header_w(OFPXMT_OFB_IPV6_FLABEL, 4)
OXM_OF_ICMPV6_TYPE = oxm_tlv_header(OFPXMT_OFB_ICMPV6_TYPE, 1)
OXM_OF_ICMPV6_CODE = oxm_tlv_header(OFPXMT_OFB_ICMPV6_CODE, 1)
OXM_OF_IPV6_ND_TARGET = oxm_tlv_header(OFPXMT_OFB_IPV6_ND_TARGET, 16)
OXM_OF_IPV6_ND_SLL = oxm_tlv_header(OFPXMT_OFB_IPV6_ND_SLL, 6)
OXM_OF_IPV6_ND_TLL = oxm_tlv_header(OFPXMT_OFB_IPV6_ND_TLL, 6)

# MPLS / PBB / tunnels / extension headers
OXM_OF_MPLS_LABEL = oxm_tlv_header(OFPXMT_OFB_MPLS_LABEL, 4)
OXM_OF_MPLS_TC = oxm_tlv_header(OFPXMT_OFB_MPLS_TC, 1)
OXM_OF_MPLS_BOS = oxm_tlv_header(OFPXMT_OFB_MPLS_BOS, 1)
OXM_OF_PBB_ISID = oxm_tlv_header(OFPXMT_OFB_PBB_ISID, 3)
OXM_OF_PBB_ISID_W = oxm_tlv_header_w(OFPXMT_OFB_PBB_ISID, 3)
OXM_OF_TUNNEL_ID = oxm_tlv_header(OFPXMT_OFB_TUNNEL_ID, 8)
OXM_OF_TUNNEL_ID_W = oxm_tlv_header_w(OFPXMT_OFB_TUNNEL_ID, 8)
OXM_OF_IPV6_EXTHDR = oxm_tlv_header(OFPXMT_OFB_IPV6_EXTHDR, 2)
OXM_OF_IPV6_EXTHDR_W = oxm_tlv_header_w(OFPXMT_OFB_IPV6_EXTHDR, 2)

# define constants
OFP_VERSION = 0x04          # Wire protocol version for OpenFlow 1.3.
OFP_TCP_PORT = 6633         # Historical default controller port.
MAX_XID = 0xffffffff        # Transaction ids are unsigned 32-bit.
OFP_NO_BUFFER = 0xffffffff  # "Packet not buffered" sentinel buffer_id.
apache-2.0
xujiaji/HaiNaBaiChuan
app/src/main/java/io/xujiaji/hnbc/model/entity/UpdateEntity.java
6160
/*
 * Copyright 2018 XuJiaji
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.xujiaji.hnbc.model.entity;

import android.os.Parcel;
import android.os.Parcelable;

/**
 * App-update metadata entity (fields mirror a fir.im update-check JSON response).
 * Parcelable so it can be passed between Android components.
 * <p>
 * Example payload:
 * <pre>
 * name : 生活之家
 * version : 1
 * changelog : First release
 * updated_at : 1470226175
 * versionShort : 1.0
 * build : 1
 * installUrl : http://download.fir.im/v2/app/install/57a1dee2ca87a8691e000837?download_token=c0b6471ee5a9ee841e7a40f3ee3d49da
 * install_url : http://download.fir.im/v2/app/install/57a1dee2ca87a8691e000837?download_token=c0b6471ee5a9ee841e7a40f3ee3d49da
 * direct_install_url : http://download.fir.im/v2/app/install/57a1dee2ca87a8691e000837?download_token=c0b6471ee5a9ee841e7a40f3ee3d49da
 * update_url : http://fir.im/cookbook
 * binary : {"fsize":3748008}
 * </pre>
 * Created by jiana on 16-8-3.
 */
public class UpdateEntity implements Parcelable {

    // Field names intentionally match the JSON keys (snake_case included)
    // so they can be mapped directly by a JSON deserializer.
    private String name;                // display name of the app
    private String version;             // numeric version code (as string)
    private String changelog;           // release notes
    private int updated_at;             // unix timestamp (seconds)
    private String versionShort;        // human-readable version, e.g. "1.0"
    private String build;               // build number
    private String installUrl;          // install link (camelCase duplicate)
    private String install_url;         // install link (snake_case duplicate)
    private String direct_install_url;  // direct-download link
    private String update_url;          // landing page for the update

    // Binary metadata, e.g. {"fsize":3748008}
    private BinaryBean binary;

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getVersion() {
        return version;
    }

    public void setVersion(String version) {
        this.version = version;
    }

    public String getChangelog() {
        return changelog;
    }

    public void setChangelog(String changelog) {
        this.changelog = changelog;
    }

    public int getUpdated_at() {
        return updated_at;
    }

    public void setUpdated_at(int updated_at) {
        this.updated_at = updated_at;
    }

    public String getVersionShort() {
        return versionShort;
    }

    public void setVersionShort(String versionShort) {
        this.versionShort = versionShort;
    }

    public String getBuild() {
        return build;
    }

    public void setBuild(String build) {
        this.build = build;
    }

    public String getInstallUrl() {
        return installUrl;
    }

    public void setInstallUrl(String installUrl) {
        this.installUrl = installUrl;
    }

    public String getInstall_url() {
        return install_url;
    }

    public void setInstall_url(String install_url) {
        this.install_url = install_url;
    }

    public String getDirect_install_url() {
        return direct_install_url;
    }

    public void setDirect_install_url(String direct_install_url) {
        this.direct_install_url = direct_install_url;
    }

    public String getUpdate_url() {
        return update_url;
    }

    public void setUpdate_url(String update_url) {
        this.update_url = update_url;
    }

    public BinaryBean getBinary() {
        return binary;
    }

    public void setBinary(BinaryBean binary) {
        this.binary = binary;
    }

    /**
     * Binary (APK) metadata; currently only the file size in bytes.
     */
    public static class BinaryBean implements Parcelable {
        private int fsize; // file size in bytes

        public int getFsize() {
            return fsize;
        }

        public void setFsize(int fsize) {
            this.fsize = fsize;
        }

        @Override
        public int describeContents() {
            return 0;
        }

        // NOTE: the write order below must stay in sync with the
        // BinaryBean(Parcel) constructor's read order.
        @Override
        public void writeToParcel(Parcel dest, int flags) {
            dest.writeInt(this.fsize);
        }

        public BinaryBean() {
        }

        protected BinaryBean(Parcel in) {
            this.fsize = in.readInt();
        }

        public static final Parcelable.Creator<BinaryBean> CREATOR = new Parcelable.Creator<BinaryBean>() {
            @Override
            public BinaryBean createFromParcel(Parcel source) {
                return new BinaryBean(source);
            }

            @Override
            public BinaryBean[] newArray(int size) {
                return new BinaryBean[size];
            }
        };
    }

    @Override
    public int describeContents() {
        return 0;
    }

    // NOTE: the write order below must stay in sync with the
    // UpdateEntity(Parcel) constructor's read order.
    @Override
    public void writeToParcel(Parcel dest, int flags) {
        dest.writeString(this.name);
        dest.writeString(this.version);
        dest.writeString(this.changelog);
        dest.writeInt(this.updated_at);
        dest.writeString(this.versionShort);
        dest.writeString(this.build);
        dest.writeString(this.installUrl);
        dest.writeString(this.install_url);
        dest.writeString(this.direct_install_url);
        dest.writeString(this.update_url);
        dest.writeParcelable(this.binary, flags);
    }

    public UpdateEntity() {
    }

    protected UpdateEntity(Parcel in) {
        this.name = in.readString();
        this.version = in.readString();
        this.changelog = in.readString();
        this.updated_at = in.readInt();
        this.versionShort = in.readString();
        this.build = in.readString();
        this.installUrl = in.readString();
        this.install_url = in.readString();
        this.direct_install_url = in.readString();
        this.update_url = in.readString();
        this.binary = in.readParcelable(BinaryBean.class.getClassLoader());
    }

    public static final Parcelable.Creator<UpdateEntity> CREATOR = new Parcelable.Creator<UpdateEntity>() {
        @Override
        public UpdateEntity createFromParcel(Parcel source) {
            return new UpdateEntity(source);
        }

        @Override
        public UpdateEntity[] newArray(int size) {
            return new UpdateEntity[size];
        }
    };
}
apache-2.0
vovan888/droidparts
droidparts/src/org/droidparts/util/ResourceUtils.java
2045
/** * Copyright 2015 Alex Yanchenko * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.droidparts.util; import java.io.IOException; import java.io.InputStream; import java.util.Arrays; import android.content.Context; import android.util.TypedValue; public final class ResourceUtils { public static int dpToPx(Context ctx, int val) { return (int) TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP, val, ctx.getResources().getDisplayMetrics()); } public static String valueForKey(Context ctx, int keysArrId, int valuesArrId, String key) { String[] keysArr = ctx.getResources().getStringArray(keysArrId); String[] valuesArr = ctx.getResources().getStringArray(valuesArrId); int idx = Arrays.asList(keysArr).indexOf(key); return (idx != -1) ? valuesArr[idx] : null; } public static String readRawResource(Context ctx, int resId) throws IllegalArgumentException { InputStream is = null; try { is = ctx.getResources().openRawResource(resId); return IOUtils.readToString(is); } catch (IOException e) { throw new IllegalArgumentException(e); } finally { IOUtils.silentlyClose(is); } } public static int getResourceId(Context ctx, String resourceName) { return getId(ctx, "id", resourceName); } public static int getStringId(Context ctx, String stringName) { return getId(ctx, "string", stringName); } private static int getId(Context ctx, String type, String name) { return ctx.getResources().getIdentifier(name, type, ctx.getPackageName()); } }
apache-2.0
sailthru/stolos
stolos/examples/tasks/pyspark_example.py
2301
"""This example demonstrates how to incorporate your pyspark application with stolos's pyspark plugin. It's probably simpler to use the bash plugin instead. """ def main(elem, ns, **job_id_identifiers): """Stolos's pyspark plugin will call this function to begin the application The function parameters may be one of the below. The plugin will intelligently figure out what type of object you want to receive based on the function definition. def main(sc, ns, **job_id_identifiers): def main(textFile, ns, **job_id_identifiers): def main(elem, ns, **job_id_identifiers): `sc` - an instance of a spark context `textFile` - a pyspark RDD from a textFile, where the data loaded into the textFile RDD is determined by ns.read_fp `elem` - assume this application is a simple map operation that receives individual elements of an RDD. `ns` - an argparse.Namespace containing whatever argparse options you specified + the default ones provided by the pyspark plugin `job_id_identifiers` - a dictionary of extra keyword args that make up the job_id. (The job_id identifies what variation of work this application performs). * Note that `sc`, `textFile` and `elem` are mutually exclusive. They identify which specific api your pyspark application will use. """ # ... your code here. result = elem return result # And don't forget you would need to add this to the tasks graph: # "test_stolos/test_pyspark": { # "job_type": "pyspark", # "pymodule": "stolos.examples.tasks.pyspark_example" # } # # Then, to run it, there are two methods: # # 1. Queue a job in the task queue and then run the job # 2. Manually run a job (not recommended except for testing) # Option 1 looks like this: # ./bin/stolos-submit -a test_stolos/test_pyspark # --job_id 20140501_1_test # # stolos # -a test_stolos/test_pyspark --write_fp /tmp/alex --read_fp ./README.md # Option 2 bypasses scheduling and just runs a task + plugin directly. 
This # option is useful if you wish to verify that your code works with the plugin: # # python # -a test_stolos/test_pyspark --write_fp /tmp/alex --read_fp ./README.md # --bypass_scheduler --job_id 20140501_1_test
apache-2.0
RobAltena/deeplearning4j
deeplearning4j/deeplearning4j-nn/src/main/java/org/deeplearning4j/nn/conf/preprocessor/CnnToFeedForwardPreProcessor.java
7820
/******************************************************************************* * Copyright (c) 2015-2018 Skymind, Inc. * * This program and the accompanying materials are made available under the * terms of the Apache License, Version 2.0 which is available at * https://www.apache.org/licenses/LICENSE-2.0. * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. * * SPDX-License-Identifier: Apache-2.0 ******************************************************************************/ package org.deeplearning4j.nn.conf.preprocessor; import lombok.Data; import lombok.val; import org.deeplearning4j.nn.api.MaskState; import org.deeplearning4j.nn.conf.InputPreProcessor; import org.deeplearning4j.nn.conf.inputs.InputType; import org.nd4j.linalg.api.ndarray.INDArray; import org.nd4j.linalg.api.shape.Shape; import org.nd4j.linalg.primitives.Pair; import org.deeplearning4j.nn.workspace.LayerWorkspaceMgr; import org.deeplearning4j.nn.workspace.ArrayType; import org.nd4j.shade.jackson.annotation.JsonCreator; import org.nd4j.shade.jackson.annotation.JsonProperty; import java.util.Arrays; /** * * * A preprocessor to allow CNN and standard feed-forward network layers to be used together.<br> * For example, CNN -> Denselayer <br> * This does two things:<br> * (b) Reshapes 4d activations out of CNN layer, with shape * [numExamples, numChannels, inputHeight, inputWidth]) into 2d activations (with shape * [numExamples, inputHeight*inputWidth*numChannels]) for use in feed forward layer * (a) Reshapes epsilons (weights*deltas) out of FeedFoward layer (which is 2D or 3D with shape * [numExamples, inputHeight*inputWidth*numChannels]) into 4d epsilons (with shape * [numExamples, numChannels, inputHeight, inputWidth]) suitable to feed 
into CNN layers.<br> * Note: numChannels is equivalent to channels or featureMaps referenced in different literature * @author Adam Gibson * @see FeedForwardToCnnPreProcessor for opposite case (i.e., DenseLayer -> CNNetc) */ @Data public class CnnToFeedForwardPreProcessor implements InputPreProcessor { protected long inputHeight; protected long inputWidth; protected long numChannels; /** * @param inputHeight the columns * @param inputWidth the rows * @param numChannels the channels */ @JsonCreator public CnnToFeedForwardPreProcessor(@JsonProperty("inputHeight") long inputHeight, @JsonProperty("inputWidth") long inputWidth, @JsonProperty("numChannels") long numChannels) { this.inputHeight = inputHeight; this.inputWidth = inputWidth; this.numChannels = numChannels; } public CnnToFeedForwardPreProcessor(long inputHeight, long inputWidth) { this.inputHeight = inputHeight; this.inputWidth = inputWidth; this.numChannels = 1; } public CnnToFeedForwardPreProcessor() {} @Override // return 2 dimensions public INDArray preProcess(INDArray input, int miniBatchSize, LayerWorkspaceMgr workspaceMgr) { if (input.rank() == 2) return input; //Should usually never happen if(input.size(1) != numChannels || input.size(2) != inputHeight || input.size(3) != inputWidth){ throw new IllegalStateException("Invalid input, does not match configuration: expected [minibatch, numChannels=" + numChannels + ", inputHeight=" + inputHeight + ", inputWidth=" + inputWidth + "] but got input array of" + "shape " + Arrays.toString(input.shape())); } //Check input: nchw format if(input.size(1) != numChannels || input.size(2) != inputHeight || input.size(3) != inputWidth){ throw new IllegalStateException("Invalid input array: expected shape [minibatch, channels, height, width] = " + "[minibatch, " + numChannels + ", " + inputHeight + ", " + inputWidth + "] - got " + Arrays.toString(input.shape())); } //Assume input is standard rank 4 activations out of CNN layer //First: we require input to be in c order. 
But c order (as declared in array order) isn't enough; also need strides to be correct if (input.ordering() != 'c' || !Shape.hasDefaultStridesForShape(input)) input = workspaceMgr.dup(ArrayType.ACTIVATIONS, input, 'c'); val inShape = input.shape(); //[miniBatch,depthOut,outH,outW] val outShape = new long[]{inShape[0], inShape[1] * inShape[2] * inShape[3]}; return workspaceMgr.leverageTo(ArrayType.ACTIVATIONS, input.reshape('c', outShape)); //Should be zero copy reshape } @Override public INDArray backprop(INDArray epsilons, int miniBatchSize, LayerWorkspaceMgr workspaceMgr) { //Epsilons from layer above should be 2d, with shape [miniBatchSize, depthOut*outH*outW] if (epsilons.ordering() != 'c' || !Shape.strideDescendingCAscendingF(epsilons)) epsilons = workspaceMgr.dup(ArrayType.ACTIVATION_GRAD, epsilons, 'c'); if (epsilons.rank() == 4) return workspaceMgr.leverageTo(ArrayType.ACTIVATION_GRAD, epsilons); //Should never happen if (epsilons.columns() != inputWidth * inputHeight * numChannels) throw new IllegalArgumentException("Invalid input: expect output columns must be equal to rows " + inputHeight + " x columns " + inputWidth + " x channels " + numChannels + " but was instead " + Arrays.toString(epsilons.shape())); INDArray ret = epsilons.reshape('c', epsilons.size(0), numChannels, inputHeight, inputWidth); return workspaceMgr.leverageTo(ArrayType.ACTIVATION_GRAD, ret); //Move if required to specified workspace } @Override public CnnToFeedForwardPreProcessor clone() { try { CnnToFeedForwardPreProcessor clone = (CnnToFeedForwardPreProcessor) super.clone(); return clone; } catch (CloneNotSupportedException e) { throw new RuntimeException(e); } } @Override public InputType getOutputType(InputType inputType) { if (inputType == null || inputType.getType() != InputType.Type.CNN) { throw new IllegalStateException("Invalid input type: Expected input of type CNN, got " + inputType); } InputType.InputTypeConvolutional c = (InputType.InputTypeConvolutional) inputType; val 
outSize = c.getChannels() * c.getHeight() * c.getWidth(); return InputType.feedForward(outSize); } @Override public Pair<INDArray, MaskState> feedForwardMaskArray(INDArray maskArray, MaskState currentMaskState, int minibatchSize) { if(maskArray == null || maskArray.rank() == 2) return new Pair<>(maskArray, currentMaskState); if (maskArray.rank() != 4 || maskArray.size(2) != 1 || maskArray.size(3) != 1) { throw new UnsupportedOperationException( "Expected rank 4 mask array for 2D CNN layer activations. Got rank " + maskArray.rank() + " mask array (shape " + Arrays.toString(maskArray.shape()) + ") - when used in conjunction with input data of shape" + " [batch,channels,h,w] 4d masks passing through CnnToFeedForwardPreProcessor should have shape" + " [batchSize,1,1,1]"); } return new Pair<>(maskArray.reshape(maskArray.ordering(), maskArray.size(0), maskArray.size(1)), currentMaskState); } }
apache-2.0
mF2C/COMPSs
tests/sources/java/2_environment_variables/src/main/java/environmentVariables/MainItf.java
1186
package environmentVariables; import es.bsc.compss.types.annotations.Constraints; import es.bsc.compss.types.annotations.Parameter; import es.bsc.compss.types.annotations.parameter.Type; import es.bsc.compss.types.annotations.parameter.Direction; import es.bsc.compss.types.annotations.task.Method; public interface MainItf { @Method(declaringClass = "environmentVariables.MainImpl") @Constraints(computingUnits = "${computingUnits}", processorName = "${processorName}", processorSpeed = "${processorSpeed}", processorArchitecture = "${processorArchitecture}", processorPropertyName = "${processorPropertyName}", processorPropertyValue = "${processorPropertyValue}", memorySize = "${memorySize}", memoryType = "${memoryType}", storageSize = "${storageSize}", storageType = "${storageType}", storageBW = "${storageBW}", operatingSystemType = "${operatingSystemType}", operatingSystemDistribution = "${operatingSystemDistribution}", operatingSystemVersion = "${operatingSystemVersion}", appSoftware = "${appSoftware}", hostQueues = "${hostQueues}", wallClockLimit = "${wallClockLimit}") int task(@Parameter(type = Type.STRING, direction = Direction.IN) String message); }
apache-2.0
scscgit/XposeCraft
Assets/Game/Scripts/Collections/SerializableDictionary1.cs
4243
using System;
using System.Collections;
using System.Collections.Generic;
using UnityEngine;

namespace XposeCraft.Collections
{
    /// <summary>
    /// Represents a generic collection of key/value pairs.
    /// Can be used for serialization purposes, which implies that it can be used during a hot-swap in Unity Editor.
    /// Source: http://stackoverflow.com/questions/36194178/unity-serialized-dictionary-index-out-of-range-after-12-items
    /// </summary>
    /// <typeparam name="TKey">The type of keys in the dictionary.</typeparam>
    /// <typeparam name="TValue">The type of values in the dictionary.</typeparam>
    [Serializable]
    public class SerializableDictionary1<TKey, TValue> : IDictionary<TKey, TValue>, ISerializationCallbackReceiver
    {
        // Unity cannot serialize a Dictionary directly, so keys and values are
        // flattened into two parallel serialized lists and the dictionary is
        // rebuilt after deserialization.
        [SerializeField] private List<TKey> _keys = new List<TKey>();

        [SerializeField] private List<TValue> _values = new List<TValue>();

        // Runtime store; all IDictionary members delegate to it.  The explicit
        // interface cast is required because several members (Add(KeyValuePair),
        // Contains, CopyTo, ...) are explicit interface implementations on
        // Dictionary<TKey, TValue>.
        private Dictionary<TKey, TValue> _dictionary = new Dictionary<TKey, TValue>();

        /// <summary>Keys currently present in the dictionary.</summary>
        public ICollection<TKey> Keys
        {
            get { return ((IDictionary<TKey, TValue>) _dictionary).Keys; }
        }

        /// <summary>Values currently present in the dictionary.</summary>
        public ICollection<TValue> Values
        {
            get { return ((IDictionary<TKey, TValue>) _dictionary).Values; }
        }

        /// <summary>Number of key/value pairs in the dictionary.</summary>
        public int Count
        {
            get { return ((IDictionary<TKey, TValue>) _dictionary).Count; }
        }

        /// <summary>Always delegates to the underlying dictionary (false).</summary>
        public bool IsReadOnly
        {
            get { return ((IDictionary<TKey, TValue>) _dictionary).IsReadOnly; }
        }

        /// <summary>Gets or sets the value associated with the specified key.</summary>
        public TValue this[TKey key]
        {
            get { return ((IDictionary<TKey, TValue>) _dictionary)[key]; }
            set { ((IDictionary<TKey, TValue>) _dictionary)[key] = value; }
        }

        /// <summary>
        /// Called by Unity before serialization: snapshots the dictionary
        /// into the two parallel serialized lists.
        /// </summary>
        public void OnBeforeSerialize()
        {
            _keys.Clear();
            _values.Clear();
            foreach (KeyValuePair<TKey, TValue> pair in this)
            {
                _keys.Add(pair.Key);
                _values.Add(pair.Value);
            }
        }

        /// <summary>
        /// Called by Unity after deserialization: rebuilds the dictionary
        /// from the serialized lists.
        /// </summary>
        /// <exception cref="Exception">
        /// Thrown when the serialized key and value lists differ in length,
        /// which happens when either type failed to serialize.
        /// </exception>
        public void OnAfterDeserialize()
        {
            _dictionary = new Dictionary<TKey, TValue>();
            if (_keys.Count != _values.Count)
            {
                throw new Exception(string.Format(
                    "there are {0} keys and {1} values after deserialization. Make sure that both key and value types are serializable.",
                    _keys.Count, _values.Count));
            }
            for (int i = 0; i < _keys.Count; i++)
            {
                Add(_keys[i], _values[i]);
            }
        }

        public void Add(TKey key, TValue value)
        {
            ((IDictionary<TKey, TValue>) _dictionary).Add(key, value);
        }

        public bool ContainsKey(TKey key)
        {
            return ((IDictionary<TKey, TValue>) _dictionary).ContainsKey(key);
        }

        public bool Remove(TKey key)
        {
            return ((IDictionary<TKey, TValue>) _dictionary).Remove(key);
        }

        public bool TryGetValue(TKey key, out TValue value)
        {
            return ((IDictionary<TKey, TValue>) _dictionary).TryGetValue(key, out value);
        }

        public void Add(KeyValuePair<TKey, TValue> item)
        {
            ((IDictionary<TKey, TValue>) _dictionary).Add(item);
        }

        public void Clear()
        {
            ((IDictionary<TKey, TValue>) _dictionary).Clear();
        }

        public bool Contains(KeyValuePair<TKey, TValue> item)
        {
            return ((IDictionary<TKey, TValue>) _dictionary).Contains(item);
        }

        public void CopyTo(KeyValuePair<TKey, TValue>[] array, int arrayIndex)
        {
            ((IDictionary<TKey, TValue>) _dictionary).CopyTo(array, arrayIndex);
        }

        public bool Remove(KeyValuePair<TKey, TValue> item)
        {
            return ((IDictionary<TKey, TValue>) _dictionary).Remove(item);
        }

        public IEnumerator<KeyValuePair<TKey, TValue>> GetEnumerator()
        {
            return ((IDictionary<TKey, TValue>) _dictionary).GetEnumerator();
        }

        IEnumerator IEnumerable.GetEnumerator()
        {
            return ((IDictionary<TKey, TValue>) _dictionary).GetEnumerator();
        }
    }
}
apache-2.0
aws/aws-sdk-java
aws-java-sdk-connect/src/main/java/com/amazonaws/services/connect/model/transform/RoutingProfileQueueConfigSummaryMarshaller.java
3698
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.connect.model.transform;

import javax.annotation.Generated;

import com.amazonaws.SdkClientException;
import com.amazonaws.services.connect.model.*;

import com.amazonaws.protocol.*;
import com.amazonaws.annotation.SdkInternalApi;

/**
 * RoutingProfileQueueConfigSummaryMarshaller
 *
 * NOTE: this class is code-generated (see the {@code @Generated} annotation);
 * manual changes will be lost on regeneration.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
@SdkInternalApi
public class RoutingProfileQueueConfigSummaryMarshaller {

    // One MarshallingInfo binding per payload member of
    // RoutingProfileQueueConfigSummary; the location name is the JSON key.
    private static final MarshallingInfo<String> QUEUEID_BINDING = MarshallingInfo.builder(MarshallingType.STRING).marshallLocation(MarshallLocation.PAYLOAD)
            .marshallLocationName("QueueId").build();
    private static final MarshallingInfo<String> QUEUEARN_BINDING = MarshallingInfo.builder(MarshallingType.STRING).marshallLocation(MarshallLocation.PAYLOAD)
            .marshallLocationName("QueueArn").build();
    private static final MarshallingInfo<String> QUEUENAME_BINDING = MarshallingInfo.builder(MarshallingType.STRING).marshallLocation(MarshallLocation.PAYLOAD)
            .marshallLocationName("QueueName").build();
    private static final MarshallingInfo<Integer> PRIORITY_BINDING = MarshallingInfo.builder(MarshallingType.INTEGER)
            .marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("Priority").build();
    private static final MarshallingInfo<Integer> DELAY_BINDING = MarshallingInfo.builder(MarshallingType.INTEGER).marshallLocation(MarshallLocation.PAYLOAD)
            .marshallLocationName("Delay").build();
    private static final MarshallingInfo<String> CHANNEL_BINDING = MarshallingInfo.builder(MarshallingType.STRING).marshallLocation(MarshallLocation.PAYLOAD)
            .marshallLocationName("Channel").build();

    // Stateless, hence shared as a singleton.
    private static final RoutingProfileQueueConfigSummaryMarshaller instance = new RoutingProfileQueueConfigSummaryMarshaller();

    public static RoutingProfileQueueConfigSummaryMarshaller getInstance() {
        return instance;
    }

    /**
     * Marshall the given parameter object.
     */
    public void marshall(RoutingProfileQueueConfigSummary routingProfileQueueConfigSummary, ProtocolMarshaller protocolMarshaller) {

        if (routingProfileQueueConfigSummary == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }

        try {
            protocolMarshaller.marshall(routingProfileQueueConfigSummary.getQueueId(), QUEUEID_BINDING);
            protocolMarshaller.marshall(routingProfileQueueConfigSummary.getQueueArn(), QUEUEARN_BINDING);
            protocolMarshaller.marshall(routingProfileQueueConfigSummary.getQueueName(), QUEUENAME_BINDING);
            protocolMarshaller.marshall(routingProfileQueueConfigSummary.getPriority(), PRIORITY_BINDING);
            protocolMarshaller.marshall(routingProfileQueueConfigSummary.getDelay(), DELAY_BINDING);
            protocolMarshaller.marshall(routingProfileQueueConfigSummary.getChannel(), CHANNEL_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }

}
apache-2.0
bgd-point/point-app-test
packages/point/point-sales/src/views/app/sales/point/pos/retur.blade.php
6780
@extends('core::app.layout')

@section('content')
<div id="page-content">
    <h2 class="sub-header">Point of Sales</h2>
    @include('point-sales::app.sales.point.pos._menu')
    <div class="panel panel-default">
        <div class="panel-body" id="posview">
            <div class="form-horizontal row">
                <div class="col-xs-12 col-md-4">
                    <img src="{{url_logo()}}" height="80px" width="auto" class="img pull-left" style="margin-left: 10px">
                    <div class="pull-left text-left v-center">
                        <div class="h4 text-primary"><strong>{{$warehouse_profiles->store_name}}</strong></div>
                        <p><b>{{$warehouse_profiles->address}}<br> {{$warehouse_profiles->phone}}</b></p>
                    </div>
                </div>
                <div class="col-xs-12 col-md-8">
                    <div class="form-group">
                        <label class="col-xs-12 col-sm-3 col-md-3 control-label">Customer</label>
                        <div class="col-xs-12 col-sm-3 col-md-9 content-show" id="content-customer">
                            {{$pos->customer->name}}
                        </div>
                    </div>
                    <div class="form-group">
                        <label class="col-xs-12 col-sm-3 col-md-3 control-label">Warehouse</label>
                        <div class="col-xs-12 col-sm-9 col-md-9 content-show">
                            {{ $warehouse->name }}
                        </div>
                    </div>
                    <div class="form-group">
                        <div class="col-xs-12 col-sm-3 col-md-3 control-label">
                            <strong>Date</strong>
                        </div>
                        <div class="col-xs-12 col-sm-9 col-md-9 content-show">
                            {{ date_format_view(date('Y-m-d'))}}
                        </div>
                    </div>
                </div>
            </div>
            {{-- Retur (return) form: posts the per-row retur quantities back to the POS controller. --}}
            <form action="{{ url('sales/point/pos/'.$pos->id.'/retur') }}" method="post" class="form-horizontal row">
                {!! csrf_field() !!}
                <input name="_method" type="hidden" value="PUT">
                <input type="hidden" name="form_date" value="{{ date('Y-m-d') }}" />
                <input type="hidden" readonly name="customer_id" id="customer_id" value="{{ session('customer_id') }}">
                <input type="hidden" readonly name="pos_id" id="pos_id" value="{{ $pos->id }}">
                <input type="hidden" readonly name="warehouse_id" id="warehouse_id" value="{{ $warehouse->id }}">
                <div class="table-responsive">
                    <table id="item-datatable" class="table table-striped">
                        <thead>
                            <tr>
                                <th width="55%">ITEM</th>
                                <th width="15%" class="text-right">QUANTITY</th>
                                <th width="15%" class="text-right">RETUR</th>
                                <th width="15%" class="text-right">TOTAL</th>
                            </tr>
                        </thead>
                        <tbody class="">
                            <?php $index = 0 ?>
                            @foreach($pos->items as $detail)
                            <tr>
                                <td style="vertical-align:middle">
                                    <div style="margin-top:5px" id="item-name-{{$index}}">{{ $detail->item->codeName }}</div>
                                    <input type="hidden" id="item-id-{{$index}}" name="item_id[]" value="{{$detail->item->id}}"/>
                                    {{-- NOTE(review): this "price" is quantity * price, i.e. the row TOTAL, not the unit
                                         price. calculate() below therefore computes retur_qty * row_total. Looks like it
                                         should be the unit price ($detail->price) — confirm against the controller before
                                         changing, since price[] is also posted to the server. --}}
                                    <input type="hidden" name="price[]" readonly id="item-price-{{$index}}" class="form-control format-quantity calculate text-right" value="{{ $detail->quantity * $detail->price }}">
                                </td>
                                <td><input type="text" name="quantity[]" readonly id="item-quantity-{{$index}}" class="form-control format-quantity text-right" value="{{ $detail->quantity }}" autofocus="false"></td>
                                <td><input type="text" name="quantity_retur[]" id="item-quantity-retur-{{$index}}" class="form-control format-quantity calculate text-right" value="0"></td>
                                <td><input type="text" name="total[]" id="item-total-{{$index}}" class="form-control format-price text-right" readonly value="0" autofocus="false"/></td>
                            </tr>
                            <?php $index++ ?>
                            @endforeach
                        </tbody>
                        <tfoot>
                            <tr>
                                <td colspan="3"></td>
                                {{-- NOTE(review): this grand-total input reuses name="total[]" and is therefore posted as
                                     an extra element of the per-row totals array — verify the controller expects that. --}}
                                <td><input type="text" name="total[]" id="total" class="form-control format-price text-right" readonly value="0"/></td>
                            </tr>
                            <tr>
                                <td colspan="3"></td>
                                <td><button class="btn btn-primary btn-block">Retur</button></td>
                            </tr>
                        </tfoot>
                    </table>
                </div>
            </form>
        </div>
    </div>
</div>
@include('framework::scripts.item')
@stop

@section('scripts')
<style type="text/css">
    .form-group {
        margin-bottom: 0;
    }
    .form-group input[type="checkbox"] {
        display: none;
    }
    .form-group input[type="checkbox"] + .btn-group > label span {
        width: 20px;
    }
    .form-group input[type="checkbox"] + .btn-group > label span:first-child {
        display: none;
    }
    .form-group input[type="checkbox"] + .btn-group > label span:last-child {
        display: inline-block;
    }
    .form-group input[type="checkbox"]:checked + .btn-group > label span:first-child {
        display: inline-block;
    }
    .form-group input[type="checkbox"]:checked + .btn-group > label span:last-child {
        display: none;
    }
</style>
<script>
    initDatatable('#item-datatable');

    var counter = $("#item-datatable").dataTable().fnGetNodes().length;

    // Recalculate row totals and the grand total whenever a retur quantity changes.
    $('.calculate').keyup(function(){
        calculate();
    });

    function calculate() {
        var total = 0;
        for (var i = 0; i < counter; i++) {
            // dbNum/appNum (framework helpers) convert between display and numeric formats.
            var returQuantity = dbNum($('#item-quantity-retur-' + i).val());
            var rowPrice = dbNum($('#item-price-' + i).val());
            var rowTotal = returQuantity * rowPrice;
            $('#item-total-' + i).val(appNum(rowTotal));
            total += rowTotal;
        }
        // Bug fix: previously this displayed only the LAST row's subtotal
        // (total_per_row) instead of the accumulated grand total.
        $('#total').val(appNum(total));
    }
</script>
@stop
apache-2.0
jjenkov/iap-tools-java
src/main/java/com/jenkov/iap/ion/write/custom/IonFieldWriterDouble.java
1508
package com.jenkov.iap.ion.write.custom;

import com.jenkov.iap.ion.IonFieldTypes;
import com.jenkov.iap.ion.IonUtil;
import com.jenkov.iap.ion.write.IIonFieldWriter;

/**
 * Created by jjenkov on 04-11-2015.
 *
 * Writes a Java {@code double} field as an ION FLOAT: the pre-generated key field
 * bytes first, then a 1-byte lead byte (type nibble + length) followed by the 8
 * bytes of the IEEE 754 bit pattern, most significant byte first.
 */
public class IonFieldWriterDouble implements IIonFieldWriter {

    // Pre-generated ION key field (field name) bytes, written before every value.
    protected byte[] keyField = null;
    // Extracts the double value from the source object.
    protected IGetterDouble getter = null;

    public IonFieldWriterDouble(String fieldName, IGetterDouble getter) {
        this.keyField = IonUtil.preGenerateKeyField(fieldName);
        this.getter = getter;
    }

    /**
     * Writes the key field followed by the value field.
     *
     * @return total number of bytes written (key bytes + value bytes).
     */
    @Override
    public int writeKeyAndValueFields(Object sourceObject, byte[] destination, int destinationOffset, int maxLengthLength) {
        System.arraycopy(this.keyField, 0, destination, destinationOffset, this.keyField.length);
        destinationOffset += this.keyField.length;

        return this.keyField.length + writeValueField(sourceObject, destination, destinationOffset, maxLengthLength);
    }

    /**
     * Writes the value field only: one lead byte, then the 8 value bytes.
     *
     * @return number of bytes written — always 9 (1 lead byte + 8 value bytes).
     */
    @Override
    public int writeValueField(Object sourceObject, byte[] dest, int destOffset, int maxLengthLength) {
        double value = this.getter.get(sourceObject);

        long valueLongBits = Double.doubleToLongBits(value);

        // Magic number "8" is the length in bytes of a 64-bit (double precision)
        // floating point number in ION. (The original comment said "32 bit", but a
        // Java double is 64 bits and 8 bytes are written below.)
        dest[destOffset++] = (byte) (255 & ((IonFieldTypes.FLOAT << 4) | 8));

        // Emit the 8 bytes of the bit pattern big-endian: shifts of 56, 48, ..., 0.
        for(int i=(8-1)*8; i >= 0; i-=8){
            dest[destOffset++] = (byte) (255 & (valueLongBits >> i));
        }

        return 9;
    }
}
apache-2.0