repo_name: stringlengths 4 to 116
path: stringlengths 4 to 379
size: stringlengths 1 to 7
content: stringlengths 3 to 1.05M
license: stringclasses, 15 values
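The five columns above map one-to-one onto the fields shown in each row below (repo_name, path, size, content, license). As a minimal sketch of how a dump with this schema is typically consumed, assuming the Hugging Face datasets library and a hypothetical dataset id ("user/code-dataset" is a placeholder; the real identifier is not given here):

# Minimal sketch: load the split and read the five columns listed above.
from datasets import load_dataset

ds = load_dataset("user/code-dataset", split="train")  # hypothetical repo id
for row in ds.select(range(3)):                        # peek at the first rows
    print(row["repo_name"], row["path"], row["size"], row["license"])
    print(row["content"][:200])                        # file text, up to ~1.05M chars

Each `content` cell holds the full file text; the per-row `size` field gives its length in bytes.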
osmanpub/java-games
c64/1.4-j8/j64-net/src/main/java/org/free/j64/net/Utils.java
2619
package org.free.j64.net; import java.awt.Color; import java.awt.Dimension; import java.awt.Toolkit; import java.awt.Window; import java.io.BufferedWriter; import java.io.FileWriter; import java.io.IOException; import java.io.Writer; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Date; import java.util.List; import java.util.Locale; import java.util.logging.Level; import java.util.logging.Logger; import javax.swing.JButton; import javax.swing.JRootPane; import javax.swing.UIManager; import javax.swing.text.SimpleAttributeSet; import javax.swing.text.StyleConstants; public final class Utils { public static final String localDate = new SimpleDateFormat("yy-MM-dd", Locale.ENGLISH).format(new Date()); private Utils() { } public static void appendTextFile(String file, String data) { if (data.isEmpty()) { return; } try (Writer out = new BufferedWriter(new FileWriter(file, true))) { out.write(data); out.flush(); } catch (IOException ex) { Logger.getLogger(Utils.class.getName()).log(Level.SEVERE, null, ex); } } public static SimpleAttributeSet getAttributes(Color color, int size) { SimpleAttributeSet attr = new SimpleAttributeSet(); attr.addAttribute(StyleConstants.CharacterConstants.Foreground, color); attr.addAttribute(StyleConstants.FontSize, size); return attr; } // call from event dispatch thread only public static void initWindow(final Window win, JRootPane rp, JButton btn) { Dimension sz = Toolkit.getDefaultToolkit().getScreenSize(); win.setLocation((sz.width - win.getWidth()) / 2, (sz.height - win.getHeight()) / 2); rp.setDefaultButton(btn); UIManager.put("Button.defaultButtonFollowsFocus", Boolean.TRUE); // rp.getInputMap(JComponent.WHEN_IN_FOCUSED_WINDOW).put( // KeyStroke.getKeyStroke(KeyEvent.VK_ESCAPE, 0), "WindowClosing"); // rp.getActionMap().put("WindowClosing", new AbstractAction() { // @Override // public void actionPerformed(ActionEvent e) { // win.setVisible(false); // } // }); } public static List<String> split(String str, String pattern) { List<String> t = new ArrayList<>(); for (String s : str.split(pattern)) { if (!s.isEmpty()) { t.add(s); } } return t; } public static int validateInt(String num, int radix, int low, int high) { String s = num.trim(); try { int n; if (s.startsWith("$")) { n = Integer.parseInt(s.substring(1), 16); } else { n = Integer.parseInt(s, radix); } if (n >= low && n < high) { return n; } } catch (NumberFormatException e) { } return -1; } }
apache-2.0
rhyolight/nupic.son
app/soc/logic/delete_account.py
6564
# Copyright 2008 the Melange authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Logic related to handling deletion of user accounts.""" from google.appengine.api import mail from google.appengine.ext import ndb from google.appengine.ext import db from melange.appengine import system from soc.logic import accounts from soc.logic import user as user_logic from soc.modules.gci.logic import conversation as conversation_logic from soc.modules.gci.logic import message as message_logic from soc.modules.gci.logic import profile as profile_logic from soc.modules.gci.models import comment as comment_model from soc.modules.gci.models import task as task_model ADMIN_REQUEST_EMAIL_SUBJEST = """ User %(url_id)s has requested account deletion. """ ADMIN_REQUEST_EMAIL_BODY = """ Dear application admin, User %(name)s (%(email)s), whose username is %(url_id)s, has requested their account to be deleted. """ def request_account_deletion(user): """Requests deletion of user's account from application administrators by sending them an email. This is a temporary method, until we have an automated solution. """ account = accounts.getCurrentAccount(normalize=False) sender = system.getApplicationNoReplyEmail() subject = ADMIN_REQUEST_EMAIL_SUBJEST % { 'url_id': user.url_id } body = ADMIN_REQUEST_EMAIL_BODY % { 'name': user.name, 'email': account.email(), 'url_id': user.url_id, } mail.send_mail_to_admins(sender, subject, body) def confirm_delete(profile): """Deletes the given profile entity and also the user entity if possible. 1. Deletes the profile. 2. Deletes the user entity if no other profiles exist for the user. 3. Removes the user from task notification subscription lists. 4. Replaces GCITask created_by, modified_by, student and GCIComment created_by properties with dummy "melange_deleted_user" profile or user entity. 5. Replaces GCIMessage author with dummy "melange_deleted_user". 6. Replaces GCIConversation creator with dummy "melange_deleted_user". 7. Removes GCIConversationUser entities representing the user's involvement in a GCIConversation. This method implements a giant XG transaction, but should not take a long time because experience has shown that there won't be too much data to modify or delete. Args: profile: GCIProfile entity of the user. """ profile_key = profile.key() program_ndb_key = ndb.Key.from_old_key(profile.program.key()) user_ndb_key = ndb.Key.from_old_key(profile.parent_key()) # Cannot delete the user entity if the user has other profiles, so set it # to False in that case. 
user_delete = not (profile_logic.hasOtherGCIProfiles(profile) or profile_logic.hasOtherGCIProfiles(profile)) task_sub_q = task_model.GCITask.all().filter('subscribers', profile) task_sub_remove_list = [] for task in task_sub_q.run(): task_sub_remove_list.append(task) tasks_created_by_q = task_model.GCITask.all().filter('created_by', profile) task_created_list = [] for task in tasks_created_by_q.run(): task_created_list.append(task) tasks_modified_by_q = task_model.GCITask.all().filter('modified_by', profile) task_modified_list = [] for task in tasks_modified_by_q.run(): task_modified_list.append(task) tasks_student_q = task_model.GCITask.all().filter('student', profile) task_student_list = [] for task in tasks_student_q.run(): task_student_list.append(task) comments_created_by_q = comment_model.GCIComment.all().filter( 'created_by', profile.user) comments_created_by_list = [] for comment in comments_created_by_q.run(): comments_created_by_list.append(comment) conversations = conversation_logic.queryForProgramAndCreator( program_ndb_key, user_ndb_key) messages = message_logic.queryForUser(user_ndb_key) conversation_users = conversation_logic.queryForProgramAndUser( program_ndb_key, user_ndb_key) dummy_user = user_logic.getOrCreateDummyMelangeDeletedUser() dummy_profile = profile_logic.getOrCreateDummyMelangeDeletedProfile( profile.program) dummy_user_ndb_key = ndb.Key.from_old_key(dummy_user.key()) options = db.create_transaction_options(xg=True) def delete_account_txn(): entities_to_save = set([]) entities_to_del = set([]) # The batch size for query.run() is 20, in most of the cases we have # seen so far the user had a few tasks with subscriptions, created_by, # modified_by etc., so this should still be single datastore hits per # loop. Also, by running the query outside the transaction we may run # into situations of user subscribing to the task or creating or modifying # tasks or performing another activity after this batch fetch. However, # the chances of that happening is very low and can be traded-off for # the bigger problem of half run transactions. for conversation in conversations: conversation.creator = dummy_user_ndb_key conversation.put() for message in messages: message.author = dummy_user_ndb_key message.put() for conversation_user in conversation_users: conversation_user.key.delete() for task in task_sub_remove_list: task.subscribers.remove(profile_key) entities_to_save.add(task) for task in task_created_list: task.created_by = dummy_profile entities_to_save.add(task) for task in task_modified_list: task.modified_by = dummy_profile entities_to_save.add(task) for task in task_student_list: task.student = dummy_profile entities_to_save.add(task) for comment in comments_created_by_list: comment.created_by = dummy_user entities_to_save.add(comment) if profile.student_info: entities_to_del.add(profile.student_info) entities_to_del.add(profile) if user_delete: entities_to_del.add(profile.parent()) db.put(entities_to_save) db.delete(entities_to_del) db.run_in_transaction_options(options, delete_account_txn)
apache-2.0
gogit/popcorn
src/main/java/uk/co/thinktag/Wrapper.java
1466
package uk.co.thinktag; import uk.co.thinktag.cmd.CommandConsole; import uk.co.thinktag.server.CoreServer; import uk.co.thinktag.server.RestServer; /** * The class is a wrapper for starting in either the client or the server mode * * @author pt * */ public class Wrapper { private static void helpBlock(){ System.out.println("------------------"); System.out.println("Client mode to accept commands from the console and communicate with server on a given IP:port"); System.out.println("------------------"); System.out.println("-client 127.0.0.1:9090"); System.out.println("------------------"); System.out.println("Server mode to accept connections on a specified mode"); System.out.println("------------------"); System.out.println("-server 9090:8080"); System.out.println("------------------"); System.exit(1); } public static void main(String[] args) throws Exception { if(args==null || args.length!=2){ helpBlock(); } if(!("-client".equals(args[0])) && !("-server".equals(args[0]))){ helpBlock(); } if("-client".equals(args[0])){ String tokens[] = args[1].split(":"); CommandConsole.exec(tokens[0], Integer.parseInt(tokens[1])); } else{ String ports[]= args[1].split(":"); System.setProperty("port", ports[0]); System.setProperty("restPort", ports[1]); RestServer.main(null); CoreServer.main(null); } } }
apache-2.0
Shawnten49/AngularJS
frameworkDemo/webapp/app/core/app.core.module.js
67
(function(){ 'use strict'; angular.module('app.core', []); })();
apache-2.0
sindicate/solidbase
src/test/java/solidbase/test/ant/SqlSectionsTests.java
1789
/*-- * Copyright 2010 René M. de Bloois * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package solidbase.test.ant; import java.io.File; import java.sql.SQLException; import org.testng.Assert; import org.testng.annotations.Test; import solidbase.core.Setup; import solidbase.core.TestUtil; public class SqlSectionsTests extends MyBuildFileTest { @Test public void testSqlTask() throws SQLException { TestUtil.dropHSQLDBSchema( Setup.defaultdb, "sa", null ); configureProject( "test-sqltask.xml" ); project.setBaseDir( new File( "." ) ); // Needed when testing through Maven executeTarget( "ant-test-sections" ); String log = TestUtil.generalizeOutput( getLog() ); Assert.assertEquals( log, "SolidBase v1.5.x (http://solidbase.org)\n" + "\n" + "Opening file 'X:/.../testsql-sections.sql'\n" + " Encoding is 'ISO-8859-1'\n" + "Connecting to database...\n" + "Creating table USERS...\n" + "Filling USERS...\n" + " Inserting admin user...\n" + " Inserting 3 users...\n" + " Inserting 3 users...\n" + "Adding more USERS...\n" + " Inserting 3 users...\n" + " Inserting 3 users...\n" + "Execution complete.\n" + "\n" ); } }
apache-2.0
JetBrains/xodus
entity-store/src/main/java/jetbrains/exodus/entitystore/iterate/PropertyChangedHandleChecker.java
929
/** * Copyright 2010 - 2022 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package jetbrains.exodus.entitystore.iterate; import org.jetbrains.annotations.Nullable; public interface PropertyChangedHandleChecker extends HandleChecker { int getTypeId(); int getPropertyId(); long getLocalId(); @Nullable Comparable getOldValue(); @Nullable Comparable getNewValue(); }
apache-2.0
IvanGarza07/MaterialDesignLibrary
MaterialDesignLibrary/MaterialDesign/src/main/java/com/ivan/materialdesign/views/LayoutRipple.java
5185
package com.ivan.materialdesign.views; import android.content.Context; import android.graphics.Bitmap; import android.graphics.Bitmap.Config; import android.graphics.Canvas; import android.graphics.Color; import android.graphics.Paint; import android.graphics.Rect; import android.util.AttributeSet; import android.view.MotionEvent; public class LayoutRipple extends CustomView { int background; float rippleSpeed = 10f; int rippleSize = 3; OnClickListener onClickListener; int backgroundColor = Color.parseColor("#FFFFFF"); Integer rippleColor; Float xRippleOrigin; Float yRippleOrigin; public LayoutRipple(Context context, AttributeSet attrs) { super(context, attrs); setAttributes(attrs); } // Set atributtes of XML to View protected void setAttributes(AttributeSet attrs) { // Set background Color // Color by resource int bacgroundColor = attrs.getAttributeResourceValue(ANDROIDXML, "background", -1); if (bacgroundColor != -1) { setBackgroundColor(getResources().getColor(bacgroundColor)); } else { // Color by hexadecimal background = attrs.getAttributeIntValue(ANDROIDXML, "background", -1); if (background != -1) setBackgroundColor(background); else setBackgroundColor(this.backgroundColor); } // Set Ripple Color // Color by resource int rippleColor = attrs.getAttributeResourceValue(MATERIALDESIGNXML, "rippleColor", -1); if (rippleColor != -1) { setRippleColor(getResources().getColor(rippleColor)); } else { // Color by hexadecimal int background = attrs.getAttributeIntValue(MATERIALDESIGNXML, "rippleColor", -1); if (background != -1) setRippleColor(background); else setRippleColor(makePressColor()); } rippleSpeed = attrs.getAttributeFloatValue(MATERIALDESIGNXML, "rippleSpeed", 20f); } // Set color of background public void setBackgroundColor(int color) { this.backgroundColor = color; if (isEnabled()) beforeBackground = backgroundColor; super.setBackgroundColor(color); } public void setRippleSpeed(int rippleSpeed) { this.rippleSpeed = rippleSpeed; } // ### RIPPLE EFFECT ### float x = -1, y = -1; float radius = -1; @Override public boolean onTouchEvent(MotionEvent event) { invalidate(); if (isEnabled()) { isLastTouch = true; if (event.getAction() == MotionEvent.ACTION_DOWN) { radius = getHeight() / rippleSize; x = event.getX(); y = event.getY(); } else if (event.getAction() == MotionEvent.ACTION_MOVE) { radius = getHeight() / rippleSize; x = event.getX(); y = event.getY(); if (!((event.getX() <= getWidth() && event.getX() >= 0) && (event .getY() <= getHeight() && event.getY() >= 0))) { isLastTouch = false; x = -1; y = -1; } } else if (event.getAction() == MotionEvent.ACTION_UP) { if ((event.getX() <= getWidth() && event.getX() >= 0) && (event.getY() <= getHeight() && event.getY() >= 0)) { radius++; } else { isLastTouch = false; x = -1; y = -1; } }if (event.getAction() == MotionEvent.ACTION_CANCEL) { isLastTouch = false; x = -1; y = -1; } } return true; } @Override protected void onFocusChanged(boolean gainFocus, int direction, Rect previouslyFocusedRect) { if (!gainFocus) { x = -1; y = -1; } } @Override public boolean onInterceptTouchEvent(MotionEvent ev) { // super.onInterceptTouchEvent(ev); return true; } public Bitmap makeCircle() { Bitmap output = Bitmap.createBitmap(getWidth(), getHeight(), Config.ARGB_8888); Canvas canvas = new Canvas(output); canvas.drawARGB(0, 0, 0, 0); Paint paint = new Paint(); paint.setAntiAlias(true); if (rippleColor == null) rippleColor = makePressColor(); paint.setColor(rippleColor); x = (xRippleOrigin == null) ? x : xRippleOrigin; y = (yRippleOrigin == null) ? 
y : yRippleOrigin; canvas.drawCircle(x, y, radius, paint); if (radius > getHeight() / rippleSize) radius += rippleSpeed; if (radius >= getWidth()) { x = -1; y = -1; radius = getHeight() / rippleSize; if (onClickListener != null) onClickListener.onClick(this); } return output; } protected void onDraw(Canvas canvas) { super.onDraw(canvas); if (x != -1) { Rect src = new Rect(0, 0, getWidth(), getHeight()); Rect dst = new Rect(0, 0, getWidth(), getHeight()); canvas.drawBitmap(makeCircle(), src, dst, null); invalidate(); } } /** * Make a dark color to ripple effect * * @return */ protected int makePressColor() { int r = (this.backgroundColor >> 16) & 0xFF; int g = (this.backgroundColor >> 8) & 0xFF; int b = (this.backgroundColor >> 0) & 0xFF; r = (r - 30 < 0) ? 0 : r - 30; g = (g - 30 < 0) ? 0 : g - 30; b = (b - 30 < 0) ? 0 : b - 30; return Color.rgb(r, g, b); } @Override public void setOnClickListener(OnClickListener l) { onClickListener = l; } public void setRippleColor(int rippleColor) { this.rippleColor = rippleColor; } public void setxRippleOrigin(Float xRippleOrigin) { this.xRippleOrigin = xRippleOrigin; } public void setyRippleOrigin(Float yRippleOrigin) { this.yRippleOrigin = yRippleOrigin; } }
apache-2.0
whummer/WS-Aggregation
code/wsaggr-testbed/src/main/java/at/ac/tuwien/infosys/aggr/testbed/RequestServer.java
3835
/* * Project 'WS-Aggregation': * http://www.infosys.tuwien.ac.at/prototype/WS-Aggregation/ * * Copyright 2010-2012 Vienna University of Technology * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package at.ac.tuwien.infosys.aggr.testbed; import java.io.EOFException; import java.io.IOException; import java.io.ObjectInputStream; import java.net.ServerSocket; import java.net.Socket; import at.ac.tuwien.infosys.aggr.testbed.messaging.Request; public class RequestServer { private boolean running; private int port; // Socket socket = null; ServerSocket serverSocket = null; // ObjectInputStream ois = null; // MyClassLoader classloader = null; private final Object lock = new Object(); // only one thread should perform deployment work at a time.. public RequestServer(int port) { this.port = port; } public static void main(String[] args) throws Exception { int port = 1234; if(args.length == 1 && !args[0].trim().isEmpty()) { port = Integer.parseInt(args[0]); } new RequestServer(port).run(); } // public static class MyClassLoader extends ClassLoader { // // private Set<Class<?>> classes = new HashSet<Class<?>>(); // // protected synchronized Class<?> loadClass(String name, boolean resolve) // throws ClassNotFoundException { // for(Class<?> c : classes) { // if(c.getName().equals(name)) // return c; // } // return super.loadClass(name, resolve); // } // } public void run() throws Exception { BootstrapWrapper bootstrapper = new BootstrapWrapper(); final RequestExecuter executer = new RequestExecuter(this, bootstrapper); running = true; try { System.out.println("Starting up listener.."); serverSocket = new ServerSocket(port, 0); //classloader = new MyClassLoader(); while(running) { try { System.out.println("Listener started, waiting for connections.."); final Socket socket = serverSocket.accept(); new Thread() { public void run() { ObjectInputStream ois = null; try { System.out.println("Got connection.."); ois = new ObjectInputStream(socket.getInputStream()); System.out.println("Got input stream.."); Request request; while((request = (Request) ois.readObject()) != null) { synchronized (lock) { System.out.println("got request: " + request); request.execute(executer); } } } catch (EOFException e) { System.out.println("Client closed socket"); } catch (Exception e) { e.printStackTrace(); } finally { try { socket.close(); } catch (Exception e2) { e2.printStackTrace(); } } } }.start(); } catch (Exception e) { e.printStackTrace(); } } Thread.sleep(100); } catch (Exception e) { System.out.println(e.getMessage()); } finally { closeSockets(); } } public void stop() { running = false; closeSockets(); System.exit(0); } private void closeSockets() { try { // if(ois != null) // ois.close(); // if(socket != null) // socket.close(); if(serverSocket != null) serverSocket.close(); } catch (IOException e) { e.printStackTrace(); } } }
apache-2.0
googleapis/java-datastore
proto-google-cloud-datastore-v1/src/main/java/com/google/datastore/v1/CompositeFilterOrBuilder.java
2719
/* * Copyright 2019 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/datastore/v1/query.proto package com.google.datastore.v1; public interface CompositeFilterOrBuilder extends // @@protoc_insertion_point(interface_extends:google.datastore.v1.CompositeFilter) com.google.protobuf.MessageOrBuilder { /** * * * <pre> * The operator for combining multiple filters. * </pre> * * <code>.google.datastore.v1.CompositeFilter.Operator op = 1;</code> * * @return The enum numeric value on the wire for op. */ int getOpValue(); /** * * * <pre> * The operator for combining multiple filters. * </pre> * * <code>.google.datastore.v1.CompositeFilter.Operator op = 1;</code> * * @return The op. */ com.google.datastore.v1.CompositeFilter.Operator getOp(); /** * * * <pre> * The list of filters to combine. * Must contain at least one filter. * </pre> * * <code>repeated .google.datastore.v1.Filter filters = 2;</code> */ java.util.List<com.google.datastore.v1.Filter> getFiltersList(); /** * * * <pre> * The list of filters to combine. * Must contain at least one filter. * </pre> * * <code>repeated .google.datastore.v1.Filter filters = 2;</code> */ com.google.datastore.v1.Filter getFilters(int index); /** * * * <pre> * The list of filters to combine. * Must contain at least one filter. * </pre> * * <code>repeated .google.datastore.v1.Filter filters = 2;</code> */ int getFiltersCount(); /** * * * <pre> * The list of filters to combine. * Must contain at least one filter. * </pre> * * <code>repeated .google.datastore.v1.Filter filters = 2;</code> */ java.util.List<? extends com.google.datastore.v1.FilterOrBuilder> getFiltersOrBuilderList(); /** * * * <pre> * The list of filters to combine. * Must contain at least one filter. * </pre> * * <code>repeated .google.datastore.v1.Filter filters = 2;</code> */ com.google.datastore.v1.FilterOrBuilder getFiltersOrBuilder(int index); }
apache-2.0
avenwu/support
support/src/main/java/net/avenwu/support/widget/ExTextView.java
9247
package net.avenwu.support.widget; import android.animation.ValueAnimator; import android.annotation.TargetApi; import android.content.Context; import android.content.res.TypedArray; import android.graphics.drawable.Drawable; import android.text.Html; import android.text.TextUtils; import android.util.AttributeSet; import android.util.Log; import android.view.View; import android.view.animation.DecelerateInterpolator; import android.widget.TextView; import net.avenwu.support.R; import java.io.UnsupportedEncodingException; import java.lang.reflect.Field; /** * Created by chaobin on 11/18/15. */ @TargetApi(11) public class ExTextView extends TextView implements View.OnClickListener, ValueAnimator.AnimatorUpdateListener { static final String TAG = ExTextView.class.getCanonicalName(); static final int END = 0; static final int RIGHT = 1; int mMaxHeight; int mCollapsedHeight; int mMaxLine; boolean isCollapsed; boolean isLayout = false; boolean isMeasured = false; ValueAnimator mExpandAnimator; CharSequence mCollapsedText; CharSequence mFullText; Drawable mIndicator; OnClickListener mOuterListener; Style mStyle; public ExTextView(Context context) { this(context, null); } public ExTextView(Context context, AttributeSet attrs) { super(context, attrs); TypedArray a = context.obtainStyledAttributes(attrs, R.styleable.ExTextView); isCollapsed = a.getBoolean(R.styleable.ExTextView_expand_collapse_default, true); mIndicator = a.getDrawable(R.styleable.ExTextView_expand_indicator); if (mIndicator != null) { mIndicator.setBounds(0, 0, mIndicator.getIntrinsicWidth(), mIndicator.getIntrinsicHeight ()); } int style = a.getInt(R.styleable.ExTextView_expand_style, 0); switch (style) { case END: mStyle = new EndStyle(); break; case RIGHT: mStyle = new RightStyle(); break; } a.recycle(); reflectMaxLines(); super.setOnClickListener(this); } @Override protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { Log.d(TAG, "onMeasure"); if (!TextUtils.isEmpty(mFullText) && !isMeasured) { Log.d(TAG, "onMeasure isCollapsed=" + isCollapsed); if (isCollapsed) { setMaxLines(Integer.MAX_VALUE); super.onMeasure(widthMeasureSpec, heightMeasureSpec); mMaxHeight = getMeasuredHeight(); setMaxLines(mMaxLine); super.onMeasure(widthMeasureSpec, heightMeasureSpec); mCollapsedHeight = getMeasuredHeight(); } else { setMaxLines(mMaxLine); super.onMeasure(widthMeasureSpec, heightMeasureSpec); mCollapsedHeight = getMeasuredHeight(); setMaxLines(Integer.MAX_VALUE); super.onMeasure(widthMeasureSpec, heightMeasureSpec); mMaxHeight = getMeasuredHeight(); } isMeasured = true; } else { super.onMeasure(widthMeasureSpec, heightMeasureSpec); } } @Override protected void onLayout(boolean changed, int left, int top, int right, int bottom) { if (!TextUtils.isEmpty(mFullText) && (!isLayout && isExpandable())) { Log.d(TAG, "onLayout reset text"); mStyle.onLayout(isCollapsed, this, mIndicator); if (isCollapsed) { if (mCollapsedText == null) { mCollapsedText = mStyle.collapsedText(this, mIndicator, mFullText, mMaxLine); } super.setText(mCollapsedText, reflectCurrentBufferType()); } else { super.setText(mFullText, reflectCurrentBufferType()); } isLayout = true; } else { super.onLayout(changed, left, top, right, bottom); } } private void reflectMaxLines() { try { Field maximumField = TextView.class.getDeclaredField("mMaximum"); maximumField.setAccessible(true); mMaxLine = maximumField.getInt(this); } catch (NoSuchFieldException e) { e.printStackTrace(); } catch (IllegalAccessException e) { e.printStackTrace(); } } private BufferType 
reflectCurrentBufferType() { try { Field field = TextView.class.getDeclaredField("mBufferType"); field.setAccessible(true); return (BufferType) field.get(this); } catch (NoSuchFieldException e) { e.printStackTrace(); } catch (IllegalAccessException e) { e.printStackTrace(); } return BufferType.NORMAL; } @Override public void setText(CharSequence text, BufferType type) { //view 复用的时候会重复绑定数据 if (TextUtils.isEmpty(text)) { super.setText(text, type); } else if (text.equals(mFullText)) { super.setText(isCollapsed ? mCollapsedText : mFullText, reflectCurrentBufferType()); } else { mFullText = text; mCollapsedText = null; isLayout = false; isMeasured = false; super.setText(mFullText, type); } } @Override public void onClick(View v) { if (mOuterListener != null) { mOuterListener.onClick(v); } if (!isExpandable()) { return; } if (mExpandAnimator != null) { mExpandAnimator.end(); } isLayout = false; int from = getHeight(); int to; if (from < mMaxHeight) { to = mMaxHeight; isCollapsed = false; setMaxLines(Integer.MAX_VALUE); } else { to = mCollapsedHeight; isCollapsed = true; setMaxLines(mMaxLine); } if (mExpandAnimator == null) { mExpandAnimator = ValueAnimator.ofInt(from, to); mExpandAnimator.setInterpolator(new DecelerateInterpolator()); mExpandAnimator.addUpdateListener(this); } else { mExpandAnimator.setIntValues(from, to); } mExpandAnimator.start(); } @Override public void onAnimationUpdate(ValueAnimator animation) { getLayoutParams().height = (Integer) animation.getAnimatedValue(); requestLayout(); } private boolean isExpandable() { return mCollapsedHeight != mMaxHeight; } @Override public void setOnClickListener(OnClickListener listener) { mOuterListener = listener; } public interface Style { CharSequence collapsedText(TextView textView, Drawable drawable, CharSequence text, int maxLine); void onLayout(boolean collapsed, TextView textView, Drawable drawable); } private static class EndStyle implements Style { static final String HTML_IMG = "...<img src='icon'/>"; static final String HTML_NEW_LINE = "<br>"; @Override public CharSequence collapsedText(TextView view, final Drawable drawable, CharSequence text, int maxLine) { StringBuilder stringBuilder = new StringBuilder(); int start = 0; // 由于中英文字符等排版问题断行具有不确定性,此处强行对缩略文本断行 for (int i = 0; i < maxLine; i++) { int end = view.getLayout().getLineVisibleEnd(i); String append; if (i == maxLine - 1) { end -= 3; append = HTML_IMG; } else { append = HTML_NEW_LINE; } stringBuilder.append(text.subSequence(start, end)).append(append); start = end; } String subString = stringBuilder.toString(); return Html.fromHtml(subString, new Html.ImageGetter() { @Override public Drawable getDrawable(String source) { return "icon".equals(source) ? drawable : null; } }, null); } @Override public void onLayout(boolean collapsed, TextView textView, Drawable drawable) { } } private static class RightStyle implements Style { @Override public CharSequence collapsedText(TextView view, Drawable drawable, CharSequence text, int maxLine) { int end = view.getLayout().getLineVisibleEnd(maxLine - 1); return text.subSequence(0, end - 3) + "..."; } @Override public void onLayout(boolean collapsed, TextView textView, Drawable drawable) { Drawable[] d = textView.getCompoundDrawables(); if (collapsed) { d[2] = drawable; } else { d[2] = null; } textView.setCompoundDrawables(d[0], d[1], d[2], d[3]); } } }
apache-2.0
utybo/epc2eu
java/utybo/mod/epc2eu/common/BonusEvent.java
4777
package utybo.mod.epc2eu.common; import net.minecraft.client.Minecraft; import net.minecraft.client.entity.AbstractClientPlayer; import net.minecraft.client.model.ModelBiped; import net.minecraft.client.renderer.IImageBuffer; import net.minecraft.client.renderer.ThreadDownloadImageData; import net.minecraft.client.renderer.texture.ITextureObject; import net.minecraft.client.renderer.texture.TextureManager; import net.minecraft.util.MathHelper; import net.minecraft.util.ResourceLocation; import net.minecraft.util.StringUtils; import net.minecraftforge.client.event.RenderPlayerEvent; import org.lwjgl.opengl.GL11; import cpw.mods.fml.common.eventhandler.SubscribeEvent; public class BonusEvent { private ResourceLocation locationCape; private ThreadDownloadImageData downloadImageCape; @SubscribeEvent public void onPlayerRender(RenderPlayerEvent.Specials.Pre event) { AbstractClientPlayer player = (AbstractClientPlayer)event.entityPlayer; if(player.getDisplayName() != null && !player.getDisplayName().isEmpty()) { if(this.locationCape == null) { this.locationCape = getLocationCape(player.getDisplayName()); } else if(this.downloadImageCape == null) { this.downloadImageCape = getDownloadImageCape(this.getLocationCape(player.getDisplayName()), player.getDisplayName()); } else { boolean flag = this.getTextureCape().isTextureUploaded(); boolean flag1 = !player.isInvisible(); boolean flag2 = !player.getHideCape(); flag = event.renderCape && flag; float f6; if(flag && flag1 && flag2) { Minecraft.getMinecraft().renderEngine.bindTexture(this.getLocationCape(player.getDisplayName())); GL11.glPushMatrix(); GL11.glTranslatef(0.0F, 0.0F, 0.125F); double d0 = player.field_71091_bM + (player.field_71094_bP - player.field_71091_bM) * (double)event.partialRenderTick - (player.prevPosX + (player.posX - player.prevPosX) * (double)event.partialRenderTick); double d1 = player.field_71096_bN + (player.field_71095_bQ - player.field_71096_bN) * (double)event.partialRenderTick - (player.prevPosY + (player.posY - player.prevPosY) * (double)event.partialRenderTick); double d2 = player.field_71097_bO + (player.field_71085_bR - player.field_71097_bO) * (double)event.partialRenderTick - (player.prevPosZ + (player.posZ - player.prevPosZ) * (double)event.partialRenderTick); f6 = player.prevRenderYawOffset + (player.renderYawOffset - player.prevRenderYawOffset) * event.partialRenderTick; double d3 = (double)MathHelper.sin(f6 * (float)Math.PI / 180.0F); double d4 = (double)(-MathHelper.cos(f6 * (float)Math.PI / 180.0F)); float f7 = (float)d1 * 10.0F; if(f7 < -6.0F) { f7 = -6.0F; } if(f7 > 32.0F) { f7 = 32.0F; } float f8 = (float)(d0 * d3 + d2 * d4) * 100.0F; float f9 = (float)(d0 * d4 - d2 * d3) * 100.0F; if(f8 < 0.0F) { f8 = 0.0F; } float f10 = player.prevCameraYaw + (player.cameraYaw - player.prevCameraYaw) * event.partialRenderTick; f7 += MathHelper.sin((player.prevDistanceWalkedModified + (player.distanceWalkedModified - player.prevDistanceWalkedModified) * event.partialRenderTick) * 6.0F) * 32.0F * f10; if(player.isSneaking()) { f7 += 25.0F; } GL11.glRotatef(6.0F + f8 / 2.0F + f7, 1.0F, 0.0F, 0.0F); GL11.glRotatef(f9 / 2.0F, 0.0F, 0.0F, 1.0F); GL11.glRotatef(-f9 / 2.0F, 0.0F, 1.0F, 0.0F); GL11.glRotatef(180.0F, 0.0F, 1.0F, 0.0F); ModelBiped modelBiped = new ModelBiped(); modelBiped.renderCloak(0.0625F); GL11.glPopMatrix(); } } } } public ThreadDownloadImageData getTextureCape() { return this.downloadImageCape; } public static ThreadDownloadImageData getDownloadImageCape(ResourceLocation resourceLocation, String playerName) { 
return getDownloadImage(resourceLocation, getCapeUrl(playerName), (ResourceLocation)null, (IImageBuffer)null); } private static ThreadDownloadImageData getDownloadImage(ResourceLocation res, String link, ResourceLocation defRes, IImageBuffer image) { TextureManager texturemanager = Minecraft.getMinecraft().getTextureManager(); Object object = texturemanager.getTexture(res); if(object == null) { object = new ThreadDownloadImageData(link, defRes, image); texturemanager.loadTexture(res, (ITextureObject)object); } return (ThreadDownloadImageData)object; } public static String getCapeUrl(String playerName) { return String.format("http://utybo.tk/capes/%s.png", new Object[] {StringUtils.stripControlCodes(playerName)}); } public static ResourceLocation getLocationCape(String playerName) { return new ResourceLocation("epc2eu" + "cloaks/" + StringUtils.stripControlCodes(playerName)); } }
apache-2.0
whitewalter/fleetwood-bounder
src/main/java/com/heisenberg/impl/definition/TimerDefinitionImpl.java
1393
/* * Copyright 2014 Heisenberg Enterprises Ltd. * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.heisenberg.impl.definition; import com.heisenberg.api.builder.TimerBuilder; import com.heisenberg.impl.WorkflowEngineImpl; import com.heisenberg.impl.job.JobType; /** * @author Walter White */ public class TimerDefinitionImpl implements TimerBuilder { public String name; public WorkflowEngineImpl processEngine; public WorkflowImpl processDefinition; public ScopeImpl parent; public JobType jobType; public void validate(WorkflowValidator validateProcessDefinitionAfterDeserialization) { } @Override public TimerBuilder name(String name) { return null; } @Override public TimerBuilder duedateAfterCreation(long millis) { return null; } @Override public TimerBuilder repeatAfterExecution(long millis) { return null; } }
apache-2.0
OLR-xray/OLR-3.0
src/xray/xr_3da/xrGame/space_restrictor.cpp
7584
//////////////////////////////////////////////////////////////////////////// // Module : space_restrictor.cpp // Created : 17.08.2004 // Modified : 17.08.2004 // Author : Dmitriy Iassenev // Description : Space restrictor //////////////////////////////////////////////////////////////////////////// #include "pch_script.h" #include "space_restrictor.h" #include "xrServer_Objects_ALife.h" #include "level.h" #include "space_restriction_manager.h" #include "restriction_space.h" #include "ai_space.h" #include "CustomZone.h" #ifdef DEBUG # include "debug_renderer.h" #endif CSpaceRestrictor::~CSpaceRestrictor () { } void CSpaceRestrictor::Center (Fvector& C) const { XFORM().transform_tiny (C,CFORM()->getSphere().P); } float CSpaceRestrictor::Radius () const { return (CFORM()->getRadius()); } BOOL CSpaceRestrictor::net_Spawn (CSE_Abstract* data) { actual (false); CSE_Abstract *abstract = (CSE_Abstract*)data; CSE_ALifeSpaceRestrictor *se_shape = smart_cast<CSE_ALifeSpaceRestrictor*>(abstract); R_ASSERT (se_shape); m_space_restrictor_type = se_shape->m_space_restrictor_type; CCF_Shape *shape = xr_new<CCF_Shape>(this); collidable.model = shape; for (u32 i=0; i < se_shape->shapes.size(); ++i) { CShapeData::shape_def &S = se_shape->shapes[i]; switch (S.type) { case 0 : { shape->add_sphere (S.data.sphere); break; } case 1 : { shape->add_box (S.data.box); break; } } } shape->ComputeBounds (); BOOL result = inherited::net_Spawn(data); if (!result) return (FALSE); setEnabled (FALSE); setVisible (FALSE); if (!ai().get_level_graph() || (RestrictionSpace::ERestrictorTypes(se_shape->m_space_restrictor_type) == RestrictionSpace::eRestrictorTypeNone)) return (TRUE); Level().space_restriction_manager().register_restrictor(this,RestrictionSpace::ERestrictorTypes(se_shape->m_space_restrictor_type)); return (TRUE); } void CSpaceRestrictor::net_Destroy () { inherited::net_Destroy (); if (!ai().get_level_graph()) return; if (RestrictionSpace::ERestrictorTypes(m_space_restrictor_type) == RestrictionSpace::eRestrictorTypeNone) return; Level().space_restriction_manager().unregister_restrictor(this); } bool CSpaceRestrictor::inside (const Fsphere &sphere) const { if (!actual()) prepare (); if (!m_selfbounds.intersect(sphere)) return (false); return (prepared_inside(sphere)); } BOOL CSpaceRestrictor::UsedAI_Locations () { return (FALSE); } void CSpaceRestrictor::spatial_move () { inherited::spatial_move (); actual (false); } void CSpaceRestrictor::prepare () const { Center (m_selfbounds.P); m_selfbounds.R = Radius(); m_spheres.resize (0); m_boxes.resize (0); const CCF_Shape *shape = (const CCF_Shape*)collidable.model; typedef xr_vector<CCF_Shape::shape_def> SHAPES; SHAPES::const_iterator I = shape->shapes.begin(); SHAPES::const_iterator E = shape->shapes.end(); for ( ; I != E; ++I) { switch ((*I).type) { case 0 : { // sphere Fsphere temp; const Fsphere &sphere = (*I).data.sphere; XFORM().transform_tiny (temp.P,sphere.P); temp.R = sphere.R; m_spheres.push_back (temp); break; } case 1 : { // box Fmatrix sphere; const Fmatrix &box = (*I).data.box; sphere.mul_43 (XFORM(),box); // Build points Fvector A,B[8]; CPlanes temp; A.set (-.5f, -.5f, -.5f); sphere.transform_tiny(B[0],A); A.set (-.5f, -.5f, +.5f); sphere.transform_tiny(B[1],A); A.set (-.5f, +.5f, +.5f); sphere.transform_tiny(B[2],A); A.set (-.5f, +.5f, -.5f); sphere.transform_tiny(B[3],A); A.set (+.5f, +.5f, +.5f); sphere.transform_tiny(B[4],A); A.set (+.5f, +.5f, -.5f); sphere.transform_tiny(B[5],A); A.set (+.5f, -.5f, +.5f); sphere.transform_tiny(B[6],A); A.set (+.5f, 
-.5f, -.5f); sphere.transform_tiny(B[7],A); temp.m_planes[0].build (B[0],B[3],B[5]); temp.m_planes[1].build (B[1],B[2],B[3]); temp.m_planes[2].build (B[6],B[5],B[4]); temp.m_planes[3].build (B[4],B[2],B[1]); temp.m_planes[4].build (B[3],B[2],B[4]); temp.m_planes[5].build (B[1],B[0],B[6]); m_boxes.push_back (temp); break; } default : NODEFAULT; } } actual (true); } bool CSpaceRestrictor::prepared_inside (const Fsphere &sphere) const { VERIFY (actual()); { SPHERES::const_iterator I = m_spheres.begin(); SPHERES::const_iterator E = m_spheres.end(); for ( ; I != E; ++I) if (sphere.intersect(*I)) return (true); } { BOXES::const_iterator I = m_boxes.begin(); BOXES::const_iterator E = m_boxes.end(); for ( ; I != E; ++I) { for (u32 i=0; i<PLANE_COUNT; ++i) if ((*I).m_planes[i].classify(sphere.P) > sphere.R) goto continue_loop; return (true); continue_loop: continue; } } return (false); } #ifdef DEBUG #include "customzone.h" #include "hudmanager.h" extern Flags32 dbg_net_Draw_Flags; void CSpaceRestrictor::OnRender () { if(!bDebug) return; if (!(dbg_net_Draw_Flags.is_any((1<<2)))) return; RCache.OnFrameEnd(); Fvector l_half; l_half.set(.5f, .5f, .5f); Fmatrix l_ball, l_box; xr_vector<CCF_Shape::shape_def> &l_shapes = ((CCF_Shape*)CFORM())->Shapes(); xr_vector<CCF_Shape::shape_def>::iterator l_pShape; u32 Color = 0; CCustomZone *custom_zone = smart_cast<CCustomZone*>(this); if (custom_zone && custom_zone->IsEnabled()) Color = D3DCOLOR_XRGB(0,255,255); else Color = D3DCOLOR_XRGB(255,0,0); for(l_pShape = l_shapes.begin(); l_shapes.end() != l_pShape; ++l_pShape) { switch(l_pShape->type) { case 0: { Fsphere &l_sphere = l_pShape->data.sphere; l_ball.scale(l_sphere.R, l_sphere.R, l_sphere.R); //l_ball.scale(1.f, 1.f, 1.f); Fvector l_p; XFORM().transform(l_p, l_sphere.P); l_ball.translate_add(l_p); //l_ball.mul(XFORM(), l_ball); //l_ball.mul(l_ball, XFORM()); Level().debug_renderer().draw_ellipse(l_ball, Color); } break; case 1: { l_box.mul(XFORM(), l_pShape->data.box); Level().debug_renderer().draw_obb(l_box, l_half, Color); } break; } } if( Device.vCameraPosition.distance_to(XFORM().c)<100.0f ){ //DRAW name Fmatrix res; res.mul (Device.mFullTransform, XFORM()); Fvector4 v_res; float delta_height = 0.f; // get up on 2 meters Fvector shift; static float gx = 0.0f; static float gy = 2.0f; static float gz = 0.0f; shift.set(gx,gy,gz); res.transform(v_res, shift); // check if the object in sight if (v_res.z < 0 || v_res.w < 0) return; if (v_res.x < -1.f || v_res.x > 1.f || v_res.y<-1.f || v_res.y>1.f) return; // get real (x,y) float x = (1.f + v_res.x)/2.f * (Device.dwWidth); float y = (1.f - v_res.y)/2.f * (Device.dwHeight) - delta_height; HUD().Font().pFontMedium->SetColor (0xffff0000); HUD().Font().pFontMedium->OutSet (x, y-=delta_height); HUD().Font().pFontMedium->OutNext ( Name() ); CCustomZone* z = smart_cast<CCustomZone*>(this); if(z){ string64 str; switch (z->ZoneState()){ case CCustomZone::eZoneStateIdle: strcpy(str,"IDLE"); break; case CCustomZone::eZoneStateAwaking: strcpy(str,"AWAKING"); break; case CCustomZone::eZoneStateBlowout: strcpy(str,"BLOWOUT"); break; case CCustomZone::eZoneStateAccumulate: strcpy(str,"ACCUMULATE"); break; case CCustomZone::eZoneStateDisabled: strcpy(str,"DISABLED"); break; }; HUD().Font().pFontMedium->OutNext ( str ); } } } #endif
apache-2.0
DanielInGitHub/CrazyDog
src/main/java/org/crazydog/controller/PageDispatcher.java
689
package org.crazydog.controller; import org.springframework.stereotype.Controller; import org.springframework.web.bind.annotation.RequestMapping; import javax.servlet.http.HttpServletRequest; /** * This is the dispatcher for forwarding to jsp resources * Created by never on 2015/8/27. */ @Deprecated @Controller public class PageDispatcher { @RequestMapping("/views") public String dispatch(HttpServletRequest request) { String parameter = request.getParameter("page"); // System.out.println(parameter); if (parameter != null) { // return "forward:/WEB-INF/views/" + parameter + ".jsp"; return parameter; } return null; } }
apache-2.0
dhgarrette/2015-ccg-parsing
src/main/scala/dhg/ccg/bisk/Bisk.scala
15630
package dhg.ccg.bisk import dhg.util._ import dhg.util.viz._ import dhg.ccg.cat._ import dhg.ccg.rule._ import dhg.ccg.tagdict._ import dhg.ccg.parse._ import dhg.ccg.parse.pcfg._ import scalaz.{ \/ => _, _ } import Scalaz._ import scala.collection.parallel.immutable.ParVector import dhg.gfl.Fudg import dhg.gfl.FudgSentence import dhg.gfl.{ Edge => GflEdge } import dhg.gfl.Token import dhg.gfl.Node import dhg.gfl.WordNode import dhg.gfl.Sentence import dhg.gfl.{ Sentence => GflSentence } /** * @author dhg */ object Bisk { type Word = String type Pos = String val A = cat"A".asInstanceOf[AtomCat] val N = cat"N".asInstanceOf[AtomCat] val S = cat"S".asInstanceOf[AtomCat] val CONJ = cat"conj".asInstanceOf[AtomCat] def main(args: Array[String]): Unit = { // val tagdict = SimpleTagDictionary(Map( // "The" -> Set(cat"(NP/N)"), // "old" -> Set(cat"N", cat"(N/N)"), // "man" -> Set(cat"N", cat"((S\NP)/NP)"), // "ships" -> Set(cat"N", cat"(S\NP)")), "<S>", StartCat, "<E>", EndCat) // parser.parse("The old man ships".splitWhitespace, tagdict) foreach TreeViz.drawTree // Vector[Cat]( // S, // // S / S, // S \ S, // // (S \ S) / (S \ S), // (S \ S) \ (S \ S), // (S / S) / (S / S), // (S / S) \ (S / S), // // (S \ S) / (S / S), // (S \ S) \ (S / S), // (S / S) / (S \ S), // (S / S) \ (S \ S), // // (N \ S) \ (S \ S), // (S \ N) / (S \ S), // (S / S) / (N / S), // (S / S) \ (S / N)).foreach { // case ModMod(x) => println(s"Yes: $x") // case x => println(s"No: $x") // } // // { // val cats = Set[Cat]( // N, S \ N, S \ S, N / N, (S \ N) / N, (S \ N) / (S \ N), // (N \ N) / (S / N), S / (S \ N), // S / N, N \ N, // N, S, CONJ, TopCat, // N / N, N \ N, S / S, S \ S, S \ N, // (S / S) / (S / S), (N \ N) / (N \ N), S \ N, (S \ S) / (S \ S), (S \ S) \ (S \ S), // (N / N) \ (N / N), (S / S) \ (S / S), (N \ N) \ (N \ N), (S \ S) / (S \ S), // N, N / N, (N / N) / N, // (S / S) / N, // (S \ S) / N, // (N \ N) / N, (N / N) / N, (S \ N) \ S, (S \ S) / S, (S \ S) / N, S / N, // (S / N) / S) // // println(cats -- cleanCatSet(cats)) // } // val ss1: Vector[(Vector[(Word, Pos)], Option[FudgSentence])] = { val s1 = "The|DT man|NNS ate|VBD quickly|RB".splitWhitespace.map(_.rsplit("\\|")).map { case Seq(w, t) => (w: Word, t: Pos) } val s2 = "The|DT man|NNS eats|VBZ with|IN friends|NNS".splitWhitespace.map(_.rsplit("\\|")).map { case Seq(w, t) => (w: Word, t: Pos) } val s3 = "The|DT man|NNS ate|VBD chips|NNS".splitWhitespace.map(_.rsplit("\\|")).map { case Seq(w, t) => (w: Word, t: Pos) } Vector(s1 -> None, s2 -> None, s3 -> None) } val ss2: Vector[(Vector[(Word, Pos)], Option[FudgSentence])] = { // 0 1 2 3 val s4 = "John|NNP walked|VBD a|DT dog|NN".splitWhitespace.map(_.rsplit("\\|")).map { case Seq(w, t) => (w: Word, t: Pos) } val e4 = Some(GflSentence.fromDepIndices(s4.map(_._1), Vector(1 -> 0, 1 -> 3))) Vector(s4 -> e4) } val pos2catFinal = new BiskPosCatMappingInducer().induceWithConstraints(ss2, 2) pos2catFinal.toVector.sortBy(_._1).mapt((p, cs) => f"$p%-3s -> ${cs.size}: ${cs.mkString(", ")}") foreach println } } class BiskPosCatMappingInducer( gcb: CfgGuideChartBuilder = new DependencyTrackingCfgGuideChartBuilder(Vector[CcgRule]( FA, BA, FC, BX, BC, FX, FC2, BX2, BC2, FX2, new UnaryCcgRule { val child: Cat = cat"N"; val parent: Cat = cat"(S/(S\N))" }, new UnaryCcgRule { val child: Cat = cat"S"; val parent: Cat = TopCat }, new UnaryCcgRule { val child: Cat = cat"N"; val parent: Cat = TopCat }), rootSet = Set(TopCat), additionalSupertagAdder = new NoOpAdditionalTagAdder), quick: Boolean = true) { type Word = String type 
Pos = String private[this] val N = cat"N".asInstanceOf[AtomCat] private[this] val S = cat"S".asInstanceOf[AtomCat] private[this] val CONJ = cat"conj".asInstanceOf[AtomCat] private[this] val initialPos2Cat: Map[Pos, Set[Cat]] = Map( N -> "NN NNS NNP PRP DT".splitWhitespace, S -> "MD VB VBZ VBG VBN VBD".splitWhitespace, CONJ -> "CC".splitWhitespace) .flatMap { case (c, ps) => ps.mapToVal(Set[Cat](c)) } // def apply(posTaggedSentences: Vector[Vector[(String, String)]], iterations: Int) = { // val intermediatePos2Cat: Map[String, Set[Cat]] = (1 to iterations).foldLeft(initialPos2Cat) { (pos2cat, i) => // time(f" iteration $i (${posTaggedSentences.size} to process)", posTaggedSentences.par.map(doIteration(_, guessCatsFromTriples, pos2cat, quick)).reduce(_ |+| _)) // //|+| Map("DT" -> Set(N / N), "VBD" -> Set(S \ N), "VBZ" -> Set(S \ N), "RB" -> Set(S \ S), "NNS" -> Set(S \ S), "IN" -> Set(S \ S)) // } // // val finalPos2Cat: Map[String, Set[Cat]] = time(" finalize", posTaggedSentences.par.map(doIteration(_, guessCatsFromConstituents, intermediatePos2Cat, false))).reduce(_ |+| _) // println("Final result of Bisk induction: "); finalPos2Cat.toVector.sortBy(_._1).mapt((p, cs) => f" $p%-3s -> ${cs.mkString(", ")}") foreach println // finalPos2Cat // } // // def doIteration(s: Vector[(String, String)], guessFunction: ((Vector[Word], Vector[Set[Cat]]) => Vector[Set[Cat]]), pos2cat: Map[Pos, Set[Cat]], quick: Boolean): Map[String, Set[Cat]] = { // val words = s.map(_._1) // val pos = s.map(_._2) // val initialSupertagSets: Vector[Set[Cat]] = pos.map(p => pos2cat.getOrElse(p, Set.empty[Cat])) // val guessedSupertagSets: Vector[Set[Cat]] = guessFunction(words, initialSupertagSets) // val usableSupertagSets: Vector[Set[Cat]] = if (quick) guessedSupertagSets else filterWithParsing(words, pos, guessedSupertagSets) // val usablePosSupertagMap = (pos zipSafe usableSupertagSets).groupByKey.mapVals(_.flatten.toSet) // usablePosSupertagMap // } //|+| Map("DT" -> Set(N / N), "VBD" -> Set(S \ N), "VBZ" -> Set(S \ N), "RB" -> Set(S \ S), "NNS" -> Set(S \ S), "IN" -> Set(S \ S)) def fudgToGraphViz(fs: FudgSentence): String = { def nodeString(n: Node): String = { s""""${n.name}"""" } def edgeString(edge: GflEdge, color: Option[String] = None): String = { nodeString(edge.parent) + " -> " + nodeString(edge.child) + edge.label.fold("") { c => f""" [ label = "${c}" ]""" } + color.fold("") { c => f""" [ color = "$c" ]""" } + """ [samehead=true, sametail=true]""" } val edges = fs.edges //.collect { case GflEdge(parent, child, _) if false || parent != child => println(s"$parent -> $child, ${parent != child}"); GflEdge(parent, child, None) } //val allnodes = edges.flatMap { case GflEdge(p, c, _) => Vector(p, c) } val sb = new StringBuilder sb ++= "digraph {\n" //sb ++= " graph [splines=ortho];\n" for ((n, i) <- fs.nodes.values.toVector.sortBy(s => s.name.rsplit("_").last.toInt).zipWithIndex) { sb ++= (" " + nodeString(n) + f""" [shape=box] [ pos="${i * 1.5},5!" 
]""" + "\n") } for (e <- edges) { sb ++= (" " + edgeString(e) + "\n") } sb ++= "}" sb.toString } def induce(posTaggedSentences: Vector[Vector[(String, String)]], iterations: Int) = { induceWithConstraints(posTaggedSentences.map(s => (s, none[FudgSentence])), iterations) } def induceWithConstraints(posTaggedSentencesWithConstraints: Vector[(Vector[(String, String)], Option[FudgSentence])], iterations: Int) = { val intermediatePos2Cat: Map[String, Set[Cat]] = (1 to iterations).foldLeft(initialPos2Cat) { (pos2cat, i) => time(f" iteration $i (${posTaggedSentencesWithConstraints.size} to process)", posTaggedSentencesWithConstraints.par.map { s => val words = s._1.map(_._1) val pos = s._1.map(_._2) // val fudgAnnotation = s._2 // fudgAnnotation.foreach { a => // if (!Fudg.isSemanticallyValid(a.edges, ignoreSelfLoops = true, ignoreMultipleParents = true, throwOnFalse = false)){ // println(words.mkString(" ")) // println(fudgToGraphViz(a)) // } // Fudg.isSemanticallyValid(a.edges, ignoreSelfLoops = true, ignoreMultipleParents = true, throwOnFalse = true) // } //println("bisk induction, sentence: " + posTaggedSentencesWithConstraints.map{_._1}) val initialSupertagSets: Vector[Set[Cat]] = pos.map(p => pos2cat.getOrElse(p, Set.empty[Cat])) //println("bisk induction, initialSupertagSets: " + initialSupertagSets) val guessedSupertagSets: Vector[Set[Cat]] = guessCatsFromTriples(words, initialSupertagSets) val usableSupertagSets: Vector[Set[Cat]] = /*if (quick)*/ guessedSupertagSets /*else filterWithParsing(words, pos, fudgAnnotation, guessedSupertagSets)*/ val usablePosSupertagMap = (pos zipSafe usableSupertagSets).groupByKey.mapVals(_.flatten.toSet) //println("Intermediate result of Bisk induction: "); usablePosSupertagMap.toVector.sortBy(_._1).mapt((p, cs) => f" $p%-3s -> ${cs.toVector.sorted.mkString(", ")}") foreach println usablePosSupertagMap }.reduce(_ |+| _)) } val finalPos2Cat: Map[String, Set[Cat]] = time(" finalize", posTaggedSentencesWithConstraints.par.map { s => val words = s._1.map(_._1) val pos = s._1.map(_._2) val fudgAnnotation = s._2 val initialSupertagSets: Vector[Set[Cat]] = pos.map(p => intermediatePos2Cat.getOrElse(p, Set.empty[Cat])) val guessedSupertagSets: Vector[Set[Cat]] = if (quick) initialSupertagSets else guessCatsFromConstituents(words, fudgAnnotation, initialSupertagSets) val usableSupertagSets: Vector[Set[Cat]] = filterWithParsing(words, pos, fudgAnnotation, guessedSupertagSets) val usablePosSupertagMap = (pos zipSafe usableSupertagSets).groupByKey.mapVals(_.flatten.toSet) usablePosSupertagMap }.reduce(_ |+| _)) println("Final result of Bisk induction: "); finalPos2Cat.toVector.sortBy(_._1).mapt((p, cs) => f" $p%-3s -> ${cs.toVector.sorted.mkString(", ")}") foreach println finalPos2Cat } def filterWithParsing(words: Vector[Word], pos: Vector[Pos], fudgAnnotation: Option[FudgSentence], guessedSupertagSets: Vector[Set[Cat]]): Vector[Set[Cat]] = { val guideChart = time1(f" build chart (${words.length}%3d)", { val gco = fudgAnnotation.flatMap(_ => gcb.buildFromSupertagSetSentence(words zipSafe guessedSupertagSets, fudgAnnotation, DummyCatTagDictionary)) // try with annotation constraints if(fudgAnnotation.isDefined && gco.nonEmpty) Console.err.println("failed to parse with annotation, attempting without...") val gco2 = gco.orElse(gcb.buildFromSupertagSetSentence(words zipSafe guessedSupertagSets, None, DummyCatTagDictionary)) // if it fails, try without constraints if(gco2.isEmpty) Console.err.println("failed to parse WITHOUT annotation") gco2 }) 
guideChart.map(_.supertagSets).getOrElse(Vector.fill(pos.length)(Set.empty)) } /** * guessFunction for normal iterations */ def guessCatsFromTriples(words: Vector[Word], initialSupertagSets: Vector[Set[Cat]]): Vector[Set[Cat]] = { val triples = (("" -> Set.empty[Cat]) +: (words zipSafe initialSupertagSets) :+ ("" -> Set.empty[Cat])).sliding3.toVector triples.mapt { case (aw -> as, bw -> bs, cw -> cs) => bw match { case "." | "?" | "!" => Set(cat"""(S\S)""") case "," => Set(cat"""(N\N)""") case ";" => Set(cat"""((S\S)/S)""") case _ => val newCatSet = bs ++ as.collect { case a: NonPuncCat => a \ a } ++ cs.collect { case c: NonPuncCat => c / c } ++ bs.flatCollect { case b: NonPuncCat if b != N => as.collect { case a: NonPuncCat if !a.isModifier => b \ a } } ++ bs.flatCollect { case b: NonPuncCat if b != N => cs.collect { case c: NonPuncCat if !c.isModifier => b / c } } cleanCatSet(newCatSet) } } } /** * guessFunction for final iteration */ def guessCatsFromConstituents(words: Vector[Word], fudgAnnotation: Option[FudgSentence], initialSupertagSets: Vector[Set[Cat]]): Vector[Set[Cat]] = { val gc: CfgGuideChart = gcb.buildUnpruned(words zipSafe initialSupertagSets, fudgAnnotation) val n = gc.length for (j <- (0 until n).toVector) yield { val thisCatSet: Set[Cat] = gc.supertagSets(j) val combWithRght: Iterable[Cat] = (for (k <- (j + 2) to n) yield thisCatSet.flatCollect { case b: NonPuncCat => gc(j + 1, k).keys.collect { case c: NonPuncCat => (b / c): Cat } }).flatten val combWithLeft: Iterable[Cat] = (for (i <- 0 to (j - 1)) yield thisCatSet.flatCollect { case b: NonPuncCat => gc(i, j).keys.collect { case a: NonPuncCat => (b \ a): Cat } }).flatten cleanCatSet(thisCatSet ++ combWithRght ++ combWithLeft) } } def cleanCatSet(newCatSet: Set[Cat]): Set[Cat] = { newCatSet.filter { // #1 case N || N => true case (N || N) || (N || N) => true case N || x => false case _ => true }.filter { // #2 case ModMod(_) => true case Mod(_) => true case ModModAndArgs(_, args) => args.forall(Set(S, N)) case ModAndArgs(_, args) => args.forall(Set(S, N)) // blocks (N\N)/(S/N) :-( case HeadAndArgs(S, args) => args.forall(Set(S, N)) case _ => true }.filter { // #3 case S || S => true case S || a => a == N case _ => true }.filter { // #4 case ModMod(_) => true case (a || b) || (c || d) if a == b && b == c && c == d => false case _ => true }.filter { // #5 case HeadAndArgs(_, args) => args.size <= 2 }.flatMap { // #6 case (S / N) \ N => Some((S \ N) / N) case c => Some(c) } } // object HeadAndArgs { def unapply(c: Cat): Option[(Cat, Vector[Cat])] = toVec(c) match { case Vector(h, args @ _*) => Some(h -> args.toVector) } def toVec(c: Cat): Vector[Cat] = c match { case x || y => toVec(x) :+ y case a => Vector(a) } } def argsOf(Head: Cat, cat: Cat): Option[Vector[Cat]] = cat match { case Head => Some(Vector.empty) case functor || arg => argsOf(Head, functor).map(_ :+ arg) case _ => None } object ModAndArgs { def unapply(cat: Cat): Option[(Cat, Vector[Cat])] = cat match { case Mod(mod) => Some((mod, Vector.empty)) case functor || arg => unapply(functor).map { case (mod, args) => (mod, args :+ arg) } case _ => None } } object ModModAndArgs { def unapply(cat: Cat): Option[(Cat, Vector[Cat])] = cat match { case ModMod(mod) => Some((mod, Vector.empty)) case functor || arg => unapply(functor).map { case (mod, args) => (mod, args :+ arg) } case _ => None } } trait ModExtractor { def unapply(cat: Cat): Option[Cat] } object Mod extends ModExtractor { def unapply(cat: Cat) = cat match { case a || b if a == b => Some(cat); case _ => 
None } } //object ModMod extends ModExtractor { def unapply(cat: Cat) = cat match { case (a || b) || (c || d) if a == b && b == c && c == d => Some(cat); case _ => None } } object ModMod extends ModExtractor { def unapply(cat: Cat) = cat match { case Mod(Mod(_) || Mod(_)) => Some(cat); case _ => None } } }
apache-2.0
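The Scala record above grows its POS-to-category map by guessing CCG supertags for each word from the categories of its immediate neighbours (guessCatsFromTriples). As a rough illustration only, here is that triple rule re-rendered in Python with categories as plain strings; it is not the repo's implementation, and it skips the isModifier checks and the cleanCatSet filtering that the original applies afterwards.

# Loose sketch of the neighbour-based supertag guessing rule; categories are plain strings here.
def guess_for_word(prev_cats, word, own_cats, next_cats):
    if word in {".", "?", "!"}:
        return {r"S\S"}
    if word == ",":
        return {r"N\N"}
    if word == ";":
        return {r"(S\S)/S"}
    guesses = set(own_cats)
    guesses |= {f"{a}\\{a}" for a in prev_cats}    # post-modifiers of the left neighbour
    guesses |= {f"{c}/{c}" for c in next_cats}     # pre-modifiers of the right neighbour
    guesses |= {f"{b}\\{a}" for b in own_cats if b != "N" for a in prev_cats}  # take the left neighbour as argument
    guesses |= {f"{b}/{c}" for b in own_cats if b != "N" for c in next_cats}   # take the right neighbour as argument
    return guesses

# A word currently guessed as S, sitting between two N neighbours, also picks up
# N\N, N/N, S\N and S/N as candidate supertags:
print(guess_for_word({"N"}, "eats", {"S"}, {"N"}))
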
googlearchive/appengine-nodejs
lib/net/proto2/contrib/WireSerializer.js
19125
// Copyright 2014 Google Inc. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. /** * @fileoverview Protocol Buffer 2 Serializer which serializes and deserializes * messages using the wire format. Note that this serializer requires protocol * buffer reflection, which carries some overhead. * @supported any browser with DataView implemented. For now Chrome9, FF15, IE10 * * @see https://developers.google.com/protocol-buffers/docs/encoding */ goog.provide('net.proto2.contrib.WireSerializer'); goog.require('goog.asserts'); goog.require('goog.math.Long'); goog.require('goog.proto2.Message'); goog.require('goog.proto2.Serializer'); /** * Wire format serializer. * * @constructor * @extends {goog.proto2.Serializer} */ net.proto2.contrib.WireSerializer = function() { /** * This array is where proto bytes go during serialization. * It must be reset for each serialization. * @type {!Array.<number>} * @private */ this.buffer_ = []; /** * Scratch workspace to avoid allocations during serialization. * @type {{value: number, length: number}} * @private */ this.scratchTag32_ = {value: 0, length: 0}; /** * Scratch workspace to avoid allocations during serialization. * @type {{value: !goog.math.Long, length: number}} * @private */ this.scratchTag64_ = {value: goog.math.Long.ZERO, length: 0}; /** * Scratch data view for coding/decoding little-endian numbers. * @type {!DataView} * @private */ this.dataView_ = new DataView(new ArrayBuffer(8)); }; goog.inherits(net.proto2.contrib.WireSerializer, goog.proto2.Serializer); /** * @return {!Array.<number>} The serialized form of the message. * @override */ net.proto2.contrib.WireSerializer.prototype.serialize = function(message) { if (message == null) { return []; } this.buffer_ = []; var descriptor = message.getDescriptor(); var fields = descriptor.getFields(); // Add the known fields. for (var i = 0; i < fields.length; i++) { var field = fields[i]; if (!message.has(field)) { continue; } if (field.isRepeated()) { for (var j = 0, n = message.countOf(field); j < n; j++) { var val = message.get(field, j); this.getSerializedValue(field, val); } } else { this.getSerializedValue(field, message.get(field)); } } return this.buffer_; }; /** * Append the serialized field tag to our serialization buffer. * @param {goog.proto2.FieldDescriptor} field The field to serialize. * @return {boolean} Whether the field tag was serialized. 
* @private */ net.proto2.contrib.WireSerializer.prototype.serializeFieldTag_ = function(field) { var wireType = 0; switch (field.getFieldType()) { default: return false; case goog.proto2.Message.FieldType.SINT32: case goog.proto2.Message.FieldType.SINT64: case goog.proto2.Message.FieldType.BOOL: case goog.proto2.Message.FieldType.INT64: case goog.proto2.Message.FieldType.ENUM: case goog.proto2.Message.FieldType.INT32: case goog.proto2.Message.FieldType.UINT32: case goog.proto2.Message.FieldType.UINT64: wireType = 0; break; case goog.proto2.Message.FieldType.FIXED64: case goog.proto2.Message.FieldType.SFIXED64: case goog.proto2.Message.FieldType.DOUBLE: wireType = 1; break; case goog.proto2.Message.FieldType.STRING: case goog.proto2.Message.FieldType.BYTES: case goog.proto2.Message.FieldType.MESSAGE: wireType = 2; break; case goog.proto2.Message.FieldType.GROUP: wireType = 3; break; case goog.proto2.Message.FieldType.FIXED32: case goog.proto2.Message.FieldType.SFIXED32: case goog.proto2.Message.FieldType.FLOAT: wireType = 5; break; } this.serializeVarint_((field.getTag() << 3) | wireType); return true; }; /** @override */ net.proto2.contrib.WireSerializer.prototype.getSerializedValue = function(field, value) { if (!this.serializeFieldTag_(field)) { return false; } switch (field.getFieldType()) { default: throw new Error('Unknown field type ' + field.getFieldType()); case goog.proto2.Message.FieldType.SINT32: this.serializeVarint_(this.zigZagEncode(/** @type {number} */ (value))); break; case goog.proto2.Message.FieldType.SINT64: this.serializeVarint64_(this.zigZagEncode64_( goog.math.Long.fromString(/** @type {string} */(value)))); break; case goog.proto2.Message.FieldType.BOOL: this.serializeVarint_(value ? 1 : 0); break; case goog.proto2.Message.FieldType.INT64: this.serializeVarint64_( goog.math.Long.fromString(/** @type {string} */(value))); break; case goog.proto2.Message.FieldType.ENUM: case goog.proto2.Message.FieldType.INT32: case goog.proto2.Message.FieldType.UINT32: case goog.proto2.Message.FieldType.UINT64: this.serializeVarint_(/** @type {number} */ (value)); break; case goog.proto2.Message.FieldType.FIXED64: case goog.proto2.Message.FieldType.SFIXED64: this.serializeFixed_(/** @type {number} */ (value), 8); break; case goog.proto2.Message.FieldType.DOUBLE: this.serializeDouble_(/** @type {number} */ (value)); break; case goog.proto2.Message.FieldType.STRING: case goog.proto2.Message.FieldType.BYTES: this.serializeString(value); break; case goog.proto2.Message.FieldType.GROUP: var serialized = new net.proto2.contrib.WireSerializer().serialize( /** @type {goog.proto2.Message} */ (value)); Array.prototype.push.apply(this.buffer_, serialized); this.serializeVarint_((field.getTag() << 3) | 4); break; case goog.proto2.Message.FieldType.MESSAGE: var serialized = new net.proto2.contrib.WireSerializer().serialize( /** @type {goog.proto2.Message} */ (value)); this.serializeVarint_(serialized.length); Array.prototype.push.apply(this.buffer_, serialized); break; case goog.proto2.Message.FieldType.FIXED32: case goog.proto2.Message.FieldType.SFIXED32: this.serializeFixed_(/** @type {number} */ (value), 4); break; case goog.proto2.Message.FieldType.FLOAT: this.serializeFloat_(/** @type {number} */ (value)); break; } // To avoid allocations, this method serializes into a pre-existing buffer, // rather than serializing into a new value object. 
return null; }; /** @override */ net.proto2.contrib.WireSerializer.prototype.deserializeTo = function(message, buffer) { if (buffer == null) { // Since value double-equals null, it may be either null or undefined. // Ensure we return the same one, since they have different meanings. return buffer; } var descriptor = message.getDescriptor(); while (true) { var tag = this.parseUnsignedVarInt_(buffer); var tagValue = tag.value; var tagLength = tag.length; var index = tagValue >> 3; // For future reference, the wiretype is tagValue & 0x7. var value = {value: undefined, length: 0}; var field = descriptor.findFieldByTag(index); if (field) { value = this.getDeserializedValue(field, buffer.subarray(tagLength)); if (value && value.value !== null) { if (field.isRepeated()) { message.add(field, value.value); } else { message.set(field, value.value); } } } if (buffer.length < tagLength + value.length) { break; } buffer = buffer.subarray(tagLength + value.length); } }; /** * Deserializes a message from the expected format and places the * data in the message. The message must correspond to a group. Moreover * the buffer must be positioned after the initial START_GROUP tag for the * group. The message will be terminated by the first END_GROUP tag at the * same nesting level. It is the responsibility of the caller to validate that * its field index matches the one in the opening START_GROUP tag. Since groups * are not length-delimited, this method returns the length of the parsed * data excluding the END_GROUP tag. * * @param {goog.proto2.Message} message The message in which to * place the information. * @param {*} buffer The data of the message. * @return {number} the length of the parsed message, excluding the closing tag. * @protected */ net.proto2.contrib.WireSerializer.prototype.deserializeGroupTo = function(message, buffer) { if (buffer == null) { return 0; } var descriptor = message.getDescriptor(); var parsedLength = 0; while (true) { var tag = this.parseUnsignedVarInt_(buffer); var tagValue = tag.value; var tagLength = tag.length; var index = tagValue >> 3; var wiretype = tagValue & 7; if (wiretype == 4) { // Got an end group. 
break; } parsedLength += tagLength; var value = {value: undefined, length: 0}; var field = descriptor.findFieldByTag(index); if (field) { value = this.getDeserializedValue(field, buffer.subarray(tagLength)); if (value && value.value !== null) { if (field.isRepeated()) { message.add(field, value.value); } else { message.set(field, value.value); } } } parsedLength += value.length; if (buffer.length < tagLength + value.length) { break; } buffer = buffer.subarray(tagLength + value.length); } return parsedLength; }; /** * @override */ net.proto2.contrib.WireSerializer.prototype.getDeserializedValue = function(field, buffer) { var value = null; var t = field.getFieldType(); var varInt = this.parseVarInt64_(buffer); var length = varInt.length; switch (t) { case goog.proto2.Message.FieldType.SINT32: value = this.zigZagDecode_(varInt.value.toInt()); break; case goog.proto2.Message.FieldType.SINT64: value = this.zigZagDecode64_(varInt.value).toString(); break; case goog.proto2.Message.FieldType.BOOL: value = varInt.value.equals(goog.math.Long.ONE); break; case goog.proto2.Message.FieldType.INT64: case goog.proto2.Message.FieldType.UINT64: value = varInt.value.toString(); break; case goog.proto2.Message.FieldType.ENUM: case goog.proto2.Message.FieldType.INT32: case goog.proto2.Message.FieldType.UINT32: value = varInt.value.toInt(); break; case goog.proto2.Message.FieldType.FIXED64: case goog.proto2.Message.FieldType.SFIXED64: value = this.parseFixed_(buffer.subarray(0, 8)).toString(); length = 8; break; case goog.proto2.Message.FieldType.DOUBLE: value = this.parseDouble_(buffer.subarray(0, 8)); length = 8; break; case goog.proto2.Message.FieldType.STRING: var strBuffer = buffer.subarray(varInt.length, varInt.length + varInt.value.toInt()); value = String.fromCharCode.apply(null, strBuffer); length = varInt.length + varInt.value.toInt(); break; case goog.proto2.Message.FieldType.BYTES: var strBuffer = buffer.subarray(varInt.length, varInt.length + varInt.value.toInt()); value = String.fromCharCode.apply(null, strBuffer); length = varInt.length + varInt.value.toInt(); break; case goog.proto2.Message.FieldType.GROUP: value = field.getFieldMessageType().createMessageInstance(); var groupLength = this.deserializeGroupTo(value, buffer); var next = buffer.subarray(groupLength); var closingTag = this.parseVarInt64_(next); var expected = (field.getTag() << 3) | 4; goog.asserts.assert(closingTag.value.toInt() == expected, 'Error deserializing group'); length = groupLength + closingTag.length; break; case goog.proto2.Message.FieldType.MESSAGE: length = varInt.length + varInt.value.toInt(); var data = buffer.subarray(varInt.length, length); value = field.getFieldMessageType().createMessageInstance(); this.deserializeTo(value, data); break; case goog.proto2.Message.FieldType.FIXED32: case goog.proto2.Message.FieldType.SFIXED32: value = this.parseFixed_(buffer.subarray(0, 4)); length = 4; break; case goog.proto2.Message.FieldType.FLOAT: value = this.parseFloat_(buffer.subarray(0, 4)); length = 4; break; } return {value: value, length: length}; }; /** * @param {*} value String that needs to be converted to bytes. */ net.proto2.contrib.WireSerializer.prototype.serializeString = function(value) { if (goog.isDefAndNotNull(value)) { // Serialize length. this.serializeVarint_(value.length); for (var i = 0; i < value.length; i++) { this.buffer_.push(value.charCodeAt(i)); } } }; /** * @param {*} buffer to parse as String. 
* @return {{value: string, length: number}} */ net.proto2.contrib.WireSerializer.prototype.parseString = function(buffer) { var length = this.parseUnsignedVarInt_(buffer); var strBuffer = buffer.subarray(length.length, length.length + length.value); return { value: String.fromCharCode.apply(null, strBuffer), length: length.length + length.value }; }; /** * @param {number} number signed number that needs to be converted to unsigned. * @return {number} */ net.proto2.contrib.WireSerializer.prototype.zigZagEncode = function(number) { var sign = number >>> 31; return (number << 1) ^ -sign; }; /** * @param {number} number Unsigned number in zigzag format that needs to be converted to signed. * @return {number} signed. * @private */ net.proto2.contrib.WireSerializer.prototype.zigZagDecode_ = function(number) { return (number >>> 1) ^ -(number & 1); }; /** * @param {!goog.math.Long} number signed number that needs to be converted to * unsigned. * @return {!goog.math.Long} * @private */ net.proto2.contrib.WireSerializer.prototype.zigZagEncode64_ = function(number) { var sign = number.shiftRightUnsigned(63); return number.shiftLeft(1).xor(sign.negate()); }; /** * @param {!goog.math.Long} number Unsigned number in zigzag format that needs to be converted to signed. * @return {!goog.math.Long} * @private */ net.proto2.contrib.WireSerializer.prototype.zigZagDecode64_ = function(number) { return number.shiftRightUnsigned(1).xor( number.and(goog.math.Long.ONE).negate()); }; /** * Serialize the given number as a varint into our buffer. * @param {number} number that needs to be converted to varint. * @private */ net.proto2.contrib.WireSerializer.prototype.serializeVarint_ = function(number) { do { var chunk = number & 0x7F; number = number >>> 7; if (number > 0) { chunk = chunk | 0x80; } this.buffer_.push(chunk); } while (number > 0); }; /** * Serialize the given 64-bit number as a varint into our buffer. * @param {!goog.math.Long} number that needs to be encoded as varint. * @private */ net.proto2.contrib.WireSerializer.prototype.serializeVarint64_ = function(number) { var mask = goog.math.Long.fromInt(0x7F); do { var chunk = number.and(mask).toInt(); number = number.shiftRightUnsigned(7); if (number.greaterThan(goog.math.Long.ZERO)) { chunk = chunk | 0x80; } this.buffer_.push(chunk); } while (number.greaterThan(goog.math.Long.ZERO)); }; /** * @param {*} buffer from which field number and type needs to be extracted. * @return {{value: !goog.math.Long, length: number}} * @private */ net.proto2.contrib.WireSerializer.prototype.parseVarInt64_ = function(buffer) { var valueInfo = this.scratchTag64_; var number = goog.math.Long.fromNumber(0); var i = 0; for (; i < buffer.length; i++) { var bits = goog.math.Long.fromInt(buffer[i] & 0x7F).shiftLeft(i * 7); number = number.or(bits); if ((buffer[i] & 0x80) == 0) { break; } } valueInfo.value = number; valueInfo.length = i + 1; return valueInfo; }; /** * A special case parser for unsigned 32-bit varints, which can fit comfortably * in 32 bits during decoding. * @param {*} buffer from which field number and type needs to be extracted. 
* @return {{value: number, length: number}} * @private */ net.proto2.contrib.WireSerializer.prototype.parseUnsignedVarInt_ = function(buffer) { var valueInfo = this.scratchTag32_; var result = 0; var i = 0; for (; i < buffer.length; i++) { result = result | ((buffer[i] & 0x7F) << (i * 7)); if ((buffer[i] & 0x80) == 0) { break; } } valueInfo.value = result; valueInfo.length = i + 1; return valueInfo; }; /** * @param {number} number that needs to be converted to little endian order. * @param {number} size of the result array (4 = 32bit, 8 = 64bit). * @private */ net.proto2.contrib.WireSerializer.prototype.serializeFixed_ = function(number, size) { for (var i = 0; i < size; i++) { this.buffer_.push(number & 0xFF); number = number >>> 8; } }; /** * @param {*} buffer from which field number and type needs * to be extracted. * @return {number} * @private */ net.proto2.contrib.WireSerializer.prototype.parseFixed_ = function(buffer) { var number = 0; for (var i = 0; i < buffer.length; i++) { number = number | (buffer[i] << (i * 8)); } return number; }; /** * @param {*} buffer from which double needs to be extracted. * @return {number} * @private */ net.proto2.contrib.WireSerializer.prototype.parseDouble_ = function(buffer) { for (var i = 0; i < 8; i++) { this.dataView_.setUint8(i, buffer[i]); } return this.dataView_.getFloat64(0, true); // little-endian }; /** * @param {*} buffer from which float needs to be extracted. * @return {number} * @private */ net.proto2.contrib.WireSerializer.prototype.parseFloat_ = function(buffer) { for (var i = 0; i < 4; i++) { this.dataView_.setUint8(i, buffer[i]); } return this.dataView_.getFloat32(0, true); // little-endian }; /** * @param {number} number to be serialized to 8 bytes. * @private */ net.proto2.contrib.WireSerializer.prototype.serializeDouble_ = function(number) { this.dataView_.setFloat64(0, number, true); // little-endian for (var i = 0; i < 8; i++) { this.buffer_.push(this.dataView_.getUint8(i)); } }; /** * @param {number} number to be serialized to 4 bytes. * @private */ net.proto2.contrib.WireSerializer.prototype.serializeFloat_ = function(number) { this.dataView_.setFloat32(0, number, true); // little-endian for (var i = 0; i < 4; i++) { this.buffer_.push(this.dataView_.getUint8(i)); } };
apache-2.0
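The JavaScript serializer above rests on two pieces of arithmetic from the protocol buffer wire format: base-128 varints and ZigZag encoding of signed values, with each field introduced by a tag of (field_number << 3) | wire_type. A minimal Python sketch of that arithmetic, describing the standard wire format rather than code taken from the record:

def zigzag_encode(n: int) -> int:
    # Map a signed value onto an unsigned one (64-bit width assumed).
    return (n << 1) ^ (n >> 63)

def encode_varint(n: int) -> bytes:
    # Base-128 varint: 7 payload bits per byte, MSB set while more bytes follow.
    out = bytearray()
    while True:
        chunk = n & 0x7F
        n >>= 7
        if n:
            out.append(chunk | 0x80)
        else:
            out.append(chunk)
            return bytes(out)

assert encode_varint((1 << 3) | 0) == b"\x08"   # tag for field 1, varint wire type
assert encode_varint(300) == b"\xac\x02"
assert zigzag_encode(1) == 2 and zigzag_encode(-1) == 1
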
Glamdring/welshare
src/main/java/com/welshare/service/impl/GoogleTranslateService.java
1318
package com.welshare.service.impl; import org.codehaus.jackson.JsonNode; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Value; import org.springframework.stereotype.Service; import org.springframework.web.client.RestTemplate; @Service public class GoogleTranslateService { private static final Logger logger = LoggerFactory.getLogger(GoogleTranslateService.class); private static final String GOOGLE_TRANSLATE_URL = "https://www.googleapis.com/language/translate/v2?key={key}&q={q}&target={target}"; @Value("${google.translate.key}") private String key; private final RestTemplate restTemplate = new RestTemplate(); public String translate(String text, String targetLanguage) { // not specifying source - google will automatically try to resolve it JsonNode node = restTemplate.getForObject(GOOGLE_TRANSLATE_URL, JsonNode.class, key, text, targetLanguage); try { return node.get("data").get("translations").get(0).get("translatedText").asText(); } catch (Exception ex) { logger.warn("Translation problem: " + ex.getMessage()); // if anything goes wrong here (missing data from the result), return the original text return text; } } }
apache-2.0
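For comparison, a rough Python equivalent of the call GoogleTranslateService makes above, using the requests library. The endpoint and the key, q and target parameters come from the record; YOUR_API_KEY is a placeholder, and the fallback mirrors the Java code's behaviour of returning the original text when the response cannot be read.

import requests

GOOGLE_TRANSLATE_URL = "https://www.googleapis.com/language/translate/v2"

def translate(text: str, target: str, key: str = "YOUR_API_KEY") -> str:
    # Source language is omitted so the service detects it, as in the Java version.
    resp = requests.get(GOOGLE_TRANSLATE_URL, params={"key": key, "q": text, "target": target})
    try:
        return resp.json()["data"]["translations"][0]["translatedText"]
    except (KeyError, IndexError, ValueError):
        return text
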
solusi-integral/hrm
HRM/application/controllers/Welcome.php
680
<?php defined('BASEPATH') OR exit('No direct script access allowed'); class Welcome extends CI_Controller { /** * Index Page for this controller. * * Maps to the following URL * http://example.com/index.php/welcome * - or - * http://example.com/index.php/welcome/index * - or - * Since this controller is set as the default controller in * config/routes.php, it's displayed at http://example.com/ * * So any other public methods not prefixed with an underscore will * map to /index.php/welcome/<method_name> * @see http://codeigniter.com/user_guide/general/urls.html */ public function index() { //$this->load->view('welcome_message'); } }
apache-2.0
danielkza/h2scala
core/src/test/scala/net/danielkza/http2/stream/FrameDecoderStageTest.scala
1540
package net.danielkza.http2.stream import net.danielkza.http2.api.Header import scala.collection.immutable import akka.stream.scaladsl._ import akka.stream.testkit.scaladsl._ import akka.util.ByteString import net.danielkza.http2.{AkkaStreamsTest, TestHelpers} import net.danielkza.http2.protocol.{Frame, HTTP2Error} import net.danielkza.http2.protocol.coders.FrameCoder import net.danielkza.http2.hpack.coders.HeaderBlockCoder class FrameDecoderStageTest extends AkkaStreamsTest with TestHelpers { import Frame._ val headerCoder = new HeaderBlockCoder val frameCoder = new FrameCoder val headers = immutable.Seq( ":method" -> "GET", ":path" -> "/", "host" -> "example.com" ).map(t => Header.plain(t._1, t._2)) val headerBlock = headerCoder.encode(headers).getOrThrow() val okFrames = immutable.Seq( Headers(1, None, headerBlock, endHeaders=true), Data(1, "Line 1\n"), Data(1, "Line 2\n\n", padding=Some("Padding"), endStream=true), GoAway(1) ) val framesBytes = okFrames.map(frameCoder.encode(_).getOrThrow()) "FrameDecoderStage" should { val flow = Flow[ByteString].transform(() => new FrameDecoderStage(false)) val (pub, sub) = TestSource.probe[ByteString] .via(flow) .toMat(TestSink.probe[Frame])(Keep.both) .run() "decode frames correctly" in { okFrames.zip(framesBytes).foreach { case (frame, bytes) => sub.request(1) pub.sendNext(bytes) sub.expectNextOrError() must_== Right(frame) } ok } } }
apache-2.0
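The frames encoded and then decoded in the test above follow the HTTP/2 framing layout of RFC 7540 section 4.1: a 9-byte header carrying a 24-bit payload length, an 8-bit type, an 8-bit flags field and a 31-bit stream identifier, followed by the payload. A small Python sketch of packing such a header, independent of the repo's FrameCoder:

import struct

def frame_header(length: int, frame_type: int, flags: int, stream_id: int) -> bytes:
    assert length < 2 ** 24 and stream_id < 2 ** 31
    # 3-byte big-endian length, then type, flags and the 4-byte stream id.
    return struct.pack(">I", length)[1:] + struct.pack(">BBI", frame_type, flags, stream_id)

# DATA frame (type 0x0) with END_STREAM (flag 0x1) on stream 1, carrying two payload bytes:
assert frame_header(2, 0x0, 0x1, 1) == b"\x00\x00\x02\x00\x01\x00\x00\x00\x01"
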
feorean/JumpOnCloud
WebContent/js/angular-sanitize.js
26972
/** * @license AngularJS v1.5.9-build.4970+sha.1660ddd * (c) 2010-2016 Google, Inc. http://angularjs.org * License: MIT */ (function(window, angular) {'use strict'; /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Any commits to this file should be reviewed with security in mind. * * Changes to this file can potentially create security vulnerabilities. * * An approval from 2 Core members with history of modifying * * this file is required. * * * * Does the change somehow allow for arbitrary javascript to be executed? * * Or allows for someone to change the prototype of built-in objects? * * Or gives undesired access to variables likes document or window? * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ var $sanitizeMinErr = angular.$$minErr('$sanitize'); var bind; var extend; var forEach; var isDefined; var lowercase; var noop; var htmlParser; var htmlSanitizeWriter; /** * @ngdoc module * @name ngSanitize * @description * * # ngSanitize * * The `ngSanitize` module provides functionality to sanitize HTML. * * * <div doc-module-components="ngSanitize"></div> * * See {@link ngSanitize.$sanitize `$sanitize`} for usage. */ /** * @ngdoc service * @name $sanitize * @kind function * * @description * Sanitizes an html string by stripping all potentially dangerous tokens. * * The input is sanitized by parsing the HTML into tokens. All safe tokens (from a whitelist) are * then serialized back to properly escaped html string. This means that no unsafe input can make * it into the returned string. * * The whitelist for URL sanitization of attribute values is configured using the functions * `aHrefSanitizationWhitelist` and `imgSrcSanitizationWhitelist` of {@link ng.$compileProvider * `$compileProvider`}. * * The input may also contain SVG markup if this is enabled via {@link $sanitizeProvider}. * * @param {string} html HTML input. * @returns {string} Sanitized HTML. 
* * @example <example module="sanitizeExample" deps="angular-sanitize.js" name="sanitize-service"> <file name="index.html"> <script> angular.module('sanitizeExample', ['ngSanitize']) .controller('ExampleController', ['$scope', '$sce', function($scope, $sce) { $scope.snippet = '<p style="color:blue">an html\n' + '<em onmouseover="this.textContent=\'PWN3D!\'">click here</em>\n' + 'snippet</p>'; $scope.deliberatelyTrustDangerousSnippet = function() { return $sce.trustAsHtml($scope.snippet); }; }]); </script> <div ng-controller="ExampleController"> Snippet: <textarea ng-model="snippet" cols="60" rows="3"></textarea> <table> <tr> <td>Directive</td> <td>How</td> <td>Source</td> <td>Rendered</td> </tr> <tr id="bind-html-with-sanitize"> <td>ng-bind-html</td> <td>Automatically uses $sanitize</td> <td><pre>&lt;div ng-bind-html="snippet"&gt;<br/>&lt;/div&gt;</pre></td> <td><div ng-bind-html="snippet"></div></td> </tr> <tr id="bind-html-with-trust"> <td>ng-bind-html</td> <td>Bypass $sanitize by explicitly trusting the dangerous value</td> <td> <pre>&lt;div ng-bind-html="deliberatelyTrustDangerousSnippet()"&gt; &lt;/div&gt;</pre> </td> <td><div ng-bind-html="deliberatelyTrustDangerousSnippet()"></div></td> </tr> <tr id="bind-default"> <td>ng-bind</td> <td>Automatically escapes</td> <td><pre>&lt;div ng-bind="snippet"&gt;<br/>&lt;/div&gt;</pre></td> <td><div ng-bind="snippet"></div></td> </tr> </table> </div> </file> <file name="protractor.js" type="protractor"> it('should sanitize the html snippet by default', function() { expect(element(by.css('#bind-html-with-sanitize div')).getAttribute('innerHTML')). toBe('<p>an html\n<em>click here</em>\nsnippet</p>'); }); it('should inline raw snippet if bound to a trusted value', function() { expect(element(by.css('#bind-html-with-trust div')).getAttribute('innerHTML')). toBe("<p style=\"color:blue\">an html\n" + "<em onmouseover=\"this.textContent='PWN3D!'\">click here</em>\n" + "snippet</p>"); }); it('should escape snippet without any filter', function() { expect(element(by.css('#bind-default div')).getAttribute('innerHTML')). toBe("&lt;p style=\"color:blue\"&gt;an html\n" + "&lt;em onmouseover=\"this.textContent='PWN3D!'\"&gt;click here&lt;/em&gt;\n" + "snippet&lt;/p&gt;"); }); it('should update', function() { element(by.model('snippet')).clear(); element(by.model('snippet')).sendKeys('new <b onclick="alert(1)">text</b>'); expect(element(by.css('#bind-html-with-sanitize div')).getAttribute('innerHTML')). toBe('new <b>text</b>'); expect(element(by.css('#bind-html-with-trust div')).getAttribute('innerHTML')).toBe( 'new <b onclick="alert(1)">text</b>'); expect(element(by.css('#bind-default div')).getAttribute('innerHTML')).toBe( "new &lt;b onclick=\"alert(1)\"&gt;text&lt;/b&gt;"); }); </file> </example> */ /** * @ngdoc provider * @name $sanitizeProvider * * @description * Creates and configures {@link $sanitize} instance. */ function $SanitizeProvider() { var svgEnabled = false; this.$get = ['$$sanitizeUri', function($$sanitizeUri) { if (svgEnabled) { extend(validElements, svgElements); } return function(html) { var buf = []; htmlParser(html, htmlSanitizeWriter(buf, function(uri, isImage) { return !/^unsafe:/.test($$sanitizeUri(uri, isImage)); })); return buf.join(''); }; }]; /** * @ngdoc method * @name $sanitizeProvider#enableSvg * @kind function * * @description * Enables a subset of svg to be supported by the sanitizer. 
* * <div class="alert alert-warning"> * <p>By enabling this setting without taking other precautions, you might expose your * application to click-hijacking attacks. In these attacks, sanitized svg elements could be positioned * outside of the containing element and be rendered over other elements on the page (e.g. a login * link). Such behavior can then result in phishing incidents.</p> * * <p>To protect against these, explicitly setup `overflow: hidden` css rule for all potential svg * tags within the sanitized content:</p> * * <br> * * <pre><code> * .rootOfTheIncludedContent svg { * overflow: hidden !important; * } * </code></pre> * </div> * * @param {boolean=} flag Enable or disable SVG support in the sanitizer. * @returns {boolean|ng.$sanitizeProvider} Returns the currently configured value if called * without an argument or self for chaining otherwise. */ this.enableSvg = function(enableSvg) { if (isDefined(enableSvg)) { svgEnabled = enableSvg; return this; } else { return svgEnabled; } }; ////////////////////////////////////////////////////////////////////////////////////////////////// // Private stuff ////////////////////////////////////////////////////////////////////////////////////////////////// bind = angular.bind; extend = angular.extend; forEach = angular.forEach; isDefined = angular.isDefined; lowercase = angular.lowercase; noop = angular.noop; htmlParser = htmlParserImpl; htmlSanitizeWriter = htmlSanitizeWriterImpl; // Regular Expressions for parsing tags and attributes var SURROGATE_PAIR_REGEXP = /[\uD800-\uDBFF][\uDC00-\uDFFF]/g, // Match everything outside of normal chars and " (quote character) NON_ALPHANUMERIC_REGEXP = /([^\#-~ |!])/g; // Good source of info about elements and attributes // http://dev.w3.org/html5/spec/Overview.html#semantics // http://simon.html5.org/html-elements // Safe Void Elements - HTML5 // http://dev.w3.org/html5/spec/Overview.html#void-elements var voidElements = toMap("area,br,col,hr,img,wbr"); // Elements that you can, intentionally, leave open (and which close themselves) // http://dev.w3.org/html5/spec/Overview.html#optional-tags var optionalEndTagBlockElements = toMap("colgroup,dd,dt,li,p,tbody,td,tfoot,th,thead,tr"), optionalEndTagInlineElements = toMap("rp,rt"), optionalEndTagElements = extend({}, optionalEndTagInlineElements, optionalEndTagBlockElements); // Safe Block Elements - HTML5 var blockElements = extend({}, optionalEndTagBlockElements, toMap("address,article," + "aside,blockquote,caption,center,del,dir,div,dl,figure,figcaption,footer,h1,h2,h3,h4,h5," + "h6,header,hgroup,hr,ins,map,menu,nav,ol,pre,section,table,ul")); // Inline Elements - HTML5 var inlineElements = extend({}, optionalEndTagInlineElements, toMap("a,abbr,acronym,b," + "bdi,bdo,big,br,cite,code,del,dfn,em,font,i,img,ins,kbd,label,map,mark,q,ruby,rp,rt,s," + "samp,small,span,strike,strong,sub,sup,time,tt,u,var")); // SVG Elements // https://wiki.whatwg.org/wiki/Sanitization_rules#svg_Elements // Note: the elements animate,animateColor,animateMotion,animateTransform,set are intentionally omitted. // They can potentially allow for arbitrary javascript to be executed. 
See #11290 var svgElements = toMap("circle,defs,desc,ellipse,font-face,font-face-name,font-face-src,g,glyph," + "hkern,image,linearGradient,line,marker,metadata,missing-glyph,mpath,path,polygon,polyline," + "radialGradient,rect,stop,svg,switch,text,title,tspan"); // Blocked Elements (will be stripped) var blockedElements = toMap("script,style"); var validElements = extend({}, voidElements, blockElements, inlineElements, optionalEndTagElements); //Attributes that have href and hence need to be sanitized var uriAttrs = toMap("background,cite,href,longdesc,src,xlink:href"); var htmlAttrs = toMap('abbr,align,alt,axis,bgcolor,border,cellpadding,cellspacing,class,clear,' + 'color,cols,colspan,compact,coords,dir,face,headers,height,hreflang,hspace,' + 'ismap,lang,language,nohref,nowrap,rel,rev,rows,rowspan,rules,' + 'scope,scrolling,shape,size,span,start,summary,tabindex,target,title,type,' + 'valign,value,vspace,width'); // SVG attributes (without "id" and "name" attributes) // https://wiki.whatwg.org/wiki/Sanitization_rules#svg_Attributes var svgAttrs = toMap('accent-height,accumulate,additive,alphabetic,arabic-form,ascent,' + 'baseProfile,bbox,begin,by,calcMode,cap-height,class,color,color-rendering,content,' + 'cx,cy,d,dx,dy,descent,display,dur,end,fill,fill-rule,font-family,font-size,font-stretch,' + 'font-style,font-variant,font-weight,from,fx,fy,g1,g2,glyph-name,gradientUnits,hanging,' + 'height,horiz-adv-x,horiz-origin-x,ideographic,k,keyPoints,keySplines,keyTimes,lang,' + 'marker-end,marker-mid,marker-start,markerHeight,markerUnits,markerWidth,mathematical,' + 'max,min,offset,opacity,orient,origin,overline-position,overline-thickness,panose-1,' + 'path,pathLength,points,preserveAspectRatio,r,refX,refY,repeatCount,repeatDur,' + 'requiredExtensions,requiredFeatures,restart,rotate,rx,ry,slope,stemh,stemv,stop-color,' + 'stop-opacity,strikethrough-position,strikethrough-thickness,stroke,stroke-dasharray,' + 'stroke-dashoffset,stroke-linecap,stroke-linejoin,stroke-miterlimit,stroke-opacity,' + 'stroke-width,systemLanguage,target,text-anchor,to,transform,type,u1,u2,underline-position,' + 'underline-thickness,unicode,unicode-range,units-per-em,values,version,viewBox,visibility,' + 'width,widths,x,x-height,x1,x2,xlink:actuate,xlink:arcrole,xlink:role,xlink:show,xlink:title,' + 'xlink:type,xml:base,xml:lang,xml:space,xmlns,xmlns:xlink,y,y1,y2,zoomAndPan', true); var validAttrs = extend({}, uriAttrs, svgAttrs, htmlAttrs); function toMap(str, lowercaseKeys) { var obj = {}, items = str.split(','), i; for (i = 0; i < items.length; i++) { obj[lowercaseKeys ? 
lowercase(items[i]) : items[i]] = true; } return obj; } var inertBodyElement; (function(window) { var doc; if (window.document && window.document.implementation) { doc = window.document.implementation.createHTMLDocument("inert"); } else { throw $sanitizeMinErr('noinert', "Can't create an inert html document"); } var docElement = doc.documentElement || doc.getDocumentElement(); var bodyElements = docElement.getElementsByTagName('body'); // usually there should be only one body element in the document, but IE doesn't have any, so we need to create one if (bodyElements.length === 1) { inertBodyElement = bodyElements[0]; } else { var html = doc.createElement('html'); inertBodyElement = doc.createElement('body'); html.appendChild(inertBodyElement); doc.appendChild(html); } })(window); /** * @example * htmlParser(htmlString, { * start: function(tag, attrs) {}, * end: function(tag) {}, * chars: function(text) {}, * comment: function(text) {} * }); * * @param {string} html string * @param {object} handler */ function htmlParserImpl(html, handler) { if (html === null || html === undefined) { html = ''; } else if (typeof html !== 'string') { html = '' + html; } inertBodyElement.innerHTML = html; //mXSS protection var mXSSAttempts = 5; do { if (mXSSAttempts === 0) { throw $sanitizeMinErr('uinput', "Failed to sanitize html because the input is unstable"); } mXSSAttempts--; // strip custom-namespaced attributes on IE<=11 if (window.document.documentMode) { stripCustomNsAttrs(inertBodyElement); } html = inertBodyElement.innerHTML; //trigger mXSS inertBodyElement.innerHTML = html; } while (html !== inertBodyElement.innerHTML); var node = inertBodyElement.firstChild; while (node) { switch (node.nodeType) { case 1: // ELEMENT_NODE handler.start(node.nodeName.toLowerCase(), attrToMap(node.attributes)); break; case 3: // TEXT NODE handler.chars(node.textContent); break; } var nextNode; if (!(nextNode = node.firstChild)) { if (node.nodeType === 1) { handler.end(node.nodeName.toLowerCase()); } nextNode = node.nextSibling; if (!nextNode) { while (nextNode == null) { node = node.parentNode; if (node === inertBodyElement) break; nextNode = node.nextSibling; if (node.nodeType === 1) { handler.end(node.nodeName.toLowerCase()); } } } } node = nextNode; } while (node = inertBodyElement.firstChild) { inertBodyElement.removeChild(node); } } function attrToMap(attrs) { var map = {}; for (var i = 0, ii = attrs.length; i < ii; i++) { var attr = attrs[i]; map[attr.name] = attr.value; } return map; } /** * Escapes all potentially dangerous characters, so that the * resulting string can be safely inserted into attribute or * element text. * @param value * @returns {string} escaped text */ function encodeEntities(value) { return value. replace(/&/g, '&amp;'). replace(SURROGATE_PAIR_REGEXP, function(value) { var hi = value.charCodeAt(0); var low = value.charCodeAt(1); return '&#' + (((hi - 0xD800) * 0x400) + (low - 0xDC00) + 0x10000) + ';'; }). replace(NON_ALPHANUMERIC_REGEXP, function(value) { return '&#' + value.charCodeAt(0) + ';'; }). replace(/</g, '&lt;'). 
replace(/>/g, '&gt;'); } /** * create an HTML/XML writer which writes to buffer * @param {Array} buf use buf.join('') to get out sanitized html string * @returns {object} in the form of { * start: function(tag, attrs) {}, * end: function(tag) {}, * chars: function(text) {}, * comment: function(text) {} * } */ function htmlSanitizeWriterImpl(buf, uriValidator) { var ignoreCurrentElement = false; var out = bind(buf, buf.push); return { start: function(tag, attrs) { tag = lowercase(tag); if (!ignoreCurrentElement && blockedElements[tag]) { ignoreCurrentElement = tag; } if (!ignoreCurrentElement && validElements[tag] === true) { out('<'); out(tag); forEach(attrs, function(value, key) { var lkey = lowercase(key); var isImage = (tag === 'img' && lkey === 'src') || (lkey === 'background'); if (validAttrs[lkey] === true && (uriAttrs[lkey] !== true || uriValidator(value, isImage))) { out(' '); out(key); out('="'); out(encodeEntities(value)); out('"'); } }); out('>'); } }, end: function(tag) { tag = lowercase(tag); if (!ignoreCurrentElement && validElements[tag] === true && voidElements[tag] !== true) { out('</'); out(tag); out('>'); } if (tag == ignoreCurrentElement) { ignoreCurrentElement = false; } }, chars: function(chars) { if (!ignoreCurrentElement) { out(encodeEntities(chars)); } } }; } /** * When IE9-11 comes across an unknown namespaced attribute e.g. 'xlink:foo' it adds 'xmlns:ns1' attribute to declare * ns1 namespace and prefixes the attribute with 'ns1' (e.g. 'ns1:xlink:foo'). This is undesirable since we don't want * to allow any of these custom attributes. This method strips them all. * * @param node Root element to process */ function stripCustomNsAttrs(node) { if (node.nodeType === window.Node.ELEMENT_NODE) { var attrs = node.attributes; for (var i = 0, l = attrs.length; i < l; i++) { var attrNode = attrs[i]; var attrName = attrNode.name.toLowerCase(); if (attrName === 'xmlns:ns1' || attrName.lastIndexOf('ns1:', 0) === 0) { node.removeAttributeNode(attrNode); i--; l--; } } } var nextNode = node.firstChild; if (nextNode) { stripCustomNsAttrs(nextNode); } nextNode = node.nextSibling; if (nextNode) { stripCustomNsAttrs(nextNode); } } } function sanitizeText(chars) { var buf = []; var writer = htmlSanitizeWriter(buf, noop); writer.chars(chars); return buf.join(''); } // define ngSanitize module and register $sanitize service angular.module('ngSanitize', []).provider('$sanitize', $SanitizeProvider); /** * @ngdoc filter * @name linky * @kind function * * @description * Finds links in text input and turns them into html links. Supports `http/https/ftp/mailto` and * plain email address links. * * Requires the {@link ngSanitize `ngSanitize`} module to be installed. * * @param {string} text Input text. * @param {string} target Window (`_blank|_self|_parent|_top`) or named frame to open links in. * @param {object|function(url)} [attributes] Add custom attributes to the link element. * * Can be one of: * * - `object`: A map of attributes * - `function`: Takes the url as a parameter and returns a map of attributes * * If the map of attributes contains a value for `target`, it overrides the value of * the target parameter. * * * @returns {string} Html-linkified and {@link $sanitize sanitized} text. 
* * @usage <span ng-bind-html="linky_expression | linky"></span> * * @example <example module="linkyExample" deps="angular-sanitize.js" name="linky-filter"> <file name="index.html"> <div ng-controller="ExampleController"> Snippet: <textarea ng-model="snippet" cols="60" rows="3"></textarea> <table> <tr> <th>Filter</th> <th>Source</th> <th>Rendered</th> </tr> <tr id="linky-filter"> <td>linky filter</td> <td> <pre>&lt;div ng-bind-html="snippet | linky"&gt;<br>&lt;/div&gt;</pre> </td> <td> <div ng-bind-html="snippet | linky"></div> </td> </tr> <tr id="linky-target"> <td>linky target</td> <td> <pre>&lt;div ng-bind-html="snippetWithSingleURL | linky:'_blank'"&gt;<br>&lt;/div&gt;</pre> </td> <td> <div ng-bind-html="snippetWithSingleURL | linky:'_blank'"></div> </td> </tr> <tr id="linky-custom-attributes"> <td>linky custom attributes</td> <td> <pre>&lt;div ng-bind-html="snippetWithSingleURL | linky:'_self':{rel: 'nofollow'}"&gt;<br>&lt;/div&gt;</pre> </td> <td> <div ng-bind-html="snippetWithSingleURL | linky:'_self':{rel: 'nofollow'}"></div> </td> </tr> <tr id="escaped-html"> <td>no filter</td> <td><pre>&lt;div ng-bind="snippet"&gt;<br>&lt;/div&gt;</pre></td> <td><div ng-bind="snippet"></div></td> </tr> </table> </file> <file name="script.js"> angular.module('linkyExample', ['ngSanitize']) .controller('ExampleController', ['$scope', function($scope) { $scope.snippet = 'Pretty text with some links:\n'+ 'http://angularjs.org/,\n'+ 'mailto:us@somewhere.org,\n'+ 'another@somewhere.org,\n'+ 'and one more: ftp://127.0.0.1/.'; $scope.snippetWithSingleURL = 'http://angularjs.org/'; }]); </file> <file name="protractor.js" type="protractor"> it('should linkify the snippet with urls', function() { expect(element(by.id('linky-filter')).element(by.binding('snippet | linky')).getText()). toBe('Pretty text with some links: http://angularjs.org/, us@somewhere.org, ' + 'another@somewhere.org, and one more: ftp://127.0.0.1/.'); expect(element.all(by.css('#linky-filter a')).count()).toEqual(4); }); it('should not linkify snippet without the linky filter', function() { expect(element(by.id('escaped-html')).element(by.binding('snippet')).getText()). toBe('Pretty text with some links: http://angularjs.org/, mailto:us@somewhere.org, ' + 'another@somewhere.org, and one more: ftp://127.0.0.1/.'); expect(element.all(by.css('#escaped-html a')).count()).toEqual(0); }); it('should update', function() { element(by.model('snippet')).clear(); element(by.model('snippet')).sendKeys('new http://link.'); expect(element(by.id('linky-filter')).element(by.binding('snippet | linky')).getText()). toBe('new http://link.'); expect(element.all(by.css('#linky-filter a')).count()).toEqual(1); expect(element(by.id('escaped-html')).element(by.binding('snippet')).getText()) .toBe('new http://link.'); }); it('should work with the target property', function() { expect(element(by.id('linky-target')). element(by.binding("snippetWithSingleURL | linky:'_blank'")).getText()). toBe('http://angularjs.org/'); expect(element(by.css('#linky-target a')).getAttribute('target')).toEqual('_blank'); }); it('should optionally add custom attributes', function() { expect(element(by.id('linky-custom-attributes')). element(by.binding("snippetWithSingleURL | linky:'_self':{rel: 'nofollow'}")).getText()). 
toBe('http://angularjs.org/'); expect(element(by.css('#linky-custom-attributes a')).getAttribute('rel')).toEqual('nofollow'); }); </file> </example> */ angular.module('ngSanitize').filter('linky', ['$sanitize', function($sanitize) { var LINKY_URL_REGEXP = /((ftp|https?):\/\/|(www\.)|(mailto:)?[A-Za-z0-9._%+-]+@)\S*[^\s.;,(){}<>"\u201d\u2019]/i, MAILTO_REGEXP = /^mailto:/i; var linkyMinErr = angular.$$minErr('linky'); var isDefined = angular.isDefined; var isFunction = angular.isFunction; var isObject = angular.isObject; var isString = angular.isString; return function(text, target, attributes) { if (text == null || text === '') return text; if (!isString(text)) throw linkyMinErr('notstring', 'Expected string but received: {0}', text); var attributesFn = isFunction(attributes) ? attributes : isObject(attributes) ? function getAttributesObject() {return attributes;} : function getEmptyAttributesObject() {return {};}; var match; var raw = text; var html = []; var url; var i; while ((match = raw.match(LINKY_URL_REGEXP))) { // We can not end in these as they are sometimes found at the end of the sentence url = match[0]; // if we did not match ftp/http/www/mailto then assume mailto if (!match[2] && !match[4]) { url = (match[3] ? 'http://' : 'mailto:') + url; } i = match.index; addText(raw.substr(0, i)); addLink(url, match[0].replace(MAILTO_REGEXP, '')); raw = raw.substring(i + match[0].length); } addText(raw); return $sanitize(html.join('')); function addText(text) { if (!text) { return; } html.push(sanitizeText(text)); } function addLink(url, text) { var key, linkAttributes = attributesFn(url); html.push('<a '); for (key in linkAttributes) { html.push(key + '="' + linkAttributes[key] + '" '); } if (isDefined(target) && !('target' in linkAttributes)) { html.push('target="', target, '" '); } html.push('href="', url.replace(/"/g, '&quot;'), '">'); addText(text); html.push('</a>'); } }; }]); })(window, window.angular);
apache-2.0
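The core idea of the $sanitize service above (parse the HTML into tokens, keep only whitelisted elements and attributes, and escape everything else) can be shown with a much smaller sketch. The Python fragment below uses the standard-library parser and a toy whitelist; it is not the Angular implementation, and unlike $sanitize it keeps the text found inside dropped elements such as script or style, which a real sanitizer must also discard.

from html import escape
from html.parser import HTMLParser

ALLOWED_TAGS = {"a", "b", "em", "i", "p", "strong"}
ALLOWED_ATTRS = {"href", "title"}

class TinySanitizer(HTMLParser):
    def __init__(self):
        super().__init__(convert_charrefs=True)
        self.out = []

    def handle_starttag(self, tag, attrs):
        if tag in ALLOWED_TAGS:
            kept = "".join(' {}="{}"'.format(k, escape(v or "", quote=True))
                           for k, v in attrs if k in ALLOWED_ATTRS)
            self.out.append("<" + tag + kept + ">")

    def handle_endtag(self, tag):
        if tag in ALLOWED_TAGS:
            self.out.append("</" + tag + ">")

    def handle_data(self, data):
        self.out.append(escape(data))

def sanitize(html: str) -> str:
    parser = TinySanitizer()
    parser.feed(html)
    return "".join(parser.out)

# Event handlers and unknown attributes are stripped, text is escaped:
print(sanitize('<p onclick="steal()">hi <b>there</b> &amp; you</p>'))
# -> <p>hi <b>there</b> &amp; you</p>
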
devigned/azure-sdk-for-ruby
management/azure_mgmt_service_bus/lib/generated/azure_mgmt_service_bus/models/topic_create_or_update_parameters.rb
9701
# encoding: utf-8 # Code generated by Microsoft (R) AutoRest Code Generator 1.0.0.0 # Changes may cause incorrect behavior and will be lost if the code is # regenerated. module Azure::ARM::ServiceBus module Models # # Parameters supplied to the Create Or Update Topic operation. # class TopicCreateOrUpdateParameters include MsRestAzure # @return [String] Topic name. attr_accessor :name # @return [String] Location of the resource. attr_accessor :location # @return [DateTime] Last time the message was sent, or a request was # received, for this topic. attr_accessor :accessed_at # @return [String] TimeSpan idle interval after which the topic is # automatically deleted. The minimum duration is 5 minutes. attr_accessor :auto_delete_on_idle # @return [EntityAvailabilityStatus] Entity availability status for the # topic. Possible values include: 'Available', 'Limited', 'Renaming', # 'Restoring', 'Unknown' attr_accessor :entity_availability_status # @return [DateTime] Exact time the message was created. attr_accessor :created_at # @return [MessageCountDetails] attr_accessor :count_details # @return [String] Default message time to live value. This is the # duration after which the message expires, starting from when the # message is sent to Service Bus. This is the default value used when # TimeToLive is not set on a message itself. attr_accessor :default_message_time_to_live # @return [String] TimeSpan structure that defines the duration of the # duplicate detection history. The default value is 10 minutes. attr_accessor :duplicate_detection_history_time_window # @return [Boolean] Value that indicates whether server-side batched # operations are enabled. attr_accessor :enable_batched_operations # @return [Boolean] Value that indicates whether Express Entities are # enabled. An express topic holds a message in memory temporarily before # writing it to persistent storage. attr_accessor :enable_express # @return [Boolean] Value that indicates whether the topic to be # partitioned across multiple message brokers is enabled. attr_accessor :enable_partitioning # @return [Boolean] Value that indicates whether partitioning is enabled # or disabled. attr_accessor :enable_subscription_partitioning # @return [Boolean] Whether messages should be filtered before # publishing. attr_accessor :filtering_messages_before_publishing # @return [Boolean] Value that indicates whether the message is # accessible anonymously. attr_accessor :is_anonymous_accessible # @return [Boolean] attr_accessor :is_express # @return [Integer] Maximum size of the topic in megabytes, which is the # size of the memory allocated for the topic. attr_accessor :max_size_in_megabytes # @return [Boolean] Value indicating if this topic requires duplicate # detection. attr_accessor :requires_duplicate_detection # @return [Integer] Size of the topic, in bytes. attr_accessor :size_in_bytes # @return [EntityStatus] Enumerates the possible values for the status of # a messaging entity. Possible values include: 'Active', 'Creating', # 'Deleting', 'Disabled', 'ReceiveDisabled', 'Renaming', 'Restoring', # 'SendDisabled', 'Unknown' attr_accessor :status # @return [Integer] Number of subscriptions. attr_accessor :subscription_count # @return [Boolean] Value that indicates whether the topic supports # ordering. attr_accessor :support_ordering # @return [DateTime] The exact time the message was updated. attr_accessor :updated_at # # Mapper for TopicCreateOrUpdateParameters class as Ruby Hash. # This will be used for serialization/deserialization. 
# def self.mapper() { required: false, serialized_name: 'TopicCreateOrUpdateParameters', type: { name: 'Composite', class_name: 'TopicCreateOrUpdateParameters', model_properties: { name: { required: false, serialized_name: 'name', type: { name: 'String' } }, location: { required: true, serialized_name: 'location', type: { name: 'String' } }, accessed_at: { required: false, serialized_name: 'properties.accessedAt', type: { name: 'DateTime' } }, auto_delete_on_idle: { required: false, serialized_name: 'properties.autoDeleteOnIdle', type: { name: 'String' } }, entity_availability_status: { required: false, serialized_name: 'properties.entityAvailabilityStatus ', type: { name: 'Enum', module: 'EntityAvailabilityStatus' } }, created_at: { required: false, serialized_name: 'properties.createdAt', type: { name: 'DateTime' } }, count_details: { required: false, serialized_name: 'properties.countDetails', type: { name: 'Composite', class_name: 'MessageCountDetails' } }, default_message_time_to_live: { required: false, serialized_name: 'properties.defaultMessageTimeToLive', type: { name: 'String' } }, duplicate_detection_history_time_window: { required: false, serialized_name: 'properties.duplicateDetectionHistoryTimeWindow ', type: { name: 'String' } }, enable_batched_operations: { required: false, serialized_name: 'properties.enableBatchedOperations', type: { name: 'Boolean' } }, enable_express: { required: false, serialized_name: 'properties.enableExpress', type: { name: 'Boolean' } }, enable_partitioning: { required: false, serialized_name: 'properties.enablePartitioning', type: { name: 'Boolean' } }, enable_subscription_partitioning: { required: false, serialized_name: 'properties.enableSubscriptionPartitioning', type: { name: 'Boolean' } }, filtering_messages_before_publishing: { required: false, serialized_name: 'properties.filteringMessagesBeforePublishing', type: { name: 'Boolean' } }, is_anonymous_accessible: { required: false, serialized_name: 'properties.isAnonymousAccessible', type: { name: 'Boolean' } }, is_express: { required: false, serialized_name: 'properties.isExpress', type: { name: 'Boolean' } }, max_size_in_megabytes: { required: false, serialized_name: 'properties.maxSizeInMegabytes', type: { name: 'Number' } }, requires_duplicate_detection: { required: false, serialized_name: 'properties.requiresDuplicateDetection', type: { name: 'Boolean' } }, size_in_bytes: { required: false, serialized_name: 'properties.sizeInBytes', type: { name: 'Number' } }, status: { required: false, serialized_name: 'properties.status', type: { name: 'Enum', module: 'EntityStatus' } }, subscription_count: { required: false, serialized_name: 'properties.subscriptionCount', type: { name: 'Number' } }, support_ordering: { required: false, serialized_name: 'properties.supportOrdering', type: { name: 'Boolean' } }, updated_at: { required: false, serialized_name: 'properties.updatedAt', type: { name: 'DateTime' } } } } } end end end end
apache-2.0
legend0702/zhq
zhq-core/src/main/java/cn/zhuhongqing/utils/scan/TrueResourceFilter.java
503
package cn.zhuhongqing.utils.scan; /** * True filter * * @author HongQing.Zhu * <nl> * <li>Mail:qwepoidjdj(a)gmail.com</li> * <li>HomePage:www.zhuhongqing.cn</li> * <li>Github:github.com/legend0702</li> * </nl> * */ public class TrueResourceFilter<R> implements ResourceFilter<R> { public static final ResourceFilter<?> INSTANCE = new TrueResourceFilter<>(); @Override public boolean accept(R resource) { return true; } }
apache-2.0
viaper/DBPlus
DerbyHodgepodge/java/engine/org/apache/derby/iapi/jdbc/EngineStatement.java
2915
/* Derby - Class org.apache.derby.iapi.jdbc.EngineStatement Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.apache.derby.iapi.jdbc; import java.sql.SQLException; import java.sql.Statement; /** * Additional methods the embedded engine exposes on its Statement object * implementations. An internal api only, mainly for the network * server. Allows consistent interaction between emebdded statements * and brokered statements. * */ public interface EngineStatement extends Statement { /** * Identical to the JDBC 3 getMoreResults(int). * * @see java.sql.Statement#getMoreResults(int) */ public boolean getMoreResults(int current) throws SQLException; /** * Identical to the JDBC 3 getResultSetHoldability(int). * * @see java.sql.Statement#getResultSetHoldability() */ public int getResultSetHoldability() throws SQLException; /** * Identical to the JDBC 4 isClosed() method. */ public boolean isClosed() throws SQLException; //////////////////////////////////////////////////////////////////// // // INTRODUCED BY JDBC 4.1 IN JAVA 7 // //////////////////////////////////////////////////////////////////// public void closeOnCompletion() throws SQLException; public boolean isCloseOnCompletion() throws SQLException; //////////////////////////////////////////////////////////////////// // // INTRODUCED BY JDBC 4.2 IN JAVA 8 // //////////////////////////////////////////////////////////////////// public long[] executeLargeBatch() throws SQLException; public long executeLargeUpdate( String sql ) throws SQLException; public long executeLargeUpdate( String sql, int autoGeneratedKeys) throws SQLException; public long executeLargeUpdate( String sql, int[] columnIndexes ) throws SQLException; public long executeLargeUpdate( String sql, String[] columnNames ) throws SQLException; public long getLargeMaxRows() throws SQLException; public long getLargeUpdateCount() throws SQLException; public void setLargeMaxRows( long max ) throws SQLException; }
apache-2.0
pdiemert/sstv
lib/find.js
236
var _channels = require('./channels'); /* Register this module's channel definitions with the shared channel helper. */ function init() { _channels.addDir('find-channels', 'find'); } /* List the items available under a path by delegating to the 'find' channel. */ function getItems(path, cb) { return _channels.getItems('find', path, cb); } init(); module.exports = { getItems : getItems };
apache-2.0
kubernetes/cloud-provider-alibaba-cloud
vendor/github.com/aliyun/alibaba-cloud-sdk-go/services/ecs/struct_entry.go
869
package ecs //Licensed under the Apache License, Version 2.0 (the "License"); //you may not use this file except in compliance with the License. //You may obtain a copy of the License at // //http://www.apache.org/licenses/LICENSE-2.0 // //Unless required by applicable law or agreed to in writing, software //distributed under the License is distributed on an "AS IS" BASIS, //WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. //See the License for the specific language governing permissions and //limitations under the License. // // Code generated by Alibaba Cloud SDK Code Generator. // Changes may cause incorrect behavior and will be lost if the code is regenerated. // Entry is a nested struct in ecs response type Entry struct { Cidr string `json:"Cidr" xml:"Cidr"` Description string `json:"Description" xml:"Description"` }
apache-2.0
debanjanmahata/textmining-kit
inputoutput/util.py
10587
''' Created on Mar 1, 2017 @author: debanjan ''' """ Borrowed from textacy (https://github.com/chartbeat-labs/textacy) """ import bz2 import gzip import io from itertools import tee, starmap import os import re import warnings try: # Py3 import lzma except ImportError: # Py2 pass from cytoolz.itertoolz import cons, pluck from utils import compat def open_sesame(filepath, mode='rt', encoding=None, auto_make_dirs=False, errors=None, newline=None): """ Open file ``filepath``. Compression (if any) is inferred from the file extension ('.gz', '.bz2', or '.xz') and handled automatically; '~', '.', and/or '..' in paths are automatically expanded; if writing to a directory that doesn't exist, all intermediate directories can be created automatically, as needed. `open_sesame` may be used as a drop-in replacement for the built-in `open`. Args: filepath (str): path on disk (absolute or relative) of the file to open mode (str): optional string specifying the mode in which ``filepath`` is opened encoding (str): optional name of the encoding used to decode or encode ``filepath``; only applicable in text mode errors (str): optional string specifying how encoding/decoding errors are handled; only applicable in text mode newline (str): optional string specifying how universal newlines mode works; only applicable in text mode auto_make_dirs (bool): if True, automatically create (sub)directories if not already present in order to write `filepath` Returns: file object """ # sanity check args if not isinstance(filepath, compat.string_types): raise TypeError('filepath must be a string, not {}'.format(type(filepath))) if encoding and 't' not in mode: raise ValueError('encoding only applicable for text mode') # process filepath and create dirs filepath = os.path.realpath(os.path.expanduser(filepath)) if auto_make_dirs is True: make_dirs(filepath, mode) elif 'r' in mode and not os.path.exists(filepath): raise OSError('file "{}" does not exist'.format(filepath)) # infer compression from filepath extension # and get file handle accordingly _, ext = os.path.splitext(filepath) ext = ext.lower() if ext in ('.gz', '.bz2', '.xz'): # strip bytes/text from mode; 'b' is default, and we'll handle 't' below mode_ = mode.replace('b', '').replace('t', '') if ext == '.gz': f = gzip.GzipFile(filepath, mode=mode_) elif ext == '.bz2': f = bz2.BZ2File(filepath, mode=mode_) elif ext == '.xz': if compat.PY2 is True: msg = "lzma compression isn't enabled for Python 2; try gzip or bz2" raise ValueError(msg) f = lzma.LZMAFile(filepath, mode=mode_) # handle reading/writing compressed files in text mode if 't' in mode: if compat.PY2 is True: msg = 'Python 2 can\'t read/write compressed files in "{}" mode'.format(mode) raise ValueError(msg) else: f = io.TextIOWrapper(f, encoding=encoding, errors=errors, newline=newline) # no compression, so file is opened as usual else: f = io.open(filepath, mode=mode, encoding=encoding, errors=errors, newline=newline) return f def make_dirs(filepath, mode): """ If writing ``filepath`` to a directory that doesn't exist, all intermediate directories will be created as needed. """ head, _ = os.path.split(filepath) if 'w' in mode and head and not os.path.exists(head): os.makedirs(head) def coerce_content_type(content, file_mode): """ If the `content` to be written to file and the `file_mode` used to open it are incompatible (either bytes with text mode or unicode with bytes mode), try to coerce the content type so it can be written. 
""" if 't' in file_mode and isinstance(content, compat.bytes_): return compat.bytes_to_unicode(content) elif 'b' in file_mode and isinstance(content, compat.unicode): return compat.unicode_to_bytes(content) return content def split_record_fields(items, content_field, itemwise=False): """ Split records' content (text) field from associated metadata fields, but keep them paired together for convenient loading into a ``textacy.Doc <textacy.doc.Doc>`` (with ``itemwise = True``) or ``textacy.Corpus <textacy.corpus.Corpus>`` (with ``itemwise = False``). Output format depends on the form of the input items (dicts vs. lists) and the value for ``itemwise``. Args: items (Iterable[dict] or Iterable[list]): an iterable of dicts, e.g. as read from disk by :func:`read_json_lines() <textacy.fileio.read.read_json_lines>`, or an iterable of lists, e.g. as read from disk by :func:`read_csv() <textacy.fileio.read.read_csv>` content_field (str or int): if str, key in each dict item whose value is the item's content (text); if int, index of the value in each list item corresponding to the item's content (text) itemwise (bool): if True, content + metadata are paired item-wise as an iterable of (content, metadata) 2-tuples; if False, content + metadata are paired by position in two parallel iterables in the form of a (iterable(content), iterable(metadata)) 2-tuple Returns: generator(Tuple[str, dict]): if ``itemwise`` is True and ``items`` is an iterable of dicts; the first element in each tuple is the item's content, the second element is its metadata as a dictionary generator(Tuple[str, list]): if ``itemwise`` is True and ``items`` is an iterable of lists; the first element in each tuple is the item's content, the second element is its metadata as a list Tuple[Iterable[str], Iterable[dict]]: if ``itemwise`` is False and ``items`` is an iterable of dicts; the first element of the tuple is an iterable of items' contents, the second is an iterable of their metadata dicts Tuple[Iterable[str], Iterable[list]]: if ``itemwise`` is False and ``items`` is an iterable of lists; the first element of the tuple is an iterable of items' contents, the second is an iterable of their metadata lists """ if itemwise is True: return ((item.pop(content_field), item) for item in items) else: return unzip(((item.pop(content_field), item) for item in items)) def unzip(seq): """ Borrowed from ``toolz.sandbox.core.unzip``, but using cytoolz instead of toolz to avoid the additional dependency. """ seq = iter(seq) # check how many iterators we need try: first = tuple(next(seq)) except StopIteration: return tuple() # and create them niters = len(first) seqs = tee(cons(first, seq), niters) return tuple(starmap(pluck, enumerate(seqs))) def get_filenames(dirname, match_substr=None, ignore_substr=None, match_regex=None, ignore_regex=None, extension=None, ignore_invisible=True, recursive=False): """ Yield full paths of files on disk under directory ``dirname``, optionally filtering for or against particular substrings or file extensions and crawling all subdirectories. 
Args: dirname (str): /path/to/dir on disk where files to read are saved match_substr (str): match only files with given substring (DEPRECATED; use match_regex) ignore_substr (str): match only files *without* given substring (DEPRECATED; use ignore_regex) match_regex (str): include files whose names match this regex pattern ignore_regex (str): include files whose names do *not* match this regex pattern extension (str): if files only of a certain type are wanted, specify the file extension (e.g. ".txt") ignore_invisible (bool): if True, ignore invisible files, i.e. those that begin with a period recursive (bool): if True, iterate recursively through all files in subdirectories; otherwise, only return files directly under ``dirname`` Yields: str: next file's name, including the full path on disk Raises: OSError: if ``dirname`` is not found on disk """ if not os.path.exists(dirname): raise OSError('directory "{}" does not exist'.format(dirname)) # TODO: remove these params in, say, v0.4 if match_substr or ignore_substr: with warnings.catch_warnings(): warnings.simplefilter('always', DeprecationWarning) msg = """ the `match_substr` and `ignore_substr` params are deprecated! use the more flexible `match_regex` and `ignore_regex` params instead """.strip().replace('\n', ' ') warnings.warn(msg, DeprecationWarning) match_regex = re.compile(match_regex) if match_regex else None ignore_regex = re.compile(ignore_regex) if ignore_regex else None def is_good_file(filename, filepath): if ignore_invisible and filename.startswith('.'): return False if match_substr and match_substr not in filename: return False if ignore_substr and ignore_substr in filename: return False if match_regex and not match_regex.search(filename): return False if ignore_regex and ignore_regex.search(filename): return False if extension and not os.path.splitext(filename)[-1] == extension: return False if not os.path.isfile(os.path.join(filepath, filename)): return False return True if recursive is True: for dirpath, _, filenames in os.walk(dirname): if ignore_invisible and dirpath.startswith('.'): continue for filename in filenames: if filename.startswith('.'): continue if is_good_file(filename, dirpath): yield os.path.join(dirpath, filename) else: for filename in os.listdir(dirname): if is_good_file(filename, dirname): yield os.path.join(dirname, filename)
apache-2.0
alex-dorokhov/gdx-pay
gdx-pay-android-googleplay/src/test/java/com/badlogic/gdx/pay/android/googleplay/testdata/GetBuyIntentResponseObjectMother.java
1168
package com.badlogic.gdx.pay.android.googleplay.testdata;

import android.app.PendingIntent;
import android.content.IntentSender;
import android.os.Bundle;

import static com.badlogic.gdx.pay.android.googleplay.GoogleBillingConstants.BUY_INTENT;
import static com.badlogic.gdx.pay.android.googleplay.GoogleBillingConstants.RESPONSE_CODE;
import static com.badlogic.gdx.pay.android.googleplay.ResponseCode.BILLING_RESPONSE_RESULT_OK;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

/**
 * Object mother for Buy intents.
 *
 * <p>Uses Mockito to instantiate objects that are hard to construct when Android environment is
 * not running.</p>
 */
public class GetBuyIntentResponseObjectMother {

    public static Bundle buyIntentResponseOk() {
        PendingIntent buyIntent = mock(PendingIntent.class);
        IntentSender intentSender = mock(IntentSender.class);
        when(buyIntent.getIntentSender()).thenReturn(intentSender);

        Bundle bundle = new Bundle();
        bundle.putInt(RESPONSE_CODE, BILLING_RESPONSE_RESULT_OK.getCode());
        bundle.putParcelable(BUY_INTENT, buyIntent);
        return bundle;
    }
}
apache-2.0
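A minimal usage sketch for the object mother above, written as a JUnit test. The test class name and assertion are illustrative, not part of the original record, and it assumes the same Android-aware test runtime (e.g. Robolectric) that the original test module relies on to construct a real Bundle.

import android.os.Bundle;

import org.junit.Test;

import com.badlogic.gdx.pay.android.googleplay.testdata.GetBuyIntentResponseObjectMother;

import static com.badlogic.gdx.pay.android.googleplay.GoogleBillingConstants.RESPONSE_CODE;
import static com.badlogic.gdx.pay.android.googleplay.ResponseCode.BILLING_RESPONSE_RESULT_OK;
import static org.junit.Assert.assertEquals;

public class GetBuyIntentResponseObjectMotherUsageTest {

    @Test
    public void buyIntentResponseOkCarriesOkResponseCode() {
        // Build the canned "OK" buy-intent response from the object mother.
        Bundle response = GetBuyIntentResponseObjectMother.buyIntentResponseOk();

        // The bundle should report the billing response code as OK.
        assertEquals(BILLING_RESPONSE_RESULT_OK.getCode(), response.getInt(RESPONSE_CODE));
    }
}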
ourbeehive/AndPlug
DaoGen/src/main/java/de/greenrobot/daogenerator/EntityGenerator.java
5860
package de.greenrobot.daogenerator; import freemarker.template.Configuration; import freemarker.template.DefaultObjectWrapper; import freemarker.template.Template; import java.io.File; import java.io.FileWriter; import java.io.IOException; import java.io.Writer; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.regex.Matcher; import java.util.regex.Pattern; /** * Once you have your model created, use this class to generate entities only * * @author Markus * @author Charles */ public class EntityGenerator { private Pattern patternKeepIncludes; private Pattern patternKeepFields; private Pattern patternKeepMethods; private Template templateEntity; public EntityGenerator() throws IOException { System.out.println("EntityGenerator"); System.out.println("Copyright 2011-2014 Markus Junginger, greenrobot.de. Licensed under GPL V3."); patternKeepIncludes = compilePattern("INCLUDES"); patternKeepFields = compilePattern("FIELDS"); patternKeepMethods = compilePattern("METHODS"); Configuration config = new Configuration(); config.setClassForTemplateLoading(this.getClass(), "/"); config.setObjectWrapper(new DefaultObjectWrapper()); templateEntity = config.getTemplate("entity-no-mapping.ftl"); } private Pattern compilePattern(String sectionName) { int flags = Pattern.DOTALL | Pattern.MULTILINE; return Pattern.compile(".*^\\s*?//\\s*?KEEP " + sectionName + ".*?\n(.*?)^\\s*// KEEP " + sectionName + " END.*?\n", flags); } /** Generates all entities and DAOs for the given schema. */ public void generateAll(Schema schema, String outDir) throws Exception { generateAll(schema, outDir, null); } /** Generates all entities and DAOs for the given schema. */ public void generateAll(Schema schema, String outDir, String outDirTest) throws Exception { long start = System.currentTimeMillis(); File outDirFile = toFileForceExists(outDir); schema.init2ndPass(); //schema.init3ndPass(); System.out.println("Processing schema version " + schema.getVersion() + "..."); List<Entity> entities = schema.getEntities(); for (Entity entity : entities) { if (!entity.isProtobuf() && !entity.isSkipGeneration()) { generate(templateEntity, outDirFile, entity.getJavaPackage(), entity.getClassName(), schema, entity); } } long time = System.currentTimeMillis() - start; System.out.println("Processed " + entities.size() + " entities in " + time + "ms"); } protected File toFileForceExists(String filename) throws IOException { File file = new File(filename); if (!file.exists()) { throw new IOException(filename + " does not exist. 
This check is to prevent accidental file generation into a wrong path."); } return file; } private void generate(Template template, File outDirFile, String javaPackage, String javaClassName, Schema schema, Entity entity) throws Exception { generate(template, outDirFile, javaPackage, javaClassName, schema, entity, null); } private void generate(Template template, File outDirFile, String javaPackage, String javaClassName, Schema schema, Entity entity, Map<String, Object> additionalObjectsForTemplate) throws Exception { Map<String, Object> root = new HashMap<String, Object>(); root.put("schema", schema); root.put("entity", entity); if (additionalObjectsForTemplate != null) { root.putAll(additionalObjectsForTemplate); } try { File file = toJavaFilename(outDirFile, javaPackage, javaClassName); file.getParentFile().mkdirs(); if (entity != null && entity.getHasKeepSections()) { checkKeepSections(file, root); } Writer writer = new FileWriter(file); try { template.process(root, writer); writer.flush(); System.out.println("Written " + file.getCanonicalPath()); } finally { writer.close(); } } catch (Exception ex) { System.err.println("Data map for template: " + root); System.err.println("Error while generating " + javaPackage + "." + javaClassName + " (" + outDirFile.getCanonicalPath() + ")"); throw ex; } } private void checkKeepSections(File file, Map<String, Object> root) { if (file.exists()) { try { String contents = new String(DaoUtil.readAllBytes(file)); Matcher matcher; matcher = patternKeepIncludes.matcher(contents); if (matcher.matches()) { root.put("keepIncludes", matcher.group(1)); } matcher = patternKeepFields.matcher(contents); if (matcher.matches()) { root.put("keepFields", matcher.group(1)); } matcher = patternKeepMethods.matcher(contents); if (matcher.matches()) { root.put("keepMethods", matcher.group(1)); } } catch (IOException e) { e.printStackTrace(); } } } protected File toJavaFilename(File outDirFile, String javaPackage, String javaClassName) { String packageSubPath = javaPackage.replace('.', '/'); File packagePath = new File(outDirFile, packageSubPath); File file = new File(packagePath, javaClassName + ".java"); return file; } }
apache-2.0
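A short sketch of how the EntityGenerator record above might be driven. The Schema and Entity builder calls (addEntity, addIdProperty, addStringProperty) are assumed from the greenDAO generator API in the same de.greenrobot.daogenerator package, and the entity name and output directory are hypothetical.

import de.greenrobot.daogenerator.Entity;
import de.greenrobot.daogenerator.EntityGenerator;
import de.greenrobot.daogenerator.Schema;

public class GenerateEntities {

    public static void main(String[] args) throws Exception {
        // Schema version 1, default Java package for the generated classes.
        Schema schema = new Schema(1, "com.example.model");

        // A simple entity with an id and one string property (illustrative only).
        Entity note = schema.addEntity("Note");
        note.addIdProperty();
        note.addStringProperty("text").notNull();

        // Generate plain entity classes (no DAO mapping) into an existing directory;
        // EntityGenerator refuses to create the output directory itself.
        new EntityGenerator().generateAll(schema, "./src-gen");
    }
}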
shioyang/StatsBar
e2e/app.e2e-spec.ts
315
import { StatsBarPage } from './app.po';

describe('stats-bar App', function() {
  let page: StatsBarPage;

  beforeEach(() => {
    page = new StatsBarPage();
  });

  it('should display message saying app works', () => {
    page.navigateTo();
    expect(page.getParagraphText()).toEqual('app works!');
  });
});
apache-2.0
loshine/GalleryPicker
gallerypicker/src/main/java/me/loshine/gallerypicker/entity/MediaBucket.java
1979
package me.loshine.gallerypicker.entity;

/**
 * Description:
 * Author: loshine1992@gmail.com
 * Date:   2016/12/22
 */
public class MediaBucket {

    private String bucketId;
    private String bucketName;
    private int imageCount;
    private String cover;
    // image orientation
    private int orientation;
    private boolean isChecked;

    public String getBucketId() {
        return bucketId;
    }

    public void setBucketId(String bucketId) {
        this.bucketId = bucketId;
    }

    public String getBucketName() {
        return bucketName;
    }

    public void setBucketName(String bucketName) {
        this.bucketName = bucketName;
    }

    public int getImageCount() {
        return imageCount;
    }

    public void setImageCount(int imageCount) {
        this.imageCount = imageCount;
    }

    public String getCover() {
        return cover;
    }

    public void setCover(String cover) {
        this.cover = cover;
    }

    public int getOrientation() {
        return orientation;
    }

    public void setOrientation(int orientation) {
        this.orientation = orientation;
    }

    public boolean isChecked() {
        return isChecked;
    }

    public void setChecked(boolean checked) {
        isChecked = checked;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;

        MediaBucket that = (MediaBucket) o;

        return bucketId != null ? bucketId.equals(that.bucketId) : that.bucketId == null;
    }

    @Override
    public String toString() {
        return "MediaBucket{" +
                "bucketId='" + bucketId + '\'' +
                ", bucketName='" + bucketName + '\'' +
                ", imageCount=" + imageCount +
                ", cover='" + cover + '\'' +
                ", orientation=" + orientation +
                ", isChecked=" + isChecked +
                '}';
    }
}
apache-2.0
ppamorim/SmallProject
app/src/main/java/com/smallproject/ui/util/CircleTransform.java
1128
package com.smallproject.ui.util;

import android.graphics.Bitmap;
import android.graphics.BitmapShader;
import android.graphics.Canvas;
import android.graphics.Paint;

import com.squareup.picasso.Transformation;

public class CircleTransform implements Transformation {

    @Override
    public Bitmap transform(Bitmap source) {
        int size = Math.min(source.getWidth(), source.getHeight());

        int x = (source.getWidth() - size) / 2;
        int y = (source.getHeight() - size) / 2;

        Bitmap squaredBitmap = Bitmap.createBitmap(source, x, y, size, size);
        if (squaredBitmap != source) {
            source.recycle();
        }

        Bitmap bitmap = Bitmap.createBitmap(size, size, source.getConfig());

        Canvas canvas = new Canvas(bitmap);
        Paint paint = new Paint();
        BitmapShader shader = new BitmapShader(squaredBitmap,
                BitmapShader.TileMode.CLAMP, BitmapShader.TileMode.CLAMP);
        paint.setShader(shader);
        paint.setAntiAlias(true);

        float r = size / 2f;
        canvas.drawCircle(r, r, r, paint);

        squaredBitmap.recycle();
        return bitmap;
    }

    @Override
    public String key() {
        return "circle";
    }
}
apache-2.0
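A hedged usage sketch for the CircleTransform above with the Picasso 2.x fluent API; the helper class, context, image URL, and target ImageView are placeholders and not part of the original record.

import android.content.Context;
import android.widget.ImageView;

import com.smallproject.ui.util.CircleTransform;
import com.squareup.picasso.Picasso;

public final class AvatarLoader {

    private AvatarLoader() {
    }

    // Loads a remote image, crops it to a circle via CircleTransform, and displays it.
    public static void loadAvatar(Context context, String url, ImageView target) {
        Picasso.with(context)
                .load(url)
                .transform(new CircleTransform())
                .into(target);
    }
}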
LetsBuildSomething/vmag_mobile
js/themes/variables.js
6855
import color from 'color'; import { Platform, Dimensions, PixelRatio } from 'react-native'; const deviceHeight = Dimensions.get('window').height; const deviceWidth = Dimensions.get('window').width; const platform = Platform.OS; const platformStyle = undefined; export default { platformStyle, platform, // AndroidRipple androidRipple: true, androidRippleColor: 'rgba(256, 256, 256, 0.3)', androidRippleColorDark: 'rgba(0, 0, 0, 0.15)', // Badge badgeBg: '#ED1727', badgeColor: '#fff', // New Variable badgePadding: (platform === 'ios') ? 3 : 0, // Button btnFontFamily: (platform === 'ios') ? 'System' : 'Roboto_medium', btnDisabledBg: '#b5b5b5', btnDisabledClr: '#f1f1f1', // CheckBox CheckboxRadius: (platform === 'ios') ? 13 : 0, CheckboxBorderWidth: (platform === 'ios') ? 1 : 2, CheckboxPaddingLeft: (platform === 'ios') ? 4 : 2, CheckboxPaddingBottom: (platform === 'ios') ? 0 : 5, CheckboxIconSize: (platform === 'ios') ? 21 : 14, CheckboxIconMarginTop: (platform === 'ios') ? undefined : 1, CheckboxFontSize: (platform === 'ios') ? (23 / 0.9) : 18, DefaultFontSize: 17, checkboxBgColor: '#039BE5', checkboxSize: 20, checkboxTickColor: '#fff', // New Variable get defaultTextColor() { return this.textColor; }, get btnPrimaryBg() { return this.brandPrimary; }, get btnPrimaryColor() { return this.inverseTextColor; }, get btnInfoBg() { return this.brandInfo; }, get btnInfoColor() { return this.inverseTextColor; }, get btnSuccessBg() { return this.brandSuccess; }, get btnSuccessColor() { return this.inverseTextColor; }, get btnDangerBg() { return this.brandDanger; }, get btnDangerColor() { return this.inverseTextColor; }, get btnWarningBg() { return this.brandWarning; }, get btnWarningColor() { return this.inverseTextColor; }, get btnTextSize() { return (platform === 'ios') ? this.fontSizeBase * 1.1 : this.fontSizeBase - 1; }, get btnTextSizeLarge() { return this.fontSizeBase * 1.5; }, get btnTextSizeSmall() { return this.fontSizeBase * 0.8; }, get borderRadiusLarge() { return this.fontSizeBase * 3.8; }, buttonPadding: 6, get iconSizeLarge() { return this.iconFontSize * 1.5; }, get iconSizeSmall() { return this.iconFontSize * 0.6; }, // Card cardDefaultBg: '#fff', // Color brandPrimary: (platform === 'ios') ? '#007aff' : '#C5F442', brandInfo: '#62B1F6', brandSuccess: '#5cb85c', brandDanger: '#d9534f', brandWarning: '#f0ad4e', brandSidebar: '#252932', // Font fontFamily: (platform === 'ios') ? 'System' : 'Roboto', fontSizeBase: 15, get fontSizeH1() { return this.fontSizeBase * 1.8; }, get fontSizeH2() { return this.fontSizeBase * 1.6; }, get fontSizeH3() { return this.fontSizeBase * 1.4; }, // Footer footerHeight: 55, footerDefaultBg: (platform === 'ios') ? '#F8F8F8' : '#4179F7', // FooterTab tabBarTextColor: (platform === 'ios') ? '#6b6b6b' : '#b3c7f9', tabBarTextSize: (platform === 'ios') ? 14 : 11, activeTab: (platform === 'ios') ? '#007aff' : '#fff', sTabBarActiveTextColor: '#007aff', tabBarActiveTextColor: (platform === 'ios') ? '#007aff' : '#fff', tabActiveBgColor: (platform === 'ios') ? '#cde1f9' : undefined, // Tab tabDefaultBg: (platform === 'ios') ? '#F8F8F8' : '#3F51B5', topTabBarTextColor: (platform === 'ios') ? '#6b6b6b' : '#b3c7f9', topTabBarActiveTextColor: (platform === 'ios') ? '#007aff' : '#fff', topTabActiveBgColor: (platform === 'ios') ? '#cde1f9' : undefined, topTabBarBorderColor: (platform === 'ios') ? '#007aff' : '#fff', // Header toolbarBtnColor: (platform === 'ios') ? '#007aff' : '#fff', toolbarDefaultBg: (platform === 'ios') ? 
'#A82117' : '#AF7011',//'#3F51B5', toolbarHeight: (platform === 'ios') ? 64 : 56, toolbarIconSize: (platform === 'ios') ? 20 : 22, toolbarSearchIconSize: (platform === 'ios') ? 20 : 23, toolbarInputColor: (platform === 'ios') ? '#CECDD2' : '#fff', searchBarHeight: (platform === 'ios') ? 30 : 40, toolbarInverseBg: '#222', toolbarTextColor: (platform === 'ios') ? '#000' : '#fff', toolbarDefaultBorder: (platform === 'ios') ? '#a7a6ab' : '#3F51B5', get statusBarColor() { return color(this.toolbarDefaultBg).darken(0.2).hexString(); }, // Icon iconFamily: 'Ionicons', iconFontSize: (platform === 'ios') ? 30 : 28, iconMargin: 7, iconHeaderSize: (platform === 'ios') ? 33 : 28, // InputGroup inputFontSize: 17, inputBorderColor: '#D9D5DC', inputSuccessBorderColor: '#2b8339', inputErrorBorderColor: '#ed2f2f', get inputColor() { return this.textColor; }, get inputColorPlaceholder() { return '#575757'; }, inputGroupMarginBottom: 10, inputHeightBase: 50, inputPaddingLeft: 5, get inputPaddingLeftIcon() { return this.inputPaddingLeft * 8; }, // Line Height btnLineHeight: 19, lineHeightH1: 32, lineHeightH2: 27, lineHeightH3: 22, iconLineHeight: (platform === 'ios') ? 37 : 30, lineHeight: (platform === 'ios') ? 20 : 24, // List listBorderColor: '#c9c9c9', listDividerBg: '#f4f4f4', listItemHeight: 45, // Card cardBorderColor: '#ccc', // Changed Variable listItemPadding: (platform === 'ios') ? 10 : 12, listNoteColor: '#808080', listNoteSize: 13, // Progress Bar defaultProgressColor: '#E4202D', inverseProgressColor: '#1A191B', // Radio Button radioBtnSize: (platform === 'ios') ? 25 : 23, radioSelectedColorAndroid: '#5067FF', // New Variable radioBtnLineHeight: (platform === 'ios') ? 29 : 24, radioColor: '#7e7e7e', get radioSelectedColor() { return color(this.radioColor).darken(0.2).hexString(); }, // Spinner defaultSpinnerColor: '#45D56E', inverseSpinnerColor: '#1A191B', // Tabs tabBgColor: '#F8F8F8', tabFontSize: 15, tabTextColor: '#222222', // Text textColor: '#000', inverseTextColor: '#fff', noteFontSize: 14, // Title titleFontfamily: (platform === 'ios') ? 'System' : 'Roboto_medium', titleFontSize: (platform === 'ios') ? 17 : 19, subTitleFontSize: (platform === 'ios') ? 12 : 14, subtitleColor: (platform === 'ios') ? '#8e8e93' : '#FFF', // New Variable titleFontColor: (platform === 'ios') ? '#000' : '#FFF', // Other borderRadiusBase: (platform === 'ios') ? 5 : 2, borderWidth: (1/PixelRatio.getPixelSizeForLayoutSize(1)), contentPadding: 10, get darkenHeader() { return color(this.tabBgColor).darken(0.03).hexString(); }, dropdownBg: '#000', dropdownLinkColor: '#414142', inputLineHeight: 24, jumbotronBg: '#C9C9CE', jumbotronPadding: 30, deviceWidth, deviceHeight, // New Variable inputGroupRoundedBorderRadius: 30, };
apache-2.0
T5750/maven-archetype-templates
dubbo-wusc/edu-demo/src/main/java/wusc/edu/demo/common/page/PageBean.java
4626
package wusc.edu.demo.common.page; import java.io.Serializable; import java.util.List; import java.util.Map; /** * * @描述: 分页组件. * @作者: WuShuicheng . * @创建时间: 2013-7-25,下午11:33:41 . * @版本: 1.0 . */ public class PageBean implements Serializable { /** * */ private static final long serialVersionUID = 8470697978259453214L; // 指定的或是页面参数 private int currentPage; // 当前页 private int numPerPage; // 每页显示多少条 // 查询数据库 private int totalCount; // 总记录数 private List<Object> recordList; // 本页的数据列表 // 计算 private int pageCount; // 总页数 private int beginPageIndex; // 页码列表的开始索引(包含) private int endPageIndex; // 页码列表的结束索引(包含) private Map<String, Object> countResultMap; // 当前分页条件下的统计结果 public PageBean() { } /** * 只接受前4个必要的属性,会自动的计算出其他3个属生的值 * * @param currentPage * @param pageSize * @param totalCount * @param recordList */ public PageBean(int currentPage, int numPerPage, int totalCount, List<Object> recordList) { this.currentPage = currentPage; this.numPerPage = numPerPage; this.totalCount = totalCount; this.recordList = recordList; // 计算总页码 pageCount = (totalCount + numPerPage - 1) / numPerPage; // 计算 beginPageIndex 和 endPageIndex // >> 总页数不多于10页,则全部显示 if (pageCount <= 10) { beginPageIndex = 1; endPageIndex = pageCount; } // >> 总页数多于10页,则显示当前页附近的共10个页码 else { // 当前页附近的共10个页码(前4个 + 当前页 + 后5个) beginPageIndex = currentPage - 4; endPageIndex = currentPage + 5; // 当前面的页码不足4个时,则显示前10个页码 if (beginPageIndex < 1) { beginPageIndex = 1; endPageIndex = 10; } // 当后面的页码不足5个时,则显示后10个页码 if (endPageIndex > pageCount) { endPageIndex = pageCount; beginPageIndex = pageCount - 10 + 1; } } } /** * 只接受前5个必要的属性,会自动的计算出其他3个属生的值 * * @param currentPage * @param pageSize * @param totalCount * @param recordList */ public PageBean(int currentPage, int numPerPage, int totalCount, List<Object> recordList, Map<String, Object> countResultMap) { this.currentPage = currentPage; this.numPerPage = numPerPage; this.totalCount = totalCount; this.recordList = recordList; this.countResultMap = countResultMap; // 计算总页码 pageCount = (totalCount + numPerPage - 1) / numPerPage; // 计算 beginPageIndex 和 endPageIndex // >> 总页数不多于10页,则全部显示 if (pageCount <= 10) { beginPageIndex = 1; endPageIndex = pageCount; } // >> 总页数多于10页,则显示当前页附近的共10个页码 else { // 当前页附近的共10个页码(前4个 + 当前页 + 后5个) beginPageIndex = currentPage - 4; endPageIndex = currentPage + 5; // 当前面的页码不足4个时,则显示前10个页码 if (beginPageIndex < 1) { beginPageIndex = 1; endPageIndex = 10; } // 当后面的页码不足5个时,则显示后10个页码 if (endPageIndex > pageCount) { endPageIndex = pageCount; beginPageIndex = pageCount - 10 + 1; } } } public List<Object> getRecordList() { return recordList; } public void setRecordList(List<Object> recordList) { this.recordList = recordList; } public int getCurrentPage() { return currentPage; } public void setCurrentPage(int currentPage) { this.currentPage = currentPage; } public int getPageCount() { return pageCount; } public void setPageCount(int pageCount) { this.pageCount = pageCount; } public int getNumPerPage() { return numPerPage; } public void setNumPerPage(int numPerPage) { this.numPerPage = numPerPage; } public int getTotalCount() { return totalCount; } public void setTotalCount(int totalCount) { this.totalCount = totalCount; } public int getBeginPageIndex() { return beginPageIndex; } public void setBeginPageIndex(int beginPageIndex) { this.beginPageIndex = beginPageIndex; } public int getEndPageIndex() { return endPageIndex; } public void setEndPageIndex(int endPageIndex) { this.endPageIndex = endPageIndex; } public Map<String, Object> getCountResultMap() { return countResultMap; } public void 
setCountResultMap(Map<String, Object> countResultMap) { this.countResultMap = countResultMap; } }
apache-2.0
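A small sketch showing how the PageBean constructor in the record above derives the page count and the ten-page index window; the empty record list is a placeholder for rows that would normally come from a query.

import java.util.ArrayList;
import java.util.List;

import wusc.edu.demo.common.page.PageBean;

public class PageBeanDemo {

    public static void main(String[] args) {
        List<Object> records = new ArrayList<Object>(); // page data would normally come from a DAO

        // currentPage = 12, 10 rows per page, 155 rows in total.
        PageBean page = new PageBean(12, 10, 155, records);

        System.out.println(page.getPageCount());       // 16 pages in total
        System.out.println(page.getBeginPageIndex());  // 7  (window shifted so the last 10 pages are shown)
        System.out.println(page.getEndPageIndex());    // 16
    }
}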
SAP/sap_mobile_platform_windows
E2ETraceWPFSolution/RKT_WPF_E2ETracing/Properties/Resources.Designer.cs
2801
//------------------------------------------------------------------------------ // <auto-generated> // This code was generated by a tool. // Runtime Version:4.0.30319.34003 // // Changes to this file may cause incorrect behavior and will be lost if // the code is regenerated. // </auto-generated> //------------------------------------------------------------------------------ namespace RKT_WPF_E2ETracing.Properties { using System; /// <summary> /// A strongly-typed resource class, for looking up localized strings, etc. /// </summary> // This class was auto-generated by the StronglyTypedResourceBuilder // class via a tool like ResGen or Visual Studio. // To add or remove a member, edit your .ResX file then rerun ResGen // with the /str option, or rebuild your VS project. [global::System.CodeDom.Compiler.GeneratedCodeAttribute("System.Resources.Tools.StronglyTypedResourceBuilder", "4.0.0.0")] [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] [global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()] internal class Resources { private static global::System.Resources.ResourceManager resourceMan; private static global::System.Globalization.CultureInfo resourceCulture; [global::System.Diagnostics.CodeAnalysis.SuppressMessageAttribute("Microsoft.Performance", "CA1811:AvoidUncalledPrivateCode")] internal Resources() { } /// <summary> /// Returns the cached ResourceManager instance used by this class. /// </summary> [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)] internal static global::System.Resources.ResourceManager ResourceManager { get { if (object.ReferenceEquals(resourceMan, null)) { global::System.Resources.ResourceManager temp = new global::System.Resources.ResourceManager("RKT_WPF_E2ETracing.Properties.Resources", typeof(Resources).Assembly); resourceMan = temp; } return resourceMan; } } /// <summary> /// Overrides the current thread's CurrentUICulture property for all /// resource lookups using this strongly typed resource class. /// </summary> [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)] internal static global::System.Globalization.CultureInfo Culture { get { return resourceCulture; } set { resourceCulture = value; } } } }
apache-2.0
gdl-lang/gdl2
src/main/java/org/gdl2/expression/OrdinalConstant.java
1188
package org.gdl2.expression;

import org.gdl2.datatypes.CodePhrase;
import org.gdl2.datatypes.DvCodedText;
import org.gdl2.datatypes.DvOrdinal;

import java.util.Objects;

public class OrdinalConstant extends ConstantExpression {
    private DvOrdinal ordinal;

    public OrdinalConstant(int order, String value, CodePhrase code) {
        super(order + "|" + code.toString() + "|" + value + "|");
        this.ordinal = new DvOrdinal(order, new DvCodedText(value, code));
    }

    public OrdinalConstant(DvOrdinal ordinal) {
        super(ordinal.toString());
        this.ordinal = ordinal;
    }

    public DvOrdinal getOrdinal() {
        return this.ordinal;
    }

    @Override
    public boolean equals(Object other) {
        if (this == other) {
            return true;
        }
        if (other == null || getClass() != other.getClass()) {
            return false;
        }
        if (!super.equals(other)) {
            return false;
        }
        OrdinalConstant that = (OrdinalConstant) other;
        return Objects.equals(ordinal, that.ordinal);
    }

    @Override
    public int hashCode() {
        return Objects.hash(super.hashCode(), ordinal);
    }
}
apache-2.0
GAIPS-INESC-ID/FAtiMA-Toolkit
AuthoringTools/EmotionalAppraisalWF/AddOrEditAppraisalVariableForm.cs
7273
using AutobiographicMemory; using Conditions.DTOs; using EmotionalAppraisal.DTOs; using EmotionalAppraisal.OCCModel; using EmotionalAppraisalWF.Properties; using EmotionalAppraisalWF.ViewModels; using System; using System.Globalization; using System.Windows.Forms; using WellFormedNames; namespace EmotionalAppraisalWF { public partial class AddOrEditAppraisalVariableForm : Form { private AppraisalRulesVM _appraisalRulesVM; private AppraisalVariableDTO _toEdit; private AppraisalRuleDTO _appraisalRule; public AddOrEditAppraisalVariableForm(AppraisalRulesVM ruleVM, AppraisalRuleDTO appraisalRule , AppraisalVariableDTO variable = null) { InitializeComponent(); _appraisalRulesVM = ruleVM; _toEdit = variable; _appraisalRule = appraisalRule; //validationRules appraisalVariableValueTextBox.AllowNil = false; appraisalVariableValueTextBox.AllowComposedName = false; appraisalVariableValueTextBox.AllowUniversal = false; //defaultValues appraisalVariableName.Items.Add(OCCAppraisalVariables.DESIRABILITY); appraisalVariableName.Items.Add(OCCAppraisalVariables.DESIRABILITY_FOR_OTHER); appraisalVariableName.Items.Add(OCCAppraisalVariables.PRAISEWORTHINESS); appraisalVariableName.Items.Add(OCCAppraisalVariables.GOALSUCCESSPROBABILITY); appraisalVariableName.Items.Add(OCCAppraisalVariables.LIKE); appraisalVariableName.SelectedItem = OCCAppraisalVariables.DESIRABILITY; appraisalVariableTarget.Enabled = false; appraisalVariableValueTextBox.Value = (Name)"0"; if (_toEdit.Name != null) { this.addOrEditButton.Text = Resources.UpdateButtonLabel; appraisalVariableValueTextBox.Value = _toEdit.Value; appraisalVariableTarget.Value = _toEdit.Target; appraisalVariableName.SelectedIndex = appraisalVariableName.Items.IndexOf(_toEdit.Name); } } private void addOrEditButton_Click_1(object sender, EventArgs e) { bool noDesirability = false; if (appraisalVariableName.SelectedItem.ToString() == OCCAppraisalVariables.GOALSUCCESSPROBABILITY) { float value = 0.0f; if (float.TryParse(appraisalVariableValueTextBox.Value.ToString(), NumberStyles.Any, CultureInfo.InvariantCulture, out value)) { if (value < -1 || value > 1) { MessageBox.Show("Goal Value must be between -1 and 1", Resources.ErrorDialogTitle, MessageBoxButtons.OK, MessageBoxIcon.Error); return; } } else { var name = WellFormedNames.Name.BuildName(appraisalVariableValueTextBox.Value.ToString()); if (!name.IsVariable) { MessageBox.Show("Goal Value must either be a variable or a float number", Resources.ErrorDialogTitle, MessageBoxButtons.OK, MessageBoxIcon.Error); return; } } } AppraisalVariableDTO newVar = new AppraisalVariableDTO(); try { newVar = new AppraisalVariableDTO() { Name = appraisalVariableName.SelectedItem.ToString(), Target = appraisalVariableTarget.Value, Value = appraisalVariableValueTextBox.Value }; if(newVar.Name == OCCAppraisalVariables.DESIRABILITY_FOR_OTHER && _appraisalRule.AppraisalVariables.appraisalVariables.Find(x => x.Name == OCCAppraisalVariables.DESIRABILITY) == null){ noDesirability = true; var desirability = new AppraisalVariableDTO() { Name = OCCAppraisalVariables.DESIRABILITY, Target = (Name)"-", Value = (Name)"2" }; _appraisalRule.AppraisalVariables.appraisalVariables.Add(desirability); } if(_appraisalRule.AppraisalVariables.appraisalVariables.Find(x=>x.Name == newVar.Name) != null) { _appraisalRule.AppraisalVariables.appraisalVariables.Find(x=>x.Name == newVar.Name).Value = newVar.Value; _appraisalRule.AppraisalVariables.appraisalVariables.Find(x=>x.Name == newVar.Name).Target = newVar.Target; } else { 
_appraisalRule.AppraisalVariables.appraisalVariables.Add(newVar); } _appraisalRulesVM.AddOrUpdateAppraisalRule(_appraisalRule); if (noDesirability) MessageBox.Show("According to the OCC Model, the \"Desirability For Others \" appraisal variable requires another Desirability concerning the consequences of the event for agent to also be present in the same rule. \n We have automatically added it, change its values at your will"); Close(); } catch (Exception ex) { MessageBox.Show(ex.Message, Resources.ErrorDialogTitle, MessageBoxButtons.OK, MessageBoxIcon.Error); } } private void comboBoxEventType_SelectedIndexChanged(object sender, EventArgs e) { if (appraisalVariableName.SelectedItem.ToString() == OCCAppraisalVariables.DESIRABILITY_FOR_OTHER || appraisalVariableName.SelectedItem.ToString() == OCCAppraisalVariables.PRAISEWORTHINESS) { appraisalVariableTarget.Enabled = true; labelTarget.Text = "Target"; } else if (appraisalVariableName.SelectedItem.ToString() == OCCAppraisalVariables.GOALSUCCESSPROBABILITY) { appraisalVariableTarget.Enabled = true; labelTarget.Text = "Goal Name"; valueLabel.Text = "Value added to Likelihood (-1:1)"; } else appraisalVariableTarget.Enabled = false; } private void textBoxSubject_TextChanged(object sender, EventArgs e) { } private void label4_Click(object sender, EventArgs e) { } private void labelObject_Click(object sender, EventArgs e) { } private void AddOrEditAppraisalRuleForm_Load(object sender, EventArgs e) { } private void AddOrEditAppraisalRuleForm_KeyDown(object sender, KeyEventArgs e) { if (e.KeyCode == Keys.Escape) { this.Close(); } } private void appraisalVariableValueTextBox_TextChanged(object sender, EventArgs e) { } } }
apache-2.0
sekiguchi-nagisa/Aquarius
aquarius-core/src/main/java/aquarius/Token.java
1564
/*
 * Copyright (C) 2014-2015 Nagisa Sekiguchi
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package aquarius;

/**
 * Represents a token.
 *
 * @author skgchxngsxyz-opensuse
 */
public class Token {
    protected int startPos;
    protected int size;

    public Token(int startPos, int size) {
        this.startPos = startPos;
        this.size = size;
    }

    /**
     * Get the token start position (inclusive).
     *
     * @return
     */
    public int getStartPos() {
        return this.startPos;
    }

    /**
     * Get the token text size.
     *
     * @return
     */
    public int getSize() {
        return this.size;
    }

    @Override
    public String toString() {
        return "token<" + this.startPos + ":" + this.size + ">";
    }

    @Override
    public boolean equals(Object target) {
        if (target instanceof Token) {
            Token token = (Token) target;
            return this.getStartPos() == token.getStartPos() && this.getSize() == token.getSize();
        }
        return false;
    }
}
apache-2.0
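A minimal sketch exercising the Token value semantics defined above (equality by start position and size); the demo class name is illustrative.

import aquarius.Token;

public class TokenDemo {

    public static void main(String[] args) {
        Token a = new Token(5, 3);  // token starting at offset 5, 3 characters long
        Token b = new Token(5, 3);
        Token c = new Token(8, 2);

        System.out.println(a);            // token<5:3>
        System.out.println(a.equals(b));  // true, same start position and size
        System.out.println(a.equals(c));  // false
    }
}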
nate-rcl/irplus
ir_web/src/edu/ur/ir/web/action/ResponseBufferedOutputWriter.java
3314
/** Copyright 2008 University of Rochester Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package edu.ur.ir.web.action; import java.io.BufferedInputStream; import java.io.BufferedOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; /** * Class to write data out to the stream * * @author Nathan Sarr * */ public class ResponseBufferedOutputWriter { /** Logger for class */ private static final Logger log = LogManager.getLogger(ResponseBufferedOutputWriter.class); /** * Writes the input stream to the given output stream. Both streams are wrpaped * in buffered streams. * * @param is - input stream to read from. * @param os - output stream to write to. * @param bufferSize - size of buffer to create. * @throws Exception * @throws IOException * * @throws Exception */ public void writeStream(InputStream is, OutputStream os, int bufferSize) throws Exception { BufferedInputStream input = new BufferedInputStream(is); BufferedOutputStream output = new BufferedOutputStream(os); Exception originalException = null; byte[] buffer = new byte[bufferSize]; try { int count = 0; while (count != -1) { count = input.read(buffer, 0, bufferSize); // write out those same bytes if( count > 0 ) { output.write(buffer, 0, count); } } } catch(Exception e) { // this will only happen in a tomcat contanier - it is when // the user selects cancel on the download window. log.error("Exception name is : " + e.getClass().getName()); if( e.getClass().getName().equals("org.apache.catalina.connector.ClientAbortException")) { log.error("client abort exception - this should be ok ",e); } else { originalException = e; } } finally { try { if( output != null) { output.flush(); } } catch (Exception e) { log.error(e); } try { if( input != null) { input.close(); } } catch (Exception e) { log.error(e); } try { if( output != null) { output.close(); } } catch (Exception e) { log.error(e); } buffer = null; output = null; input = null; } if( originalException != null) { throw(originalException); } } }
apache-2.0
jarvisji/Demo-Java-RestService
src/main/java/net/freecoder/restdemo/model/ReferenceType.java
947
/**
 *
 */
package net.freecoder.restdemo.model;

/**
 * A keyword reply is different from an auto reply or a follow reply: it can refer not only to
 * material, but also to other modules, for example an article, a weather report function, etc.
 * So it has "ref_id" and "ref_type" rather than "material_id" and "material_type".
 *
 * This enum defines all the meta-types that a keyword can reference.<br>
 * Notice: if a keyword refers to a material, its 'ref_type' should be a combination of
 * KeywordRefType_MaterialType.
 *
 * @author JiTing
 */
public enum ReferenceType {
	MATERIAL("material"), MODULE("module");

	private final String value;

	private ReferenceType(String value) {
		this.value = value;
	}

	public String value() {
		return this.value;
	}

	static public ReferenceType parseValue(String value) {
		String _value = value.split("_").length == 2 ? value.split("_")[0] : value;
		return ReferenceType.valueOf(_value.toUpperCase());
	}
}
apache-2.0
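A brief sketch of ReferenceType.parseValue as defined above; the compound "material_article" value is a made-up example of the KeywordRefType_MaterialType combination the javadoc mentions.

import net.freecoder.restdemo.model.ReferenceType;

public class ReferenceTypeDemo {

    public static void main(String[] args) {
        // A plain ref_type maps directly onto the enum constant.
        System.out.println(ReferenceType.parseValue("module"));           // MODULE

        // A combined value keeps only the part before the underscore.
        System.out.println(ReferenceType.parseValue("material_article")); // MATERIAL

        System.out.println(ReferenceType.MATERIAL.value());               // material
    }
}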
emmartins/wildfly-server-migration
migrations/wildfly22.0/wildfly8.2/src/main/java/org/jboss/migration/wfly/WildFly8ToWildFly22_0UpdateUndertowSubsystem.java
1865
/*
 * Copyright 2021 Red Hat, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jboss.migration.wfly;

import org.jboss.migration.core.jboss.JBossSubsystemNames;
import org.jboss.migration.wfly10.config.task.management.subsystem.UpdateSubsystemResources;
import org.jboss.migration.wfly10.config.task.subsystem.undertow.AddHttpsListener;
import org.jboss.migration.wfly10.config.task.subsystem.undertow.EnableHttp2;
import org.jboss.migration.wfly10.config.task.subsystem.undertow.SetDefaultHttpListenerRedirectSocket;
import org.jboss.migration.wfly11.task.subsystem.undertow.AddHttpInvoker;
import org.jboss.migration.wfly13.task.subsystem.undertow.UnsetDefaultHostResponseHeaderServer;
import org.jboss.migration.wfly13.task.subsystem.undertow.UnsetDefaultHostResponseHeaderXPoweredBy;

/**
 * @author emmartins
 */
public class WildFly8ToWildFly22_0UpdateUndertowSubsystem<S> extends UpdateSubsystemResources<S> {

    public WildFly8ToWildFly22_0UpdateUndertowSubsystem() {
        super(JBossSubsystemNames.UNDERTOW,
                new SetDefaultHttpListenerRedirectSocket<>(),
                new AddHttpsListener<>(),
                new EnableHttp2<>(),
                new UnsetDefaultHostResponseHeaderServer<>(),
                new UnsetDefaultHostResponseHeaderXPoweredBy<>(),
                new AddHttpInvoker<>());
    }
}
apache-2.0
Talend/ui
packages/containers/src/DeleteResource/actions.test.js
1371
import actions from './actions';
import deleteResourceConst from './constants';

describe('deleteResource actions', () => {
	describe('deleteResource:open', () => {
		beforeEach(() => {
			delete window.location;
			window.location = { pathname: '/test' };
		});

		afterAll(() => {
			window.location = location;
		});

		it('should return an action DIALOG_BOX_DELETE_RESOURCE object', () => {
			// Given
			window.location.pathname = 'currentUrl';
			const model = { id: 'modelId' };
			const data = { model };
			// When
			const result = actions.open({}, data);
			// Then
			expect(result).toEqual({
				type: deleteResourceConst.DIALOG_BOX_DELETE_RESOURCE,
				cmf: {
					routerReplace: 'currentUrl/modelId/delete',
				},
				model,
				redirectUrl: 'currentUrl',
			});
		});
	});

	describe('validate', () => {
		it('should return an action object', () => {
			// Given
			const data = { model: { resourceInfo: { id: 'modelId' } } };
			// When
			const result = actions.validate({}, data);
			// Then
			expect(result).toEqual({
				type: deleteResourceConst.DIALOG_BOX_DELETE_RESOURCE_OK,
				data,
			});
		});
	});

	describe('cancel', () => {
		it('should return an action object', () => {
			// When
			const result = actions.cancel();
			// Then
			expect(result).toEqual({ type: deleteResourceConst.DIALOG_BOX_DELETE_RESOURCE_CANCEL });
		});
	});
});
apache-2.0
jaquadro/MonoGdx
MonoGdx/Utils/SnapshotList.cs
5420
/** * Copyright 2011-2013 See AUTHORS file. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ using System; using System.Collections; using System.Collections.Generic; using System.Collections.ObjectModel; using System.Linq; using System.Text; using System.Threading.Tasks; namespace MonoGdx.Utils { public class SnapshotList<T> : Collection<T> { private IList<T> _snapshot; private IList<T> _recycled; private int _snapshots; public SnapshotList () : base(new ProxyList<T>()) { } public SnapshotList (IList<T> list) : base(new ProxyList<T>(new List<T>(list))) { } public SnapshotList (int capacity) : base(new ProxyList<T>(new List<T>(capacity))) { } private new ProxyList<T> Items { get { return base.Items as ProxyList<T>; } } public IList<T> Begin () { Modified(); _snapshot = Items.InnerList; _snapshots++; return _snapshot; } public void End () { _snapshots = Math.Max(0, _snapshots - 1); if (_snapshot == null) return; // The backing array was copied, keep around the old array if (_snapshot != Items.InnerList && _snapshots == 0) { _recycled = _snapshot; _recycled.Clear(); //for (int i = 0, n = _recycled.Count; i < n; i++) // _recycled[i] = default(T); } _snapshot = null; } private void Modified () { if (_snapshot == null || _snapshot != Items.InnerList) return; // Snapshot is in use, copy backing array to recycled array or create new backing array if (_recycled != null) { for (int i = 0; i < Count; i++) _recycled.Add(Items[i]); Items.InnerList = _recycled; _recycled = null; } else Resize(Items.Count); } private void Resize (int newSize) { IList<T> oldList = Items.InnerList; List<T> newList = new List<T>(newSize); for (int i = 0, n = oldList.Count; i < n; i++) newList.Add(oldList[i]); Items.InnerList = newList; } protected override void InsertItem (int index, T item) { Modified(); base.InsertItem(index, item); } protected override void SetItem (int index, T item) { Modified(); base.SetItem(index, item); } protected override void RemoveItem (int index) { Modified(); base.RemoveItem(index); } protected override void ClearItems () { Modified(); base.ClearItems(); } private class ProxyList<K> : IList<K> { public IList<K> InnerList { get; set; } public ProxyList () { InnerList = new List<K>(); } public ProxyList (IList<K> list) { InnerList = list; } public int IndexOf (K item) { return InnerList.IndexOf(item); } public void Insert (int index, K item) { InnerList.Insert(index, item); } public void RemoveAt (int index) { InnerList.RemoveAt(index); } public K this[int index] { get { return InnerList[index]; } set { InnerList[index] = value; } } public void Add (K item) { InnerList.Add(item); } public void Clear () { InnerList.Clear(); } public bool Contains (K item) { return InnerList.Contains(item); } public void CopyTo (K[] array, int arrayIndex) { InnerList.CopyTo(array, arrayIndex); } public int Count { get { return InnerList.Count; } } public bool IsReadOnly { get { return InnerList.IsReadOnly; } } public bool Remove (K item) { return InnerList.Remove(item); } public IEnumerator<K> GetEnumerator () { return 
InnerList.GetEnumerator(); } IEnumerator IEnumerable.GetEnumerator () { return InnerList.GetEnumerator(); } } } }
apache-2.0
RobMcZag/AlgoritmiGenetici
HelloWorld/src/main/java/com/rzagni/salesman/Tour.java
1306
package com.rzagni.salesman;

import java.util.Arrays;
import java.util.List;

/**
 * The class representing a potential solution to visit all the cities in the destination list.
 * @author Roberto
 *
 */
public class Tour {

	private final City[] tour;

	public Tour(int length) {
		this.tour = new City[length];
	}

	public Tour(List<City> destinations) {
		this(destinations.size());
		int index = 0;
		for (City city : destinations) {
			putCity(index++, city);
		}
	}

	public List<City> getDestinations() {
		return Arrays.asList(tour);
	}

	public void putCity(int index, City city) {
		tour[index] = city;
	}

	public City getCity(int index) {
		return tour[index];
	}

	public int numberOfCities() {
		return tour.length;
	}

	public double getTourDistance() {
		double distance = 0;
		for (int i = 0; i < tour.length - 1; i++) {
			distance += tour[i].distance(tour[i + 1]);
		}
		return distance;
	}

	/* (non-Javadoc)
	 * @see java.lang.Object#toString()
	 */
	@Override
	public String toString() {
		StringBuilder sb = new StringBuilder();
		sb.append("Tour has " + numberOfCities() + " cities: ");
		for (City city : tour) {
			sb.append("(" + city.toString() + ") ");
		}
		sb.append(".");
		return sb.toString();
	}
}
apache-2.0
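A hedged usage sketch for the Tour class above. The City constructor and coordinates used here are assumptions, since the City class itself is not part of this record and its real signature may differ.

import java.util.Arrays;
import java.util.List;

import com.rzagni.salesman.City;
import com.rzagni.salesman.Tour;

public class TourDemo {

    public static void main(String[] args) {
        // Hypothetical City(x, y) constructor; adjust to the real City API.
        List<City> destinations = Arrays.asList(
                new City(0, 0),
                new City(3, 4),
                new City(6, 8));

        Tour tour = new Tour(destinations);

        System.out.println(tour.numberOfCities());   // 3
        System.out.println(tour.getTourDistance());  // 10.0 if City.distance() is Euclidean
        System.out.println(tour);                    // lists every city in visiting order
    }
}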
slspeek/camlistore_tls_test
pkg/types/serverconfig/config.go
6676
/* Copyright 2014 The Camlistore Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ // Package serverconfig provides types related to the server configuration file. package serverconfig import ( "camlistore.org/pkg/types" ) // Config holds the values from the JSON (high-level) server config // file that is exposed to users (and is by default at // osutil.UserServerConfigPath). From this simpler configuration, a // complete, low-level one, is generated by // serverinit.genLowLevelConfig, and used to configure the various // Camlistore components. type Config struct { Auth string `json:"auth"` // auth scheme and values (ex: userpass:foo:bar). BaseURL string `json:"baseURL,omitempty"` // Base URL the server advertizes. For when behind a proxy. Listen string `json:"listen"` // address (of the form host|ip:port) on which the server will listen on. Identity string `json:"identity"` // GPG identity. IdentitySecretRing string `json:"identitySecretRing"` // path to the secret ring file. // alternative source tree, to override the embedded ui and/or closure resources. // If non empty, the ui files will be expected at // sourceRoot + "/server/camlistored/ui" and the closure library at // sourceRoot + "/third_party/closure/lib" // Also used by the publish handler. SourceRoot string `json:"sourceRoot,omitempty"` OwnerName string `json:"ownerName,omitempty"` // Blob storage. MemoryStorage bool `json:"memoryStorage,omitempty"` // do not store anything (blobs or queues) on localdisk, use memory instead. BlobPath string `json:"blobPath,omitempty"` // path to the directory containing the blobs. PackBlobs bool `json:"packBlobs,omitempty"` // use diskpacked instead of the default filestorage. S3 string `json:"s3,omitempty"` // Amazon S3 credentials: access_key_id:secret_access_key:bucket[:hostname]. GoogleCloudStorage string `json:"googlecloudstorage,omitempty"` // Google Cloud credentials: clientId:clientSecret:refreshToken:bucket or ":bucket" for auto on GCE GoogleDrive string `json:"googledrive,omitempty"` // Google Drive credentials: clientId:clientSecret:refreshToken:parentId. ShareHandler bool `json:"shareHandler,omitempty"` // enable the share handler. If true, and shareHandlerPath is empty then shareHandlerPath will default to "/share/" when generating the low-level config. ShareHandlerPath string `json:"shareHandlerPath,omitempty"` // URL prefix for the share handler. If set, overrides shareHandler. // HTTPS. HTTPS bool `json:"https,omitempty"` // enable HTTPS. HTTPSCert string `json:"httpsCert,omitempty"` // path to the HTTPS certificate file. HTTPSKey string `json:"httpsKey,omitempty"` // path to the HTTPS key file. // Index. RunIndex types.InvertedBool `json:"runIndex,omitempty"` // if logically false: no search, no UI, etc. CopyIndexToMemory types.InvertedBool `json:"copyIndexToMemory,omitempty"` // copy disk-based index to memory on start-up. MemoryIndex bool `json:"memoryIndex,omitempty"` // use memory-only indexer. DBName string `json:"dbname,omitempty"` // name of the database for mysql, postgres, mongo. 
LevelDB string `json:"levelDB,omitempty"` // path to the levelDB file, for indexing with github.com/syndtr/goleveldb. KVFile string `json:"kvIndexFile,omitempty"` // path to the kv file, for indexing with github.com/cznic/kv. MySQL string `json:"mysql,omitempty"` // MySQL credentials (username@host:password), for indexing with MySQL. Mongo string `json:"mongo,omitempty"` // MongoDB credentials ([username:password@]host), for indexing with MongoDB. PostgreSQL string `json:"postgres,omitempty"` // PostgreSQL credentials (username@host:password), for indexing with PostgreSQL. SQLite string `json:"sqlite,omitempty"` // path to the SQLite file, for indexing with SQLite. // DBNames lists which database names to use for various types of key/value stores. The keys may be: // "index" (overrides 'dbname' key above) // "queue-sync-to-index" (the sync queue to index things) // "queue-sync-to-s3" (the sync queue to replicate to s3) DBNames map[string]string `json:"dbNames"` ReplicateTo []interface{} `json:"replicateTo,omitempty"` // NOOP for now. // Publish maps a URL prefix path used as a root for published paths (a.k.a. a camliRoot path), to the configuration of the publish handler that serves all the published paths under this root. Publish map[string]*Publish `json:"publish,omitempty"` // TODO(mpl): map of importers instead? Flickr string `json:"flickr,omitempty"` // flicker importer. Picasa string `json:"picasa,omitempty"` // picasa importer. } // Publish holds the server configuration values specific to a publisher, i.e. to a publish prefix. type Publish struct { // Program is the server app program to run as the publisher. // Defaults to "publisher". Program string `json:"program"` // CamliRoot value that defines our root permanode for this // publisher. The root permanode is used as the root for all the // paths served by this publisher. CamliRoot string `json:"camliRoot"` // Base URL the app will run at. BaseURL string `json:"baseURL,omitempty"` // GoTemplate is the name of the Go template file used by this // publisher to represent the data. This file should live in // app/publisher/. GoTemplate string `json:"goTemplate"` // CacheRoot is the path that will be used as the root for the // caching blobserver (for images). No caching if empty. // An example value is Config.BlobPath + "/cache". CacheRoot string `json:"cacheRoot,omitempty"` HTTPSCert string `json:"httpsCert,omitempty"` // path to the HTTPS certificate file. HTTPSKey string `json:"httpsKey,omitempty"` // path to the HTTPS key file. }
apache-2.0
ufcpp/UfcppSample
Chapters/Old/spctrum/Graph/Graph.cs
9439
using System; using System.Collections; using System.ComponentModel; using System.Drawing; using System.Data; using System.Windows.Forms; namespace Graph { /// <summary> /// グラフを表示するコントロールクラス。 /// </summary> public class Graph : System.Windows.Forms.UserControl { private System.Windows.Forms.Panel plotArea; /// <summary> /// 必要なデザイナ変数です。 /// </summary> private System.ComponentModel.Container components = null; #region 手動更新用領域 /// <summary> /// グラフの項目。 /// </summary> public class Entry { public double[] x; // x 方向データ public double[] y; // y 方向データ public Pen pen; // グラフ描写用のペン public Entry(double[] x, double[] y, Pen pen) { this.x = x; this.y = y; this.pen = pen; } public bool IsInvalid() { return this.x == null || this.y == null || this.x.Length != this.y.Length || this.pen == null; } } /// <summary> /// 軸の設定。 /// </summary> public class AxisSetting { public double min; // 最小値 public double max; // 最大値 public int split; // 分割数 public Font font; // フォント public Brush brush; // ブラシ public AxisSetting() : this(0, 0, 0, null, null){} public AxisSetting(double min, double max, int split, Font font, Brush brush) { this.min = min; this.max = max; this.split = split; this.font = font; this.brush = brush; } public bool IsInvalid() { return this.min >= this.max || this.split == 0 || this.font == null || this.brush == null; } } private System.Windows.Forms.Panel xAxis; private System.Windows.Forms.Panel yAxis; ArrayList entries = new ArrayList(); // y 軸データ AxisSetting xAxisSetting = new AxisSetting(); AxisSetting yAxisSetting = new AxisSetting(); public void AddEntry(double[] x, double[] y, Pen pen) { if(x.Length != y.Length) return; this.entries.Add(new Entry(x, y, pen)); } public void SetXAxis(double min, double max, int split, Font font, Brush brush) { this.xAxisSetting = new AxisSetting(min, max, split, font, brush); } public void SetYAxis(double min, double max, int split, Font font, Brush brush) { this.yAxisSetting = new AxisSetting(min, max, split, font, brush); } public double XMin { set{this.xAxisSetting.min = value;} get{return this.xAxisSetting.min;} } public double XMax { set{this.xAxisSetting.max = value;} get{return this.xAxisSetting.max;} } public double YMin { set{this.yAxisSetting.min = value;} get{return this.yAxisSetting.min;} } public double YMax { set{this.yAxisSetting.max = value;} get{return this.yAxisSetting.max;} } public void AutoScale() { double xMin = double.MaxValue; double xMax = double.MinValue; double yMin = double.MaxValue; double yMax = double.MinValue; foreach(Entry entry in this.entries) { if(entry.IsInvalid()) continue; MaxMinValue(entry.x, ref xMin, ref xMax); MaxMinValue(entry.y, ref yMin, ref yMax); } int xn = this.xAxisSetting.split; int yn = this.yAxisSetting.split; this.XMin = Math.Floor(xMin / xn) * xn; this.XMax = Math.Ceiling(xMax / xn) * xn; this.YMin = Math.Floor(yMin / yn) * yn; this.YMax = Math.Ceiling(yMax / yn) * yn; } public void AutoScaleX() { double xMin = double.MaxValue; double xMax = double.MinValue; foreach(Entry entry in this.entries) { if(entry.IsInvalid()) continue; MaxMinValue(entry.x, ref xMin, ref xMax); } int xn = this.xAxisSetting.split; this.XMin = Math.Floor(xMin / xn) * xn; this.XMax = Math.Ceiling(xMax / xn) * xn; } public void AutoScaleY() { double yMin = double.MaxValue; double yMax = double.MinValue; foreach(Entry entry in this.entries) { if(entry.IsInvalid()) continue; MaxMinValue(entry.y, ref yMin, ref yMax); } int yn = this.yAxisSetting.split; this.YMin = Math.Floor(yMin / yn) * yn; this.YMax = Math.Ceiling(yMax / yn) * yn; } 
	void MaxMinValue(double[] array, ref double min, ref double max)
		{
			foreach(double val in array)
			{
				if(min > val) min = val;
				if(max < val) max = val;
			}
		}

		protected virtual void DrawGraph(Graphics g, Entry entry)
		{
			if(entry.IsInvalid()) return;

			double[] x = entry.x;
			double[] y = entry.y;
			Pen pen = entry.pen;

			int n = x.Length;
			PointF[] points = new PointF[n];

			double width = this.XMax - this.XMin;
			if(width == 0) width = 1;
			double height = this.YMax - this.YMin;
			if(height == 0) height = 1;

			for(int i=0; i<n; ++i)
			{
				points[i].X = (float)((x[i] - this.XMin) / width ) * this.plotArea.Width;
				points[i].Y = (float)((this.YMax - y[i]) / height) * this.plotArea.Height;
			}

			g.DrawLines(pen, points);
		}
		#endregion

		public Graph()
		{
			InitializeComponent();
		}

		/// <summary>
		/// Clean up any resources being used.
		/// </summary>
		protected override void Dispose( bool disposing )
		{
			if( disposing )
			{
				if(components != null)
				{
					components.Dispose();
				}
			}
			base.Dispose( disposing );
		}

		#region Component Designer generated code
		/// <summary>
		/// Required method for Designer support - do not modify
		/// the contents of this method with the code editor.
		/// </summary>
		private void InitializeComponent()
		{
			this.plotArea = new System.Windows.Forms.Panel();
			this.xAxis = new System.Windows.Forms.Panel();
			this.yAxis = new System.Windows.Forms.Panel();
			this.SuspendLayout();
			//
			// plotArea
			//
			this.plotArea.Anchor = (((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Bottom)
				| System.Windows.Forms.AnchorStyles.Left)
				| System.Windows.Forms.AnchorStyles.Right);
			this.plotArea.BackColor = System.Drawing.SystemColors.Window;
			this.plotArea.Location = new System.Drawing.Point(48, 8);
			this.plotArea.Name = "plotArea";
			this.plotArea.Size = new System.Drawing.Size(424, 320);
			this.plotArea.TabIndex = 0;
			this.plotArea.Resize += new System.EventHandler(this.plotArea_Resize);
			this.plotArea.Paint += new System.Windows.Forms.PaintEventHandler(this.plotArea_Paint);
			//
			// xAxis
			//
			this.xAxis.Anchor = ((System.Windows.Forms.AnchorStyles.Bottom | System.Windows.Forms.AnchorStyles.Left)
				| System.Windows.Forms.AnchorStyles.Right);
			this.xAxis.Location = new System.Drawing.Point(48, 328);
			this.xAxis.Name = "xAxis";
			this.xAxis.Size = new System.Drawing.Size(424, 24);
			this.xAxis.TabIndex = 1;
			this.xAxis.Resize += new System.EventHandler(this.xAxis_Resize);
			this.xAxis.Paint += new System.Windows.Forms.PaintEventHandler(this.xAxis_Paint);
			//
			// yAxis
			//
			this.yAxis.Anchor = ((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Bottom)
				| System.Windows.Forms.AnchorStyles.Left);
			this.yAxis.Location = new System.Drawing.Point(8, 8);
			this.yAxis.Name = "yAxis";
			this.yAxis.Size = new System.Drawing.Size(40, 320);
			this.yAxis.TabIndex = 2;
			this.yAxis.Resize += new System.EventHandler(this.yAxis_Resize);
			this.yAxis.Paint += new System.Windows.Forms.PaintEventHandler(this.yAxis_Paint);
			//
			// Graph
			//
			this.BackColor = System.Drawing.SystemColors.Window;
			this.Controls.AddRange(new System.Windows.Forms.Control[] {
										this.xAxis,
										this.plotArea,
										this.yAxis});
			this.Name = "Graph";
			this.Size = new System.Drawing.Size(480, 360);
			this.ResumeLayout(false);
		}
		#endregion

		private void plotArea_Paint(object sender, System.Windows.Forms.PaintEventArgs e)
		{
			foreach(Entry entry in this.entries)
			{
				DrawGraph(e.Graphics, entry);
			}
		}

		private void plotArea_Resize(object sender, System.EventArgs e)
		{
			this.plotArea.Refresh();
		}

		private void xAxis_Paint(object sender, System.Windows.Forms.PaintEventArgs e)
		{
			if(this.xAxisSetting.IsInvalid()) return;

			double min = this.xAxisSetting.min;
			double max = this.xAxisSetting.max;
			int n = this.xAxisSetting.split;
			Font font = this.xAxisSetting.font;
			Brush brush = this.xAxisSetting.brush;

			for(int i=0; i<n; ++i)
			{
				double val = (max - min) / n * i + min;
				string str = val.ToString();
				float x = (float)(this.xAxis.Width / n * i);
				e.Graphics.DrawString(str, font, brush, x, 0);
			}
		}

		private void xAxis_Resize(object sender, System.EventArgs e)
		{
			this.xAxis.Refresh();
		}

		private void yAxis_Paint(object sender, System.Windows.Forms.PaintEventArgs e)
		{
			if(this.yAxisSetting.IsInvalid()) return;

			double min = this.yAxisSetting.min;
			double max = this.yAxisSetting.max;
			int n = this.yAxisSetting.split;
			Font font = this.yAxisSetting.font;
			Brush brush = this.yAxisSetting.brush;

			for(int i=0; i<n; ++i)
			{
				double val = (max - min) / n * i + min;
				string str = val.ToString();
				float y = (float)(this.yAxis.Height / n * (n - i)) - (font.Size + 5);
				e.Graphics.DrawString(str, font, brush, 0, y);
			}
		}

		private void yAxis_Resize(object sender, System.EventArgs e)
		{
			this.yAxis.Refresh();
		}
	}
}
apache-2.0
phatboyg/Machete
src/Machete.X12/Parsers/ParsedX12Settings.cs
1496
namespace Machete.X12.Parsers
{
    using System;
    using TextParsers;


    public class ParsedX12Settings :
        X12ParserSettings
    {
        public char ElementSeparator { get; set; }
        public char SubElementSeparator { get; set; }
        public char SegmentSeparator { get; set; }
        public char RepetitionSeparator { get; set; }

        public ITextParser ElementParser => _elementParser.Value;
        public ITextParser SubElementParser => _subElementParser.Value;
        public ITextParser SegmentParser => _segmentParser.Value;
        public ITextParser RepetitionParser => _repetitionParser.Value;
        public ITextParser TextParser => _textParser.Value;

        readonly Lazy<ITextParser> _elementParser;
        readonly Lazy<ITextParser> _subElementParser;
        readonly Lazy<ITextParser> _segmentParser;
        readonly Lazy<ITextParser> _repetitionParser;
        readonly Lazy<ITextParser> _textParser;

        public ParsedX12Settings()
        {
            _elementParser = new Lazy<ITextParser>(() => new SeparatorParser(ElementSeparator));
            _subElementParser = new Lazy<ITextParser>(() => new SeparatorParser(SubElementSeparator));
            _segmentParser = new Lazy<ITextParser>(() => new SeparatorParser(SegmentSeparator));
            _repetitionParser = new Lazy<ITextParser>(() => new SeparatorParser(RepetitionSeparator));
            _textParser = new Lazy<ITextParser>(() => new X12TextParser(this));
        }
    }
}
apache-2.0
shahramgdz/hibernate-validator
engine/src/test/java/org/hibernate/validator/test/constraints/ObjectConstraintValidator.java
680
/*
 * Hibernate Validator, declare and validate application constraints
 *
 * License: Apache License, Version 2.0
 * See the license.txt file in the root directory or <http://www.apache.org/licenses/LICENSE-2.0>.
 */
package org.hibernate.validator.test.constraints;

import javax.validation.ConstraintValidator;
import javax.validation.ConstraintValidatorContext;

/**
 * @author Hardy Ferentschik
 */
public class ObjectConstraintValidator implements ConstraintValidator<org.hibernate.validator.test.constraints.Object, java.lang.Object> {

	@Override
	public boolean isValid(java.lang.Object value, ConstraintValidatorContext constraintValidatorContext) {
		return true;
	}
}
apache-2.0
dengxiangyu768/dengxytools
harbor/harbor/src/ui/static/resources/js/layout/details/details.module.js
801
/*
    Copyright (c) 2016 VMware, Inc. All Rights Reserved.
    Licensed under the Apache License, Version 2.0 (the "License");
    you may not use this file except in compliance with the License.
    You may obtain a copy of the License at

        http://www.apache.org/licenses/LICENSE-2.0

    Unless required by applicable law or agreed to in writing, software
    distributed under the License is distributed on an "AS IS" BASIS,
    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    See the License for the specific language governing permissions and
    limitations under the License.
*/
(function() {

  'use strict';

  angular
    .module('harbor.details', [
      'harbor.services.project',
      'harbor.services.project.member'
    ]);

})();
apache-2.0
mhausenblas/burry.sh
vendor/github.com/minio/minio-go/core_test.go
20809
/* * Minio Go Library for Amazon S3 Compatible Cloud Storage * Copyright 2017 Minio, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package minio import ( "bytes" "io" "log" "os" "testing" "time" "math/rand" ) const ( serverEndpoint = "SERVER_ENDPOINT" accessKey = "ACCESS_KEY" secretKey = "SECRET_KEY" enableSecurity = "ENABLE_HTTPS" ) // Minimum part size const MinPartSize = 1024 * 1024 * 64 const letterBytes = "abcdefghijklmnopqrstuvwxyz01234569" const ( letterIdxBits = 6 // 6 bits to represent a letter index letterIdxMask = 1<<letterIdxBits - 1 // All 1-bits, as many as letterIdxBits letterIdxMax = 63 / letterIdxBits // # of letter indices fitting in 63 bits ) // randString generates random names and prepends them with a known prefix. func randString(n int, src rand.Source, prefix string) string { b := make([]byte, n) // A rand.Int63() generates 63 random bits, enough for letterIdxMax letters! for i, cache, remain := n-1, src.Int63(), letterIdxMax; i >= 0; { if remain == 0 { cache, remain = src.Int63(), letterIdxMax } if idx := int(cache & letterIdxMask); idx < len(letterBytes) { b[i] = letterBytes[idx] i-- } cache >>= letterIdxBits remain-- } return prefix + string(b[0:30-len(prefix)]) } // Tests for Core GetObject() function. func TestGetObjectCore(t *testing.T) { if testing.Short() { t.Skip("skipping functional tests for the short runs") } // Seed random based on current time. rand.Seed(time.Now().Unix()) // Instantiate new minio core client object. c, err := NewCore( os.Getenv(serverEndpoint), os.Getenv(accessKey), os.Getenv(secretKey), mustParseBool(os.Getenv(enableSecurity)), ) if err != nil { t.Fatal("Error:", err) } // Enable tracing, write to stderr. // c.TraceOn(os.Stderr) // Set user agent. c.SetAppInfo("Minio-go-FunctionalTest", "0.1.0") // Generate a new random bucket name. bucketName := randString(60, rand.NewSource(time.Now().UnixNano()), "minio-go-test") // Make a new bucket. 
err = c.MakeBucket(bucketName, "us-east-1") if err != nil { t.Fatal("Error:", err, bucketName) } // Generate data more than 32K buf := bytes.Repeat([]byte("3"), rand.Intn(1<<20)+32*1024) // Save the data objectName := randString(60, rand.NewSource(time.Now().UnixNano()), "") n, err := c.Client.PutObject(bucketName, objectName, bytes.NewReader(buf), int64(len(buf)), PutObjectOptions{ ContentType: "binary/octet-stream", }) if err != nil { t.Fatal("Error:", err, bucketName, objectName) } if n != int64(len(buf)) { t.Fatalf("Error: number of bytes does not match, want %v, got %v\n", len(buf), n) } offset := int64(2048) // read directly buf1 := make([]byte, 512) buf2 := make([]byte, 512) buf3 := make([]byte, n) buf4 := make([]byte, 1) opts := GetObjectOptions{} opts.SetRange(offset, offset+int64(len(buf1))-1) reader, objectInfo, err := c.GetObject(bucketName, objectName, opts) if err != nil { t.Fatal(err) } m, err := io.ReadFull(reader, buf1) reader.Close() if err != nil { t.Fatal(err) } if objectInfo.Size != int64(m) { t.Fatalf("Error: GetObject read shorter bytes before reaching EOF, want %v, got %v\n", objectInfo.Size, m) } if !bytes.Equal(buf1, buf[offset:offset+512]) { t.Fatal("Error: Incorrect read between two GetObject from same offset.") } offset += 512 opts.SetRange(offset, offset+int64(len(buf2))-1) reader, objectInfo, err = c.GetObject(bucketName, objectName, opts) if err != nil { t.Fatal(err) } m, err = io.ReadFull(reader, buf2) reader.Close() if err != nil { t.Fatal(err) } if objectInfo.Size != int64(m) { t.Fatalf("Error: GetObject read shorter bytes before reaching EOF, want %v, got %v\n", objectInfo.Size, m) } if !bytes.Equal(buf2, buf[offset:offset+512]) { t.Fatal("Error: Incorrect read between two GetObject from same offset.") } opts.SetRange(0, int64(len(buf3))) reader, objectInfo, err = c.GetObject(bucketName, objectName, opts) if err != nil { t.Fatal(err) } m, err = io.ReadFull(reader, buf3) if err != nil { reader.Close() t.Fatal(err) } reader.Close() if objectInfo.Size != int64(m) { t.Fatalf("Error: GetObject read shorter bytes before reaching EOF, want %v, got %v\n", objectInfo.Size, m) } if !bytes.Equal(buf3, buf) { t.Fatal("Error: Incorrect data read in GetObject, than what was previously upoaded.") } opts = GetObjectOptions{} opts.SetMatchETag("etag") _, _, err = c.GetObject(bucketName, objectName, opts) if err == nil { t.Fatal("Unexpected GetObject should fail with mismatching etags") } if errResp := ToErrorResponse(err); errResp.Code != "PreconditionFailed" { t.Fatalf("Expected \"PreconditionFailed\" as code, got %s instead", errResp.Code) } opts = GetObjectOptions{} opts.SetMatchETagExcept("etag") reader, objectInfo, err = c.GetObject(bucketName, objectName, opts) if err != nil { t.Fatal(err) } m, err = io.ReadFull(reader, buf3) reader.Close() if err != nil { t.Fatal(err) } if objectInfo.Size != int64(m) { t.Fatalf("Error: GetObject read shorter bytes before reaching EOF, want %v, got %v\n", objectInfo.Size, m) } if !bytes.Equal(buf3, buf) { t.Fatal("Error: Incorrect data read in GetObject, than what was previously upoaded.") } opts = GetObjectOptions{} opts.SetRange(0, 0) reader, objectInfo, err = c.GetObject(bucketName, objectName, opts) if err != nil { t.Fatal(err) } m, err = io.ReadFull(reader, buf4) reader.Close() if err != nil { t.Fatal(err) } if objectInfo.Size != int64(m) { t.Fatalf("Error: GetObject read shorter bytes before reaching EOF, want %v, got %v\n", objectInfo.Size, m) } err = c.RemoveObject(bucketName, objectName) if err != nil { t.Fatal("Error: ", 
err) } err = c.RemoveBucket(bucketName) if err != nil { t.Fatal("Error:", err) } } // Tests GetObject to return Content-Encoding properly set // and overrides any auto decoding. func TestGetObjectContentEncoding(t *testing.T) { if testing.Short() { t.Skip("skipping functional tests for the short runs") } // Seed random based on current time. rand.Seed(time.Now().Unix()) // Instantiate new minio core client object. c, err := NewCore( os.Getenv(serverEndpoint), os.Getenv(accessKey), os.Getenv(secretKey), mustParseBool(os.Getenv(enableSecurity)), ) if err != nil { t.Fatal("Error:", err) } // Enable tracing, write to stderr. // c.TraceOn(os.Stderr) // Set user agent. c.SetAppInfo("Minio-go-FunctionalTest", "0.1.0") // Generate a new random bucket name. bucketName := randString(60, rand.NewSource(time.Now().UnixNano()), "minio-go-test") // Make a new bucket. err = c.MakeBucket(bucketName, "us-east-1") if err != nil { t.Fatal("Error:", err, bucketName) } // Generate data more than 32K buf := bytes.Repeat([]byte("3"), rand.Intn(1<<20)+32*1024) // Save the data objectName := randString(60, rand.NewSource(time.Now().UnixNano()), "") n, err := c.Client.PutObject(bucketName, objectName, bytes.NewReader(buf), int64(len(buf)), PutObjectOptions{ ContentEncoding: "gzip", }) if err != nil { t.Fatal("Error:", err, bucketName, objectName) } if n != int64(len(buf)) { t.Fatalf("Error: number of bytes does not match, want %v, got %v\n", len(buf), n) } rwc, objInfo, err := c.GetObject(bucketName, objectName, GetObjectOptions{}) if err != nil { t.Fatalf("Error: %v", err) } rwc.Close() if objInfo.Size <= 0 { t.Fatalf("Unexpected size of the object %v, expected %v", objInfo.Size, n) } value, ok := objInfo.Metadata["Content-Encoding"] if !ok { t.Fatalf("Expected Content-Encoding metadata to be set.") } if value[0] != "gzip" { t.Fatalf("Unexpected content-encoding found, want gzip, got %v", value) } } // Tests get bucket policy core API. func TestGetBucketPolicy(t *testing.T) { if testing.Short() { t.Skip("skipping functional tests for short runs") } // Seed random based on current time. rand.Seed(time.Now().Unix()) // Instantiate new minio client object. c, err := NewCore( os.Getenv(serverEndpoint), os.Getenv(accessKey), os.Getenv(secretKey), mustParseBool(os.Getenv(enableSecurity)), ) if err != nil { t.Fatal("Error:", err) } // Enable to debug // c.TraceOn(os.Stderr) // Set user agent. c.SetAppInfo("Minio-go-FunctionalTest", "0.1.0") // Generate a new random bucket name. bucketName := randString(60, rand.NewSource(time.Now().UnixNano()), "minio-go-test") // Make a new bucket. err = c.MakeBucket(bucketName, "us-east-1") if err != nil { t.Fatal("Error:", err, bucketName) } // Verify if bucket exits and you have access. var exists bool exists, err = c.BucketExists(bucketName) if err != nil { t.Fatal("Error:", err, bucketName) } if !exists { t.Fatal("Error: could not find ", bucketName) } // Asserting the default bucket policy. bucketPolicy, err := c.GetBucketPolicy(bucketName) if err != nil { errResp := ToErrorResponse(err) if errResp.Code != "NoSuchBucketPolicy" { t.Error("Error:", err, bucketName) } } if bucketPolicy != "" { t.Errorf("Bucket policy expected %#v, got %#v", "", bucketPolicy) } err = c.RemoveBucket(bucketName) if err != nil { t.Fatal("Error:", err) } } // Tests Core CopyObject API implementation. func TestCoreCopyObject(t *testing.T) { if testing.Short() { t.Skip("skipping functional tests for short runs") } // Seed random based on current time. 
rand.Seed(time.Now().Unix()) // Instantiate new minio client object. c, err := NewCore( os.Getenv(serverEndpoint), os.Getenv(accessKey), os.Getenv(secretKey), mustParseBool(os.Getenv(enableSecurity)), ) if err != nil { t.Fatal("Error:", err) } // Enable tracing, write to stderr. // c.TraceOn(os.Stderr) // Set user agent. c.SetAppInfo("Minio-go-FunctionalTest", "0.1.0") // Generate a new random bucket name. bucketName := randString(60, rand.NewSource(time.Now().UnixNano()), "minio-go-test") // Make a new bucket. err = c.MakeBucket(bucketName, "us-east-1") if err != nil { t.Fatal("Error:", err, bucketName) } buf := bytes.Repeat([]byte("a"), 32*1024) // Save the data objectName := randString(60, rand.NewSource(time.Now().UnixNano()), "") objInfo, err := c.PutObject(bucketName, objectName, bytes.NewReader(buf), int64(len(buf)), "", "", map[string]string{ "Content-Type": "binary/octet-stream", }) if err != nil { t.Fatal("Error:", err, bucketName, objectName) } if objInfo.Size != int64(len(buf)) { t.Fatalf("Error: number of bytes does not match, want %v, got %v\n", len(buf), objInfo.Size) } destBucketName := bucketName destObjectName := objectName + "-dest" cobjInfo, err := c.CopyObject(bucketName, objectName, destBucketName, destObjectName, map[string]string{ "X-Amz-Metadata-Directive": "REPLACE", "Content-Type": "application/javascript", }) if err != nil { t.Fatal("Error:", err, bucketName, objectName, destBucketName, destObjectName) } if cobjInfo.ETag != objInfo.ETag { t.Fatalf("Error: expected etag to be same as source object %s, but found different etag :%s", objInfo.ETag, cobjInfo.ETag) } // Attempt to read from destBucketName and object name. r, err := c.Client.GetObject(destBucketName, destObjectName, GetObjectOptions{}) if err != nil { t.Fatal("Error:", err, bucketName, objectName) } st, err := r.Stat() if err != nil { t.Fatal("Error:", err, bucketName, objectName) } if st.Size != int64(len(buf)) { t.Fatalf("Error: number of bytes in stat does not match, want %v, got %v\n", len(buf), st.Size) } if st.ContentType != "application/javascript" { t.Fatalf("Error: Content types don't match, expected: application/javascript, found: %+v\n", st.ContentType) } if st.ETag != objInfo.ETag { t.Fatalf("Error: expected etag to be same as source object %s, but found different etag :%s", objInfo.ETag, st.ETag) } if err := r.Close(); err != nil { t.Fatal("Error:", err) } if err := r.Close(); err == nil { t.Fatal("Error: object is already closed, should return error") } err = c.RemoveObject(bucketName, objectName) if err != nil { t.Fatal("Error: ", err) } err = c.RemoveObject(destBucketName, destObjectName) if err != nil { t.Fatal("Error: ", err) } err = c.RemoveBucket(bucketName) if err != nil { t.Fatal("Error:", err) } // Do not need to remove destBucketName its same as bucketName. } // Test Core CopyObjectPart implementation func TestCoreCopyObjectPart(t *testing.T) { if testing.Short() { t.Skip("skipping functional tests for short runs") } // Seed random based on current time. rand.Seed(time.Now().Unix()) // Instantiate new minio client object. c, err := NewCore( os.Getenv(serverEndpoint), os.Getenv(accessKey), os.Getenv(secretKey), mustParseBool(os.Getenv(enableSecurity)), ) if err != nil { t.Fatal("Error:", err) } // Enable tracing, write to stderr. // c.TraceOn(os.Stderr) // Set user agent. c.SetAppInfo("Minio-go-FunctionalTest", "0.1.0") // Generate a new random bucket name. bucketName := randString(60, rand.NewSource(time.Now().UnixNano()), "minio-go-test") // Make a new bucket. 
err = c.MakeBucket(bucketName, "us-east-1") if err != nil { t.Fatal("Error:", err, bucketName) } // Make a buffer with 5MB of data buf := bytes.Repeat([]byte("abcde"), 1024*1024) // Save the data objectName := randString(60, rand.NewSource(time.Now().UnixNano()), "") objInfo, err := c.PutObject(bucketName, objectName, bytes.NewReader(buf), int64(len(buf)), "", "", map[string]string{ "Content-Type": "binary/octet-stream", }) if err != nil { t.Fatal("Error:", err, bucketName, objectName) } if objInfo.Size != int64(len(buf)) { t.Fatalf("Error: number of bytes does not match, want %v, got %v\n", len(buf), objInfo.Size) } destBucketName := bucketName destObjectName := objectName + "-dest" uploadID, err := c.NewMultipartUpload(destBucketName, destObjectName, PutObjectOptions{}) if err != nil { t.Fatal("Error:", err, bucketName, objectName) } // Content of the destination object will be two copies of // `objectName` concatenated, followed by first byte of // `objectName`. // First of three parts fstPart, err := c.CopyObjectPart(bucketName, objectName, destBucketName, destObjectName, uploadID, 1, 0, -1, nil) if err != nil { t.Fatal("Error:", err, destBucketName, destObjectName) } // Second of three parts sndPart, err := c.CopyObjectPart(bucketName, objectName, destBucketName, destObjectName, uploadID, 2, 0, -1, nil) if err != nil { t.Fatal("Error:", err, destBucketName, destObjectName) } // Last of three parts lstPart, err := c.CopyObjectPart(bucketName, objectName, destBucketName, destObjectName, uploadID, 3, 0, 1, nil) if err != nil { t.Fatal("Error:", err, destBucketName, destObjectName) } // Complete the multipart upload err = c.CompleteMultipartUpload(destBucketName, destObjectName, uploadID, []CompletePart{fstPart, sndPart, lstPart}) if err != nil { t.Fatal("Error:", err, destBucketName, destObjectName) } // Stat the object and check its length matches objInfo, err = c.StatObject(destBucketName, destObjectName, StatObjectOptions{}) if err != nil { t.Fatal("Error:", err, destBucketName, destObjectName) } if objInfo.Size != (5*1024*1024)*2+1 { t.Fatal("Destination object has incorrect size!") } // Now we read the data back getOpts := GetObjectOptions{} getOpts.SetRange(0, 5*1024*1024-1) r, _, err := c.GetObject(destBucketName, destObjectName, getOpts) if err != nil { t.Fatal("Error:", err, destBucketName, destObjectName) } getBuf := make([]byte, 5*1024*1024) _, err = io.ReadFull(r, getBuf) if err != nil { t.Fatal("Error:", err, destBucketName, destObjectName) } if !bytes.Equal(getBuf, buf) { t.Fatal("Got unexpected data in first 5MB") } getOpts.SetRange(5*1024*1024, 0) r, _, err = c.GetObject(destBucketName, destObjectName, getOpts) if err != nil { t.Fatal("Error:", err, destBucketName, destObjectName) } getBuf = make([]byte, 5*1024*1024+1) _, err = io.ReadFull(r, getBuf) if err != nil { t.Fatal("Error:", err, destBucketName, destObjectName) } if !bytes.Equal(getBuf[:5*1024*1024], buf) { t.Fatal("Got unexpected data in second 5MB") } if getBuf[5*1024*1024] != buf[0] { t.Fatal("Got unexpected data in last byte of copied object!") } if err := c.RemoveObject(destBucketName, destObjectName); err != nil { t.Fatal("Error: ", err) } if err := c.RemoveObject(bucketName, objectName); err != nil { t.Fatal("Error: ", err) } if err := c.RemoveBucket(bucketName); err != nil { t.Fatal("Error: ", err) } // Do not need to remove destBucketName its same as bucketName. } // Test Core PutObject. 
func TestCorePutObject(t *testing.T) { if testing.Short() { t.Skip("skipping functional tests for short runs") } // Seed random based on current time. rand.Seed(time.Now().Unix()) // Instantiate new minio client object. c, err := NewCore( os.Getenv(serverEndpoint), os.Getenv(accessKey), os.Getenv(secretKey), mustParseBool(os.Getenv(enableSecurity)), ) if err != nil { t.Fatal("Error:", err) } // Enable tracing, write to stderr. // c.TraceOn(os.Stderr) // Set user agent. c.SetAppInfo("Minio-go-FunctionalTest", "0.1.0") // Generate a new random bucket name. bucketName := randString(60, rand.NewSource(time.Now().UnixNano()), "minio-go-test") // Make a new bucket. err = c.MakeBucket(bucketName, "us-east-1") if err != nil { t.Fatal("Error:", err, bucketName) } buf := bytes.Repeat([]byte("a"), 32*1024) // Save the data objectName := randString(60, rand.NewSource(time.Now().UnixNano()), "") // Object content type objectContentType := "binary/octet-stream" metadata := make(map[string]string) metadata["Content-Type"] = objectContentType objInfo, err := c.PutObject(bucketName, objectName, bytes.NewReader(buf), int64(len(buf)), "1B2M2Y8AsgTpgAmY7PhCfg==", "", metadata) if err == nil { t.Fatal("Error expected: error, got: nil(success)") } objInfo, err = c.PutObject(bucketName, objectName, bytes.NewReader(buf), int64(len(buf)), "", "", metadata) if err != nil { t.Fatal("Error:", err, bucketName, objectName) } if objInfo.Size != int64(len(buf)) { t.Fatalf("Error: number of bytes does not match, want %v, got %v\n", len(buf), objInfo.Size) } // Read the data back r, err := c.Client.GetObject(bucketName, objectName, GetObjectOptions{}) if err != nil { t.Fatal("Error:", err, bucketName, objectName) } st, err := r.Stat() if err != nil { t.Fatal("Error:", err, bucketName, objectName) } if st.Size != int64(len(buf)) { t.Fatalf("Error: number of bytes in stat does not match, want %v, got %v\n", len(buf), st.Size) } if st.ContentType != objectContentType { t.Fatalf("Error: Content types don't match, expected: %+v, found: %+v\n", objectContentType, st.ContentType) } if err := r.Close(); err != nil { t.Fatal("Error:", err) } if err := r.Close(); err == nil { t.Fatal("Error: object is already closed, should return error") } err = c.RemoveObject(bucketName, objectName) if err != nil { t.Fatal("Error: ", err) } err = c.RemoveBucket(bucketName) if err != nil { t.Fatal("Error:", err) } } func TestCoreGetObjectMetadata(t *testing.T) { if testing.Short() { t.Skip("skipping functional tests for the short runs") } core, err := NewCore( os.Getenv(serverEndpoint), os.Getenv(accessKey), os.Getenv(secretKey), mustParseBool(os.Getenv(enableSecurity))) if err != nil { log.Fatalln(err) } // Generate a new random bucket name. bucketName := randString(60, rand.NewSource(time.Now().UnixNano()), "minio-go-test") // Make a new bucket. err = core.MakeBucket(bucketName, "us-east-1") if err != nil { t.Fatal("Error:", err, bucketName) } metadata := map[string]string{ "X-Amz-Meta-Key-1": "Val-1", } _, err = core.PutObject(bucketName, "my-objectname", bytes.NewReader([]byte("hello")), 5, "", "", metadata) if err != nil { log.Fatalln(err) } reader, objInfo, err := core.GetObject(bucketName, "my-objectname", GetObjectOptions{}) if err != nil { log.Fatalln(err) } defer reader.Close() if objInfo.Metadata.Get("X-Amz-Meta-Key-1") != "Val-1" { log.Fatalln("Expected metadata to be available but wasn't") } }
apache-2.0
x0mak/test---project---python---Kurbatova
php4dvd/test_add.py
1994
# -*- coding: utf-8 -*-
import re

from selenium import webdriver
from selenium.common.exceptions import *
from selenium_fixture import app
import time
from model.user import User
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.support.expected_conditions import *
from selenium.webdriver.common.by import By


def do_login(driver, user):
    driver.get("http://localhost:8080/php4dvd/")
    driver.find_element_by_id("username").clear()
    driver.find_element_by_id("username").send_keys(user.username)
    driver.find_element_by_name("password").clear()
    driver.find_element_by_name("password").send_keys(user.password)
    driver.find_element_by_name("submit").click()


def test_login(app):
    do_login(app.driver, User.Admin())


def test_add(app):
    do_login(app.driver, User.Admin())
    app.driver.find_element_by_css_selector("img[alt=\"Add movie\"]").click()
    app.driver.find_element_by_name("name").clear()
    app.driver.find_element_by_name("name").send_keys("titaniccc")
    app.driver.find_element_by_name("year").clear()
    app.driver.find_element_by_name("year").send_keys("1995")
    app.driver.find_element_by_id("submit").click()


def test_remove(app):
    do_login(app.driver, User.Admin())
    app.driver.find_element(By.XPATH, '//*[.="titaniccc"]').click()
    app.driver.find_element_by_css_selector("img[alt=\"Remove\"]").click()
    # Plain WebDriver has no close_alert_and_get_its_text()/assertRegexpMatches
    # helpers (those come from Selenium IDE exports), so read and accept the
    # confirmation alert directly.
    alert = app.driver.switch_to.alert
    assert re.match(r"^Are you sure you want to remove this[\s\S]$", alert.text)
    alert.accept()


def test_search(app):
    do_login(app.driver, User.Admin())
    app.driver.find_element_by_css_selector("img[alt=\"Add movie\"]").click()
    app.driver.find_element_by_id("imdbsearch").clear()
    app.driver.find_element_by_id("imdbsearch").send_keys(u"Шар")
    app.driver.find_element_by_css_selector("input[type=\"submit\"]").click()
    # WebDriver has no is_element_present(); assert presence via find_elements.
    assert len(app.driver.find_elements(By.LINK_TEXT, u"Шар")) > 0
apache-2.0
gromgull/cgajs
src/format.js
2832
/* global define, module */

// Saves me from ugly string formatting with +
// https://raw.githubusercontent.com/davidchambers/string-format/master/index.js

;(function(global) {

  'use strict';

  //  ValueError :: String -> Error
  var ValueError = function(message) {
    var err = new Error(message);
    err.name = 'ValueError';
    return err;
  };

  //  defaultTo :: a,a? -> a
  var defaultTo = function(x, y) {
    return y == null ? x : y;
  };

  //  create :: Object -> String,*... -> String
  var create = function(transformers) {
    return function(template) {
      var args = Array.prototype.slice.call(arguments, 1);
      var idx = 0;
      var state = 'UNDEFINED';

      return template.replace(
        /([{}])\1|[{](.*?)(?:!(.+?))?[}]/g,
        function(match, literal, key, xf) {
          if (literal != null) {
            return literal;
          }
          if (key.length > 0) {
            if (state === 'IMPLICIT') {
              throw ValueError('cannot switch from ' +
                               'implicit to explicit numbering');
            }
            state = 'EXPLICIT';
          } else {
            if (state === 'EXPLICIT') {
              throw ValueError('cannot switch from ' +
                               'explicit to implicit numbering');
            }
            state = 'IMPLICIT';
            key = String(idx);
            idx += 1;
          }
          var value = defaultTo('', lookup(args, key.split('.')));

          if (xf == null) {
            return value;
          } else if (Object.prototype.hasOwnProperty.call(transformers, xf)) {
            return transformers[xf](value);
          } else {
            throw ValueError('no transformer named "' + xf + '"');
          }
        }
      );
    };
  };

  var lookup = function(obj, path) {
    if (!/^\d+$/.test(path[0])) {
      path = ['0'].concat(path);
    }
    for (var idx = 0; idx < path.length; idx += 1) {
      var key = path[idx];
      obj = typeof obj[key] === 'function' ? obj[key]() : obj[key];
    }
    return obj;
  };

  //  format :: String,*... -> String
  var format = create({});

  //  format.create :: Object -> String,*... -> String
  format.create = create;

  //  format.extend :: Object,Object -> ()
  format.extend = function(prototype, transformers) {
    var $format = create(transformers);
    prototype.format = function() {
      var args = Array.prototype.slice.call(arguments);
      args.unshift(this);
      return $format.apply(global, args);
    };
  };

  /* istanbul ignore else */
  if (typeof module !== 'undefined') {
    module.exports = format;
  } else if (typeof define === 'function' && define.amd) {
    define(function() { return format; });
  } else {
    global.format = format;
  }

  format.extend(String.prototype, {});

}.call(this, this));
apache-2.0
hhu94/Synapse-Repository-Services
lib/lib-utils/src/main/java/org/sagebionetworks/util/ValidateArgument.java
1304
package org.sagebionetworks.util;

import org.apache.commons.validator.routines.UrlValidator;

public class ValidateArgument {

	private static UrlValidatorPatched urlValidator = new UrlValidatorPatched(UrlValidator.ALLOW_2_SLASHES + UrlValidator.ALLOW_ALL_SCHEMES);

	public static void required(Object fieldValue, String fieldName) {
		if (fieldValue == null) {
			throw new IllegalArgumentException(fieldName + " is required.");
		}
	}

	public static void requirement(boolean requirement, String message) {
		if (!requirement) {
			throw new IllegalArgumentException(message);
		}
	}

	public static void requireType(Object fieldValue, Class<?> requiredType, String fieldName) {
		required(fieldValue, fieldName);
		if (!requiredType.isInstance(fieldValue)) {
			throw new IllegalArgumentException("Expected " + fieldName + " to be of type " + requiredType.getName()
					+ ", but it was type " + fieldValue.getClass().getName() + " instead");
		}
	}

	public static void failRequirement(String message) {
		throw new IllegalArgumentException(message);
	}

	public static void optional(String description, String string) {
	}

	public static void validUrl(String url) {
		if (!urlValidator.isValid(url)) {
			throw new IllegalArgumentException("The ExternalURL is not a valid url: " + url);
		}
	}
}
apache-2.0
freedot/tstolua
tests/cases/compiler/crashInsourcePropertyIsRelatableToTargetProperty.ts
205
class C {
    private x = 1;
}
class D extends C { }

function foo(x: "hi", items: string[]): typeof foo;
function foo(x: string, items: string[]): typeof foo {
    return null;
}

var a: D = foo("hi", []);
apache-2.0
MarcoAntonio13/SistTemperanca
src/conexao/Main.java
5010
package conexao; import java.time.LocalDate; import java.util.ArrayList; import java.util.Calendar; import java.util.List; import dao.jdbc.JDBCClienteDAO; import dao.jdbc.JDBCEnderecoDAO; import dao.jdbc.JDBCFornecedorDAO; import dao.jdbc.JDBCProdutoDAO; import dao.jdbc.JDBCVendaDAO; import dao.jdbc.JDBCVendedorFornecedorDAO; import model.Cliente; import model.Endereco; import model.Fornecedor; import model.Produto; import model.Venda; import model.VendedorFornecedor; public class Main { public static void main (String[]args){ JDBCFornecedorDAO fornecedorDAO = new JDBCFornecedorDAO(); List<Fornecedor> fornecedores = fornecedorDAO.listarPorRazaoSocial("Marco"); fornecedores.forEach(action -> System.out.println("Razão Social: "+ action.getRazaoSocial())); // Endereco endereco = new Endereco(); // endereco.setRua("Curitiba"); // endereco.setBairro("Nova brasilia"); // endereco.setNumeroEstabeleciemnto(2697); // JDBCEnderecoDAO enderecoDAO = new JDBCEnderecoDAO(); // enderecoDAO.inserir(endereco); // // List<Endereco> enderecos = new ArrayList<Endereco>(); // enderecos.add(endereco); // // Cliente cliente = new Cliente(); // cliente.setCnpj(1238123124124l); // cliente.setRazaoSocial("Ervas da terra LTDA ME"); // cliente.setEnderecos(enderecos); // // JDBCClienteDAO clienteDAO = new JDBCClienteDAO(); // clienteDAO.inserir(cliente); // // Produto produto = new Produto(); // produto.setNome("Oregano Estragado"); // produto.setValorVenda(4.45); // produto.setPeso(5.87); // // JDBCProdutoDAO produtoDAO = new JDBCProdutoDAO(); // produtoDAO.inserir(produto); // // List<Produto> produtos = new ArrayList<Produto>(); // produtos.add(produto); // // Venda venda = new Venda(); // venda.setCliente(cliente); // venda.setData(Calendar.getInstance()); // venda.setProdutos(produtos); // // JDBCVendaDAO vendaDAO = new JDBCVendaDAO(); // vendaDAO.inserir(venda); // // LocalDate data = LocalDate.of(2010, 4 , 20); // JDBCDespesaDAO daoDespesa = new JDBCDespesaDAO(); // daoDespesa.remover(19); // // Renda renda = new Renda(11, "A mulher ", 1560.23 , data, "Salário da Mulher"); // JDBCRendaDAO daoRenda = new JDBCRendaDAO(); // VendedorFornecedor vendedorFornecedor = new VendedorFornecedor(10, 6934240101l, "Marco HUE", "ewegwrehweh"); // VendedorFornecedor vendedorFornecedor2 = new VendedorFornecedor(9, 6934240101l, "Marco HUE", "ewegwrehweh"); // VendedorFornecedor vendedorFornecedor3 = new VendedorFornecedor(8, 6934240101l, "Marco HUE", "ewegwrehweh"); // VendedorFornecedor vendedorFornecedor4 = new VendedorFornecedor(7, 6934240101l, "Marco HUE", "ewegwrehweh"); // VendedorFornecedor vendedorFornecedor5 = new VendedorFornecedor(6, 6934240101l, "Marco HUE", "ewegwrehweh"); // List<VendedorFornecedor> vendedoresFornecedores = new ArrayList<VendedorFornecedor>(); // vendedoresFornecedores.add(vendedorFornecedor); // vendedoresFornecedores.add(vendedorFornecedor2); // vendedoresFornecedores.add(vendedorFornecedor3); // vendedoresFornecedores.add(vendedorFornecedor4); // vendedoresFornecedores.add(vendedorFornecedor5); //// // Fornecedor fornecedor = new Fornecedor(null, "23190850721398", "Não sei", 6934240106l, "iqwufhiq@fuhe.com", vendedoresFornecedores); // // JDBCFornecedorDAO daoFornecedo = new JDBCFornecedorDAO(); // JDBCVendedorFornecedorDAO daoVendedorFornecedor = new JDBCVendedorFornecedorDAO(); // daoVendedorFornecedor.inserir(vendedorFornecedor); // daoVendedorFornecedor.inserir(vendedorFornecedor2); // daoVendedorFornecedor.inserir(vendedorFornecedor3); // daoVendedorFornecedor.inserir(vendedorFornecedor4); 
// daoVendedorFornecedor.inserir(vendedorFornecedor5); // // daoFornecedo.inserir(fornecedor); // daoFornecedo.inserir(fornecedor); // JDBCVendedorFornecedorDAO daoVendedorFornecedor = new JDBCVendedorFornecedorDAO(); // daoVendedorFornecedor.inserir(vendedorFornecedor); // daoVendedorFornecedor.remover(12); // System.out.println(daoVendedorFornecedor.buscar(10).getEmail()); // daoVendedorFornecedor.editar(vendedorFornecedor); // daoVendedorFornecedor.listar().forEach(action -> System.out.println(action.getNome())); // daoRenda.inserir(renda); // daoRenda.remover(11); // daoRenda.listar().forEach(action -> System.out.println(action.getDescricao() + action.getData())); // System.out.println(daoRenda.buscar(11).getNome()); // daoRenda.editar(renda); // daoDespesa.listar().forEach(action -> System.out.println(action.getData())); // daoDespesa.editar(despesa); // System.out.println(daoFormaPagamento.buscar(2).getNome()); // daoFormaPagamento.remover(10); // // JDBCClienteDAO daoCliente = new JDBCClienteDAO(); // // System.out.println(daoCliente.buscar(1).getRazaoSocial()); // daoCliente.listar().forEach(action -> System.out.println(action.getRazaoSocial())); } }
apache-2.0
zstackio/zstack
plugin/portForwarding/src/main/java/org/zstack/network/service/portforwarding/PortForwardingExtension.java
9643
package org.zstack.network.service.portforwarding; import org.springframework.beans.factory.annotation.Autowired; import org.zstack.compute.vm.StaticIpOperator; import org.zstack.core.db.SimpleQuery; import org.zstack.header.core.Completion; import org.zstack.header.core.NoErrorCompletion; import org.zstack.header.errorcode.ErrorCode; import org.zstack.header.network.l3.L3NetworkInventory; import org.zstack.header.network.l3.L3NetworkVO; import org.zstack.header.network.service.NetworkServiceProviderType; import org.zstack.header.network.service.NetworkServiceType; import org.zstack.header.vm.*; import org.zstack.header.vm.VmInstanceConstant.VmOperation; import org.zstack.network.l3.L3NetworkManager; import org.zstack.network.service.AbstractNetworkServiceExtension; import org.zstack.network.service.vip.VipInventory; import org.zstack.network.service.vip.VipVO; import org.zstack.utils.Utils; import org.zstack.utils.logging.CLogger; import java.util.*; /** * Created with IntelliJ IDEA. * User: frank * Time: 7:53 PM * To change this template use File | Settings | File Templates. */ public class PortForwardingExtension extends AbstractNetworkServiceExtension { private static final CLogger logger = Utils.getLogger(PortForwardingExtension.class); @Autowired private PortForwardingManager pfMgr; @Autowired private L3NetworkManager l3Mgr; private final String SUCCESS = PortForwardingExtension.class.getName(); public NetworkServiceType getNetworkServiceType() { return NetworkServiceType.PortForwarding; } protected List<PortForwardingStruct> makePortForwardingStruct(List<VmNicInventory> nics, boolean releaseVmNicInfo,L3NetworkInventory l3) { VmNicInventory nic = null; for (VmNicInventory inv : nics) { if (VmNicHelper.getL3Uuids(inv).contains(l3.getUuid())) { nic = inv; break; } } SimpleQuery<PortForwardingRuleVO> q = dbf.createQuery(PortForwardingRuleVO.class); q.add(PortForwardingRuleVO_.vmNicUuid, SimpleQuery.Op.EQ, nic.getUuid()); List<PortForwardingRuleVO> pfvos = q.list(); if (pfvos.isEmpty()) { // having port forwarding service but no rules applied yet return new ArrayList<PortForwardingStruct>(); } List<PortForwardingStruct> rules = new ArrayList<PortForwardingStruct>(); for (PortForwardingRuleVO pfvo : pfvos) { VipVO vipvo = dbf.findByUuid(pfvo.getVipUuid(), VipVO.class); L3NetworkVO l3vo = dbf.findByUuid(vipvo.getL3NetworkUuid(), L3NetworkVO.class); PortForwardingStruct struct = new PortForwardingStruct(); struct.setRule(PortForwardingRuleInventory.valueOf(pfvo)); struct.setVip(VipInventory.valueOf(vipvo)); struct.setGuestIp(nic.getIp()); struct.setGuestMac(nic.getMac()); struct.setGuestL3Network(l3); struct.setSnatInboundTraffic(PortForwardingGlobalConfig.SNAT_INBOUND_TRAFFIC.value(Boolean.class)); struct.setVipL3Network(L3NetworkInventory.valueOf(l3vo)); struct.setReleaseVmNicInfoWhenDetaching(releaseVmNicInfo); struct.setReleaseVip(false); rules.add(struct); } return rules; } @Override public void applyNetworkService(VmInstanceSpec spec, Map<String, Object> data, Completion completion) { // For new created vm, there is no port forwarding rule if (spec.getCurrentVmOperation() == VmInstanceConstant.VmOperation.NewCreate) { completion.success(); return; } Map<String, List<PortForwardingStruct>> structs = workoutPortForwarding(spec); Map<String, List<PortForwardingStruct>> applieds = new HashMap<String, List<PortForwardingStruct>>(); data.put(SUCCESS, applieds); applyNetworkService(structs.entrySet().iterator(), applieds, completion); } private void applyNetworkService(final 
Iterator<Map.Entry<String, List<PortForwardingStruct>>> it, final Map<String, List<PortForwardingStruct>> applieds, final Completion completion) { if (!it.hasNext()) { completion.success(); return; } Map.Entry<String, List<PortForwardingStruct>> e = it.next(); applyNetworkService(e.getValue().iterator(), e.getKey(), applieds, new Completion(completion) { @Override public void success() { applyNetworkService(it, applieds, completion); } @Override public void fail(ErrorCode errorCode) { completion.fail(errorCode); } }); } private void applyNetworkService(final Iterator<PortForwardingStruct> it, final String providerType, final Map<String, List<PortForwardingStruct>> applieds, final Completion completion) { if (!it.hasNext()) { completion.success(); return; } final PortForwardingStruct struct = it.next(); pfMgr.attachPortForwardingRule(struct, providerType, new Completion(completion) { private void addStructToApplieds() { List<PortForwardingStruct> structs = applieds.get(providerType); if (structs == null) { structs = new ArrayList<PortForwardingStruct>(); } structs.add(struct); } @Override public void success() { addStructToApplieds(); logger.debug(String.format("successfully applied %s", struct.toString())); applyNetworkService(it, providerType, applieds, completion); } @Override public void fail(ErrorCode errorCode) { completion.fail(errorCode); } }); } public void releaseNetworkService(final Iterator<Map.Entry<String, List<PortForwardingStruct>>> it, final NoErrorCompletion completion) { if (!it.hasNext()) { completion.done(); return; } Map.Entry<String, List<PortForwardingStruct>> e = it.next(); releaseNetworkService(e.getValue().iterator(), e.getKey(), new NoErrorCompletion(completion) { @Override public void done() { releaseNetworkService(it, completion); } }); } public void releaseNetworkService(final Iterator<PortForwardingStruct> it, final String providerType, final NoErrorCompletion completion) { if (!it.hasNext()) { completion.done(); return; } final PortForwardingStruct struct = it.next(); pfMgr.detachPortForwardingRule(struct, providerType, new Completion(completion) { @Override public void success() { logger.debug(String.format("successfully revoked %s on service provider[%s]", struct.toString(), providerType)); releaseNetworkService(it, providerType, completion); } @Override public void fail(ErrorCode errorCode) { logger.warn(String.format("failed to revoke %s on service provider[%s], provider should take care of cleanup", struct.toString(), providerType)); releaseNetworkService(it, providerType, completion); } }); } @Override public void releaseNetworkService(VmInstanceSpec spec, Map<String, Object> data, NoErrorCompletion completion) { Map<String, List<PortForwardingStruct>> structs; if (data.containsKey(SUCCESS)) { structs = (Map<String, List<PortForwardingStruct>>) data.get(SUCCESS); } else { structs = workoutPortForwarding(spec); } releaseNetworkService(structs.entrySet().iterator(), completion); } private boolean isPortForwardingShouldBeAttachedToBackend(String vmUuid, String l3Uuid, VmOperation operation) { boolean ipChanged = new StaticIpOperator().isIpChange(vmUuid, l3Uuid); boolean stateNeed = PortForwardingConstant.vmOperationForDetachPortfordingRule.contains(operation); L3NetworkVO l3Vo = dbf.findByUuid(l3Uuid, L3NetworkVO.class); boolean l3Need = l3Mgr.applyNetworkServiceWhenVmStateChange(l3Vo.getType()); return ipChanged || stateNeed || l3Need; } private Map<String, List<PortForwardingStruct>> workoutPortForwarding(VmInstanceSpec spec) { Map<String, 
List<PortForwardingStruct>> map = new HashMap<String, List<PortForwardingStruct>>(); Map<NetworkServiceProviderType, List<L3NetworkInventory>> providerMap = getNetworkServiceProviderMap(NetworkServiceType.PortForwarding, VmNicSpec.getL3NetworkInventoryOfSpec(spec.getL3Networks())); for (Map.Entry<NetworkServiceProviderType, List<L3NetworkInventory>> e : providerMap.entrySet()) { NetworkServiceProviderType ptype = e.getKey(); List<PortForwardingStruct> lst = new ArrayList<PortForwardingStruct>(); for (L3NetworkInventory l3 : e.getValue()) { if (!isPortForwardingShouldBeAttachedToBackend(spec.getVmInventory().getUuid(), l3.getUuid(), spec.getCurrentVmOperation())) { continue; } lst.addAll(makePortForwardingStruct(spec.getDestNics(), spec.getCurrentVmOperation() == VmOperation.Destroy || spec.getCurrentVmOperation() == VmOperation.DetachNic, l3)); } map.put(ptype.toString(), lst); } return map; } }
apache-2.0
devigned/azure-sdk-for-ruby
management/azure_mgmt_sql/lib/generated/azure_mgmt_sql/models/server_firewall_rule_list_result.rb
1466
# encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator 1.0.0.0
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.

module Azure::ARM::SQL
  module Models
    #
    # Represents the response to a List Firewall Rules request.
    #
    class ServerFirewallRuleListResult

      include MsRestAzure

      # @return [Array<ServerFirewallRule>] The list of Azure SQL server
      # firewall rules for the server.
      attr_accessor :value


      #
      # Mapper for ServerFirewallRuleListResult class as Ruby Hash.
      # This will be used for serialization/deserialization.
      #
      def self.mapper()
        {
          required: false,
          serialized_name: 'ServerFirewallRuleListResult',
          type: {
            name: 'Composite',
            class_name: 'ServerFirewallRuleListResult',
            model_properties: {
              value: {
                required: false,
                serialized_name: 'value',
                type: {
                  name: 'Sequence',
                  element: {
                      required: false,
                      serialized_name: 'ServerFirewallRuleElementType',
                      type: {
                        name: 'Composite',
                        class_name: 'ServerFirewallRule'
                      }
                  }
                }
              }
            }
          }
        }
      end
    end
  end
end
apache-2.0
cedral/aws-sdk-cpp
aws-cpp-sdk-kinesisanalyticsv2/source/model/MappingParameters.cpp
2030
/*
* Copyright 2010-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
*  http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/

#include <aws/kinesisanalyticsv2/model/MappingParameters.h>
#include <aws/core/utils/json/JsonSerializer.h>

#include <utility>

using namespace Aws::Utils::Json;
using namespace Aws::Utils;

namespace Aws
{
namespace KinesisAnalyticsV2
{
namespace Model
{

MappingParameters::MappingParameters() :
    m_jSONMappingParametersHasBeenSet(false),
    m_cSVMappingParametersHasBeenSet(false)
{
}

MappingParameters::MappingParameters(JsonView jsonValue) :
    m_jSONMappingParametersHasBeenSet(false),
    m_cSVMappingParametersHasBeenSet(false)
{
  *this = jsonValue;
}

MappingParameters& MappingParameters::operator =(JsonView jsonValue)
{
  if(jsonValue.ValueExists("JSONMappingParameters"))
  {
    m_jSONMappingParameters = jsonValue.GetObject("JSONMappingParameters");

    m_jSONMappingParametersHasBeenSet = true;
  }

  if(jsonValue.ValueExists("CSVMappingParameters"))
  {
    m_cSVMappingParameters = jsonValue.GetObject("CSVMappingParameters");

    m_cSVMappingParametersHasBeenSet = true;
  }

  return *this;
}

JsonValue MappingParameters::Jsonize() const
{
  JsonValue payload;

  if(m_jSONMappingParametersHasBeenSet)
  {
   payload.WithObject("JSONMappingParameters", m_jSONMappingParameters.Jsonize());
  }

  if(m_cSVMappingParametersHasBeenSet)
  {
   payload.WithObject("CSVMappingParameters", m_cSVMappingParameters.Jsonize());
  }

  return payload;
}

} // namespace Model
} // namespace KinesisAnalyticsV2
} // namespace Aws
apache-2.0
unicef/un-partner-portal
frontend/src/components/partners/profile/overview/observations/observationExpand.js
3604
import R from 'ramda'; import React from 'react'; import PropTypes from 'prop-types'; import { withStyles } from 'material-ui/styles'; import Attachment from 'material-ui-icons/Attachment'; import GridColumn from '../../../../../components/common/grid/gridColumn'; import GridRow from '../../../../../components/common/grid/gridRow'; import ItemColumnCell from '../../../../common/cell/itemColumnCell'; import { fileNameFromUrl } from '../../../../../helpers/formHelper'; import { formatDateForPrint } from '../../../../../helpers/dates'; import { FLAGS } from '../../../../../helpers/constants'; const messages = { role: 'Role per Office', created: 'Created', comment: 'Comment', contact: 'Contact person (optional)', telephone: 'Telephone (optional)', email: 'E-mail', attachment: 'Attachment', reason: 'Reason for decision', reasonEscalation: 'Reason for deffering/escalation', }; const styleSheet = (theme) => { const padding = theme.spacing.unit; const paddingSmall = theme.spacing.unit * 2; const paddingMedium = theme.spacing.unit * 4; return { alignCenter: { display: 'flex', alignItems: 'center', }, alignText: { textAlign: 'center', }, row: { display: 'flex', }, padding: { padding: `0 0 0 ${padding}px`, }, icon: { fill: theme.palette.primary[300], marginRight: 3, width: 20, height: 20, }, container: { width: '100%', margin: '0', padding: `${paddingSmall}px 0 ${paddingSmall}px ${paddingMedium}px`, }, }; }; const displayAttachment = url => (<div style={{ display: 'flex', alignItems: 'center' }}> {url && <Attachment style={{ marginRight: 5 }} />} <div type="subheading" role="button" tabIndex={0} onClick={() => { window.open(url); }} style={{ cursor: 'pointer', overflow: 'hidden', whiteSpace: 'nowrap', textOverflow: 'ellipsis', }} > {fileNameFromUrl(url)} </div> </div>); const ObservationExpand = (props) => { const { classes, observation } = props; return ( <GridColumn spacing={8} className={classes.container}> <GridRow columns={2} spacing={8}> <ItemColumnCell label={messages.created} content={formatDateForPrint(R.path(['created'], observation))} /> <ItemColumnCell label={messages.comment} content={R.path(['comment'], observation)} /> </GridRow> {observation.category !== FLAGS.SANCTION && <GridRow columns={4} spacing={8}> <ItemColumnCell label={messages.contact} content={R.path(['contactPerson'], observation)} /> <ItemColumnCell label={messages.telephone} content={R.path(['contactPhone'], observation)} /> <ItemColumnCell label={messages.email} content={R.path(['contactEmail'], observation)} /> <ItemColumnCell label={messages.attachment} object={displayAttachment(R.path(['attachment'], observation))} /> </GridRow>} {observation.validationComment && <GridRow columns={1} spacing={8}> <ItemColumnCell label={messages.reason} content={R.path(['validationComment'], observation)} /> </GridRow>} {observation.escalationComment && <GridRow columns={1} spacing={8}> <ItemColumnCell label={messages.reasonEscalation} content={R.path(['escalationComment'], observation)} /> </GridRow>} </GridColumn> ); }; ObservationExpand.propTypes = { classes: PropTypes.object.isRequired, observation: PropTypes.object, }; export default withStyles(styleSheet, { name: 'ObservationExpand' })(ObservationExpand);
apache-2.0
raju249/oppia
core/domain/feedback_services_test.py
27168
# Copyright 2014 The Oppia Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS-IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Tests for feedback-related services.""" import json from core.domain import feedback_domain from core.domain import feedback_jobs_continuous_test from core.domain import feedback_services from core.domain import user_services from core.platform import models from core.tests import test_utils import feconf (feedback_models,) = models.Registry.import_models([models.NAMES.feedback]) taskqueue_services = models.Registry.import_taskqueue_services() class FeedbackServicesUnitTests(test_utils.GenericTestBase): """Test functions in feedback_services.""" def test_feedback_ids(self): """Test various conventions for thread and message ids.""" exp_id = '0' feedback_services.create_thread( exp_id, 'a_state_name', None, 'a subject', 'some text') threadlist = feedback_services.get_all_threads(exp_id, False) self.assertEqual(len(threadlist), 1) thread_id = threadlist[0].get_thread_id() # The thread id should not have any full stops. self.assertNotIn('.', thread_id) messages = feedback_services.get_messages(exp_id, thread_id) self.assertEqual(len(messages), 1) message_id = messages[0].message_id self.assertTrue(isinstance(message_id, int)) # Retrieve the message instance from the storage layer. datastore_id = feedback_models.FeedbackMessageModel.get_messages( exp_id, thread_id)[0].id full_thread_id = (feedback_models.FeedbackThreadModel .generate_full_thread_id(exp_id, thread_id)) # The message id should be prefixed with the full thread id and a full # stop, followed by the message id. self.assertEqual( datastore_id, '%s.%s' % (full_thread_id, message_id)) def test_create_message_fails_if_invalid_thread_id(self): exp_id = '0' with self.assertRaises( feedback_models.FeedbackMessageModel.EntityNotFoundError ): feedback_services.create_message( exp_id, 'invalid_thread_id', 'user_id', None, None, 'Hello') def test_status_of_newly_created_thread_is_open(self): exp_id = '0' feedback_services.create_thread( exp_id, 'a_state_name', None, 'a subject', 'some text') threadlist = feedback_services.get_all_threads(exp_id, False) thread_status = threadlist[0].status self.assertEqual(thread_status, feedback_models.STATUS_CHOICES_OPEN) class SuggestionQueriesUnitTests(test_utils.GenericTestBase): """Test learner suggestion query functions in feedback_services.""" THREAD_ID1 = '1111' THREAD_ID2 = '2222' THREAD_ID3 = '3333' THREAD_ID4 = '4444' THREAD_ID5 = '5555' EXP_ID1 = 'exp_id1' EXP_ID2 = 'exp_id2' USER_EMAIL = 'abc@xyz.com' USERNAME = 'user123' CURRENT_TIME_IN_MSEC = 12345678 def _generate_thread_id(self, unused_exp_id): return self.THREAD_ID1 def setUp(self): super(SuggestionQueriesUnitTests, self).setUp() # Register users. self.user_id = self.get_user_id_from_email(self.USER_EMAIL) user_services.get_or_create_user(self.user_id, self.USER_EMAIL) self.signup(self.USER_EMAIL, self.USERNAME) # Open thread with suggestion. 
thread1 = feedback_models.FeedbackThreadModel( id=feedback_models.FeedbackThreadModel.generate_full_thread_id( self.EXP_ID1, self.THREAD_ID1), exploration_id=self.EXP_ID1, state_name='state_name', original_author_id=self.user_id, subject='SUGGESTION', has_suggestion=True) # Closed threads with suggestion. thread2 = feedback_models.FeedbackThreadModel( id=feedback_models.FeedbackThreadModel.generate_full_thread_id( self.EXP_ID1, self.THREAD_ID2), exploration_id=self.EXP_ID1, state_name='state_name', original_author_id=self.user_id, subject='SUGGESTION', status=feedback_models.STATUS_CHOICES_FIXED, has_suggestion=True) thread3 = feedback_models.FeedbackThreadModel( id=feedback_models.FeedbackThreadModel.generate_full_thread_id( self.EXP_ID1, self.THREAD_ID3), exploration_id=self.EXP_ID1, state_name='state_name', original_author_id=self.user_id, subject='SUGGESTION', status=feedback_models.STATUS_CHOICES_IGNORED, has_suggestion=True) # Closed thread without suggestion. thread4 = feedback_models.FeedbackThreadModel( id=feedback_models.FeedbackThreadModel.generate_full_thread_id( self.EXP_ID1, self.THREAD_ID4), exploration_id=self.EXP_ID1, state_name='state_name', original_author_id=self.user_id, subject='NO SUGGESTION', status=feedback_models.STATUS_CHOICES_IGNORED) # Open thread without suggestion. thread5 = feedback_models.FeedbackThreadModel( id=feedback_models.FeedbackThreadModel.generate_full_thread_id( self.EXP_ID1, self.THREAD_ID5), exploration_id=self.EXP_ID1, state_name='state_name', original_author_id=self.user_id, subject='NO SUGGESTION', status=feedback_models.STATUS_CHOICES_OPEN) for thread in [thread1, thread2, thread3, thread4, thread5]: thread.put() def test_create_and_get_suggestion(self): with self.swap(feedback_models.FeedbackThreadModel, 'generate_new_thread_id', self._generate_thread_id): feedback_services.create_suggestion( self.EXP_ID2, self.user_id, 3, 'state_name', 'description', {'old_content': {}}) suggestion = feedback_services.get_suggestion( self.EXP_ID2, self.THREAD_ID1) thread = feedback_models.FeedbackThreadModel.get( feedback_models.FeedbackThreadModel.generate_full_thread_id( self.EXP_ID2, self.THREAD_ID1)) expected_suggestion_dict = { 'exploration_id': self.EXP_ID2, 'author_name': 'user123', 'exploration_version': 3, 'state_name': 'state_name', 'description': 'description', 'state_content': {'old_content': {}} } self.assertEqual(thread.status, feedback_models.STATUS_CHOICES_OPEN) self.assertDictEqual(expected_suggestion_dict, suggestion.to_dict()) def test_get_open_threads_with_suggestions(self): threads = feedback_services.get_open_threads(self.EXP_ID1, True) self.assertEqual(len(threads), 1) self.assertEqual(threads[0].id, self.EXP_ID1 + '.' + self.THREAD_ID1) def test_get_open_threads_without_suggestions(self): threads = feedback_services.get_open_threads(self.EXP_ID1, False) self.assertEqual(len(threads), 1) self.assertEqual(threads[0].id, self.EXP_ID1 + '.' + self.THREAD_ID5) def test_get_closed_threads_with_suggestions(self): threads = feedback_services.get_closed_threads(self.EXP_ID1, True) self.assertEqual(len(threads), 2) self.assertEqual(threads[0].id, self.EXP_ID1 + '.' + self.THREAD_ID2) self.assertEqual(threads[1].id, self.EXP_ID1 + '.' + self.THREAD_ID3) def test_get_closed_threads_without_suggestions(self): threads = feedback_services.get_closed_threads(self.EXP_ID1, False) self.assertEqual(len(threads), 1) self.assertEqual(threads[0].id, self.EXP_ID1 + '.' 
+ self.THREAD_ID4) def test_get_all_threads_with_suggestion(self): threads = feedback_services.get_all_threads(self.EXP_ID1, True) self.assertEqual(len(threads), 3) self.assertEqual(threads[0].id, self.EXP_ID1 + '.' + self.THREAD_ID1) self.assertEqual(threads[1].id, self.EXP_ID1 + '.' + self.THREAD_ID2) self.assertEqual(threads[2].id, self.EXP_ID1 + '.' + self.THREAD_ID3) def test_get_all_threads_without_suggestion(self): threads = feedback_services.get_all_threads(self.EXP_ID1, False) self.assertEqual(len(threads), 2) self.assertEqual(threads[0].id, self.EXP_ID1 + '.' + self.THREAD_ID4) self.assertEqual(threads[1].id, self.EXP_ID1 + '.' + self.THREAD_ID5) class FeedbackThreadUnitTests(test_utils.GenericTestBase): EXP_ID_1 = 'eid1' EXP_ID_2 = 'eid2' EXPECTED_THREAD_DICT = { 'status': u'open', 'state_name': u'a_state_name', 'summary': None, 'original_author_username': None, 'subject': u'a subject' } EXPECTED_THREAD_DICT_VIEWER = { 'status': u'open', 'state_name': u'a_state_name_second', 'summary': None, 'original_author_username': None, 'subject': u'a subject second' } def setUp(self): super(FeedbackThreadUnitTests, self).setUp() self.viewer_id = self.get_user_id_from_email(self.VIEWER_EMAIL) user_services.get_or_create_user(self.viewer_id, self.VIEWER_EMAIL) self.signup(self.VIEWER_EMAIL, self.VIEWER_USERNAME) def _run_computation(self): (feedback_jobs_continuous_test.ModifiedFeedbackAnalyticsAggregator. start_computation()) self.assertEqual( self.count_jobs_in_taskqueue( queue_name=taskqueue_services.QUEUE_NAME_DEFAULT), 1) self.process_and_flush_pending_tasks() self.assertEqual( self.count_jobs_in_taskqueue( queue_name=taskqueue_services.QUEUE_NAME_DEFAULT), 0) self.process_and_flush_pending_tasks() def test_get_all_threads(self): # Create an anonymous feedback thread feedback_services.create_thread( self.EXP_ID_1, self.EXPECTED_THREAD_DICT['state_name'], None, self.EXPECTED_THREAD_DICT['subject'], 'not used here') threads = feedback_services.get_all_threads(self.EXP_ID_1, False) self.assertEqual(1, len(threads)) self.assertDictContainsSubset(self.EXPECTED_THREAD_DICT, threads[0].to_dict()) self.EXPECTED_THREAD_DICT_VIEWER['original_author_username'] = ( self.VIEWER_USERNAME) # Viewer creates feedback thread feedback_services.create_thread( self.EXP_ID_1, self.EXPECTED_THREAD_DICT_VIEWER['state_name'], self.viewer_id, self.EXPECTED_THREAD_DICT_VIEWER['subject'], 'not used here') threads = feedback_services.get_all_threads(self.EXP_ID_1, False) self.assertEqual(2, len(threads)) self.assertDictContainsSubset(self.EXPECTED_THREAD_DICT_VIEWER, threads[1].to_dict()) def test_get_total_open_threads_before_job_run(self): self.assertEqual(feedback_services.get_total_open_threads( feedback_services.get_thread_analytics_multi([self.EXP_ID_1])), 0) feedback_services.create_thread( self.EXP_ID_1, self.EXPECTED_THREAD_DICT['state_name'], None, self.EXPECTED_THREAD_DICT['subject'], 'not used here') threads = feedback_services.get_all_threads(self.EXP_ID_1, False) self.assertEqual(1, len(threads)) self.assertEqual(feedback_services.get_total_open_threads( feedback_services.get_thread_analytics_multi([self.EXP_ID_1])), 0) def test_get_total_open_threads_for_single_exploration(self): feedback_services.create_thread( self.EXP_ID_1, self.EXPECTED_THREAD_DICT['state_name'], None, self.EXPECTED_THREAD_DICT['subject'], 'not used here') threads = feedback_services.get_all_threads(self.EXP_ID_1, False) self.assertEqual(1, len(threads)) self._run_computation() 
self.assertEqual(feedback_services.get_total_open_threads( feedback_services.get_thread_analytics_multi([self.EXP_ID_1])), 1) def test_get_total_open_threads_for_multiple_explorations(self): feedback_services.create_thread( self.EXP_ID_1, self.EXPECTED_THREAD_DICT['state_name'], None, self.EXPECTED_THREAD_DICT['subject'], 'not used here') feedback_services.create_thread( self.EXP_ID_2, self.EXPECTED_THREAD_DICT['state_name'], None, self.EXPECTED_THREAD_DICT['subject'], 'not used here') threads_exp_1 = feedback_services.get_all_threads(self.EXP_ID_1, False) self.assertEqual(1, len(threads_exp_1)) threads_exp_2 = feedback_services.get_all_threads(self.EXP_ID_2, False) self.assertEqual(1, len(threads_exp_2)) def _close_thread(exp_id, thread_id): thread = (feedback_models.FeedbackThreadModel. get_by_exp_and_thread_id(exp_id, thread_id)) thread.status = feedback_models.STATUS_CHOICES_FIXED thread.put() _close_thread(self.EXP_ID_1, threads_exp_1[0].get_thread_id()) self.assertEqual( len(feedback_services.get_closed_threads(self.EXP_ID_1, False)), 1) self._run_computation() self.assertEqual(feedback_services.get_total_open_threads( feedback_services.get_thread_analytics_multi( [self.EXP_ID_1, self.EXP_ID_2])), 1) class EmailsTaskqueueTests(test_utils.GenericTestBase): """Tests for tasks in emails taskqueue.""" def test_create_new_batch_task(self): user_id = 'user' feedback_services.enqueue_feedback_message_batch_email_task(user_id) self.assertEqual(self.count_jobs_in_taskqueue(), 1) tasks = self.get_pending_tasks() self.assertEqual( tasks[0].url, feconf.FEEDBACK_MESSAGE_EMAIL_HANDLER_URL) def test_create_new_instant_task(self): user_id = 'user' reference_dict = { 'exploration_id': 'eid', 'thread_id': 'tid', 'message_id': 'mid' } reference = feedback_domain.FeedbackMessageReference( reference_dict['exploration_id'], reference_dict['thread_id'], reference_dict['message_id']) feedback_services.enqueue_feedback_message_instant_email_task( user_id, reference) self.assertEqual(self.count_jobs_in_taskqueue(), 1) tasks = self.get_pending_tasks() payload = json.loads(tasks[0].payload) self.assertEqual( tasks[0].url, feconf.INSTANT_FEEDBACK_EMAIL_HANDLER_URL) self.assertDictEqual(payload['reference_dict'], reference_dict) class FeedbackMessageEmailTests(test_utils.GenericTestBase): """Tests for feedback message emails.""" def setUp(self): super(FeedbackMessageEmailTests, self).setUp() self.signup('a@example.com', 'A') self.user_id_a = self.get_user_id_from_email('a@example.com') self.signup(self.EDITOR_EMAIL, self.EDITOR_USERNAME) self.editor_id = self.get_user_id_from_email(self.EDITOR_EMAIL) self.exploration = self.save_new_default_exploration( 'A', self.editor_id, 'Title') self.can_send_emails_ctx = self.swap( feconf, 'CAN_SEND_EMAILS', True) self.can_send_feedback_email_ctx = self.swap( feconf, 'CAN_SEND_FEEDBACK_MESSAGE_EMAILS', True) def test_send_feedback_message_email(self): with self.can_send_emails_ctx, self.can_send_feedback_email_ctx: feedback_services.create_thread( self.exploration.id, 'a_state_name', self.user_id_a, 'a subject', 'some text') threadlist = feedback_services.get_all_threads( self.exploration.id, False) thread_id = threadlist[0].get_thread_id() messagelist = feedback_services.get_messages( self.exploration.id, thread_id) self.assertEqual(len(messagelist), 1) expected_feedback_message_dict = { 'exploration_id': self.exploration.id, 'thread_id': thread_id, 'message_id': messagelist[0].message_id } # There are two jobs in the taskqueue: one for the realtime event # associated with 
creating a thread, and one for sending the email. self.assertEqual(self.count_jobs_in_taskqueue(), 2) model = feedback_models.UnsentFeedbackEmailModel.get(self.editor_id) self.assertEqual(len(model.feedback_message_references), 1) self.assertDictEqual( model.feedback_message_references[0], expected_feedback_message_dict) self.assertEqual(model.retries, 0) def test_add_new_feedback_message(self): with self.can_send_emails_ctx, self.can_send_feedback_email_ctx: feedback_services.create_thread( self.exploration.id, 'a_state_name', self.user_id_a, 'a subject', 'some text') threadlist = feedback_services.get_all_threads( self.exploration.id, False) thread_id = threadlist[0].get_thread_id() feedback_services.create_message( self.exploration.id, thread_id, self.user_id_a, None, None, 'editor message') # There are two jobs in the taskqueue: one for the realtime event # associated with creating a thread, and one for sending the email. self.assertEqual(self.count_jobs_in_taskqueue(), 2) messagelist = feedback_services.get_messages( self.exploration.id, thread_id) self.assertEqual(len(messagelist), 2) expected_feedback_message_dict1 = { 'exploration_id': self.exploration.id, 'thread_id': thread_id, 'message_id': messagelist[0].message_id } expected_feedback_message_dict2 = { 'exploration_id': self.exploration.id, 'thread_id': thread_id, 'message_id': messagelist[1].message_id } model = feedback_models.UnsentFeedbackEmailModel.get(self.editor_id) self.assertEqual(len(model.feedback_message_references), 2) self.assertDictEqual( model.feedback_message_references[0], expected_feedback_message_dict1) self.assertDictEqual( model.feedback_message_references[1], expected_feedback_message_dict2) self.assertEqual(model.retries, 0) def test_email_is_not_sent_if_recipient_has_declined_such_emails(self): user_services.update_email_preferences( self.editor_id, True, False, False) with self.can_send_emails_ctx, self.can_send_feedback_email_ctx: feedback_services.create_thread( self.exploration.id, 'a_state_name', self.user_id_a, 'a subject', 'some text') # Note: the job in the taskqueue represents the realtime # event emitted by create_thread(). self.assertEqual(self.count_jobs_in_taskqueue(), 1) self.process_and_flush_pending_tasks() messages = self.mail_stub.get_sent_messages(to=self.EDITOR_EMAIL) self.assertEqual(len(messages), 0) def test_that_emails_are_not_sent_for_anonymous_user(self): with self.can_send_emails_ctx, self.can_send_feedback_email_ctx: feedback_services.create_thread( self.exploration.id, 'a_state_name', None, 'a subject', 'some text') # Note: the job in the taskqueue represents the realtime # event emitted by create_thread(). self.assertEqual(self.count_jobs_in_taskqueue(), 1) self.process_and_flush_pending_tasks() messages = self.mail_stub.get_sent_messages(to=self.EDITOR_EMAIL) self.assertEqual(len(messages), 0) def test_that_emails_are_sent_for_registered_user(self): with self.can_send_emails_ctx, self.can_send_feedback_email_ctx: feedback_services.create_thread( self.exploration.id, 'a_state_name', self.user_id_a, 'a subject', 'some text') # There are two jobs in the taskqueue: one for the realtime event # associated with creating a thread, and one for sending the email. 
self.assertEqual(self.count_jobs_in_taskqueue(), 2) tasks = self.get_pending_tasks() self.assertEqual( tasks[0].url, feconf.FEEDBACK_MESSAGE_EMAIL_HANDLER_URL) self.process_and_flush_pending_tasks() messages = self.mail_stub.get_sent_messages(to=self.EDITOR_EMAIL) self.assertEqual(len(messages), 1) def test_that_emails_are_not_sent_if_service_is_disabled(self): cannot_send_emails_ctx = self.swap( feconf, 'CAN_SEND_EMAILS', False) cannot_send_feedback_message_email_ctx = self.swap( feconf, 'CAN_SEND_FEEDBACK_MESSAGE_EMAILS', False) with cannot_send_emails_ctx, cannot_send_feedback_message_email_ctx: feedback_services.create_thread( self.exploration.id, 'a_state_name', self.user_id_a, 'a subject', 'some text') # Note: the job in the taskqueue represents the realtime # event emitted by create_thread(). self.assertEqual(self.count_jobs_in_taskqueue(), 1) self.process_and_flush_pending_tasks() messages = self.mail_stub.get_sent_messages(to=self.EDITOR_EMAIL) self.assertEqual(len(messages), 0) def test_that_emails_are_not_sent_for_thread_status_changes(self): with self.can_send_emails_ctx, self.can_send_feedback_email_ctx: feedback_services.create_thread( self.exploration.id, 'a_state_name', self.user_id_a, 'a subject', '') # Note: the job in the taskqueue represents the realtime # event emitted by create_thread(). self.assertEqual(self.count_jobs_in_taskqueue(), 1) self.process_and_flush_pending_tasks() messages = self.mail_stub.get_sent_messages(to=self.EDITOR_EMAIL) self.assertEqual(len(messages), 0) def test_that_email_are_not_sent_to_author_himself(self): with self.can_send_emails_ctx, self.can_send_feedback_email_ctx: feedback_services.create_thread( self.exploration.id, 'a_state_name', self.editor_id, 'a subject', 'A message') # Note: the job in the taskqueue represents the realtime # event emitted by create_thread(). self.assertEqual(self.count_jobs_in_taskqueue(), 1) self.process_and_flush_pending_tasks() messages = self.mail_stub.get_sent_messages(to=self.EDITOR_EMAIL) self.assertEqual(len(messages), 0) def test_that_email_is_sent_for_reply_on_feedback(self): with self.can_send_emails_ctx, self.can_send_feedback_email_ctx: feedback_services.create_thread( self.exploration.id, 'a_state_name', self.user_id_a, 'a subject', 'A message') # There are two jobs in the taskqueue: one for the realtime event # associated with creating a thread, and one for sending the email. self.assertEqual(self.count_jobs_in_taskqueue(), 2) self.process_and_flush_pending_tasks() threadlist = feedback_services.get_all_threads( self.exploration.id, False) thread_id = threadlist[0].get_thread_id() feedback_services.create_message( self.exploration.id, thread_id, self.editor_id, None, None, 'editor message') self.assertEqual(self.count_jobs_in_taskqueue(), 1) self.process_and_flush_pending_tasks() def test_that_email_is_sent_for_changing_status_of_thread(self): with self.can_send_emails_ctx, self.can_send_feedback_email_ctx: feedback_services.create_thread( self.exploration.id, 'a_state_name', self.user_id_a, 'a subject', 'A message') # There are two jobs in the taskqueue: one for the realtime event # associated with creating a thread, and one for sending the email. 
self.assertEqual(self.count_jobs_in_taskqueue(), 2) self.process_and_flush_pending_tasks() threadlist = feedback_services.get_all_threads( self.exploration.id, False) thread_id = threadlist[0].get_thread_id() feedback_services.create_message( self.exploration.id, thread_id, self.editor_id, feedback_models.STATUS_CHOICES_FIXED, None, '') # There are two jobs in the taskqueue: one for the realtime event # associated with changing subject of thread, and one for sending # the email. self.assertEqual(self.count_jobs_in_taskqueue(), 2) self.process_and_flush_pending_tasks() def test_that_email_is_sent_for_each_feedback_message(self): with self.can_send_emails_ctx, self.can_send_feedback_email_ctx: feedback_services.create_thread( self.exploration.id, 'a_state_name', self.user_id_a, 'a subject', 'A message') threadlist = feedback_services.get_all_threads( self.exploration.id, False) thread_id = threadlist[0].get_thread_id() # There are two jobs in the taskqueue: one for the realtime event # associated with creating a thread, and one for sending the email. self.assertEqual(self.count_jobs_in_taskqueue(), 2) self.process_and_flush_pending_tasks() feedback_services.create_message( self.exploration.id, thread_id, self.editor_id, None, None, 'editor message') self.assertEqual(self.count_jobs_in_taskqueue(), 1) self.process_and_flush_pending_tasks() feedback_services.create_message( self.exploration.id, thread_id, self.editor_id, None, None, 'editor message2') self.assertEqual(self.count_jobs_in_taskqueue(), 1) self.process_and_flush_pending_tasks()
apache-2.0
CAH-FlyChen/ScrumBasic
src/ScrumBasic/Migrations/20160323074709_Modify_UserStory_Colum.cs
5747
using System; using System.Collections.Generic; using Microsoft.Data.Entity.Migrations; namespace ScrumBasic.Migrations { public partial class Modify_UserStory_Colum : Migration { protected override void Up(MigrationBuilder migrationBuilder) { migrationBuilder.DropForeignKey(name: "FK_IdentityRoleClaim<string>_IdentityRole_RoleId", table: "AspNetRoleClaims"); migrationBuilder.DropForeignKey(name: "FK_IdentityUserClaim<string>_ApplicationUser_UserId", table: "AspNetUserClaims"); migrationBuilder.DropForeignKey(name: "FK_IdentityUserLogin<string>_ApplicationUser_UserId", table: "AspNetUserLogins"); migrationBuilder.DropForeignKey(name: "FK_IdentityUserRole<string>_IdentityRole_RoleId", table: "AspNetUserRoles"); migrationBuilder.DropForeignKey(name: "FK_IdentityUserRole<string>_ApplicationUser_UserId", table: "AspNetUserRoles"); migrationBuilder.AddColumn<DateTime>( name: "CreateTime", table: "UserStory", nullable: false, defaultValue: new DateTime(1, 1, 1, 0, 0, 0, 0, DateTimeKind.Unspecified)); migrationBuilder.AddColumn<int>( name: "Order", table: "UserStory", nullable: false, defaultValue: 0); migrationBuilder.AddForeignKey( name: "FK_IdentityRoleClaim<string>_IdentityRole_RoleId", table: "AspNetRoleClaims", column: "RoleId", principalTable: "AspNetRoles", principalColumn: "Id", onDelete: ReferentialAction.Cascade); migrationBuilder.AddForeignKey( name: "FK_IdentityUserClaim<string>_ApplicationUser_UserId", table: "AspNetUserClaims", column: "UserId", principalTable: "AspNetUsers", principalColumn: "Id", onDelete: ReferentialAction.Cascade); migrationBuilder.AddForeignKey( name: "FK_IdentityUserLogin<string>_ApplicationUser_UserId", table: "AspNetUserLogins", column: "UserId", principalTable: "AspNetUsers", principalColumn: "Id", onDelete: ReferentialAction.Cascade); migrationBuilder.AddForeignKey( name: "FK_IdentityUserRole<string>_IdentityRole_RoleId", table: "AspNetUserRoles", column: "RoleId", principalTable: "AspNetRoles", principalColumn: "Id", onDelete: ReferentialAction.Cascade); migrationBuilder.AddForeignKey( name: "FK_IdentityUserRole<string>_ApplicationUser_UserId", table: "AspNetUserRoles", column: "UserId", principalTable: "AspNetUsers", principalColumn: "Id", onDelete: ReferentialAction.Cascade); } protected override void Down(MigrationBuilder migrationBuilder) { migrationBuilder.DropForeignKey(name: "FK_IdentityRoleClaim<string>_IdentityRole_RoleId", table: "AspNetRoleClaims"); migrationBuilder.DropForeignKey(name: "FK_IdentityUserClaim<string>_ApplicationUser_UserId", table: "AspNetUserClaims"); migrationBuilder.DropForeignKey(name: "FK_IdentityUserLogin<string>_ApplicationUser_UserId", table: "AspNetUserLogins"); migrationBuilder.DropForeignKey(name: "FK_IdentityUserRole<string>_IdentityRole_RoleId", table: "AspNetUserRoles"); migrationBuilder.DropForeignKey(name: "FK_IdentityUserRole<string>_ApplicationUser_UserId", table: "AspNetUserRoles"); migrationBuilder.DropColumn(name: "CreateTime", table: "UserStory"); migrationBuilder.DropColumn(name: "Order", table: "UserStory"); migrationBuilder.AddForeignKey( name: "FK_IdentityRoleClaim<string>_IdentityRole_RoleId", table: "AspNetRoleClaims", column: "RoleId", principalTable: "AspNetRoles", principalColumn: "Id", onDelete: ReferentialAction.Restrict); migrationBuilder.AddForeignKey( name: "FK_IdentityUserClaim<string>_ApplicationUser_UserId", table: "AspNetUserClaims", column: "UserId", principalTable: "AspNetUsers", principalColumn: "Id", onDelete: ReferentialAction.Restrict); migrationBuilder.AddForeignKey( name: 
"FK_IdentityUserLogin<string>_ApplicationUser_UserId", table: "AspNetUserLogins", column: "UserId", principalTable: "AspNetUsers", principalColumn: "Id", onDelete: ReferentialAction.Restrict); migrationBuilder.AddForeignKey( name: "FK_IdentityUserRole<string>_IdentityRole_RoleId", table: "AspNetUserRoles", column: "RoleId", principalTable: "AspNetRoles", principalColumn: "Id", onDelete: ReferentialAction.Restrict); migrationBuilder.AddForeignKey( name: "FK_IdentityUserRole<string>_ApplicationUser_UserId", table: "AspNetUserRoles", column: "UserId", principalTable: "AspNetUsers", principalColumn: "Id", onDelete: ReferentialAction.Restrict); } } }
apache-2.0
hfp/tensorflow-xsmm
tensorflow/compiler/xla/service/llvm_ir/llvm_util.cc
28335
/* Copyright 2017 The TensorFlow Authors. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ==============================================================================*/ #include "tensorflow/compiler/xla/service/llvm_ir/llvm_util.h" #include <algorithm> #include <memory> #include <vector> #include "absl/base/casts.h" #include "absl/strings/match.h" #include "absl/strings/str_cat.h" #include "llvm/IR/DerivedTypes.h" #include "llvm/IR/GlobalValue.h" #include "llvm/IR/GlobalVariable.h" #include "llvm/IR/MDBuilder.h" #include "llvm/IR/Operator.h" #include "llvm/Target/TargetOptions.h" #include "llvm/Transforms/Utils/Cloning.h" #include "tensorflow/compiler/xla/layout_util.h" #include "tensorflow/compiler/xla/literal.h" #include "tensorflow/compiler/xla/service/name_uniquer.h" #include "tensorflow/compiler/xla/shape_util.h" #include "tensorflow/compiler/xla/types.h" #include "tensorflow/compiler/xla/util.h" #include "tensorflow/core/lib/core/errors.h" #include "tensorflow/core/lib/io/path.h" #include "tensorflow/core/platform/byte_order.h" #include "tensorflow/core/platform/env.h" #include "tensorflow/core/platform/logging.h" #include "tensorflow/core/platform/types.h" namespace xla { namespace llvm_ir { namespace { // Note, this function is only useful in an insertion context; in a global // (e.g. constants) context it will CHECK fail. 
llvm::Module* ModuleFromIRBuilder(llvm::IRBuilder<>* b) { auto block = CHECK_NOTNULL(b->GetInsertBlock()); auto fn = CHECK_NOTNULL(block->getParent()); auto module = CHECK_NOTNULL(fn->getParent()); return module; } } // namespace string AsString(const std::string& str) { return string(str.data(), str.length()); } llvm::StringRef AsStringRef(absl::string_view str) { return llvm::StringRef(str.data(), str.size()); } std::unique_ptr<llvm::Module> DropConstantInitializers( const llvm::Module& module) { std::unique_ptr<llvm::Module> cloned_module = CloneModule(module); for (llvm::GlobalVariable& global_var : cloned_module->globals()) { global_var.setInitializer(nullptr); global_var.setLinkage(llvm::GlobalValue::LinkageTypes::ExternalLinkage); } return cloned_module; } string DumpModuleToString(const llvm::Module& module) { std::string buffer_string; llvm::raw_string_ostream ostream(buffer_string); module.print(ostream, nullptr); ostream.flush(); return AsString(buffer_string); } llvm::CallInst* EmitCallToIntrinsic( llvm::Intrinsic::ID intrinsic_id, absl::Span<llvm::Value* const> operands, absl::Span<llvm::Type* const> overloaded_types, llvm::IRBuilder<>* b) { llvm::Module* module = ModuleFromIRBuilder(b); llvm::Function* intrinsic = llvm::Intrinsic::getDeclaration( module, intrinsic_id, AsArrayRef(overloaded_types)); return b->CreateCall(intrinsic, AsArrayRef(operands)); } llvm::Value* EmitFloatMax(llvm::Value* lhs_value, llvm::Value* rhs_value, llvm::IRBuilder<>* b) { if (b->getFastMathFlags().noNaNs()) { auto cmp = b->CreateFCmpUGE(lhs_value, rhs_value); return b->CreateSelect(cmp, lhs_value, rhs_value); } else { auto cmp_ge = b->CreateFCmpOGE(lhs_value, rhs_value); auto lhs_is_nan = b->CreateFCmpUNE(lhs_value, lhs_value); auto sel_lhs = b->CreateOr(cmp_ge, lhs_is_nan); return b->CreateSelect(sel_lhs, lhs_value, rhs_value); } } llvm::Value* EmitFloatMin(llvm::Value* lhs_value, llvm::Value* rhs_value, llvm::IRBuilder<>* b) { if (b->getFastMathFlags().noNaNs()) { auto cmp = b->CreateFCmpULE(lhs_value, rhs_value); return b->CreateSelect(cmp, lhs_value, rhs_value); } else { auto cmp_le = b->CreateFCmpOLE(lhs_value, rhs_value); auto lhs_is_nan = b->CreateFCmpUNE(lhs_value, lhs_value); auto sel_lhs = b->CreateOr(cmp_le, lhs_is_nan); return b->CreateSelect(sel_lhs, lhs_value, rhs_value); } } llvm::Value* EmitBufferIndexingGEP(llvm::Value* array, llvm::Value* index, llvm::IRBuilder<>* b) { llvm::Type* array_type = array->getType(); CHECK(array_type->isPointerTy()); llvm::PointerType* array_type_as_pointer = llvm::cast<llvm::PointerType>(array_type); VLOG(2) << "EmitBufferIndexingGEP with type=" << llvm_ir::DumpToString(*array_type) << " array=" << llvm_ir::DumpToString(*array) << " index=" << llvm_ir::DumpToString(*index); return b->CreateInBoundsGEP( array_type_as_pointer->getElementType(), array, llvm::isa<llvm::GlobalVariable>(array) ? llvm::ArrayRef<llvm::Value*>({b->getInt64(0), index}) : index); } llvm::Value* EmitBufferIndexingGEP(llvm::Value* array, int64 index, llvm::IRBuilder<>* b) { return EmitBufferIndexingGEP(array, b->getInt64(index), b); } llvm::Type* PrimitiveTypeToIrType(PrimitiveType element_type, llvm::Module* module) { switch (element_type) { case PRED: case S8: case U8: return llvm::Type::getInt8Ty(module->getContext()); case S16: case U16: case BF16: // For BF16 we just need some type that is 16 bits wide so that it will // take up the right amount of space in memory. 
LLVM does not have a BF16 // type (the LLVM half type is IEEE 16 bit floating point, not bfloat), so // we can't map it directly to an LLVM type. We will not map a BF16 // addition to an addition on this type (int16) - this is just the type // used for storage. return llvm::Type::getInt16Ty(module->getContext()); case F16: return llvm::Type::getHalfTy(module->getContext()); case S32: case U32: return llvm::Type::getInt32Ty(module->getContext()); case S64: case U64: return llvm::Type::getInt64Ty(module->getContext()); case F32: return llvm::Type::getFloatTy(module->getContext()); case F64: return llvm::Type::getDoubleTy(module->getContext()); case C64: { auto cplx_t = module->getTypeByName("complex64"); if (cplx_t == nullptr) { // C++ standard dictates the memory layout of std::complex is contiguous // real followed by imaginary. C++11 section 26.4 [complex.numbers]: // If z is an lvalue expression of type cv std::complex<T> then the // expression reinterpret_cast<cv T(&)[2]>(z) shall be well-formed, // reinterpret_cast<cv T(&)[2]>(z)[0] shall designate the real part of // z, and reinterpret_cast<cv T(&)[2]>(z)[1] shall designate the // imaginary part of z. return llvm::StructType::create( {llvm::Type::getFloatTy(module->getContext()), llvm::Type::getFloatTy(module->getContext())}, "complex64", /*isPacked=*/true); } return cplx_t; } // A Tuple contains an array of pointers. Use i8*. case TUPLE: // An Opaque is like a void*, use i8*. case OPAQUE: return llvm::Type::getInt8PtrTy(module->getContext()); case TOKEN: // Tokens do not have a physical representation, but the compiler needs // some placeholder type, so use int8*. return llvm::Type::getInt8PtrTy(module->getContext()); default: LOG(FATAL) << "unsupported type " << element_type; } } int GetSizeInBits(llvm::Type* type) { const llvm::StructType* struct_ty = llvm::dyn_cast<llvm::StructType>(type); if (struct_ty) { CHECK(struct_ty->isPacked()); int bits = 0; for (auto element_type : struct_ty->elements()) { bits += GetSizeInBits(element_type); } return bits; } int bits = type->getPrimitiveSizeInBits(); CHECK_GT(bits, 0) << "type is not sized"; return bits; } llvm::Type* ShapeToIrType(const Shape& shape, llvm::Module* module) { llvm::Type* result_type = PrimitiveTypeToIrType(shape.element_type(), module); if (shape.IsTuple()) { // A tuple buffer is an array of pointers. 
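// (PrimitiveTypeToIrType maps TUPLE to i8*, so a tuple of N elements lowers to an [N x i8*] array.)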
result_type = llvm::ArrayType::get(result_type, shape.tuple_shapes_size()); } else if (shape.IsArray()) { for (int64 dimension : LayoutUtil::MinorToMajor(shape)) { result_type = llvm::ArrayType::get(result_type, shape.dimensions(dimension)); } } return result_type; } StatusOr<llvm::Value*> EncodeSelfDescribingShapeConstant(const Shape& shape, int32* shape_size, llvm::IRBuilder<>* b) { string encoded_shape = shape.SerializeAsString(); if (encoded_shape.size() > std::numeric_limits<int32>::max()) { return InternalError("Encoded shape size exceeded int32 size limit."); } *shape_size = static_cast<int32>(encoded_shape.size()); return b->CreateGlobalStringPtr(llvm_ir::AsStringRef(encoded_shape)); } StatusOr<Shape> DecodeSelfDescribingShapeConstant(const void* shape_ptr, int32 size_bytes) { ShapeProto shape_proto; TF_RET_CHECK(shape_proto.ParseFromArray(shape_ptr, size_bytes)); Shape shape(shape_proto); TF_RETURN_IF_ERROR(ShapeUtil::ValidateShape(shape)); return std::move(shape); } llvm::Constant* ConvertLiteralToIrConstant(const Literal& literal, llvm::Module* module) { const char* data = static_cast<const char*>(literal.untyped_data()); CHECK_EQ(module->getDataLayout().isLittleEndian(), tensorflow::port::kLittleEndian); return llvm::ConstantDataArray::getString( module->getContext(), llvm::StringRef(data, literal.size_bytes()), /*AddNull=*/false); } llvm::GlobalVariable* AllocateSharedMemoryTile(llvm::Module* module, llvm::Type* tile_type, absl::string_view name) { const int kNVPTXSharedMemoryAddrSpace = 3; return new llvm::GlobalVariable( *module, tile_type, /*isConstant=*/false, llvm::GlobalValue::PrivateLinkage, llvm::UndefValue::get(tile_type), AsStringRef(name), nullptr, llvm::GlobalValue::NotThreadLocal, kNVPTXSharedMemoryAddrSpace); } llvm::AllocaInst* EmitAllocaAtFunctionEntry(llvm::Type* type, absl::string_view name, llvm::IRBuilder<>* b, int alignment) { return EmitAllocaAtFunctionEntryWithCount(type, nullptr, name, b, alignment); } llvm::AllocaInst* EmitAllocaAtFunctionEntryWithCount(llvm::Type* type, llvm::Value* element_count, absl::string_view name, llvm::IRBuilder<>* b, int alignment) { llvm::IRBuilder<>::InsertPoint insert_point = b->saveIP(); llvm::Function* function = b->GetInsertBlock()->getParent(); b->SetInsertPoint(&function->getEntryBlock(), function->getEntryBlock().getFirstInsertionPt()); llvm::AllocaInst* alloca = b->CreateAlloca(type, element_count, AsStringRef(name)); if (alignment != 0) { alloca->setAlignment(alignment); } b->restoreIP(insert_point); return alloca; } llvm::BasicBlock* CreateBasicBlock(llvm::BasicBlock* insert_before, absl::string_view name, llvm::IRBuilder<>* b) { return llvm::BasicBlock::Create( /*Context=*/b->getContext(), /*Name=*/AsStringRef(name), /*Parent=*/b->GetInsertBlock()->getParent(), /*InsertBefore*/ insert_before); } LlvmIfData EmitIfThenElse(llvm::Value* condition, absl::string_view name, llvm::IRBuilder<>* b, bool emit_else) { llvm_ir::LlvmIfData if_data; if_data.if_block = b->GetInsertBlock(); if_data.true_block = CreateBasicBlock(nullptr, absl::StrCat(name, "-true"), b); if_data.false_block = emit_else ? CreateBasicBlock(nullptr, absl::StrCat(name, "-false"), b) : nullptr; // Add a terminator to the if block, if necessary. 
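// If the block is still unterminated we branch it to a fresh after-block; otherwise the block is split at the current insert point so its existing successors hang off the after-block.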
if (if_data.if_block->getTerminator() == nullptr) { b->SetInsertPoint(if_data.if_block); if_data.after_block = CreateBasicBlock(nullptr, absl::StrCat(name, "-after"), b); b->CreateBr(if_data.after_block); } else { if_data.after_block = if_data.if_block->splitBasicBlock( b->GetInsertPoint(), AsStringRef(absl::StrCat(name, "-after"))); } // Our basic block should now end with an unconditional branch. Remove it; // we're going to replace it with a conditional branch. if_data.if_block->getTerminator()->eraseFromParent(); b->SetInsertPoint(if_data.if_block); b->CreateCondBr(condition, if_data.true_block, emit_else ? if_data.false_block : if_data.after_block); b->SetInsertPoint(if_data.true_block); b->CreateBr(if_data.after_block); if (emit_else) { b->SetInsertPoint(if_data.false_block); b->CreateBr(if_data.after_block); } b->SetInsertPoint(if_data.after_block, if_data.after_block->getFirstInsertionPt()); return if_data; } llvm::Value* EmitComparison(llvm::CmpInst::Predicate predicate, llvm::Value* lhs_value, llvm::Value* rhs_value, llvm::IRBuilder<>* b) { llvm::Value* comparison_result; if (lhs_value->getType()->isIntegerTy()) { comparison_result = b->CreateICmp(predicate, lhs_value, rhs_value); } else { comparison_result = b->CreateFCmp(predicate, lhs_value, rhs_value); } // comparison_result is i1, but the NVPTX codegen incorrectly lowers i1 // arrays. So we extend it to i8 so that it's addressable. return b->CreateZExt(comparison_result, llvm_ir::PrimitiveTypeToIrType( PRED, ModuleFromIRBuilder(b))); } // Internal helper that is called from emitted code to log an int64 value with a // tag. static void LogS64(const char* tag, int64 value) { LOG(INFO) << tag << " (int64): " << value; } void EmitLogging(const char* tag, llvm::Value* value, llvm::IRBuilder<>* b) { llvm::FunctionType* log_function_type = llvm::FunctionType::get( b->getVoidTy(), {b->getInt64Ty(), b->getInt64Ty()}, /*isVarArg=*/false); b->CreateCall(log_function_type, b->CreateIntToPtr(b->getInt64(absl::bit_cast<int64>(&LogS64)), log_function_type->getPointerTo()), {b->getInt64(absl::bit_cast<int64>(tag)), value}); } void SetAlignmentMetadataForLoad(llvm::LoadInst* load, uint64_t alignment) { llvm::LLVMContext& context = load->getContext(); llvm::Type* int64_ty = llvm::Type::getInt64Ty(context); llvm::Constant* alignment_constant = llvm::ConstantInt::get(int64_ty, alignment); llvm::MDBuilder metadata_builder(context); auto* alignment_metadata = metadata_builder.createConstant(alignment_constant); load->setMetadata(llvm::LLVMContext::MD_align, llvm::MDNode::get(context, alignment_metadata)); } void SetDereferenceableMetadataForLoad(llvm::LoadInst* load, uint64_t dereferenceable_bytes) { llvm::LLVMContext& context = load->getContext(); llvm::Type* int64_ty = llvm::Type::getInt64Ty(context); llvm::Constant* dereferenceable_bytes_constant = llvm::ConstantInt::get(int64_ty, dereferenceable_bytes); llvm::MDBuilder metadata_builder(context); auto* dereferenceable_bytes_metadata = metadata_builder.createConstant(dereferenceable_bytes_constant); load->setMetadata(llvm::LLVMContext::MD_dereferenceable, llvm::MDNode::get(context, dereferenceable_bytes_metadata)); } llvm::Instruction* AddRangeMetadata(int64 lower, int64 upper, llvm::Instruction* inst) { llvm::LLVMContext& context = inst->getParent()->getContext(); llvm::IntegerType* i32 = llvm::Type::getInt32Ty(context); inst->setMetadata( llvm::LLVMContext::MD_range, llvm::MDNode::get( context, {llvm::ConstantAsMetadata::get(llvm::ConstantInt::get(i32, lower)), 
llvm::ConstantAsMetadata::get(llvm::ConstantInt::get(i32, upper))})); return inst; } string IrName(string a) { a.erase(std::remove(a.begin(), a.end(), '%'), a.end()); return a; } string IrName(absl::string_view a, absl::string_view b) { if (!a.empty() && !b.empty()) { return IrName(absl::StrCat(a, ".", b)); } return IrName(absl::StrCat(a, b)); } string IrName(const HloInstruction* a, absl::string_view b) { return IrName(a->name(), b); } string SanitizeFunctionName(string function_name) { // The backend with the strictest requirements on function names is NVPTX, so // we sanitize to its requirements. // // A slightly stricter version of the NVPTX requirements is that names match // /[a-zA-Z_$][a-zA-Z0-9_$]*/, with the exception that the names "_" and "$" // are illegal. // Sanitize chars in function_name. std::transform(function_name.begin(), function_name.end(), function_name.begin(), [](char c) { if (('a' <= c && c <= 'z') || ('A' <= c && c <= 'Z') || ('0' <= c && c <= '9') || c == '_' || c == '$') { return c; } return '_'; }); // Ensure the name isn't empty. if (function_name.empty()) { function_name = "__unnamed"; } // Ensure the name doesn't start with a number. if (!function_name.empty() && function_name[0] >= '0' && function_name[0] <= '9') { function_name.insert(function_name.begin(), '_'); } // Ensure the name isn't "_" or "$". if (function_name == "_" || function_name == "$") { function_name += '_'; } return function_name; } void SetToFirstInsertPoint(llvm::BasicBlock* blk, llvm::IRBuilder<>* builder) { builder->SetInsertPoint(blk, blk->getFirstInsertionPt()); } void SetToLastInsertPoint(llvm::BasicBlock* blk, llvm::IRBuilder<>* builder) { if (llvm::Instruction* terminator = blk->getTerminator()) { builder->SetInsertPoint(terminator); } else { builder->SetInsertPoint(blk); } } llvm::Value* CreateRor(llvm::Value* rotand, llvm::Value* rotor, llvm::IRBuilder<>* builder) { auto size = rotand->getType()->getPrimitiveSizeInBits(); auto size_value = builder->getIntN(size, size); auto mod = [=](llvm::Value* x) { return builder->CreateURem(x, size_value); }; return builder->CreateOr( builder->CreateShl(rotand, mod(builder->CreateSub(size_value, rotor))), builder->CreateLShr(rotand, mod(rotor))); } int64 ByteSizeOf(const Shape& shape, const llvm::DataLayout& data_layout) { unsigned pointer_size = data_layout.getPointerSize(); return ShapeUtil::ByteSizeOf(shape, pointer_size); } llvm::FastMathFlags GetFastMathFlags(bool fast_math_enabled) { llvm::FastMathFlags flags; if (fast_math_enabled) { // Fast implies AllowReassoc, NoInfs, NoNaNs, NoSignedZeros, // AllowReciprocal, AllowContract, and ApproxFunc. flags.setFast(); } return flags; } void SetTargetOptions(bool fast_math_enabled, llvm::TargetOptions* target_options) { // In LLVM backend flags, UnsafeFPMath does not explicitly imply // NoInfs, etc. target_options->UnsafeFPMath = fast_math_enabled; target_options->NoInfsFPMath = fast_math_enabled; target_options->NoNaNsFPMath = fast_math_enabled; target_options->NoSignedZerosFPMath = fast_math_enabled; } std::map<int, llvm::MDNode*> MergeMetadata( llvm::LLVMContext* context, const std::map<int, llvm::MDNode*>& a, const std::map<int, llvm::MDNode*>& b) { // We should extend this as needed to deal with other kinds of metadata like // !dereferenceable and !range. 
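// !alias.scope lists are merged by union, while !noalias lists are intersected: the merged access may only be marked noalias with respect to scopes both originals agreed on.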
std::map<int, llvm::MDNode*> result; for (auto kind_md_pair : a) { if (kind_md_pair.first == llvm::LLVMContext::MD_alias_scope) { llvm::SmallVector<llvm::Metadata*, 8> union_of_scopes; llvm::SmallPtrSet<llvm::Metadata*, 8> scope_set; for (const auto& scope_a : kind_md_pair.second->operands()) { scope_set.insert(llvm::cast<llvm::MDNode>(scope_a.get())); union_of_scopes.push_back(llvm::cast<llvm::MDNode>(scope_a.get())); } auto it = b.find(kind_md_pair.first); if (it != b.end()) { for (const auto& scope_b : it->second->operands()) { if (!scope_set.count(llvm::cast<llvm::MDNode>(scope_b.get()))) { union_of_scopes.push_back(llvm::cast<llvm::MDNode>(scope_b.get())); } } } result[llvm::LLVMContext::MD_alias_scope] = llvm::MDNode::get(*context, union_of_scopes); } else if (kind_md_pair.first == llvm::LLVMContext::MD_noalias) { llvm::SmallVector<llvm::Metadata*, 8> intersection_of_scopes; llvm::SmallPtrSet<llvm::Metadata*, 8> scope_set; for (const auto& scope_a : kind_md_pair.second->operands()) { scope_set.insert(llvm::cast<llvm::MDNode>(scope_a.get())); } auto it = b.find(kind_md_pair.first); if (it != b.end()) { for (const auto& scope_b : it->second->operands()) { if (scope_set.count(llvm::cast<llvm::MDNode>(scope_b))) { intersection_of_scopes.push_back(llvm::cast<llvm::MDNode>(scope_b)); } } } if (!intersection_of_scopes.empty()) { result[llvm::LLVMContext::MD_noalias] = llvm::MDNode::get(*context, intersection_of_scopes); } } } return result; } static string GetProcessUniqueIrFileName(absl::string_view prefix) { static tensorflow::mutex mu(tensorflow::LINKER_INITIALIZED); static NameUniquer* uniquer = new NameUniquer(/*separator=*/"-"); tensorflow::mutex_lock lock(mu); return uniquer->GetUniqueName(prefix); } static Status CreateAndWriteStringToFile(const string& directory_name, const string& file_name, const string& text) { std::unique_ptr<tensorflow::WritableFile> f; TF_RETURN_IF_ERROR( tensorflow::Env::Default()->RecursivelyCreateDir(directory_name)); TF_RETURN_IF_ERROR( tensorflow::Env::Default()->NewWritableFile(file_name, &f)); TF_RETURN_IF_ERROR(f->Append(text)); TF_RETURN_IF_ERROR(f->Close()); return Status::OK(); } Status DumpIRToDirectory(const string& directory_name, const string& hlo_module_name, const llvm::Module& llvm_module, bool optimized) { // We can end up compiling different modules with the same name when using // XlaJitCompiledCpuFunction::Compile. Avoid overwriting IR files previously // dumped from the same process in such cases. string unique_and_safe_file_name = GetProcessUniqueIrFileName( absl::StrCat("ir-", SanitizeFileName(hlo_module_name), "-", optimized ? "with" : "no", "-opt")); string ir_file_name = tensorflow::io::JoinPath( directory_name, absl::StrCat(unique_and_safe_file_name, ".ll")); // For some models the embedded constants can be huge, so also dump the module // with the constants stripped to get IR that is easier to manipulate. 
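// The stripped variant reuses DropConstantInitializers (defined above) and is written next to the full dump with a "-noconst.ll" suffix.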
string ir_no_constant_initializers_file_name = tensorflow::io::JoinPath( directory_name, absl::StrCat(unique_and_safe_file_name, "-noconst.ll")); TF_RETURN_IF_ERROR(CreateAndWriteStringToFile( directory_name, ir_file_name, DumpModuleToString(llvm_module))); return CreateAndWriteStringToFile( directory_name, ir_no_constant_initializers_file_name, DumpModuleToString(*DropConstantInitializers(llvm_module))); } llvm::Function* CreateFunction(llvm::FunctionType* function_type, llvm::GlobalValue::LinkageTypes linkage, bool enable_fast_math, bool optimize_for_size, absl::string_view name, llvm::Module* module) { llvm::Function* function = llvm::Function::Create(function_type, linkage, AsStringRef(name), module); function->setCallingConv(llvm::CallingConv::C); function->addFnAttr("no-frame-pointer-elim", "false"); if (enable_fast_math) { function->addFnAttr("unsafe-fp-math", "true"); function->addFnAttr("no-infs-fp-math", "true"); function->addFnAttr("no-nans-fp-math", "true"); function->addFnAttr("no-signed-zeros-fp-math", "true"); } // Add the optize attribute to the function if optimizing for size. This // controls internal behavior of some optimization passes (e.g. loop // unrolling). if (optimize_for_size) { function->addFnAttr(llvm::Attribute::OptimizeForSize); } return function; } void InitializeLLVMCommandLineOptions(const HloModuleConfig& config) { auto options = config.debug_options().xla_backend_extra_options(); if (!options.empty()) { std::vector<string> fake_argv_storage; fake_argv_storage.push_back(""); for (const auto& it : options) { // Skip options the XLA backend itself consumes. if (!absl::StartsWith(it.first, "xla_")) { if (it.second.empty()) { fake_argv_storage.push_back(it.first); } else { fake_argv_storage.push_back(it.first + "=" + it.second); } } } VLOG(2) << "Passing argv to LLVM:"; std::vector<const char*> fake_argv; for (const auto& s : fake_argv_storage) { fake_argv.push_back(s.c_str()); VLOG(2) << s; } llvm::cl::ParseCommandLineOptions(fake_argv.size(), &fake_argv[0]); } } std::pair<llvm::Value*, llvm::Value*> UMulLowHigh32(llvm::IRBuilder<>* b, llvm::Value* src0, llvm::Value* src1) { CHECK_EQ(src0->getType()->getPrimitiveSizeInBits(), 32); CHECK_EQ(src1->getType()->getPrimitiveSizeInBits(), 32); llvm::Type* int64_ty = b->getInt64Ty(); src0 = b->CreateZExt(src0, int64_ty); src1 = b->CreateZExt(src1, int64_ty); return SplitInt64ToInt32s(b, b->CreateMul(src0, src1)); } std::pair<llvm::Value*, llvm::Value*> SplitInt64ToInt32s( llvm::IRBuilder<>* b, llvm::Value* value_64bits) { CHECK_EQ(value_64bits->getType()->getPrimitiveSizeInBits(), 64); llvm::Type* int32_ty = b->getInt32Ty(); llvm::Value* low_32bits = b->CreateTrunc(value_64bits, int32_ty); llvm::Value* high_32bits = b->CreateTrunc(b->CreateLShr(value_64bits, 32), int32_ty); return std::make_pair(low_32bits, high_32bits); } llvm::GlobalVariable* GetOrCreateVariableForPhiloxRngState( llvm::Module* module, llvm::IRBuilder<>* b) { static const char* kPhiloxRngStateVariableName = "philox_rng_state"; llvm::GlobalVariable* state_ptr = module->getNamedGlobal(kPhiloxRngStateVariableName); if (!state_ptr) { state_ptr = new llvm::GlobalVariable( /*M=*/*module, /*Ty=*/b->getInt64Ty(), /*isConstant=*/false, /*Linkage=*/llvm::GlobalValue::PrivateLinkage, /*Initializer=*/b->getInt64(0), /*Name=*/kPhiloxRngStateVariableName); } return state_ptr; } void IncrementVariableForPhiloxRngState(int64 value, llvm::Module* module, llvm::IRBuilder<>* builder) { llvm::GlobalVariable* state_ptr = GetOrCreateVariableForPhiloxRngState(module, 
builder); llvm::Value* state_value_old = builder->CreateLoad(state_ptr, "load_state"); // If the 64-bit value overflows, we use the wraparound value. This should // be fine in practice as we only add one to the value each time when a RNG is // executed. llvm::Value* state_value_new = builder->CreateAdd( state_value_old, builder->getInt64(value), "inc_state"); builder->CreateStore(state_value_new, state_ptr); } } // namespace llvm_ir } // namespace xla
apache-2.0
tyler-johnson/pouchdb
bin/test-browser.js
7741
#!/usr/bin/env node 'use strict'; var wd = require('wd'); wd.configureHttp({timeout: 180000}); // 3 minutes var sauceConnectLauncher = require('sauce-connect-launcher'); var selenium = require('selenium-standalone'); var querystring = require("querystring"); var MochaSpecReporter = require('mocha').reporters.Spec; var devserver = require('./dev-server.js'); var testTimeout = 30 * 60 * 1000; var username = process.env.SAUCE_USERNAME; var accessKey = process.env.SAUCE_ACCESS_KEY; var SELENIUM_VERSION = process.env.SELENIUM_VERSION || '3.141.0'; var CHROME_BIN = process.env.CHROME_BIN; var FIREFOX_BIN = process.env.FIREFOX_BIN; // BAIL=0 to disable bailing var bail = process.env.BAIL !== '0'; // process.env.CLIENT is a colon seperated list of // (saucelabs|selenium):browserName:browserVerion:platform var tmp = (process.env.CLIENT || 'selenium:firefox').split(':'); var client = { runner: tmp[0] || 'selenium', browser: tmp[1] || 'firefox', version: tmp[2] || null, // Latest platform: tmp[3] || null }; var testRoot = 'http://127.0.0.1:8000/tests/'; var testUrl; if (process.env.PERF) { testUrl = testRoot + 'performance/index.html'; } else if (process.env.TYPE === 'fuzzy') { testUrl = testRoot + 'fuzzy/index.html'; } else if (process.env.TYPE === 'mapreduce') { testUrl = testRoot + 'mapreduce/index.html'; } else if (process.env.TYPE === 'find') { testUrl = testRoot + 'find/index.html'; } else { testUrl = testRoot + 'integration/index.html'; } var qs = { remote: 1 }; var sauceClient; var sauceConnectProcess; var tunnelId = process.env.TRAVIS_JOB_NUMBER || 'tunnel-' + Date.now(); if (client.runner === 'saucelabs') { qs.saucelabs = true; } if (process.env.INVERT) { qs.invert = process.env.INVERT; } if (process.env.GREP) { qs.grep = process.env.GREP; } if (process.env.ADAPTERS) { qs.adapters = process.env.ADAPTERS; } if (process.env.AUTO_COMPACTION) { qs.autoCompaction = true; } if (process.env.SERVER) { qs.SERVER = process.env.SERVER; } if (process.env.SKIP_MIGRATION) { qs.SKIP_MIGRATION = process.env.SKIP_MIGRATION; } if (process.env.POUCHDB_SRC) { qs.src = process.env.POUCHDB_SRC; } if (process.env.PLUGINS) { qs.plugins = process.env.PLUGINS; } if (process.env.COUCH_HOST) { qs.couchHost = process.env.COUCH_HOST; } if (process.env.ADAPTER) { qs.adapter = process.env.ADAPTER; } if (process.env.ITERATIONS) { qs.iterations = process.env.ITERATIONS; } if (process.env.NEXT) { qs.NEXT = '1'; } testUrl += '?'; testUrl += querystring.stringify(qs); function testError(e) { console.error(e); console.error('Doh, tests failed'); closeClient(function () { process.exit(3); }); } function startSelenium(callback) { // Start selenium var opts = {version: SELENIUM_VERSION}; selenium.install(opts, function (err) { if (err) { console.error('Failed to install selenium'); process.exit(1); } selenium.start(opts, function () { sauceClient = wd.promiseChainRemote(); callback(); }); }); } function startSauceConnect(callback) { var options = { username: username, accessKey: accessKey, tunnelIdentifier: tunnelId }; sauceConnectLauncher(options, function (err, sauceProcess) { if (err) { console.error('Failed to connect to saucelabs'); console.error(err); return process.exit(1); } sauceConnectProcess = sauceProcess; sauceClient = wd.promiseChainRemote("localhost", 4445, username, accessKey); callback(); }); } function closeClient(callback) { sauceClient.quit().then(function () { if (sauceConnectProcess) { sauceConnectProcess.close(function () { callback(); }); } else { callback(); } }); } function RemoteRunner() { this.handlers 
= {}; this.completed = false; this.failed = false; } RemoteRunner.prototype.on = function (name, handler) { var handlers = this.handlers; if (!handlers[name]) { handlers[name] = []; } handlers[name].push(handler); }; RemoteRunner.prototype.handleEvents = function (events) { var self = this; var handlers = this.handlers; events.forEach(function (event) { self.completed = self.completed || event.name === 'end'; self.failed = self.failed || event.name === 'fail'; var additionalProps = ['pass', 'fail', 'pending'].indexOf(event.name) === -1 ? {} : { slow: event.obj.slow ? function () { return event.obj.slow; } : function () { return 60; }, fullTitle: event.obj.fullTitle ? function () { return event.obj.fullTitle; } : undefined }; var obj = Object.assign({}, event.obj, additionalProps); handlers[event.name].forEach(function (handler) { handler(obj, event.err); }); if (event.logs && event.logs.length > 0) { event.logs.forEach(function (line) { if (line.type === 'log') { console.log(line.content); } else if (line.type === 'error') { console.error(line.content); } else { console.error('Invalid log line', line); } }); console.log(); } }); }; RemoteRunner.prototype.bail = function () { var handlers = this.handlers; handlers['end'].forEach(function (handler) { handler(); }); this.completed = true; }; function BenchmarkReporter(runner) { runner.on('benchmark:result', function (obj) { console.log(' ', obj); }); } function startTest() { console.log('Starting', client, 'on', testUrl); var opts = { browserName: client.browser, version: client.version, platform: client.platform, tunnelTimeout: testTimeout, name: client.browser + ' - ' + tunnelId, 'max-duration': 60 * 45, 'command-timeout': 599, 'idle-timeout': 599, 'tunnel-identifier': tunnelId }; if (CHROME_BIN) { opts.chromeOptions = { binary: CHROME_BIN, args: ['--headless', '--disable-gpu', '--no-sandbox', '--disable-setuid-sandbox'] }; } if (FIREFOX_BIN) { opts.firefox_binary = FIREFOX_BIN; } var runner = new RemoteRunner(); new MochaSpecReporter(runner); new BenchmarkReporter(runner); sauceClient.init(opts, function () { console.log('Initialized'); sauceClient.get(testUrl, function () { console.log('Successfully started'); sauceClient.eval('navigator.userAgent', function (err, userAgent) { if (err) { testError(err); } else { console.log('Testing on:', userAgent); /* jshint evil: true */ var interval = setInterval(function () { sauceClient.eval('window.testEvents()', function (err, events) { if (err) { clearInterval(interval); testError(err); } else if (events) { runner.handleEvents(events); if (runner.completed || (runner.failed && bail)) { if (!runner.completed && runner.failed) { try { runner.bail(); } catch (e) { // Temporary debugging of bailing failure console.log('An error occurred while bailing:'); console.log(e); } } clearInterval(interval); closeClient(function () { process.exit(!process.env.PERF && runner.failed ? 1 : 0); }); } } }); }, 10 * 1000); } }); }); }); } devserver.start(function () { if (client.runner === 'saucelabs') { startSauceConnect(startTest); } else { startSelenium(startTest); } });
apache-2.0
xuyonghai123/jeesite
src/main/java/com/thinkgem/jeesite/modules/gen/util/GenUtils.java
12927
/** * Copyright &copy; 2012-2016 <a href="https://github.com/thinkgem/jeesite">JeeSite</a> All rights reserved. */ package com.thinkgem.jeesite.modules.gen.util; import java.io.BufferedReader; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.util.List; import java.util.Map; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.core.io.ClassPathResource; import org.springframework.core.io.DefaultResourceLoader; import org.springframework.core.io.Resource; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.thinkgem.jeesite.common.config.Global; import com.thinkgem.jeesite.common.mapper.JaxbMapper; import com.thinkgem.jeesite.common.utils.DateUtils; import com.thinkgem.jeesite.common.utils.FileUtils; import com.thinkgem.jeesite.common.utils.FreeMarkers; import com.thinkgem.jeesite.common.utils.StringUtils; import com.thinkgem.jeesite.modules.gen.entity.GenCategory; import com.thinkgem.jeesite.modules.gen.entity.GenConfig; import com.thinkgem.jeesite.modules.gen.entity.GenScheme; import com.thinkgem.jeesite.modules.gen.entity.GenTable; import com.thinkgem.jeesite.modules.gen.entity.GenTableColumn; import com.thinkgem.jeesite.modules.gen.entity.GenTemplate; import com.thinkgem.jeesite.modules.sys.entity.Area; import com.thinkgem.jeesite.modules.sys.entity.Office; import com.thinkgem.jeesite.modules.sys.entity.User; import com.thinkgem.jeesite.modules.sys.utils.UserUtils; /** * Code generation utility class * @author ThinkGem * @version 2013-11-16 */ public class GenUtils { private static Logger logger = LoggerFactory.getLogger(GenUtils.class); /** * Initialize column attribute fields * @param genTable */ public static void initColumnField(GenTable genTable){ for (GenTableColumn column : genTable.getColumnList()){ // If this is not a new column, skip it. if (StringUtils.isNotBlank(column.getId())){ continue; } // Set the field description if (StringUtils.isBlank(column.getComments())){ column.setComments(column.getName()); } // Set the Java type if (StringUtils.startsWithIgnoreCase(column.getJdbcType(), "CHAR") || StringUtils.startsWithIgnoreCase(column.getJdbcType(), "VARCHAR") || StringUtils.startsWithIgnoreCase(column.getJdbcType(), "NARCHAR")){ column.setJavaType("String"); }else if (StringUtils.startsWithIgnoreCase(column.getJdbcType(), "DATETIME") || StringUtils.startsWithIgnoreCase(column.getJdbcType(), "DATE") || StringUtils.startsWithIgnoreCase(column.getJdbcType(), "TIMESTAMP")){ column.setJavaType("java.util.Date"); column.setShowType("dateselect"); }else if (StringUtils.startsWithIgnoreCase(column.getJdbcType(), "BIGINT") || StringUtils.startsWithIgnoreCase(column.getJdbcType(), "NUMBER")){ // If it is a floating-point type String[] ss = StringUtils.split(StringUtils.substringBetween(column.getJdbcType(), "(", ")"), ","); if (ss != null && ss.length == 2 && Integer.parseInt(ss[1])>0){ column.setJavaType("Double"); } // If it is an integer type else if (ss != null && ss.length == 1 && Integer.parseInt(ss[0])<=10){ column.setJavaType("Integer"); } // Long integer type else{ column.setJavaType("Long"); } } // Set the Java field name column.setJavaField(StringUtils.toCamelCase(column.getName())); // Whether it is a primary key column.setIsPk(genTable.getPkList().contains(column.getName())?"1":"0"); // Insert field column.setIsInsert("1"); // Edit field if (!StringUtils.equalsIgnoreCase(column.getName(), "id") && !StringUtils.equalsIgnoreCase(column.getName(), "create_by") && !StringUtils.equalsIgnoreCase(column.getName(), "create_date") && !StringUtils.equalsIgnoreCase(column.getName(), "del_flag")){ column.setIsEdit("1"); } // List field if 
(StringUtils.equalsIgnoreCase(column.getName(), "name") || StringUtils.equalsIgnoreCase(column.getName(), "title") || StringUtils.equalsIgnoreCase(column.getName(), "remarks") || StringUtils.equalsIgnoreCase(column.getName(), "update_date")){ column.setIsList("1"); } // Query field if (StringUtils.equalsIgnoreCase(column.getName(), "name") || StringUtils.equalsIgnoreCase(column.getName(), "title")){ column.setIsQuery("1"); } // Query field type if (StringUtils.equalsIgnoreCase(column.getName(), "name") || StringUtils.equalsIgnoreCase(column.getName(), "title")){ column.setQueryType("like"); } // Set specific types and field names // User if (StringUtils.startsWithIgnoreCase(column.getName(), "user_id")){ column.setJavaType(User.class.getName()); column.setJavaField(column.getJavaField().replaceAll("Id", ".id|name")); column.setShowType("userselect"); } // Office else if (StringUtils.startsWithIgnoreCase(column.getName(), "office_id")){ column.setJavaType(Office.class.getName()); column.setJavaField(column.getJavaField().replaceAll("Id", ".id|name")); column.setShowType("officeselect"); } // Area else if (StringUtils.startsWithIgnoreCase(column.getName(), "area_id")){ column.setJavaType(Area.class.getName()); column.setJavaField(column.getJavaField().replaceAll("Id", ".id|name")); column.setShowType("areaselect"); } // Created-by, updated-by else if (StringUtils.startsWithIgnoreCase(column.getName(), "create_by") || StringUtils.startsWithIgnoreCase(column.getName(), "update_by")){ column.setJavaType(User.class.getName()); column.setJavaField(column.getJavaField() + ".id"); } // Creation time, update time else if (StringUtils.startsWithIgnoreCase(column.getName(), "create_date") || StringUtils.startsWithIgnoreCase(column.getName(), "update_date")){ column.setShowType("dateselect"); } // Remarks, content else if (StringUtils.equalsIgnoreCase(column.getName(), "remarks") || StringUtils.equalsIgnoreCase(column.getName(), "content")){ column.setShowType("textarea"); } // Parent ID else if (StringUtils.equalsIgnoreCase(column.getName(), "parent_id")){ column.setJavaType("This"); column.setJavaField("parent.id|name"); column.setShowType("treeselect"); } // All parent IDs else if (StringUtils.equalsIgnoreCase(column.getName(), "parent_ids")){ column.setQueryType("like"); } // Delete flag else if (StringUtils.equalsIgnoreCase(column.getName(), "del_flag")){ column.setShowType("radiobox"); column.setDictType("del_flag"); } } } /** * Get the template path * @return */ public static String getTemplatePath(){ try{ File file = new DefaultResourceLoader().getResource("").getFile(); if(file != null){ return file.getAbsolutePath() + File.separator + StringUtils.replaceEach(GenUtils.class.getName(), new String[]{"util."+GenUtils.class.getSimpleName(), "."}, new String[]{"template", File.separator}); } }catch(Exception e){ logger.error("{}", e); } return ""; } /** * Convert an XML file to an object * @param fileName * @param clazz * @return */ @SuppressWarnings("unchecked") public static <T> T fileToObject(String fileName, Class<?> clazz){ try { String pathName = "/templates/modules/gen/" + fileName; // logger.debug("File to object: {}", pathName); Resource resource = new ClassPathResource(pathName); InputStream is = resource.getInputStream(); BufferedReader br = new BufferedReader(new InputStreamReader(is, "UTF-8")); StringBuilder sb = new StringBuilder(); while (true) { String line = br.readLine(); if (line == null){ break; } sb.append(line).append("\r\n"); } if (is != null) { is.close(); } if (br != null) { br.close(); } // logger.debug("Read file content: {}", sb.toString()); return (T) JaxbMapper.fromXml(sb.toString(), clazz); } catch (IOException e) 
{ logger.warn("Error file convert: {}", e.getMessage()); } // String pathName = StringUtils.replace(getTemplatePath() + "/" + fileName, "/", File.separator); // logger.debug("file to object: {}", pathName); // String content = ""; // try { // content = FileUtils.readFileToString(new File(pathName), "utf-8"); //// logger.debug("read config content: {}", content); // return (T) JaxbMapper.fromXml(content, clazz); // } catch (IOException e) { // logger.warn("error convert: {}", e.getMessage()); // } return null; } /** * Get the code generation configuration object * @return */ public static GenConfig getConfig(){ return fileToObject("config.xml", GenConfig.class); } /** * Get the template list by category * @param config * @param category * @param isChildTable whether it is a child table * @return */ public static List<GenTemplate> getTemplateList(GenConfig config, String category, boolean isChildTable){ List<GenTemplate> templateList = Lists.newArrayList(); if (config !=null && config.getCategoryList() != null && category != null){ for (GenCategory e : config.getCategoryList()){ if (category.equals(e.getValue())){ List<String> list = null; if (!isChildTable){ list = e.getTemplate(); }else{ list = e.getChildTableTemplate(); } if (list != null){ for (String s : list){ if (StringUtils.startsWith(s, GenCategory.CATEGORY_REF)){ templateList.addAll(getTemplateList(config, StringUtils.replace(s, GenCategory.CATEGORY_REF, ""), false)); }else{ GenTemplate template = fileToObject(s, GenTemplate.class); if (template != null){ templateList.add(template); } } } } break; } } } return templateList; } /** * Get the data model * @param genScheme * @return */ public static Map<String, Object> getDataModel(GenScheme genScheme){ Map<String, Object> model = Maps.newHashMap(); model.put("packageName", StringUtils.lowerCase(genScheme.getPackageName())); model.put("lastPackageName", StringUtils.substringAfterLast((String)model.get("packageName"),".")); model.put("moduleName", StringUtils.lowerCase(genScheme.getModuleName())); model.put("subModuleName", StringUtils.lowerCase(genScheme.getSubModuleName())); model.put("className", StringUtils.uncapitalize(genScheme.getGenTable().getClassName())); model.put("ClassName", StringUtils.capitalize(genScheme.getGenTable().getClassName())); model.put("functionName", genScheme.getFunctionName()); model.put("functionNameSimple", genScheme.getFunctionNameSimple()); model.put("functionAuthor", StringUtils.isNotBlank(genScheme.getFunctionAuthor())?genScheme.getFunctionAuthor():UserUtils.getUser().getName()); model.put("functionVersion", DateUtils.getDate()); model.put("urlPrefix", model.get("moduleName")+(StringUtils.isNotBlank(genScheme.getSubModuleName()) ?"/"+StringUtils.lowerCase(genScheme.getSubModuleName()):"")+"/"+model.get("className")); model.put("viewPrefix", //StringUtils.substringAfterLast(model.get("packageName"),".")+"/"+ model.get("urlPrefix")); model.put("permissionPrefix", model.get("moduleName")+(StringUtils.isNotBlank(genScheme.getSubModuleName()) ?":"+StringUtils.lowerCase(genScheme.getSubModuleName()):"")+":"+model.get("className")); model.put("dbType", Global.getConfig("jdbc.type")); model.put("table", genScheme.getGenTable()); return model; } /** * Generate to file * @param tpl * @param model * @param isReplaceFile * @return */ public static String generateToFile(GenTemplate tpl, Map<String, Object> model, boolean isReplaceFile){ // Get the file to generate String fileName = Global.getProjectPath() + File.separator + StringUtils.replaceEach(FreeMarkers.renderString(tpl.getFilePath() + "/", model), new String[]{"//", "/", "."}, new String[]{File.separator, 
File.separator, File.separator}) + FreeMarkers.renderString(tpl.getFileName(), model); logger.debug(" fileName === " + fileName); // Get the content of the generated file String content = FreeMarkers.renderString(StringUtils.trimToEmpty(tpl.getContent()), model); logger.debug(" content === \r\n" + content); // If replacing the file was chosen, delete the original file if (isReplaceFile){ FileUtils.deleteFile(fileName); } // Create and write the file if (FileUtils.createFile(fileName)){ FileUtils.writeToFile(fileName, content, true); logger.debug(" file create === " + fileName); return "生成成功:"+fileName+"<br/>"; }else{ logger.debug(" file extents === " + fileName); return "文件已存在:"+fileName+"<br/>"; } } public static void main(String[] args) { try { GenConfig config = getConfig(); System.out.println(config); System.out.println(JaxbMapper.toXml(config)); } catch (Exception e) { e.printStackTrace(); } } }
apache-2.0
jangorecki/h2o-3
h2o-app/src/main/java/water/H2OApp.java
355
package water;

public class H2OApp extends H2OStarter {
  public static void main(String[] args) {
    if (H2O.checkUnsupportedJava())
      System.exit(1);
    start(args, System.getProperty("user.dir"));
  }

  @SuppressWarnings("unused")
  public static void main2(String relativeResourcePath) {
    start(new String[0], relativeResourcePath);
  }
}
apache-2.0
strongo/bots-framework
package.go
80
package botsframework // Main code for the package is in the `core` directory.
apache-2.0
jetstack-experimental/cert-manager
pkg/controller/certificates/listers.go
2830
/* Copyright 2020 The cert-manager Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package certificates import ( corev1 "k8s.io/api/core/v1" "k8s.io/apimachinery/pkg/labels" corelisters "k8s.io/client-go/listers/core/v1" cmapi "github.com/jetstack/cert-manager/pkg/apis/certmanager/v1" cmlisters "github.com/jetstack/cert-manager/pkg/client/listers/certmanager/v1" "github.com/jetstack/cert-manager/pkg/util/predicate" ) // ListCertificateRequestsMatchingPredicates will list CertificateRequest // resources using the provided lister, optionally applying the given predicate // functions to filter the CertificateRequest resources returned. func ListCertificateRequestsMatchingPredicates(lister cmlisters.CertificateRequestNamespaceLister, selector labels.Selector, predicates ...predicate.Func) ([]*cmapi.CertificateRequest, error) { reqs, err := lister.List(selector) if err != nil { return nil, err } funcs := predicate.Funcs(predicates) out := make([]*cmapi.CertificateRequest, 0) for _, req := range reqs { if funcs.Evaluate(req) { out = append(out, req) } } return out, nil } // ListCertificatesMatchingPredicates will list Certificate resources using // the provided lister, optionally applying the given predicate functions to // filter the Certificate resources returned. func ListCertificatesMatchingPredicates(lister cmlisters.CertificateNamespaceLister, selector labels.Selector, predicates ...predicate.Func) ([]*cmapi.Certificate, error) { reqs, err := lister.List(selector) if err != nil { return nil, err } funcs := predicate.Funcs(predicates) out := make([]*cmapi.Certificate, 0) for _, req := range reqs { if funcs.Evaluate(req) { out = append(out, req) } } return out, nil } // ListSecretsMatchingPredicates will list Secret resources using // the provided lister, optionally applying the given predicate functions to // filter the Secret resources returned. func ListSecretsMatchingPredicates(lister corelisters.SecretNamespaceLister, selector labels.Selector, predicates ...predicate.Func) ([]*corev1.Secret, error) { reqs, err := lister.List(selector) if err != nil { return nil, err } funcs := predicate.Funcs(predicates) out := make([]*corev1.Secret, 0) for _, req := range reqs { if funcs.Evaluate(req) { out = append(out, req) } } return out, nil }
apache-2.0
mcintyred/cli
cf/ssh/ssh_test.go
34673
// +build !windows,!386 // skipping 386 because lager uses UInt64 in Session() // skipping windows because Unix/Linux only syscall in test. // should refactor out the conflicts so we could test this package in multi platforms. package sshCmd_test import ( "errors" "fmt" "io" "net" "os" "syscall" "time" "github.com/cloudfoundry-incubator/diego-ssh/cf-plugin/terminal" "github.com/cloudfoundry-incubator/diego-ssh/server" fake_server "github.com/cloudfoundry-incubator/diego-ssh/server/fakes" "github.com/cloudfoundry-incubator/diego-ssh/test_helpers" "github.com/cloudfoundry-incubator/diego-ssh/test_helpers/fake_io" "github.com/cloudfoundry-incubator/diego-ssh/test_helpers/fake_net" "github.com/cloudfoundry-incubator/diego-ssh/test_helpers/fake_ssh" "github.com/cloudfoundry/cli/cf/models" "github.com/cloudfoundry/cli/cf/ssh" "github.com/cloudfoundry/cli/cf/ssh/options" "github.com/cloudfoundry/cli/cf/ssh/sshfakes" "github.com/cloudfoundry/cli/cf/ssh/terminal/terminalhelperfakes" "github.com/docker/docker/pkg/term" "github.com/kr/pty" "github.com/pivotal-golang/lager/lagertest" "golang.org/x/crypto/ssh" . "github.com/onsi/ginkgo" . "github.com/onsi/gomega" ) var _ = Describe("SSH", func() { var ( fakeTerminalHelper *terminalhelperfakes.FakeTerminalHelper fakeListenerFactory *sshfakes.FakeListenerFactory fakeConnection *fake_ssh.FakeConn fakeSecureClient *sshfakes.FakeSecureClient fakeSecureDialer *sshfakes.FakeSecureDialer fakeSecureSession *sshfakes.FakeSecureSession terminalHelper terminal.TerminalHelper keepAliveDuration time.Duration secureShell sshCmd.SecureShell stdinPipe *fake_io.FakeWriteCloser currentApp models.Application sshEndpointFingerprint string sshEndpoint string token string ) BeforeEach(func() { fakeTerminalHelper = &terminalhelperfakes.FakeTerminalHelper{} terminalHelper = terminal.DefaultHelper() fakeListenerFactory = new(sshfakes.FakeListenerFactory) fakeListenerFactory.ListenStub = net.Listen keepAliveDuration = 30 * time.Second currentApp = models.Application{} sshEndpoint = "" sshEndpointFingerprint = "" token = "" fakeConnection = &fake_ssh.FakeConn{} fakeSecureClient = new(sshfakes.FakeSecureClient) fakeSecureDialer = new(sshfakes.FakeSecureDialer) fakeSecureSession = new(sshfakes.FakeSecureSession) fakeSecureDialer.DialReturns(fakeSecureClient, nil) fakeSecureClient.NewSessionReturns(fakeSecureSession, nil) fakeSecureClient.ConnReturns(fakeConnection) stdinPipe = &fake_io.FakeWriteCloser{} stdinPipe.WriteStub = func(p []byte) (int, error) { return len(p), nil } stdoutPipe := &fake_io.FakeReader{} stdoutPipe.ReadStub = func(p []byte) (int, error) { return 0, io.EOF } stderrPipe := &fake_io.FakeReader{} stderrPipe.ReadStub = func(p []byte) (int, error) { return 0, io.EOF } fakeSecureSession.StdinPipeReturns(stdinPipe, nil) fakeSecureSession.StdoutPipeReturns(stdoutPipe, nil) fakeSecureSession.StderrPipeReturns(stderrPipe, nil) }) JustBeforeEach(func() { secureShell = sshCmd.NewSecureShell( fakeSecureDialer, terminalHelper, fakeListenerFactory, keepAliveDuration, currentApp, sshEndpointFingerprint, sshEndpoint, token, ) }) Describe("Validation", func() { var connectErr error var opts *options.SSHOptions BeforeEach(func() { opts = &options.SSHOptions{ AppName: "app-1", } }) JustBeforeEach(func() { connectErr = secureShell.Connect(opts) }) Context("when the app model and endpoint info are successfully acquired", func() { BeforeEach(func() { token = "" currentApp.State = "STARTED" currentApp.Diego = true }) Context("when the app is not in the 'STARTED' state", func() { 
BeforeEach(func() { currentApp.State = "STOPPED" currentApp.Diego = true }) It("returns an error", func() { Expect(connectErr).To(MatchError(MatchRegexp("Application.*not in the STARTED state"))) }) }) Context("when the app is not a Diego app", func() { BeforeEach(func() { currentApp.State = "STARTED" currentApp.Diego = false }) It("returns an error", func() { Expect(connectErr).To(MatchError(MatchRegexp("Application.*not running on Diego"))) }) }) Context("when dialing fails", func() { var dialError = errors.New("woops") BeforeEach(func() { fakeSecureDialer.DialReturns(nil, dialError) }) It("returns the dial error", func() { Expect(connectErr).To(Equal(dialError)) Expect(fakeSecureDialer.DialCallCount()).To(Equal(1)) }) }) }) }) Describe("InteractiveSession", func() { var opts *options.SSHOptions var sessionError error var interactiveSessionInvoker func(secureShell sshCmd.SecureShell) BeforeEach(func() { sshEndpoint = "ssh.example.com:22" opts = &options.SSHOptions{ AppName: "app-name", Index: 2, } currentApp.State = "STARTED" currentApp.Diego = true currentApp.GUID = "app-guid" token = "bearer token" interactiveSessionInvoker = func(secureShell sshCmd.SecureShell) { sessionError = secureShell.InteractiveSession() } }) JustBeforeEach(func() { connectErr := secureShell.Connect(opts) Expect(connectErr).NotTo(HaveOccurred()) interactiveSessionInvoker(secureShell) }) It("dials the correct endpoint as the correct user", func() { Expect(fakeSecureDialer.DialCallCount()).To(Equal(1)) network, address, config := fakeSecureDialer.DialArgsForCall(0) Expect(network).To(Equal("tcp")) Expect(address).To(Equal("ssh.example.com:22")) Expect(config.Auth).NotTo(BeEmpty()) Expect(config.User).To(Equal("cf:app-guid/2")) Expect(config.HostKeyCallback).NotTo(BeNil()) }) Context("when host key validation is enabled", func() { var callback func(hostname string, remote net.Addr, key ssh.PublicKey) error var addr net.Addr JustBeforeEach(func() { Expect(fakeSecureDialer.DialCallCount()).To(Equal(1)) _, _, config := fakeSecureDialer.DialArgsForCall(0) callback = config.HostKeyCallback listener, err := net.Listen("tcp", "localhost:0") Expect(err).NotTo(HaveOccurred()) addr = listener.Addr() listener.Close() }) Context("when the SHA1 fingerprint does not match", func() { BeforeEach(func() { sshEndpointFingerprint = "00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00" }) It("returns an error'", func() { err := callback("", addr, TestHostKey.PublicKey()) Expect(err).To(MatchError(MatchRegexp("Host key verification failed\\."))) Expect(err).To(MatchError(MatchRegexp("The fingerprint of the received key was \".*\""))) }) }) Context("when the MD5 fingerprint does not match", func() { BeforeEach(func() { sshEndpointFingerprint = "00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00" }) It("returns an error'", func() { err := callback("", addr, TestHostKey.PublicKey()) Expect(err).To(MatchError(MatchRegexp("Host key verification failed\\."))) Expect(err).To(MatchError(MatchRegexp("The fingerprint of the received key was \".*\""))) }) }) Context("when no fingerprint is present in endpoint info", func() { BeforeEach(func() { sshEndpointFingerprint = "" sshEndpoint = "" }) It("returns an error'", func() { err := callback("", addr, TestHostKey.PublicKey()) Expect(err).To(MatchError(MatchRegexp("Unable to verify identity of host\\."))) Expect(err).To(MatchError(MatchRegexp("The fingerprint of the received key was \".*\""))) }) }) Context("when the fingerprint length doesn't make sense", func() { BeforeEach(func() { 
sshEndpointFingerprint = "garbage" }) It("returns an error", func() { err := callback("", addr, TestHostKey.PublicKey()) Eventually(err).Should(MatchError(MatchRegexp("Unsupported host key fingerprint format"))) }) }) }) Context("when the skip host validation flag is set", func() { BeforeEach(func() { opts.SkipHostValidation = true }) It("removes the HostKeyCallback from the client config", func() { Expect(fakeSecureDialer.DialCallCount()).To(Equal(1)) _, _, config := fakeSecureDialer.DialArgsForCall(0) Expect(config.HostKeyCallback).To(BeNil()) }) }) Context("when dialing is successful", func() { BeforeEach(func() { fakeTerminalHelper.StdStreamsStub = terminalHelper.StdStreams terminalHelper = fakeTerminalHelper }) It("creates a new secure shell session", func() { Expect(fakeSecureClient.NewSessionCallCount()).To(Equal(1)) }) It("closes the session", func() { Expect(fakeSecureSession.CloseCallCount()).To(Equal(1)) }) It("allocates standard streams", func() { Expect(fakeTerminalHelper.StdStreamsCallCount()).To(Equal(1)) }) It("gets a stdin pipe for the session", func() { Expect(fakeSecureSession.StdinPipeCallCount()).To(Equal(1)) }) Context("when getting the stdin pipe fails", func() { BeforeEach(func() { fakeSecureSession.StdinPipeReturns(nil, errors.New("woops")) }) It("returns the error", func() { Expect(sessionError).Should(MatchError("woops")) }) }) It("gets a stdout pipe for the session", func() { Expect(fakeSecureSession.StdoutPipeCallCount()).To(Equal(1)) }) Context("when getting the stdout pipe fails", func() { BeforeEach(func() { fakeSecureSession.StdoutPipeReturns(nil, errors.New("woops")) }) It("returns the error", func() { Expect(sessionError).Should(MatchError("woops")) }) }) It("gets a stderr pipe for the session", func() { Expect(fakeSecureSession.StderrPipeCallCount()).To(Equal(1)) }) Context("when getting the stderr pipe fails", func() { BeforeEach(func() { fakeSecureSession.StderrPipeReturns(nil, errors.New("woops")) }) It("returns the error", func() { Expect(sessionError).Should(MatchError("woops")) }) }) }) Context("when stdin is a terminal", func() { var master, slave *os.File BeforeEach(func() { _, stdout, stderr := terminalHelper.StdStreams() var err error master, slave, err = pty.Open() Expect(err).NotTo(HaveOccurred()) fakeTerminalHelper.IsTerminalStub = terminalHelper.IsTerminal fakeTerminalHelper.GetFdInfoStub = terminalHelper.GetFdInfo fakeTerminalHelper.GetWinsizeStub = terminalHelper.GetWinsize fakeTerminalHelper.StdStreamsReturns(slave, stdout, stderr) terminalHelper = fakeTerminalHelper }) AfterEach(func() { master.Close() // slave.Close() // race }) Context("when a command is not specified", func() { var terminalType string BeforeEach(func() { terminalType = os.Getenv("TERM") os.Setenv("TERM", "test-terminal-type") winsize := &term.Winsize{Width: 1024, Height: 256} fakeTerminalHelper.GetWinsizeReturns(winsize, nil) fakeSecureSession.ShellStub = func() error { Expect(fakeTerminalHelper.SetRawTerminalCallCount()).To(Equal(1)) Expect(fakeTerminalHelper.RestoreTerminalCallCount()).To(Equal(0)) return nil } }) AfterEach(func() { os.Setenv("TERM", terminalType) }) It("requests a pty with the correct terminal type, window size, and modes", func() { Expect(fakeSecureSession.RequestPtyCallCount()).To(Equal(1)) Expect(fakeTerminalHelper.GetWinsizeCallCount()).To(Equal(1)) termType, height, width, modes := fakeSecureSession.RequestPtyArgsForCall(0) Expect(termType).To(Equal("test-terminal-type")) Expect(height).To(Equal(256)) Expect(width).To(Equal(1024)) 
expectedModes := ssh.TerminalModes{ ssh.ECHO: 1, ssh.TTY_OP_ISPEED: 115200, ssh.TTY_OP_OSPEED: 115200, } Expect(modes).To(Equal(expectedModes)) }) Context("when the TERM environment variable is not set", func() { BeforeEach(func() { os.Unsetenv("TERM") }) It("requests a pty with the default terminal type", func() { Expect(fakeSecureSession.RequestPtyCallCount()).To(Equal(1)) termType, _, _, _ := fakeSecureSession.RequestPtyArgsForCall(0) Expect(termType).To(Equal("xterm")) }) }) It("puts the terminal into raw mode and restores it after running the shell", func() { Expect(fakeSecureSession.ShellCallCount()).To(Equal(1)) Expect(fakeTerminalHelper.SetRawTerminalCallCount()).To(Equal(1)) Expect(fakeTerminalHelper.RestoreTerminalCallCount()).To(Equal(1)) }) Context("when the pty allocation fails", func() { var ptyError error BeforeEach(func() { ptyError = errors.New("pty allocation error") fakeSecureSession.RequestPtyReturns(ptyError) }) It("returns the error", func() { Expect(sessionError).To(Equal(ptyError)) }) }) Context("when placing the terminal into raw mode fails", func() { BeforeEach(func() { fakeTerminalHelper.SetRawTerminalReturns(nil, errors.New("woops")) }) It("keeps calm and carries on", func() { Expect(fakeSecureSession.ShellCallCount()).To(Equal(1)) }) It("does not not restore the terminal", func() { Expect(fakeSecureSession.ShellCallCount()).To(Equal(1)) Expect(fakeTerminalHelper.SetRawTerminalCallCount()).To(Equal(1)) Expect(fakeTerminalHelper.RestoreTerminalCallCount()).To(Equal(0)) }) }) }) Context("when a command is specified", func() { BeforeEach(func() { opts.Command = []string{"echo", "-n", "hello"} }) Context("when a terminal is requested", func() { BeforeEach(func() { opts.TerminalRequest = options.REQUEST_TTY_YES }) It("requests a pty", func() { Expect(fakeSecureSession.RequestPtyCallCount()).To(Equal(1)) }) }) Context("when a terminal is not explicitly requested", func() { It("does not request a pty", func() { Expect(fakeSecureSession.RequestPtyCallCount()).To(Equal(0)) }) }) }) }) Context("when stdin is not a terminal", func() { BeforeEach(func() { _, stdout, stderr := terminalHelper.StdStreams() stdin := &fake_io.FakeReadCloser{} stdin.ReadStub = func(p []byte) (int, error) { return 0, io.EOF } fakeTerminalHelper.IsTerminalStub = terminalHelper.IsTerminal fakeTerminalHelper.GetFdInfoStub = terminalHelper.GetFdInfo fakeTerminalHelper.GetWinsizeStub = terminalHelper.GetWinsize fakeTerminalHelper.StdStreamsReturns(stdin, stdout, stderr) terminalHelper = fakeTerminalHelper }) Context("when a terminal is not requested", func() { It("does not request a pty", func() { Expect(fakeSecureSession.RequestPtyCallCount()).To(Equal(0)) }) }) Context("when a terminal is requested", func() { BeforeEach(func() { opts.TerminalRequest = options.REQUEST_TTY_YES }) It("does not request a pty", func() { Expect(fakeSecureSession.RequestPtyCallCount()).To(Equal(0)) }) }) }) Context("when a terminal is forced", func() { BeforeEach(func() { opts.TerminalRequest = options.REQUEST_TTY_FORCE }) It("requests a pty", func() { Expect(fakeSecureSession.RequestPtyCallCount()).To(Equal(1)) }) }) Context("when a terminal is disabled", func() { BeforeEach(func() { opts.TerminalRequest = options.REQUEST_TTY_NO }) It("does not request a pty", func() { Expect(fakeSecureSession.RequestPtyCallCount()).To(Equal(0)) }) }) Context("when a command is not specified", func() { It("requests an interactive shell", func() { Expect(fakeSecureSession.ShellCallCount()).To(Equal(1)) }) Context("when the shell request 
returns an error", func() { BeforeEach(func() { fakeSecureSession.ShellReturns(errors.New("oh bother")) }) It("returns the error", func() { Expect(sessionError).To(MatchError("oh bother")) }) }) }) Context("when a command is specifed", func() { BeforeEach(func() { opts.Command = []string{"echo", "-n", "hello"} }) It("starts the command", func() { Expect(fakeSecureSession.StartCallCount()).To(Equal(1)) Expect(fakeSecureSession.StartArgsForCall(0)).To(Equal("echo -n hello")) }) Context("when the command fails to start", func() { BeforeEach(func() { fakeSecureSession.StartReturns(errors.New("oh well")) }) It("returns the error", func() { Expect(sessionError).To(MatchError("oh well")) }) }) }) Context("when the shell or command has started", func() { var ( stdin *fake_io.FakeReadCloser stdout, stderr *fake_io.FakeWriter stdinPipe *fake_io.FakeWriteCloser stdoutPipe, stderrPipe *fake_io.FakeReader ) BeforeEach(func() { stdin = &fake_io.FakeReadCloser{} stdin.ReadStub = func(p []byte) (int, error) { p[0] = 0 return 1, io.EOF } stdinPipe = &fake_io.FakeWriteCloser{} stdinPipe.WriteStub = func(p []byte) (int, error) { defer GinkgoRecover() Expect(p[0]).To(Equal(byte(0))) return 1, nil } stdoutPipe = &fake_io.FakeReader{} stdoutPipe.ReadStub = func(p []byte) (int, error) { p[0] = 1 return 1, io.EOF } stdout = &fake_io.FakeWriter{} stdout.WriteStub = func(p []byte) (int, error) { defer GinkgoRecover() Expect(p[0]).To(Equal(byte(1))) return 1, nil } stderrPipe = &fake_io.FakeReader{} stderrPipe.ReadStub = func(p []byte) (int, error) { p[0] = 2 return 1, io.EOF } stderr = &fake_io.FakeWriter{} stderr.WriteStub = func(p []byte) (int, error) { defer GinkgoRecover() Expect(p[0]).To(Equal(byte(2))) return 1, nil } fakeTerminalHelper.StdStreamsReturns(stdin, stdout, stderr) terminalHelper = fakeTerminalHelper fakeSecureSession.StdinPipeReturns(stdinPipe, nil) fakeSecureSession.StdoutPipeReturns(stdoutPipe, nil) fakeSecureSession.StderrPipeReturns(stderrPipe, nil) fakeSecureSession.WaitReturns(errors.New("error result")) }) It("copies data from the stdin stream to the session stdin pipe", func() { Eventually(stdin.ReadCallCount).Should(Equal(1)) Eventually(stdinPipe.WriteCallCount).Should(Equal(1)) }) It("copies data from the session stdout pipe to the stdout stream", func() { Eventually(stdoutPipe.ReadCallCount).Should(Equal(1)) Eventually(stdout.WriteCallCount).Should(Equal(1)) }) It("copies data from the session stderr pipe to the stderr stream", func() { Eventually(stderrPipe.ReadCallCount).Should(Equal(1)) Eventually(stderr.WriteCallCount).Should(Equal(1)) }) It("waits for the session to end", func() { Expect(fakeSecureSession.WaitCallCount()).To(Equal(1)) }) It("returns the result from wait", func() { Expect(sessionError).To(MatchError("error result")) }) Context("when the session terminates before stream copies complete", func() { var sessionErrorCh chan error BeforeEach(func() { sessionErrorCh = make(chan error, 1) interactiveSessionInvoker = func(secureShell sshCmd.SecureShell) { go func() { sessionErrorCh <- secureShell.InteractiveSession() }() } stdoutPipe.ReadStub = func(p []byte) (int, error) { defer GinkgoRecover() Eventually(fakeSecureSession.WaitCallCount).Should(Equal(1)) Consistently(sessionErrorCh).ShouldNot(Receive()) p[0] = 1 return 1, io.EOF } stderrPipe.ReadStub = func(p []byte) (int, error) { defer GinkgoRecover() Eventually(fakeSecureSession.WaitCallCount).Should(Equal(1)) Consistently(sessionErrorCh).ShouldNot(Receive()) p[0] = 2 return 1, io.EOF } }) It("waits for the copies to 
complete", func() { Eventually(sessionErrorCh).Should(Receive()) Expect(stdoutPipe.ReadCallCount()).To(Equal(1)) Expect(stderrPipe.ReadCallCount()).To(Equal(1)) }) }) Context("when stdin is closed", func() { BeforeEach(func() { stdin.ReadStub = func(p []byte) (int, error) { defer GinkgoRecover() Consistently(stdinPipe.CloseCallCount).Should(Equal(0)) p[0] = 0 return 1, io.EOF } }) It("closes the stdinPipe", func() { Eventually(stdinPipe.CloseCallCount).Should(Equal(1)) }) }) }) Context("when stdout is a terminal and a window size change occurs", func() { var master, slave *os.File BeforeEach(func() { stdin, _, stderr := terminalHelper.StdStreams() var err error master, slave, err = pty.Open() Expect(err).NotTo(HaveOccurred()) fakeTerminalHelper.IsTerminalStub = terminalHelper.IsTerminal fakeTerminalHelper.GetFdInfoStub = terminalHelper.GetFdInfo fakeTerminalHelper.GetWinsizeStub = terminalHelper.GetWinsize fakeTerminalHelper.StdStreamsReturns(stdin, slave, stderr) terminalHelper = fakeTerminalHelper winsize := &term.Winsize{Height: 100, Width: 100} err = term.SetWinsize(slave.Fd(), winsize) Expect(err).NotTo(HaveOccurred()) fakeSecureSession.WaitStub = func() error { fakeSecureSession.SendRequestCallCount() Expect(fakeSecureSession.SendRequestCallCount()).To(Equal(0)) // No dimension change for i := 0; i < 3; i++ { winsize := &term.Winsize{Height: 100, Width: 100} err = term.SetWinsize(slave.Fd(), winsize) Expect(err).NotTo(HaveOccurred()) } winsize := &term.Winsize{Height: 100, Width: 200} err = term.SetWinsize(slave.Fd(), winsize) Expect(err).NotTo(HaveOccurred()) err = syscall.Kill(syscall.Getpid(), syscall.SIGWINCH) Expect(err).NotTo(HaveOccurred()) Eventually(fakeSecureSession.SendRequestCallCount).Should(Equal(1)) return nil } }) AfterEach(func() { master.Close() slave.Close() }) It("sends window change events when the window dimensions change", func() { Expect(fakeSecureSession.SendRequestCallCount()).To(Equal(1)) requestType, wantReply, message := fakeSecureSession.SendRequestArgsForCall(0) Expect(requestType).To(Equal("window-change")) Expect(wantReply).To(BeFalse()) type resizeMessage struct { Width uint32 Height uint32 PixelWidth uint32 PixelHeight uint32 } var resizeMsg resizeMessage err := ssh.Unmarshal(message, &resizeMsg) Expect(err).NotTo(HaveOccurred()) Expect(resizeMsg).To(Equal(resizeMessage{Height: 100, Width: 200})) }) }) Describe("keep alive messages", func() { var times []time.Time var timesCh chan []time.Time var done chan struct{} BeforeEach(func() { keepAliveDuration = 100 * time.Millisecond times = []time.Time{} timesCh = make(chan []time.Time, 1) done = make(chan struct{}, 1) fakeConnection.SendRequestStub = func(reqName string, wantReply bool, message []byte) (bool, []byte, error) { Expect(reqName).To(Equal("keepalive@cloudfoundry.org")) Expect(wantReply).To(BeTrue()) Expect(message).To(BeNil()) times = append(times, time.Now()) if len(times) == 3 { timesCh <- times close(done) } return true, nil, nil } fakeSecureSession.WaitStub = func() error { Eventually(done).Should(BeClosed()) return nil } }) It("sends keep alive messages at the expected interval", func() { times := <-timesCh Expect(times[2]).To(BeTemporally("~", times[0].Add(200*time.Millisecond), 100*time.Millisecond)) }) }) }) Describe("LocalPortForward", func() { var ( opts *options.SSHOptions localForwardError error echoAddress string echoListener *fake_net.FakeListener echoHandler *fake_server.FakeConnectionHandler echoServer *server.Server localAddress string realLocalListener net.Listener 
fakeLocalListener *fake_net.FakeListener ) BeforeEach(func() { logger := lagertest.NewTestLogger("test") var err error realLocalListener, err = net.Listen("tcp", "127.0.0.1:0") Expect(err).NotTo(HaveOccurred()) localAddress = realLocalListener.Addr().String() fakeListenerFactory.ListenReturns(realLocalListener, nil) echoHandler = &fake_server.FakeConnectionHandler{} echoHandler.HandleConnectionStub = func(conn net.Conn) { io.Copy(conn, conn) conn.Close() } realListener, err := net.Listen("tcp", "127.0.0.1:0") Expect(err).NotTo(HaveOccurred()) echoAddress = realListener.Addr().String() echoListener = &fake_net.FakeListener{} echoListener.AcceptStub = realListener.Accept echoListener.CloseStub = realListener.Close echoListener.AddrStub = realListener.Addr fakeLocalListener = &fake_net.FakeListener{} fakeLocalListener.AcceptReturns(nil, errors.New("Not Accepting Connections")) echoServer = server.NewServer(logger.Session("echo"), "", echoHandler) echoServer.SetListener(echoListener) go echoServer.Serve() opts = &options.SSHOptions{ AppName: "app-1", ForwardSpecs: []options.ForwardSpec{{ ListenAddress: localAddress, ConnectAddress: echoAddress, }}, } currentApp.State = "STARTED" currentApp.Diego = true sshEndpointFingerprint = "" sshEndpoint = "" token = "" fakeSecureClient.DialStub = net.Dial }) JustBeforeEach(func() { connectErr := secureShell.Connect(opts) Expect(connectErr).NotTo(HaveOccurred()) localForwardError = secureShell.LocalPortForward() }) AfterEach(func() { err := secureShell.Close() Expect(err).NotTo(HaveOccurred()) echoServer.Shutdown() realLocalListener.Close() }) validateConnectivity := func(addr string) { conn, err := net.Dial("tcp", addr) Expect(err).NotTo(HaveOccurred()) msg := fmt.Sprintf("Hello from %s\n", addr) n, err := conn.Write([]byte(msg)) Expect(err).NotTo(HaveOccurred()) Expect(n).To(Equal(len(msg))) response := make([]byte, len(msg)) n, err = conn.Read(response) Expect(err).NotTo(HaveOccurred()) Expect(n).To(Equal(len(msg))) err = conn.Close() Expect(err).NotTo(HaveOccurred()) Expect(response).To(Equal([]byte(msg))) } It("dials the connect address when a local connection is made", func() { Expect(localForwardError).NotTo(HaveOccurred()) conn, err := net.Dial("tcp", localAddress) Expect(err).NotTo(HaveOccurred()) Eventually(echoListener.AcceptCallCount).Should(BeNumerically(">=", 1)) Eventually(fakeSecureClient.DialCallCount).Should(Equal(1)) network, addr := fakeSecureClient.DialArgsForCall(0) Expect(network).To(Equal("tcp")) Expect(addr).To(Equal(echoAddress)) Expect(conn.Close()).NotTo(HaveOccurred()) }) It("copies data between the local and remote connections", func() { validateConnectivity(localAddress) }) Context("when a local connection is already open", func() { var ( conn net.Conn err error ) JustBeforeEach(func() { conn, err = net.Dial("tcp", localAddress) Expect(err).NotTo(HaveOccurred()) }) AfterEach(func() { err = conn.Close() Expect(err).NotTo(HaveOccurred()) }) It("allows for new incoming connections as well", func() { validateConnectivity(localAddress) }) }) Context("when there are multiple port forward specs", func() { var realLocalListener2 net.Listener var localAddress2 string BeforeEach(func() { var err error realLocalListener2, err = net.Listen("tcp", "127.0.0.1:0") Expect(err).NotTo(HaveOccurred()) localAddress2 = realLocalListener2.Addr().String() fakeListenerFactory.ListenStub = func(network, addr string) (net.Listener, error) { if addr == localAddress { return realLocalListener, nil } if addr == localAddress2 { return realLocalListener2, 
nil } return nil, errors.New("unexpected address") } opts = &options.SSHOptions{ AppName: "app-1", ForwardSpecs: []options.ForwardSpec{{ ListenAddress: localAddress, ConnectAddress: echoAddress, }, { ListenAddress: localAddress2, ConnectAddress: echoAddress, }}, } }) AfterEach(func() { realLocalListener2.Close() }) It("listens to all the things", func() { Eventually(fakeListenerFactory.ListenCallCount).Should(Equal(2)) network, addr := fakeListenerFactory.ListenArgsForCall(0) Expect(network).To(Equal("tcp")) Expect(addr).To(Equal(localAddress)) network, addr = fakeListenerFactory.ListenArgsForCall(1) Expect(network).To(Equal("tcp")) Expect(addr).To(Equal(localAddress2)) }) It("forwards to the correct target", func() { validateConnectivity(localAddress) validateConnectivity(localAddress2) }) Context("when the secure client is closed", func() { BeforeEach(func() { fakeListenerFactory.ListenReturns(fakeLocalListener, nil) fakeLocalListener.AcceptReturns(nil, errors.New("not accepting connections")) }) It("closes the listeners ", func() { Eventually(fakeListenerFactory.ListenCallCount).Should(Equal(2)) Eventually(fakeLocalListener.AcceptCallCount).Should(Equal(2)) originalCloseCount := fakeLocalListener.CloseCallCount() err := secureShell.Close() Expect(err).NotTo(HaveOccurred()) Expect(fakeLocalListener.CloseCallCount()).Should(Equal(originalCloseCount + 2)) }) }) }) Context("when listen fails", func() { BeforeEach(func() { fakeListenerFactory.ListenReturns(nil, errors.New("failure is an option")) }) It("returns the error", func() { Expect(localForwardError).To(MatchError("failure is an option")) }) }) Context("when the client it closed", func() { BeforeEach(func() { fakeListenerFactory.ListenReturns(fakeLocalListener, nil) fakeLocalListener.AcceptReturns(nil, errors.New("not accepting and connections")) }) It("closes the listener when the client is closed", func() { Eventually(fakeListenerFactory.ListenCallCount).Should(Equal(1)) Eventually(fakeLocalListener.AcceptCallCount).Should(Equal(1)) originalCloseCount := fakeLocalListener.CloseCallCount() err := secureShell.Close() Expect(err).NotTo(HaveOccurred()) Expect(fakeLocalListener.CloseCallCount()).Should(Equal(originalCloseCount + 1)) }) }) Context("when accept fails", func() { var fakeConn *fake_net.FakeConn BeforeEach(func() { fakeConn = &fake_net.FakeConn{} fakeConn.ReadReturns(0, io.EOF) fakeListenerFactory.ListenReturns(fakeLocalListener, nil) }) Context("with a permanent error", func() { BeforeEach(func() { fakeLocalListener.AcceptReturns(nil, errors.New("boom")) }) It("stops trying to accept connections", func() { Eventually(fakeLocalListener.AcceptCallCount).Should(Equal(1)) Consistently(fakeLocalListener.AcceptCallCount).Should(Equal(1)) Expect(fakeLocalListener.CloseCallCount()).To(Equal(1)) }) }) Context("with a temporary error", func() { var timeCh chan time.Time BeforeEach(func() { timeCh = make(chan time.Time, 3) fakeLocalListener.AcceptStub = func() (net.Conn, error) { timeCh := timeCh if fakeLocalListener.AcceptCallCount() > 3 { close(timeCh) return nil, test_helpers.NewTestNetError(false, false) } else { timeCh <- time.Now() return nil, test_helpers.NewTestNetError(false, true) } } }) It("retries connecting after a short delay", func() { Eventually(fakeLocalListener.AcceptCallCount).Should(Equal(3)) Expect(timeCh).To(HaveLen(3)) times := make([]time.Time, 0) for t := range timeCh { times = append(times, t) } Expect(times[1]).To(BeTemporally("~", times[0].Add(115*time.Millisecond), 30*time.Millisecond)) 
Expect(times[2]).To(BeTemporally("~", times[1].Add(115*time.Millisecond), 30*time.Millisecond)) }) }) }) Context("when dialing the connect address fails", func() { var fakeTarget *fake_net.FakeConn BeforeEach(func() { fakeTarget = &fake_net.FakeConn{} fakeSecureClient.DialReturns(fakeTarget, errors.New("boom")) }) It("does not call close on the target connection", func() { Consistently(fakeTarget.CloseCallCount).Should(Equal(0)) }) }) }) Describe("Wait", func() { var opts *options.SSHOptions var waitErr error BeforeEach(func() { opts = &options.SSHOptions{ AppName: "app-1", } currentApp.State = "STARTED" currentApp.Diego = true sshEndpointFingerprint = "" sshEndpoint = "" token = "" }) JustBeforeEach(func() { connectErr := secureShell.Connect(opts) Expect(connectErr).NotTo(HaveOccurred()) waitErr = secureShell.Wait() }) It("calls wait on the secureClient", func() { Expect(waitErr).NotTo(HaveOccurred()) Expect(fakeSecureClient.WaitCallCount()).To(Equal(1)) }) Describe("keep alive messages", func() { var times []time.Time var timesCh chan []time.Time var done chan struct{} BeforeEach(func() { keepAliveDuration = 100 * time.Millisecond times = []time.Time{} timesCh = make(chan []time.Time, 1) done = make(chan struct{}, 1) fakeConnection.SendRequestStub = func(reqName string, wantReply bool, message []byte) (bool, []byte, error) { Expect(reqName).To(Equal("keepalive@cloudfoundry.org")) Expect(wantReply).To(BeTrue()) Expect(message).To(BeNil()) times = append(times, time.Now()) if len(times) == 3 { timesCh <- times close(done) } return true, nil, nil } fakeSecureClient.WaitStub = func() error { Eventually(done).Should(BeClosed()) return nil } }) It("sends keep alive messages at the expected interval", func() { Expect(waitErr).NotTo(HaveOccurred()) times := <-timesCh Expect(times[2]).To(BeTemporally("~", times[0].Add(200*time.Millisecond), 100*time.Millisecond)) }) }) }) Describe("Close", func() { var opts *options.SSHOptions BeforeEach(func() { opts = &options.SSHOptions{ AppName: "app-1", } currentApp.State = "STARTED" currentApp.Diego = true sshEndpointFingerprint = "" sshEndpoint = "" token = "" }) JustBeforeEach(func() { connectErr := secureShell.Connect(opts) Expect(connectErr).NotTo(HaveOccurred()) }) It("calls close on the secureClient", func() { err := secureShell.Close() Expect(err).NotTo(HaveOccurred()) Expect(fakeSecureClient.CloseCallCount()).To(Equal(1)) }) }) })
apache-2.0
joeyvanderbie/Bluetooth-LE-Library---Android
sample_app/src/main/java/org/hva/createit/btlescan/util/BluetoothUtils.java
1489
package org.hva.createit.btlescan.util; import android.app.Activity; import android.bluetooth.BluetoothAdapter; import android.bluetooth.BluetoothManager; import android.content.Context; import android.content.Intent; import android.content.pm.PackageManager; public final class BluetoothUtils { public final static int REQUEST_ENABLE_BT = 2001; private final Activity mActivity; private final BluetoothAdapter mBluetoothAdapter; public BluetoothUtils(final Activity activity) { mActivity = activity; final BluetoothManager btManager = (BluetoothManager) mActivity.getSystemService(Context.BLUETOOTH_SERVICE); mBluetoothAdapter = btManager.getAdapter(); } public void askUserToEnableBluetoothIfNeeded() { if (isBluetoothLeSupported() && (mBluetoothAdapter == null || !mBluetoothAdapter.isEnabled())) { final Intent enableBtIntent = new Intent(BluetoothAdapter.ACTION_REQUEST_ENABLE); mActivity.startActivityForResult(enableBtIntent, REQUEST_ENABLE_BT); } } public BluetoothAdapter getBluetoothAdapter() { return mBluetoothAdapter; } public boolean isBluetoothLeSupported() { return mActivity.getPackageManager().hasSystemFeature(PackageManager.FEATURE_BLUETOOTH_LE); } public boolean isBluetoothOn() { if (mBluetoothAdapter == null) { return false; } else { return mBluetoothAdapter.isEnabled(); } } }
apache-2.0
lugray/amphtml
test/functional/test-sanitizer.js
4401
/** * Copyright 2015 The AMP HTML Authors. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS-IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import {sanitizeFormattingHtml, sanitizeHtml} from '../../src/sanitizer'; describe('sanitizeHtml', () => { it('should output basic text', () => { expect(sanitizeHtml('abc')).to.be.equal('abc'); }); it('should output valid markup', () => { expect(sanitizeHtml('<h1>abc</h1>')).to.be.equal('<h1>abc</h1>'); expect(sanitizeHtml('<h1>a<i>b</i>c</h1>')).to.be.equal( '<h1>a<i>b</i>c</h1>'); expect(sanitizeHtml('<h1>a<i>b</i><br>c</h1>')).to.be.equal( '<h1>a<i>b</i><br>c</h1>'); expect(sanitizeHtml( '<h1>a<i>b</i>c' + '<amp-img src="http://example.com/1.png"></amp-img></h1>')) .to.be.equal( '<h1>a<i>b</i>c' + '<amp-img src="http://example.com/1.png"></amp-img></h1>'); }); it('should NOT output security-sensitive markup', () => { expect(sanitizeHtml('a<script>b</script>c')).to.be.equal('ac'); expect(sanitizeHtml('a<style>b</style>c')).to.be.equal('ac'); expect(sanitizeHtml('a<img>c')).to.be.equal('ac'); expect(sanitizeHtml('a<iframe></iframe>c')).to.be.equal('ac'); expect(sanitizeHtml('a<template></template>c')).to.be.equal('ac'); }); it('should NOT output security-sensitive markup when nested', () => { expect(sanitizeHtml('a<script><style>b</style></script>c')) .to.be.equal('ac'); expect(sanitizeHtml('a<style><iframe>b</iframe></style>c')) .to.be.equal('ac'); expect(sanitizeHtml('a<script><img></script>c')) .to.be.equal('ac'); }); it('should NOT output security-sensitive markup when broken', () => { expect(sanitizeHtml('a<script>bc')).to.be.equal('a'); expect(sanitizeHtml('a<SCRIPT>bc')).to.be.equal('a'); }); it('should output "on" attribute', () => { expect(sanitizeHtml('a<a on="tap">b</a>')).to.be.equal( 'a<a on="tap">b</a>'); }); it('should output "href" attribute', () => { expect(sanitizeHtml('a<a href="http://acme.com">b</a>')).to.be.equal( 'a<a href="http://acme.com">b</a>'); }); it('should NOT output security-sensitive attributes', () => { expect(sanitizeHtml('a<a onclick="alert">b</a>')).to.be.equal('a<a>b</a>'); expect(sanitizeHtml('a<a style="color: red;">b</a>')).to.be.equal( 'a<a>b</a>'); expect(sanitizeHtml('a<a STYLE="color: red;">b</a>')).to.be.equal( 'a<a>b</a>'); expect(sanitizeHtml('a<a href="javascript:alert">b</a>')).to.be.equal( 'a<a>b</a>'); expect(sanitizeHtml('a<a href="JAVASCRIPT:alert">b</a>')).to.be.equal( 'a<a>b</a>'); }); it('should NOT output security-sensitive attributes', () => { expect(sanitizeHtml('a<a onclick="alert">b</a>')).to.be.equal('a<a>b</a>'); }); }); describe('sanitizeFormattingHtml', () => { it('should output basic text', () => { expect(sanitizeFormattingHtml('abc')).to.be.equal('abc'); }); it('should output valid markup', () => { expect(sanitizeFormattingHtml('<b>abc</b>')).to.be.equal('<b>abc</b>'); expect(sanitizeFormattingHtml('<b>ab<br>c</b>')).to.be.equal( '<b>ab<br>c</b>'); expect(sanitizeFormattingHtml('<b>a<i>b</i>c</b>')).to.be.equal( '<b>a<i>b</i>c</b>'); }); it('should NOT output non-whitelisted markup', () => { 
    expect(sanitizeFormattingHtml('a<div>b</div>c')).to.be.equal('ac');
    expect(sanitizeFormattingHtml('a<style>b</style>c')).to.be.equal('ac');
    expect(sanitizeFormattingHtml('a<img>c')).to.be.equal('ac');
  });

  it('should NOT output attributes', () => {
    expect(sanitizeFormattingHtml('<b color=red style="color: red">abc</b>'))
        .to.be.equal('<b>abc</b>');
  });

  it('should compensate for broken markup', () => {
    expect(sanitizeFormattingHtml('<b>a<i>b')).to.be.equal(
        '<b>a<i>b</i></b>');
  });
});
apache-2.0
wayshall/onetwo
core/modules/boot/src/main/java/org/onetwo/boot/module/cache/ZifishRedisCachePrefix.java
1018
package org.onetwo.boot.module.cache; import org.onetwo.boot.module.redis.RedisOperationService; import org.onetwo.common.utils.StringUtils; import org.springframework.data.redis.cache.DefaultRedisCachePrefix; /** * @author weishao zeng * <br/> */ public class ZifishRedisCachePrefix extends DefaultRedisCachePrefix { private final String cacheDelimiter; private String cacheKeyPrefix = RedisOperationService.DEFAUTL_CACHE_PREFIX; public ZifishRedisCachePrefix() { this(":", RedisOperationService.DEFAUTL_CACHE_PREFIX); } public ZifishRedisCachePrefix(String cacheKeyPrefix) { this(":", cacheKeyPrefix); } public ZifishRedisCachePrefix(String delimiter, String cacheKeyPrefix) { super(delimiter); this.cacheDelimiter = delimiter!=null?delimiter:":"; this.cacheKeyPrefix = cacheKeyPrefix!=null?cacheKeyPrefix:""; } public byte[] prefix(String cacheName) { cacheName = StringUtils.appendEndWith(cacheKeyPrefix, cacheDelimiter).concat(cacheName); return super.prefix(cacheName); } }
apache-2.0
ashokblend/incubator-carbondata
core/src/main/java/org/apache/carbondata/core/carbon/datastore/impl/btree/BTreeNonLeafNode.java
7467
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.carbondata.core.carbon.datastore.impl.btree; import java.util.ArrayList; import java.util.List; import org.apache.carbondata.core.carbon.datastore.DataRefNode; import org.apache.carbondata.core.carbon.datastore.IndexKey; import org.apache.carbondata.core.carbon.datastore.chunk.DimensionColumnDataChunk; import org.apache.carbondata.core.carbon.datastore.chunk.MeasureColumnDataChunk; import org.apache.carbondata.core.constants.CarbonCommonConstants; import org.apache.carbondata.core.datastorage.store.FileHolder; /** * No leaf node of a b+tree class which will keep the matadata(start key) of the * leaf node */ public class BTreeNonLeafNode implements BTreeNode { /** * Child nodes */ private BTreeNode[] children; /** * list of keys in non leaf */ private List<IndexKey> listOfKeys; public BTreeNonLeafNode() { // creating a list which will store all the indexes listOfKeys = new ArrayList<IndexKey>(CarbonCommonConstants.DEFAULT_COLLECTION_SIZE); } /** * below method will return the one node indexes * * @return getting a complete leaf ]node keys */ @Override public IndexKey[] getNodeKeys() { return listOfKeys.toArray(new IndexKey[listOfKeys.size()]); } /** * as it is a non leaf node it will have the reference of all the leaf node * under it, setting all the children * * @param leaf nodes */ @Override public void setChildren(BTreeNode[] children) { this.children = children; } /** * setting the next node */ @Override public void setNextNode(BTreeNode nextNode) { // no required in case of non leaf node } /** * get the leaf node based on children * * @return leaf node */ @Override public BTreeNode getChild(int index) { return this.children[index]; } /** * add a key of a leaf node * * @param leaf node start keys */ @Override public void setKey(IndexKey key) { listOfKeys.add(key); } /** * @return whether its a leaf node or not */ @Override public boolean isLeafNode() { return false; } /** * Method to get the next block this can be used while scanning when * iterator of this class can be used iterate over blocks * * @return next block */ @Override public DataRefNode getNextDataRefNode() { throw new UnsupportedOperationException("Unsupported operation"); } /** * to get the number of keys tuples present in the block * * @return number of keys in the block */ @Override public int nodeSize() { return listOfKeys.size(); } /** * Method can be used to get the block index .This can be used when multiple * thread can be used scan group of blocks in that can we can assign the * some of the blocks to one thread and some to other * * @return block number */ @Override public long nodeNumber() { throw new UnsupportedOperationException("Unsupported operation"); } /** * This method will be used to get the max value of all the 
columns this can * be used in case of filter query * * @param max value of all the columns */ @Override public byte[][] getColumnsMaxValue() { // operation of getting the max value is not supported as its a non leaf // node // and in case of B+Tree data will be stored only in leaf node and // intermediate // node will be used only for searching the leaf node throw new UnsupportedOperationException("Unsupported operation"); } /** * This method will be used to get the max value of all the columns this can * be used in case of filter query * * @param min value of all the columns */ @Override public byte[][] getColumnsMinValue() { // operation of getting the min value is not supported as its a non leaf // node // and in case of B+Tree data will be stored only in leaf node and // intermediate // node will be used only for searching the leaf node throw new UnsupportedOperationException("Unsupported operation"); } /** * Below method will be used to get the dimension chunks * * @param fileReader file reader to read the chunks from file * @param blockIndexes indexes of the blocks need to be read * @return dimension data chunks */ @Override public DimensionColumnDataChunk[] getDimensionChunks(FileHolder fileReader, int[][] blockIndexes) { // operation of getting the dimension chunks is not supported as its a // non leaf node // and in case of B+Tree data will be stored only in leaf node and // intermediate // node will be used only for searching the leaf node throw new UnsupportedOperationException("Unsupported operation"); } /** * Below method will be used to get the dimension chunk * * @param fileReader file reader to read the chunk from file * @param blockIndex block index to be read * @return dimension data chunk */ @Override public DimensionColumnDataChunk getDimensionChunk(FileHolder fileReader, int blockIndexes) { // operation of getting the dimension chunk is not supported as its a // non leaf node // and in case of B+Tree data will be stored only in leaf node and // intermediate // node will be used only for searching the leaf node throw new UnsupportedOperationException("Unsupported operation"); } /** * Below method will be used to get the measure chunk * * @param fileReader file reader to read the chunk from file * @param blockIndexes block indexes to be read from file * @return measure column data chunk */ @Override public MeasureColumnDataChunk[] getMeasureChunks(FileHolder fileReader, int[][] blockIndexes) { // operation of getting the measure chunk is not supported as its a non // leaf node // and in case of B+Tree data will be stored only in leaf node and // intermediate // node will be used only for searching the leaf node throw new UnsupportedOperationException("Unsupported operation"); } /** * Below method will be used to read the measure chunk * * @param fileReader file read to read the file chunk * @param blockIndex block index to be read from file * @return measure data chunk */ @Override public MeasureColumnDataChunk getMeasureChunk(FileHolder fileReader, int blockIndex) { // operation of getting the measure chunk is not supported as its a non // leaf node // and in case of B+Tree data will be stored only in leaf node and // intermediate // node will be used only for searching the leaf node throw new UnsupportedOperationException("Unsupported operation"); } }
apache-2.0
hgl888/TeamTalk
win-client/core/utility/Multilingual.cpp
1285
/*******************************************************************************
 *   @file      Multilingual.cpp 2014\11\25 21:14:50 $
 *   @author    kuaidao <kuaidao@mogujie.com>
 *   @brief
 ******************************************************************************/
#include "stdafx.h"
#include "GlobalConfig.h"
#include "utility/utilCommonAPI.h"
#include "utility/Multilingual.h"

/******************************************************************************/
NAMESPACE_BEGIN(util)

// -----------------------------------------------------------------------------
// Multilingual: Public, Constructor
Multilingual::Multilingual()
{
}

// -----------------------------------------------------------------------------
// Multilingual: Public, Destructor
Multilingual::~Multilingual()
{
}

CString Multilingual::getStringById(LPCTSTR strID)
{
	CString cfgPath = util::getParentAppPath() + UTIL_MULTILIGNUAL;
	TCHAR szValue[MAX_PATH];
	::GetPrivateProfileString(_T("DEFAULT"), strID, _T(""), szValue, MAX_PATH, cfgPath);
	return szValue;
}

Multilingual* getMultilingual()
{
	static Multilingual multi;
	return &multi;
}

NAMESPACE_END(util)
/******************************************************************************/
apache-2.0
pister/wint
maven-wint-plugin/src/main/java/wint/maven/plugins/gen/common/ProjectConfig.java
6103
package wint.maven.plugins.gen.common; public class ProjectConfig { private String projectPath; private String groupId; private String artifactId; private String path; private String wintPackage; private String dbName; private String dbUser; private String dbPwd; private String dbUrl = "127.0.0.1"; private String charset = "utf-8"; private boolean sampleSupport = true; private boolean fileUploadSupport = true; private boolean jrebelSupport = false; private String jrebelPath; private String javaVersion ="1.6"; private String wintVersion = "1.5.7"; private String servletVersion = "2.4"; private String junitVersion = "3.8.1"; private String springVersion = "2.5.6"; private String ibatisVersion = "2.3.4.726"; private String c3p0Version = "0.9.1.2"; private String mysqlJdbcVersion = "5.1.30"; private String fileuploadVersion = "1.3.1"; private String commonsIOVersion = "2.1"; private String slf4jApiVersion = "1.6.4"; private String logbackVersion = "1.0.1"; private String driudVersion = "1.0.26"; private String wintPluginVersion = "1.2.1"; private String gsonVersion = "2.8.6"; private String tsonVersion = "1.2.4"; public String getGsonVersion() { return gsonVersion; } public void setGsonVersion(String gsonVersion) { this.gsonVersion = gsonVersion; } public String getTsonVersion() { return tsonVersion; } public void setTsonVersion(String tsonVersion) { this.tsonVersion = tsonVersion; } public String getDriudVersion() { return driudVersion; } public void setDriudVersion(String driudVersion) { this.driudVersion = driudVersion; } public String getPath() { return path; } public void setPath(String path) { this.path = path; } public String getCharset() { return charset; } public void setCharset(String charset) { this.charset = charset; } public String getWintPackage() { return wintPackage; } public void setWintPackage(String wintPackage) { this.wintPackage = wintPackage; } public boolean isSampleSupport() { return sampleSupport; } public void setSampleSupport(boolean sampleSupport) { this.sampleSupport = sampleSupport; } public boolean isFileUploadSupport() { return fileUploadSupport; } public void setFileUploadSupport(boolean fileUploadSupport) { this.fileUploadSupport = fileUploadSupport; } public String getJavaVersion() { return javaVersion; } public void setJavaVersion(String javaVersion) { this.javaVersion = javaVersion; } public String getWintVersion() { return wintVersion; } public void setWintVersion(String wintVersion) { this.wintVersion = wintVersion; } public String getServletVersion() { return servletVersion; } public void setServletVersion(String servletVersion) { this.servletVersion = servletVersion; } public String getJunitVersion() { return junitVersion; } public void setJunitVersion(String junitVersion) { this.junitVersion = junitVersion; } public String getSpringVersion() { return springVersion; } public void setSpringVersion(String springVersion) { this.springVersion = springVersion; } public String getIbatisVersion() { return ibatisVersion; } public void setIbatisVersion(String ibatisVersion) { this.ibatisVersion = ibatisVersion; } public String getC3p0Version() { return c3p0Version; } public void setC3p0Version(String c3p0Version) { this.c3p0Version = c3p0Version; } public String getMysqlJdbcVersion() { return mysqlJdbcVersion; } public void setMysqlJdbcVersion(String mysqlJdbcVersion) { this.mysqlJdbcVersion = mysqlJdbcVersion; } public String getFileuploadVersion() { return fileuploadVersion; } public void setFileuploadVersion(String fileuploadVersion) { this.fileuploadVersion 
= fileuploadVersion; } public String getCommonsIOVersion() { return commonsIOVersion; } public void setCommonsIOVersion(String commonsIOVersion) { this.commonsIOVersion = commonsIOVersion; } public String getSlf4jApiVersion() { return slf4jApiVersion; } public void setSlf4jApiVersion(String slf4jApiVersion) { this.slf4jApiVersion = slf4jApiVersion; } public String getLogbackVersion() { return logbackVersion; } public void setLogbackVersion(String logbackVersion) { this.logbackVersion = logbackVersion; } public boolean isJrebelSupport() { return jrebelSupport; } public void setJrebelSupport(boolean jrebelSupport) { this.jrebelSupport = jrebelSupport; } public String getJrebelPath() { return jrebelPath; } public void setJrebelPath(String jrebelPath) { this.jrebelPath = jrebelPath; } public String getGroupId() { return groupId; } public void setGroupId(String groupId) { this.groupId = groupId; } public String getArtifactId() { return artifactId; } public void setArtifactId(String artifactId) { this.artifactId = artifactId; } public String getDbName() { return dbName; } public void setDbName(String dbName) { this.dbName = dbName; } public String getDbUser() { return dbUser; } public void setDbUser(String dbUser) { this.dbUser = dbUser; } public String getDbPwd() { return dbPwd; } public void setDbPwd(String dbPwd) { this.dbPwd = dbPwd; } public String getDbUrl() { return dbUrl; } public void setDbUrl(String dbUrl) { this.dbUrl = dbUrl; } public String getWintPluginVersion() { return wintPluginVersion; } public void setWintPluginVersion(String wintPluginVersion) { this.wintPluginVersion = wintPluginVersion; } public String getProjectPath() { return projectPath; } public void setProjectPath(String projectPath) { this.projectPath = projectPath; } }
apache-2.0
Bigotry/OneBase
app/common/service/pay/driver/alipay/alipay-sdk-PHP-3.4.2/aop/request/AlipayEcoMycarDataserviceViolationinfoShareRequest.php
1948
<?php /** * ALIPAY API: alipay.eco.mycar.dataservice.violationinfo.share request * * @author auto create * @since 1.0, 2019-08-29 11:20:52 */ class AlipayEcoMycarDataserviceViolationinfoShareRequest { /** * ISV获取违章车辆信息 **/ private $bizContent; private $apiParas = array(); private $terminalType; private $terminalInfo; private $prodCode; private $apiVersion="1.0"; private $notifyUrl; private $returnUrl; private $needEncrypt=false; public function setBizContent($bizContent) { $this->bizContent = $bizContent; $this->apiParas["biz_content"] = $bizContent; } public function getBizContent() { return $this->bizContent; } public function getApiMethodName() { return "alipay.eco.mycar.dataservice.violationinfo.share"; } public function setNotifyUrl($notifyUrl) { $this->notifyUrl=$notifyUrl; } public function getNotifyUrl() { return $this->notifyUrl; } public function setReturnUrl($returnUrl) { $this->returnUrl=$returnUrl; } public function getReturnUrl() { return $this->returnUrl; } public function getApiParas() { return $this->apiParas; } public function getTerminalType() { return $this->terminalType; } public function setTerminalType($terminalType) { $this->terminalType = $terminalType; } public function getTerminalInfo() { return $this->terminalInfo; } public function setTerminalInfo($terminalInfo) { $this->terminalInfo = $terminalInfo; } public function getProdCode() { return $this->prodCode; } public function setProdCode($prodCode) { $this->prodCode = $prodCode; } public function setApiVersion($apiVersion) { $this->apiVersion=$apiVersion; } public function getApiVersion() { return $this->apiVersion; } public function setNeedEncrypt($needEncrypt) { $this->needEncrypt=$needEncrypt; } public function getNeedEncrypt() { return $this->needEncrypt; } }
apache-2.0
jmaoito/happyCoding
algorithm/reverseWords.cpp
1072
/*************** LeetCode Reverse Words in a String Given an input string, reverse the string word by word. For example, Given s = "the sky is blue", return "blue is sky the". Clarification: A sequence of non-space characters constitutes a word. The input string may contain leading or trailing spaces; however, your reversed string should not contain leading or trailing spaces. Multiple spaces between two words should be reduced to a single space in the reversed string. time complexity O(n) space complexity O(n) ********************/ class Solution { public: void reverseWords(string &s) { string word, tmp; s += ' '; // append a space to the end of the string; otherwise the last word would be lost. for (size_t i = 0; i < s.size(); i++) { if (s[i] != ' ') { word += s[i]; } else if (!word.empty()) { if (!tmp.empty()) word += ' '; tmp = word + tmp; word = ""; } } s = tmp; } };
apache-2.0
vaadin/designer-tutorials
emailclient-tutorial-data/src/main/java/org/vaadin/example/ui/MessageDesign.java
795
package org.vaadin.example.ui; import com.vaadin.annotations.AutoGenerated; import com.vaadin.annotations.DesignRoot; import com.vaadin.ui.Button; import com.vaadin.ui.HorizontalLayout; import com.vaadin.ui.Label; import com.vaadin.ui.declarative.Design; /** * !! DO NOT EDIT THIS FILE !! * * This class is generated by Vaadin Designer and will be overwritten. * * Please make a subclass with logic and additional interfaces as needed, e.g * class LoginView extends LoginDesign implements View { … } */ @DesignRoot @AutoGenerated @SuppressWarnings("serial") public class MessageDesign extends HorizontalLayout { protected Button indicatorButton; protected Label senderLabel; protected Label messageLabel; public MessageDesign() { Design.read(this); } }
apache-2.0
gdbots/query-parser-php
src/Enum/BoolOperator.php
166
<?php declare(strict_types=1); namespace Gdbots\QueryParser\Enum; enum BoolOperator: int { case OPTIONAL = 0; case REQUIRED = 1; case PROHIBITED = 2; }
apache-2.0
HuangLS/neo4j
community/function/src/main/java/org/neo4j/function/ThrowingIntPredicate.java
1422
/* * Copyright (c) 2002-2018 "Neo Technology," * Network Engine for Objects in Lund AB [http://neotechnology.com] * * This file is part of Neo4j. * * Neo4j is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package org.neo4j.function; /** * Represents a predicate (boolean-valued function) of one int-valued argument. This is the int-consuming primitive type specialization of {@link Predicate}. * * @param <E> the type of exception that may be thrown from the predicate */ public interface ThrowingIntPredicate<E extends Exception> { /** * Evaluates this predicate on the given argument. * * @param value the input argument * @return true if the input argument matches the predicate, otherwise false * @throws E an exception if the predicate fails */ boolean test( int value ) throws E; }
apache-2.0
williamleif/socialsent
example.py
1252
from socialsent import seeds from socialsent import lexicons from socialsent.polarity_induction_methods import random_walk from socialsent.evaluate_methods import binary_metrics from socialsent.representations.representation_factory import create_representation if __name__ == "__main__": print "Evaluating SentProp with 100-dimensional GloVe embeddings" print "Evaluating only binary classification performance on General Inquirer lexicon" lexicon = lexicons.load_lexicon("inquirer", remove_neutral=True) pos_seeds, neg_seeds = seeds.hist_seeds() embeddings = create_representation("GIGA", "data/example_embeddings/glove.6B.100d.txt", set(lexicon.keys()).union(pos_seeds).union(neg_seeds)) eval_words = [word for word in embeddings.iw if word not in pos_seeds and word not in neg_seeds] # Using SentProp with 10 neighbors and beta=0.99 polarities = random_walk(embeddings, pos_seeds, neg_seeds, beta=0.99, nn=10, sym=True, arccos=True) acc, auc, avg_per = binary_metrics(polarities, lexicon, eval_words) print "Accuracy with best threshold: {:0.2f}".format(acc) print "ROC AUC: {:0.2f}".format(auc) print "Average precision score: {:0.2f}".format(avg_per)
apache-2.0
SimonCat1989/simoncat-framework
simoncat-framework-graph/simoncat-framework-graph-spec/src/main/java/com/simoncat/framework/graph/elements/impl/AbsGraph.java
4782
package com.simoncat.framework.graph.elements.impl; import java.util.Collection; import java.util.Collections; import java.util.Objects; import java.util.Optional; import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.StringUtils; import com.google.common.collect.HashBasedTable; import com.google.common.collect.Table; import com.simoncat.framework.graph.elements.Edge; import com.simoncat.framework.graph.elements.Graph; import com.simoncat.framework.graph.elements.Vertex; @Slf4j abstract class AbsGraph<VERTEX_VAL, EDGE_VAL> implements Graph<VERTEX_VAL, EDGE_VAL> { private final Edge<VERTEX_VAL, EDGE_VAL> PLACEHOLDER_EDGE = new EdgeImpl<>(null, null, null); private Table<Vertex<VERTEX_VAL>, Vertex<VERTEX_VAL>, Edge<VERTEX_VAL, EDGE_VAL>> graph; AbsGraph() { graph = HashBasedTable.create(); } Table<Vertex<VERTEX_VAL>, Vertex<VERTEX_VAL>, Edge<VERTEX_VAL, EDGE_VAL>> getGraph() { return graph; } @Override public int getSizeOfVertexs() { return graph.rowKeySet().size(); } @Override public int getSizeOfEdges() { return graph.values().size() - getSizeOfVertexs(); } @Override public Optional<Edge<VERTEX_VAL, EDGE_VAL>> getEdge(Vertex<VERTEX_VAL> startVertex, Vertex<VERTEX_VAL> endVertex) { if (Objects.isNull(startVertex) || Objects.isNull(endVertex)) { log.error("Detect NULL when get edge with vertex: from {} to {}.", startVertex, endVertex); return Optional.empty(); } log.debug("Start to get edge from vertex {} to vertex {}.", startVertex, endVertex); Edge<VERTEX_VAL, EDGE_VAL> result = graph.get(startVertex, endVertex); if (PLACEHOLDER_EDGE == result) { log.debug("Detect Place Holder from vertex {} to vertex {}.", startVertex, endVertex); return Optional.empty(); } return Optional.ofNullable(result); } @Override public Optional<Vertex<VERTEX_VAL>> getVertexByName(String vertexName) { if (StringUtils.isBlank(vertexName)) { log.error("Detect NULL when get vertex by vertex name: {}.", vertexName); return Optional.empty(); } return graph.rowKeySet().stream().filter(vertex -> vertex.getName().equals(vertexName)).findFirst(); } @Override public Optional<Vertex<VERTEX_VAL>> getVertexByValue(VERTEX_VAL vertexValue) { if (Objects.isNull(vertexValue)) { log.error("Detect NULL when get vertex by vertex value: {}.", vertexValue); return Optional.empty(); } return graph.rowKeySet().stream().filter(vertex -> vertex.getValue().equals(vertexValue)).findFirst(); } @Override public void insertNewVertex(Vertex<VERTEX_VAL> newVertex) { if (Objects.isNull(newVertex)) { log.error("Detect NULL when insert vertex: {}.", newVertex); } else { graph.put(newVertex, newVertex, PLACEHOLDER_EDGE); } } @Override public void insertNewVertexes(Collection<Vertex<VERTEX_VAL>> newVertexes) { if (Objects.isNull(newVertexes)) { log.error("Detect NULL when insert vertex list: {}.", newVertexes); } else { newVertexes.stream().forEach(this::insertNewVertex); } } @Override public void insertNewEdge(Vertex<VERTEX_VAL> startVertex, Vertex<VERTEX_VAL> endVertex, Edge<VERTEX_VAL, EDGE_VAL> newEdge) { if (Objects.isNull(startVertex) || Objects.isNull(endVertex) || Objects.isNull(newEdge)) { log.error("Detect NULL when insert edge: {} with vertex: from {} to {}.", startVertex, endVertex); } else { graph.put(startVertex, endVertex, newEdge); } } @Override public void deleteEdge(Vertex<VERTEX_VAL> startVertex, Vertex<VERTEX_VAL> endVertex) { if (Objects.isNull(startVertex) || Objects.isNull(endVertex)) { log.error("Detect NULL when delete edge from {} to {}.", startVertex, endVertex); } else { 
graph.remove(startVertex, endVertex); } } @Override public void deleteVertex(Vertex<VERTEX_VAL> vertex) { if (Objects.isNull(vertex)) { log.error("Detect NULL when delete vertex {}.", vertex); } else { graph.row(vertex).clear(); graph.column(vertex).clear(); } } @Override public Collection<Edge<VERTEX_VAL, EDGE_VAL>> getAllAdjacentEdges(Vertex<VERTEX_VAL> startVertex) { if (Objects.isNull(startVertex)) { log.error("Detect NULL when get all adjacent edges with vertex {}.", startVertex); return Collections.emptyList(); } else { return graph.row(startVertex).values().stream().filter(this::isNonEmptyEdge).collect(Collectors.toList()); } } @Override public Collection<Vertex<VERTEX_VAL>> getAllVertexes() { return graph.rowKeySet(); } @Override public Collection<Edge<VERTEX_VAL, EDGE_VAL>> getAllRoutes() { return graph.values().stream().filter(this::isNonEmptyEdge).collect(Collectors.toList()); } protected boolean isNonEmptyEdge(Edge<VERTEX_VAL, EDGE_VAL> targetEdge) { return targetEdge != PLACEHOLDER_EDGE; } }
apache-2.0
gdefias/JavaDemo
InitJava/jcip/src/main/java/net/jcip/examples/part4_advancedtopics/chapter14_customsynctool/GrumpyBoundedBuffer.java
1453
package net.jcip.examples.part4_advancedtopics.chapter14_customsynctool; import net.jcip.annotations.ThreadSafe; /** * GrumpyBoundedBuffer passes precondition failures on to the caller */ @ThreadSafe public class GrumpyBoundedBuffer <V> extends BaseBoundedBuffer<V> { public GrumpyBoundedBuffer() { this(100); } public GrumpyBoundedBuffer(int size) { super(size); } public synchronized void put(V v) throws BufferFullException { if (isFull()) throw new BufferFullException(); // when the precondition does not hold, the bounded buffer does not carry out the operation doPut(v); } public synchronized V take() throws BufferEmptyException { if (isEmpty()) throw new BufferEmptyException(); return doTake(); } } // code that calls GrumpyBoundedBuffer class ExampleUsage { private GrumpyBoundedBuffer<String> buffer; int SLEEP_GRANULARITY = 50; void useBuffer() throws InterruptedException { while (true) { try { String item = buffer.take(); // use item break; } catch (BufferEmptyException e) { // the caller must handle the precondition failure (BufferEmptyException) itself Thread.sleep(SLEEP_GRANULARITY); } } } } class BufferFullException extends RuntimeException { } class BufferEmptyException extends RuntimeException { }
apache-2.0
saqlainmediasoft/Maintenance-Management-System
admin/kendoui/wrappers/php/web/datetimepicker/rangeselection.php
1793
<?php require_once '../../lib/Kendo/Autoload.php'; require_once '../../include/header.php'; ?> <?php $start = new \Kendo\UI\DateTimePicker('start'); $start->value(new DateTime('now', new DateTimeZone('UTC'))) ->parseFormats(array('MM/dd/yyyy')) ->change('startChange'); $end = new \Kendo\UI\DateTimePicker('end'); $end->value(new DateTime('now', new DateTimeZone('UTC'))) ->parseFormats(array('MM/dd/yyyy')) ->change('endChange'); ?> <div class="demo-section" style="width:535px"> <label for="start">Start date:</label> <?= $start->render() ?> <label for="end" style="margin-left:3em">End date:</label> <?= $end->render() ?> </div> <script> var start, end; function startChange() { var startDate = start.value(); if (startDate) { startDate = new Date(startDate); startDate.setDate(startDate.getDate() + 1); end.min(startDate); } } function endChange() { var endDate = end.value(); if (endDate) { endDate = new Date(endDate); endDate.setDate(endDate.getDate() - 1); start.max(endDate); } } $(document).ready(function() { start = $("#start").data("kendoDateTimePicker"); end = $("#end").data("kendoDateTimePicker"); start.max(end.value()); end.min(start.value()); }); </script> <style scoped> #example .k-datetimepicker { vertical-align: middle; } #example h3 { clear: both; } #example .code-sample { width: 60%; float:left; margin-bottom: 20px; } #example .output { width: 24%; margin-left: 4%; float:left; } </style> <?php require_once '../../include/footer.php'; ?>
apache-2.0
gomatcha/matcha
view/ios/progressview.go
1911
// Package ios implements native iOS views. package ios import ( "image/color" "gomatcha.io/matcha/comm" "gomatcha.io/matcha/internal" "gomatcha.io/matcha/layout/constraint" "gomatcha.io/matcha/paint" pb "gomatcha.io/matcha/proto" pbios "gomatcha.io/matcha/proto/view/ios" "gomatcha.io/matcha/view" ) // ProgressView implements a progress view. type ProgressView struct { view.Embed Progress float64 ProgressNotifier comm.Float64Notifier ProgressColor color.Color PaintStyle *paint.Style progressNotifier comm.Float64Notifier } // NewProgressView returns a new view. func NewProgressView() *ProgressView { return &ProgressView{} } // Lifecycle implements the view.View interface. func (v *ProgressView) Lifecycle(from, to view.Stage) { if view.ExitsStage(from, to, view.StageMounted) { if v.progressNotifier != nil { v.Unsubscribe(v.progressNotifier) } } } // Build implements the view.View interface. func (v *ProgressView) Build(ctx view.Context) view.Model { l := &constraint.Layouter{} l.Solve(func(s *constraint.Solver) { s.Height(2) // 2.5 if it's a bar s.WidthEqual(l.MinGuide().Width()) s.TopEqual(l.MaxGuide().Top()) s.LeftEqual(l.MaxGuide().Left()) }) if v.ProgressNotifier != v.progressNotifier { if v.progressNotifier != nil { v.Unsubscribe(v.progressNotifier) } if v.ProgressNotifier != nil { v.Subscribe(v.ProgressNotifier) } v.progressNotifier = v.ProgressNotifier } val := v.Progress if v.ProgressNotifier != nil { val = v.ProgressNotifier.Value() } painter := paint.Painter(nil) if v.PaintStyle != nil { painter = v.PaintStyle } return view.Model{ Painter: painter, Layouter: l, NativeViewName: "gomatcha.io/matcha/view/progressview", NativeViewState: internal.MarshalProtobuf(&pbios.ProgressView{ Progress: val, ProgressColor: pb.ColorEncode(v.ProgressColor), }), } }
apache-2.0
AlekseyZhelo/idea-mob-plugin
src/main/java/com/alekseyzhelo/evilislands/mobplugin/EIMessages.java
618
package com.alekseyzhelo.evilislands.mobplugin; import com.intellij.AbstractBundle; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.PropertyKey; public class EIMessages extends AbstractBundle { public static String message(@NotNull @PropertyKey(resourceBundle = BUNDLE) String key, @NotNull Object... params) { return INSTANCE.getMessage(key, params); } public static final EIMessages INSTANCE = new EIMessages(); @NonNls public static final String BUNDLE = "messages.EIMessages"; private EIMessages() { super(BUNDLE); } }
apache-2.0
danc86/jena-core
src/main/java/com/hp/hpl/jena/ontology/impl/OntPropertyImpl.java
39011
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Package /////////////// package com.hp.hpl.jena.ontology.impl; // Imports /////////////// import java.util.*; import com.hp.hpl.jena.enhanced.*; import com.hp.hpl.jena.graph.*; import com.hp.hpl.jena.ontology.*; import com.hp.hpl.jena.rdf.model.*; import com.hp.hpl.jena.util.iterator.*; /** * <p> * Implementation of the abstraction representing a general ontology property. * </p> * * @author Ian Dickinson, HP Labs * (<a href="mailto:ian_dickinson@users.sourceforge.net" >email</a>) * @version CVS $Id: OntPropertyImpl.java,v 1.4 2010-01-11 09:17:06 chris-dollin Exp $ */ public class OntPropertyImpl extends OntResourceImpl implements OntProperty { // Constants ////////////////////////////////// // Static variables ////////////////////////////////// /** * A factory for generating OntProperty facets from nodes in enhanced graphs. * Note: should not be invoked directly by user code: use * {@link com.hp.hpl.jena.rdf.model.RDFNode#as as()} instead. */ @SuppressWarnings("hiding") public static Implementation factory = new Implementation() { @Override public EnhNode wrap( Node n, EnhGraph eg ) { if (canWrap( n, eg )) { return new OntPropertyImpl( n, eg ); } else { throw new ConversionException( "Cannot convert node " + n + " to OntProperty"); } } @Override public boolean canWrap( Node node, EnhGraph eg ) { // node will support being an OntProperty facet if it has rdf:type owl:Property or equivalent Profile profile = (eg instanceof OntModel) ? ((OntModel) eg).getProfile() : null; return (profile != null) && profile.isSupported( node, eg, OntProperty.class ); } }; // Instance variables ////////////////////////////////// // Constructors ////////////////////////////////// /** * <p> * Construct an ontology property represented by the given node in the given graph. * </p> * * @param n The node that represents the resource * @param g The enh graph that contains n */ public OntPropertyImpl( Node n, EnhGraph g ) { super( n, g ); } // External signature methods ////////////////////////////////// /** * <p> * Answer true to indicate that this resource is an RDF property. * </p> * * @return True. */ @Override public boolean isProperty() { return true; } /** * @see Property#getOrdinal() */ @Override public int getOrdinal() { return (as( Property.class )).getOrdinal(); } // subPropertyOf /** * <p>Assert that this property is sub-property of the given property. Any existing * statements for <code>subPropertyOf</code> will be removed.</p> * @param prop The property that this property is a sub-property of * @exception OntProfileException If the {@link Profile#SUB_PROPERTY_OF()} property is not supported in the current language profile. 
*/ @Override public void setSuperProperty( Property prop ) { setPropertyValue( getProfile().SUB_PROPERTY_OF(), "SUB_PROPERTY_OF", prop ); } /** * <p>Add a super-property of this property.</p> * @param prop A property that is a super-property of this property. * @exception OntProfileException If the {@link Profile#SUB_PROPERTY_OF()} property is not supported in the current language profile. */ @Override public void addSuperProperty( Property prop ) { addPropertyValue( getProfile().SUB_PROPERTY_OF(), "SUB_PROPERTY_OF", prop ); } /** * <p>Answer a property that is the super-property of this property. If there is * more than one such property, an arbitrary selection is made.</p> * @return A super-property of this property * @exception OntProfileException If the {@link Profile#SUB_PROPERTY_OF()} property is not supported in the current language profile. */ @Override public OntProperty getSuperProperty() { return objectAsProperty( getProfile().SUB_PROPERTY_OF(), "SUB_PROPERTY_OF" ); } /** * <p>Answer an iterator over all of the properties that are declared to be super-properties of * this property. Each element of the iterator will be an {@link OntProperty}.</p> * @return An iterator over the super-properties of this property. * @exception OntProfileException If the {@link Profile#SUB_PROPERTY_OF()} property is not supported in the current language profile. */ @Override public ExtendedIterator<OntProperty> listSuperProperties() { return listSuperProperties( false ); } /** * <p>Answer an iterator over all of the properties that are declared to be super-properties of * this property. Each element of the iterator will be an {@link OntProperty}.</p> * @param direct If true, only answer the directly adjacent properties in the * property hierarchy: i&#046;e&#046; eliminate any property for which there is a longer route * to reach that child under the super-property relation. * @return An iterator over the super-properties of this property. * @exception OntProfileException If the {@link Profile#SUB_PROPERTY_OF()} property is not supported in the current language profile. */ @Override public ExtendedIterator<OntProperty> listSuperProperties( boolean direct ) { return listDirectPropertyValues( getProfile().SUB_PROPERTY_OF(), "SUB_PROPERTY_OF", OntProperty.class, getProfile().SUB_PROPERTY_OF(), direct, false ) .filterDrop( new SingleEqualityFilter<OntProperty>( this ) ); } /** * <p>Answer true if the given property is a super-property of this property.</p> * @param prop A property to test. * @param direct If true, only consider the directly adjacent properties in the * property hierarchy * @return True if the given property is a super-property of this property. */ @Override public boolean hasSuperProperty( Property prop, boolean direct ) { return hasPropertyValue( getProfile().SUB_PROPERTY_OF(), "SUB_PROPERTY_OF", prop ); } /** * <p>Remove the given property from the super-properties of this property. If this statement * is not true of the current model, nothing happens.</p> * @param prop A property to be removed from the super-properties of this property * @exception OntProfileException If the {@link Profile#SUB_PROPERTY_OF()} property is not supported in the current language profile. */ @Override public void removeSuperProperty( Property prop ) { removePropertyValue( getProfile().SUB_PROPERTY_OF(), "SUB_PROPERTY_OF", prop ); } /** * <p>Assert that this property is super-property of the given property. 
Any existing * statements for <code>subPropertyOf</code> on <code>prop</code> will be removed.</p> * @param prop The property that is a sub-property of this property * @exception OntProfileException If the {@link Profile#SUB_PROPERTY_OF()} property is not supported in the current language profile. */ @Override public void setSubProperty( Property prop ) { // first we have to remove all of the inverse sub-prop links checkProfile( getProfile().SUB_PROPERTY_OF(), "SUB_PROPERTY_OF" ); for (StmtIterator i = getModel().listStatements( null, getProfile().SUB_PROPERTY_OF(), this ); i.hasNext(); ) { i.removeNext(); } prop.as( OntProperty.class ).addSuperProperty( this ); } /** * <p>Add a sub-property of this property.</p> * @param prop A property that is a sub-property of this property. * @exception OntProfileException If the {@link Profile#SUB_PROPERTY_OF()} property is not supported in the current language profile. */ @Override public void addSubProperty( Property prop ) { prop.as( OntProperty.class ).addSuperProperty( this ); } /** * <p>Answer a property that is the sub-property of this property. If there is * more than one such property, an arbitrary selection is made.</p> * @return A sub-property of this property * @exception OntProfileException If the {@link Profile#SUB_PROPERTY_OF()} property is not supported in the current language profile. */ @Override public OntProperty getSubProperty() { checkProfile( getProfile().SUB_PROPERTY_OF(), "SUB_PROPERTY_OF" ); return getModel().listStatements( null, getProfile().SUB_PROPERTY_OF(), this ) .nextStatement() .getSubject() .as( OntProperty.class ); } /** * <p>Answer an iterator over all of the properties that are declared to be sub-properties of * this property. Each element of the iterator will be an {@link OntProperty}.</p> * @return An iterator over the sub-properties of this property. * @exception OntProfileException If the {@link Profile#SUB_PROPERTY_OF()} property is not supported in the current language profile. */ @Override public ExtendedIterator<OntProperty> listSubProperties() { return listSubProperties( false ); } /** * <p>Answer an iterator over all of the properties that are declared to be sub-properties of * this property. Each element of the iterator will be an {@link OntProperty}.</p> * @param direct If true, only answer the direcly adjacent properties in the * property hierarchy: i&#046;e&#046; eliminate any property for which there is a longer route * to reach that child under the sub-property relation. * @return An iterator over the sub-properties of this property. * @exception OntProfileException If the {@link Profile#SUB_PROPERTY_OF()} property is not supported in the current language profile. */ @Override public ExtendedIterator<OntProperty> listSubProperties( boolean direct ) { return listDirectPropertyValues( getProfile().SUB_PROPERTY_OF(), "SUB_PROPERTY_OF", OntProperty.class, getProfile().SUB_PROPERTY_OF(), direct, true ); } /** * <p>Answer true if the given property is a sub-property of this property.</p> * @param prop A property to test. * @param direct If true, only consider the direcly adjacent properties in the * property hierarchy * @return True if the given property is a sub-property of this property. */ @Override public boolean hasSubProperty( Property prop, boolean direct ) { return prop.as( OntProperty.class ).hasSuperProperty( this, direct ); } /** * <p>Remove the given property from the sub-properties of this property. 
If this statement * is not true of the current model, nothing happens.</p> * @param prop A property to be removed from the sub-properties of this property * @exception OntProfileException If the {@link Profile#SUB_PROPERTY_OF()} property is not supported in the current language profile. */ @Override public void removeSubProperty( Property prop ) { prop.as( OntProperty.class ).removeSuperProperty( this ); } // domain /** * <p>Assert that the given resource represents the class of individuals that form the * domain of this property. Any existing <code>domain</code> statements for this property are removed.</p> * @param res The resource that represents the domain class for this property. * @exception OntProfileException If the {@link Profile#DOMAIN()} property is not supported in the current language profile. */ @Override public void setDomain( Resource res ) { setPropertyValue( getProfile().DOMAIN(), "DOMAIN", res ); } /** * <p>Add a resource representing the domain of this property.</p> * @param res A resource that represents a domain class for this property. * @exception OntProfileException If the {@link Profile#DOMAIN()} property is not supported in the current language profile. */ @Override public void addDomain( Resource res ) { addPropertyValue( getProfile().DOMAIN(), "DOMAIN", res ); } /** * <p>Answer a resource that represents the domain class of this property. If there is * more than one such resource, an arbitrary selection is made.</p> * @return An resource representing the class that forms the domain of this property * @exception OntProfileException If the {@link Profile#DOMAIN()} property is not supported in the current language profile. */ @Override public OntResource getDomain() { return objectAsResource( getProfile().DOMAIN(), "DOMAIN" ); } /** * <p>Answer an iterator over all of the declared domain classes of this property. * Each element of the iterator will be an {@link OntResource}.</p> * @return An iterator over the classes that form the domain of this property. * @exception OntProfileException If the {@link Profile#DOMAIN()} property is not supported in the current language profile. */ @Override public ExtendedIterator<OntClass> listDomain() { return listAs( getProfile().DOMAIN(), "DOMAIN", OntClass.class ); } /** * <p>Answer true if the given resource a class specifying the domain of this property.</p> * @param res A resource representing a class * @return True if the given resource is one of the domain classes of this property. */ @Override public boolean hasDomain( Resource res ) { return hasPropertyValue( getProfile().DOMAIN(), "DOMAIN", res ); } /** * <p>Remove the given class from the stated domain(s) of this property. If this statement * is not true of the current model, nothing happens.</p> * @param cls A class to be removed from the declared domain(s) of this property * @exception OntProfileException If the {@link Profile#DOMAIN()} property is not supported in the current language profile. */ @Override public void removeDomain( Resource cls ) { removePropertyValue( getProfile().DOMAIN(), "DOMAIN", cls ); } // range /** * <p>Assert that the given resource represents the class of individuals that form the * range of this property. Any existing <code>range</code> statements for this property are removed.</p> * @param res The resource that represents the range class for this property. * @exception OntProfileException If the {@link Profile#RANGE()} property is not supported in the current language profile. 
*/ @Override public void setRange( Resource res ) { setPropertyValue( getProfile().RANGE(), "RANGE", res ); } /** * <p>Add a resource representing the range of this property.</p> * @param res A resource that represents a range class for this property. * @exception OntProfileException If the {@link Profile#RANGE()} property is not supported in the current language profile. */ @Override public void addRange( Resource res ) { addPropertyValue( getProfile().RANGE(), "RANGE", res ); } /** * <p>Answer a resource that represents the range class of this property. If there is * more than one such resource, an arbitrary selection is made.</p> * @return An resource representing the class that forms the range of this property * @exception OntProfileException If the {@link Profile#RANGE()} property is not supported in the current language profile. */ @Override public OntResource getRange() { return objectAsResource( getProfile().RANGE(), "RANGE" ); } /** * <p>Answer an iterator over all of the declared range classes of this property. * Each element of the iterator will be an {@link OntResource}.</p> * @return An iterator over the classes that form the range of this property. * @exception OntProfileException If the {@link Profile#RANGE()} property is not supported in the current language profile. */ @Override public ExtendedIterator<OntClass> listRange() { return listAs( getProfile().RANGE(), "RANGE", OntClass.class ); } /** * <p>Answer true if the given resource a class specifying the range of this property.</p> * @param res A resource representing a class * @return True if the given resource is one of the range classes of this property. */ @Override public boolean hasRange( Resource res ) { return hasPropertyValue( getProfile().RANGE(), "RANGE", res ); } /** * <p>Remove the given class from the stated range(s) of this property. If this statement * is not true of the current model, nothing happens.</p> * @param cls A class to be removed from the declared range(s) of this property * @exception OntProfileException If the {@link Profile#RANGE()} property is not supported in the current language profile. */ @Override public void removeRange( Resource cls ) { removePropertyValue( getProfile().RANGE(), "RANGE", cls ); } // relationships between properties // equivalentProperty /** * <p>Assert that the given property is equivalent to this property. Any existing * statements for <code>equivalentProperty</code> will be removed.</p> * @param prop The property that this property is a equivalent to. * @exception OntProfileException If the {@link Profile#EQUIVALENT_PROPERTY()} property is not supported in the current language profile. */ @Override public void setEquivalentProperty( Property prop ) { setPropertyValue( getProfile().EQUIVALENT_PROPERTY(), "EQUIVALENT_PROPERTY", prop ); } /** * <p>Add a property that is equivalent to this property.</p> * @param prop A property that is equivalent to this property. * @exception OntProfileException If the {@link Profile#EQUIVALENT_PROPERTY()} property is not supported in the current language profile. */ @Override public void addEquivalentProperty( Property prop ) { addPropertyValue( getProfile().EQUIVALENT_PROPERTY(), "EQUIVALENT_PROPERTY", prop ); } /** * <p>Answer a property that is equivalent to this property. 
If there is * more than one such property, an arbitrary selection is made.</p> * @return A property equivalent to this property * @exception OntProfileException If the {@link Profile#EQUIVALENT_PROPERTY()} property is not supported in the current language profile. */ @Override public OntProperty getEquivalentProperty() { return objectAsProperty( getProfile().EQUIVALENT_PROPERTY(), "EQUIVALENT_PROPERTY" ); } /** * <p>Answer an iterator over all of the properties that are declared to be equivalent properties to * this property. Each element of the iterator will be an {@link OntProperty}.</p> * @return An iterator over the properties equivalent to this property. * @exception OntProfileException If the {@link Profile#EQUIVALENT_PROPERTY()} property is not supported in the current language profile. */ @Override public ExtendedIterator<OntProperty> listEquivalentProperties() { return listAs( getProfile().EQUIVALENT_PROPERTY(), "EQUIVALENT_PROPERTY", OntProperty.class ); } /** * <p>Answer true if the given property is equivalent to this property.</p> * @param prop A property to test for * @return True if the given property is equivalent to this property. */ @Override public boolean hasEquivalentProperty( Property prop ) { return hasPropertyValue( getProfile().EQUIVALENT_PROPERTY(), "EQUIVALENT_PROPERTY", prop ); } /** * <p>Remove the statement that this property and the given property are * equivalent. If this statement * is not true of the current model, nothing happens.</p> * @param prop A property that may be declared to be equivalent to this property * @exception OntProfileException If the {@link Profile#EQUIVALENT_PROPERTY()} property is not supported in the current language profile. */ @Override public void removeEquivalentProperty( Property prop ) { removePropertyValue( getProfile().EQUIVALENT_PROPERTY(), "EQUIVALENT_PROPERTY", prop ); } // inverseProperty /** * <p>Assert that the given property is the inverse of this property. Any existing * statements for <code>inverseOf</code> will be removed.</p> * @param prop The property that this property is a inverse to. * @exception OntProfileException If the {@link Profile#INVERSE_OF()} property is not supported in the current language profile. */ @Override public void setInverseOf( Property prop ) { setPropertyValue( getProfile().INVERSE_OF(), "INVERSE_OF", prop ); } /** * <p>Add a property that is the inverse of this property.</p> * @param prop A property that is the inverse of this property. * @exception OntProfileException If the {@link Profile#INVERSE_OF()} property is not supported in the current language profile. */ @Override public void addInverseOf( Property prop ) { addPropertyValue( getProfile().INVERSE_OF(), "INVERSE_OF", prop ); } /** * <p>Answer a property that is an inverse of this property. If there is * more than one such property, an arbitrary selection is made.</p> * @return A property inverse to this property * @exception OntProfileException If the {@link Profile#INVERSE_OF()} property is not supported in the current language profile. */ @Override public OntProperty getInverseOf() { return objectAsProperty( getProfile().INVERSE_OF(), "INVERSE_OF" ); } /** * <p>Answer an iterator over all of the properties that are declared to be inverse properties of * this property. Each element of the iterator will be an {@link OntProperty}.</p> * @return An iterator over the properties inverse to this property. * @exception OntProfileException If the {@link Profile#INVERSE_OF()} property is not supported in the current language profile. 
*/ @Override public ExtendedIterator<? extends OntProperty> listInverseOf() { return listAs( getProfile().INVERSE_OF(), "INVERSE_OF", OntProperty.class ); } /** * <p>Answer true if this property is the inverse of the given property.</p> * @param prop A property to test for * @return True if the this property is the inverse of the the given property. */ @Override public boolean isInverseOf( Property prop ) { return hasPropertyValue( getProfile().INVERSE_OF(), "INVERSE_OF", prop ); } /** * <p>Remove the statement that this property is the inverse of the given property. If this statement * is not true of the current model, nothing happens.</p> * @param prop A property that may be declared to be inverse to this property * @exception OntProfileException If the {@link Profile#INVERSE_OF()} property is not supported in the current language profile. */ @Override public void removeInverseProperty( Property prop ) { removePropertyValue( getProfile().INVERSE_OF(), "INVERSE_OF", prop ); } /** * <p>Answer a view of this property as a functional property</p> * @return This property, but viewed as a FunctionalProperty node * @exception ConversionException if the resource cannot be converted to a functional property * given the language profile and the current state of the underlying model. */ @Override public FunctionalProperty asFunctionalProperty() { return as( FunctionalProperty.class ); } /** * <p>Answer a view of this property as a datatype property</p> * @return This property, but viewed as a DatatypeProperty node * @exception ConversionException if the resource cannot be converted to a datatype property * given the language profile and the current state of the underlying model. */ @Override public DatatypeProperty asDatatypeProperty() { return as( DatatypeProperty.class ); } /** * <p>Answer a view of this property as an object property</p> * @return This property, but viewed as an ObjectProperty node * @exception ConversionException if the resource cannot be converted to an object property * given the language profile and the current state of the underlying model. */ @Override public ObjectProperty asObjectProperty() { return as( ObjectProperty.class ); } /** * <p>Answer a view of this property as a transitive property</p> * @return This property, but viewed as a TransitiveProperty node * @exception ConversionException if the resource cannot be converted to a transitive property * given the language profile and the current state of the underlying model. */ @Override public TransitiveProperty asTransitiveProperty() { return as( TransitiveProperty.class ); } /** * <p>Answer a view of this property as an inverse functional property</p> * @return This property, but viewed as an InverseFunctionalProperty node * @exception ConversionException if the resource cannot be converted to an inverse functional property * given the language profile and the current state of the underlying model. */ @Override public InverseFunctionalProperty asInverseFunctionalProperty() { return as( InverseFunctionalProperty.class ); } /** * <p>Answer a view of this property as a symmetric property</p> * @return This property, but viewed as a SymmetricProperty node * @exception ConversionException if the resource cannot be converted to a symmetric property * given the language profile and the current state of the underlying model. 
*/ @Override public SymmetricProperty asSymmetricProperty() { return as( SymmetricProperty.class ); } // conversion functions /** * <p>Answer a facet of this property as a functional property, adding additional information to the model if necessary.</p> * @return This property, but converted to a FunctionalProperty facet */ @Override public FunctionalProperty convertToFunctionalProperty() { return convertToType( getProfile().FUNCTIONAL_PROPERTY(), "FUNCTIONAL_PROPERTY", FunctionalProperty.class ); } /** * <p>Answer a facet of this property as a datatype property, adding additional information to the model if necessary.</p> * @return This property, but converted to a DatatypeProperty facet */ @Override public DatatypeProperty convertToDatatypeProperty() { return convertToType( getProfile().DATATYPE_PROPERTY(), "DATATYPE_PROPERTY", DatatypeProperty.class ); } /** * <p>Answer a facet of this property as an object property, adding additional information to the model if necessary.</p> * @return This property, but converted to an ObjectProperty facet */ @Override public ObjectProperty convertToObjectProperty() { return convertToType( getProfile().OBJECT_PROPERTY(), "OBJECT_PROPERTY", ObjectProperty.class ); } /** * <p>Answer a facet of this property as a transitive property, adding additional information to the model if necessary.</p> * @return This property, but converted to a TransitiveProperty facet */ @Override public TransitiveProperty convertToTransitiveProperty() { return convertToType( getProfile().TRANSITIVE_PROPERTY(), "TRANSITIVE_PROPERTY", TransitiveProperty.class ); } /** * <p>Answer a facet of this property as an inverse functional property, adding additional information to the model if necessary.</p> * @return This property, but converted to an InverseFunctionalProperty facet */ @Override public InverseFunctionalProperty convertToInverseFunctionalProperty() { return convertToType( getProfile().INVERSE_FUNCTIONAL_PROPERTY(), "INVERSE_FUNCTIONAL_PROPERTY", InverseFunctionalProperty.class ); } /** * <p>Answer a facet of this property as a symmetric property, adding additional information to the model if necessary.</p> * @return This property, but converted to a SymmetricProperty facet */ @Override public SymmetricProperty convertToSymmetricProperty() { return convertToType( getProfile().SYMMETRIC_PROPERTY(), "SYMMETRIC_PROPERTY", SymmetricProperty.class ); } // tests on property sub-types /** * <p>Answer true if this property is a functional property</p> * @return True if this this property has an <code>rdf:type</code> that defines it as a functional property. */ @Override public boolean isFunctionalProperty() { return hasRDFType( getProfile().FUNCTIONAL_PROPERTY(), "FUNCTIONAL_PROPERTY", false ); } /** * <p>Answer true if this property is a datatype property</p> * @return True if this this property has an <code>rdf:type</code> that defines it as a datatype property. */ @Override public boolean isDatatypeProperty() { return hasRDFType( getProfile().DATATYPE_PROPERTY(), "DATATYPE_PROPERTY", false ); } /** * <p>Answer true if this property is an object property</p> * @return True if this this property has an <code>rdf:type</code> that defines it as an object property. */ @Override public boolean isObjectProperty() { return hasRDFType( getProfile().OBJECT_PROPERTY(), "OBJECT_PROPERTY", false ); } /** * <p>Answer true if this property is a transitive property</p> * @return True if this this property has an <code>rdf:type</code> that defines it as a transitive property. 
*/ @Override public boolean isTransitiveProperty() { return hasRDFType( getProfile().TRANSITIVE_PROPERTY(), "TRANSITIVE_PROPERTY", false ); } /** * <p>Answer true if this property is an inverse functional property</p> * @return True if this this property has an <code>rdf:type</code> that defines it as an inverse functional property. */ @Override public boolean isInverseFunctionalProperty() { return hasRDFType( getProfile().INVERSE_FUNCTIONAL_PROPERTY(), "INVERSE_FUNCTIONAL_PROPERTY", false ); } /** * <p>Answer true if this property is a symmetric property</p> * @return True if this this property has an <code>rdf:type</code> that defines it as a symmetric property. */ @Override public boolean isSymmetricProperty() { return hasRDFType( getProfile().SYMMETRIC_PROPERTY(), "SYMMETRIC_PROPERTY", false ); } /** * <p>Answer the property that is the inverse of this property. If no such property is defined, * return null. If more than one inverse is defined, return an abritrary selection.</p> * @return The property that is the inverse of this property, or null. */ @Override public OntProperty getInverse() { ExtendedIterator<OntProperty> i = listInverse(); OntProperty p = i.hasNext() ? i.next() : null; i.close(); return p; } /** * <p>Answer an iterator over the properties that are defined to be inverses of this property.</p> * @return An iterator over the properties that declare themselves the <code>inverseOf</code> this property. */ @Override public ExtendedIterator<OntProperty> listInverse() { return getModel().listStatements( null, getProfile().INVERSE_OF(), this ).mapWith( new SubjectAsMapper<OntProperty>( OntProperty.class ) ); } /** * <p>Answer true if there is at least one inverse property for this property.</p> * @return True if property has an inverse. */ @Override public boolean hasInverse() { ExtendedIterator<OntProperty> i = listInverse(); boolean hasInv = i.hasNext(); i.close(); return hasInv; } /** * <p>Answer an iterator of all of the classes in this ontology, such * that each returned class has this property as one of its * properties in {@link OntClass#listDeclaredProperties()}. This * simulates a frame-like view of properties and classes; for more * details see the <a href="../../../../../../how-to/rdf-frames.html"> * RDF frames howto</a>.</p> * @return An iterator of the classes having this property as one * of their declared properties */ @Override public ExtendedIterator<OntClass> listDeclaringClasses() { return listDeclaringClasses( false ); } /** * <p>Answer an iterator of all of the classes in this ontology, such * that each returned class has this property as one of its * properties in {@link OntClass#listDeclaredProperties(boolean)}. 
This * simulates a frame-like view of properties and classes; for more * details see the <a href="../../../../../../how-to/rdf-frames.html"> * RDF frames howto</a>.</p> * @param direct If true, use only </em>direct</em> associations between classes * and properties * @return An iterator of the classes having this property as one * of their declared properties */ @Override public ExtendedIterator<OntClass> listDeclaringClasses( boolean direct ) { // first list the candidate classes, which will also help us // work out whether this is a "global" property or not Set<OntClass> cands = new HashSet<OntClass>(); for (Iterator<OntClass> i = listDomain(); i.hasNext(); ) { // the candidates include this class and it sub-classes List<OntClass> q = new ArrayList<OntClass>(); q.add( i.next() ); while (!q.isEmpty()) { OntClass c = q.remove( 0 ); if (!c.isOntLanguageTerm() && !cands.contains( c )) { // a new value that is not just a term from OWL or RDFS cands.add( c ); for (Iterator<OntClass> j = c.listSubClasses(); j.hasNext(); ) { q.add( j.next() ); } } } } if (cands.isEmpty()) { // no declared non-global domain, so this is a global prop if (!direct) { // in the non-direct case, global properties appear in the ldp // of all classes, but we ignore the built-in classes return ((OntModel) getModel()).listClasses() .filterDrop( new Filter<OntClass>() { @Override public boolean accept( OntClass c ) { return c.isOntLanguageTerm(); }} ); } else { // in the direct case, global properties only attach to the // local hierarchy roots return ((OntModel) getModel()).listHierarchyRootClasses(); } } else { // not a global property // pick out classes from the domain for which this is a declared prop return WrappedIterator.create( cands.iterator() ) .filterKeep( new FilterDeclaringClass( this, direct )); } } /** * <p>Answer an iterator over any restrictions that mention this property as * the property that the restriction is adding some constraint to. For example:</p> * <code><pre>&lt;owl:Restriction&gt; * &lt;owl:onProperty rdf:resource=&quot;#childOf&quot; /&gt; * &lt;owl:hasValue rdf:resource=&quot;#ian&quot; /&gt; * &lt;/owl:Restriction&gt;</pre></code> * <p><strong>Note</strong> that any such restrictions do not affect the global * semantics of this property itself. Restrictions define new class expressions, * and the property constraints are local to that class expression. This method * is provided as a convenience to assist callers to navigate the relationships * in the model.</p> * @return An iterator whose values are the restrictions from the local * model that reference this property. */ @Override public ExtendedIterator<Restriction> listReferringRestrictions() { return getModel().listStatements( null, getProfile().ON_PROPERTY(), this ) .mapWith( new SubjectAsMapper<Restriction>( Restriction.class ) ); } // Internal implementation methods ////////////////////////////////// /** * <p>Answer a property that is attached to the given model, which will either * be this property or a new property object with the same URI in the given * model. If the given model is an ontology model, make the new property object * an ontproperty.</p> * @param m A model * @return A property equal to this property that is attached to m. */ @Override public Property inModel( Model m ) { return (getModel() == m) ? 
this : m.createProperty( getURI() ); } //============================================================================== // Inner class definitions //============================================================================== /** * <p>Filter that accepts classes which have the given property as one of * their declared properties.</p> */ private class FilterDeclaringClass extends Filter<OntClass> { private boolean m_direct; private Property m_prop; private FilterDeclaringClass( Property prop, boolean direct ) { m_prop = prop; m_direct = direct; } @Override public boolean accept( OntClass o ) { return o.hasDeclaredProperty( m_prop, m_direct ); } } }
apache-2.0
xLeitix/jcloudscale
core/src/main/java/at/ac/tuwien/infosys/jcloudscale/logging/ClientLoggingConfiguration.java
8677
/* Copyright 2013 Philipp Leitner Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package at.ac.tuwien.infosys.jcloudscale.logging; import java.io.Serializable; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.logging.ConsoleHandler; import java.util.logging.Formatter; import java.util.logging.Handler; import java.util.logging.Level; import java.util.logging.Logger; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlRootElement; import at.ac.tuwien.infosys.jcloudscale.exception.JCloudScaleException; import at.ac.tuwien.infosys.jcloudscale.utility.ReflectionUtil; /** * Defines the configuration of the client-side logging infrastructure. */ @XmlRootElement @XmlAccessorType(XmlAccessType.FIELD) public class ClientLoggingConfiguration implements Serializable, Cloneable { private static final long serialVersionUID = 1L; protected Map<String, String> customLoggingLevels = new HashMap<>(); protected String defaultLoggingLevel = Level.SEVERE.getName(); protected String parentLoggerName = "at.ac.tuwien.infosys.jcloudscale"; protected List<String> handlers = new ArrayList<>(); protected String formatterClass = null; protected long logMessageTimeout = 2000;//determines old messages that have to be discarded. protected transient Logger parentLogger; /** * Creates default instance of the logging configuration with default parameters. */ public ClientLoggingConfiguration() { addHandler(ConsoleHandler.class); } /** * Sets the custom logging level for a specified logger. * @param logger The name of the logger that should have custom configuration * @param level The custom level assigned to this logger */ public synchronized void setCustomLoggingLevel(String logger, Level level) { customLoggingLevels.put(logger, level.getName()); } /** * Removes custom logging level for the specified logger. * @param logger The name of the logger that should not use custom configuration any more. */ public synchronized void removeCustomLoggingLevel(String logger) { customLoggingLevels.remove(logger); } /** * Sets the default logging level for all loggers that have no custom level explicitly set. * @param defaultLoggingLevel The level that should be used by default. */ public void setDefaultLoggingLevel(Level defaultLoggingLevel) { this.defaultLoggingLevel = defaultLoggingLevel.getName(); } /** * Sets the name of the parent logger for all loggers created from this configuration. * @param parentLoggerName */ public void setParentLoggerName(String parentLoggerName) { this.parentLoggerName = parentLoggerName; } /** * Adds logger output handler that will handle output of all loggers created. * @param handlerClass The class that implements <b>Handler</b> and has to handle output of the loggers. */ public synchronized void addHandler(Class<? 
extends Handler> handlerClass) { if(handlerClass == null) return; this.handlers.add(handlerClass.getName()); } /** * Gets all handlers currently configured to handle logging output. */ protected List<Handler> getHandlers() { List<Handler> realHandlers = new ArrayList<>(); for(String handler : this.handlers) { try { realHandlers.add((Handler)ReflectionUtil.newInstance(Class.forName(handler))); } catch(ClassNotFoundException ex) { } } return realHandlers; } /** * Sets logging output formatter providing class. * @param formatterClass The class that has to format logging messages. */ public void setFormatter(Class<? extends Formatter> formatterClass) { this.formatterClass = formatterClass.getName(); } /** * Gets currently configured formatter. * @return Returns the instance of the formatter class. */ protected Formatter getFormatter() { if(this.formatterClass == null || formatterClass.isEmpty()) return null; try { return (Formatter)ReflectionUtil.newInstance(Class.forName(this.formatterClass)); } catch (ClassNotFoundException e) { return null; } } /** * Determines the deadline after which messages will be just dropped. */ public long getLogMessageTimeout() { return logMessageTimeout; } /** * Determines the deadline after which messages will be just dropped. * @param logMessageTimeout The timeout in milliseconds that specifies the delay when messages should be * dropped instead of printing to the output. */ public void setLogMessageTimeout(long logMessageTimeout) { this.logMessageTimeout = logMessageTimeout; } //------------------------------------------------ private Level getLevelForLogger(String loggerName) { synchronized (this) { if(this.customLoggingLevels.containsKey(loggerName)) return Level.parse(this.customLoggingLevels.get(loggerName)); } return Level.parse(defaultLoggingLevel); } protected synchronized Logger getParentLogger() { if(this.parentLogger == null) { this.parentLogger = Logger.getLogger(parentLoggerName); this.parentLogger.setLevel(getLevelForLogger(parentLoggerName)); this.parentLogger.setUseParentHandlers(false); Formatter formatter = getFormatter(); if(this.handlers != null) { // removing existing handlers for(Handler handler : this.parentLogger.getHandlers()) this.parentLogger.removeHandler(handler); // adding required handlers. for(Handler handler : getHandlers()) { handler.setLevel(Level.ALL); if(formatter != null) handler.setFormatter(formatter); this.parentLogger.addHandler(handler); } } else { if(formatter != null) { Handler[] handlers = this.parentLogger.getHandlers(); if(handlers != null) for(Handler handler : handlers) { handler.setFormatter(formatter); handler.setLevel(Level.ALL); } } } } return parentLogger; } /** * Provides the logger for the specified name with the configured configuration. * @param loggerName The name of the logger to provide. * @return The configured logger with the specified name. */ public Logger getLogger(String loggerName) { if(loggerName.equals(parentLoggerName)) return getParentLogger(); Logger log = Logger.getLogger(loggerName); log.setParent(getParentLogger()); log.setUseParentHandlers(true); log.setLevel(getLevelForLogger(loggerName)); return log; } @Override public synchronized ClientLoggingConfiguration clone() { try { // calling parent class to clone its stuff and make a shallow-copy object. ClientLoggingConfiguration config = (ClientLoggingConfiguration)super.clone(); // cloning fields that has to be cloned. 
config.customLoggingLevels = new HashMap<>(customLoggingLevels); config.handlers = new ArrayList<>(handlers); return config; } catch (CloneNotSupportedException e) { throw new JCloudScaleException(e, "Failed to clone configuration."); } } }
apache-2.0
pulecp/puppet-corosync
lib/puppet/type/cs_colocation.rb
4434
Puppet::Type.newtype(:cs_colocation) do
  @doc = "Type for manipulating corosync/pacemaker colocation. Colocation is the
    grouping together of a set of primitives so that they travel together when
    one of them fails. For instance, if a web server vhost is colocated with a
    specific ip address and the web server software crashes, the ip address will
    migrate to the new host with the vhost.

    More information on Corosync/Pacemaker colocation can be found here:

    * http://www.clusterlabs.org/doc/en-US/Pacemaker/1.1/html/Clusters_from_Scratch/_ensuring_resources_run_on_the_same_host.html"

  ensurable

  newparam(:name) do
    desc "Identifier of the colocation entry. This value needs to be unique
      across the entire Corosync/Pacemaker configuration since it doesn't have
      the concept of name spaces per type."

    isnamevar
  end

  newproperty(:primitives, array_matching: :all) do
    desc "At least two Pacemaker primitives to be located together. Order of
      primitives in colocation groups is important. In Pacemaker, a colocation
      of 2 primitives behaves differently than a colocation between more than 2
      primitives. Here the behavior is altered to be more consistent.
      Examples of how to define colocations:

      - 2 primitives: [A, B] will cause A to be located first, and B will be
        located with A. This is different than how crm configure colocation
        works, because there [A, B] would mean colocate A with B, thus B should
        be located first.
      - multiple primitives: [A, B, C] will cause A to be located first, B next,
        and finally C. This is identical to how crm configure colocation works
        with multiple resources, it will add a colocated set.

      Property will raise an error if you do not provide an array containing at
      least two values. Values can be either the name of the primitive, or
      primitive:role.

      Note that we can only interpret colocations of single sets, not multiple
      sets combined. In Pacemaker speak, this means we can support 'A B C' but
      not e.g. 'A B (C D) E'. Feel free to contribute a patch for this."

    # Do some validation: the way Pacemaker colocation works we need to only accept
    # arrays with at least 2 values.
    def should=(value)
      super
      raise Puppet::Error, 'Puppet::Type::Cs_Colocation: The primitives property must be an array.' unless value.is_a? Array
      raise Puppet::Error, 'Puppet::Type::Cs_Colocation: The primitives property must be an array of at least one element.' if value.empty?
      @should
    end
  end

  newparam(:cib) do
    desc "Corosync applies its configuration immediately. Using a CIB allows you
      to group multiple primitives and relationships to be applied at once. This
      can be necessary to insert complex configurations into Corosync correctly.

      This parameter sets the CIB this colocation should be created in. A
      cs_shadow resource with a title of the same name as this value should also
      be added to your manifest."
  end

  newproperty(:score) do
    desc "The priority of this colocation. Primitives can be a part of multiple
      colocation groups and so there is a way to control which primitives get
      priority when forcing the move of other primitives. This value can be an
      integer but is often defined as the string INFINITY."

    defaultto 'INFINITY'
  end

  autorequire(:cs_shadow) do
    autos = []
    autos << @parameters[:cib].value if @parameters[:cib]
    autos
  end

  autorequire(:service) do
    %w(corosync pacemaker)
  end

  def extract_primitives
    result = []
    unless @parameters[:primitives].should.nil?
      if @parameters[:primitives].should.first.is_a?(Hash)
        @parameters[:primitives].should.each do |colocation_set|
          if colocation_set.key?('primitives')
            result << colocation_set['primitives']
          end
        end
      end
      if @parameters[:primitives].should.first.is_a?(String)
        @parameters[:primitives].should.each do |val|
          result << unmunge_cs_primitive(val)
        end
      end
    end
    result.flatten
  end

  [:cs_clone, :cs_primitive].each do |resource_type|
    autorequire(resource_type) do
      extract_primitives
    end
  end

  def unmunge_cs_primitive(name)
    name = name.split(':')[0]
    name = name[3..-1] if name.start_with? 'ms_'
    name
  end
end
apache-2.0
leafclick/intellij-community
platform/platform-impl/src/com/intellij/internal/rulerguide/RulerGuidePainter.java
7462
package com.intellij.internal.rulerguide; import com.intellij.openapi.Disposable; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.ui.AbstractPainter; import com.intellij.openapi.ui.JBPopupMenu; import com.intellij.openapi.util.Disposer; import com.intellij.openapi.wm.IdeGlassPane; import com.intellij.util.ui.GraphicsUtil; import org.jetbrains.annotations.NotNull; import javax.swing.*; import java.awt.*; import java.util.Optional; import java.util.concurrent.atomic.AtomicReference; import static com.intellij.internal.rulerguide.BasePreferences.*; final class RulerGuidePainter extends AbstractPainter implements Disposable { private static final Logger LOG = Logger.getInstance(RulerGuidePainter.class); private final ComponentBoundsFinder finder = new ComponentBoundsFinder(); private final AtomicReference<JRootPane> rootPane = new AtomicReference<>(); private Disposable disposable; RulerGuidePainter(Disposable parent) { Disposer.register(parent, this); } private void installPainter(@NotNull JRootPane rootPane) { Component glassPane = rootPane.getGlassPane(); if (glassPane instanceof IdeGlassPane) { assert disposable == null : "Disposable is not null"; disposable = Disposer.newDisposable("Ruler Guide"); Disposer.register(this, disposable); ((IdeGlassPane) glassPane).addPainter(glassPane, this, disposable); } else { ThiefGlassPane glass = new ThiefGlassPane(glassPane); rootPane.setGlassPane(glass); glass.setVisible(true); glass.revalidate(); } } public void removePainter() { finder.dispose(); Optional.ofNullable(rootPane.getAndSet(null)).ifPresent(this::removePainter); } private void removePainter(@NotNull JRootPane rootPane) { Component glassPane = rootPane.getGlassPane(); if (glassPane instanceof IdeGlassPane) { Disposer.dispose(disposable); disposable = null; glassPane.repaint(); } else if (glassPane instanceof ThiefGlassPane) { Component realGlassPane = ((ThiefGlassPane) glassPane).getRealGlassPane(); rootPane.setGlassPane(realGlassPane); realGlassPane.revalidate(); } else { Disposer.dispose(disposable); disposable = null; LOG.warn("GlassPane may be only IdeGlassPane or ThiefGlassPane ancestor but found " + glassPane); } } public void repaint(Component eventSource, Point eventPoint) { JRootPane newRootPane = SwingUtilities.getRootPane(eventSource); JRootPane oldRootPane = this.rootPane.getAndSet(newRootPane); if (newRootPane != oldRootPane) { Optional.ofNullable(oldRootPane).ifPresent(this::removePainter); Optional.ofNullable(newRootPane).ifPresent(this::installPainter); } if (newRootPane != null) { Point point = SwingUtilities.convertPoint(eventSource, eventPoint, newRootPane); finder.update(newRootPane, point); Component glassPane = newRootPane.getGlassPane(); if (glassPane instanceof IdeGlassPane) { setNeedsRepaint(true, glassPane); } else if (glassPane instanceof ThiefGlassPane) { glassPane.repaint(); } else { throw new IllegalStateException("GlassPane maybe only IdeGlassPane or ThiefGlassPane but found " + glassPane); } } } @Override public boolean needsRepaint() { return finder.getLastResult() != null; } @Override public void executePaint(Component component, Graphics2D g) { Graphics2D g2d = (Graphics2D) g.create(0, 0, component.getWidth(), component.getHeight()); GraphicsUtil.setupAntialiasing(g2d, false, false); ComponentBoundsFinder.Result result = finder.getLastResult(); ComponentBounds pivot = null; for (ComponentBounds bounds : result.getBounds()) { if (bounds.contains(result.getPoint())) { pivot = bounds; g2d.setColor(BACKGROUND_COLOR); 
g2d.fillRect(pivot.x, pivot.y, pivot.width, pivot.height); paintBaselines(pivot, g2d); break; } } for (ComponentBounds bounds : result.getBounds()) { g2d.setColor(BACKGROUND_COLOR); if (pivot != null && pivot != bounds) { Point baselineOtherPoint = new Point(bounds.x + bounds.verticalBaseline, bounds.y + bounds.horizontalBaseline); Point baselinePivotPoint = new Point(pivot.x + pivot.verticalBaseline, pivot.y + pivot.horizontalBaseline); int verticalOffset = Math.abs(baselineOtherPoint.x - baselinePivotPoint.x); int horizontalOffset = Math.abs(baselineOtherPoint.y - baselinePivotPoint.y); if (verticalOffset == 0 || horizontalOffset == 0) { g2d.setColor(FINE_COLOR); } else if (verticalOffset <= getAllowedGap() || horizontalOffset <= getAllowedGap()) { g2d.setColor(ERROR_COLOR); } else continue; g2d.fillRect(bounds.x, bounds.y, bounds.width, bounds.height); } } g2d.dispose(); } private static void paintBaselines(ComponentBounds bounds, Graphics2D g2d) { Rectangle clipBounds = g2d.getClipBounds(); Point location = bounds.getLocation(); g2d.setColor(COMPONENT_COLOR); g2d.drawLine(clipBounds.x, location.y + bounds.height, clipBounds.x + clipBounds.width, location.y + bounds.height); int hbaseline = bounds.horizontalBaseline; if (hbaseline >= 0) { g2d.setColor(BASE_COLOR); g2d.drawLine(clipBounds.x, location.y + hbaseline, clipBounds.x + clipBounds.width, location.y + hbaseline); } g2d.setColor(COMPONENT_COLOR); g2d.drawLine(location.x, clipBounds.y, location.x, clipBounds.y + clipBounds.height); int vbaseline = bounds.verticalBaseline; if (vbaseline >= 0) { g2d.setColor(BASE_COLOR); g2d.drawLine(location.x + vbaseline, clipBounds.y, location.x + vbaseline, clipBounds.y + clipBounds.height); } } @Override public void dispose() { removePainter(); } // steal own glass pane and using our private class ThiefGlassPane extends JComponent { private final Component realGlassPane; private ThiefGlassPane(Component realGlassPane) { this.realGlassPane = realGlassPane; setOpaque(false); } public Component getRealGlassPane() { return realGlassPane; } @Override protected void paintComponent(Graphics g) { if (g instanceof SneakyGraphics2D || isJBPopupMenu()) { return; } executePaint(SwingUtilities.getRoot(this), (Graphics2D) g); } private boolean isJBPopupMenu() { // fixme JBPopupMenu[97] has own timer, that repaints only own menu Container contentPane = SwingUtilities.getRootPane(this).getContentPane(); return contentPane.getComponentCount() == 1 && contentPane.getComponent(0) instanceof JBPopupMenu; } } }
apache-2.0
mocc/bookkeeper-lab
hedwig-server/src/test/java/org/apache/hedwig/server/persistence/TestBookKeeperPersistenceManagerBlackBox.java
2924
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hedwig.server.persistence;

import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;

import junit.framework.Test;
import junit.framework.TestSuite;

import org.junit.After;
import org.junit.Before;

import org.apache.hedwig.server.common.ServerConfiguration;
import org.apache.hedwig.server.meta.MetadataManagerFactory;
import org.apache.hedwig.server.topics.TrivialOwnAllTopicManager;

public class TestBookKeeperPersistenceManagerBlackBox extends TestPersistenceManagerBlackBox {

    BookKeeperTestBase bktb;
    private final int numBookies = 3;

    MetadataManagerFactory metadataManagerFactory = null;

    @Override
    @Before
    protected void setUp() throws Exception {
        // We need to setUp this class first since the super.setUp() method will
        // need the BookKeeperTestBase to be instantiated.
        bktb = new BookKeeperTestBase(numBookies);
        bktb.setUp();
        super.setUp();
    }

    @Override
    @After
    protected void tearDown() throws Exception {
        bktb.tearDown();
        super.tearDown();
        if (null != metadataManagerFactory) {
            metadataManagerFactory.shutdown();
        }
    }

    @Override
    long getLowestSeqId() {
        return 1;
    }

    @Override
    PersistenceManager instantiatePersistenceManager() throws Exception {
        ServerConfiguration conf = new ServerConfiguration();
        ScheduledExecutorService scheduler = Executors.newScheduledThreadPool(1);

        metadataManagerFactory = MetadataManagerFactory.newMetadataManagerFactory(conf, bktb.getZooKeeperClient());

        return new BookkeeperPersistenceManager(bktb.bk, metadataManagerFactory,
                new TrivialOwnAllTopicManager(conf, scheduler), conf, scheduler);
    }

    @Override
    public long getExpectedSeqId(int numPublished) {
        return numPublished;
    }

    public static Test suite() {
        return new TestSuite(TestBookKeeperPersistenceManagerBlackBox.class);
    }
}
apache-2.0
xieyufish/note
代码/spring-cloud/spring-cloud-eureka-client/src/main/java/com/xieyu/eureka/client/DcController.java
853
package com.xieyu.eureka.client;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.cloud.client.discovery.DiscoveryClient;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RestController;

/**
 * <b>Class description:</b>
 * <pre>
 *
 * </pre>
 * <b>Author:</b> xieyu <br>
 * <b>Project:</b> spring-cloud-eureka-client <br>
 * <b>Created:</b> 2019-03-18 16:36:55 <br>
 * <b>Revision history:</b> <br>
 * <b>Current version:</b> 1.0.0 <br>
 * <b>References:</b>
 */
@RestController
public class DcController {

    @Autowired
    private DiscoveryClient discoveryClient;

    @GetMapping("/dc")
    public String dc() {
        String services = "Services: " + discoveryClient.getServices();
        System.out.println(services);
        return services;
    }
}
apache-2.0
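For context on the DcController record above: the controller just returns the registered service list from DiscoveryClient at /dc. The following is a minimal, hypothetical Java sketch of a caller consuming that endpoint with Spring's RestTemplate; the localhost:8080 address, the class name, and the variable names are illustrative assumptions and are not part of the original file.

import org.springframework.web.client.RestTemplate;

public class DcClientExample {
    public static void main(String[] args) {
        // Assumption: the eureka-client application above is running locally on port 8080.
        RestTemplate rest = new RestTemplate();
        // Fetch the plain-text service list produced by DcController#dc().
        String services = rest.getForObject("http://localhost:8080/dc", String.class);
        System.out.println(services); // e.g. "Services: [spring-cloud-eureka-client]"
    }
}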
google/access-bridge-explorer
src/WindowsAccessBridgeInterop/VersionNumber.cs
809
// Copyright 2016 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

namespace WindowsAccessBridgeInterop {
  public static class VersionNumber {
    public const string Product = "1.1.0";
    public const string File = Product + ".0";
  }
}
apache-2.0
Grastveit/NEST
src/Tests/Elasticsearch.Net.Integration.Yaml/create/50_parent.yaml.cs
1584
using System;
using System.Collections.Generic;
using System.Linq;
using NUnit.Framework;

namespace Elasticsearch.Net.Integration.Yaml.Create8
{
	public partial class Create8YamlTests
	{
		[NCrunch.Framework.ExclusivelyUses("ElasticsearchYamlTests")]
		public class Parent1Tests : YamlTestsBase
		{
			[Test]
			public void Parent1Test()
			{
				//do indices.create
				_body = new { mappings = new { test = new { _parent = new { type = "foo" } } } };
				this.Do(() => _client.IndicesCreate("test_1", _body));

				//do cluster.health
				this.Do(() => _client.ClusterHealth(nv => nv
					.AddQueryString("wait_for_status", @"yellow")
				));

				//do create
				_body = new { foo = "bar" };
				this.Do(() => _client.Index("test_1", "test", "1", _body, nv => nv
					.AddQueryString("op_type", @"create")
				), shouldCatch: @"/RoutingMissingException/");

				//do create
				_body = new { foo = "bar" };
				this.Do(() => _client.Index("test_1", "test", "1", _body, nv => nv
					.AddQueryString("parent", 5)
					.AddQueryString("op_type", @"create")
				));

				//do get
				this.Do(() => _client.Get("test_1", "test", "1", nv => nv
					.AddQueryString("parent", 5)
					.AddQueryString("fields", new [] { @"_parent", @"_routing" })
				));

				//match _response._id:
				this.IsMatch(_response._id, 1);

				//match _response.fields._parent:
				this.IsMatch(_response.fields._parent, 5);

				//match _response.fields._routing:
				this.IsMatch(_response.fields._routing, 5);
			}
		}
	}
}
apache-2.0
layerhq/instastart-identity-provider
db/migrate/20161116231406_create_sessions.rb
216
class CreateSessions < ActiveRecord::Migration[5.0]
  def change
    create_table :sessions do |t|
      t.integer :user_id
      t.string :token

      t.timestamps
    end
    add_index :sessions, :token
  end
end
apache-2.0
markus1978/citygml4emf
de.hub.citygml.emf.ecore/src/net/opengis/citygml/ExternalObjectReferenceType.java
2633
/** * <copyright> * </copyright> * * $Id$ */ package net.opengis.citygml; import org.eclipse.emf.ecore.EObject; /** * <!-- begin-user-doc --> * A representation of the model object '<em><b>External Object Reference Type</b></em>'. * <!-- end-user-doc --> * * <p> * The following features are supported: * <ul> * <li>{@link net.opengis.citygml.ExternalObjectReferenceType#getName <em>Name</em>}</li> * <li>{@link net.opengis.citygml.ExternalObjectReferenceType#getUri <em>Uri</em>}</li> * </ul> * </p> * * @see net.opengis.citygml.CitygmlPackage#getExternalObjectReferenceType() * @model extendedMetaData="name='ExternalObjectReferenceType' kind='elementOnly'" * @generated */ public interface ExternalObjectReferenceType extends EObject { /** * Returns the value of the '<em><b>Name</b></em>' attribute. * <!-- begin-user-doc --> * <p> * If the meaning of the '<em>Name</em>' attribute isn't clear, * there really should be more of a description here... * </p> * <!-- end-user-doc --> * @return the value of the '<em>Name</em>' attribute. * @see #setName(String) * @see net.opengis.citygml.CitygmlPackage#getExternalObjectReferenceType_Name() * @model dataType="org.eclipse.emf.ecore.xml.type.String" * extendedMetaData="kind='element' name='name' namespace='##targetNamespace'" * @generated */ String getName(); /** * Sets the value of the '{@link net.opengis.citygml.ExternalObjectReferenceType#getName <em>Name</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @param value the new value of the '<em>Name</em>' attribute. * @see #getName() * @generated */ void setName(String value); /** * Returns the value of the '<em><b>Uri</b></em>' attribute. * <!-- begin-user-doc --> * <p> * If the meaning of the '<em>Uri</em>' attribute isn't clear, * there really should be more of a description here... * </p> * <!-- end-user-doc --> * @return the value of the '<em>Uri</em>' attribute. * @see #setUri(String) * @see net.opengis.citygml.CitygmlPackage#getExternalObjectReferenceType_Uri() * @model dataType="org.eclipse.emf.ecore.xml.type.AnyURI" * extendedMetaData="kind='element' name='uri' namespace='##targetNamespace'" * @generated */ String getUri(); /** * Sets the value of the '{@link net.opengis.citygml.ExternalObjectReferenceType#getUri <em>Uri</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @param value the new value of the '<em>Uri</em>' attribute. * @see #getUri() * @generated */ void setUri(String value); } // ExternalObjectReferenceType
apache-2.0
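As a usage sketch for the EMF interface in the ExternalObjectReferenceType record above: the snippet below is hypothetical and assumes the standard EMF-generated factory for this package is named CitygmlFactory and exposes an eINSTANCE singleton with a createExternalObjectReferenceType() method; none of that is shown in the record, and the name and URI values are placeholders.

import net.opengis.citygml.CitygmlFactory;
import net.opengis.citygml.ExternalObjectReferenceType;

public class ExternalReferenceExample {
    public static ExternalObjectReferenceType build() {
        // Assumption: CitygmlFactory.eINSTANCE follows the usual EMF naming convention
        // for the generated factory of this package; it is not part of the record above.
        ExternalObjectReferenceType ref = CitygmlFactory.eINSTANCE.createExternalObjectReferenceType();
        ref.setName("Building_42");                           // placeholder external object name
        ref.setUri("http://example.org/citydb/building/42");  // placeholder external object URI
        return ref;
    }
}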
slamdata/quasar
qsu/src/main/scala/quasar/qsu/minimizers/FilterToCond.scala
4540
/* * Copyright 2014–2020 SlamData Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package quasar.qsu package minimizers import slamdata.Predef.{Map => SMap, _} import quasar.{RenderTreeT, Type} import quasar.common.effect.NameGenerator import quasar.contrib.iota._ import quasar.contrib.matryoshka._ import quasar.ejson.{CommonEJson, Str} import quasar.ejson.implicits._ import quasar.fp._ import quasar.qscript.{construction, HoleF, MFC, MonadPlannerErr, RecFreeS}, RecFreeS._ import quasar.qscript.MapFuncsCore.{Constant, Eq, TypeOf} import quasar.qsu.{QScriptUniform => QSU} import matryoshka.data.free._ import matryoshka.patterns.CoEnv import matryoshka.{BirecursiveT, Embed, EqualT, ShowT, delayEqual} import scalaz.{-\/, \/-, Equal, Monad} import scalaz.syntax.equal._ import scalaz.syntax.monad._ import scala.collection import scala.sys sealed abstract class FilterToCond[T[_[_]]: BirecursiveT: EqualT: ShowT: RenderTreeT] extends Minimizer[T] with MraPhase[T] { import MinimizeAutoJoins._ import QSUGraph.Extractors._ implicit def PEqual: Equal[P] private val func = construction.Func[T] private val recFunc = construction.RecFunc[T] def couldApplyTo(candidates: List[QSUGraph]): Boolean = { val (filters, _) = candidates partition { case QSFilter(_, _) => true case _ => false } filters.nonEmpty } def extract[ G[_]: Monad: NameGenerator: MonadPlannerErr: RevIdxM: MinStateM[T, P, ?[_]]]( qgraph: QSUGraph): Option[(QSUGraph, (QSUGraph, FreeMap) => G[QSUGraph])] = qgraph match { case QSFilter(src, predicate) => // this is where we do the actual rewriting def rebuild(src: QSUGraph, fm: FreeMap): G[QSUGraph] = { updateGraph[T, G]( qprov, QSU.Map(src.root, rewriteFilter(predicate.linearize, fm).asRec)).map(_ :++ src) } Some((src, rebuild _)) case qgraph => def rebuild(src: QSUGraph, fm: FreeMap): G[QSUGraph] = { if (fm === HoleF[T]) { src.point[G] } else { // this case should never happen updateGraph[T, G](qprov, QSU.Map(src.root, fm.asRec)) map { rewritten => rewritten :++ src } } } Some((qgraph, rebuild _)) } def apply[ G[_]: Monad: NameGenerator: MonadPlannerErr: RevIdxM: MinStateM[T, P, ?[_]]]( qgraph: QSUGraph, singleSource: QSUGraph, candidates: List[QSUGraph], fm: FreeMapA[Int]): G[Option[(QSUGraph, QSUGraph)]] = { val fms: SMap[Int, RecFreeMap] = candidates.zipWithIndex.map({ case (Map(parent, fm), i) if parent.root === singleSource.root => i -> fm case (parent, i) if parent.root === singleSource.root => i -> recFunc.Hole case _ => sys.error("assertion error") })(collection.breakOut) val collapsed = fm.asRec.flatMap(fms) updateGraph[T, G](qprov, QSU.Map(singleSource.root, collapsed)) map { g => val back = g :++ singleSource Some((back, back)) } } /// private def rewriteFilter(predicate: FreeMap, fm: FreeMap): FreeMap = { val nameToType: SMap[String, Type] = SMap( "number" -> Type.Numeric, "string" -> Type.Str, "boolean" -> Type.Bool, "offsetdatetime" -> Type.OffsetDateTime, "null" -> Type.Null) predicate.resume match { case -\/(MFC(Eq( Embed(CoEnv(\/-(MFC(TypeOf(Embed(CoEnv(-\/(_)))))))), 
Embed(CoEnv(\/-(MFC(Constant(Embed(CommonEJson(Str(expectedType))))))))))) => nameToType.get(expectedType) .fold(func.Cond(predicate.as(fm).join, fm, func.Undefined))(func.Typecheck(fm, _)) case _ => func.Cond(predicate.as(fm).join, fm, func.Undefined) } } } object FilterToCond { def apply[T[_[_]]: BirecursiveT: EqualT: ShowT: RenderTreeT]( qp: QProv[T])(implicit P: Equal[qp.P]) : Minimizer.Aux[T, qp.P] = new FilterToCond[T] { val qprov: qp.type = qp val PEqual = P } }
apache-2.0
EsriUK/wab-dijit-drilldown
Drilldown/Search/setting/nls/pt-br/strings.js
1783
define( ({ "instruction": "Adicione e configure serviços de geocódigo ou camadas de feição como fontes de pesquisa. Estas fontes especificadas determinam o que é pesquisável dentro da caixa de pesquisa.", "add": "Adicionar Fonte de Pesquisa", "addGeocoder": "Adicionar Geocodificador", "geocoder": "Geocodificador", "setLayerSource": "Configurar Origem da Camada", "setGeocoderURL": "Configurar URL do Geocodificador", "searchableLayer": "Camada de Feição", "name": "Nome", "countryCode": "Código do País", "countryCodeEg": "por exemplo ", "countryCodeHint": "Deixar este valor em branco pesquisará todos os países", "locatorUrl": "URL do Geocodificador", "locatorName": "Nome do Geocodificador", "locatorExample": "Exemplo", "locatorWarning": "Esta versão do serviço de geocodificação não é suportado. O widget suporta serviço de geocodificação 00.0 e superior.", "locatorTips": "As sugestões não estão disponíveis, pois o serviço de geocodificação não suporta o recursos de sugestão.", "layerSource": "Origem da Camada", "searchLayerTips": "As sugestões não estão disponíveis, pois o serviço da feição não suporta o recursos de paginação.", "placeholder": "Texto do Placeholder", "searchFields": "Pesquisar Campos", "displayField": "Campo de Visualização", "exactMatch": "Combinação Exata", "maxResults": "Máximo de Resultados", "setSearchFields": "Configurar Campos de Pesquisa", "set": "Configurar", "fieldSearchable": "pesquisável", "fieldName": "Nome", "fieldAlias": "Nome Alternativo", "ok": "OK", "cancel": "Cancelar", "invalidUrlTip": "O URL que você inseriu é inválido ou inacessível." }) );
apache-2.0
XamarinGarage/GiTracker
Droid/Resources/Resource.designer.cs
12108
#pragma warning disable 1591 //------------------------------------------------------------------------------ // <auto-generated> // This code was generated by a tool. // Runtime Version:4.0.30319.42000 // // Changes to this file may cause incorrect behavior and will be lost if // the code is regenerated. // </auto-generated> //------------------------------------------------------------------------------ [assembly: global::Android.Runtime.ResourceDesignerAttribute("GiTracker.Droid.Resource", IsApplication=true)] namespace GiTracker.Droid { [System.CodeDom.Compiler.GeneratedCodeAttribute("Xamarin.Android.Build.Tasks", "1.0.0.0")] public partial class Resource { static Resource() { global::Android.Runtime.ResourceIdManager.UpdateIdValues(); } public static void UpdateIdValues() { global::Acr.Support.Android.Resource.String.ApplicationName = global::GiTracker.Droid.Resource.String.ApplicationName; global::Acr.Support.Android.Resource.String.Hello = global::GiTracker.Droid.Resource.String.Hello; global::AndroidHUD.Resource.Attribute.ahBarColor = global::GiTracker.Droid.Resource.Attribute.ahBarColor; global::AndroidHUD.Resource.Attribute.ahBarLength = global::GiTracker.Droid.Resource.Attribute.ahBarLength; global::AndroidHUD.Resource.Attribute.ahBarWidth = global::GiTracker.Droid.Resource.Attribute.ahBarWidth; global::AndroidHUD.Resource.Attribute.ahCircleColor = global::GiTracker.Droid.Resource.Attribute.ahCircleColor; global::AndroidHUD.Resource.Attribute.ahDelayMillis = global::GiTracker.Droid.Resource.Attribute.ahDelayMillis; global::AndroidHUD.Resource.Attribute.ahRadius = global::GiTracker.Droid.Resource.Attribute.ahRadius; global::AndroidHUD.Resource.Attribute.ahRimColor = global::GiTracker.Droid.Resource.Attribute.ahRimColor; global::AndroidHUD.Resource.Attribute.ahRimWidth = global::GiTracker.Droid.Resource.Attribute.ahRimWidth; global::AndroidHUD.Resource.Attribute.ahSpinSpeed = global::GiTracker.Droid.Resource.Attribute.ahSpinSpeed; global::AndroidHUD.Resource.Attribute.ahText = global::GiTracker.Droid.Resource.Attribute.ahText; global::AndroidHUD.Resource.Attribute.ahTextColor = global::GiTracker.Droid.Resource.Attribute.ahTextColor; global::AndroidHUD.Resource.Attribute.ahTextSize = global::GiTracker.Droid.Resource.Attribute.ahTextSize; global::AndroidHUD.Resource.Drawable.ic_errorstatus = global::GiTracker.Droid.Resource.Drawable.ic_errorstatus; global::AndroidHUD.Resource.Drawable.ic_successstatus = global::GiTracker.Droid.Resource.Drawable.ic_successstatus; global::AndroidHUD.Resource.Drawable.roundedbg = global::GiTracker.Droid.Resource.Drawable.roundedbg; global::AndroidHUD.Resource.Drawable.roundedbgdark = global::GiTracker.Droid.Resource.Drawable.roundedbgdark; global::AndroidHUD.Resource.Id.loadingImage = global::GiTracker.Droid.Resource.Id.loadingImage; global::AndroidHUD.Resource.Id.loadingProgressBar = global::GiTracker.Droid.Resource.Id.loadingProgressBar; global::AndroidHUD.Resource.Id.loadingProgressWheel = global::GiTracker.Droid.Resource.Id.loadingProgressWheel; global::AndroidHUD.Resource.Id.textViewStatus = global::GiTracker.Droid.Resource.Id.textViewStatus; global::AndroidHUD.Resource.Layout.loading = global::GiTracker.Droid.Resource.Layout.loading; global::AndroidHUD.Resource.Layout.loadingimage = global::GiTracker.Droid.Resource.Layout.loadingimage; global::AndroidHUD.Resource.Layout.loadingprogress = global::GiTracker.Droid.Resource.Layout.loadingprogress; global::AndroidHUD.Resource.String.library_name = global::GiTracker.Droid.Resource.String.library_name; 
global::AndroidHUD.Resource.Styleable.ProgressWheel = global::GiTracker.Droid.Resource.Styleable.ProgressWheel; global::AndroidHUD.Resource.Styleable.ProgressWheel_ahBarColor = global::GiTracker.Droid.Resource.Styleable.ProgressWheel_ahBarColor; global::AndroidHUD.Resource.Styleable.ProgressWheel_ahBarLength = global::GiTracker.Droid.Resource.Styleable.ProgressWheel_ahBarLength; global::AndroidHUD.Resource.Styleable.ProgressWheel_ahBarWidth = global::GiTracker.Droid.Resource.Styleable.ProgressWheel_ahBarWidth; global::AndroidHUD.Resource.Styleable.ProgressWheel_ahCircleColor = global::GiTracker.Droid.Resource.Styleable.ProgressWheel_ahCircleColor; global::AndroidHUD.Resource.Styleable.ProgressWheel_ahDelayMillis = global::GiTracker.Droid.Resource.Styleable.ProgressWheel_ahDelayMillis; global::AndroidHUD.Resource.Styleable.ProgressWheel_ahRadius = global::GiTracker.Droid.Resource.Styleable.ProgressWheel_ahRadius; global::AndroidHUD.Resource.Styleable.ProgressWheel_ahRimColor = global::GiTracker.Droid.Resource.Styleable.ProgressWheel_ahRimColor; global::AndroidHUD.Resource.Styleable.ProgressWheel_ahRimWidth = global::GiTracker.Droid.Resource.Styleable.ProgressWheel_ahRimWidth; global::AndroidHUD.Resource.Styleable.ProgressWheel_ahSpinSpeed = global::GiTracker.Droid.Resource.Styleable.ProgressWheel_ahSpinSpeed; global::AndroidHUD.Resource.Styleable.ProgressWheel_ahText = global::GiTracker.Droid.Resource.Styleable.ProgressWheel_ahText; global::AndroidHUD.Resource.Styleable.ProgressWheel_ahTextColor = global::GiTracker.Droid.Resource.Styleable.ProgressWheel_ahTextColor; global::AndroidHUD.Resource.Styleable.ProgressWheel_ahTextSize = global::GiTracker.Droid.Resource.Styleable.ProgressWheel_ahTextSize; global::Splat.Resource.String.library_name = global::GiTracker.Droid.Resource.String.library_name; global::Xamarin.Forms.Platform.Resource.String.ApplicationName = global::GiTracker.Droid.Resource.String.ApplicationName; global::Xamarin.Forms.Platform.Resource.String.Hello = global::GiTracker.Droid.Resource.String.Hello; } public partial class Attribute { // aapt resource value: 0x7f010003 public const int ahBarColor = 2130771971; // aapt resource value: 0x7f01000b public const int ahBarLength = 2130771979; // aapt resource value: 0x7f01000a public const int ahBarWidth = 2130771978; // aapt resource value: 0x7f010008 public const int ahCircleColor = 2130771976; // aapt resource value: 0x7f010007 public const int ahDelayMillis = 2130771975; // aapt resource value: 0x7f010009 public const int ahRadius = 2130771977; // aapt resource value: 0x7f010004 public const int ahRimColor = 2130771972; // aapt resource value: 0x7f010005 public const int ahRimWidth = 2130771973; // aapt resource value: 0x7f010006 public const int ahSpinSpeed = 2130771974; // aapt resource value: 0x7f010000 public const int ahText = 2130771968; // aapt resource value: 0x7f010001 public const int ahTextColor = 2130771969; // aapt resource value: 0x7f010002 public const int ahTextSize = 2130771970; static Attribute() { global::Android.Runtime.ResourceIdManager.UpdateIdValues(); } private Attribute() { } } public partial class Drawable { // aapt resource value: 0x7f020000 public const int ic_errorstatus = 2130837504; // aapt resource value: 0x7f020001 public const int ic_successstatus = 2130837505; // aapt resource value: 0x7f020002 public const int icon = 2130837506; // aapt resource value: 0x7f020003 public const int Octicon_Issue_Closed_Colored = 2130837507; // aapt resource value: 0x7f020004 public const int 
Octicon_Issue_Closed_White = 2130837508; // aapt resource value: 0x7f020005 public const int Octicon_Issue_Open_Colored = 2130837509; // aapt resource value: 0x7f020006 public const int Octicon_Issue_Open_White = 2130837510; // aapt resource value: 0x7f020007 public const int Octicon_Issue_Unknown_Colored = 2130837511; // aapt resource value: 0x7f020008 public const int Octicon_Issue_Unknown_White = 2130837512; // aapt resource value: 0x7f020009 public const int Octicon_PullRequest_Colored = 2130837513; // aapt resource value: 0x7f02000a public const int Octicon_PullRequest_White = 2130837514; // aapt resource value: 0x7f02000b public const int Octicon_Repo_Colored = 2130837515; // aapt resource value: 0x7f02000c public const int roundedbg = 2130837516; // aapt resource value: 0x7f02000d public const int roundedbgdark = 2130837517; // aapt resource value: 0x7f02000e public const int ToolBar_Add = 2130837518; static Drawable() { global::Android.Runtime.ResourceIdManager.UpdateIdValues(); } private Drawable() { } } public partial class Id { // aapt resource value: 0x7f060002 public const int loadingImage = 2131099650; // aapt resource value: 0x7f060000 public const int loadingProgressBar = 2131099648; // aapt resource value: 0x7f060003 public const int loadingProgressWheel = 2131099651; // aapt resource value: 0x7f060001 public const int textViewStatus = 2131099649; static Id() { global::Android.Runtime.ResourceIdManager.UpdateIdValues(); } private Id() { } } public partial class Layout { // aapt resource value: 0x7f030000 public const int loading = 2130903040; // aapt resource value: 0x7f030001 public const int loadingimage = 2130903041; // aapt resource value: 0x7f030002 public const int loadingprogress = 2130903042; static Layout() { global::Android.Runtime.ResourceIdManager.UpdateIdValues(); } private Layout() { } } public partial class String { // aapt resource value: 0x7f040001 public const int ApplicationName = 2130968577; // aapt resource value: 0x7f040000 public const int Hello = 2130968576; // aapt resource value: 0x7f040002 public const int library_name = 2130968578; static String() { global::Android.Runtime.ResourceIdManager.UpdateIdValues(); } private String() { } } public partial class Style { // aapt resource value: 0x7f050000 public const int GiTrackerTheme = 2131034112; static Style() { global::Android.Runtime.ResourceIdManager.UpdateIdValues(); } private Style() { } } public partial class Styleable { public static int[] ProgressWheel = new int[] { 2130771968, 2130771969, 2130771970, 2130771971, 2130771972, 2130771973, 2130771974, 2130771975, 2130771976, 2130771977, 2130771978, 2130771979}; // aapt resource value: 3 public const int ProgressWheel_ahBarColor = 3; // aapt resource value: 11 public const int ProgressWheel_ahBarLength = 11; // aapt resource value: 10 public const int ProgressWheel_ahBarWidth = 10; // aapt resource value: 8 public const int ProgressWheel_ahCircleColor = 8; // aapt resource value: 7 public const int ProgressWheel_ahDelayMillis = 7; // aapt resource value: 9 public const int ProgressWheel_ahRadius = 9; // aapt resource value: 4 public const int ProgressWheel_ahRimColor = 4; // aapt resource value: 5 public const int ProgressWheel_ahRimWidth = 5; // aapt resource value: 6 public const int ProgressWheel_ahSpinSpeed = 6; // aapt resource value: 0 public const int ProgressWheel_ahText = 0; // aapt resource value: 1 public const int ProgressWheel_ahTextColor = 1; // aapt resource value: 2 public const int ProgressWheel_ahTextSize = 2; static Styleable() 
{ global::Android.Runtime.ResourceIdManager.UpdateIdValues(); } private Styleable() { } } } } #pragma warning restore 1591
apache-2.0
rockmkd/datacollector
basic-lib/src/main/java/com/streamsets/pipeline/stage/origin/remote/RemoteDownloadSource.java
23087
/* * Copyright 2017 StreamSets Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.streamsets.pipeline.stage.origin.remote; import com.google.common.base.Optional; import com.streamsets.pipeline.api.BatchMaker; import com.streamsets.pipeline.api.FileRef; import com.streamsets.pipeline.api.Record; import com.streamsets.pipeline.api.StageException; import com.streamsets.pipeline.api.base.BaseSource; import com.streamsets.pipeline.api.base.OnRecordErrorException; import com.streamsets.pipeline.api.el.ELEval; import com.streamsets.pipeline.api.el.ELVars; import com.streamsets.pipeline.api.ext.io.ObjectLengthException; import com.streamsets.pipeline.api.ext.io.OverrunException; import com.streamsets.pipeline.api.impl.Utils; import com.streamsets.pipeline.api.lineage.EndPointType; import com.streamsets.pipeline.api.lineage.LineageEvent; import com.streamsets.pipeline.api.lineage.LineageEventType; import com.streamsets.pipeline.api.lineage.LineageSpecificAttribute; import com.streamsets.pipeline.config.DataFormat; import com.streamsets.pipeline.lib.io.fileref.FileRefUtil; import com.streamsets.pipeline.lib.parser.DataParser; import com.streamsets.pipeline.lib.parser.DataParserException; import com.streamsets.pipeline.lib.parser.RecoverableDataParserException; import com.streamsets.pipeline.stage.common.DefaultErrorRecordHandler; import com.streamsets.pipeline.stage.common.ErrorRecordHandler; import com.streamsets.pipeline.stage.common.HeaderAttributeConstants; import net.schmizz.sshj.sftp.SFTPException; import org.apache.commons.io.FilenameUtils; import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.ws.rs.core.UriBuilder; import java.io.BufferedOutputStream; import java.io.File; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.net.URI; import java.nio.channels.ClosedByInterruptException; import java.nio.file.FileSystemException; import java.util.Comparator; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.NavigableSet; import java.util.TreeSet; import java.util.UUID; import static com.streamsets.pipeline.stage.origin.lib.DataFormatParser.DATA_FORMAT_CONFIG_PREFIX; public class RemoteDownloadSource extends BaseSource implements FileQueueChecker { private static final Logger LOG = LoggerFactory.getLogger(RemoteDownloadSource.class); private static final String CONF_PREFIX = "conf."; private static final String REMOTE_ADDRESS_CONF = CONF_PREFIX + "remoteAddress"; private static final String MINUS_ONE = "-1"; private static final String FTP_SCHEME = "ftp"; private static final String SFTP_SCHEME = "sftp"; static final String NOTHING_READ = "null"; static final String REMOTE_URI = "remoteUri"; static final String CONTENT_TYPE = "contentType"; static final String CONTENT_ENCODING = "contentEncoding"; private final RemoteDownloadConfigBean conf; 
private final File errorArchive; private final byte[] moveBuffer; private RemoteFile next = null; private ELEval rateLimitElEval; private ELVars rateLimitElVars; //By default true so, between pipeline restarts we can always trigger event. private boolean canTriggerNoMoreDataEvent = true; private long noMoreDataRecordCount = 0; private long noMoreDataErrorCount = 0; private long noMoreDataFileCount = 0; private long perFileRecordCount = 0; private long perFileErrorCount = 0; private final NavigableSet<RemoteFile> fileQueue = new TreeSet<>(new Comparator<RemoteFile>() { @Override public int compare(RemoteFile f1, RemoteFile f2) { if (f1.getLastModified() < f2.getLastModified()) { return -1; } else if (f1.getLastModified() > f2.getLastModified()) { return 1; } else { return f1.getFilePath().compareTo(f2.getFilePath()); } } }); private URI remoteURI; private volatile Offset currentOffset = null; private InputStream currentStream = null; private DataParser parser; private ErrorRecordHandler errorRecordHandler; private FileFilter fileFilter; private RemoteDownloadSourceDelegate delegate; public RemoteDownloadSource(RemoteDownloadConfigBean conf) { this.conf = conf; if (conf.errorArchiveDir != null && !conf.errorArchiveDir.isEmpty()) { this.errorArchive = new File(conf.errorArchiveDir); this.moveBuffer = new byte[64 * 1024]; } else { this.errorArchive = null; this.moveBuffer = null; } } @Override public List<ConfigIssue> init() { List<ConfigIssue> issues = super.init(); errorRecordHandler = new DefaultErrorRecordHandler(getContext()); conf.dataFormatConfig.checkForInvalidAvroSchemaLookupMode( conf.dataFormat, "conf.dataFormatConfig", getContext(), issues ); conf.dataFormatConfig.init( getContext(), conf.dataFormat, Groups.REMOTE.getLabel(), DATA_FORMAT_CONFIG_PREFIX, issues ); try { this.remoteURI = new URI(conf.remoteAddress); } catch (Exception ex) { issues.add( getContext().createConfigIssue( Groups.REMOTE.getLabel(), REMOTE_ADDRESS_CONF, Errors.REMOTE_01, conf.remoteAddress)); } if (issues.isEmpty()) { if (remoteURI.getScheme().equals(FTP_SCHEME)) { int port = remoteURI.getPort(); if (port == -1) { port = 21; } delegate = new FTPRemoteDownloadSourceDelegate(conf); remoteURI = UriBuilder.fromUri(remoteURI).port(port).build(); initAndConnect(issues); } else if (remoteURI.getScheme().equals(SFTP_SCHEME)) { int port = remoteURI.getPort(); if (port == -1) { port = 22; } remoteURI = UriBuilder.fromUri(remoteURI).port(port).build(); delegate = new SFTPRemoteDownloadSourceDelegate(conf); initAndConnect(issues); } else { issues.add( getContext().createConfigIssue( Groups.REMOTE.getLabel(), REMOTE_ADDRESS_CONF, Errors.REMOTE_15, conf.remoteAddress)); } } return issues; } private void initAndConnect(List<ConfigIssue> issues) { try { delegate.initAndConnect(issues, getContext(), remoteURI); } catch (IOException ex) { issues.add( getContext().createConfigIssue( Groups.REMOTE.getLabel(), REMOTE_ADDRESS_CONF, Errors.REMOTE_08, conf.remoteAddress, ex.getMessage() )); LOG.error("Error trying to login to remote host", ex); } validateFilePattern(issues); if (issues.isEmpty()) { rateLimitElEval = FileRefUtil.createElEvalForRateLimit(getContext()); rateLimitElVars = getContext().createELVars(); } } private void validateFilePattern(List<ConfigIssue> issues) { if (conf.filePattern == null || conf.filePattern.trim().isEmpty()) { issues.add( getContext().createConfigIssue( Groups.REMOTE.getLabel(), CONF_PREFIX + "filePattern", Errors.REMOTE_13, conf.filePattern)); } else { try { fileFilter = new 
FileFilter(conf.filePatternMode, conf.filePattern); } catch (IllegalArgumentException ex) { issues.add( getContext().createConfigIssue( Groups.REMOTE.getLabel(), CONF_PREFIX + "filePattern", Errors.REMOTE_14, conf.filePatternMode, conf.filePattern, ex.toString(), ex )); } } } @Override public String produce(String lastSourceOffset, int maxBatchSize, BatchMaker batchMaker) throws StageException { final int batchSize = Math.min(maxBatchSize, conf.basic.maxBatchSize); // Just started up, currentOffset has not yet been set. // This method returns NOTHING_READ when only no events have ever been read if (currentOffset == null) { if(StringUtils.isEmpty(lastSourceOffset) || NOTHING_READ.equals(lastSourceOffset)) { LOG.debug("Detected invalid source offset '{}'", lastSourceOffset); // Use initial file if(!StringUtils.isEmpty(conf.initialFileToProcess)) { try { currentOffset = delegate.createOffset(conf.initialFileToProcess); } catch (IOException e) { throw new StageException(Errors.REMOTE_16, conf.initialFileToProcess, e.toString(), e); } } // Otherwise start from beginning } else { // We have valid offset currentOffset = new Offset(lastSourceOffset); } } String offset = NOTHING_READ; try { Optional<RemoteFile> nextOpt = null; // Time to read the next file if (currentStream == null) { nextOpt = getNextFile(); if (nextOpt.isPresent()) { next = nextOpt.get(); noMoreDataFileCount++; // When starting up, reset to offset 0 of the file picked up for read only if: // -- we are starting up for the very first time, hence current offset is null // -- or the next file picked up for reads is not the same as the one we left off at (because we may have completed that one). if (currentOffset == null || !currentOffset.fileName.equals(next.getFilePath())) { perFileRecordCount = 0; perFileErrorCount = 0; LOG.debug("Sending New File Event. File: {}", next.getFilePath()); RemoteDownloadSourceEvents.NEW_FILE.create(getContext()).with("filepath", next.getFilePath()).createAndSend(); sendLineageEvent(next); currentOffset = delegate.createOffset(next.getFilePath()); } if (conf.dataFormat == DataFormat.WHOLE_FILE) { Map<String, Object> metadata = new HashMap<>(7); long size = delegate.populateMetadata(next.getFilePath(), metadata); metadata.put(HeaderAttributeConstants.FILE, next.getFilePath()); metadata.put(HeaderAttributeConstants.FILE_NAME, FilenameUtils.getName(next.getFilePath())); metadata.put(REMOTE_URI, remoteURI.toString()); FileRef fileRef = new RemoteSourceFileRef.Builder() .bufferSize(conf.dataFormatConfig.wholeFileMaxObjectLen) .totalSizeInBytes(size) .rateLimit(FileRefUtil.evaluateAndGetRateLimit(rateLimitElEval, rateLimitElVars, conf.dataFormatConfig.rateLimit)) .remoteFile(next) .remoteUri(remoteURI) .createMetrics(true) .build(); parser = conf.dataFormatConfig.getParserFactory().getParser(currentOffset.offsetStr, metadata, fileRef); } else { currentStream = next.createInputStream(); LOG.info("Started reading file: {}", next.getFilePath()); parser = conf.dataFormatConfig.getParserFactory().getParser( currentOffset.offsetStr, currentStream, currentOffset.getOffset()); } } else { //Only if we saw data after last trigger/after a pipeline restart, we will trigger no more data event if (canTriggerNoMoreDataEvent) { LOG.debug( "Sending No More Data event. 
Files:{}.Records:{}, Errors:{}", noMoreDataFileCount, noMoreDataRecordCount, noMoreDataErrorCount ); RemoteDownloadSourceEvents.NO_MORE_DATA.create(getContext()) .with("record-count", noMoreDataRecordCount) .with("error-count", noMoreDataErrorCount) .with("file-count", noMoreDataFileCount) .createAndSend(); noMoreDataErrorCount = 0; noMoreDataRecordCount = 0; noMoreDataFileCount = 0; canTriggerNoMoreDataEvent = false; } if (currentOffset == null) { return offset; } else { return currentOffset.offsetStr; } } } offset = addRecordsToBatch(batchSize, batchMaker, next); } catch (IOException | DataParserException ex) { // Don't retry reading this file since there can be no records produced. offset = MINUS_ONE; handleFatalException(ex, next); } finally { if (!NOTHING_READ.equals(offset) && currentOffset != null) { currentOffset.setOffset(offset); } } if (currentOffset != null) { return currentOffset.offsetStr; } return offset; } private String addRecordsToBatch(int maxBatchSize, BatchMaker batchMaker, RemoteFile remoteFile) throws IOException, StageException { String offset = NOTHING_READ; for (int i = 0; i < maxBatchSize; i++) { try { Record record = parser.parse(); if (record != null) { record.getHeader().setAttribute(REMOTE_URI, remoteURI.toString()); record.getHeader().setAttribute(HeaderAttributeConstants.FILE, remoteFile.getFilePath()); record.getHeader().setAttribute(HeaderAttributeConstants.FILE_NAME, FilenameUtils.getName(remoteFile.getFilePath()) ); record.getHeader().setAttribute( HeaderAttributeConstants.LAST_MODIFIED_TIME, String.valueOf(remoteFile.getLastModified()) ); record.getHeader().setAttribute(HeaderAttributeConstants.OFFSET, offset == null ? "0" : offset); batchMaker.addRecord(record); perFileRecordCount++; noMoreDataRecordCount++; canTriggerNoMoreDataEvent = true; offset = parser.getOffset(); } else { try { parser.close(); if (currentStream != null) { currentStream.close(); } LOG.debug( "Sending Finished File Event for {}.Records:{}, Errors:{}", next.getFilePath(), perFileRecordCount, perFileErrorCount ); RemoteDownloadSourceEvents.FINISHED_FILE.create(getContext()) .with("filepath", next.getFilePath()) .with("record-count", perFileRecordCount) .with("error-count", perFileErrorCount) .createAndSend(); } finally { parser = null; currentStream = null; next = null; } //We will return -1 for finished files (It might happen where we are the last offset and another parse // returns null, in that case empty batch is emitted) offset = MINUS_ONE; break; } } catch (RecoverableDataParserException ex) { // Propagate partially parsed record to error stream Record record = ex.getUnparsedRecord(); errorRecordHandler.onError(new OnRecordErrorException(record, ex.getErrorCode(), ex.getParams())); perFileErrorCount++; noMoreDataErrorCount++; //Even though we had an error in the data, we still saw some data canTriggerNoMoreDataEvent = true; } catch (ObjectLengthException ex) { errorRecordHandler.onError(Errors.REMOTE_02, currentOffset.fileName, offset, ex); //Even though we couldn't process data from the file, we still saw some data canTriggerNoMoreDataEvent = true; } } return offset; } private void moveFileToError(RemoteFile fileToMove) { if (fileToMove == null) { LOG.warn("No file to move to error, since no file is currently in-process"); return; } if (errorArchive != null) { int read; File errorFile = new File(errorArchive, fileToMove.getFilePath()); if (errorFile.exists()) { errorFile = new File(errorArchive, fileToMove.getFilePath() + "-" + UUID.randomUUID().toString()); 
LOG.info(fileToMove.getFilePath() + " is being written out as " + errorFile.getPath() + " as another file of the same name exists"); } try (InputStream is = fileToMove.createInputStream(); OutputStream os = new BufferedOutputStream(new FileOutputStream(errorFile))) { while ((read = is.read(moveBuffer)) != -1) { os.write(moveBuffer, 0, read); } } catch (Exception ex) { LOG.warn("Error while trying to write out error file to " + errorFile.getName()); } } } private void handleFatalException(Exception ex, RemoteFile next) throws StageException { if (ex instanceof FileSystemException) { LOG.info("FileSystemException '{}'", ex.getMessage()); } if (ex instanceof SFTPException) { LOG.info("SFTPException '{}'", ex.getMessage()); } if (next != null) { LOG.error("Error while attempting to parse file: " + next.getFilePath(), ex); } if (ex instanceof FileNotFoundException) { LOG.warn("File: {} was found in listing, but is not downloadable", next != null ? next.getFilePath() : "(null)", ex); } if (ex instanceof ClosedByInterruptException || ex.getCause() instanceof ClosedByInterruptException) { //If the pipeline was stopped, we may get a ClosedByInterruptException while reading avro data. //This is because the thread is interrupted when the pipeline is stopped. //Instead of sending the file to error, publish batch and move one. } else { try { if (parser != null) { parser.close(); } } catch (IOException ioe) { LOG.error("Error while closing parser", ioe); } finally { parser = null; } try { if (currentStream != null) { currentStream.close(); } } catch (IOException ioe) { LOG.error("Error while closing stream", ioe); } finally { currentStream = null; } String exOffset; if (ex instanceof OverrunException) { exOffset = String.valueOf(((OverrunException) ex).getStreamOffset()); } else { try { exOffset = (parser != null) ? parser.getOffset() : NOTHING_READ; } catch (IOException ex1) { exOffset = NOTHING_READ; } } switch (getContext().getOnErrorRecord()) { case DISCARD: break; case TO_ERROR: // we failed to produce a record, which leaves the input file in an unknown state. moveFileToError(next); break; case STOP_PIPELINE: if (currentOffset != null) { throw new StageException(Errors.REMOTE_04, currentOffset.fileName, exOffset, ex); } else { throw new StageException(Errors.REMOTE_05, ex); } default: throw new IllegalStateException(Utils.format("Unknown OnError value '{}'", getContext().getOnErrorRecord(), ex)); } } } private Optional<RemoteFile> getNextFile() throws IOException, StageException { if (fileQueue.isEmpty()) { queueFiles(); } return Optional.fromNullable(fileQueue.pollFirst()); } private void queueFiles() throws IOException, StageException { delegate.queueFiles(this, fileQueue, fileFilter); } @Override public boolean shouldQueue(RemoteFile remoteFile) { // Case: We started up for the first time, so anything we see must be queued if (currentOffset == null) { if (LOG.isTraceEnabled()) { LOG.trace("Initial file: {}", remoteFile.getFilePath()); } return true; } // We poll for new files only when fileQueue is empty, so we don't need to check if this file is in the queue. // The file can be in the fileQueue only if the file was already queued in this iteration - // which is not possible, since we are iterating through the children, // so this is the first time we are seeing the file. // Case: It is the same file as we were reading, but we have not read the whole thing, so queue it again // - recovering from a shutdown. 
if ((remoteFile.getFilePath().equals(currentOffset.fileName)) && !(currentOffset.getOffset().equals(MINUS_ONE))) { if (LOG.isTraceEnabled()) { LOG.trace("Offset not complete: {}. Re-queueing.", remoteFile.getFilePath()); } return true; } // Case: The file is newer than the last one we read/are reading, and its not the same last one if ((remoteFile.getLastModified() > currentOffset.timestamp) && !(remoteFile.getFilePath().equals(currentOffset.fileName))) { if (LOG.isTraceEnabled()) { LOG.trace("Updated file: {}", remoteFile.getFilePath()); } return true; } // Case: The file has the same timestamp as the last one we read, but is lexicographically higher, // and we have not queued it before. if ((remoteFile.getLastModified() == currentOffset.timestamp) && (remoteFile.getFilePath().compareTo(currentOffset.fileName) > 0)) { if (LOG.isTraceEnabled()) { LOG.trace("Same timestamp as currentOffset, lexicographically higher file: {}", remoteFile.getFilePath()); } return true; } // For all other things .. we don't add. return false; } @Override public void destroy() { LOG.info(Utils.format("Destroying {}", getInfo().getInstanceName())); try { IOUtils.closeQuietly(currentStream); IOUtils.closeQuietly(parser); if (delegate != null) { delegate.close(); } } catch (IOException ex) { LOG.warn("Error during destroy", ex); } finally { delegate = null; //This forces the use of same RemoteDownloadSource object //not to have dangling reference to old stream (which is closed) //Also forces to initialize the next in produce call. currentStream = null; parser = null; currentOffset = null; next = null; fileFilter = null; } } private void sendLineageEvent(RemoteFile next) { LineageEvent event = getContext().createLineageEvent(LineageEventType.ENTITY_READ); event.setSpecificAttribute(LineageSpecificAttribute.ENTITY_NAME, next.getFilePath()); event.setSpecificAttribute(LineageSpecificAttribute.ENDPOINT_TYPE, EndPointType.FTP.name()); event.setSpecificAttribute(LineageSpecificAttribute.DESCRIPTION, conf.filePattern); Map<String, String> props = new HashMap<>(); props.put("Resource URL", conf.remoteAddress); event.setProperties(props); getContext().publishLineageEvent(event); } }
apache-2.0
alump/BeforeUnload
beforeunload-demo/src/main/java/org/vaadin/alump/beforeunload/demo/BeforeUnloadDemoServlet.java
1153
/**
 * BeforeUnloadServlet.java (BeforeUnload)
 *
 * Copyright 2013 Vaadin Ltd, Sami Viitanen <alump@vaadin.org>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.vaadin.alump.beforeunload.demo;

import com.vaadin.annotations.VaadinServletConfiguration;
import com.vaadin.server.VaadinServlet;

import javax.servlet.annotation.WebServlet;

/**
 * Servlet of BeforeUnload Demo
 */
@WebServlet(value = "/*")
@VaadinServletConfiguration(productionMode = false, ui = BeforeUnloadDemoUI.class,
        widgetset = "org.vaadin.alump.beforeunload.demo.gwt.BeforeUnloadDemoWidgetSet")
public class BeforeUnloadDemoServlet extends VaadinServlet {
}
apache-2.0
CodeAndMagic/TypefaceLibrary
Library/src/main/java/org/codeandmagic/android/SourceSansProTextStyle.java
898
package org.codeandmagic.android;

/**
 * Implementation of {@link TextStyle} defining the possible values for the 'textStyle' attribute
 * using the SourceSansPro font.
 * Created by evelina on 17/01/2014.
 */
public enum SourceSansProTextStyle implements TextStyle {

    NORMAL("regular", "sourcesanspro/SourceSansPro-Regular.ttf"),
    LIGHT_ITALIC("lightItalic", "sourcesanspro/SourceSansPro-LightItalic.ttf"),
    SEMI_BOLD_ITALIC("semiBoldItalic", "sourcesanspro/SourceSansPro-SemiBoldItalic.ttf"),
    BLACK("black", "sourcesanspro/SourceSansPro-Black.ttf");

    private String mName;
    private String mFontName;

    SourceSansProTextStyle(String name, String fontName) {
        mName = name;
        mFontName = fontName;
    }

    @Override
    public String getFontName() {
        return mFontName;
    }

    @Override
    public String getName() {
        return mName;
    }
}
apache-2.0
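As a usage sketch for the enum in the SourceSansProTextStyle record above: the helper below shows how the asset path returned by getFontName() might be turned into an android.graphics.Typeface and applied to a view. The class name, the helper's shape, and applying the typeface to a TextView are illustrative assumptions, not part of the library's documented API.

import android.content.Context;
import android.graphics.Typeface;
import android.widget.TextView;

import org.codeandmagic.android.SourceSansProTextStyle;

public class TypefaceExample {
    // Hypothetical helper: resolves the font asset path from the enum value and
    // applies the resulting Typeface to the given TextView.
    public static void apply(Context context, TextView view, SourceSansProTextStyle style) {
        Typeface typeface = Typeface.createFromAsset(context.getAssets(), style.getFontName());
        view.setTypeface(typeface);
    }
}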