repo_name (string, 4–116 chars) | path (string, 4–379 chars) | size (string, 1–7 chars) | content (string, 3–1.05M chars) | license (string, 15 classes)
---|---|---|---|---|
bitclaw/netsuite-php
|
src/Classes/ManufacturingOperationTaskPredecessor.php
|
494
|
<?php namespace Fungku\NetSuite\Classes;
class ManufacturingOperationTaskPredecessor {
public $task;
public $type;
public $startDate;
public $endDate;
public $lagType;
public $lagAmount;
public $lagUnits;
static $paramtypesmap = array(
"task" => "RecordRef",
"type" => "ManufacturingOperationTaskPredecessorPredecessorType",
"startDate" => "dateTime",
"endDate" => "dateTime",
"lagType" => "ManufacturingLagType",
"lagAmount" => "integer",
"lagUnits" => "string",
);
}
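/*
 * Hedged usage sketch (not part of the original file): the class is a plain
 * data holder, and the static $paramtypesmap tells the SOAP layer which
 * NetSuite type each field should be serialized as. The values below are
 * hypothetical.
 *
 *   $pred = new ManufacturingOperationTaskPredecessor();
 *   $pred->lagType   = 'LAG_DAYS';
 *   $pred->lagAmount = 2;
 *   $soapType = ManufacturingOperationTaskPredecessor::$paramtypesmap['lagAmount']; // "integer"
 */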
|
apache-2.0
|
VHAINNOVATIONS/TheDailyPlan
|
LegacyApp/tdpWeb/src/main/java/gov/va/medora/mdws/emrsvc/TextTO.java
|
1379
|
package gov.va.medora.mdws.emrsvc;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for TextTO complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="TextTO">
* <complexContent>
* <extension base="{http://mdws.medora.va.gov/EmrSvc}AbstractTO">
* <sequence>
* <element name="text" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* </sequence>
* </extension>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "TextTO", propOrder = {
"text"
})
public class TextTO
extends AbstractTO
{
protected String text;
/**
* Gets the value of the text property.
*
* @return
* possible object is
* {@link String }
*
*/
public String getText() {
return text;
}
/**
* Sets the value of the text property.
*
* @param value
* allowed object is
* {@link String }
*
*/
public void setText(String value) {
this.text = value;
}
}
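// Hedged usage sketch (not part of the generated source): the class is a
// simple JAXB value holder, so usage is a getter/setter round trip;
// marshalling would normally go through a JAXBContext created for the
// generated package.
class TextTOUsageSketch {
    public static void main(String[] args) {
        TextTO note = new TextTO();
        note.setText("example note text");
        System.out.println(note.getText()); // prints "example note text"
    }
}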
|
apache-2.0
|
plasma-framework/plasma
|
plasma-metamodel/src/main/java/org/plasma/metamodel/adapter/ModelAdapter.java
|
14333
|
/**
* Copyright 2017 TerraMeta Software, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.plasma.metamodel.adapter;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.plasma.common.exception.ProvisioningException;
import org.plasma.metamodel.Alias;
import org.plasma.metamodel.Class;
import org.plasma.metamodel.ClassRef;
import org.plasma.metamodel.Enumeration;
import org.plasma.metamodel.EnumerationRef;
import org.plasma.metamodel.Model;
import org.plasma.metamodel.Package;
import org.plasma.metamodel.Property;
public class ModelAdapter implements ProvisioningModel {
private static Log log = LogFactory.getLog(ModelAdapter.class);
private Model model;
private Map<String, TypeAdapter> typeMap = new HashMap<String, TypeAdapter>();
private Map<String, TypeAdapter> physicalNameTypeMap = new HashMap<String, TypeAdapter>();
private List<Package> leafPackages = new ArrayList<Package>();
private List<Package> allPackages = new ArrayList<Package>();
private Map<TypeAdapter, Package> packageTypeMap = new HashMap<TypeAdapter, Package>();
@SuppressWarnings("unused")
private ModelAdapter() {
}
public ModelAdapter(Model model) {
this.model = model;
construct();
}
/*
* (non-Javadoc)
*
* @see org.plasma.provisioning.adapter.MetamodelAdapter#getModel()
*/
@Override
public Model getModel() {
return model;
}
@Override
public List<Package> getPackages() {
return Collections.unmodifiableList(this.allPackages);
}
/*
* (non-Javadoc)
*
* @see org.plasma.provisioning.adapter.MetamodelAdapter#getLeafPackages()
*/
@Override
public List<Package> getLeafPackages() {
return Collections.unmodifiableList(this.leafPackages);
}
@Override
public Package getPackage(TypeAdapter type) {
return this.packageTypeMap.get(type);
}
/*
* (non-Javadoc)
*
* @see org.plasma.provisioning.adapter.MetamodelAdapter#getTypes()
*/
@Override
public Collection<TypeAdapter> getTypes() {
return typeMap.values();
}
/*
* (non-Javadoc)
*
* @see org.plasma.provisioning.adapter.MetamodelAdapter#getTypesArray()
*/
@Override
public TypeAdapter[] getTypesArray() {
TypeAdapter[] result = new TypeAdapter[typeMap.size()];
typeMap.values().toArray(result);
return result;
}
/*
* (non-Javadoc)
*
* @see org.plasma.provisioning.adapter.MetamodelAdapter#getTypeMap()
*/
@Override
public Map<String, TypeAdapter> getTypeMap() {
return Collections.unmodifiableMap(typeMap);
}
/*
* (non-Javadoc)
*
* @see
* org.plasma.provisioning.adapter.MetamodelAdapter#findType(java.lang.String)
*/
@Override
public TypeAdapter findType(String key) {
TypeAdapter result = typeMap.get(key);
return result;
}
private void findPackages(Package parent, List<Package> packages) {
packages.add(parent);
for (Package childPkg : parent.getPackages()) {
findPackages(childPkg, packages);
}
}
private void construct() {
if (log.isDebugEnabled())
log.debug("constructing...");
findPackages(this.model, this.allPackages);
for (Package pkg : this.allPackages)
if (pkg.getPackages().size() == 0)
this.leafPackages.add(pkg);
Map<String, Package> packageMap = new HashMap<>();
for (Package pkg : this.leafPackages) {
String key = pkg.getName();
if (packageMap.containsKey(key))
throw new PackageNameCollisionException("detected multiple (leaf) packages named '" + key
+ "' within the same provisioning context");
packageMap.put(key, pkg);
}
Map<String, Package> packagePhysicalMap = new HashMap<>();
for (Package pkg : this.leafPackages) {
if (pkg.getAlias() != null && pkg.getAlias().getPhysicalName() != null) {
String physicalName = pkg.getAlias().getPhysicalName();
String key = physicalName;
if (packagePhysicalMap.containsKey(key))
throw new PackageNameCollisionException(
"detected multiple (leaf) packages with physical name '" + key
+ "' within the same provisioning context");
packagePhysicalMap.put(key, pkg);
}
}
for (Package pkg : this.allPackages)
mapEnumerations(pkg);
for (Package pkg : this.allPackages)
mapClasses(pkg);
for (TypeAdapter adapter : typeMap.values()) {
if (adapter.getType() instanceof Class) {
if (log.isDebugEnabled())
log.debug("constructing class: " + adapter.getKey());
construct(adapter, null);
}
}
for (TypeAdapter adapter : typeMap.values()) {
if (adapter.getType() instanceof Class) {
for (ClassRef baseClassRef : ((Class) adapter.getType()).getSuperClasses()) {
String key = baseClassRef.getUri() + "#" + baseClassRef.getName();
TypeAdapter baseAdapter = typeMap.get(key);
if (baseAdapter == null)
throw new IllegalStateException("no mapping found for base type: " + key);
if (log.isDebugEnabled())
log.debug("construct deep: " + adapter.getKey());
constructDeep(adapter, baseAdapter);
}
}
}
}
private void mapEnumerations(Package pkg) {
if (log.isDebugEnabled())
log.debug("mapping enumerations for package " + pkg.getUri() + " (" + pkg.getName() + ")");
for (Enumeration enm : pkg.getEnumerations()) {
String key = enm.getUri() + "#" + enm.getName();
if (log.isDebugEnabled())
log.debug("mapping enumeration: " + key);
if (typeMap.get(key) != null)
throw new TypeNameCollisionException("detected multiple types named '" + enm.getName()
+ "' under the same URI '" + enm.getUri() + "'");
TypeAdapter adapter = new TypeAdapter(enm);
this.typeMap.put(key, adapter);
this.packageTypeMap.put(adapter, pkg);
if (enm.getAlias() != null && enm.getAlias().getPhysicalName() != null) {
String physicalName = enm.getAlias().getPhysicalName();
key = enm.getUri() + "#" + physicalName;
TypeAdapter existing = physicalNameTypeMap.get(key);
if (existing != null)
throw new TypeNameCollisionException("detected multiple types [" + existing.getName()
+ "," + enm.getName() + "] with the same types name '" + physicalName
+ "' under the same URI '" + enm.getUri() + "'");
this.physicalNameTypeMap.put(key, adapter);
}
}
}
private void mapClasses(Package pkg) {
for (Class cls : pkg.getClazzs()) {
String key = cls.getUri() + "#" + cls.getName();
if (log.isDebugEnabled())
log.debug("mapping class: " + key);
if (typeMap.get(key) != null)
throw new TypeNameCollisionException("detected multiple types named '" + cls.getName()
+ "' under the same URI '" + cls.getUri() + "'");
TypeAdapter adapter = new TypeAdapter(cls);
this.typeMap.put(key, adapter);
this.packageTypeMap.put(adapter, pkg);
if (log.isDebugEnabled())
log.debug("map: " + adapter.getKey());
if (cls.getAlias() != null && cls.getAlias().getPhysicalName() != null) {
String physicalName = cls.getAlias().getPhysicalName();
key = cls.getUri() + "#" + physicalName;
TypeAdapter existing = physicalNameTypeMap.get(key);
if (existing != null)
throw new TypeNameCollisionException("detected multiple types [" + existing.getName()
+ "," + cls.getName() + "] with the same physical name '" + physicalName
+ "' under the same URI '" + cls.getUri() + "'");
this.physicalNameTypeMap.put(key, adapter);
}
}
}
private void construct(TypeAdapter adapter, TypeAdapter source) {
for (Property prop : ((Class) adapter.getType()).getProperties()) {
if (adapter.getDeclaredProperty(prop.getName()) != null)
        throw new PropertyNameCollisionException(
            "detected multiple properties with the same logical name '" + prop.getName()
                + "' defined for class '" + adapter.getKey()
                + "' - the set of logical names for a class must be unique");
adapter.putDeclaredProperty(prop.getName(), prop);
      adapter.putProperty(prop.getName(), prop); // the 'all properties' collection,
                                                 // not just the declared ones
if (prop.getAlias() != null) {
Alias alias = prop.getAlias();
if (alias.getPhysicalName() != null && alias.getPhysicalName().trim().length() > 0) {
String physicalName = alias.getPhysicalName().trim();
          if (adapter.getAliasedProperty(physicalName) != null)
            throw new PropertyNameCollisionException(
                "detected multiple properties with the same physical name '" + physicalName
                    + "' defined for class '" + adapter.getKey()
                    + "' - the set of physical names for a class must be unique");
          adapter.putAliasedProperty(physicalName, prop);
}
        if (alias.getLocalName() != null && alias.getLocalName().trim().length() > 0) {
          String localName = alias.getLocalName().trim();
          if (adapter.getAliasedProperty(localName) != null)
            throw new PropertyNameCollisionException(
                "detected multiple properties with the same local name '" + localName
                    + "' defined for class '" + adapter.getKey()
                    + "' - the set of local names for a class must be unique");
          adapter.putAliasedProperty(localName, prop);
}
}
}
}
private void constructDeep(TypeAdapter adapter, TypeAdapter baseAdapter) {
for (Property prop : ((Class) adapter.getType()).getProperties()) {
validate(adapter, prop);
}
// copy base properties into subclass
for (Property prop : ((Class) baseAdapter.getType()).getProperties()) {
if (adapter.getProperty(prop.getName()) != null)
throw new PropertyNameCollisionException(
"detected multiple properties with the same logical name '" + prop.getName()
+ "' defined for class '" + adapter.getKey() + "' as well as its superclass '"
+ baseAdapter.getKey() + "' - the set of logical names for a class and "
+ "superclasses must be unique");
validate(baseAdapter, prop);
      adapter.putProperty(prop.getName(), prop); // the 'all properties' collection,
                                                 // not just the declared ones
if (prop.getAlias() != null && prop.getAlias().getPhysicalName() != null
&& prop.getAlias().getPhysicalName().trim().length() > 0) {
String alias = prop.getAlias().getPhysicalName().trim();
if (adapter.getAliasedProperty(alias) != null)
throw new PropertyNameCollisionException(
"detected multiple properties with the same physical name '" + alias
+ "' defined for class '" + adapter.getKey() + "' as well as its superclass '"
+ baseAdapter.getKey() + "' - the set of physical names for a class and "
+ "superclasses must be unique");
adapter.putAliasedProperty(alias, prop);
}
}
for (ClassRef baseClassRef : ((Class) baseAdapter.getType()).getSuperClasses()) {
String key2 = baseClassRef.getUri() + "#" + baseClassRef.getName();
TypeAdapter baseTypeAdapter = typeMap.get(key2);
if (baseTypeAdapter == null)
throw new IllegalStateException("no mapping found for base type: " + key2);
constructDeep(adapter, baseTypeAdapter);
}
}
private void validate(TypeAdapter adapter, Property prop) {
if (prop.getType() instanceof ClassRef) {
ClassRef ref = (ClassRef) prop.getType();
String refkey = ref.getUri() + "#" + ref.getName();
if (typeMap.get(refkey) == null)
throw new ProvisioningException("invalid type reference detected for property '"
+ adapter.getKey() + "." + prop.getName() + "' no class or enumeration '" + refkey
+ "' is defined");
if (prop.getOpposite() != null) {
Class oppositeClass = (Class) typeMap.get(refkey).getType();
Property oppositeProperty = findPropertyByName(oppositeClass, prop.getOpposite());
if (oppositeProperty == null)
throw new ProvisioningException("invalid opposite reference detected for property '"
+ adapter.getKey() + "." + prop.getName() + "' no opposite property '"
+ prop.getOpposite() + "' is defined for class '" + refkey + "'");
}
}
if (prop.getType() instanceof EnumerationRef) {
EnumerationRef ref = (EnumerationRef) prop.getType();
String refkey = ref.getUri() + "#" + ref.getName();
if (typeMap.get(refkey) == null)
throw new ProvisioningException("invalid type reference detected for property '"
+ prop.getName() + "' defined for class '" + adapter.getKey() + "'");
}
}
private Property findPropertyByName(Class clss, String name) {
for (Property prop : clss.getProperties()) {
if (name.equals(prop.getName()))
return prop;
}
return null;
}
}
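// Hedged usage sketch (not part of the original file): types are registered
// and looked up under the key <uri> + "#" + <logical name>; the URI and
// class name below are hypothetical.
class ModelAdapterUsageSketch {
    static TypeAdapter lookupCustomer(ModelAdapter adapter) {
        return adapter.findType("http://example.org/crm#Customer");
    }
}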
|
apache-2.0
|
mrinsss/Full-Repo
|
tripezi/system/database/drivers/mssql/mssql_forge.php
|
6075
|
<?php if ( ! defined('BASEPATH')) exit('No direct script access allowed');
/**
* CodeIgniter
*
* An open source application development framework for PHP 4.3.2 or newer
*
* @package CodeIgniter
* @author ExpressionEngine Dev Team
* @copyright Copyright (c) 2008 - 2010, EllisLab, Inc.
* @license http://codeigniter.com/user_guide/license.html
* @link http://codeigniter.com
* @since Version 1.0
* @filesource
*/
// ------------------------------------------------------------------------
/**
* MS SQL Forge Class
*
* @category Database
* @author ExpressionEngine Dev Team
* @link http://codeigniter.com/user_guide/database/
*/
class CI_DB_mssql_forge extends CI_DB_forge {
/**
* Create database
*
* @access private
* @param string the database name
	 * @return	string
*/
function _create_database($name)
{
return "CREATE DATABASE ".$name;
}
// --------------------------------------------------------------------
/**
* Drop database
*
* @access private
* @param string the database name
	 * @return	string
*/
function _drop_database($name)
{
return "DROP DATABASE ".$name;
}
// --------------------------------------------------------------------
/**
* Drop Table
*
* @access private
	 * @return	string
*/
function _drop_table($table)
{
return "DROP TABLE ".$this->db->_escape_identifiers($table);
}
// --------------------------------------------------------------------
/**
* Create Table
*
* @access private
* @param string the table name
* @param array the fields
* @param mixed primary key(s)
* @param mixed key(s)
* @param boolean should 'IF NOT EXISTS' be added to the SQL
	 * @return	string
*/
function _create_table($table, $fields, $primary_keys, $keys, $if_not_exists)
{
$sql = 'CREATE TABLE ';
		if ($if_not_exists === TRUE)
		{
			// Note: SQL Server has no IF NOT EXISTS clause for CREATE TABLE;
			// callers should check for the table's existence separately.
			$sql .= 'IF NOT EXISTS ';
		}
$sql .= $this->db->_escape_identifiers($table)." (";
$current_field_count = 0;
foreach ($fields as $field=>$attributes)
{
// Numeric field names aren't allowed in databases, so if the key is
// numeric, we know it was assigned by PHP and the developer manually
// entered the field information, so we'll simply add it to the list
if (is_numeric($field))
{
$sql .= "\n\t$attributes";
}
else
{
$attributes = array_change_key_case($attributes, CASE_UPPER);
$sql .= "\n\t".$this->db->_protect_identifiers($field);
$sql .= ' '.$attributes['TYPE'];
if (array_key_exists('CONSTRAINT', $attributes))
{
$sql .= '('.$attributes['CONSTRAINT'].')';
}
if (array_key_exists('UNSIGNED', $attributes) && $attributes['UNSIGNED'] === TRUE)
{
$sql .= ' UNSIGNED';
}
if (array_key_exists('DEFAULT', $attributes))
{
$sql .= ' DEFAULT \''.$attributes['DEFAULT'].'\'';
}
if (array_key_exists('NULL', $attributes) && $attributes['NULL'] === TRUE)
{
$sql .= ' NULL';
}
else
{
$sql .= ' NOT NULL';
}
			if (array_key_exists('AUTO_INCREMENT', $attributes) && $attributes['AUTO_INCREMENT'] === TRUE)
			{
				// SQL Server has no AUTO_INCREMENT keyword; an identity
				// column is the closest equivalent.
				$sql .= ' IDENTITY(1,1)';
			}
}
// don't add a comma on the end of the last field
if (++$current_field_count < count($fields))
{
$sql .= ',';
}
}
if (count($primary_keys) > 0)
{
$primary_keys = $this->db->_protect_identifiers($primary_keys);
$sql .= ",\n\tPRIMARY KEY (" . implode(', ', $primary_keys) . ")";
}
if (is_array($keys) && count($keys) > 0)
{
foreach ($keys as $key)
{
if (is_array($key))
{
$key = $this->db->_protect_identifiers($key);
}
else
{
$key = array($this->db->_protect_identifiers($key));
}
$sql .= ",\n\tFOREIGN KEY (" . implode(', ', $key) . ")";
}
}
$sql .= "\n)";
return $sql;
}
// --------------------------------------------------------------------
/**
* Alter table query
*
* Generates a platform-specific query so that a table can be altered
* Called by add_column(), drop_column(), and column_alter(),
*
* @access private
* @param string the ALTER type (ADD, DROP, CHANGE)
* @param string the column name
* @param string the table name
* @param string the column definition
* @param string the default value
* @param boolean should 'NOT NULL' be added
* @param string the field after which we should add the new field
	 * @return	string
*/
function _alter_table($alter_type, $table, $column_name, $column_definition = '', $default_value = '', $null = '', $after_field = '')
{
$sql = 'ALTER TABLE '.$this->db->_protect_identifiers($table)." $alter_type ".$this->db->_protect_identifiers($column_name);
// DROP has everything it needs now.
if ($alter_type == 'DROP')
{
return $sql;
}
$sql .= " $column_definition";
if ($default_value != '')
{
$sql .= " DEFAULT \"$default_value\"";
}
if ($null === NULL)
{
$sql .= ' NULL';
}
else
{
$sql .= ' NOT NULL';
}
if ($after_field != '')
{
$sql .= ' AFTER ' . $this->db->_protect_identifiers($after_field);
}
return $sql;
}
// --------------------------------------------------------------------
/**
* Rename a table
*
* Generates a platform-specific query so that a table can be renamed
*
* @access private
* @param string the old table name
* @param string the new table name
* @return string
*/
	function _rename_table($table_name, $new_table_name)
	{
		// SQL Server does not support ALTER TABLE ... RENAME TO;
		// tables are renamed with the sp_rename stored procedure.
		$sql = "EXEC sp_rename '".$table_name."', '".$new_table_name."'";
		return $sql;
	}
}
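/*
 * Hedged usage sketch (not part of the original driver): roughly the SQL
 * string _create_table() assembles for a hypothetical table definition.
 *
 *   $fields = array(
 *       'id'   => array('TYPE' => 'INT', 'AUTO_INCREMENT' => TRUE),
 *       'name' => array('TYPE' => 'VARCHAR', 'CONSTRAINT' => 100),
 *   );
 *   // _create_table('blog', $fields, array('id'), array(), FALSE) yields
 *   // approximately:
 *   //   CREATE TABLE "blog" (
 *   //       "id" INT NOT NULL IDENTITY(1,1),
 *   //       "name" VARCHAR(100) NOT NULL,
 *   //       PRIMARY KEY ("id")
 *   //   )
 */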
/* End of file mssql_forge.php */
/* Location: ./system/database/drivers/mssql/mssql_forge.php */
|
apache-2.0
|
exa-analytics/atomic
|
exatomic/qe/__pw/header.py
|
376
|
## -*- coding: utf-8 -*-
## Copyright (c) 2015-2020, Exa Analytics Development Team
## Distributed under the terms of the Apache License 2.0
#"""
#PW Header Subsection Parser
##############################
#"""
#from exa.typed import TypedProperty
#from exa.core.parser import Sections, Parser
#
#
#class Header(Parser):
# """
# """
# def _parse(self):
# pass
|
apache-2.0
|
karlmortensen/autopsy
|
Core/src/org/sleuthkit/autopsy/timeline/ui/TimeLineResultView.java
|
4279
|
/*
* Autopsy Forensic Browser
*
* Copyright 2013 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.timeline.ui;
import java.util.HashSet;
import java.util.Set;
import javafx.beans.Observable;
import javax.swing.SwingUtilities;
import org.joda.time.format.DateTimeFormatter;
import org.openide.nodes.Node;
import org.openide.util.NbBundle;
import org.sleuthkit.autopsy.corecomponentinterfaces.DataContent;
import org.sleuthkit.autopsy.corecomponents.DataResultPanel;
import org.sleuthkit.autopsy.timeline.TimeLineController;
import org.sleuthkit.autopsy.timeline.datamodel.FilteredEventsModel;
import org.sleuthkit.autopsy.timeline.explorernodes.EventRootNode;
/**
* Since it was too hard to derive from {@link DataResultPanel}, this class
* implements {@link TimeLineView}, listens to the events/state of the
* assigned {@link FilteredEventsModel} and acts appropriately on its
* {@link DataResultPanel}. That is, this class acts as a sort of bridge/adapter
* between a FilteredEventsModel instance and a DataResultPanel instance.
*/
public class TimeLineResultView {
/**
* the {@link DataResultPanel} that is the real view proxied by this class
*/
private final DataResultPanel dataResultPanel;
private final TimeLineController controller;
private final FilteredEventsModel filteredEvents;
private Set<Long> selectedEventIDs = new HashSet<>();
public DataResultPanel getDataResultPanel() {
return dataResultPanel;
}
public TimeLineResultView(TimeLineController controller, DataContent dataContent) {
this.controller = controller;
this.filteredEvents = controller.getEventsModel();
dataResultPanel = DataResultPanel.createInstanceUninitialized("", "", Node.EMPTY, 0, dataContent);
//set up listeners on relevant properties
TimeLineController.getTimeZone().addListener((Observable observable) -> {
dataResultPanel.setPath(getSummaryString());
});
controller.getSelectedEventIDs().addListener((Observable o) -> {
refresh();
});
refresh();
}
/**
* @return a String representation of all the Events displayed
*/
private String getSummaryString() {
if (controller.getSelectedTimeRange().get() != null) {
final DateTimeFormatter zonedFormatter = TimeLineController.getZonedFormatter();
return NbBundle.getMessage(this.getClass(), "TimeLineResultView.startDateToEndDate.text",
controller.getSelectedTimeRange().get().getStart()
.withZone(TimeLineController.getJodaTimeZone())
.toString(zonedFormatter),
controller.getSelectedTimeRange().get().getEnd()
.withZone(TimeLineController.getJodaTimeZone())
.toString(zonedFormatter));
}
return "";
}
/**
* refresh this view with the events selected in the controller
*/
public final void refresh() {
Set<Long> newSelectedEventIDs = new HashSet<>(controller.getSelectedEventIDs());
if (selectedEventIDs.equals(newSelectedEventIDs) == false) {
selectedEventIDs = newSelectedEventIDs;
final EventRootNode root = new EventRootNode(
NbBundle.getMessage(this.getClass(), "Timeline.node.root"), selectedEventIDs,
filteredEvents);
//this must be in edt or exception is thrown
SwingUtilities.invokeLater(() -> {
dataResultPanel.setPath(getSummaryString());
dataResultPanel.setNode(root);
});
}
}
}
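// Hedged usage sketch (not part of the original file): the view is built
// from an existing controller and a DataContent sink, and its panel is then
// embedded by the caller; both arguments are assumed to come from the
// surrounding Autopsy infrastructure.
class TimeLineResultViewUsageSketch {
    static DataResultPanel wire(TimeLineController controller, DataContent dataContent) {
        return new TimeLineResultView(controller, dataContent).getDataResultPanel();
    }
}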
|
apache-2.0
|
masonmei/apm-agent
|
bootstrap/src/test/java/com/baidu/oped/apm/bootstrap/config/ProfilableClassFilterTest.java
|
2336
|
/*
* Copyright 2014 NAVER Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.baidu.oped.apm.bootstrap.config;
import org.junit.Assert;
import org.junit.Test;
import com.baidu.oped.apm.bootstrap.config.ProfilableClassFilter;
import java.io.IOException;
public class ProfilableClassFilterTest {
@Test
public void testIsProfilableClassWithNoConfiguration() throws IOException {
ProfilableClassFilter filter = new ProfilableClassFilter("com.baidu.oped.apm.testweb.controller.*,com.baidu.oped.apm.testweb.MyClass");
Assert.assertFalse(filter.filter("com/baidu/oped/apm/testweb/controllers/MyController"));
Assert.assertFalse(filter.filter("net/spider/king/wang/Jjang"));
Assert.assertFalse(filter.filter("com/baidu/oped/apm/testweb2/controller/MyController"));
Assert.assertFalse(filter.filter("com/baidu/oped/apm/testweb2/MyClass"));
}
/**
* <pre>
* configuration is
* profile.package.include=com.baidu.oped.apm.testweb.controller.*,com.baidu.oped.apm.testweb.MyClass
* </pre>
*
* @throws IOException
*/
@Test
public void testIsProfilableClass() throws IOException {
ProfilableClassFilter filter = new ProfilableClassFilter("com.baidu.oped.apm.testweb.controller.*,com.baidu.oped.apm.testweb.MyClass");
Assert.assertTrue(filter.filter("com/baidu/oped/apm/testweb/MyClass"));
Assert.assertTrue(filter.filter("com/baidu/oped/apm/testweb/controller/MyController"));
Assert.assertTrue(filter.filter("com/baidu/oped/apm/testweb/controller/customcontroller/MyCustomController"));
Assert.assertFalse(filter.filter("com/baidu/oped/apm/testweb/MyUnknownClass"));
Assert.assertFalse(filter.filter("com/baidu/oped/apm/testweb/controller2/MyController"));
}
}
|
apache-2.0
|
ludovicc/testng-debian
|
test-14/v4/src/test/enabled/VerifyEnabledTest.java
|
406
|
package test.enabled;
import test.BaseTest;
public class VerifyEnabledTest extends BaseTest {
/**
* @testng.test
*/
public void disabledMethodsShouldNotRun() {
addClass("test.enabled.EnabledTest");
run();
String[] failed = {
};
String[] passed = {
};
verifyTests("Passed", passed, getPassedTests());
verifyTests("Failed", failed, getFailedTests());
}
}
|
apache-2.0
|
evias/imeals
|
public/js/jquery.highcharts-3d.min.js
|
20436
|
/*
Highcharts JS v4.1.9 (2015-10-07)
(c) 2009-2013 Torstein Hønsi
License: www.highcharts.com/license
*/
(function(d){function p(c,b,a){var e,g,f=b.options.chart.options3d,h=!1;a?(h=b.inverted,a=b.plotWidth/2,b=b.plotHeight/2,e=f.depth/2,g=z(f.depth,1)*z(f.viewDistance,0)):(a=b.plotLeft+b.plotWidth/2,b=b.plotTop+b.plotHeight/2,e=f.depth/2,g=z(f.depth,1)*z(f.viewDistance,0));var j=[],i=a,k=b,l=e,q=g,a=A*(h?f.beta:-f.beta),f=A*(h?-f.alpha:f.alpha),o=m(a),x=n(a),r=m(f),v=n(f),t,u,y,w,s,p;d.each(c,function(a){t=(h?a.y:a.x)-i;u=(h?a.x:a.y)-k;y=(a.z||0)-l;w=x*t-o*y;s=-o*r*t-x*r*y+v*u;p=o*v*t+x*v*y+r*u;q>0&&
q<Number.POSITIVE_INFINITY&&(w*=q/(p+l+q),s*=q/(p+l+q));w+=i;s+=k;p+=l;j.push({x:h?s:w,y:h?w:s,z:p})});return j}function B(c){return c!==void 0&&c!==null}function F(c){var b=0,a,e;for(a=0;a<c.length;a++)e=(a+1)%c.length,b+=c[a].x*c[e].y-c[e].x*c[a].y;return b/2}function D(c){var b=0,a;for(a=0;a<c.length;a++)b+=c[a].z;return c.length?b/c.length:0}function s(c,b,a,e,g,f,d,j){var i=[];return f>g&&f-g>o/2+1.0E-4?(i=i.concat(s(c,b,a,e,g,g+o/2,d,j)),i=i.concat(s(c,b,a,e,g+o/2,f,d,j))):f<g&&g-f>o/2+1.0E-4?
(i=i.concat(s(c,b,a,e,g,g-o/2,d,j)),i=i.concat(s(c,b,a,e,g-o/2,f,d,j))):(i=f-g,["C",c+a*n(g)-a*C*i*m(g)+d,b+e*m(g)+e*C*i*n(g)+j,c+a*n(f)+a*C*i*m(f)+d,b+e*m(f)-e*C*i*n(f)+j,c+a*n(f)+d,b+e*m(f)+j])}function G(c){if(this.chart.is3d()){var b=this.chart.options.plotOptions.column.grouping;if(b!==void 0&&!b&&this.group.zIndex!==void 0&&!this.zIndexSet)this.group.attr({zIndex:this.group.zIndex*10}),this.zIndexSet=!0;var a=this.options,e=this.options.states;this.borderWidth=a.borderWidth=B(a.edgeWidth)?a.edgeWidth:
1;d.each(this.data,function(b){if(b.y!==null)b=b.pointAttr,this.borderColor=d.pick(a.edgeColor,b[""].fill),b[""].stroke=this.borderColor,b.hover.stroke=d.pick(e.hover.edgeColor,this.borderColor),b.select.stroke=d.pick(e.select.edgeColor,this.borderColor)})}c.apply(this,[].slice.call(arguments,1))}var o=Math.PI,A=o/180,m=Math.sin,n=Math.cos,z=d.pick,H=Math.round;d.perspective=p;var C=4*(Math.sqrt(2)-1)/3/(o/2);d.SVGRenderer.prototype.toLinePath=function(c,b){var a=[];d.each(c,function(b){a.push("L",
b.x,b.y)});c.length&&(a[0]="M",b&&a.push("Z"));return a};d.SVGRenderer.prototype.cuboid=function(c){var b=this.g(),c=this.cuboidPath(c);b.front=this.path(c[0]).attr({zIndex:c[3],"stroke-linejoin":"round"}).add(b);b.top=this.path(c[1]).attr({zIndex:c[4],"stroke-linejoin":"round"}).add(b);b.side=this.path(c[2]).attr({zIndex:c[5],"stroke-linejoin":"round"}).add(b);b.fillSetter=function(a){var b=d.Color(a).brighten(0.1).get(),c=d.Color(a).brighten(-0.1).get();this.front.attr({fill:a});this.top.attr({fill:b});
this.side.attr({fill:c});this.color=a;return this};b.opacitySetter=function(a){this.front.attr({opacity:a});this.top.attr({opacity:a});this.side.attr({opacity:a});return this};b.attr=function(a){a.shapeArgs||B(a.x)?(a=this.renderer.cuboidPath(a.shapeArgs||a),this.front.attr({d:a[0],zIndex:a[3]}),this.top.attr({d:a[1],zIndex:a[4]}),this.side.attr({d:a[2],zIndex:a[5]})):d.SVGElement.prototype.attr.call(this,a);return this};b.animate=function(a,b,c){B(a.x)&&B(a.y)?(a=this.renderer.cuboidPath(a),this.front.attr({zIndex:a[3]}).animate({d:a[0]},
b,c),this.top.attr({zIndex:a[4]}).animate({d:a[1]},b,c),this.side.attr({zIndex:a[5]}).animate({d:a[2]},b,c)):a.opacity?(this.front.animate(a,b,c),this.top.animate(a,b,c),this.side.animate(a,b,c)):d.SVGElement.prototype.animate.call(this,a,b,c);return this};b.destroy=function(){this.front.destroy();this.top.destroy();this.side.destroy();return null};b.attr({zIndex:-c[3]});return b};d.SVGRenderer.prototype.cuboidPath=function(c){var b=c.x,a=c.y,e=c.z,g=c.height,f=c.width,h=c.depth,j=d.map,i=[{x:b,y:a,
z:e},{x:b+f,y:a,z:e},{x:b+f,y:a+g,z:e},{x:b,y:a+g,z:e},{x:b,y:a+g,z:e+h},{x:b+f,y:a+g,z:e+h},{x:b+f,y:a,z:e+h},{x:b,y:a,z:e+h}],i=p(i,d.charts[this.chartIndex],c.insidePlotArea),a=function(a,b){a=j(a,function(a){return i[a]});b=j(b,function(a){return i[a]});return F(a)<0?a:F(b)<0?b:[]},c=a([3,2,1,0],[7,6,5,4]),b=a([1,6,7,0],[4,5,2,3]),a=a([1,2,5,6],[0,7,4,3]);return[this.toLinePath(c,!0),this.toLinePath(b,!0),this.toLinePath(a,!0),D(c),D(b),D(a)]};d.SVGRenderer.prototype.arc3d=function(c){c.alpha*=
A;c.beta*=A;var b=this.g(),a=this.arc3dPath(c),e=b.renderer,g=a.zTop*100;b.shapeArgs=c;b.top=e.path(a.top).setRadialReference(c.center).attr({zIndex:a.zTop}).add(b);b.side1=e.path(a.side2).attr({zIndex:a.zSide1});b.side2=e.path(a.side1).attr({zIndex:a.zSide2});b.inn=e.path(a.inn).attr({zIndex:a.zInn});b.out=e.path(a.out).attr({zIndex:a.zOut});b.fillSetter=function(a){this.color=a;var b=d.Color(a).brighten(-0.1).get();this.side1.attr({fill:b});this.side2.attr({fill:b});this.inn.attr({fill:b});this.out.attr({fill:b});
this.top.attr({fill:a});return this};b.translateXSetter=function(a){this.out.attr({translateX:a});this.inn.attr({translateX:a});this.side1.attr({translateX:a});this.side2.attr({translateX:a});this.top.attr({translateX:a})};b.translateYSetter=function(a){this.out.attr({translateY:a});this.inn.attr({translateY:a});this.side1.attr({translateY:a});this.side2.attr({translateY:a});this.top.attr({translateY:a})};b.animate=function(a,b,c){B(a.end)||B(a.start)?(this._shapeArgs=this.shapeArgs,d.SVGElement.prototype.animate.call(this,
{_args:a},{duration:b,start:function(){var a=arguments[0].elem,b=a._shapeArgs;b.fill!==a.color&&a.attr({fill:b.fill})},step:function(){var a=arguments[1],b=a.elem,c=b._shapeArgs,e=a.end,a=a.pos,c=d.merge(c,{x:c.x+(e.x-c.x)*a,y:c.y+(e.y-c.y)*a,r:c.r+(e.r-c.r)*a,innerR:c.innerR+(e.innerR-c.innerR)*a,start:c.start+(e.start-c.start)*a,end:c.end+(e.end-c.end)*a}),e=b.renderer.arc3dPath(c);b.shapeArgs=c;b.top.attr({d:e.top,zIndex:e.zTop});b.inn.attr({d:e.inn,zIndex:e.zInn});b.out.attr({d:e.out,zIndex:e.zOut});
b.side1.attr({d:e.side1,zIndex:e.zSide1});b.side2.attr({d:e.side2,zIndex:e.zSide2})}},c)):d.SVGElement.prototype.animate.call(this,a,b,c);return this};b.destroy=function(){this.top.destroy();this.out.destroy();this.inn.destroy();this.side1.destroy();this.side2.destroy();d.SVGElement.prototype.destroy.call(this)};b.hide=function(){this.top.hide();this.out.hide();this.inn.hide();this.side1.hide();this.side2.hide()};b.show=function(){this.top.show();this.out.show();this.inn.show();this.side1.show();
this.side2.show()};b.zIndex=g;b.attr({zIndex:g});return b};d.SVGRenderer.prototype.arc3dPath=function(c){function b(a){a%=2*o;a>o&&(a=2*o-a);return a}var a=c.x,e=c.y,d=c.start,f=c.end-1.0E-5,h=c.r,j=c.innerR,i=c.depth,k=c.alpha,l=c.beta,q=n(d),p=m(d),c=n(f),x=m(f),r=h*n(l);h*=n(k);var v=j*n(l),t=j*n(k),j=i*m(l),u=i*m(k),i=["M",a+r*q,e+h*p],i=i.concat(s(a,e,r,h,d,f,0,0)),i=i.concat(["L",a+v*c,e+t*x]),i=i.concat(s(a,e,v,t,f,d,0,0)),i=i.concat(["Z"]),y=l>0?o/2:0,l=k>0?0:o/2,y=d>-y?d:f>-y?-y:d,w=f<o-
l?f:d<o-l?o-l:f,k=["M",a+r*n(y),e+h*m(y)],k=k.concat(s(a,e,r,h,y,w,0,0));f>o-l&&d<o-l&&(k=k.concat(["L",a+r*n(w)+j,e+h*m(w)+u]),k=k.concat(s(a,e,r,h,w,f,j,u)),k=k.concat(["L",a+r*n(f),e+h*m(f)]),k=k.concat(s(a,e,r,h,f,w,0,0)));k=k.concat(["L",a+r*n(w)+j,e+h*m(w)+u]);k=k.concat(s(a,e,r,h,w,y,j,u));k=k.concat(["Z"]);l=["M",a+v*q,e+t*p];l=l.concat(s(a,e,v,t,d,f,0,0));l=l.concat(["L",a+v*n(f)+j,e+t*m(f)+u]);l=l.concat(s(a,e,v,t,f,d,j,u));l=l.concat(["Z"]);q=["M",a+r*q,e+h*p,"L",a+r*q+j,e+h*p+u,"L",a+
v*q+j,e+t*p+u,"L",a+v*q,e+t*p,"Z"];a=["M",a+r*c,e+h*x,"L",a+r*c+j,e+h*x+u,"L",a+v*c+j,e+t*x+u,"L",a+v*c,e+t*x,"Z"];x=Math.atan2(u,-j);e=Math.abs(f+x);c=Math.abs(d+x);d=Math.abs((d+f)/2+x);e=b(e);c=b(c);d=b(d);d*=1E5;f=c*1E5;e*=1E5;return{top:i,zTop:o*1E5+1,out:k,zOut:Math.max(d,f,e),inn:l,zInn:Math.max(d,f,e),side1:q,zSide1:e*0.99,side2:a,zSide2:f*0.99}};d.Chart.prototype.is3d=function(){return this.options.chart.options3d&&this.options.chart.options3d.enabled};d.wrap(d.Chart.prototype,"isInsidePlot",
function(c){return this.is3d()?!0:c.apply(this,[].slice.call(arguments,1))});d.getOptions().chart.options3d={enabled:!1,alpha:0,beta:0,depth:100,viewDistance:25,frame:{bottom:{size:1,color:"rgba(255,255,255,0)"},side:{size:1,color:"rgba(255,255,255,0)"},back:{size:1,color:"rgba(255,255,255,0)"}}};d.wrap(d.Chart.prototype,"init",function(c){var b=[].slice.call(arguments,1),a;if(b[0].chart.options3d&&b[0].chart.options3d.enabled)b[0].chart.options3d.alpha=(b[0].chart.options3d.alpha||0)%360,b[0].chart.options3d.beta=
(b[0].chart.options3d.beta||0)%360,a=b[0].plotOptions||{},a=a.pie||{},a.borderColor=d.pick(a.borderColor,void 0);c.apply(this,b)});d.wrap(d.Chart.prototype,"setChartSize",function(c){c.apply(this,[].slice.call(arguments,1));if(this.is3d()){var b=this.inverted,a=this.clipBox,e=this.margin;a[b?"y":"x"]=-(e[3]||0);a[b?"x":"y"]=-(e[0]||0);a[b?"height":"width"]=this.chartWidth+(e[3]||0)+(e[1]||0);a[b?"width":"height"]=this.chartHeight+(e[0]||0)+(e[2]||0)}});d.wrap(d.Chart.prototype,"redraw",function(c){if(this.is3d())this.isDirtyBox=
!0;c.apply(this,[].slice.call(arguments,1))});d.wrap(d.Chart.prototype,"renderSeries",function(c){var b=this.series.length;if(this.is3d())for(;b--;)c=this.series[b],c.translate(),c.render();else c.call(this)});d.Chart.prototype.retrieveStacks=function(c){var b=this.series,a={},e,g=1;d.each(this.series,function(d){e=z(d.options.stack,c?0:b.length-1-d.index);a[e]?a[e].series.push(d):(a[e]={series:[d],position:g},g++)});a.totalStacks=g+1;return a};d.wrap(d.Axis.prototype,"setOptions",function(c,b){var a;
c.call(this,b);if(this.chart.is3d())a=this.options,a.tickWidth=d.pick(a.tickWidth,0),a.gridLineWidth=d.pick(a.gridLineWidth,1)});d.wrap(d.Axis.prototype,"render",function(c){c.apply(this,[].slice.call(arguments,1));if(this.chart.is3d()){var b=this.chart,a=b.renderer,e=b.options.chart.options3d,d=e.frame,f=d.bottom,h=d.back,d=d.side,j=e.depth,i=this.height,k=this.width,l=this.left,q=this.top;if(!this.isZAxis)this.horiz?(h={x:l,y:q+(b.xAxis[0].opposite?-f.size:i),z:0,width:k,height:f.size,depth:j,insidePlotArea:!1},
this.bottomFrame?this.bottomFrame.animate(h):this.bottomFrame=a.cuboid(h).attr({fill:f.color,zIndex:b.yAxis[0].reversed&&e.alpha>0?4:-1}).css({stroke:f.color}).add()):(e={x:l+(b.yAxis[0].opposite?0:-d.size),y:q+(b.xAxis[0].opposite?-f.size:0),z:j,width:k+d.size,height:i+f.size,depth:h.size,insidePlotArea:!1},this.backFrame?this.backFrame.animate(e):this.backFrame=a.cuboid(e).attr({fill:h.color,zIndex:-3}).css({stroke:h.color}).add(),b={x:l+(b.yAxis[0].opposite?k:-d.size),y:q+(b.xAxis[0].opposite?
-f.size:0),z:0,width:d.size,height:i+f.size,depth:j,insidePlotArea:!1},this.sideFrame?this.sideFrame.animate(b):this.sideFrame=a.cuboid(b).attr({fill:d.color,zIndex:-2}).css({stroke:d.color}).add())}});d.wrap(d.Axis.prototype,"getPlotLinePath",function(c){var b=c.apply(this,[].slice.call(arguments,1));if(!this.chart.is3d())return b;if(b===null)return b;var a=this.chart.options.chart.options3d,a=this.isZAxis?this.chart.plotWidth:a.depth,d=this.opposite;this.horiz&&(d=!d);b=[this.swapZ({x:b[1],y:b[2],
z:d?a:0}),this.swapZ({x:b[1],y:b[2],z:a}),this.swapZ({x:b[4],y:b[5],z:a}),this.swapZ({x:b[4],y:b[5],z:d?0:a})];b=p(b,this.chart,!1);return b=this.chart.renderer.toLinePath(b,!1)});d.wrap(d.Axis.prototype,"getLinePath",function(c){return this.chart.is3d()?[]:c.apply(this,[].slice.call(arguments,1))});d.wrap(d.Axis.prototype,"getPlotBandPath",function(c){if(this.chart.is3d()){var b=arguments,a=b[1],b=this.getPlotLinePath(b[2]);(a=this.getPlotLinePath(a))&&b?a.push("L",b[10],b[11],"L",b[7],b[8],"L",
b[4],b[5],"L",b[1],b[2]):a=null;return a}else return c.apply(this,[].slice.call(arguments,1))});d.wrap(d.Tick.prototype,"getMarkPath",function(c){var b=c.apply(this,[].slice.call(arguments,1));if(!this.axis.chart.is3d())return b;b=[this.axis.swapZ({x:b[1],y:b[2],z:0}),this.axis.swapZ({x:b[4],y:b[5],z:0})];b=p(b,this.axis.chart,!1);return b=["M",b[0].x,b[0].y,"L",b[1].x,b[1].y]});d.wrap(d.Tick.prototype,"getLabelPosition",function(c){var b=c.apply(this,[].slice.call(arguments,1));if(!this.axis.chart.is3d())return b;
var a=p([this.axis.swapZ({x:b.x,y:b.y,z:0})],this.axis.chart,!1)[0];a.x-=!this.axis.horiz&&this.axis.opposite?this.axis.transA:0;a.old=b;return a});d.wrap(d.Tick.prototype,"handleOverflow",function(c,b){if(this.axis.chart.is3d())b=b.old;return c.call(this,b)});d.wrap(d.Axis.prototype,"getTitlePosition",function(c){var b=c.apply(this,[].slice.call(arguments,1));return!this.chart.is3d()?b:b=p([this.swapZ({x:b.x,y:b.y,z:0})],this.chart,!1)[0]});d.wrap(d.Axis.prototype,"drawCrosshair",function(c){var b=
arguments;this.chart.is3d()&&b[2]&&(b[2]={plotX:b[2].plotXold||b[2].plotX,plotY:b[2].plotYold||b[2].plotY});c.apply(this,[].slice.call(b,1))});d.Axis.prototype.swapZ=function(c,b){if(this.isZAxis){var a=b?0:this.chart.plotLeft,d=this.chart;return{x:a+(d.yAxis[0].opposite?c.z:d.xAxis[0].width-c.z),y:c.y,z:c.x-a}}else return c};var E=d.ZAxis=function(){this.isZAxis=!0;this.init.apply(this,arguments)};d.extend(E.prototype,d.Axis.prototype);d.extend(E.prototype,{setOptions:function(c){c=d.merge({offset:0,
lineWidth:0},c);d.Axis.prototype.setOptions.call(this,c);this.coll="zAxis"},setAxisSize:function(){d.Axis.prototype.setAxisSize.call(this);this.width=this.len=this.chart.options.chart.options3d.depth;this.right=this.chart.chartWidth-this.width-this.left},getSeriesExtremes:function(){var c=this,b=c.chart;c.hasVisibleSeries=!1;c.dataMin=c.dataMax=c.ignoreMinPadding=c.ignoreMaxPadding=null;c.buildStacks&&c.buildStacks();d.each(c.series,function(a){if(a.visible||!b.options.chart.ignoreHiddenSeries)if(c.hasVisibleSeries=
!0,a=a.zData,a.length)c.dataMin=Math.min(z(c.dataMin,a[0]),Math.min.apply(null,a)),c.dataMax=Math.max(z(c.dataMax,a[0]),Math.max.apply(null,a))})}});d.wrap(d.Chart.prototype,"getAxes",function(c){var b=this,a=this.options,a=a.zAxis=d.splat(a.zAxis||{});c.call(this);if(b.is3d())this.zAxis=[],d.each(a,function(a,c){a.index=c;a.isX=!0;(new E(b,a)).setScale()})});d.wrap(d.seriesTypes.column.prototype,"translate",function(c){c.apply(this,[].slice.call(arguments,1));if(this.chart.is3d()){var b=this.chart,
a=this.options,e=a.depth||25,g=(a.stacking?a.stack||0:this._i)*(e+(a.groupZPadding||1));a.grouping!==!1&&(g=0);g+=a.groupZPadding||1;d.each(this.data,function(a){if(a.y!==null){var c=a.shapeArgs,d=a.tooltipPos;a.shapeType="cuboid";c.z=g;c.depth=e;c.insidePlotArea=!0;d=p([{x:d[0],y:d[1],z:g}],b,!1)[0];a.tooltipPos=[d.x,d.y]}});this.z=g}});d.wrap(d.seriesTypes.column.prototype,"animate",function(c){if(this.chart.is3d()){var b=arguments[1],a=this.yAxis,e=this,g=this.yAxis.reversed;if(d.svg)b?d.each(e.data,
function(b){if(b.y!==null&&(b.height=b.shapeArgs.height,b.shapey=b.shapeArgs.y,b.shapeArgs.height=1,!g))b.shapeArgs.y=b.stackY?b.plotY+a.translate(b.stackY):b.plotY+(b.negative?-b.height:b.height)}):(d.each(e.data,function(a){if(a.y!==null)a.shapeArgs.height=a.height,a.shapeArgs.y=a.shapey,a.graphic&&a.graphic.animate(a.shapeArgs,e.options.animation)}),this.drawDataLabels(),e.animate=null)}else c.apply(this,[].slice.call(arguments,1))});d.wrap(d.seriesTypes.column.prototype,"init",function(c){c.apply(this,
[].slice.call(arguments,1));if(this.chart.is3d()){var b=this.options,a=b.grouping,d=b.stacking,g=0;if(a===void 0||a){a=this.chart.retrieveStacks(d);d=b.stack||0;for(g=0;g<a[d].series.length;g++)if(a[d].series[g]===this)break;g=a.totalStacks*10-10*(a.totalStacks-a[d].position)-g}b.zIndex=g}});d.wrap(d.Series.prototype,"alignDataLabel",function(c){if(this.chart.is3d()&&(this.type==="column"||this.type==="columnrange")){var b=arguments[4],a={x:b.x,y:b.y,z:this.z},a=p([a],this.chart,!0)[0];b.x=a.x;b.y=
a.y}c.apply(this,[].slice.call(arguments,1))});d.seriesTypes.columnrange&&d.wrap(d.seriesTypes.columnrange.prototype,"drawPoints",G);d.wrap(d.seriesTypes.column.prototype,"drawPoints",G);d.wrap(d.seriesTypes.pie.prototype,"translate",function(c){c.apply(this,[].slice.call(arguments,1));if(this.chart.is3d()){var b=this,a=b.chart,e=b.options,g=e.depth||0,f=a.options.chart.options3d,h={x:a.plotWidth/2,y:a.plotHeight/2,z:f.depth},j=f.alpha,i=f.beta,k=e.stacking?(e.stack||0)*g:b._i*g;k+=g/2;e.grouping!==
!1&&(k=0);d.each(b.data,function(a){var c=a.shapeArgs;a.shapeType="arc3d";c.z=k;c.depth=g*0.75;c.origin=h;c.alpha=j;c.beta=i;c.center=b.center;c=(c.end+c.start)/2;a.slicedTranslation={translateX:H(n(c)*e.slicedOffset*n(j*A)),translateY:H(m(c)*e.slicedOffset*n(j*A))}})}});d.wrap(d.seriesTypes.pie.prototype.pointClass.prototype,"haloPath",function(c){var b=arguments;return this.series.chart.is3d()?[]:c.call(this,b[1])});d.wrap(d.seriesTypes.pie.prototype,"drawPoints",function(c){var b=this.group,a=
this.options,e=a.states;if(this.chart.is3d())this.borderWidth=a.borderWidth=a.edgeWidth||1,this.borderColor=a.edgeColor=d.pick(a.edgeColor,a.borderColor,void 0),e.hover.borderColor=d.pick(e.hover.edgeColor,this.borderColor),e.hover.borderWidth=d.pick(e.hover.edgeWidth,this.borderWidth),e.select.borderColor=d.pick(e.select.edgeColor,this.borderColor),e.select.borderWidth=d.pick(e.select.edgeWidth,this.borderWidth),d.each(this.data,function(a){var b=a.pointAttr;b[""].stroke=a.series.borderColor||a.color;
b[""]["stroke-width"]=a.series.borderWidth;b.hover.stroke=e.hover.borderColor;b.hover["stroke-width"]=e.hover.borderWidth;b.select.stroke=e.select.borderColor;b.select["stroke-width"]=e.select.borderWidth});c.apply(this,[].slice.call(arguments,1));this.chart.is3d()&&d.each(this.points,function(a){var c=a.graphic;c.out.add(b);c.inn.add(b);c.side1.add(b);c.side2.add(b);c[a.y?"show":"hide"]()})});d.wrap(d.seriesTypes.pie.prototype,"drawDataLabels",function(c){if(this.chart.is3d()){var b=this;d.each(b.data,
function(a){var c=a.shapeArgs,d=c.r,f=c.depth,h=(c.alpha||b.chart.options.chart.options3d.alpha)*A,c=(c.start+c.end)/2,a=a.labelPos;a[1]+=-d*(1-n(h))*m(c)+(m(c)>0?m(h)*f:0);a[3]+=-d*(1-n(h))*m(c)+(m(c)>0?m(h)*f:0);a[5]+=-d*(1-n(h))*m(c)+(m(c)>0?m(h)*f:0)})}c.apply(this,[].slice.call(arguments,1))});d.wrap(d.seriesTypes.pie.prototype,"addPoint",function(c){c.apply(this,[].slice.call(arguments,1));this.chart.is3d()&&this.update(this.userOptions,!0)});d.wrap(d.seriesTypes.pie.prototype,"animate",function(c){if(this.chart.is3d()){var b=
arguments[1],a=this.options.animation,e=this.center,g=this.group,f=this.markerGroup;if(d.svg)if(a===!0&&(a={}),b){if(g.oldtranslateX=g.translateX,g.oldtranslateY=g.translateY,b={translateX:e[0],translateY:e[1],scaleX:0.001,scaleY:0.001},g.attr(b),f)f.attrSetters=g.attrSetters,f.attr(b)}else b={translateX:g.oldtranslateX,translateY:g.oldtranslateY,scaleX:1,scaleY:1},g.animate(b,a),f&&f.animate(b,a),this.animate=null}else c.apply(this,[].slice.call(arguments,1))});d.wrap(d.seriesTypes.scatter.prototype,
"translate",function(c){c.apply(this,[].slice.call(arguments,1));if(this.chart.is3d()){var b=this.chart,a=d.pick(this.zAxis,b.options.zAxis[0]),e=[],g,f,h;for(h=0;h<this.data.length;h++)g=this.data[h],f=a.isLog&&a.val2lin?a.val2lin(g.z):g.z,g.plotZ=a.translate(f),g.isInside=g.isInside?f>=a.min&&f<=a.max:!1,e.push({x:g.plotX,y:g.plotY,z:g.plotZ});b=p(e,b,!0);for(h=0;h<this.data.length;h++)g=this.data[h],a=b[h],g.plotXold=g.plotX,g.plotYold=g.plotY,g.plotX=a.x,g.plotY=a.y,g.plotZ=a.z}});d.wrap(d.seriesTypes.scatter.prototype,
"init",function(c,b,a){if(b.is3d())this.axisTypes=["xAxis","yAxis","zAxis"],this.pointArrayMap=["x","y","z"],this.parallelArrays=["x","y","z"];c=c.apply(this,[b,a]);if(this.chart.is3d())this.tooltipOptions.pointFormat=this.userOptions.tooltip?this.userOptions.tooltip.pointFormat||"x: <b>{point.x}</b><br/>y: <b>{point.y}</b><br/>z: <b>{point.z}</b><br/>":"x: <b>{point.x}</b><br/>y: <b>{point.y}</b><br/>z: <b>{point.z}</b><br/>";return c});if(d.VMLRenderer)d.setOptions({animate:!1}),d.VMLRenderer.prototype.cuboid=
d.SVGRenderer.prototype.cuboid,d.VMLRenderer.prototype.cuboidPath=d.SVGRenderer.prototype.cuboidPath,d.VMLRenderer.prototype.toLinePath=d.SVGRenderer.prototype.toLinePath,d.VMLRenderer.prototype.createElement3D=d.SVGRenderer.prototype.createElement3D,d.VMLRenderer.prototype.arc3d=function(c){c=d.SVGRenderer.prototype.arc3d.call(this,c);c.css({zIndex:c.zIndex});return c},d.VMLRenderer.prototype.arc3dPath=d.SVGRenderer.prototype.arc3dPath,d.wrap(d.Axis.prototype,"render",function(c){c.apply(this,[].slice.call(arguments,
1));this.sideFrame&&(this.sideFrame.css({zIndex:0}),this.sideFrame.front.attr({fill:this.sideFrame.color}));this.bottomFrame&&(this.bottomFrame.css({zIndex:1}),this.bottomFrame.front.attr({fill:this.bottomFrame.color}));this.backFrame&&(this.backFrame.css({zIndex:0}),this.backFrame.front.attr({fill:this.backFrame.color}))})})(Highcharts);
|
apache-2.0
|
minewhat/druid
|
processing/src/main/java/io/druid/query/extraction/TimeDimExtractionFn.java
|
3408
|
/*
* Druid - a distributed column store.
* Copyright 2012 - 2015 Metamarkets Group Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.druid.query.extraction;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.Preconditions;
import com.ibm.icu.text.SimpleDateFormat;
import com.metamx.common.StringUtils;
import java.nio.ByteBuffer;
import java.text.ParseException;
import java.util.Date;
/**
*/
public class TimeDimExtractionFn extends DimExtractionFn
{
private static final byte CACHE_TYPE_ID = 0x0;
private final String timeFormat;
private final SimpleDateFormat timeFormatter;
private final String resultFormat;
private final SimpleDateFormat resultFormatter;
@JsonCreator
public TimeDimExtractionFn(
@JsonProperty("timeFormat") String timeFormat,
@JsonProperty("resultFormat") String resultFormat
)
{
Preconditions.checkNotNull(timeFormat, "timeFormat must not be null");
Preconditions.checkNotNull(resultFormat, "resultFormat must not be null");
this.timeFormat = timeFormat;
this.timeFormatter = new SimpleDateFormat(timeFormat);
this.timeFormatter.setLenient(true);
this.resultFormat = resultFormat;
this.resultFormatter = new SimpleDateFormat(resultFormat);
}
@Override
  public byte[] getCacheKey()
  {
    // Both formats must participate in the cache key; keying on timeFormat
    // alone would let two functions with different resultFormats collide.
    byte[] timeFormatBytes = StringUtils.toUtf8(timeFormat);
    byte[] resultFormatBytes = StringUtils.toUtf8(resultFormat);
    return ByteBuffer.allocate(2 + timeFormatBytes.length + resultFormatBytes.length)
                     .put(CACHE_TYPE_ID)
                     .put(timeFormatBytes)
                     .put((byte) 0xFF)
                     .put(resultFormatBytes)
                     .array();
  }
@Override
public String apply(String dimValue)
{
Date date;
try {
date = timeFormatter.parse(dimValue);
}
catch (ParseException e) {
return dimValue;
}
return resultFormatter.format(date);
}
@JsonProperty("timeFormat")
public String getTimeFormat()
{
return timeFormat;
}
@JsonProperty("resultFormat")
public String getResultFormat()
{
return resultFormat;
}
@Override
public boolean preservesOrdering()
{
return false;
}
@Override
public String toString()
{
return "TimeDimExtractionFn{" +
"timeFormat='" + timeFormat + '\'' +
", resultFormat='" + resultFormat + '\'' +
'}';
}
@Override
public boolean equals(Object o)
{
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
TimeDimExtractionFn that = (TimeDimExtractionFn) o;
if (!resultFormat.equals(that.resultFormat)) {
return false;
}
if (!timeFormat.equals(that.timeFormat)) {
return false;
}
return true;
}
@Override
public int hashCode()
{
int result = timeFormat.hashCode();
result = 31 * result + resultFormat.hashCode();
return result;
}
}
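// Hedged usage sketch (not part of the original file): re-buckets a
// "MM/dd/yyyy" dimension value by month; the formats and sample value are
// hypothetical.
class TimeDimExtractionFnUsageSketch {
    public static void main(String[] args) {
        TimeDimExtractionFn fn = new TimeDimExtractionFn("MM/dd/yyyy", "yyyy-MM");
        System.out.println(fn.apply("12/30/2014")); // prints "2014-12"
    }
}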
|
apache-2.0
|
ApocalypsjeNL/OpenAudioMc
|
plugin/src/main/java/com/craftmend/openaudiomc/spigot/modules/show/runnables/CommandRunnable.java
|
1824
|
package com.craftmend.openaudiomc.spigot.modules.show.runnables;
import com.craftmend.openaudiomc.generic.redis.packets.ExecuteCommandPacket;
import com.craftmend.openaudiomc.spigot.OpenAudioMcSpigot;
import com.craftmend.openaudiomc.spigot.modules.show.interfaces.ShowRunnable;
import lombok.AllArgsConstructor;
import lombok.NoArgsConstructor;
import org.bukkit.Bukkit;
import org.bukkit.World;
import org.bukkit.entity.Entity;
@AllArgsConstructor
@NoArgsConstructor
public class CommandRunnable extends ShowRunnable {
private String command;
private String worldName;
@Override
public void prepare(String serialized, World world) {
this.command = serialized;
this.worldName = world.getName();
if (this.command.startsWith("/")) this.command = this.command.replace("/" , "");
}
@Override
public String serialize() {
return command;
}
@Override
public void run() {
if (!isExecutedFromRedis() && !command.toLowerCase().startsWith("oa show")) new ExecuteCommandPacket(command).send();
Bukkit.getScheduler().runTask(OpenAudioMcSpigot.getInstance(), () -> Bukkit.getServer().dispatchCommand(Bukkit.getConsoleSender(), command));
/**
if (worldName == null) {
Bukkit.getScheduler().runTask(OpenAudioMcSpigot.getInstance(), () -> Bukkit.getServer().dispatchCommand(Bukkit.getConsoleSender(), command));
} else {
Entity executor = getExecutorEntity(worldName);
if (executor == null) {
throw new IllegalStateException("There is no entity loaded to execute the show trigger");
}
Bukkit.getScheduler().runTask(OpenAudioMcSpigot.getInstance(), () -> Bukkit.getServer().dispatchCommand(executor, command));
}
**/
}
}
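// Hedged usage sketch (not part of the original file): a runnable is
// prepared from its serialized command text and later executed on the main
// thread; the command string and World instance are hypothetical.
class CommandRunnableUsageSketch {
    static void schedule(World world) {
        CommandRunnable task = new CommandRunnable();
        task.prepare("/say show started", world); // stored as "say show started"
        task.run(); // dispatches via the console sender
    }
}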
|
apache-2.0
|
apaprocki/bde
|
groups/bsl/bslalg/bslalg_bidirectionalnode.t.cpp
|
23154
|
// bslalg_bidirectionalnode.t.cpp -*-C++-*-
#include <bslalg_bidirectionalnode.h>
#include <bslalg_scalardestructionprimitives.h>
#include <bslalg_scalarprimitives.h>
#include <bslma_allocator.h>
#include <bslma_default.h>
#include <bslma_defaultallocatorguard.h>
#include <bslma_testallocator.h>
#include <bslmf_isconst.h>
#include <bsls_asserttest.h>
#include <bsls_bsltestutil.h>
#include <bsls_platform.h>
#include <bsls_types.h>
#include <new>
#include <stdio.h> // 'printf'
#include <stdlib.h> // 'atoi'
#include <string.h>
using namespace BloombergLP;
//=============================================================================
// TEST PLAN
//-----------------------------------------------------------------------------
// Overview
// --------
//
// Global Concerns:
//: o Pointer/reference parameters are declared 'const'.
//: o No memory is ever allocated.
//: o Precondition violations are detected in appropriate build modes.
//-----------------------------------------------------------------------------
// [ 4] BASE CLASS MANIPULATORS AND ACCESSORS
// [ 3] BASIC ACCESSORS
// [ 2] MANIPULATORS: VALUE_TYPE& value();
// [ 2] MANIPULATORS: const VALUE_TYPE& value() const;
// ----------------------------------------------------------------------------
// [ 5] USAGE EXAMPLE
// [ 1] BREATHING TEST
// ============================================================================
// STANDARD BSL ASSERT TEST FUNCTION
// ----------------------------------------------------------------------------
namespace {
int testStatus = 0;
void aSsErT(bool condition, const char *message, int line)
{
if (condition) {
printf("Error " __FILE__ "(%d): %s (failed)\n", line, message);
if (0 <= testStatus && testStatus <= 100) {
++testStatus;
}
}
}
} // close unnamed namespace
// ============================================================================
// STANDARD BSL TEST DRIVER MACRO ABBREVIATIONS
// ----------------------------------------------------------------------------
#define ASSERT BSLS_BSLTESTUTIL_ASSERT
#define ASSERTV BSLS_BSLTESTUTIL_ASSERTV
#define LOOP_ASSERT BSLS_BSLTESTUTIL_LOOP_ASSERT
#define LOOP0_ASSERT BSLS_BSLTESTUTIL_LOOP0_ASSERT
#define LOOP1_ASSERT BSLS_BSLTESTUTIL_LOOP1_ASSERT
#define LOOP2_ASSERT BSLS_BSLTESTUTIL_LOOP2_ASSERT
#define LOOP3_ASSERT BSLS_BSLTESTUTIL_LOOP3_ASSERT
#define LOOP4_ASSERT BSLS_BSLTESTUTIL_LOOP4_ASSERT
#define LOOP5_ASSERT BSLS_BSLTESTUTIL_LOOP5_ASSERT
#define LOOP6_ASSERT BSLS_BSLTESTUTIL_LOOP6_ASSERT
#define Q BSLS_BSLTESTUTIL_Q // Quote identifier literally.
#define P BSLS_BSLTESTUTIL_P // Print identifier and value.
#define P_ BSLS_BSLTESTUTIL_P_ // P(X) without '\n'.
#define T_ BSLS_BSLTESTUTIL_T_ // Print a tab (w/o newline).
#define L_ BSLS_BSLTESTUTIL_L_ // current Line number
#define RUN_EACH_TYPE BSLTF_TEMPLATETESTFACILITY_RUN_EACH_TYPE
// ============================================================================
// NEGATIVE-TEST MACRO ABBREVIATIONS
// ----------------------------------------------------------------------------
#define ASSERT_SAFE_PASS(EXPR) BSLS_ASSERTTEST_ASSERT_SAFE_PASS(EXPR)
#define ASSERT_SAFE_FAIL(EXPR) BSLS_ASSERTTEST_ASSERT_SAFE_FAIL(EXPR)
#define ASSERT_PASS(EXPR) BSLS_ASSERTTEST_ASSERT_PASS(EXPR)
#define ASSERT_FAIL(EXPR) BSLS_ASSERTTEST_ASSERT_FAIL(EXPR)
#define ASSERT_OPT_PASS(EXPR) BSLS_ASSERTTEST_ASSERT_OPT_PASS(EXPR)
#define ASSERT_OPT_FAIL(EXPR) BSLS_ASSERTTEST_ASSERT_OPT_FAIL(EXPR)
//=============================================================================
// GLOBAL HELPER FUNCTIONS FOR TESTING
//-----------------------------------------------------------------------------
//=============================================================================
// GLOBAL TYPEDEFS/CONSTANTS FOR TESTING
//-----------------------------------------------------------------------------
class TestType1 {
// CLASS DATA
static int s_numConstructions;
// DATA
int d_value;
public:
// CLASS METHODS
static int numConstructions() { return s_numConstructions; }
// CREATORS
explicit
TestType1(int i) : d_value(i) { ++s_numConstructions; }
~TestType1() { --s_numConstructions; }
// MANIPULATOR
void set(int i) { d_value = i; }
// ACCESSOR
int get() const { return d_value; }
};
int TestType1::s_numConstructions = 0;
template <class TYPE>
bool isConst(TYPE *)
{
return bsl::is_const<TYPE>::value;
}
//=============================================================================
// USAGE EXAMPLE
//-----------------------------------------------------------------------------
///Usage
///-----
// This section illustrates intended usage of this component.
//
///Example 1: Creating and Using a List Template Class
///- - - - - - - - - - - - - - - - - - - - - - - - - -
// Suppose we want to create a linked list template class called 'MyList'.
//
// First, we create an iterator helper class, which will eventually be defined
// as a nested type within the 'MyList' class.
//..
// ===============
// MyList_Iterator
// ===============
//..
template <class PAYLOAD>
class MyList_Iterator {
// This iterator is used to refer to positions within a list.
// PRIVATE TYPES
typedef bslalg::BidirectionalNode<PAYLOAD> Node;
// DATA
Node *d_node; // Pointer to a node within a list.
// FRIENDS
template <class OTHER_PAYLOAD>
friend bool operator==(MyList_Iterator<OTHER_PAYLOAD>,
MyList_Iterator<OTHER_PAYLOAD>);
public:
// CREATORS
MyList_Iterator() : d_node(0) {}
explicit
MyList_Iterator(Node *node) : d_node(node) {}
//! MyList_Iterator(const MyList_Iterator& original) = default;
//! MyList_Iterator& operator=(const MyList_Iterator& other) = default;
//! ~MyList_Iterator() = default;
// MANIPULATORS
MyList_Iterator operator++();
// ACCESSORS
const PAYLOAD& operator*() const { return d_node->value(); }
};
// ============================================================================
// FREE OPERATORS
// ----------------------------------------------------------------------------
template <class PAYLOAD>
bool operator==(MyList_Iterator<PAYLOAD> lhs,
MyList_Iterator<PAYLOAD> rhs);
template <class PAYLOAD>
bool operator!=(MyList_Iterator<PAYLOAD> lhs,
MyList_Iterator<PAYLOAD> rhs);
//..
// Then, we implement the functions for the iterator type.
//..
// ---------------
// MyList_Iterator
// ---------------
// MANIPULATORS
template <class PAYLOAD>
inline
MyList_Iterator<PAYLOAD> MyList_Iterator<PAYLOAD>::operator++()
{
d_node = static_cast<Node *>(d_node->nextLink());
return *this;
}
template <class PAYLOAD>
inline
bool operator==(MyList_Iterator<PAYLOAD> lhs,
MyList_Iterator<PAYLOAD> rhs)
{
return lhs.d_node == rhs.d_node;
}
template <class PAYLOAD>
inline
bool operator!=(MyList_Iterator<PAYLOAD> lhs,
MyList_Iterator<PAYLOAD> rhs)
{
return !(lhs == rhs);
}
//..
// Next, we define our 'MyList' class, with 'MyList::Iterator' being a public
// typedef of 'MyList_Iterator'. For brevity, we will omit much of the
// functionality that a full, general-purpose list class would have.
//..
// ======
// MyList
// ======
template <class PAYLOAD>
class MyList {
// Doubly-linked list storing objects of type 'PAYLOAD'.
// PRIVATE TYPES
typedef bslalg::BidirectionalNode<PAYLOAD> Node;
public:
// PUBLIC TYPES
typedef PAYLOAD ValueType;
typedef MyList_Iterator<ValueType> Iterator;
// DATA
Node *d_begin; // First node, if any, in the list.
Node *d_end; // Last node, if any, in the list.
bslma::Allocator *d_allocator_p; // Allocator used for allocating and
// freeing nodes.
public:
// CREATORS
explicit
MyList(bslma::Allocator *basicAllocator = 0)
: d_begin(0)
, d_end(0)
, d_allocator_p(bslma::Default::allocator(basicAllocator))
{}
~MyList();
// MANIPULATORS
Iterator begin();
Iterator end();
void pushBack(const ValueType& value);
void popBack();
};
//..
// Then, we implement the functions for the 'MyList' class:
//..
// ------
// MyList
// ------
// CREATORS
template <class PAYLOAD>
MyList<PAYLOAD>::~MyList()
{
for (Node *p = d_begin; p; ) {
Node *toDelete = p;
p = (Node *) p->nextLink();
bslalg::ScalarDestructionPrimitives::destroy(&toDelete->value());
d_allocator_p->deleteObjectRaw(
static_cast<bslalg::BidirectionalLink *>(toDelete));
}
}
// MANIPULATORS
template <class PAYLOAD>
inline
typename MyList<PAYLOAD>::Iterator MyList<PAYLOAD>::begin()
{
return Iterator(d_begin);
}
template <class PAYLOAD>
inline
typename MyList<PAYLOAD>::Iterator MyList<PAYLOAD>::end()
{
return Iterator(0);
}
template <class PAYLOAD>
void MyList<PAYLOAD>::pushBack(const PAYLOAD& value)
{
Node *node = (Node *) d_allocator_p->allocate(sizeof(Node));
node->setNextLink(0);
node->setPreviousLink(d_end);
bslalg::ScalarPrimitives::copyConstruct(&node->value(),
value,
d_allocator_p);
if (d_end) {
BSLS_ASSERT_SAFE(d_begin);
d_end->setNextLink(node);
d_end = node;
}
else {
BSLS_ASSERT_SAFE(0 == d_begin);
d_begin = d_end = node;
}
}
template <class PAYLOAD>
void MyList<PAYLOAD>::popBack()
{
BSLS_ASSERT_SAFE(d_begin && d_end);
Node *toDelete = d_end;
d_end = (Node *) d_end->previousLink();
if (d_begin != toDelete) {
BSLS_ASSERT_SAFE(0 != d_end);
d_end->setNextLink(0);
}
else {
BSLS_ASSERT_SAFE(0 == d_end);
d_begin = 0;
}
bslalg::ScalarDestructionPrimitives::destroy(&toDelete->value());
d_allocator_p->deleteObjectRaw(
static_cast<bslalg::BidirectionalLink *>(toDelete));
}
//..
//=============================================================================
// MAIN PROGRAM
//-----------------------------------------------------------------------------
int main(int argc, char *argv[])
{
int test = argc > 1 ? atoi(argv[1]) : 0;
bool verbose = argc > 2;
bool veryVerbose = argc > 3;
bool veryVeryVerbose = argc > 4;
bool veryVeryVeryVerbose = argc > 5;
(void)veryVerbose; // suppress warning
(void)veryVeryVerbose; // suppress warning
setbuf(stdout, NULL); // Use unbuffered output
printf("TEST " __FILE__ " CASE %d\n", test);
// CONCERN: In no case is memory allocated from the global allocator.
bslma::TestAllocator globalAllocator("global", veryVeryVeryVerbose);
bslma::Default::setGlobalAllocator(&globalAllocator);
switch (test) { case 0:
case 5: {
// --------------------------------------------------------------------
// USAGE EXAMPLE
//
// Concern:
// Demonstrate the usefulness of the 'BidirectionalNode' type.
//
// Plan:
// Use it to build a linked list.
// --------------------------------------------------------------------
if (verbose) printf("USAGE EXAMPLE\n"
"=============\n");
//..
// Next, in 'main', we use our 'MyList' class to store a list of ints:
//..
MyList<int> intList;
//..
// Then, we declare an array of ints to populate it with:
//..
int intArray[] = { 8, 2, 3, 5, 7, 2 };
enum { NUM_INTS = sizeof intArray / sizeof *intArray };
//..
// Now, we iterate, pushing ints to the list:
//..
for (const int *pInt = intArray; pInt != intArray + NUM_INTS; ++pInt) {
intList.pushBack(*pInt);
}
//..
// Finally, we use our 'Iterator' type to traverse the list and observe its
// values:
//..
MyList<int>::Iterator it = intList.begin();
ASSERT(8 == *it);
ASSERT(2 == *++it);
ASSERT(3 == *++it);
ASSERT(5 == *++it);
ASSERT(7 == *++it);
ASSERT(2 == *++it);
ASSERT(intList.end() == ++it);
} break;
case 4: {
// --------------------------------------------------------------------
// TESTING BASE CLASS MANIPULATORS AND ACCESSORS
//
// Concerns:
//: 1 That the base class manipulators and accessors are accessible
//: (not private or protected inheritance).
//:
//: 2 That the base class accessors are const methods.
//
// Plan:
//: 1 Create an object and a const reference to it. Manipulate the
//: object with the accessors using the non-const object, and observe
//: it via the accessors using the const object.
// --------------------------------------------------------------------
if (verbose) printf(
"\nTESTING BASE CLASS MANIPULATORS AND ACCESSORS"
"\n=============================================\n");
bslma::TestAllocator da("default");
bslma::TestAllocator oa("object");
bslma::DefaultAllocatorGuard defaultGuard(&da);
{
typedef bslalg::BidirectionalNode<int> Obj;
Obj * const K1 = (Obj *) 0xaddc0c0a;
Obj * const K2 = (Obj *) 0xbaddeed5;
Obj * const K3 = (Obj *) 0x50fabed5;
Obj * const K4 = (Obj *) 0x5eaf00d;
const int KA_INT = 0xa0a0a0a0;
#ifdef BSLS_PLATFORM_CPU_32_BIT
Obj * const KA = (Obj *) KA_INT;
#else
bsls::Types::Uint64 KA_UNSIGNED = 0xa0a0a0a0;
Obj * const KA = (Obj *) ((KA_UNSIGNED << 32) | KA_UNSIGNED);
#endif
Obj *xPtr = (Obj *) oa.allocate(sizeof(Obj));
Obj& mX = *xPtr; const Obj& X = mX;
memset(xPtr, 0xa0, sizeof(mX));
ASSERT(KA_INT == X.value());
ASSERT(0 != X.previousLink());
ASSERT(0 != X.nextLink());
ASSERTV((void *) KA, KA == X.previousLink());
ASSERT(KA == X.nextLink());
mX.reset();
ASSERT(KA_INT == X.value());// 'reset' affected base class only
ASSERT(0 == X.previousLink());
ASSERT(0 == X.nextLink());
mX.setPreviousLink(K1);
mX.setNextLink( K2);
mX.value() = 5;
ASSERT(K1 == X.previousLink());
ASSERT(K2 == X.nextLink());
ASSERT(5 == X.value());
mX.value() = -1776;
mX.setNextLink( K3);
mX.setPreviousLink(K4);
ASSERT(-1776 == X.value());
ASSERT(K3 == X.nextLink());
ASSERT(K4 == X.previousLink());
oa.deallocate(&mX);
}
} break;
case 3: {
// --------------------------------------------------------------------
// TESTING BASIC ACCESSORS
// Ensure each basic accessor properly interprets object state.
//
// Concerns:
//: 1 Each accessor returns the value of the corresponding attribute
//: of the object.
//:
//: 2 Each accessor method is declared 'const'.
//
// Plan:
//: 1 Using the manipulators, set the object to the desired state,
//: and observe the state from the 'const' accessors.
// --------------------------------------------------------------------
if (verbose) printf("\nTESTING BASIC ACCESSORS"
"\n=======================\n");
bslma::TestAllocator da("default");
bslma::TestAllocator oa("object");
bslma::DefaultAllocatorGuard defaultGuard(&da);
static struct {
int d_line;
int d_value;
} DATA[] = {
{ L_, 0 },
{ L_, 1 },
{ L_, -1 },
{ L_, 56789 },
{ L_, -98765 } };
enum { NUM_DATA = sizeof DATA / sizeof *DATA };
if (verbose) printf("Table-driven test, re-using object\n");
{
typedef bslalg::BidirectionalNode<int> Obj;
Obj *xPtr = (Obj *) oa.allocate(sizeof(Obj));
Obj& mX = *xPtr; const Obj& X = mX;
for (int i = 0; i < NUM_DATA; ++i) {
const int LINE = DATA[i].d_line;
const int VALUE = DATA[i].d_value;
mX.value() = VALUE;
ASSERT(1 == isConst(&X.value()));
ASSERTV(LINE, VALUE == X.value());
}
oa.deallocate(xPtr);
}
} break;
case 2: {
// --------------------------------------------------------------------
// TESTING PRIMARY MANIPULATORS
//
// Concerns:
//: 1 Manipulators can set value.
//:
//: 2 Accessor return value set by manipulator.
//:
//: 3 Accessor is declared const.
//
// Plan:
//: 1 Create a 'BidirectionalNode' with 'VALUE_TYPE' as 'int' and set
//: 'value' distinct numbers. Verify the values are set with the
//: accessor.
//:
//: 2 Create a 'BidirectionalNode' with a type that has a constructor
//: that can be verified if it has been invoked. Verify that the
//: constructor is invoked when 'allocator_traits::construct' is
//: used.
//
// Testing:
// VALUE_TYPE& value();
// const VALUE_TYPE& value() const;
// --------------------------------------------------------------------
if (verbose) printf("\nTESTING PRIMARY MANIPULATORS"
"\n============================\n");
bslma::TestAllocator da("default");
bslma::TestAllocator oa("object");
bslma::DefaultAllocatorGuard defaultGuard(&da);
if (verbose) printf("\nTesting for payload of 'int'.\n");
{
typedef bslalg::BidirectionalNode<int> Obj;
typedef Obj::ValueType VT;
Obj *xPtr = (Obj *) oa.allocate(sizeof(Obj));
Obj& mX = *xPtr; const Obj& X = mX;
::new (&xPtr->value()) VT(7);
ASSERTV(X.value(), 7 == X.value());
mX.value() = 5;
ASSERTV(X.value(), 5 == X.value());
mX.value() = 21;
ASSERTV(X.value(), 21 == X.value());
mX.value() = -3;
ASSERTV(X.value(), -3 == X.value());
ASSERTV(0 == da.numBlocksTotal());
ASSERTV(1 == oa.numBlocksInUse());
mX.value().~VT();
oa.deallocate(xPtr);
ASSERTV(0 == oa.numBlocksInUse());
}
if (verbose) printf("\nTesting for payload of 'TestType1'.\n");
{
typedef bslalg::BidirectionalNode<TestType1> Obj;
typedef Obj::ValueType VT;
Obj *xPtr = (Obj *) oa.allocate(sizeof(Obj));
Obj& mX = *xPtr; const Obj& X = mX;
::new (&xPtr->value()) Obj::ValueType(7);
ASSERTV(1 == TestType1::numConstructions());
ASSERTV(X.value().get(), 7 == X.value().get());
mX.value().set(5);
ASSERTV(X.value().get(), 5 == X.value().get());
mX.value().set(21);
ASSERTV(X.value().get(), 21 == X.value().get());
mX.value().set(-3);
ASSERTV(X.value().get(), -3 == X.value().get());
ASSERTV(0 == da.numBlocksTotal());
ASSERTV(1 == oa.numBlocksInUse());
mX.value().~VT();
oa.deallocate(&mX);
ASSERTV(0 == oa.numBlocksInUse());
ASSERTV(0 == TestType1::numConstructions());
}
} break;
case 1: {
// --------------------------------------------------------------------
// BREATHING TEST
// This case exercises (but does not fully test) basic functionality.
//
// Concerns:
//: 1 The class is sufficiently functional to enable comprehensive
//: testing in subsequent test cases.
//
// Plan:
        //: 1 Perform an ad-hoc test of the primary modifiers and accessors.
//
// Testing:
// BREATHING TEST
// --------------------------------------------------------------------
if (verbose) printf("\nBREATHING TEST"
"\n==============\n");
bslma::TestAllocator da("default");
bslma::DefaultAllocatorGuard defaultGuard(&da);
        typedef bslalg::BidirectionalNode<int> Obj;
        Obj *xPtr = static_cast<Obj *>(da.allocate(sizeof(Obj)));
Obj& mX = *xPtr; const Obj& X = mX;
mX.value() = 0;
ASSERTV(X.value(), 0 == X.value());
mX.value() = 1;
ASSERTV(X.value(), 1 == X.value());
da.deallocate(&mX);
ASSERTV(0 == da.numBytesInUse());
} break;
default: {
fprintf(stderr, "WARNING: CASE `%d' NOT FOUND.\n", test);
testStatus = -1;
}
}
ASSERTV(0 == TestType1::numConstructions());
// CONCERN: In no case is memory allocated from the global allocator.
ASSERTV(globalAllocator.numBlocksTotal(),
0 == globalAllocator.numBlocksTotal());
if (testStatus > 0) {
fprintf(stderr, "Error, non-zero test status = %d.\n", testStatus);
}
return testStatus;
}
// ----------------------------------------------------------------------------
// Copyright 2013 Bloomberg Finance L.P.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// ----------------------------- END-OF-FILE ----------------------------------
|
apache-2.0
|
limoncello-php/framework
|
components/Passport/tests/Adaptors/PostgreSql/TokenRepositoryTest.php
|
3377
|
<?php declare(strict_types=1);
namespace Limoncello\Tests\Passport\Adaptors\PostgreSql;
/**
* Copyright 2015-2019 info@neomerx.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
use DateTimeImmutable;
use Doctrine\DBAL\Connection;
use Doctrine\DBAL\Types\Type;
use Exception;
use Limoncello\Passport\Adaptors\PostgreSql\TokenRepository;
use Limoncello\Passport\Contracts\Entities\DatabaseSchemaInterface;
use Limoncello\Passport\Entities\DatabaseSchema;
/**
* @package Limoncello\Tests\Passport
*/
class TokenRepositoryTest extends TestCase
{
const TEST_TOKEN_VALUE = 'some_token';
/**
* Test read passport.
*
* @throws Exception
*/
public function testReadPassport()
{
$connection = $this->createConnection();
$schema = new DatabaseSchema('users_table', 'id_user');
$this->preparePassportTable($connection, $schema);
/** @var Connection $connection */
/** @var DatabaseSchemaInterface $schema */
$repository = new TokenRepository($connection, $schema);
$this->assertNotEmpty($repository->readPassport(self::TEST_TOKEN_VALUE, 3600));
}
/**
* Emulate database problems.
*
* @expectedException \Limoncello\Passport\Exceptions\RepositoryException
*
* @throws Exception
*/
public function testReadPassportFromBadDatabase()
{
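        // Note that no passport view/table is prepared here, so the read below
        // hits a missing relation and the repository is expected to wrap the
        // driver error in a RepositoryException (see @expectedException above).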
$connection = $this->createConnection();
$schema = new DatabaseSchema('users_table', 'id_user');
$repository = new TokenRepository($connection, $schema);
$this->assertNotEmpty($repository->readPassport(self::TEST_TOKEN_VALUE, 3600));
}
/**
* @param Connection $connection
* @param DatabaseSchema $schema
*
* @return void
*
* @throws Exception
*/
private function preparePassportTable(Connection $connection, DatabaseSchema $schema)
{
// emulate view with table
$types = [
$schema->getTokensIdentityColumn() => Type::INTEGER,
$schema->getTokensValueColumn() => Type::STRING,
$schema->getTokensViewScopesColumn() => Type::STRING,
$schema->getTokensIsEnabledColumn() => Type::BOOLEAN,
$schema->getTokensValueCreatedAtColumn() => Type::DATETIME,
];
$data = [
$schema->getTokensIdentityColumn() => 1,
$schema->getTokensValueColumn() => self::TEST_TOKEN_VALUE,
$schema->getTokensViewScopesColumn() => '{one,two,three}',
$schema->getTokensIsEnabledColumn() => true,
$schema->getTokensValueCreatedAtColumn() => new DateTimeImmutable(),
];
$this->createTable($connection, $schema->getPassportView(), $types);
$connection->insert($schema->getPassportView(), $data, $types);
}
}
|
apache-2.0
|
kikov79/scalr
|
app/src/Scalr/Tests/Fixtures/Util/Api/TestMutator.php
|
612
|
<?php
namespace Scalr\Tests\Fixtures\Util\Api;
use Scalr\System\Config\Yaml;
use Scalr\Util\Api\SpecMutator;
/**
* Mutator Test
*
* @author N.V.
*/
class TestMutator extends SpecMutator
{
private $modifications;
public function __construct(array $modifications)
{
$this->modifications = $modifications;
}
/**
* {@inheritdoc}
* @see SpecMutator::apply()
*/
public function apply(Yaml $config, $version)
{
foreach ($this->modifications as $modification) {
call_user_func_array([$this, 'removeItem'], $modification);
}
}
}
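// Usage sketch (illustrative argument shapes, assuming SpecMutator::removeItem()
// accepts a path of keys): each entry in the constructor's $modifications array
// is forwarded as an argument list, e.g.
//   $mutator = new TestMutator([['basePath'], ['definitions', 'Role']]);
//   $mutator->apply($config, $version); // removes those items from the spec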
|
apache-2.0
|
zzcclp/carbondata
|
integration/spark/src/main/scala/org/apache/carbondata/events/AlterTableEvents.scala
|
8003
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.events
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.catalyst.catalog.CatalogTypes.TablePartitionSpec
import org.apache.spark.sql.execution.command._
import org.apache.carbondata.core.metadata.schema.table.CarbonTable
import org.apache.carbondata.processing.loading.model.CarbonLoadModel
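// Usage sketch (assumed listener plumbing, not defined in this file): these case
// classes are typically fired through CarbonData's OperationListenerBus around the
// corresponding ALTER TABLE step, e.g. something like
//   OperationListenerBus.getInstance().fireEvent(
//     AlterTableRenamePreEvent(carbonTable, renameModel, newTablePath, sparkSession),
//     operationContext)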
/**
*
* @param carbonTable
* @param alterTableDropColumnModel
* @param sparkSession
*/
case class AlterTableDropColumnPreEvent(
carbonTable: CarbonTable,
alterTableDropColumnModel: AlterTableDropColumnModel,
sparkSession: SparkSession) extends Event with AlterTableDropColumnEventInfo
/**
 * Class for handling cleanup and aborting the operation in case of any failure
*
* @param carbonTable
* @param alterTableDataTypeChangeModel
*/
case class AlterTableColRenameAndDataTypeChangePreEvent(
sparkSession: SparkSession,
carbonTable: CarbonTable,
alterTableDataTypeChangeModel: AlterTableDataTypeChangeModel)
extends Event with AlterTableDataTypeChangeEventInfo
/**
 * Class for handling cleanup and aborting the operation in case of any failure
*
* @param carbonTable
* @param alterTableDataTypeChangeModel
*/
case class AlterTableColRenameAndDataTypeChangePostEvent(
sparkSession: SparkSession,
carbonTable: CarbonTable,
alterTableDataTypeChangeModel: AlterTableDataTypeChangeModel)
extends Event with AlterTableDataTypeChangeEventInfo
/**
*
* @param carbonTable
* @param alterTableDropColumnModel
* @param sparkSession
*/
case class AlterTableDropColumnPostEvent(
carbonTable: CarbonTable,
alterTableDropColumnModel: AlterTableDropColumnModel,
sparkSession: SparkSession) extends Event with AlterTableDropColumnEventInfo
/**
*
* @param carbonTable
* @param alterTableDropColumnModel
* @param sparkSession
*/
case class AlterTableDropColumnAbortEvent(
carbonTable: CarbonTable,
alterTableDropColumnModel: AlterTableDropColumnModel,
sparkSession: SparkSession) extends Event with AlterTableDropColumnEventInfo
/**
*
* @param carbonTable
* @param alterTableRenameModel
* @param newTablePath
* @param sparkSession
*/
case class AlterTableRenamePreEvent(
carbonTable: CarbonTable,
alterTableRenameModel: AlterTableRenameModel, newTablePath: String,
sparkSession: SparkSession) extends Event with AlterTableRenameEventInfo
/**
*
* @param carbonTable
* @param alterTableAddColumnsModel
*/
case class AlterTableAddColumnPreEvent(
sparkSession: SparkSession,
carbonTable: CarbonTable,
alterTableAddColumnsModel: AlterTableAddColumnsModel)
extends Event with AlterTableAddColumnEventInfo
/**
*
* @param carbonTable
* @param alterTableAddColumnsModel
*/
case class AlterTableAddColumnPostEvent(
sparkSession: SparkSession,
carbonTable: CarbonTable,
alterTableAddColumnsModel: AlterTableAddColumnsModel)
extends Event with AlterTableAddColumnEventInfo
/**
*
* @param carbonTable
* @param alterTableRenameModel
* @param newTablePath
* @param sparkSession
*/
case class AlterTableRenamePostEvent(
carbonTable: CarbonTable,
alterTableRenameModel: AlterTableRenameModel, newTablePath: String,
sparkSession: SparkSession) extends Event with AlterTableRenameEventInfo
/**
*
* @param carbonTable
* @param alterTableRenameModel
* @param newTablePath
* @param sparkSession
*/
case class AlterTableRenameAbortEvent(
carbonTable: CarbonTable,
alterTableRenameModel: AlterTableRenameModel, newTablePath: String,
sparkSession: SparkSession) extends Event with AlterTableRenameEventInfo
/**
 * Event for handling pre-compaction operations; a listener has to implement this event for pre-execution handling
*
* @param sparkSession
* @param carbonTable
*/
case class AlterTableCompactionPreEvent(sparkSession: SparkSession,
carbonTable: CarbonTable,
carbonMergerMapping: CarbonMergerMapping,
mergedLoadName: String) extends Event with AlterTableCompactionEventInfo
/**
 * Compaction Event for handling pre-update status file operations; a listener has to implement
 * this event before the table status file is updated
* @param sparkSession
* @param carbonTable
* @param carbonMergerMapping
*/
case class AlterTableCompactionPostEvent(sparkSession: SparkSession,
carbonTable: CarbonTable,
carbonMergerMapping: CarbonMergerMapping,
compactedLoads: java.util.List[String]) extends Event with AlterTableCompactionEventInfo
/**
 * Compaction Event for handling pre-update status file operations; a listener has to implement
 * this event before the table status file is updated
* @param sparkSession
* @param carbonTable
* @param carbonMergerMapping
* @param carbonLoadModel
* @param mergedLoadName
*/
case class AlterTableCompactionPreStatusUpdateEvent(sparkSession: SparkSession,
carbonTable: CarbonTable,
carbonMergerMapping: CarbonMergerMapping,
carbonLoadModel: CarbonLoadModel,
mergedLoadName: String) extends Event with AlterTableCompactionStatusUpdateEventInfo
/**
* Compaction Event for handling post update status file operations, like committing child
* indexes in one transaction
*/
case class AlterTableCompactionPostStatusUpdateEvent(sparkSession: SparkSession,
carbonTable: CarbonTable,
carbonMergerMapping: CarbonMergerMapping,
carbonLoadModel: CarbonLoadModel,
mergedLoadName: String) extends Event with AlterTableCompactionStatusUpdateEventInfo
/**
 * Compaction Event for handling cleanup and aborting the operation in case of any compaction
 * failure; a listener has to implement this event to handle failure scenarios
*
* @param sparkSession
* @param carbonTable
* @param alterTableModel
*/
case class AlterTableCompactionAbortEvent(sparkSession: SparkSession,
carbonTable: CarbonTable,
alterTableModel: AlterTableModel) extends Event with AlterTableCompactionEventInfo
/**
* Compaction Event for handling merge index in alter DDL
*
* @param sparkSession spark session
* @param carbonTable carbon table
* @param alterTableModel alter request
*/
case class AlterTableMergeIndexEvent(sparkSession: SparkSession,
carbonTable: CarbonTable,
alterTableModel: AlterTableModel) extends Event with AlterTableCompactionEventInfo
/**
* pre event for standard hive partition
* @param sparkSession
* @param carbonTable
*/
case class PreAlterTableHivePartitionCommandEvent(sparkSession: SparkSession,
carbonTable: CarbonTable) extends Event with AlterTableHivePartitionInfo
/**
* post event for standard hive partition
* @param sparkSession
* @param carbonTable
*/
case class PostAlterTableHivePartitionCommandEvent(sparkSession: SparkSession,
carbonTable: CarbonTable) extends Event with AlterTableHivePartitionInfo
case class AlterTableDropPartitionMetaEvent(parentCarbonTable: CarbonTable,
specs: Seq[TablePartitionSpec],
ifExists: Boolean,
purge: Boolean,
retainData: Boolean,
sparkSession: SparkSession)
extends Event with AlterTableDropPartitionEventInfo
case class AlterTableDropPartitionPreStatusEvent(carbonTable: CarbonTable,
sparkSession: SparkSession) extends Event
case class AlterTableDropPartitionPostStatusEvent(carbonTable: CarbonTable) extends Event
|
apache-2.0
|
11xor6/presto
|
lib/trino-memory-context/src/main/java/io/trino/memory/context/SimpleLocalMemoryContext.java
|
3462
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.memory.context;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import javax.annotation.concurrent.GuardedBy;
import javax.annotation.concurrent.ThreadSafe;
import static com.google.common.base.MoreObjects.toStringHelper;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.base.Verify.verify;
import static java.util.Objects.requireNonNull;
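// Usage sketch (assumed parent API): a context is normally obtained from an
// AggregatedMemoryContext rather than constructed directly, e.g.
//   LocalMemoryContext ctx = aggregatedContext.newLocalMemoryContext("scan");
//   ListenableFuture<?> blocked = ctx.setBytes(1024); // may signal back-pressure
//   ctx.close();                                      // returns the reservation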
@ThreadSafe
public final class SimpleLocalMemoryContext
implements LocalMemoryContext
{
private static final ListenableFuture<?> NOT_BLOCKED = Futures.immediateFuture(null);
private final AbstractAggregatedMemoryContext parentMemoryContext;
private final String allocationTag;
@GuardedBy("this")
private long usedBytes;
@GuardedBy("this")
private boolean closed;
public SimpleLocalMemoryContext(AggregatedMemoryContext parentMemoryContext, String allocationTag)
{
verify(parentMemoryContext instanceof AbstractAggregatedMemoryContext);
this.parentMemoryContext = (AbstractAggregatedMemoryContext) requireNonNull(parentMemoryContext, "parentMemoryContext is null");
this.allocationTag = requireNonNull(allocationTag, "allocationTag is null");
}
@Override
public synchronized long getBytes()
{
return usedBytes;
}
@Override
public synchronized ListenableFuture<?> setBytes(long bytes)
{
checkState(!closed, "SimpleLocalMemoryContext is already closed");
checkArgument(bytes >= 0, "bytes cannot be negative");
if (bytes == usedBytes) {
return NOT_BLOCKED;
}
// update the parent first as it may throw a runtime exception (e.g., ExceededMemoryLimitException)
ListenableFuture<?> future = parentMemoryContext.updateBytes(allocationTag, bytes - usedBytes);
usedBytes = bytes;
return future;
}
@Override
public synchronized boolean trySetBytes(long bytes)
{
checkState(!closed, "SimpleLocalMemoryContext is already closed");
checkArgument(bytes >= 0, "bytes cannot be negative");
long delta = bytes - usedBytes;
if (parentMemoryContext.tryUpdateBytes(allocationTag, delta)) {
usedBytes = bytes;
return true;
}
return false;
}
@Override
public synchronized void close()
{
if (closed) {
return;
}
closed = true;
parentMemoryContext.updateBytes(allocationTag, -usedBytes);
usedBytes = 0;
}
@Override
public synchronized String toString()
{
return toStringHelper(this)
.add("allocationTag", allocationTag)
.add("usedBytes", usedBytes)
.toString();
}
}
|
apache-2.0
|
tgroh/incubator-beam
|
runners/core-construction-java/src/main/java/org/apache/beam/runners/core/construction/graph/FusedPipeline.java
|
5050
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.runners.core.construction.graph;
import com.google.auto.value.AutoValue;
import com.google.common.collect.Sets;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;
import org.apache.beam.model.pipeline.v1.RunnerApi;
import org.apache.beam.model.pipeline.v1.RunnerApi.Components;
import org.apache.beam.model.pipeline.v1.RunnerApi.PTransform;
import org.apache.beam.model.pipeline.v1.RunnerApi.Pipeline;
import org.apache.beam.runners.core.construction.SyntheticComponents;
import org.apache.beam.runners.core.construction.graph.PipelineNode.PTransformNode;
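// Usage sketch (assumed entry point): a FusedPipeline is normally produced by the
// greedy fuser rather than assembled by hand, e.g. something like
//   FusedPipeline fused = GreedyPipelineFuser.fuse(pipelineProto);
//   RunnerApi.Pipeline runnable = fused.toPipeline();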
/** A {@link Pipeline} which has been separated into collections of executable components. */
@AutoValue
public abstract class FusedPipeline {
static FusedPipeline of(
Components components,
Set<ExecutableStage> environmentalStages,
Set<PTransformNode> runnerStages) {
return new AutoValue_FusedPipeline(components, environmentalStages, runnerStages);
}
abstract Components getComponents();
/** The {@link ExecutableStage executable stages} that are executed by SDK harnesses. */
public abstract Set<ExecutableStage> getFusedStages();
/** The {@link PTransform PTransforms} that a runner is responsible for executing. */
public abstract Set<PTransformNode> getRunnerExecutedTransforms();
/**
* Returns the {@link RunnerApi.Pipeline} representation of this {@link FusedPipeline}.
*
* <p>The {@link Components} of the returned pipeline will contain all of the {@link PTransform
* PTransforms} present in the original Pipeline that this {@link FusedPipeline} was created from,
* plus all of the {@link ExecutableStage ExecutableStages} contained within this {@link
* FusedPipeline}. The {@link Pipeline#getRootTransformIdsList()} will contain all of the runner
* executed transforms and all of the {@link ExecutableStage execuable stages} contained within
* the Pipeline.
*/
public RunnerApi.Pipeline toPipeline() {
Map<String, PTransform> executableStageTransforms = getEnvironmentExecutedTransforms();
Set<String> executableTransformIds =
Sets.union(
executableStageTransforms.keySet(),
getRunnerExecutedTransforms()
.stream()
.map(PTransformNode::getId)
.collect(Collectors.toSet()));
// Augment the initial transforms with all of the executable transforms.
Components fusedComponents =
getComponents().toBuilder().putAllTransforms(executableStageTransforms).build();
List<String> rootTransformIds =
StreamSupport.stream(
QueryablePipeline.forTransforms(executableTransformIds, fusedComponents)
.getTopologicallyOrderedTransforms()
.spliterator(),
false)
.map(PTransformNode::getId)
.collect(Collectors.toList());
return Pipeline.newBuilder()
.setComponents(fusedComponents)
.addAllRootTransformIds(rootTransformIds)
.build();
}
/**
* Return a map of IDs to {@link PTransform} which are executed by an SDK Harness.
*
* <p>The transforms that are present in the returned map are the {@link RunnerApi.PTransform}
* versions of the {@link ExecutableStage ExecutableStages} returned in {@link #getFusedStages()}.
* The IDs of the returned transforms will not collide with any transform ID present in {@link
* #getComponents()}.
*/
private Map<String, PTransform> getEnvironmentExecutedTransforms() {
Map<String, PTransform> topLevelTransforms = new HashMap<>();
for (ExecutableStage stage : getFusedStages()) {
String baseName =
String.format(
"%s/%s",
stage.getInputPCollection().getPCollection().getUniqueName(),
stage.getEnvironment().getUrl());
Set<String> usedNames =
Sets.union(topLevelTransforms.keySet(), getComponents().getTransformsMap().keySet());
topLevelTransforms.put(
SyntheticComponents.uniqueId(baseName, usedNames::contains), stage.toPTransform());
}
return topLevelTransforms;
}
}
|
apache-2.0
|
DiamantiCom/kubernetes
|
pkg/apis/core/fuzzer/fuzzer.go
|
16962
|
/*
Copyright 2017 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package fuzzer
import (
"reflect"
"strconv"
"time"
fuzz "github.com/google/gofuzz"
corev1 "k8s.io/api/core/v1"
"k8s.io/apimachinery/pkg/api/resource"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/apimachinery/pkg/runtime/schema"
runtimeserializer "k8s.io/apimachinery/pkg/runtime/serializer"
"k8s.io/apimachinery/pkg/util/intstr"
"k8s.io/kubernetes/pkg/apis/core"
)
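// Usage sketch (assumed harness): these functions are typically merged into a
// round-trip fuzzer for serialization tests, e.g. something like
//
//	f := fuzzer.FuzzerFor(Funcs, rand.NewSource(seed), codecs)
//	f.Fuzz(&pod)
//
// where FuzzerFor comes from k8s.io/apimachinery's apitesting/fuzzer package.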
// Funcs returns the fuzzer functions for the core group.
var Funcs = func(codecs runtimeserializer.CodecFactory) []interface{} {
return []interface{}{
func(q *resource.Quantity, c fuzz.Continue) {
*q = *resource.NewQuantity(c.Int63n(1000), resource.DecimalExponent)
},
func(j *core.ObjectReference, c fuzz.Continue) {
// We have to customize the randomization of TypeMetas because their
// APIVersion and Kind must remain blank in memory.
j.APIVersion = c.RandString()
j.Kind = c.RandString()
j.Namespace = c.RandString()
j.Name = c.RandString()
j.ResourceVersion = strconv.FormatUint(c.RandUint64(), 10)
j.FieldPath = c.RandString()
},
func(j *core.PodExecOptions, c fuzz.Continue) {
j.Stdout = true
j.Stderr = true
},
func(j *core.PodAttachOptions, c fuzz.Continue) {
j.Stdout = true
j.Stderr = true
},
func(j *core.PodPortForwardOptions, c fuzz.Continue) {
if c.RandBool() {
j.Ports = make([]int32, c.Intn(10))
for i := range j.Ports {
j.Ports[i] = c.Int31n(65535)
}
}
},
func(s *core.PodSpec, c fuzz.Continue) {
c.FuzzNoCustom(s)
// has a default value
ttl := int64(30)
if c.RandBool() {
ttl = int64(c.Uint32())
}
s.TerminationGracePeriodSeconds = &ttl
c.Fuzz(s.SecurityContext)
if s.SecurityContext == nil {
s.SecurityContext = new(core.PodSecurityContext)
}
if s.Affinity == nil {
s.Affinity = new(core.Affinity)
}
if s.SchedulerName == "" {
s.SchedulerName = core.DefaultSchedulerName
}
if s.EnableServiceLinks == nil {
enableServiceLinks := corev1.DefaultEnableServiceLinks
s.EnableServiceLinks = &enableServiceLinks
}
},
func(j *core.PodPhase, c fuzz.Continue) {
statuses := []core.PodPhase{core.PodPending, core.PodRunning, core.PodFailed, core.PodUnknown}
*j = statuses[c.Rand.Intn(len(statuses))]
},
func(j *core.Binding, c fuzz.Continue) {
c.Fuzz(&j.ObjectMeta)
j.Target.Name = c.RandString()
},
func(j *core.ReplicationController, c fuzz.Continue) {
c.FuzzNoCustom(j)
// match defaulting
if j.Spec.Template != nil {
if len(j.Labels) == 0 {
j.Labels = j.Spec.Template.Labels
}
if len(j.Spec.Selector) == 0 {
j.Spec.Selector = j.Spec.Template.Labels
}
}
},
func(j *core.ReplicationControllerSpec, c fuzz.Continue) {
c.FuzzNoCustom(j) // fuzz self without calling this function again
//j.TemplateRef = nil // this is required for round trip
},
func(j *core.List, c fuzz.Continue) {
c.FuzzNoCustom(j) // fuzz self without calling this function again
// TODO: uncomment when round trip starts from a versioned object
if false { //j.Items == nil {
j.Items = []runtime.Object{}
}
},
func(q *core.ResourceRequirements, c fuzz.Continue) {
randomQuantity := func() resource.Quantity {
var q resource.Quantity
c.Fuzz(&q)
// precalc the string for benchmarking purposes
_ = q.String()
return q
}
q.Limits = make(core.ResourceList)
q.Requests = make(core.ResourceList)
cpuLimit := randomQuantity()
q.Limits[core.ResourceCPU] = cpuLimit.DeepCopy()
q.Requests[core.ResourceCPU] = cpuLimit.DeepCopy()
memoryLimit := randomQuantity()
q.Limits[core.ResourceMemory] = memoryLimit.DeepCopy()
q.Requests[core.ResourceMemory] = memoryLimit.DeepCopy()
storageLimit := randomQuantity()
q.Limits[core.ResourceStorage] = storageLimit.DeepCopy()
q.Requests[core.ResourceStorage] = storageLimit.DeepCopy()
},
func(q *core.LimitRangeItem, c fuzz.Continue) {
var cpuLimit resource.Quantity
c.Fuzz(&cpuLimit)
q.Type = core.LimitTypeContainer
q.Default = make(core.ResourceList)
q.Default[core.ResourceCPU] = cpuLimit.DeepCopy()
q.DefaultRequest = make(core.ResourceList)
q.DefaultRequest[core.ResourceCPU] = cpuLimit.DeepCopy()
q.Max = make(core.ResourceList)
q.Max[core.ResourceCPU] = cpuLimit.DeepCopy()
q.Min = make(core.ResourceList)
q.Min[core.ResourceCPU] = cpuLimit.DeepCopy()
q.MaxLimitRequestRatio = make(core.ResourceList)
q.MaxLimitRequestRatio[core.ResourceCPU] = resource.MustParse("10")
},
func(p *core.PullPolicy, c fuzz.Continue) {
policies := []core.PullPolicy{core.PullAlways, core.PullNever, core.PullIfNotPresent}
*p = policies[c.Rand.Intn(len(policies))]
},
func(rp *core.RestartPolicy, c fuzz.Continue) {
policies := []core.RestartPolicy{core.RestartPolicyAlways, core.RestartPolicyNever, core.RestartPolicyOnFailure}
*rp = policies[c.Rand.Intn(len(policies))]
},
// core.DownwardAPIVolumeFile needs to have a specific func since FieldRef has to be
// defaulted to a version otherwise roundtrip will fail
func(m *core.DownwardAPIVolumeFile, c fuzz.Continue) {
m.Path = c.RandString()
versions := []string{"v1"}
m.FieldRef = &core.ObjectFieldSelector{}
m.FieldRef.APIVersion = versions[c.Rand.Intn(len(versions))]
m.FieldRef.FieldPath = c.RandString()
c.Fuzz(m.Mode)
if m.Mode != nil {
*m.Mode &= 0777
}
},
func(s *core.SecretVolumeSource, c fuzz.Continue) {
c.FuzzNoCustom(s) // fuzz self without calling this function again
if c.RandBool() {
opt := c.RandBool()
s.Optional = &opt
}
// DefaultMode should always be set, it has a default
// value and it is expected to be between 0 and 0777
var mode int32
c.Fuzz(&mode)
mode &= 0777
s.DefaultMode = &mode
},
func(cm *core.ConfigMapVolumeSource, c fuzz.Continue) {
c.FuzzNoCustom(cm) // fuzz self without calling this function again
if c.RandBool() {
opt := c.RandBool()
cm.Optional = &opt
}
// DefaultMode should always be set, it has a default
// value and it is expected to be between 0 and 0777
var mode int32
c.Fuzz(&mode)
mode &= 0777
cm.DefaultMode = &mode
},
func(d *core.DownwardAPIVolumeSource, c fuzz.Continue) {
c.FuzzNoCustom(d) // fuzz self without calling this function again
// DefaultMode should always be set, it has a default
// value and it is expected to be between 0 and 0777
var mode int32
c.Fuzz(&mode)
mode &= 0777
d.DefaultMode = &mode
},
func(s *core.ProjectedVolumeSource, c fuzz.Continue) {
c.FuzzNoCustom(s) // fuzz self without calling this function again
// DefaultMode should always be set, it has a default
// value and it is expected to be between 0 and 0777
var mode int32
c.Fuzz(&mode)
mode &= 0777
s.DefaultMode = &mode
},
func(k *core.KeyToPath, c fuzz.Continue) {
c.FuzzNoCustom(k) // fuzz self without calling this function again
k.Key = c.RandString()
k.Path = c.RandString()
// Mode is not mandatory, but if it is set, it should be
// a value between 0 and 0777
if k.Mode != nil {
*k.Mode &= 0777
}
},
func(vs *core.VolumeSource, c fuzz.Continue) {
// Exactly one of the fields must be set.
v := reflect.ValueOf(vs).Elem()
i := int(c.RandUint64() % uint64(v.NumField()))
t := v.Field(i).Addr()
for v.Field(i).IsNil() {
c.Fuzz(t.Interface())
}
},
func(i *core.ISCSIVolumeSource, c fuzz.Continue) {
i.ISCSIInterface = c.RandString()
if i.ISCSIInterface == "" {
i.ISCSIInterface = "default"
}
},
func(i *core.ISCSIPersistentVolumeSource, c fuzz.Continue) {
i.ISCSIInterface = c.RandString()
if i.ISCSIInterface == "" {
i.ISCSIInterface = "default"
}
},
func(d *core.DNSPolicy, c fuzz.Continue) {
policies := []core.DNSPolicy{core.DNSClusterFirst, core.DNSDefault}
*d = policies[c.Rand.Intn(len(policies))]
},
func(p *core.Protocol, c fuzz.Continue) {
protocols := []core.Protocol{core.ProtocolTCP, core.ProtocolUDP, core.ProtocolSCTP}
*p = protocols[c.Rand.Intn(len(protocols))]
},
func(p *core.ServiceAffinity, c fuzz.Continue) {
types := []core.ServiceAffinity{core.ServiceAffinityClientIP, core.ServiceAffinityNone}
*p = types[c.Rand.Intn(len(types))]
},
func(p *core.ServiceType, c fuzz.Continue) {
types := []core.ServiceType{core.ServiceTypeClusterIP, core.ServiceTypeNodePort, core.ServiceTypeLoadBalancer}
*p = types[c.Rand.Intn(len(types))]
},
func(p *core.ServiceExternalTrafficPolicyType, c fuzz.Continue) {
types := []core.ServiceExternalTrafficPolicyType{core.ServiceExternalTrafficPolicyTypeCluster, core.ServiceExternalTrafficPolicyTypeLocal}
*p = types[c.Rand.Intn(len(types))]
},
func(ct *core.Container, c fuzz.Continue) {
c.FuzzNoCustom(ct) // fuzz self without calling this function again
ct.TerminationMessagePath = "/" + ct.TerminationMessagePath // Must be non-empty
ct.TerminationMessagePolicy = "File"
},
func(p *core.Probe, c fuzz.Continue) {
c.FuzzNoCustom(p)
// These fields have default values.
intFieldsWithDefaults := [...]string{"TimeoutSeconds", "PeriodSeconds", "SuccessThreshold", "FailureThreshold"}
v := reflect.ValueOf(p).Elem()
for _, field := range intFieldsWithDefaults {
f := v.FieldByName(field)
if f.Int() == 0 {
f.SetInt(1)
}
}
},
func(ev *core.EnvVar, c fuzz.Continue) {
ev.Name = c.RandString()
if c.RandBool() {
ev.Value = c.RandString()
} else {
ev.ValueFrom = &core.EnvVarSource{}
ev.ValueFrom.FieldRef = &core.ObjectFieldSelector{}
versions := []schema.GroupVersion{
{Group: "admission.k8s.io", Version: "v1alpha1"},
{Group: "apps", Version: "v1beta1"},
{Group: "apps", Version: "v1beta2"},
{Group: "foo", Version: "v42"},
}
ev.ValueFrom.FieldRef.APIVersion = versions[c.Rand.Intn(len(versions))].String()
ev.ValueFrom.FieldRef.FieldPath = c.RandString()
}
},
func(ev *core.EnvFromSource, c fuzz.Continue) {
if c.RandBool() {
ev.Prefix = "p_"
}
if c.RandBool() {
c.Fuzz(&ev.ConfigMapRef)
} else {
c.Fuzz(&ev.SecretRef)
}
},
func(cm *core.ConfigMapEnvSource, c fuzz.Continue) {
c.FuzzNoCustom(cm) // fuzz self without calling this function again
if c.RandBool() {
opt := c.RandBool()
cm.Optional = &opt
}
},
func(s *core.SecretEnvSource, c fuzz.Continue) {
c.FuzzNoCustom(s) // fuzz self without calling this function again
},
func(sc *core.SecurityContext, c fuzz.Continue) {
c.FuzzNoCustom(sc) // fuzz self without calling this function again
if c.RandBool() {
priv := c.RandBool()
sc.Privileged = &priv
}
if c.RandBool() {
sc.Capabilities = &core.Capabilities{
Add: make([]core.Capability, 0),
Drop: make([]core.Capability, 0),
}
c.Fuzz(&sc.Capabilities.Add)
c.Fuzz(&sc.Capabilities.Drop)
}
},
func(s *core.Secret, c fuzz.Continue) {
c.FuzzNoCustom(s) // fuzz self without calling this function again
s.Type = core.SecretTypeOpaque
},
func(r *core.RBDVolumeSource, c fuzz.Continue) {
r.RBDPool = c.RandString()
if r.RBDPool == "" {
r.RBDPool = "rbd"
}
r.RadosUser = c.RandString()
if r.RadosUser == "" {
r.RadosUser = "admin"
}
r.Keyring = c.RandString()
if r.Keyring == "" {
r.Keyring = "/etc/ceph/keyring"
}
},
func(r *core.RBDPersistentVolumeSource, c fuzz.Continue) {
r.RBDPool = c.RandString()
if r.RBDPool == "" {
r.RBDPool = "rbd"
}
r.RadosUser = c.RandString()
if r.RadosUser == "" {
r.RadosUser = "admin"
}
r.Keyring = c.RandString()
if r.Keyring == "" {
r.Keyring = "/etc/ceph/keyring"
}
},
func(obj *core.HostPathVolumeSource, c fuzz.Continue) {
c.FuzzNoCustom(obj)
types := []core.HostPathType{core.HostPathUnset, core.HostPathDirectoryOrCreate, core.HostPathDirectory,
core.HostPathFileOrCreate, core.HostPathFile, core.HostPathSocket, core.HostPathCharDev, core.HostPathBlockDev}
typeVol := types[c.Rand.Intn(len(types))]
if obj.Type == nil {
obj.Type = &typeVol
}
},
func(pv *core.PersistentVolume, c fuzz.Continue) {
c.FuzzNoCustom(pv) // fuzz self without calling this function again
types := []core.PersistentVolumePhase{core.VolumeAvailable, core.VolumePending, core.VolumeBound, core.VolumeReleased, core.VolumeFailed}
pv.Status.Phase = types[c.Rand.Intn(len(types))]
pv.Status.Message = c.RandString()
reclamationPolicies := []core.PersistentVolumeReclaimPolicy{core.PersistentVolumeReclaimRecycle, core.PersistentVolumeReclaimRetain}
pv.Spec.PersistentVolumeReclaimPolicy = reclamationPolicies[c.Rand.Intn(len(reclamationPolicies))]
volumeModes := []core.PersistentVolumeMode{core.PersistentVolumeFilesystem, core.PersistentVolumeBlock}
pv.Spec.VolumeMode = &volumeModes[c.Rand.Intn(len(volumeModes))]
},
func(pvc *core.PersistentVolumeClaim, c fuzz.Continue) {
c.FuzzNoCustom(pvc) // fuzz self without calling this function again
types := []core.PersistentVolumeClaimPhase{core.ClaimBound, core.ClaimPending, core.ClaimLost}
pvc.Status.Phase = types[c.Rand.Intn(len(types))]
volumeModes := []core.PersistentVolumeMode{core.PersistentVolumeFilesystem, core.PersistentVolumeBlock}
pvc.Spec.VolumeMode = &volumeModes[c.Rand.Intn(len(volumeModes))]
},
func(obj *core.AzureDiskVolumeSource, c fuzz.Continue) {
if obj.CachingMode == nil {
obj.CachingMode = new(core.AzureDataDiskCachingMode)
*obj.CachingMode = core.AzureDataDiskCachingReadWrite
}
if obj.Kind == nil {
obj.Kind = new(core.AzureDataDiskKind)
*obj.Kind = core.AzureSharedBlobDisk
}
if obj.FSType == nil {
obj.FSType = new(string)
*obj.FSType = "ext4"
}
if obj.ReadOnly == nil {
obj.ReadOnly = new(bool)
*obj.ReadOnly = false
}
},
func(sio *core.ScaleIOVolumeSource, c fuzz.Continue) {
sio.StorageMode = c.RandString()
if sio.StorageMode == "" {
sio.StorageMode = "ThinProvisioned"
}
sio.FSType = c.RandString()
if sio.FSType == "" {
sio.FSType = "xfs"
}
},
func(sio *core.ScaleIOPersistentVolumeSource, c fuzz.Continue) {
sio.StorageMode = c.RandString()
if sio.StorageMode == "" {
sio.StorageMode = "ThinProvisioned"
}
sio.FSType = c.RandString()
if sio.FSType == "" {
sio.FSType = "xfs"
}
},
func(s *core.NamespaceSpec, c fuzz.Continue) {
s.Finalizers = []core.FinalizerName{core.FinalizerKubernetes}
},
func(s *core.NamespaceStatus, c fuzz.Continue) {
s.Phase = core.NamespaceActive
},
func(http *core.HTTPGetAction, c fuzz.Continue) {
c.FuzzNoCustom(http) // fuzz self without calling this function again
http.Path = "/" + http.Path // can't be blank
http.Scheme = "x" + http.Scheme // can't be blank
},
func(ss *core.ServiceSpec, c fuzz.Continue) {
c.FuzzNoCustom(ss) // fuzz self without calling this function again
if len(ss.Ports) == 0 {
// There must be at least 1 port.
ss.Ports = append(ss.Ports, core.ServicePort{})
c.Fuzz(&ss.Ports[0])
}
for i := range ss.Ports {
switch ss.Ports[i].TargetPort.Type {
case intstr.Int:
ss.Ports[i].TargetPort.IntVal = 1 + ss.Ports[i].TargetPort.IntVal%65535 // non-zero
case intstr.String:
ss.Ports[i].TargetPort.StrVal = "x" + ss.Ports[i].TargetPort.StrVal // non-empty
}
}
types := []core.ServiceAffinity{core.ServiceAffinityNone, core.ServiceAffinityClientIP}
ss.SessionAffinity = types[c.Rand.Intn(len(types))]
switch ss.SessionAffinity {
case core.ServiceAffinityClientIP:
timeoutSeconds := int32(c.Rand.Intn(int(core.MaxClientIPServiceAffinitySeconds)))
ss.SessionAffinityConfig = &core.SessionAffinityConfig{
ClientIP: &core.ClientIPConfig{
TimeoutSeconds: &timeoutSeconds,
},
}
case core.ServiceAffinityNone:
ss.SessionAffinityConfig = nil
}
},
func(s *core.NodeStatus, c fuzz.Continue) {
c.FuzzNoCustom(s)
s.Allocatable = s.Capacity
},
func(e *core.Event, c fuzz.Continue) {
c.FuzzNoCustom(e)
e.EventTime = metav1.MicroTime{Time: time.Unix(1, 1000)}
if e.Series != nil {
e.Series.LastObservedTime = metav1.MicroTime{Time: time.Unix(3, 3000)}
}
},
}
}
|
apache-2.0
|
jmhodges/couchrest
|
lib/couchrest/monkeypatches.rb
|
932
|
# This file must be loaded after the JSON gem and any other library that beats up the Time class.
class Time
# This date format sorts lexicographically
# and is compatible with Javascript's <tt>new Date(time_string)</tt> constructor.
  # Note that this format stores all dates in UTC so that collation
# order is preserved. (There's no longer a need to set <tt>ENV['TZ'] = 'UTC'</tt>
# in your application.)
def to_json(options = nil)
u = self.utc
%("#{u.strftime("%Y/%m/%d %H:%M:%S +0000")}")
end
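  # For example (illustrative):
  #   Time.utc(2009, 2, 13, 23, 31, 30).to_json
  #   # => "\"2009/02/13 23:31:30 +0000\""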
# Decodes the JSON time format to a UTC time.
# Based on Time.parse from ActiveSupport. ActiveSupport's version
# is more complete, returning a time in your current timezone,
# rather than keeping the time in UTC. YMMV.
# def self.parse string, fallback=nil
# d = DateTime.parse(string).new_offset
# self.utc(d.year, d.month, d.day, d.hour, d.min, d.sec)
# rescue
# fallback
# end
end
|
apache-2.0
|
camunda/camunda-consulting
|
snippets/rugby-teams-dmn/src/main/java/org/camunda/demo/dmnTest/RugbyRules/LoggerDelegate.java
|
1037
|
package org.camunda.demo.dmnTest.RugbyRules;
import java.util.logging.Logger;
import org.camunda.bpm.engine.delegate.DelegateExecution;
import org.camunda.bpm.engine.delegate.JavaDelegate;
/**
* This is an empty service implementation illustrating how to use a plain Java
* class as a BPMN 2.0 Service Task delegate.
*/
public class LoggerDelegate implements JavaDelegate {
private final Logger LOGGER = Logger.getLogger(LoggerDelegate.class.getName());
  public void execute(DelegateExecution execution) throws Exception {
    LOGGER.info("\n\n ... LoggerDelegate invoked by "
        + "processDefinitionId=" + execution.getProcessDefinitionId()
        + ", activityId=" + execution.getCurrentActivityId()
        + ", activityName='" + execution.getCurrentActivityName() + "'"
        + ", processInstanceId=" + execution.getProcessInstanceId()
        + ", businessKey=" + execution.getProcessBusinessKey()
        + ", executionId=" + execution.getId()
        + " \n\n");
  }
}
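// Usage sketch (standard Camunda wiring, not shown in this file): reference the
// delegate from a BPMN 2.0 service task via the 'camunda:class' attribute, e.g.
//   <serviceTask id="logTask" name="Log invocation context"
//                camunda:class="org.camunda.demo.dmnTest.RugbyRules.LoggerDelegate"/>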
|
apache-2.0
|
wuyuntao/Nostradamus
|
NotradamusUnity/Assets/Plugins/BulletUnity/BulletSharp/Dynamics/Featherstone/MultiBodySolverConstraint.cs
|
18653
|
using System;
using System.Runtime.InteropServices;
using System.Security;
using BulletSharp.Math;
namespace BulletSharp
{
public class MultiBodySolverConstraint : IDisposable
{
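        // Thin managed wrapper over the native btMultiBodySolverConstraint: each
        // property below marshals one field through the P/Invoke declarations at
        // the bottom of this class.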
internal IntPtr _native;
protected MultiBody _multiBodyA;
protected MultiBody _multiBodyB;
internal MultiBodySolverConstraint(IntPtr native)
{
_native = native;
}
public MultiBodySolverConstraint()
{
_native = btMultiBodySolverConstraint_new();
}
public Vector3 AngularComponentA
{
get
{
Vector3 value;
btMultiBodySolverConstraint_getAngularComponentA(_native, out value);
return value;
}
set { btMultiBodySolverConstraint_setAngularComponentA(_native, ref value); }
}
public Vector3 AngularComponentB
{
get
{
Vector3 value;
btMultiBodySolverConstraint_getAngularComponentB(_native, out value);
return value;
}
set { btMultiBodySolverConstraint_setAngularComponentB(_native, ref value); }
}
public float AppliedImpulse
{
get { return btMultiBodySolverConstraint_getAppliedImpulse(_native); }
set { btMultiBodySolverConstraint_setAppliedImpulse(_native, value); }
}
public float AppliedPushImpulse
{
get { return btMultiBodySolverConstraint_getAppliedPushImpulse(_native); }
set { btMultiBodySolverConstraint_setAppliedPushImpulse(_native, value); }
}
public float Cfm
{
get { return btMultiBodySolverConstraint_getCfm(_native); }
set { btMultiBodySolverConstraint_setCfm(_native, value); }
}
public Vector3 ContactNormal1
{
get
{
Vector3 value;
btMultiBodySolverConstraint_getContactNormal1(_native, out value);
return value;
}
set { btMultiBodySolverConstraint_setContactNormal1(_native, ref value); }
}
public Vector3 ContactNormal2
{
get
{
Vector3 value;
btMultiBodySolverConstraint_getContactNormal2(_native, out value);
return value;
}
set { btMultiBodySolverConstraint_setContactNormal2(_native, ref value); }
}
public int DeltaVelAindex
{
get { return btMultiBodySolverConstraint_getDeltaVelAindex(_native); }
set { btMultiBodySolverConstraint_setDeltaVelAindex(_native, value); }
}
public int DeltaVelBindex
{
get { return btMultiBodySolverConstraint_getDeltaVelBindex(_native); }
set { btMultiBodySolverConstraint_setDeltaVelBindex(_native, value); }
}
public float Friction
{
get { return btMultiBodySolverConstraint_getFriction(_native); }
set { btMultiBodySolverConstraint_setFriction(_native, value); }
}
public int FrictionIndex
{
get { return btMultiBodySolverConstraint_getFrictionIndex(_native); }
set { btMultiBodySolverConstraint_setFrictionIndex(_native, value); }
}
public int JacAindex
{
get { return btMultiBodySolverConstraint_getJacAindex(_native); }
set { btMultiBodySolverConstraint_setJacAindex(_native, value); }
}
public int JacBindex
{
get { return btMultiBodySolverConstraint_getJacBindex(_native); }
set { btMultiBodySolverConstraint_setJacBindex(_native, value); }
}
public float JacDiagABInv
{
get { return btMultiBodySolverConstraint_getJacDiagABInv(_native); }
set { btMultiBodySolverConstraint_setJacDiagABInv(_native, value); }
}
public int LinkA
{
get { return btMultiBodySolverConstraint_getLinkA(_native); }
set { btMultiBodySolverConstraint_setLinkA(_native, value); }
}
public int LinkB
{
get { return btMultiBodySolverConstraint_getLinkB(_native); }
set { btMultiBodySolverConstraint_setLinkB(_native, value); }
}
public float LowerLimit
{
get { return btMultiBodySolverConstraint_getLowerLimit(_native); }
set { btMultiBodySolverConstraint_setLowerLimit(_native, value); }
}
public MultiBody MultiBodyA
{
get
{
if (_multiBodyA == null)
{
_multiBodyA = new MultiBody(btMultiBodySolverConstraint_getMultiBodyA(_native));
}
return _multiBodyA;
}
set
{
btMultiBodySolverConstraint_setMultiBodyA(_native, value._native);
_multiBodyA = value;
}
}
public MultiBody MultiBodyB
{
get
{
if (_multiBodyB == null)
{
_multiBodyB = new MultiBody(btMultiBodySolverConstraint_getMultiBodyB(_native));
}
return _multiBodyB;
}
set
{
btMultiBodySolverConstraint_setMultiBodyB(_native, value._native);
_multiBodyB = value;
}
}
/*
public MultiBodyConstraint OrgConstraint
{
get { return btMultiBodySolverConstraint_getOrgConstraint(_native); }
set { btMultiBodySolverConstraint_setOrgConstraint(_native, value._native); }
}
*/
public int OrgDofIndex
{
get { return btMultiBodySolverConstraint_getOrgDofIndex(_native); }
set { btMultiBodySolverConstraint_setOrgDofIndex(_native, value); }
}
public IntPtr OriginalContactPoint
{
get { return btMultiBodySolverConstraint_getOriginalContactPoint(_native); }
set { btMultiBodySolverConstraint_setOriginalContactPoint(_native, value); }
}
public int OverrideNumSolverIterations
{
get { return btMultiBodySolverConstraint_getOverrideNumSolverIterations(_native); }
set { btMultiBodySolverConstraint_setOverrideNumSolverIterations(_native, value); }
}
public Vector3 Relpos1CrossNormal
{
get
{
Vector3 value;
btMultiBodySolverConstraint_getRelpos1CrossNormal(_native, out value);
return value;
}
set { btMultiBodySolverConstraint_setRelpos1CrossNormal(_native, ref value); }
}
public Vector3 Relpos2CrossNormal
{
get
{
Vector3 value;
btMultiBodySolverConstraint_getRelpos2CrossNormal(_native, out value);
return value;
}
set { btMultiBodySolverConstraint_setRelpos2CrossNormal(_native, ref value); }
}
public float Rhs
{
get { return btMultiBodySolverConstraint_getRhs(_native); }
set { btMultiBodySolverConstraint_setRhs(_native, value); }
}
public float RhsPenetration
{
get { return btMultiBodySolverConstraint_getRhsPenetration(_native); }
set { btMultiBodySolverConstraint_setRhsPenetration(_native, value); }
}
public int SolverBodyIdA
{
get { return btMultiBodySolverConstraint_getSolverBodyIdA(_native); }
set { btMultiBodySolverConstraint_setSolverBodyIdA(_native, value); }
}
public int SolverBodyIdB
{
get { return btMultiBodySolverConstraint_getSolverBodyIdB(_native); }
set { btMultiBodySolverConstraint_setSolverBodyIdB(_native, value); }
}
public float UnusedPadding4
{
get { return btMultiBodySolverConstraint_getUnusedPadding4(_native); }
set { btMultiBodySolverConstraint_setUnusedPadding4(_native, value); }
}
public float UpperLimit
{
get { return btMultiBodySolverConstraint_getUpperLimit(_native); }
set { btMultiBodySolverConstraint_setUpperLimit(_native, value); }
}
public void Dispose()
{
Dispose(true);
GC.SuppressFinalize(this);
}
protected virtual void Dispose(bool disposing)
{
if (_native != IntPtr.Zero)
{
btMultiBodySolverConstraint_delete(_native);
_native = IntPtr.Zero;
}
}
~MultiBodySolverConstraint()
{
Dispose(false);
}
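// Illustrative note (assumed usage, not from the original source): the wrapper owns
// the native btMultiBodySolverConstraint, so prefer deterministic disposal, e.g.
//   using (var c = new MultiBodySolverConstraint()) { c.LowerLimit = -1f; }
// assuming the class implements IDisposable and a parameterless constructor wraps
// btMultiBodySolverConstraint_new().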
[DllImport(Native.Dll, CallingConvention = Native.Conv), SuppressUnmanagedCodeSecurity]
static extern IntPtr btMultiBodySolverConstraint_new();
[DllImport(Native.Dll, CallingConvention = Native.Conv), SuppressUnmanagedCodeSecurity]
static extern void btMultiBodySolverConstraint_getAngularComponentA(IntPtr obj, [Out] out Vector3 value);
[DllImport(Native.Dll, CallingConvention = Native.Conv), SuppressUnmanagedCodeSecurity]
static extern void btMultiBodySolverConstraint_getAngularComponentB(IntPtr obj, [Out] out Vector3 value);
[DllImport(Native.Dll, CallingConvention = Native.Conv), SuppressUnmanagedCodeSecurity]
static extern float btMultiBodySolverConstraint_getAppliedImpulse(IntPtr obj);
[DllImport(Native.Dll, CallingConvention = Native.Conv), SuppressUnmanagedCodeSecurity]
static extern float btMultiBodySolverConstraint_getAppliedPushImpulse(IntPtr obj);
[DllImport(Native.Dll, CallingConvention = Native.Conv), SuppressUnmanagedCodeSecurity]
static extern float btMultiBodySolverConstraint_getCfm(IntPtr obj);
[DllImport(Native.Dll, CallingConvention = Native.Conv), SuppressUnmanagedCodeSecurity]
static extern void btMultiBodySolverConstraint_getContactNormal1(IntPtr obj, [Out] out Vector3 value);
[DllImport(Native.Dll, CallingConvention = Native.Conv), SuppressUnmanagedCodeSecurity]
static extern void btMultiBodySolverConstraint_getContactNormal2(IntPtr obj, [Out] out Vector3 value);
[DllImport(Native.Dll, CallingConvention = Native.Conv), SuppressUnmanagedCodeSecurity]
static extern int btMultiBodySolverConstraint_getDeltaVelAindex(IntPtr obj);
[DllImport(Native.Dll, CallingConvention = Native.Conv), SuppressUnmanagedCodeSecurity]
static extern int btMultiBodySolverConstraint_getDeltaVelBindex(IntPtr obj);
[DllImport(Native.Dll, CallingConvention = Native.Conv), SuppressUnmanagedCodeSecurity]
static extern float btMultiBodySolverConstraint_getFriction(IntPtr obj);
[DllImport(Native.Dll, CallingConvention = Native.Conv), SuppressUnmanagedCodeSecurity]
static extern int btMultiBodySolverConstraint_getFrictionIndex(IntPtr obj);
[DllImport(Native.Dll, CallingConvention = Native.Conv), SuppressUnmanagedCodeSecurity]
static extern int btMultiBodySolverConstraint_getJacAindex(IntPtr obj);
[DllImport(Native.Dll, CallingConvention = Native.Conv), SuppressUnmanagedCodeSecurity]
static extern int btMultiBodySolverConstraint_getJacBindex(IntPtr obj);
[DllImport(Native.Dll, CallingConvention = Native.Conv), SuppressUnmanagedCodeSecurity]
static extern float btMultiBodySolverConstraint_getJacDiagABInv(IntPtr obj);
[DllImport(Native.Dll, CallingConvention = Native.Conv), SuppressUnmanagedCodeSecurity]
static extern int btMultiBodySolverConstraint_getLinkA(IntPtr obj);
[DllImport(Native.Dll, CallingConvention = Native.Conv), SuppressUnmanagedCodeSecurity]
static extern int btMultiBodySolverConstraint_getLinkB(IntPtr obj);
[DllImport(Native.Dll, CallingConvention = Native.Conv), SuppressUnmanagedCodeSecurity]
static extern float btMultiBodySolverConstraint_getLowerLimit(IntPtr obj);
[DllImport(Native.Dll, CallingConvention = Native.Conv), SuppressUnmanagedCodeSecurity]
static extern IntPtr btMultiBodySolverConstraint_getMultiBodyA(IntPtr obj);
[DllImport(Native.Dll, CallingConvention = Native.Conv), SuppressUnmanagedCodeSecurity]
static extern IntPtr btMultiBodySolverConstraint_getMultiBodyB(IntPtr obj);
[DllImport(Native.Dll, CallingConvention = Native.Conv), SuppressUnmanagedCodeSecurity]
static extern IntPtr btMultiBodySolverConstraint_getOrgConstraint(IntPtr obj);
[DllImport(Native.Dll, CallingConvention = Native.Conv), SuppressUnmanagedCodeSecurity]
static extern int btMultiBodySolverConstraint_getOrgDofIndex(IntPtr obj);
[DllImport(Native.Dll, CallingConvention = Native.Conv), SuppressUnmanagedCodeSecurity]
static extern IntPtr btMultiBodySolverConstraint_getOriginalContactPoint(IntPtr obj);
[DllImport(Native.Dll, CallingConvention = Native.Conv), SuppressUnmanagedCodeSecurity]
static extern int btMultiBodySolverConstraint_getOverrideNumSolverIterations(IntPtr obj);
[DllImport(Native.Dll, CallingConvention = Native.Conv), SuppressUnmanagedCodeSecurity]
static extern void btMultiBodySolverConstraint_getRelpos1CrossNormal(IntPtr obj, [Out] out Vector3 value);
[DllImport(Native.Dll, CallingConvention = Native.Conv), SuppressUnmanagedCodeSecurity]
static extern void btMultiBodySolverConstraint_getRelpos2CrossNormal(IntPtr obj, [Out] out Vector3 value);
[DllImport(Native.Dll, CallingConvention = Native.Conv), SuppressUnmanagedCodeSecurity]
static extern float btMultiBodySolverConstraint_getRhs(IntPtr obj);
[DllImport(Native.Dll, CallingConvention = Native.Conv), SuppressUnmanagedCodeSecurity]
static extern float btMultiBodySolverConstraint_getRhsPenetration(IntPtr obj);
[DllImport(Native.Dll, CallingConvention = Native.Conv), SuppressUnmanagedCodeSecurity]
static extern int btMultiBodySolverConstraint_getSolverBodyIdA(IntPtr obj);
[DllImport(Native.Dll, CallingConvention = Native.Conv), SuppressUnmanagedCodeSecurity]
static extern int btMultiBodySolverConstraint_getSolverBodyIdB(IntPtr obj);
[DllImport(Native.Dll, CallingConvention = Native.Conv), SuppressUnmanagedCodeSecurity]
static extern float btMultiBodySolverConstraint_getUnusedPadding4(IntPtr obj);
[DllImport(Native.Dll, CallingConvention = Native.Conv), SuppressUnmanagedCodeSecurity]
static extern float btMultiBodySolverConstraint_getUpperLimit(IntPtr obj);
[DllImport(Native.Dll, CallingConvention = Native.Conv), SuppressUnmanagedCodeSecurity]
static extern void btMultiBodySolverConstraint_setAngularComponentA(IntPtr obj, [In] ref Vector3 value);
[DllImport(Native.Dll, CallingConvention = Native.Conv), SuppressUnmanagedCodeSecurity]
static extern void btMultiBodySolverConstraint_setAngularComponentB(IntPtr obj, [In] ref Vector3 value);
[DllImport(Native.Dll, CallingConvention = Native.Conv), SuppressUnmanagedCodeSecurity]
static extern void btMultiBodySolverConstraint_setAppliedImpulse(IntPtr obj, float value);
[DllImport(Native.Dll, CallingConvention = Native.Conv), SuppressUnmanagedCodeSecurity]
static extern void btMultiBodySolverConstraint_setAppliedPushImpulse(IntPtr obj, float value);
[DllImport(Native.Dll, CallingConvention = Native.Conv), SuppressUnmanagedCodeSecurity]
static extern void btMultiBodySolverConstraint_setCfm(IntPtr obj, float value);
[DllImport(Native.Dll, CallingConvention = Native.Conv), SuppressUnmanagedCodeSecurity]
static extern void btMultiBodySolverConstraint_setContactNormal1(IntPtr obj, [In] ref Vector3 value);
[DllImport(Native.Dll, CallingConvention = Native.Conv), SuppressUnmanagedCodeSecurity]
static extern void btMultiBodySolverConstraint_setContactNormal2(IntPtr obj, [In] ref Vector3 value);
[DllImport(Native.Dll, CallingConvention = Native.Conv), SuppressUnmanagedCodeSecurity]
static extern void btMultiBodySolverConstraint_setDeltaVelAindex(IntPtr obj, int value);
[DllImport(Native.Dll, CallingConvention = Native.Conv), SuppressUnmanagedCodeSecurity]
static extern void btMultiBodySolverConstraint_setDeltaVelBindex(IntPtr obj, int value);
[DllImport(Native.Dll, CallingConvention = Native.Conv), SuppressUnmanagedCodeSecurity]
static extern void btMultiBodySolverConstraint_setFriction(IntPtr obj, float value);
[DllImport(Native.Dll, CallingConvention = Native.Conv), SuppressUnmanagedCodeSecurity]
static extern void btMultiBodySolverConstraint_setFrictionIndex(IntPtr obj, int value);
[DllImport(Native.Dll, CallingConvention = Native.Conv), SuppressUnmanagedCodeSecurity]
static extern void btMultiBodySolverConstraint_setJacAindex(IntPtr obj, int value);
[DllImport(Native.Dll, CallingConvention = Native.Conv), SuppressUnmanagedCodeSecurity]
static extern void btMultiBodySolverConstraint_setJacBindex(IntPtr obj, int value);
[DllImport(Native.Dll, CallingConvention = Native.Conv), SuppressUnmanagedCodeSecurity]
static extern void btMultiBodySolverConstraint_setJacDiagABInv(IntPtr obj, float value);
[DllImport(Native.Dll, CallingConvention = Native.Conv), SuppressUnmanagedCodeSecurity]
static extern void btMultiBodySolverConstraint_setLinkA(IntPtr obj, int value);
[DllImport(Native.Dll, CallingConvention = Native.Conv), SuppressUnmanagedCodeSecurity]
static extern void btMultiBodySolverConstraint_setLinkB(IntPtr obj, int value);
[DllImport(Native.Dll, CallingConvention = Native.Conv), SuppressUnmanagedCodeSecurity]
static extern void btMultiBodySolverConstraint_setLowerLimit(IntPtr obj, float value);
[DllImport(Native.Dll, CallingConvention = Native.Conv), SuppressUnmanagedCodeSecurity]
static extern void btMultiBodySolverConstraint_setMultiBodyA(IntPtr obj, IntPtr value);
[DllImport(Native.Dll, CallingConvention = Native.Conv), SuppressUnmanagedCodeSecurity]
static extern void btMultiBodySolverConstraint_setMultiBodyB(IntPtr obj, IntPtr value);
[DllImport(Native.Dll, CallingConvention = Native.Conv), SuppressUnmanagedCodeSecurity]
static extern void btMultiBodySolverConstraint_setOrgConstraint(IntPtr obj, IntPtr value);
[DllImport(Native.Dll, CallingConvention = Native.Conv), SuppressUnmanagedCodeSecurity]
static extern void btMultiBodySolverConstraint_setOrgDofIndex(IntPtr obj, int value);
[DllImport(Native.Dll, CallingConvention = Native.Conv), SuppressUnmanagedCodeSecurity]
static extern void btMultiBodySolverConstraint_setOriginalContactPoint(IntPtr obj, IntPtr value);
[DllImport(Native.Dll, CallingConvention = Native.Conv), SuppressUnmanagedCodeSecurity]
static extern void btMultiBodySolverConstraint_setOverrideNumSolverIterations(IntPtr obj, int value);
[DllImport(Native.Dll, CallingConvention = Native.Conv), SuppressUnmanagedCodeSecurity]
static extern void btMultiBodySolverConstraint_setRelpos1CrossNormal(IntPtr obj, [In] ref Vector3 value);
[DllImport(Native.Dll, CallingConvention = Native.Conv), SuppressUnmanagedCodeSecurity]
static extern void btMultiBodySolverConstraint_setRelpos2CrossNormal(IntPtr obj, [In] ref Vector3 value);
[DllImport(Native.Dll, CallingConvention = Native.Conv), SuppressUnmanagedCodeSecurity]
static extern void btMultiBodySolverConstraint_setRhs(IntPtr obj, float value);
[DllImport(Native.Dll, CallingConvention = Native.Conv), SuppressUnmanagedCodeSecurity]
static extern void btMultiBodySolverConstraint_setRhsPenetration(IntPtr obj, float value);
[DllImport(Native.Dll, CallingConvention = Native.Conv), SuppressUnmanagedCodeSecurity]
static extern void btMultiBodySolverConstraint_setSolverBodyIdA(IntPtr obj, int value);
[DllImport(Native.Dll, CallingConvention = Native.Conv), SuppressUnmanagedCodeSecurity]
static extern void btMultiBodySolverConstraint_setSolverBodyIdB(IntPtr obj, int value);
[DllImport(Native.Dll, CallingConvention = Native.Conv), SuppressUnmanagedCodeSecurity]
static extern void btMultiBodySolverConstraint_setUnusedPadding4(IntPtr obj, float value);
[DllImport(Native.Dll, CallingConvention = Native.Conv), SuppressUnmanagedCodeSecurity]
static extern void btMultiBodySolverConstraint_setUpperLimit(IntPtr obj, float value);
[DllImport(Native.Dll, CallingConvention = Native.Conv), SuppressUnmanagedCodeSecurity]
static extern void btMultiBodySolverConstraint_delete(IntPtr obj);
}
}
|
apache-2.0
|
pfxuan/NuProcess
|
src/main/java/com/zaxxer/nuprocess/linux/ProcessEpoll.java
|
9947
|
/*
* Copyright (C) 2013 Brett Wooldridge
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.zaxxer.nuprocess.linux;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.atomic.AtomicInteger;
import com.sun.jna.Native;
import com.sun.jna.ptr.IntByReference;
import com.zaxxer.nuprocess.NuProcess;
import com.zaxxer.nuprocess.internal.BaseEventProcessor;
import com.zaxxer.nuprocess.internal.LibC;
import static com.zaxxer.nuprocess.internal.LibC.WIFEXITED;
import static com.zaxxer.nuprocess.internal.LibC.WEXITSTATUS;
import static com.zaxxer.nuprocess.internal.LibC.WIFSIGNALED;
import static com.zaxxer.nuprocess.internal.LibC.WTERMSIG;
/**
* @author Brett Wooldridge
*/
class ProcessEpoll extends BaseEventProcessor<LinuxProcess>
{
private static final int EVENT_POOL_SIZE = 32;
private int epoll;
private EpollEvent triggeredEvent;
private List<LinuxProcess> deadPool;
private static BlockingQueue<EpollEvent> eventPool;
ProcessEpoll()
{
epoll = LibEpoll.epoll_create(1024);
if (epoll < 0) {
throw new RuntimeException("Unable to create kqueue: " + Native.getLastError());
}
triggeredEvent = new EpollEvent();
deadPool = new LinkedList<LinuxProcess>();
eventPool = new ArrayBlockingQueue<EpollEvent>(EVENT_POOL_SIZE);
for (int i = 0; i < EVENT_POOL_SIZE; i++) {
eventPool.add(new EpollEvent());
}
}
// ************************************************************************
// IEventProcessor methods
// ************************************************************************
@Override
public void registerProcess(LinuxProcess process)
{
if (shutdown) {
return;
}
int stdoutFd = process.getStdout().get();
int stderrFd = process.getStderr().get();
pidToProcessMap.put(process.getPid(), process);
fildesToProcessMap.put(process.getStdin().get(), process);
fildesToProcessMap.put(stdoutFd, process);
fildesToProcessMap.put(stderrFd, process);
try {
EpollEvent event = eventPool.take();
event.events = LibEpoll.EPOLLIN;
event.data.fd = stdoutFd;
int rc = LibEpoll.epoll_ctl(epoll, LibEpoll.EPOLL_CTL_ADD, stdoutFd, event);
if (rc == -1) {
rc = Native.getLastError();
eventPool.put(event);
throw new RuntimeException("Unable to register new events to epoll, errorcode: " + rc);
}
eventPool.put(event);
event = eventPool.take();
event.events = LibEpoll.EPOLLIN;
event.data.fd = stderrFd;
rc = LibEpoll.epoll_ctl(epoll, LibEpoll.EPOLL_CTL_ADD, stderrFd, event);
if (rc == -1) {
rc = Native.getLastError();
eventPool.put(event);
throw new RuntimeException("Unable to register new events to epoll, errorcode: " + rc);
}
eventPool.put(event);
}
catch (InterruptedException ie) {
throw new RuntimeException(ie);
}
}
@Override
public void queueWrite(LinuxProcess process)
{
if (shutdown) {
return;
}
try {
int stdin = process.getStdin().get();
if (stdin == -1) {
return;
}
EpollEvent event = eventPool.take();
event.events = LibEpoll.EPOLLOUT | LibEpoll.EPOLLONESHOT | LibEpoll.EPOLLRDHUP | LibEpoll.EPOLLHUP;
event.data.fd = stdin;
int rc = LibEpoll.epoll_ctl(epoll, LibEpoll.EPOLL_CTL_MOD, stdin, event);
if (rc == -1) {
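// EPOLL_CTL_MOD fails when stdin is not currently registered with this epoll
// instance; drop any stale registration and re-add the fd fresh.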
rc = LibEpoll.epoll_ctl(epoll, LibEpoll.EPOLL_CTL_DEL, stdin, event);
rc = LibEpoll.epoll_ctl(epoll, LibEpoll.EPOLL_CTL_ADD, stdin, event);
}
eventPool.put(event);
if (rc == -1) {
throw new RuntimeException("Unable to register new event to epoll queue");
}
}
catch (InterruptedException ie) {
throw new RuntimeException(ie);
}
}
@Override
public void closeStdin(LinuxProcess process)
{
int stdin = process.getStdin().get();
if (stdin != -1) {
fildesToProcessMap.remove(stdin);
LibEpoll.epoll_ctl(epoll, LibEpoll.EPOLL_CTL_DEL, stdin, null);
}
}
@Override
public boolean process()
{
try {
int nev = LibEpoll.epoll_wait(epoll, triggeredEvent, 1, DEADPOOL_POLL_INTERVAL);
if (nev == -1) {
throw new RuntimeException("Error waiting for epoll");
}
if (nev == 0) {
return false;
}
EpollEvent epEvent = triggeredEvent;
int ident = epEvent.data.fd;
int events = epEvent.events;
LinuxProcess linuxProcess = fildesToProcessMap.get(ident);
if (linuxProcess == null) {
return true;
}
if ((events & LibEpoll.EPOLLIN) != 0) // stdout/stderr data available to read
{
if (ident == linuxProcess.getStdout().get()) {
linuxProcess.readStdout(NuProcess.BUFFER_CAPACITY);
}
else {
linuxProcess.readStderr(NuProcess.BUFFER_CAPACITY);
}
}
else if ((events & LibEpoll.EPOLLOUT) != 0) // Room in stdin pipe available to write
{
if (linuxProcess.getStdin().get() != -1) {
if (linuxProcess.writeStdin(NuProcess.BUFFER_CAPACITY)) {
epEvent.events = LibEpoll.EPOLLOUT | LibEpoll.EPOLLONESHOT | LibEpoll.EPOLLRDHUP | LibEpoll.EPOLLHUP;
LibEpoll.epoll_ctl(epoll, LibEpoll.EPOLL_CTL_MOD, ident, epEvent);
}
}
}
if ((events & LibEpoll.EPOLLHUP) != 0 || (events & LibEpoll.EPOLLRDHUP) != 0 || (events & LibEpoll.EPOLLERR) != 0) {
LibEpoll.epoll_ctl(epoll, LibEpoll.EPOLL_CTL_DEL, ident, null);
if (ident == linuxProcess.getStdout().get()) {
linuxProcess.readStdout(-1);
}
else if (ident == linuxProcess.getStderr().get()) {
linuxProcess.readStderr(-1);
}
else if (ident == linuxProcess.getStdin().get()) {
linuxProcess.closeStdin(true);
}
}
if (linuxProcess.isSoftExit()) {
cleanupProcess(linuxProcess);
}
return true;
}
finally {
triggeredEvent.clear();
checkDeadPool();
}
}
// ************************************************************************
// Private methods
// ************************************************************************
AtomicInteger count = new AtomicInteger();
private void cleanupProcess(LinuxProcess linuxProcess)
{
pidToProcessMap.remove(linuxProcess.getPid());
fildesToProcessMap.remove(linuxProcess.getStdin().get());
fildesToProcessMap.remove(linuxProcess.getStdout().get());
fildesToProcessMap.remove(linuxProcess.getStderr().get());
// linuxProcess.close(linuxProcess.getStdin());
// linuxProcess.close(linuxProcess.getStdout());
// linuxProcess.close(linuxProcess.getStderr());
if (linuxProcess.cleanlyExitedBeforeProcess.get()) {
linuxProcess.onExit(0);
return;
}
IntByReference ret = new IntByReference();
int rc = LibC.waitpid(linuxProcess.getPid(), ret, LibC.WNOHANG);
if (rc == 0) {
deadPool.add(linuxProcess);
}
else if (rc < 0) {
linuxProcess.onExit((Native.getLastError() == LibC.ECHILD) ? Integer.MAX_VALUE : Integer.MIN_VALUE);
}
else {
int status = ret.getValue();
if (WIFEXITED(status)) {
status = WEXITSTATUS(status);
if (status == 127) {
linuxProcess.onExit(Integer.MIN_VALUE);
}
else {
linuxProcess.onExit(status);
}
}
else if (WIFSIGNALED(status)) {
linuxProcess.onExit(WTERMSIG(status));
}
else {
linuxProcess.onExit(Integer.MIN_VALUE);
}
}
}
private void checkDeadPool()
{
if (deadPool.isEmpty()) {
return;
}
IntByReference ret = new IntByReference();
Iterator<LinuxProcess> iterator = deadPool.iterator();
while (iterator.hasNext()) {
LinuxProcess process = iterator.next();
int rc = LibC.waitpid(process.getPid(), ret, LibC.WNOHANG);
if (rc == 0) {
continue;
}
iterator.remove();
if (rc < 0) {
process.onExit((Native.getLastError() == LibC.ECHILD) ? Integer.MAX_VALUE : Integer.MIN_VALUE);
continue;
}
int status = ret.getValue();
if (WIFEXITED(status)) {
status = WEXITSTATUS(status);
if (status == 127) {
process.onExit(Integer.MIN_VALUE);
}
else {
process.onExit(status);
}
}
else if (WIFSIGNALED(status)) {
process.onExit(WTERMSIG(status));
}
else {
process.onExit(Integer.MIN_VALUE);
}
}
}
}
|
apache-2.0
|
Ervii/garage-time
|
garage/src/python/pants/process/xargs.py
|
1767
|
# coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (nested_scopes, generators, division, absolute_import, with_statement,
print_function, unicode_literals)
import errno
import subprocess
class Xargs(object):
"""A subprocess execution wrapper in the spirit of the xargs command line tool.
Specifically allows encapsulated commands to be passed very large argument lists by
recursively splitting any argument list the OS rejects as too long (errno E2BIG) and
invoking the encapsulated command against each half in turn.
"""
@classmethod
def subprocess(cls, cmd, **kwargs):
"""Creates an xargs engine that uses subprocess.call to execute the given cmd array with extra
arg chunks.
"""
def call(args):
return subprocess.call(cmd + args, **kwargs)
return cls(call)
def __init__(self, cmd):
"""Creates an xargs engine that calls cmd with argument chunks.
:param cmd: A function that can execute a command line in the form of a list of strings
passed as its sole argument.
"""
self._cmd = cmd
def _split_args(self, args):
half = len(args) // 2
return args[:half], args[half:]
def execute(self, args):
"""Executes the configured cmd passing args in one or more rounds xargs style.
:param list args: Extra arguments to pass to cmd.
"""
all_args = list(args)
try:
return self._cmd(all_args)
except OSError as e:
if errno.E2BIG == e.errno:
args1, args2 = self._split_args(all_args)
result = self.execute(args1)
if result != 0:
return result
return self.execute(args2)
else:
raise  # re-raise, preserving the original traceback
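# Minimal usage sketch (illustrative, not part of the original module): wrap `echo`
# so an argument list the OS rejects with E2BIG is retried in halves:
#
#   xargs = Xargs.subprocess(['echo'])
#   exit_code = xargs.execute(['arg%d' % i for i in range(100000)])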
|
apache-2.0
|
gencer/python-phonenumbers
|
python/phonenumbers/shortdata/region_CU.py
|
581
|
"""Auto-generated file, do not edit by hand. CU metadata"""
from ..phonemetadata import NumberFormat, PhoneNumberDesc, PhoneMetadata
PHONE_METADATA_CU = PhoneMetadata(id='CU', country_code=None, international_prefix=None,
general_desc=PhoneNumberDesc(national_number_pattern='1\\d{2,5}', possible_length=(3, 6)),
emergency=PhoneNumberDesc(national_number_pattern='10[456]', example_number='106', possible_length=(3,)),
short_code=PhoneNumberDesc(national_number_pattern='1(?:0[456]|1(?:6111|8)|40)', example_number='140', possible_length=(3, 6)),
short_data=True)
|
apache-2.0
|
GeoinformationSystems/GeoprocessingAppstore
|
src/com/esri/gpt/catalog/schema/indexable/IndexableProperty.java
|
17817
|
/* See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* Esri Inc. licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.esri.gpt.catalog.schema.indexable;
import com.esri.gpt.catalog.discovery.Discoverable;
import com.esri.gpt.catalog.discovery.PropertyMeaning;
import com.esri.gpt.catalog.discovery.PropertyValueType;
import com.esri.gpt.catalog.schema.CfgContext;
import com.esri.gpt.catalog.schema.EsriTags;
import com.esri.gpt.catalog.schema.Schema;
import com.esri.gpt.catalog.search.ResourceIdentifier;
import com.esri.gpt.framework.geometry.Envelope;
import com.esri.gpt.framework.util.Val;
import com.esri.gpt.framework.xml.DomUtil;
import java.util.ArrayList;
import java.util.List;
import javax.xml.namespace.QName;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpressionException;
import org.w3c.dom.Document;
import org.w3c.dom.NamedNodeMap;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
/**
* An indexable property associated with a metadata schema.
*/
public class IndexableProperty {
/** instance variables ====================================================== */
private List<IndexableProperty> children;
private List<String> evaluatedValues;
private String meaningName;
private String xpathExpression;
private String xpathType;
/** constructors ============================================================ */
/** Default constructor. */
public IndexableProperty() {}
/**
* Construct by duplicating an existing object.
* @param objectToDuplicate the object to duplicate
*/
public IndexableProperty(IndexableProperty objectToDuplicate) {
if (objectToDuplicate == null) {
} else {
this.setMeaningName(objectToDuplicate.getMeaningName());
this.setXPathExpression(objectToDuplicate.getXPathExpression());
this.setXPathType(objectToDuplicate.getXPathType());
if (objectToDuplicate.getChildren() != null) {
if (this.getChildren() == null) {
this.setChildren(new ArrayList<IndexableProperty>());
}
for (IndexableProperty child: objectToDuplicate.getChildren()) {
this.getChildren().add(new IndexableProperty(child));
}
}
}
}
/** properties ============================================================== */
/**
* Gets the child properties.
* @return the child properties
*/
public List<IndexableProperty> getChildren() {
return this.children;
}
/**
* Sets the child properties.
* @param children the child properties
*/
public void setChildren(List<IndexableProperty> children) {
this.children = children;
}
/**
* Gets the first evaluated value.
* @return the first evaluated value (can be null)
*/
public String getEvaluatedValue() {
if (this.getEvaluatedValues() != null) {
for (String value: this.getEvaluatedValues()) {
value = Val.chkStr(value);
if (value.length() > 0) {
return value;
}
}
}
return null;
}
/**
* Gets the evaluated values.
* @return the evaluated values
*/
public List<String> getEvaluatedValues() {
return this.evaluatedValues;
}
/**
* Sets the evaluated values.
* @param values the evaluated values
*/
public void setEvaluatedValues(List<String> values) {
this.evaluatedValues = values;
}
/**
* Gets the meaning name.
* @return the meaning name
*/
public String getMeaningName() {
return this.meaningName;
}
/**
* Sets the meaning name.
* @param name the meaning name
*/
public void setMeaningName(String name) {
this.meaningName = name;
}
/**
* Gets the XPath expression.
* @return the XPath expression
*/
public String getXPathExpression() {
return this.xpathExpression;
}
/**
* Sets the XPath expression.
* @param expression the XPath expression
*/
public void setXPathExpression(String expression) {
this.xpathExpression = expression;
}
/**
* Gets the XPath expression result type.
* <br/>BOOLEAN NODE NODESET NUMBER STRING
* @return the XPath expression result type
*/
public String getXPathType() {
return this.xpathType;
}
/**
* Sets the XPath expression result type.
* <br/>BOOLEAN NODE NODESET NUMBER STRING
* @param xpathType the expression result type
*/
public void setXPathType(String xpathType) {
this.xpathType = xpathType;
}
/** methods ================================================================= */
/**
* Adds an evaluated value to the collection.
* @param context the active indexable context
* @param meaning the associated property meaning
* @param value the evaluated value
*/
protected void addValue(IndexableContext context, PropertyMeaning meaning, String value) {
value = Val.chkStr(value);
if (value.length() > 0) {
if (this.getEvaluatedValues() == null) {
this.setEvaluatedValues(new ArrayList<String>());
}
this.getEvaluatedValues().add(value);
}
}
/**
* Configures the object based upon a node loaded from a
* schema configuration XML.
* <p/>
* The following attributes are configured:
* <br/>meaning xpathType xpath
* <p/>
* The following child nodes are configured:
* <br/>property
* @param context the configuration context
* @param node the configuration node
* @param attributes the attributes of the configuration node
*/
public void configure(CfgContext context, Node node, NamedNodeMap attributes) {
String mName = DomUtil.getAttributeValue(attributes,"meaning");
if (Val.chkStr(mName).equalsIgnoreCase("anytext")) mName = "body";
if (Val.chkStr(mName).equalsIgnoreCase("resource.type")) mName = "contentType";
this.setMeaningName(mName);
this.setXPathExpression(Val.chkStr(DomUtil.getAttributeValue(attributes,"xpath")));
this.setXPathType(Val.chkStr(DomUtil.getAttributeValue(attributes,"xpathType")));
// loop through the children
NodeList nl = node.getChildNodes();
for (int i=0;i<nl.getLength();i++) {
Node nd = nl.item(i);
if (nd.getNodeType() == Node.ELEMENT_NODE) {
String nodeName = Val.chkStr(nd.getNodeName());
if (nodeName.equalsIgnoreCase("property")) {
if (this.getChildren() == null) {
this.setChildren(new ArrayList<IndexableProperty>());
}
IndexableProperty child = new IndexableProperty();
child.configure(context,nd,nd.getAttributes());
this.getChildren().add(child);
}
}
}
}
/**
* Evaluates the property based upon the supplied metadata document.
* @param schema the schema being evaluated
* @param context the active indexable context
* @param dom the metadata document
* @param parent the metadata document node that is actively being processed (can be null)
* @param xpath an XPath object configured with an appropriate
* Namespace context for the schema
* @throws XPathExpressionException if an evaluation expression fails
*/
public void evaluate(Schema schema,
IndexableContext context,
Document dom,
Node parent,
XPath xpath)
throws XPathExpressionException {
// initialize
String mName = Val.chkStr(this.getMeaningName());
String xpExpr = Val.chkStr(this.getXPathExpression());
String xpType = Val.chkStr(this.getXPathType());
boolean hasChildren = (this.getChildren() != null) && (this.getChildren().size() > 0);
// determine the meaning
PropertyMeaning meaning = null;
if (mName.length() > 0) {
meaning = context.getPropertyMeanings().get(mName);
if (meaning == null) {
Discoverable discoverable = context.getPropertyMeanings().getAllAliased().get(mName);
if (discoverable != null) {
meaning = discoverable.getMeaning();
} else {
// TODO warn if the meaning is null
}
}
}
// loop through children if no XPath expression was supplied
if (xpExpr.length() == 0) {
if (hasChildren) {
for (IndexableProperty child: this.getChildren()) {
child.evaluate(schema,context,dom,null,xpath);
}
}
} else {
// determine the starting context for the expression
Object startItem = dom;
if (parent != null) {
if (!xpExpr.startsWith("/")) {
startItem = parent;
}
}
// determine the return type, BOOLEAN NODE NODESET NUMBER STRING
QName returnType = null;
if (xpType.equalsIgnoreCase("BOOLEAN")) {
returnType = XPathConstants.BOOLEAN;
} else if (xpType.equalsIgnoreCase("NODE")) {
returnType = XPathConstants.NODE;
} else if (xpType.equalsIgnoreCase("NODESET") ||
xpType.equalsIgnoreCase("NODELIST") ||
xpType.equalsIgnoreCase("LIST")) {
returnType = XPathConstants.NODESET;
} else if (xpType.equalsIgnoreCase("NUMBER")) {
returnType = XPathConstants.NUMBER;
} else if (xpType.equalsIgnoreCase("STRING")) {
returnType = XPathConstants.STRING;
} else {
if (hasChildren) {
returnType = XPathConstants.NODESET;
} else {
if (xpExpr.toLowerCase().endsWith("text()")) {
returnType = XPathConstants.STRING;
} else {
returnType = XPathConstants.NODESET;
}
}
}
// evaluate the expression, process the result
Object result = xpath.evaluate(xpExpr,startItem,returnType);
if (result != null) {
if (returnType.equals(XPathConstants.STRING)) {
String value = (String)result;
this.addValue(context,meaning,value);
} else if (returnType.equals(XPathConstants.NUMBER)) {
String value = ""+((Number)result);
this.addValue(context,meaning,value);
} else if (returnType.equals(XPathConstants.BOOLEAN)) {
String value = ""+((Boolean)result);
this.addValue(context,meaning,value);
} else if (returnType.equals(XPathConstants.NODE)) {
Node nd = (Node)result;
String value = this.getTextContent(nd,true);
this.addValue(context,meaning,value);
if (hasChildren) {
for (IndexableProperty child: this.getChildren()) {
child.evaluate(schema,context,dom,nd,xpath);
}
}
} else if (returnType.equals(XPathConstants.NODESET)) {
NodeList nl = (NodeList)result;
for (int i=0;i<nl.getLength();i++) {
Node nd = nl.item(i);
String value = this.getTextContent(nd,true);
this.addValue(context,meaning,value);
if (hasChildren) {
for (IndexableProperty child: this.getChildren()) {
child.evaluate(schema,context,dom,nd,xpath);
}
}
}
}
}
}
}
/**
* Gets the text associated with a node.
* @param node the subject node
* @param first if true, get the first text node associated with an element node
* @return the text (can be null)
*/
protected String getTextContent(Node node, boolean first) {
String text = null;
if (node.getNodeType() == Node.ATTRIBUTE_NODE ) {
text = node.getNodeValue();
} else if (node.getNodeType() == Node.TEXT_NODE) {
text = node.getNodeValue();
} else if (node.getNodeType() == Node.CDATA_SECTION_NODE) {
text = node.getNodeValue();
} else if (node.getNodeType() == Node.ELEMENT_NODE) {
if (first) {
NodeList nl = node.getChildNodes();
for (int i=0;i<nl.getLength();i++) {
Node nd = nl.item(i);
if (nd.getNodeType() == Node.TEXT_NODE) {
text = nd.getNodeValue();
} else {
break;
}
}
} else {
text = node.getTextContent();
}
}
if (text != null) {
text = Val.chkStr(text);
if (text.length() == 0) {
text = null;
}
}
return text;
}
/**
* Resolves an evaluated property.
* @param schema the schema being evaluated
* @param context the active indexable context
* @param parent the parent property
*/
public void resolve(Schema schema, IndexableContext context, IndexableProperty parent) {
// initialize
String mName = Val.chkStr(this.getMeaningName());
boolean hasChildren = (this.getChildren() != null) && (this.getChildren().size() > 0);
List<String> values = this.getEvaluatedValues();
// determine the meaning
PropertyMeaning meaning = null;
if (mName.length() > 0) {
meaning = context.getPropertyMeanings().get(mName);
if (meaning == null) {
Discoverable discoverable = context.getPropertyMeanings().getAllAliased().get(mName);
if (discoverable != null) {
meaning = discoverable.getMeaning();
} else {
// TODO warn if the meaning is null
}
}
}
// handle geometries
if ((meaning != null) && (meaning.getValueType() != null)) {
if (meaning.getValueType().equals(PropertyValueType.GEOMETRY)) {
if (hasChildren) {
hasChildren = false;
Envelope envelope = new Envelope();
for (IndexableProperty child: this.getChildren()) {
String mn = Val.chkStr(child.getMeaningName());
String ev = Val.chkStr(child.getEvaluatedValue());
if (ev.length() > 0) {
if (mn.equalsIgnoreCase("envelope.west")) {
envelope.setMinX(ev);
} else if (mn.equalsIgnoreCase("envelope.south")) {
envelope.setMinY(ev);
} else if (mn.equalsIgnoreCase("envelope.east")) {
envelope.setMaxX(ev);
} else if (mn.equalsIgnoreCase("envelope.north")) {
envelope.setMaxY(ev);
} else if (mn.equalsIgnoreCase("envelope.lowerCorner")) {
String[] pt = ev.split(" ");
if (pt.length == 2) {
envelope.setMinX(pt[0]);
envelope.setMinY(pt[1]);
}
} else if (mn.equalsIgnoreCase("envelope.upperCorner")) {
String[] pt = ev.split(" ");
if (pt.length == 2) {
envelope.setMaxX(pt[0]);
envelope.setMaxY(pt[1]);
}
}
}
}
if (!envelope.isEmpty()) {
context.addStoreableValue(meaning,envelope);
}
}
return;
}
}
// ArcIMS content type
if ((meaning != null) && (values != null) && (values.size() > 0)) {
if (Val.chkStr(meaning.getName()).equalsIgnoreCase("contentType")) {
ArrayList<String> al = new ArrayList<String>();
boolean changed = false;
ResourceIdentifier ri = context.ensureResourceIdentifier();
for (String value: values) {
String s = Val.chkStr(ri.guessArcIMSContentTypeFromResourceType(value));
if ((s != null) && (s.length() > 0)) {
al.add(s);
changed = true;
} else {
// TODO: allow non-enumerated values?
al.add(value);
}
}
if (changed) {
this.setEvaluatedValues(al);
values = this.getEvaluatedValues();
}
}
}
// data themes (ISO MD_TopicCategoryCode)
if ((meaning != null) && (values != null) && (values.size() > 0)) {
if (Val.chkStr(meaning.getName()).equalsIgnoreCase("dataTheme")) {
ArrayList<String> al = new ArrayList<String>();
boolean changed = false;
for (String value: values) {
String s = EsriTags.DATA_THEME_CODES.get(value);
if ((s != null) && (s.length() > 0)) {
al.add(s);
changed = true;
} else {
// TODO: allow non-enumerated values?
al.add(value);
}
}
if (changed) {
this.setEvaluatedValues(al);
values = this.getEvaluatedValues();
}
}
}
// process the evaluated values
if (this.getEvaluatedValues() != null) {
boolean isSingleValued = false;
if (isSingleValued) {
String singleValue = Val.chkStr(this.getEvaluatedValue());
if (singleValue.length() == 0) {
this.setEvaluatedValues(null);
} else {
this.getEvaluatedValues().clear();
this.addValue(context,meaning,singleValue);
}
} else {
context.addStorableValues(meaning,this.getEvaluatedValues().toArray(new String[0]));
}
}
// process the children
if (hasChildren) {
for (IndexableProperty child: this.getChildren()) {
child.resolve(schema,context,this);
}
}
}
}
|
apache-2.0
|
dmage/origin
|
pkg/build/clientset/release_v3_6/typed/build/v1/fake/fake_build.go
|
3103
|
package fake
import (
v1 "github.com/openshift/origin/pkg/build/api/v1"
api "k8s.io/kubernetes/pkg/api"
unversioned "k8s.io/kubernetes/pkg/api/unversioned"
api_v1 "k8s.io/kubernetes/pkg/api/v1"
core "k8s.io/kubernetes/pkg/client/testing/core"
labels "k8s.io/kubernetes/pkg/labels"
watch "k8s.io/kubernetes/pkg/watch"
)
// FakeBuilds implements BuildResourceInterface
type FakeBuilds struct {
Fake *FakeBuildV1
ns string
}
var buildsResource = unversioned.GroupVersionResource{Group: "build.openshift.io", Version: "v1", Resource: "builds"}
func (c *FakeBuilds) Create(build *v1.Build) (result *v1.Build, err error) {
obj, err := c.Fake.
Invokes(core.NewCreateAction(buildsResource, c.ns, build), &v1.Build{})
if obj == nil {
return nil, err
}
return obj.(*v1.Build), err
}
func (c *FakeBuilds) Update(build *v1.Build) (result *v1.Build, err error) {
obj, err := c.Fake.
Invokes(core.NewUpdateAction(buildsResource, c.ns, build), &v1.Build{})
if obj == nil {
return nil, err
}
return obj.(*v1.Build), err
}
func (c *FakeBuilds) UpdateStatus(build *v1.Build) (*v1.Build, error) {
obj, err := c.Fake.
Invokes(core.NewUpdateSubresourceAction(buildsResource, "status", c.ns, build), &v1.Build{})
if obj == nil {
return nil, err
}
return obj.(*v1.Build), err
}
func (c *FakeBuilds) Delete(name string, options *api_v1.DeleteOptions) error {
_, err := c.Fake.
Invokes(core.NewDeleteAction(buildsResource, c.ns, name), &v1.Build{})
return err
}
func (c *FakeBuilds) DeleteCollection(options *api_v1.DeleteOptions, listOptions api_v1.ListOptions) error {
action := core.NewDeleteCollectionAction(buildsResource, c.ns, listOptions)
_, err := c.Fake.Invokes(action, &v1.BuildList{})
return err
}
func (c *FakeBuilds) Get(name string) (result *v1.Build, err error) {
obj, err := c.Fake.
Invokes(core.NewGetAction(buildsResource, c.ns, name), &v1.Build{})
if obj == nil {
return nil, err
}
return obj.(*v1.Build), err
}
func (c *FakeBuilds) List(opts api_v1.ListOptions) (result *v1.BuildList, err error) {
obj, err := c.Fake.
Invokes(core.NewListAction(buildsResource, c.ns, opts), &v1.BuildList{})
if obj == nil {
return nil, err
}
label, _, _ := core.ExtractFromListOptions(opts)
if label == nil {
label = labels.Everything()
}
list := &v1.BuildList{}
for _, item := range obj.(*v1.BuildList).Items {
if label.Matches(labels.Set(item.Labels)) {
list.Items = append(list.Items, item)
}
}
return list, err
}
// Watch returns a watch.Interface that watches the requested builds.
func (c *FakeBuilds) Watch(opts api_v1.ListOptions) (watch.Interface, error) {
return c.Fake.
InvokesWatch(core.NewWatchAction(buildsResource, c.ns, opts))
}
// Patch applies the patch and returns the patched build.
func (c *FakeBuilds) Patch(name string, pt api.PatchType, data []byte, subresources ...string) (result *v1.Build, err error) {
obj, err := c.Fake.
Invokes(core.NewPatchSubresourceAction(buildsResource, c.ns, name, data, subresources...), &v1.Build{})
if obj == nil {
return nil, err
}
return obj.(*v1.Build), err
}
|
apache-2.0
|
kuujo/copycat
|
utils/src/main/java/io/atomix/utils/concurrent/NullThreadContext.java
|
1248
|
/*
* Copyright 2018-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.atomix.utils.concurrent;
import java.time.Duration;
/**
* Null thread context.
*/
public class NullThreadContext implements ThreadContext {
@Override
public Scheduled schedule(Duration delay, Runnable callback) {
return null;
}
@Override
public Scheduled schedule(Duration initialDelay, Duration interval, Runnable callback) {
return null;
}
@Override
public boolean isBlocked() {
return false;
}
@Override
public void block() {
}
@Override
public void unblock() {
}
@Override
public void close() {
}
@Override
public void execute(Runnable command) {
}
}
|
apache-2.0
|
tomncooper/heron
|
heron/api/src/java/org/apache/heron/streamlet/impl/streamlets/LogStreamlet.java
|
1792
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.heron.streamlet.impl.streamlets;
import java.util.Set;
import org.apache.heron.api.topology.TopologyBuilder;
import org.apache.heron.streamlet.impl.StreamletImpl;
import org.apache.heron.streamlet.impl.sinks.LogSink;
/**
* LogStreamlet represents an empty Streamlet that is made up of elements from the parent
* streamlet after logging each element. Since elements of the parent are only logged and
* nothing is emitted, this streamlet is empty.
*/
public class LogStreamlet<R> extends StreamletImpl<R> {
private StreamletImpl<R> parent;
public LogStreamlet(StreamletImpl<R> parent) {
this.parent = parent;
setNumPartitions(parent.getNumPartitions());
}
@Override
public boolean doBuild(TopologyBuilder bldr, Set<String> stageNames) {
setDefaultNameIfNone(StreamletNamePrefix.LOGGER, stageNames);
bldr.setBolt(getName(), new LogSink<R>(),
getNumPartitions()).shuffleGrouping(parent.getName());
return true;
}
}
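// Illustrative sketch (assumed API usage, not part of the original file): a
// LogStreamlet is normally created indirectly through the Streamlet API, e.g.
//   builder.newSource(() -> "hello").log();
// which wraps the parent StreamletImpl in a LogStreamlet and attaches a LogSink bolt.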
|
apache-2.0
|
kubeflow/pipelines
|
backend/src/apiserver/common/paths.go
|
846
|
// Copyright 2018 The Kubeflow Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package common
import "fmt"
// CreateArtifactPath creates artifact resource path.
func CreateArtifactPath(runID string, nodeID string, artifactName string) string {
return fmt.Sprintf("runs/%s/nodes/%s/artifacts/%s", runID, nodeID, artifactName)
}
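// Example (illustrative, not part of the original file):
//   CreateArtifactPath("run-1", "node-a", "mlpipeline-ui-metadata")
// returns "runs/run-1/nodes/node-a/artifacts/mlpipeline-ui-metadata".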
|
apache-2.0
|
signalfx/metricproxy
|
vendor/github.com/uber/tchannel-go/calloptions.go
|
3237
|
// Copyright (c) 2015 Uber Technologies, Inc.
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package tchannel
// Format is the arg scheme used for a specific call.
type Format string
// The list of formats supported by tchannel.
const (
HTTP Format = "http"
JSON Format = "json"
Raw Format = "raw"
Thrift Format = "thrift"
)
func (f Format) String() string {
return string(f)
}
// CallOptions are options for a specific call.
type CallOptions struct {
// Format is arg scheme used for this call, sent in the "as" header.
// This header is only set if the Format is set.
Format Format
// ShardKey determines where this call request belongs, used with ringpop applications.
ShardKey string
// RequestState stores request state across retry attempts.
RequestState *RequestState
// RoutingKey identifies the destined traffic group. Relays may favor the
// routing key over the service name to route the request to a specialized
// traffic group.
RoutingKey string
// RoutingDelegate identifies a traffic group capable of routing a request
// to an instance of the intended service.
RoutingDelegate string
// CallerName defaults to the channel's service name for an outbound call.
// Optionally override this field to support transparent proxying when inbound
// caller names vary across calls.
CallerName string
}
var defaultCallOptions = &CallOptions{}
func (c *CallOptions) setHeaders(headers transportHeaders) {
headers[ArgScheme] = Raw.String()
c.overrideHeaders(headers)
}
// overrideHeaders sets headers if the call options contains non-default values.
func (c *CallOptions) overrideHeaders(headers transportHeaders) {
if c.Format != "" {
headers[ArgScheme] = c.Format.String()
}
if c.ShardKey != "" {
headers[ShardKey] = c.ShardKey
}
if c.RoutingKey != "" {
headers[RoutingKey] = c.RoutingKey
}
if c.RoutingDelegate != "" {
headers[RoutingDelegate] = c.RoutingDelegate
}
if c.CallerName != "" {
headers[CallerName] = c.CallerName
}
}
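// Illustrative sketch (assumed usage, not part of the original file): preparing
// transport headers for a JSON-encoded call routed through a delegate.
//
//   opts := &CallOptions{Format: JSON, RoutingDelegate: "proxy-svc"}
//   headers := make(transportHeaders)
//   opts.setHeaders(headers)
//   // headers[ArgScheme] == "json"; headers[RoutingDelegate] == "proxy-svc"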
// setResponseHeaders copies some headers from the incoming call request to the response.
func setResponseHeaders(reqHeaders, respHeaders transportHeaders) {
respHeaders[ArgScheme] = reqHeaders[ArgScheme]
}
|
apache-2.0
|
vivint-smarthome/scalyr-logback
|
src/test/java/com/scalyr/util/UtilTest.java
|
2852
|
package com.scalyr.util;
import com.scalyr.api.logs.EventAttributes;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertEqualsDeep;
public class UtilTest {
@BeforeMethod
public void setUp() throws Exception {
}
@AfterMethod
public void tearDown() throws Exception {
}
@Test
public void testStringToIntMemory() throws Exception {
assertEquals(new Integer(4194304), Util.stringToIntMemory("4m"));
assertEquals(new Integer(4194304), Util.stringToIntMemory("4194304"));
assertEquals(new Integer(4194304), Util.stringToIntMemory("4096k"));
assertEquals(Util.stringToIntMemory("4m"), Util.stringToIntMemory("4096k"));
assertEquals(null, Util.stringToIntMemory(""));
}
@Test
public void testMakeEventAttributesFromString() throws Exception {
EventAttributes originalAttrs = new EventAttributes();
originalAttrs.put("serverHost", "123");
originalAttrs.put("logfile", "loggy");
originalAttrs.put("env", "proddy");
originalAttrs.addAll(Util.makeEventAttributesFromString("appName=appofdoom,zodiac=rooster"));
EventAttributes testAttrs = new EventAttributes();
testAttrs.put("serverHost", "123");
testAttrs.put("logfile", "loggy");
testAttrs.put("env", "proddy");
testAttrs.put("appName", "appofdoom");
testAttrs.put("zodiac", "rooster");
assertEquals(testAttrs, originalAttrs);
}
@Test
public void testKvStringToMap() throws Exception {
assertEqualsDeep(new HashMap<String, String>(), Util.kvStringToMap(""));
assertEqualsDeep(new HashMap<String, String>(), Util.kvStringToMap(null));
assertEqualsDeep(Collections.singletonMap("zodiac", "rooster"), Util.kvStringToMap("zodiac=rooster"));
assertEqualsDeep(Collections.singletonMap("zodiac", "rooster"), Util.kvStringToMap("zodiac =rooster"));
assertEqualsDeep(Collections.singletonMap("zodiac", "rooster"), Util.kvStringToMap(" zodiac=rooster"));
assertEqualsDeep(Collections.singletonMap("zodiac", "rooster"), Util.kvStringToMap("zodiac= rooster"));
assertEqualsDeep(Collections.singletonMap("zodiac", "rooster"), Util.kvStringToMap("zodiac=rooster "));
assertEqualsDeep(Collections.singletonMap("zodiac", "roo ster"), Util.kvStringToMap(" zodiac=roo ster "));
Map<String, String> test = new HashMap<String, String>();
test.put("zodiac", "rooster");
test.put("he llo", "wor ld");
assertEqualsDeep(test, Util.kvStringToMap(" zodiac = rooster , he llo = wor ld"));
}
}
|
apache-2.0
|
danielpalme/ReportGenerator
|
src/Testprojects/CSharp/Project/CoverageExcludeAttribute.cs
|
221
|
using System;
namespace Test
{
[CoverageExclude]
[AttributeUsage(AttributeTargets.Method | AttributeTargets.Property | AttributeTargets.Class)]
public class CoverageExcludeAttribute : Attribute
{
}
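// Illustrative usage (not part of the original file): decorate members that
// coverage tooling should skip, e.g.
//   [CoverageExclude]
//   public void HelperNotCounted() { }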
}
|
apache-2.0
|
fuhongliang/2015weitonghui
|
Application/User/Model/ProductDiningtableModel.class.php
|
805
|
<?php
// .-----------------------------------------------------------------------------------
// |
// | WE TRY THE BEST WAY
// | Site: http://www.gooraye.net
// |-----------------------------------------------------------------------------------
// | Author: 贝贝 <hebiduhebi@163.com>
// | Copyright (c) 2012-2014, http://www.gooraye.net. All Rights Reserved.
// |-----------------------------------------------------------------------------------
namespace User\Model;
use Think\Model;
class ProductDiningtableModel extends Model{
protected $_validate = array(
array('name','require','名称不能为空',1),
);
protected $_auto = array (
array('token','gettoken',1,'callback')
);
function gettoken(){
return session('token');
}
}
?>
|
apache-2.0
|
greese/dasein-cloud-flexiant
|
src/main/java/org/dasein/cloud/flexiant/FCOConfigurationException.java
|
1425
|
/**
* Copyright (C) 2012-2013 Dell, Inc.
* See annotations for authorship information
*
* ====================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ====================================================================
*/
package org.dasein.cloud.flexiant;
import org.dasein.cloud.CloudException;
import javax.annotation.Nonnull;
/**
* An error in configuring FCOProvider's context in some manner.
* <p>Created by George Reese: 12/06/2012 9:44 AM</p>
* @author George Reese
* @version 2013.1 initial version
* @since 2013.1
*/
public class FCOConfigurationException extends CloudException {
private static final long serialVersionUID = 1560517901858964463L;
public FCOConfigurationException(@Nonnull String message) {
super(message);
}
public FCOConfigurationException(@Nonnull Throwable cause) {
super(cause);
}
}
|
apache-2.0
|
androidx/constraintlayout
|
projects/MotionLayoutVerification/app/src/main/java/android/support/constraint/app/RotationAngular.java
|
8699
|
/*
* Copyright (C) 2021 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package android.support.constraint.app;
import android.content.Context;
import android.content.res.Configuration;
import android.hardware.Sensor;
import android.hardware.SensorEvent;
import android.hardware.SensorEventListener;
import android.hardware.SensorManager;
import android.os.Build;
import android.os.Bundle;
import android.util.Log;
import android.view.Surface;
import android.view.View;
import android.view.Window;
import android.view.WindowManager;
import android.widget.SeekBar;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import androidx.appcompat.app.ActionBar;
import androidx.appcompat.app.AppCompatActivity;
import androidx.constraintlayout.motion.widget.Debug;
import androidx.constraintlayout.motion.widget.MotionLayout;
import org.jetbrains.annotations.NotNull;
public class RotationAngular extends AppCompatActivity {
private static final String TAG = "CheckSharedValues";
String layout_name;
MotionLayout mMotionLayout;
private SensorManager sensorManager;
float mAngle;
final float D_FACTOR = 10;
int mCurrentTransitionType = -1;
final int TRANSITION_LEFT_FROM_TOP = 0;
final int TRANSITION_RIGHT_FROM_TOP = 1;
final int TRANSITION_RIGHT_FROM_LEFT = 2;
final int TRANSITION_LEFT_FROM_RIGHT = 3;
int[][] mTransition = {
{R.id.portrait, R.id.landscape_R90}, //TRANSITION_LEFT_FROM_TOP
{R.id.portrait, R.id.landscape_right_RN90}, //TRANSITION_RIGHT_FROM_TOP
{R.id.landscape, R.id.portrait_R90}, //TRANSITION_RIGHT_FROM_LEFT
{R.id.landscape_right, R.id.portrait_RN90},//TRANSITION_LEFT_FROM_RIGHT
};
int count = 0;
float[] mAccValues;
int mLastProcessOrientation;
@Override
protected void onCreate(@Nullable @org.jetbrains.annotations.Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
Bundle extra = getIntent().getExtras();
String prelayout = extra.getString(Utils.KEY);
layout_name = prelayout;
Context ctx = getApplicationContext();
int id = ctx.getResources().getIdentifier(prelayout, "layout", ctx.getPackageName());
getWindow().getDecorView().setSystemUiVisibility(View.SYSTEM_UI_FLAG_HIDE_NAVIGATION);
this.getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, WindowManager.LayoutParams.FLAG_FULLSCREEN);
ActionBar bar = getSupportActionBar();
if (bar != null) {
bar.hide();
}
setContentView(id);
mMotionLayout = Utils.findMotionLayout(this);
boolean landscape = (getWindowManager().getDefaultDisplay().getRotation() & 1) == 1;
sensorManager = (SensorManager) getSystemService(Context.SENSOR_SERVICE);
Sensor sensor = sensorManager.getDefaultSensor(Sensor.TYPE_GRAVITY);
mMotionLayout.setState(landscape ? R.id.landscape : R.id.portrait, -1, -1);
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2) {
sensorManager.registerListener(gravity_listener, sensor, 10000);
}
}
@RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN_MR2)
@Override
public void onAttachedToWindow() {
super.onAttachedToWindow();
int rotationAnimation = WindowManager.LayoutParams.ROTATION_ANIMATION_JUMPCUT;
Window win = getWindow();
WindowManager.LayoutParams winParams = win.getAttributes();
winParams.rotationAnimation = rotationAnimation;
win.setAttributes(winParams);
boolean landscape = (getWindowManager().getDefaultDisplay().getRotation() & 1) == 1;
mMotionLayout.setState(getLayoutForOrientation(), -1, -1);
}
@RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN_MR2)
@Override
public void setRequestedOrientation(int requestedOrientation) {
super.setRequestedOrientation(requestedOrientation);
int rotationAnimation = WindowManager.LayoutParams.ROTATION_ANIMATION_JUMPCUT;
Window win = getWindow();
WindowManager.LayoutParams winParams = win.getAttributes();
winParams.rotationAnimation = rotationAnimation;
win.setAttributes(winParams);
}
@RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN_MR1)
@Override
public void onConfigurationChanged(@NonNull @NotNull Configuration newConfig) {
mCurrentTransitionType = -1;
orientation();
super.onConfigurationChanged(newConfig);
}
    /**
     * Compute the constraint set to transition to.
     *
     * @return the constraint-set id matching the current display rotation
     */
private int getLayoutForOrientation() {
switch (getWindowManager().getDefaultDisplay().getRotation()) {
default:
case Surface.ROTATION_0:
return R.id.portrait;
case Surface.ROTATION_90:
return R.id.landscape;
case Surface.ROTATION_180:
return R.id.portrait;
case Surface.ROTATION_270:
if (null != mMotionLayout.getConstraintSet(R.id.landscape_right)) {
return R.id.landscape_right;
}
return R.id.landscape;
}
}
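    /**
     * Map the gravity vector (accX, accY) to a signed rotation fraction.
     * atan2 yields an angle in [-PI, PI]; dividing by PI/2 normalizes it so
     * that +/-1 corresponds to the device lying on its left/right side.
     */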
float curve(float accX, float accY) {
float ang = (float) Math.atan2(accX, accY);
ang /= (Math.PI / 2);
return ang;
}
public void orientation() {
int id = mMotionLayout.getEndState();
int rotation = getWindowManager().getDefaultDisplay().getRotation();
boolean set_transition = false;
if (mLastProcessOrientation != rotation) {
set_transition = true;
}
mLastProcessOrientation = rotation;
if (mCurrentTransitionType == -1) {
set_transition = true;
}
float p = mAngle;
int type;
float progress = 0;
switch (rotation) {
case Surface.ROTATION_90:
type = TRANSITION_RIGHT_FROM_LEFT;
progress = pmap(Math.max(1 - p, 0));
break;
case Surface.ROTATION_270:
type = TRANSITION_LEFT_FROM_RIGHT;
progress = pmap(Math.max(1 + p, 0));
break;
case Surface.ROTATION_0:
default:
if (p > 0) {
type = TRANSITION_LEFT_FROM_TOP;
} else {
type = TRANSITION_RIGHT_FROM_TOP;
}
progress = pmap(Math.abs(p));
}
if (type != mCurrentTransitionType || set_transition) {
mMotionLayout.setTransition(mTransition[type][0], mTransition[type][1]);
mCurrentTransitionType = type;
}
mMotionLayout.setProgress(progress);
}
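    // Gravity samples are smoothed with an exponential moving average:
    // damp = (sample + damp * D_FACTOR) / (1 + D_FACTOR), i.e. alpha = 1 / (1 + D_FACTOR).
    // Changes smaller than 0.01 per axis are ignored to avoid jitter.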
SensorEventListener gravity_listener = new SensorEventListener() {
float dampX, dampY;
float prevX, prevY;
@Override
public void onSensorChanged(SensorEvent sensorEvent) {
mAccValues = sensorEvent.values;
float x = mAccValues[0];
float y = mAccValues[1];
if (Math.hypot(x, y) < 2) {
return;
}
dampX = x + dampX * D_FACTOR;
dampY = y + dampY * D_FACTOR;
dampX /= (1 + D_FACTOR);
dampY /= (1 + D_FACTOR);
if ((Math.abs(dampX - prevX) < 0.01 && Math.abs(dampY - prevY) < 0.01)) {
return;
}
prevX = dampX;
prevY = dampY;
mAngle = curve(dampX, dampY);
orientation();
}
@Override
public void onAccuracyChanged(Sensor sensor, int i) {
}
};
int mHold = 0;
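    // Remap progress around the midpoint: values are stretched away from 0.5
    // by a factor of (1 + mHold / 20) and clamped to [0, 1], so a larger
    // seek-bar value makes the transition snap harder toward either end.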
float pmap(float p) {
float t = p - 0.5f;
t *= 1 + mHold / 20f;
return Math.max(0, Math.min(1, t + 0.5f));
}
public void hold(View v) {
SeekBar bar = findViewById(R.id.seek);
int prog = bar.getProgress();
Log.v(TAG, Debug.getLoc() + " (@) " + prog);
mHold = prog;
}
}
|
apache-2.0
|
chrz89/upb-son-editor-backend
|
src/son_editor/apis/nsfslookup.py
|
2694
|
'''
Created on 22.07.2016
@author: Jonas
'''
from flask.globals import session
from flask_restplus import Namespace, fields
from flask_restplus import Resource
from son_editor.impl import nsfslookupimpl
from son_editor.util.constants import WORKSPACES, PROJECTS, NSFS, SERVICES, VNFS
from son_editor.util.requestutil import prepare_response
namespace = Namespace(WORKSPACES + '/<int:ws_id>/' + PROJECTS + '/<int:project_id>/' + NSFS,
description="Project VNF Resources")
vendor_name_version_path = "/<string:vendor>/<string:name>/<string:version>"
funct = namespace.model("VNF", {
'name': fields.String(required=True, description='The VNF Name'),
'vendor': fields.String(required=True, description='The VNF Vendor'),
'version': fields.String(required=True, description='The VNF Version')
})
funct_response = namespace.inherit("Response", funct, {
"descriptor": fields.Nested(model=funct, description="The Complete VNF Descriptor"),
"id": fields.Integer(description='The Project ID'),
"project_id": fields.Integer(description='The parent project id'),
})
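# Illustrative request against the routes below (workspace and project ids are
# placeholders): GET /workspaces/1/projects/2/nsfs/services/<vendor>/<name>/<version>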
@namespace.route('/' + SERVICES + vendor_name_version_path)
@namespace.param('ws_id', 'The Workspace identifier')
@namespace.param('project_id', 'The Project identifier')
@namespace.param('vendor', 'The Network Service vendor')
@namespace.param('name', 'The Network Service name')
@namespace.param('version', 'The Network Service version')
class ServiceLookup(Resource):
@namespace.response(200, "OK", [funct_response])
def get(self, ws_id, project_id, vendor, name, version):
"""Retrieves a network service by vendor name version
Finds a specific network service with given vendor / name / version"""
service = nsfslookupimpl.find_network_service(session["user_data"], ws_id, project_id, vendor, name, version)
return prepare_response(service)
@namespace.route('/' + VNFS + vendor_name_version_path)
@namespace.param('ws_id', 'The Workspace identifier')
@namespace.param('project_id', 'The Project identifier')
@namespace.param('vendor', 'The Virtual Network Function vendor')
@namespace.param('name', 'The Virtual Network Function name')
@namespace.param('version', 'The Virtual Network Function version')
class VnfLookup(Resource):
@namespace.response(200, "OK", [funct_response])
def get(self, ws_id, project_id, vendor, name, version):
"""Retrieves a virtual network function by vendor name version
        Finds a specific virtual network function with given vendor / name / version"""
function = nsfslookupimpl.find_vnf(session["user_data"], ws_id, project_id, vendor, name, version)
return prepare_response(function)
|
apache-2.0
|
devdattakulkarni/Cassandra-KVAC
|
src/java/org/apache/cassandra/gms/VersionedValue.java
|
6616
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.gms;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.net.InetAddress;
import java.util.UUID;
import org.apache.cassandra.dht.IPartitioner;
import org.apache.cassandra.dht.Token;
import org.apache.cassandra.io.ICompactSerializer;
import org.apache.cassandra.utils.FBUtilities;
/**
* This abstraction represents the state associated with a particular node which an
* application wants to make available to the rest of the nodes in the cluster.
 * Whenever a piece of state needs to be disseminated to the rest of the cluster, wrap
* the state in an instance of <i>ApplicationState</i> and add it to the Gossiper.
*
* e.g. if we want to disseminate load information for node A do the following:
*
* ApplicationState loadState = new ApplicationState(<string representation of load>);
* Gossiper.instance.addApplicationState("LOAD STATE", loadState);
*/
public class VersionedValue implements Comparable<VersionedValue>
{
public static final ICompactSerializer<VersionedValue> serializer = new VersionedValueSerializer();
// this must be a char that cannot be present in any token
public final static char DELIMITER = ',';
public final static String DELIMITER_STR = new String(new char[] { DELIMITER });
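    // Status values are encoded as "<STATUS><DELIMITER><token>", e.g. "NORMAL,42"
    // (the token rendering depends on the partitioner's token factory).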
// values for ApplicationState.STATUS
public final static String STATUS_BOOTSTRAPPING = "BOOT";
public final static String STATUS_NORMAL = "NORMAL";
public final static String STATUS_LEAVING = "LEAVING";
public final static String STATUS_LEFT = "LEFT";
public final static String STATUS_MOVING = "MOVING";
public final static String REMOVING_TOKEN = "removing";
public final static String REMOVED_TOKEN = "removed";
// values for ApplicationState.REMOVAL_COORDINATOR
public final static String REMOVAL_COORDINATOR = "REMOVER";
public final int version;
public final String value;
private VersionedValue(String value, int version)
{
this.value = value;
this.version = version;
}
private VersionedValue(String value)
{
this.value = value;
version = VersionGenerator.getNextVersion();
}
public int compareTo(VersionedValue value)
{
return this.version - value.version;
}
@Override
public String toString()
{
return "Value(" + value + "," + version + ")";
}
public static class VersionedValueFactory
{
IPartitioner partitioner;
public VersionedValueFactory(IPartitioner partitioner)
{
this.partitioner = partitioner;
}
public VersionedValue bootstrapping(Token token)
{
return new VersionedValue(VersionedValue.STATUS_BOOTSTRAPPING + VersionedValue.DELIMITER + partitioner.getTokenFactory().toString(token));
}
public VersionedValue normal(Token token)
{
return new VersionedValue(VersionedValue.STATUS_NORMAL + VersionedValue.DELIMITER + partitioner.getTokenFactory().toString(token));
}
public VersionedValue load(double load)
{
return new VersionedValue(String.valueOf(load));
}
public VersionedValue migration(UUID newVersion)
{
return new VersionedValue(newVersion.toString());
}
public VersionedValue leaving(Token token)
{
return new VersionedValue(VersionedValue.STATUS_LEAVING + VersionedValue.DELIMITER + partitioner.getTokenFactory().toString(token));
}
public VersionedValue left(Token token)
{
return new VersionedValue(VersionedValue.STATUS_LEFT + VersionedValue.DELIMITER + partitioner.getTokenFactory().toString(token));
}
public VersionedValue moving(Token token)
{
return new VersionedValue(VersionedValue.STATUS_MOVING + VersionedValue.DELIMITER + partitioner.getTokenFactory().toString(token));
}
public VersionedValue removingNonlocal(Token token)
{
return new VersionedValue(VersionedValue.REMOVING_TOKEN + VersionedValue.DELIMITER + partitioner.getTokenFactory().toString(token));
}
public VersionedValue removedNonlocal(Token token)
{
return new VersionedValue(VersionedValue.REMOVED_TOKEN + VersionedValue.DELIMITER + partitioner.getTokenFactory().toString(token));
}
public VersionedValue removalCoordinator(Token token)
{
return new VersionedValue(VersionedValue.REMOVAL_COORDINATOR + VersionedValue.DELIMITER + partitioner.getTokenFactory().toString(token));
}
public VersionedValue datacenter(String dcId)
{
return new VersionedValue(dcId);
}
public VersionedValue rack(String rackId)
{
return new VersionedValue(rackId);
}
public VersionedValue rpcaddress(InetAddress endpoint)
{
return new VersionedValue(endpoint.getHostAddress());
}
public VersionedValue releaseVersion()
{
return new VersionedValue(FBUtilities.getReleaseVersionString());
}
}
private static class VersionedValueSerializer implements ICompactSerializer<VersionedValue>
{
public void serialize(VersionedValue value, DataOutputStream dos, int version) throws IOException
{
dos.writeUTF(value.value);
dos.writeInt(value.version);
}
public VersionedValue deserialize(DataInputStream dis, int version) throws IOException
{
String value = dis.readUTF();
int valVersion = dis.readInt();
return new VersionedValue(value, valVersion);
}
}
}
|
apache-2.0
|
asanka88/apache-synapse
|
modules/commons/src/main/java/org/apache/synapse/commons/evaluators/config/EqualSerializer.java
|
2022
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.synapse.commons.evaluators.config;
import org.apache.axiom.om.OMElement;
import org.apache.synapse.commons.evaluators.*;
import javax.xml.namespace.QName;
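// Illustrative output shape (element/attribute names come from EvaluatorConstants;
// the values shown here are assumptions): <equal source="..." value="..."/>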
/**
* Serialize the {@link EqualEvaluator} to the XML configuration defined in
* the {@link EqualFactory}.
*/
public class EqualSerializer extends TextProcessingEvaluatorSerializer {
public OMElement serialize(OMElement parent, Evaluator evaluator) throws EvaluatorException {
if (!(evaluator instanceof EqualEvaluator)) {
throw new IllegalArgumentException("Evaluator must be an EqualEvaluator");
}
EqualEvaluator equalEvaluator = (EqualEvaluator) evaluator;
OMElement equalElement = fac.createOMElement(EvaluatorConstants.EQUAL,
EvaluatorConstants.SYNAPSE_NAMESPACE, EvaluatorConstants.EMPTY_PREFIX);
serializeSourceTextRetriever(equalEvaluator.getTextRetriever(), equalElement);
equalElement.addAttribute(fac.createOMAttribute(EvaluatorConstants.VALUE, nullNS,
equalEvaluator.getValue()));
if (parent != null) {
parent.addChild(equalElement);
}
return equalElement;
}
}
|
apache-2.0
|
jendap/usergrid-stack
|
core/src/main/java/org/usergrid/persistence/schema/CollectionInfo.java
|
6897
|
/*******************************************************************************
* Copyright 2012 Apigee Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package org.usergrid.persistence.schema;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import java.util.TreeSet;
import org.usergrid.persistence.annotations.EntityCollection;
public class CollectionInfo {
private String name;
private EntityInfo container;
private boolean indexingDynamicProperties;
private boolean indexingDynamicDictionaries;
private String linkedCollection;
private Set<String> propertiesIndexed = new TreeSet<String>(
String.CASE_INSENSITIVE_ORDER);
private boolean publicVisible = true;
private final Set<String> dictionariesIndexed = new TreeSet<String>(
String.CASE_INSENSITIVE_ORDER);
private Set<String> subkeys = new LinkedHashSet<String>();
private String type;
private boolean reversed;
private boolean includedInExport = true;
private String sort;
public CollectionInfo() {
}
public CollectionInfo(EntityCollection collectionAnnotation) {
setIndexingDynamicProperties(collectionAnnotation
.indexingDynamicProperties());
setIndexingDynamicDictionaries(collectionAnnotation
.indexingDynamicDictionaries());
setLinkedCollection(collectionAnnotation.linkedCollection());
setPropertiesIndexed(new LinkedHashSet<String>(
Arrays.asList(collectionAnnotation.propertiesIndexed())));
setPublic(collectionAnnotation.publicVisible());
setDictionariesIndexed(new LinkedHashSet<String>(
Arrays.asList(collectionAnnotation.dictionariesIndexed())));
setSubkeys(Arrays.asList(collectionAnnotation.subkeys()));
setType(collectionAnnotation.type());
setReversed(collectionAnnotation.reversed());
setIncludedInExport(collectionAnnotation.includedInExport());
setSort(collectionAnnotation.sort());
}
public String getType() {
return type;
}
public void setType(String type) {
if ("".equals(type)) {
type = null;
}
this.type = type;
}
public boolean isPropertyIndexed(String propertyName) {
return propertiesIndexed.contains(propertyName);
}
public boolean hasIndexedProperties() {
return !propertiesIndexed.isEmpty();
}
public Set<String> getPropertiesIndexed() {
return propertiesIndexed;
}
public void setPropertiesIndexed(Set<String> propertiesIndexed) {
this.propertiesIndexed = new TreeSet<String>(
String.CASE_INSENSITIVE_ORDER);
this.propertiesIndexed.addAll(propertiesIndexed);
}
public boolean isDictionaryIndexed(String propertyName) {
return dictionariesIndexed.contains(propertyName);
}
public Set<String> getDictionariesIndexed() {
return dictionariesIndexed;
}
    public void setDictionariesIndexed(Set<String> dictionariesIndexed) {
        // the backing field is final and case-insensitive; refresh its contents
        this.dictionariesIndexed.clear();
        this.dictionariesIndexed.addAll(dictionariesIndexed);
    }
public boolean isIndexingDynamicProperties() {
return indexingDynamicProperties;
}
public void setIndexingDynamicProperties(boolean indexingProperties) {
indexingDynamicProperties = indexingProperties;
}
public boolean isIndexingDynamicDictionaries() {
return indexingDynamicDictionaries;
}
public void setIndexingDynamicDictionaries(
boolean indexingDynamicDictionaries) {
this.indexingDynamicDictionaries = indexingDynamicDictionaries;
}
public String getLinkedCollection() {
return linkedCollection;
}
public void setLinkedCollection(String linkedCollection) {
if ("".equals(linkedCollection)) {
linkedCollection = null;
}
this.linkedCollection = linkedCollection;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public EntityInfo getContainer() {
return container;
}
public void setContainer(EntityInfo entityInfo) {
container = entityInfo;
}
public boolean isSubkeyProperty(String propertyName) {
return subkeys.contains(propertyName);
}
public boolean hasSubkeys() {
return !subkeys.isEmpty();
}
public Set<String> getSubkeySet() {
return subkeys;
}
public List<String> getSubkeys() {
return new ArrayList<String>(subkeys);
}
public void setSubkeys(List<String> s) {
subkeys = new LinkedHashSet<String>();
subkeys.addAll(s);
makeSubkeyCombos();
}
List<String[]> subkeyCombinations = new ArrayList<String[]>();
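    // Enumerate every non-empty combination of the subkeys via a bitmask:
    // for n subkeys there are 2^n - 1 combinations; subkey j is included in
    // combination i when bit j of i is set, otherwise its slot is null.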
void makeSubkeyCombos() {
subkeyCombinations = new ArrayList<String[]>();
if (subkeys.size() > 0) {
int combos = (1 << subkeys.size());
for (int i = 1; i < combos; i++) {
List<String> combo = new ArrayList<String>();
int j = 0;
for (String subkey : subkeys) {
if (((1 << j) & i) != 0) {
combo.add(subkey);
} else {
combo.add(null);
}
j++;
}
subkeyCombinations.add(combo.toArray(new String[0]));
}
}
}
public List<String[]> getSubkeyCombinations() {
return subkeyCombinations;
}
public boolean isPublic() {
return publicVisible;
}
public void setPublic(boolean publicVisible) {
this.publicVisible = publicVisible;
}
public boolean isReversed() {
return reversed;
}
public void setReversed(boolean reversed) {
this.reversed = reversed;
}
public void setIncludedInExport(boolean includedInExport) {
this.includedInExport = includedInExport;
}
public boolean isIncludedInExport() {
return includedInExport;
}
public String getSort() {
return sort;
}
public void setSort(String sort) {
if ("".equals(sort)) {
sort = null;
}
this.sort = sort;
}
@Override
public String toString() {
return "CollectionInfo [name=" + name + ", container=" + container
+ ", indexingDynamicProperties=" + indexingDynamicProperties
+ ", indexingDynamicDictionaries="
+ indexingDynamicDictionaries + ", linkedCollection="
+ linkedCollection + ", propertiesIndexed=" + propertiesIndexed
+ ", publicVisible=" + publicVisible + ", dictionariesIndexed="
+ dictionariesIndexed + ", subkeys=" + subkeys + ", type="
+ type + ", reversed=" + reversed + ", includedInExport="
+ includedInExport + ", sort=" + sort + ", subkeyCombinations="
+ subkeyCombinations + "]";
}
}
|
apache-2.0
|
mhlx/blog5
|
src/main/java/me/qyh/blog/template/render/thymeleaf/ThymeleafRenderExecutor.java
|
9148
|
/*
* Copyright 2016 qyh.me
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package me.qyh.blog.template.render.thymeleaf;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import javax.servlet.ServletContext;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.context.i18n.LocaleContextHolder;
import org.springframework.core.convert.ConversionService;
import org.springframework.http.MediaType;
import org.springframework.web.servlet.HandlerMapping;
import org.springframework.web.servlet.support.RequestContext;
import org.springframework.web.servlet.view.AbstractTemplateView;
import org.thymeleaf.IEngineConfiguration;
import org.thymeleaf.TemplateEngine;
import org.thymeleaf.TemplateSpec;
import org.thymeleaf.context.WebExpressionContext;
import org.thymeleaf.exceptions.TemplateProcessingException;
import org.thymeleaf.spring5.context.webmvc.SpringWebMvcThymeleafRequestContext;
import org.thymeleaf.spring5.expression.ThymeleafEvaluationContext;
import org.thymeleaf.spring5.naming.SpringContextVariableNames;
import org.thymeleaf.standard.expression.FragmentExpression;
import org.thymeleaf.standard.expression.IStandardExpressionParser;
import org.thymeleaf.standard.expression.StandardExpressions;
import org.thymeleaf.templatemode.TemplateMode;
import me.qyh.blog.core.context.Environment;
import me.qyh.blog.core.exception.LogicException;
import me.qyh.blog.core.util.Validators;
import me.qyh.blog.template.entity.Fragment;
import me.qyh.blog.template.render.ParseContextHolder;
import me.qyh.blog.template.render.ReadOnlyResponse;
import me.qyh.blog.template.render.TemplateRenderExecutor;
import me.qyh.blog.template.validator.FragmentValidator;
/**
 * Renders templates to strings.
 *
 * @author Administrator
 *
 */
public final class ThymeleafRenderExecutor implements TemplateRenderExecutor {
@Autowired
private ServletContext servletContext;
@Autowired
private ApplicationContext applicationContext;
@Autowired
private TemplateEngine viewTemplateEngine;
	private static final String X_PJAX_CONTAINER_HEADER_NAME = "X-PJAX-Container";
	private static final String X_PJAX_FRAGMENT = "X-Fragment";
	private static final String X_FULLPAGE = "X-Fullpage";
@Override
public String execute(String viewTemplateName, final Map<String, Object> model, final HttpServletRequest request,
final ReadOnlyResponse response) {
return doExecutor(viewTemplateName, model, request, response);
}
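	// Header precedence for PJAX requests: X-Fullpage forces the full template,
	// X-Fragment resolves a named fragment template, and X-PJAX-Container
	// narrows rendering to "<template> :: <container>".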
@Override
public String processPjaxTemplateName(String templateName, HttpServletRequest request) throws LogicException {
		boolean fullpage = Boolean.parseBoolean(request.getHeader(X_FULLPAGE));
if (fullpage) {
return templateName;
}
String fragment = request.getHeader(X_PJAX_FRAGMENT);
if (!Validators.isEmptyOrNull(fragment, true)) {
fragment = FragmentValidator.validName(fragment, true);
return Fragment.getTemplateName(fragment, Environment.getSpace());
}
		String container = request.getHeader(X_PJAX_CONTAINER_HEADER_NAME);
if (Validators.isEmptyOrNull(container, true)) {
return templateName;
} else {
return templateName + " :: " + container;
}
}
// COPIED FROM ThymeleafView 3.0.9.RELEASE
private String doExecutor(String viewTemplateName, final Map<String, Object> model,
final HttpServletRequest request, final HttpServletResponse response) {
Objects.requireNonNull(viewTemplateName);
Locale locale = LocaleContextHolder.getLocale();
final Map<String, Object> mergedModel = new HashMap<>(30);
		// View.PATH_VARIABLES only exposes attributes marked with the @PathVariable annotation;
		// here we also need the optional path variables
@SuppressWarnings("unchecked")
final Map<String, Object> pathVars = (Map<String, Object>) request
.getAttribute(HandlerMapping.URI_TEMPLATE_VARIABLES_ATTRIBUTE);
if (pathVars != null) {
mergedModel.putAll(pathVars);
}
if (model != null) {
mergedModel.putAll(model);
}
final RequestContext requestContext = new RequestContext(request, response, servletContext, mergedModel);
final SpringWebMvcThymeleafRequestContext thymeleafRequestContext = new SpringWebMvcThymeleafRequestContext(
requestContext, request);
// For compatibility with ThymeleafView
addRequestContextAsVariable(mergedModel, SpringContextVariableNames.SPRING_REQUEST_CONTEXT, requestContext);
// For compatibility with AbstractTemplateView
addRequestContextAsVariable(mergedModel, AbstractTemplateView.SPRING_MACRO_REQUEST_CONTEXT_ATTRIBUTE,
requestContext);
// Add the Thymeleaf RequestContext wrapper that we will be using in
// this dialect (the bare RequestContext
		// stays in the context for compatibility with other dialects)
mergedModel.put(SpringContextVariableNames.THYMELEAF_REQUEST_CONTEXT, thymeleafRequestContext);
final ConversionService conversionService = (ConversionService) request
.getAttribute(ConversionService.class.getName());
final NoRestrictedEvaluationContext evaluationContext = new NoRestrictedEvaluationContext(applicationContext,
conversionService);
mergedModel.put(ThymeleafEvaluationContext.THYMELEAF_EVALUATION_CONTEXT_CONTEXT_VARIABLE_NAME,
evaluationContext);
final IEngineConfiguration configuration = viewTemplateEngine.getConfiguration();
final WebExpressionContext context = new WebExpressionContext(configuration, request, response, servletContext,
locale, mergedModel);
final String templateName;
final Set<String> markupSelectors;
if (!viewTemplateName.contains("::")) {
templateName = viewTemplateName;
markupSelectors = null;
} else {
final IStandardExpressionParser parser = StandardExpressions.getExpressionParser(configuration);
final FragmentExpression fragmentExpression;
try {
fragmentExpression = (FragmentExpression) parser.parseExpression(context,
"~{" + viewTemplateName + "}");
} catch (final TemplateProcessingException e) {
throw new IllegalArgumentException("Invalid template name specification: '" + viewTemplateName + "'");
}
final FragmentExpression.ExecutedFragmentExpression fragment = FragmentExpression
.createExecutedFragmentExpression(context, fragmentExpression);
templateName = FragmentExpression.resolveTemplateName(fragment);
markupSelectors = FragmentExpression.resolveFragments(fragment);
final Map<String, Object> nameFragmentParameters = fragment.getFragmentParameters();
if (nameFragmentParameters != null) {
if (fragment.hasSyntheticParameters()) {
// We cannot allow synthetic parameters because there is no way to specify them
// at the template
// engine execution!
throw new IllegalArgumentException(
"Parameters in a view specification must be named (non-synthetic): '" + viewTemplateName
+ "'");
}
context.setVariables(nameFragmentParameters);
}
}
final Set<String> processMarkupSelectors;
if (markupSelectors != null && markupSelectors.size() > 0) {
processMarkupSelectors = markupSelectors;
} else {
processMarkupSelectors = null;
}
String contentType = ParseContextHolder.getContext().getConfig().getContentType();
		TemplateSpec spec = new TemplateSpec(templateName, processMarkupSelectors, parseMode(contentType), null);
		return viewTemplateEngine.process(spec, context);
}
	/**
	 * Resolve the template mode from the response content type.
	 *
	 * @since 6.5
	 * @param contentType the content type configured for the response
	 * @return the matching Thymeleaf template mode (defaults to HTML)
	 */
protected TemplateMode parseMode(String contentType) {
if (Validators.isEmptyOrNull(contentType, true)) {
return TemplateMode.HTML;
}
if (contentType.startsWith(MediaType.TEXT_HTML_VALUE)) {
return TemplateMode.HTML;
}
if (contentType.startsWith(MediaType.APPLICATION_XML_VALUE)) {
return TemplateMode.XML;
}
if (contentType.startsWith("text/css")) {
return TemplateMode.CSS;
}
if (contentType.startsWith("text/javascript")) {
return TemplateMode.JAVASCRIPT;
}
if (contentType.startsWith(MediaType.TEXT_PLAIN_VALUE)) {
return TemplateMode.TEXT;
}
return TemplateMode.HTML;
}
private void addRequestContextAsVariable(final Map<String, Object> model, final String variableName,
final RequestContext requestContext) throws TemplateProcessingException {
if (model.containsKey(variableName)) {
throw new TemplateProcessingException("属性" + variableName + "已经存在与request中");
}
model.put(variableName, requestContext);
}
}
|
apache-2.0
|
gameduell/kythe
|
cxx/indexer/cxx/testdata/template/template_var_ps_completes.cc
|
270
|
// Checks completion edges for variable template partial specializations.
template <typename T, typename S> extern T z;
//- @z defines VarZPsAbsDecl
template <typename U> extern int z<int, U>;
//- @z completes/uniquely VarZPsAbsDecl
template <typename U> int z<int, U>;
|
apache-2.0
|
CaptainHayashi/roslyn
|
concepts/code/CsTypeClasses/Eq.cs
|
4340
|
using System;
namespace Eq
{
/*
We represent Haskell type classes as Generic interfaces.
class Eq a where
(==) :: a -> a -> Bool
*/
interface Eq<A>
{
bool Equals(A a, A b);
}
/*
The Haskell declaration of class Eq implicitly declares the overloaded operations induced by class Eq as members.
(==) :: (Eq a) => a -> a -> Bool
In CS, have to do this explicitly, for each member.
An operation over some class is a static generic method, parameterized by both a dictionary type parameter,
and the constrained type parameter itself.
The dictionary is marked "struct" so we can access its virtual operations
    through a default value without heap allocation or the risk of a NullReferenceException, and without passing around
dictionary values (just types!).
*/
static class Overloads
{
public static bool Eq<EqA, A>(A a, A b) where EqA : struct, Eq<A>
{
return default(EqA).Equals(a, b);
}
}
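    // Illustrative correspondence (EqInt is defined below): the Haskell call
    //   x == y                         -- at type Int
    // becomes
    //   Overloads.Eq<EqInt, int>(x, y)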
/*
A Haskell ground instance, eg.
instance Eq Integer where
x == y = x `integerEq` y
instance Eq Float where
x == y = x `floatEq` y
is translated to a non-generic struct implementing the appropriate type class interface.
*/
struct EqInt : Eq<int>
{
public bool Equals(int a, int b) { return a == b; }
}
struct EqFloat : Eq<float>
{
public bool Equals(float a, float b) { return a == b; }
}
/*
We can represent a Haskell parameterized instance as a generic struct,
implementing an interface but parameterized by suitably constrained type parameters.
instance (Eq a) => Eq ([a]) where
nil == nil = true
(a:as) == (b:bs) = (a == b) && (as == bs)
This Haskell code defines, given an equality on type as (any a) an equality operation on type list of a, written [a].
Substituting, for simplicity, arrays for lists in CS we can write:
*/
struct EqArray<A, EqA> : Eq<A[]> where EqA : struct, Eq<A>
{
public bool Equals(A[] a, A[] b)
{
if (a == null) return b == null;
if (b == null) return false;
if (a.Length != b.Length) return false;
for (int i = 0; i < a.Length; i++)
if (!Overloads.Eq<EqA, A>(a[i], b[i])) return false;
return true;
}
}
/* Derived operations
We translate Haskells qualified types as extra type parameters, constrained to be both structs and bound by translations of their type class constraints.
For example, list membership in Haskell is
elem :: Eq a => a -> [a] -> bool
x `elem` [] = False
x `elem` (y:ys) = x==y || (x `elem` ys)
In C#, we can define:
*/
public class Test
{
static bool Elem<EqA, A>(A x, A[] ys) where EqA : struct, Eq<A>
{
for (int i = 0; i < ys.Length; i++)
{
if (Overloads.Eq<EqA, A>(x, ys[i])) return true;
}
return false;
}
/*
Now every dictionary that implements Ord<A> must implement Eq<A> too, and can be used
as such when required.
Of course, in CS we have to make Dictionary construction and passing (as types, not values) explicit.
This is what a modified C# compiler would hopefully be able to do for you,
just like Haskell does by solving constraints to construct dictionaries at compile time.
The point is that the underlying mechanism for implementing Haskell Type Classes is already there!
*/
public static void Run()
{
Console.WriteLine("Find Test {0}", Elem<EqInt, int>(1, new int[] { 1, 2, 3 }));
Console.WriteLine("Find Test {0}", Elem<EqInt, int>(4, new int[] { 1, 2, 3 }));
Console.WriteLine("Equals Test {0}",
Overloads.Eq<EqArray<int[], EqArray<int, EqInt>>, int[][]>(new int[][] { new int[] { 1, 2 }, new int[] { 3, 4 } },
new int[][] { new int[] { 1, 2 }, new int[] { 3, 4 } }));
Console.WriteLine("Equals Test {0}",
Overloads.Eq<EqArray<int[], EqArray<int, EqInt>>, int[][]>(new int[][] { new int[] { 1, 2 }, new int[] { 3, 4 } },
new int[][] { new int[] { 1, 2 }, new int[] { 3, 5 } }));
}
/*
Finally we translate Haskell subclassing to interface inheritance.
See file NumEq for an example... */
}
}
|
apache-2.0
|
applift/vitess
|
go/sqltypes/proto3_test.go
|
6972
|
/*
Copyright 2017 Google Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package sqltypes
import (
"testing"
"github.com/golang/protobuf/proto"
querypb "github.com/youtube/vitess/go/vt/proto/query"
vtrpcpb "github.com/youtube/vitess/go/vt/proto/vtrpc"
"github.com/youtube/vitess/go/vt/vterrors"
)
func TestResult(t *testing.T) {
fields := []*querypb.Field{{
Name: "col1",
Type: VarChar,
}, {
Name: "col2",
Type: Int64,
}, {
Name: "col3",
Type: Float64,
}}
sqlResult := &Result{
Fields: fields,
InsertID: 1,
RowsAffected: 2,
Rows: [][]Value{{
testVal(VarChar, "aa"),
testVal(Int64, "1"),
testVal(Float64, "2"),
}, {
MakeTrusted(VarChar, []byte("bb")),
NULL,
NULL,
}},
Extras: &querypb.ResultExtras{
EventToken: &querypb.EventToken{
Timestamp: 123,
Shard: "shard0",
Position: "position0",
},
},
}
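	// Proto3 row encoding: Lengths holds the byte length of each column value
	// (-1 marks SQL NULL) and Values is the concatenation of all non-NULL
	// values, so ("aa", "1", "2") becomes Lengths {2, 1, 1} and Values "aa12".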
p3Result := &querypb.QueryResult{
Fields: fields,
InsertId: 1,
RowsAffected: 2,
Rows: []*querypb.Row{{
Lengths: []int64{2, 1, 1},
Values: []byte("aa12"),
}, {
Lengths: []int64{2, -1, -1},
Values: []byte("bb"),
}},
Extras: &querypb.ResultExtras{
EventToken: &querypb.EventToken{
Timestamp: 123,
Shard: "shard0",
Position: "position0",
},
},
}
p3converted := ResultToProto3(sqlResult)
if !proto.Equal(p3converted, p3Result) {
t.Errorf("P3:\n%v, want\n%v", p3converted, p3Result)
}
reverse := Proto3ToResult(p3Result)
if !reverse.Equal(sqlResult) {
t.Errorf("reverse:\n%#v, want\n%#v", reverse, sqlResult)
}
// Test custom fields.
fields[1].Type = VarBinary
sqlResult.Rows[0][1] = testVal(VarBinary, "1")
reverse = CustomProto3ToResult(fields, p3Result)
if !reverse.Equal(sqlResult) {
t.Errorf("reverse:\n%#v, want\n%#v", reverse, sqlResult)
}
}
func TestResults(t *testing.T) {
fields1 := []*querypb.Field{{
Name: "col1",
Type: VarChar,
}, {
Name: "col2",
Type: Int64,
}, {
Name: "col3",
Type: Float64,
}}
fields2 := []*querypb.Field{{
Name: "col11",
Type: VarChar,
}, {
Name: "col12",
Type: Int64,
}, {
Name: "col13",
Type: Float64,
}}
sqlResults := []Result{{
Fields: fields1,
InsertID: 1,
RowsAffected: 2,
Rows: [][]Value{{
testVal(VarChar, "aa"),
testVal(Int64, "1"),
testVal(Float64, "2"),
}},
Extras: &querypb.ResultExtras{
EventToken: &querypb.EventToken{
Timestamp: 123,
Shard: "shard0",
Position: "position0",
},
},
}, {
Fields: fields2,
InsertID: 3,
RowsAffected: 4,
Rows: [][]Value{{
testVal(VarChar, "bb"),
testVal(Int64, "3"),
testVal(Float64, "4"),
}},
Extras: &querypb.ResultExtras{
EventToken: &querypb.EventToken{
Timestamp: 123,
Shard: "shard1",
Position: "position1",
},
},
}}
p3Results := []*querypb.QueryResult{{
Fields: fields1,
InsertId: 1,
RowsAffected: 2,
Rows: []*querypb.Row{{
Lengths: []int64{2, 1, 1},
Values: []byte("aa12"),
}},
Extras: &querypb.ResultExtras{
EventToken: &querypb.EventToken{
Timestamp: 123,
Shard: "shard0",
Position: "position0",
},
},
}, {
Fields: fields2,
InsertId: 3,
RowsAffected: 4,
Rows: []*querypb.Row{{
Lengths: []int64{2, 1, 1},
Values: []byte("bb34"),
}},
Extras: &querypb.ResultExtras{
EventToken: &querypb.EventToken{
Timestamp: 123,
Shard: "shard1",
Position: "position1",
},
},
}}
p3converted := ResultsToProto3(sqlResults)
if !Proto3ResultsEqual(p3converted, p3Results) {
t.Errorf("P3:\n%v, want\n%v", p3converted, p3Results)
}
reverse := Proto3ToResults(p3Results)
if !ResultsEqual(reverse, sqlResults) {
t.Errorf("reverse:\n%#v, want\n%#v", reverse, sqlResults)
}
}
func TestQueryResponses(t *testing.T) {
fields1 := []*querypb.Field{{
Name: "col1",
Type: VarChar,
}, {
Name: "col2",
Type: Int64,
}, {
Name: "col3",
Type: Float64,
}}
fields2 := []*querypb.Field{{
Name: "col11",
Type: VarChar,
}, {
Name: "col12",
Type: Int64,
}, {
Name: "col13",
Type: Float64,
}}
queryResponses := []QueryResponse{
{
QueryResult: &Result{
Fields: fields1,
InsertID: 1,
RowsAffected: 2,
Rows: [][]Value{{
testVal(VarChar, "aa"),
testVal(Int64, "1"),
testVal(Float64, "2"),
}},
Extras: &querypb.ResultExtras{
EventToken: &querypb.EventToken{
Timestamp: 123,
Shard: "shard0",
Position: "position0",
},
},
},
QueryError: nil,
}, {
QueryResult: &Result{
Fields: fields2,
InsertID: 3,
RowsAffected: 4,
Rows: [][]Value{{
testVal(VarChar, "bb"),
testVal(Int64, "3"),
testVal(Float64, "4"),
}},
Extras: &querypb.ResultExtras{
EventToken: &querypb.EventToken{
Timestamp: 123,
Shard: "shard1",
Position: "position1",
},
},
},
QueryError: nil,
}, {
QueryResult: nil,
QueryError: vterrors.New(vtrpcpb.Code_DEADLINE_EXCEEDED, "deadline exceeded"),
},
}
p3ResultWithError := []*querypb.ResultWithError{
{
Error: nil,
Result: &querypb.QueryResult{
Fields: fields1,
InsertId: 1,
RowsAffected: 2,
Rows: []*querypb.Row{{
Lengths: []int64{2, 1, 1},
Values: []byte("aa12"),
}},
Extras: &querypb.ResultExtras{
EventToken: &querypb.EventToken{
Timestamp: 123,
Shard: "shard0",
Position: "position0",
},
},
},
}, {
Error: nil,
Result: &querypb.QueryResult{
Fields: fields2,
InsertId: 3,
RowsAffected: 4,
Rows: []*querypb.Row{{
Lengths: []int64{2, 1, 1},
Values: []byte("bb34"),
}},
Extras: &querypb.ResultExtras{
EventToken: &querypb.EventToken{
Timestamp: 123,
Shard: "shard1",
Position: "position1",
},
},
},
}, {
Error: &vtrpcpb.RPCError{
LegacyCode: vtrpcpb.LegacyErrorCode_DEADLINE_EXCEEDED_LEGACY,
Message: "deadline exceeded",
Code: vtrpcpb.Code_DEADLINE_EXCEEDED,
},
Result: nil,
},
}
p3converted := QueryResponsesToProto3(queryResponses)
if !Proto3QueryResponsesEqual(p3converted, p3ResultWithError) {
t.Errorf("P3:\n%v, want\n%v", p3converted, p3ResultWithError)
}
reverse := Proto3ToQueryReponses(p3ResultWithError)
if !QueryResponsesEqual(reverse, queryResponses) {
t.Errorf("reverse:\n%#v, want\n%#v", reverse, queryResponses)
}
}
|
apache-2.0
|
lxsmnv/spark
|
python/pyspark/ml/regression.py
|
66013
|
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import sys
import warnings
from pyspark import since, keyword_only
from pyspark.ml.param.shared import *
from pyspark.ml.util import *
from pyspark.ml.wrapper import JavaEstimator, JavaModel, JavaWrapper
from pyspark.ml.common import inherit_doc
from pyspark.sql import DataFrame
__all__ = ['AFTSurvivalRegression', 'AFTSurvivalRegressionModel',
'DecisionTreeRegressor', 'DecisionTreeRegressionModel',
'GBTRegressor', 'GBTRegressionModel',
'GeneralizedLinearRegression', 'GeneralizedLinearRegressionModel',
'GeneralizedLinearRegressionSummary', 'GeneralizedLinearRegressionTrainingSummary',
'IsotonicRegression', 'IsotonicRegressionModel',
'LinearRegression', 'LinearRegressionModel',
'LinearRegressionSummary', 'LinearRegressionTrainingSummary',
'RandomForestRegressor', 'RandomForestRegressionModel']
@inherit_doc
class LinearRegression(JavaEstimator, HasFeaturesCol, HasLabelCol, HasPredictionCol, HasMaxIter,
HasRegParam, HasTol, HasElasticNetParam, HasFitIntercept,
HasStandardization, HasSolver, HasWeightCol, HasAggregationDepth, HasLoss,
JavaMLWritable, JavaMLReadable):
"""
Linear regression.
The learning objective is to minimize the specified loss function, with regularization.
This supports two kinds of loss:
* squaredError (a.k.a squared loss)
* huber (a hybrid of squared error for relatively small errors and absolute error for \
relatively large ones, and we estimate the scale parameter from training data)
This supports multiple types of regularization:
* none (a.k.a. ordinary least squares)
* L2 (ridge regression)
* L1 (Lasso)
* L2 + L1 (elastic net)
Note: Fitting with huber loss only supports none and L2 regularization.
>>> from pyspark.ml.linalg import Vectors
>>> df = spark.createDataFrame([
... (1.0, 2.0, Vectors.dense(1.0)),
... (0.0, 2.0, Vectors.sparse(1, [], []))], ["label", "weight", "features"])
>>> lr = LinearRegression(maxIter=5, regParam=0.0, solver="normal", weightCol="weight")
>>> model = lr.fit(df)
>>> test0 = spark.createDataFrame([(Vectors.dense(-1.0),)], ["features"])
>>> abs(model.transform(test0).head().prediction - (-1.0)) < 0.001
True
>>> abs(model.coefficients[0] - 1.0) < 0.001
True
>>> abs(model.intercept - 0.0) < 0.001
True
>>> test1 = spark.createDataFrame([(Vectors.sparse(1, [0], [1.0]),)], ["features"])
>>> abs(model.transform(test1).head().prediction - 1.0) < 0.001
True
>>> lr.setParams("vector")
Traceback (most recent call last):
...
TypeError: Method setParams forces keyword arguments.
>>> lr_path = temp_path + "/lr"
>>> lr.save(lr_path)
>>> lr2 = LinearRegression.load(lr_path)
>>> lr2.getMaxIter()
5
>>> model_path = temp_path + "/lr_model"
>>> model.save(model_path)
>>> model2 = LinearRegressionModel.load(model_path)
>>> model.coefficients[0] == model2.coefficients[0]
True
>>> model.intercept == model2.intercept
True
>>> model.numFeatures
1
.. versionadded:: 1.4.0
"""
solver = Param(Params._dummy(), "solver", "The solver algorithm for optimization. Supported " +
"options: auto, normal, l-bfgs.", typeConverter=TypeConverters.toString)
loss = Param(Params._dummy(), "loss", "The loss function to be optimized. Supported " +
"options: squaredError, huber.", typeConverter=TypeConverters.toString)
epsilon = Param(Params._dummy(), "epsilon", "The shape parameter to control the amount of " +
"robustness. Must be > 1.0. Only valid when loss is huber",
typeConverter=TypeConverters.toFloat)
@keyword_only
def __init__(self, featuresCol="features", labelCol="label", predictionCol="prediction",
maxIter=100, regParam=0.0, elasticNetParam=0.0, tol=1e-6, fitIntercept=True,
standardization=True, solver="auto", weightCol=None, aggregationDepth=2,
loss="squaredError", epsilon=1.35):
"""
__init__(self, featuresCol="features", labelCol="label", predictionCol="prediction", \
maxIter=100, regParam=0.0, elasticNetParam=0.0, tol=1e-6, fitIntercept=True, \
standardization=True, solver="auto", weightCol=None, aggregationDepth=2, \
loss="squaredError", epsilon=1.35)
"""
super(LinearRegression, self).__init__()
self._java_obj = self._new_java_obj(
"org.apache.spark.ml.regression.LinearRegression", self.uid)
self._setDefault(maxIter=100, regParam=0.0, tol=1e-6, loss="squaredError", epsilon=1.35)
kwargs = self._input_kwargs
self.setParams(**kwargs)
@keyword_only
@since("1.4.0")
def setParams(self, featuresCol="features", labelCol="label", predictionCol="prediction",
maxIter=100, regParam=0.0, elasticNetParam=0.0, tol=1e-6, fitIntercept=True,
standardization=True, solver="auto", weightCol=None, aggregationDepth=2,
loss="squaredError", epsilon=1.35):
"""
setParams(self, featuresCol="features", labelCol="label", predictionCol="prediction", \
maxIter=100, regParam=0.0, elasticNetParam=0.0, tol=1e-6, fitIntercept=True, \
standardization=True, solver="auto", weightCol=None, aggregationDepth=2, \
loss="squaredError", epsilon=1.35)
Sets params for linear regression.
"""
kwargs = self._input_kwargs
return self._set(**kwargs)
def _create_model(self, java_model):
return LinearRegressionModel(java_model)
@since("2.3.0")
def setEpsilon(self, value):
"""
Sets the value of :py:attr:`epsilon`.
"""
return self._set(epsilon=value)
@since("2.3.0")
def getEpsilon(self):
"""
Gets the value of epsilon or its default value.
"""
return self.getOrDefault(self.epsilon)
class LinearRegressionModel(JavaModel, JavaPredictionModel, JavaMLWritable, JavaMLReadable):
"""
Model fitted by :class:`LinearRegression`.
.. versionadded:: 1.4.0
"""
@property
@since("2.0.0")
def coefficients(self):
"""
Model coefficients.
"""
return self._call_java("coefficients")
@property
@since("1.4.0")
def intercept(self):
"""
Model intercept.
"""
return self._call_java("intercept")
@property
@since("2.3.0")
def scale(self):
"""
The value by which \|y - X'w\| is scaled down when loss is "huber", otherwise 1.0.
"""
return self._call_java("scale")
@property
@since("2.0.0")
def summary(self):
"""
        Gets summary (e.g. residuals, mse, r-squared) of model on
training set. An exception is thrown if
`trainingSummary is None`.
"""
if self.hasSummary:
java_lrt_summary = self._call_java("summary")
return LinearRegressionTrainingSummary(java_lrt_summary)
else:
raise RuntimeError("No training summary available for this %s" %
self.__class__.__name__)
@property
@since("2.0.0")
def hasSummary(self):
"""
Indicates whether a training summary exists for this model
instance.
"""
return self._call_java("hasSummary")
@since("2.0.0")
def evaluate(self, dataset):
"""
Evaluates the model on a test dataset.
:param dataset:
Test dataset to evaluate model on, where dataset is an
instance of :py:class:`pyspark.sql.DataFrame`
"""
if not isinstance(dataset, DataFrame):
raise ValueError("dataset must be a DataFrame but got %s." % type(dataset))
java_lr_summary = self._call_java("evaluate", dataset)
return LinearRegressionSummary(java_lr_summary)
class LinearRegressionSummary(JavaWrapper):
"""
.. note:: Experimental
Linear regression results evaluated on a dataset.
.. versionadded:: 2.0.0
"""
@property
@since("2.0.0")
def predictions(self):
"""
        Dataframe produced by the model's `transform` method.
"""
return self._call_java("predictions")
@property
@since("2.0.0")
def predictionCol(self):
"""
Field in "predictions" which gives the predicted value of
the label at each instance.
"""
return self._call_java("predictionCol")
@property
@since("2.0.0")
def labelCol(self):
"""
Field in "predictions" which gives the true label of each
instance.
"""
return self._call_java("labelCol")
@property
@since("2.0.0")
def featuresCol(self):
"""
Field in "predictions" which gives the features of each instance
as a vector.
"""
return self._call_java("featuresCol")
@property
@since("2.0.0")
def explainedVariance(self):
"""
Returns the explained variance regression score.
explainedVariance = 1 - variance(y - \hat{y}) / variance(y)
.. seealso:: `Wikipedia explain variation \
<http://en.wikipedia.org/wiki/Explained_variation>`_
.. note:: This ignores instance weights (setting all to 1.0) from
`LinearRegression.weightCol`. This will change in later Spark
versions.
"""
return self._call_java("explainedVariance")
@property
@since("2.0.0")
def meanAbsoluteError(self):
"""
Returns the mean absolute error, which is a risk function
corresponding to the expected value of the absolute error
loss or l1-norm loss.
.. note:: This ignores instance weights (setting all to 1.0) from
`LinearRegression.weightCol`. This will change in later Spark
versions.
"""
return self._call_java("meanAbsoluteError")
@property
@since("2.0.0")
def meanSquaredError(self):
"""
Returns the mean squared error, which is a risk function
corresponding to the expected value of the squared error
loss or quadratic loss.
.. note:: This ignores instance weights (setting all to 1.0) from
`LinearRegression.weightCol`. This will change in later Spark
versions.
"""
return self._call_java("meanSquaredError")
@property
@since("2.0.0")
def rootMeanSquaredError(self):
"""
Returns the root mean squared error, which is defined as the
square root of the mean squared error.
.. note:: This ignores instance weights (setting all to 1.0) from
`LinearRegression.weightCol`. This will change in later Spark
versions.
"""
return self._call_java("rootMeanSquaredError")
@property
@since("2.0.0")
def r2(self):
"""
Returns R^2, the coefficient of determination.
.. seealso:: `Wikipedia coefficient of determination \
<http://en.wikipedia.org/wiki/Coefficient_of_determination>`_
.. note:: This ignores instance weights (setting all to 1.0) from
`LinearRegression.weightCol`. This will change in later Spark
versions.
"""
return self._call_java("r2")
@property
@since("2.4.0")
def r2adj(self):
"""
Returns Adjusted R^2, the adjusted coefficient of determination.
.. seealso:: `Wikipedia coefficient of determination, Adjusted R^2 \
<https://en.wikipedia.org/wiki/Coefficient_of_determination#Adjusted_R2>`_
.. note:: This ignores instance weights (setting all to 1.0) from
`LinearRegression.weightCol`. This will change in later Spark versions.
"""
return self._call_java("r2adj")
@property
@since("2.0.0")
def residuals(self):
"""
Residuals (label - predicted value)
"""
return self._call_java("residuals")
@property
@since("2.0.0")
def numInstances(self):
"""
Number of instances in DataFrame predictions
"""
return self._call_java("numInstances")
@property
@since("2.2.0")
def degreesOfFreedom(self):
"""
Degrees of freedom.
"""
return self._call_java("degreesOfFreedom")
@property
@since("2.0.0")
def devianceResiduals(self):
"""
The weighted residuals, the usual residuals rescaled by the
square root of the instance weights.
"""
return self._call_java("devianceResiduals")
@property
@since("2.0.0")
def coefficientStandardErrors(self):
"""
Standard error of estimated coefficients and intercept.
This value is only available when using the "normal" solver.
If :py:attr:`LinearRegression.fitIntercept` is set to True,
then the last element returned corresponds to the intercept.
.. seealso:: :py:attr:`LinearRegression.solver`
"""
return self._call_java("coefficientStandardErrors")
@property
@since("2.0.0")
def tValues(self):
"""
T-statistic of estimated coefficients and intercept.
This value is only available when using the "normal" solver.
If :py:attr:`LinearRegression.fitIntercept` is set to True,
then the last element returned corresponds to the intercept.
.. seealso:: :py:attr:`LinearRegression.solver`
"""
return self._call_java("tValues")
@property
@since("2.0.0")
def pValues(self):
"""
Two-sided p-value of estimated coefficients and intercept.
This value is only available when using the "normal" solver.
If :py:attr:`LinearRegression.fitIntercept` is set to True,
then the last element returned corresponds to the intercept.
.. seealso:: :py:attr:`LinearRegression.solver`
"""
return self._call_java("pValues")
@inherit_doc
class LinearRegressionTrainingSummary(LinearRegressionSummary):
"""
.. note:: Experimental
Linear regression training results. Currently, the training summary ignores the
training weights except for the objective trace.
.. versionadded:: 2.0.0
"""
@property
@since("2.0.0")
def objectiveHistory(self):
"""
Objective function (scaled loss + regularization) at each
iteration.
This value is only available when using the "l-bfgs" solver.
.. seealso:: :py:attr:`LinearRegression.solver`
"""
return self._call_java("objectiveHistory")
@property
@since("2.0.0")
def totalIterations(self):
"""
Number of training iterations until termination.
This value is only available when using the "l-bfgs" solver.
.. seealso:: :py:attr:`LinearRegression.solver`
"""
return self._call_java("totalIterations")
@inherit_doc
class IsotonicRegression(JavaEstimator, HasFeaturesCol, HasLabelCol, HasPredictionCol,
HasWeightCol, JavaMLWritable, JavaMLReadable):
"""
Currently implemented using parallelized pool adjacent violators algorithm.
Only univariate (single feature) algorithm supported.
>>> from pyspark.ml.linalg import Vectors
>>> df = spark.createDataFrame([
... (1.0, Vectors.dense(1.0)),
... (0.0, Vectors.sparse(1, [], []))], ["label", "features"])
>>> ir = IsotonicRegression()
>>> model = ir.fit(df)
>>> test0 = spark.createDataFrame([(Vectors.dense(-1.0),)], ["features"])
>>> model.transform(test0).head().prediction
0.0
>>> model.boundaries
DenseVector([0.0, 1.0])
>>> ir_path = temp_path + "/ir"
>>> ir.save(ir_path)
>>> ir2 = IsotonicRegression.load(ir_path)
>>> ir2.getIsotonic()
True
>>> model_path = temp_path + "/ir_model"
>>> model.save(model_path)
>>> model2 = IsotonicRegressionModel.load(model_path)
>>> model.boundaries == model2.boundaries
True
>>> model.predictions == model2.predictions
True
.. versionadded:: 1.6.0
"""
isotonic = \
Param(Params._dummy(), "isotonic",
"whether the output sequence should be isotonic/increasing (true) or" +
"antitonic/decreasing (false).", typeConverter=TypeConverters.toBoolean)
featureIndex = \
Param(Params._dummy(), "featureIndex",
"The index of the feature if featuresCol is a vector column, no effect otherwise.",
typeConverter=TypeConverters.toInt)
@keyword_only
def __init__(self, featuresCol="features", labelCol="label", predictionCol="prediction",
weightCol=None, isotonic=True, featureIndex=0):
"""
__init__(self, featuresCol="features", labelCol="label", predictionCol="prediction", \
weightCol=None, isotonic=True, featureIndex=0):
"""
super(IsotonicRegression, self).__init__()
self._java_obj = self._new_java_obj(
"org.apache.spark.ml.regression.IsotonicRegression", self.uid)
self._setDefault(isotonic=True, featureIndex=0)
kwargs = self._input_kwargs
self.setParams(**kwargs)
@keyword_only
def setParams(self, featuresCol="features", labelCol="label", predictionCol="prediction",
weightCol=None, isotonic=True, featureIndex=0):
"""
setParams(self, featuresCol="features", labelCol="label", predictionCol="prediction", \
weightCol=None, isotonic=True, featureIndex=0):
Set the params for IsotonicRegression.
"""
kwargs = self._input_kwargs
return self._set(**kwargs)
def _create_model(self, java_model):
return IsotonicRegressionModel(java_model)
def setIsotonic(self, value):
"""
Sets the value of :py:attr:`isotonic`.
"""
return self._set(isotonic=value)
def getIsotonic(self):
"""
Gets the value of isotonic or its default value.
"""
return self.getOrDefault(self.isotonic)
def setFeatureIndex(self, value):
"""
Sets the value of :py:attr:`featureIndex`.
"""
return self._set(featureIndex=value)
def getFeatureIndex(self):
"""
Gets the value of featureIndex or its default value.
"""
return self.getOrDefault(self.featureIndex)
class IsotonicRegressionModel(JavaModel, JavaMLWritable, JavaMLReadable):
"""
Model fitted by :class:`IsotonicRegression`.
.. versionadded:: 1.6.0
"""
@property
@since("1.6.0")
def boundaries(self):
"""
Boundaries in increasing order for which predictions are known.
"""
return self._call_java("boundaries")
@property
@since("1.6.0")
def predictions(self):
"""
Predictions associated with the boundaries at the same index, monotone because of isotonic
regression.
"""
return self._call_java("predictions")
class TreeEnsembleParams(DecisionTreeParams):
"""
Mixin for Decision Tree-based ensemble algorithms parameters.
"""
subsamplingRate = Param(Params._dummy(), "subsamplingRate", "Fraction of the training data " +
"used for learning each decision tree, in range (0, 1].",
typeConverter=TypeConverters.toFloat)
supportedFeatureSubsetStrategies = ["auto", "all", "onethird", "sqrt", "log2"]
featureSubsetStrategy = \
Param(Params._dummy(), "featureSubsetStrategy",
"The number of features to consider for splits at each tree node. Supported " +
"options: " + ", ".join(supportedFeatureSubsetStrategies) + ", (0.0-1.0], [1-n].",
typeConverter=TypeConverters.toString)
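    # Besides the named strategies, the docstring notation above means a float
    # in (0.0, 1.0] selects that fraction of features and an integer in [1, n]
    # selects that absolute number of features at each tree node.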
def __init__(self):
super(TreeEnsembleParams, self).__init__()
@since("1.4.0")
def setSubsamplingRate(self, value):
"""
Sets the value of :py:attr:`subsamplingRate`.
"""
return self._set(subsamplingRate=value)
@since("1.4.0")
def getSubsamplingRate(self):
"""
Gets the value of subsamplingRate or its default value.
"""
return self.getOrDefault(self.subsamplingRate)
@since("1.4.0")
def setFeatureSubsetStrategy(self, value):
"""
Sets the value of :py:attr:`featureSubsetStrategy`.
.. note:: Deprecated in 2.4.0 and will be removed in 3.0.0.
"""
return self._set(featureSubsetStrategy=value)
@since("1.4.0")
def getFeatureSubsetStrategy(self):
"""
Gets the value of featureSubsetStrategy or its default value.
"""
return self.getOrDefault(self.featureSubsetStrategy)
class TreeRegressorParams(Params):
"""
Private class to track supported impurity measures.
"""
supportedImpurities = ["variance"]
impurity = Param(Params._dummy(), "impurity",
"Criterion used for information gain calculation (case-insensitive). " +
"Supported options: " +
", ".join(supportedImpurities), typeConverter=TypeConverters.toString)
def __init__(self):
super(TreeRegressorParams, self).__init__()
@since("1.4.0")
def setImpurity(self, value):
"""
Sets the value of :py:attr:`impurity`.
"""
return self._set(impurity=value)
@since("1.4.0")
def getImpurity(self):
"""
Gets the value of impurity or its default value.
"""
return self.getOrDefault(self.impurity)
class RandomForestParams(TreeEnsembleParams):
"""
Private class to track supported random forest parameters.
"""
numTrees = Param(Params._dummy(), "numTrees", "Number of trees to train (>= 1).",
typeConverter=TypeConverters.toInt)
def __init__(self):
super(RandomForestParams, self).__init__()
@since("1.4.0")
def setNumTrees(self, value):
"""
Sets the value of :py:attr:`numTrees`.
"""
return self._set(numTrees=value)
@since("1.4.0")
def getNumTrees(self):
"""
Gets the value of numTrees or its default value.
"""
return self.getOrDefault(self.numTrees)
class GBTParams(TreeEnsembleParams):
"""
Private class to track supported GBT params.
"""
supportedLossTypes = ["squared", "absolute"]
@inherit_doc
class DecisionTreeRegressor(JavaEstimator, HasFeaturesCol, HasLabelCol, HasPredictionCol,
DecisionTreeParams, TreeRegressorParams, HasCheckpointInterval,
HasSeed, JavaMLWritable, JavaMLReadable, HasVarianceCol):
"""
`Decision tree <http://en.wikipedia.org/wiki/Decision_tree_learning>`_
learning algorithm for regression.
It supports both continuous and categorical features.
>>> from pyspark.ml.linalg import Vectors
>>> df = spark.createDataFrame([
... (1.0, Vectors.dense(1.0)),
... (0.0, Vectors.sparse(1, [], []))], ["label", "features"])
>>> dt = DecisionTreeRegressor(maxDepth=2, varianceCol="variance")
>>> model = dt.fit(df)
>>> model.depth
1
>>> model.numNodes
3
>>> model.featureImportances
SparseVector(1, {0: 1.0})
>>> model.numFeatures
1
>>> test0 = spark.createDataFrame([(Vectors.dense(-1.0),)], ["features"])
>>> model.transform(test0).head().prediction
0.0
>>> test1 = spark.createDataFrame([(Vectors.sparse(1, [0], [1.0]),)], ["features"])
>>> model.transform(test1).head().prediction
1.0
>>> dtr_path = temp_path + "/dtr"
>>> dt.save(dtr_path)
>>> dt2 = DecisionTreeRegressor.load(dtr_path)
>>> dt2.getMaxDepth()
2
>>> model_path = temp_path + "/dtr_model"
>>> model.save(model_path)
>>> model2 = DecisionTreeRegressionModel.load(model_path)
>>> model.numNodes == model2.numNodes
True
>>> model.depth == model2.depth
True
>>> model.transform(test1).head().variance
0.0
.. versionadded:: 1.4.0
"""
@keyword_only
def __init__(self, featuresCol="features", labelCol="label", predictionCol="prediction",
maxDepth=5, maxBins=32, minInstancesPerNode=1, minInfoGain=0.0,
maxMemoryInMB=256, cacheNodeIds=False, checkpointInterval=10, impurity="variance",
seed=None, varianceCol=None):
"""
__init__(self, featuresCol="features", labelCol="label", predictionCol="prediction", \
maxDepth=5, maxBins=32, minInstancesPerNode=1, minInfoGain=0.0, \
maxMemoryInMB=256, cacheNodeIds=False, checkpointInterval=10, \
impurity="variance", seed=None, varianceCol=None)
"""
super(DecisionTreeRegressor, self).__init__()
self._java_obj = self._new_java_obj(
"org.apache.spark.ml.regression.DecisionTreeRegressor", self.uid)
self._setDefault(maxDepth=5, maxBins=32, minInstancesPerNode=1, minInfoGain=0.0,
maxMemoryInMB=256, cacheNodeIds=False, checkpointInterval=10,
impurity="variance")
kwargs = self._input_kwargs
self.setParams(**kwargs)
@keyword_only
@since("1.4.0")
def setParams(self, featuresCol="features", labelCol="label", predictionCol="prediction",
maxDepth=5, maxBins=32, minInstancesPerNode=1, minInfoGain=0.0,
maxMemoryInMB=256, cacheNodeIds=False, checkpointInterval=10,
impurity="variance", seed=None, varianceCol=None):
"""
setParams(self, featuresCol="features", labelCol="label", predictionCol="prediction", \
maxDepth=5, maxBins=32, minInstancesPerNode=1, minInfoGain=0.0, \
maxMemoryInMB=256, cacheNodeIds=False, checkpointInterval=10, \
impurity="variance", seed=None, varianceCol=None)
Sets params for the DecisionTreeRegressor.
"""
kwargs = self._input_kwargs
return self._set(**kwargs)
def _create_model(self, java_model):
return DecisionTreeRegressionModel(java_model)
@inherit_doc
class DecisionTreeModel(JavaModel, JavaPredictionModel):
"""
Abstraction for Decision Tree models.
.. versionadded:: 1.5.0
"""
@property
@since("1.5.0")
def numNodes(self):
"""Return number of nodes of the decision tree."""
return self._call_java("numNodes")
@property
@since("1.5.0")
def depth(self):
"""Return depth of the decision tree."""
return self._call_java("depth")
@property
@since("2.0.0")
def toDebugString(self):
"""Full description of model."""
return self._call_java("toDebugString")
def __repr__(self):
return self._call_java("toString")
@inherit_doc
class TreeEnsembleModel(JavaModel):
"""
(private abstraction)
Represents a tree ensemble model.
"""
@property
@since("2.0.0")
def trees(self):
"""Trees in this ensemble. Warning: These have null parent Estimators."""
return [DecisionTreeModel(m) for m in list(self._call_java("trees"))]
@property
@since("2.0.0")
def getNumTrees(self):
"""Number of trees in ensemble."""
return self._call_java("getNumTrees")
@property
@since("1.5.0")
def treeWeights(self):
"""Return the weights for each tree"""
return list(self._call_java("javaTreeWeights"))
@property
@since("2.0.0")
def totalNumNodes(self):
"""Total number of nodes, summed over all trees in the ensemble."""
return self._call_java("totalNumNodes")
@property
@since("2.0.0")
def toDebugString(self):
"""Full description of model."""
return self._call_java("toDebugString")
def __repr__(self):
return self._call_java("toString")
@inherit_doc
class DecisionTreeRegressionModel(DecisionTreeModel, JavaMLWritable, JavaMLReadable):
"""
Model fitted by :class:`DecisionTreeRegressor`.
.. versionadded:: 1.4.0
"""
@property
@since("2.0.0")
def featureImportances(self):
"""
Estimate of the importance of each feature.
This generalizes the idea of "Gini" importance to other losses,
following the explanation of Gini importance from "Random Forests" documentation
by Leo Breiman and Adele Cutler, and following the implementation from scikit-learn.
This feature importance is calculated as follows:
- importance(feature j) = sum (over nodes which split on feature j) of the gain,
where gain is scaled by the number of instances passing through node
- Normalize importances for tree to sum to 1.
.. note:: Feature importance for single decision trees can have high variance due to
correlated predictor variables. Consider using a :py:class:`RandomForestRegressor`
to determine feature importance instead.
"""
return self._call_java("featureImportances")
@inherit_doc
class RandomForestRegressor(JavaEstimator, HasFeaturesCol, HasLabelCol, HasPredictionCol, HasSeed,
RandomForestParams, TreeRegressorParams, HasCheckpointInterval,
JavaMLWritable, JavaMLReadable):
"""
`Random Forest <http://en.wikipedia.org/wiki/Random_forest>`_
learning algorithm for regression.
It supports both continuous and categorical features.
>>> from numpy import allclose
>>> from pyspark.ml.linalg import Vectors
>>> df = spark.createDataFrame([
... (1.0, Vectors.dense(1.0)),
... (0.0, Vectors.sparse(1, [], []))], ["label", "features"])
>>> rf = RandomForestRegressor(numTrees=2, maxDepth=2, seed=42)
>>> model = rf.fit(df)
>>> model.featureImportances
SparseVector(1, {0: 1.0})
>>> allclose(model.treeWeights, [1.0, 1.0])
True
>>> test0 = spark.createDataFrame([(Vectors.dense(-1.0),)], ["features"])
>>> model.transform(test0).head().prediction
0.0
>>> model.numFeatures
1
>>> model.trees
[DecisionTreeRegressionModel (uid=...) of depth..., DecisionTreeRegressionModel...]
>>> model.getNumTrees
2
>>> test1 = spark.createDataFrame([(Vectors.sparse(1, [0], [1.0]),)], ["features"])
>>> model.transform(test1).head().prediction
0.5
>>> rfr_path = temp_path + "/rfr"
>>> rf.save(rfr_path)
>>> rf2 = RandomForestRegressor.load(rfr_path)
>>> rf2.getNumTrees()
2
>>> model_path = temp_path + "/rfr_model"
>>> model.save(model_path)
>>> model2 = RandomForestRegressionModel.load(model_path)
>>> model.featureImportances == model2.featureImportances
True
.. versionadded:: 1.4.0
"""
@keyword_only
def __init__(self, featuresCol="features", labelCol="label", predictionCol="prediction",
maxDepth=5, maxBins=32, minInstancesPerNode=1, minInfoGain=0.0,
maxMemoryInMB=256, cacheNodeIds=False, checkpointInterval=10,
impurity="variance", subsamplingRate=1.0, seed=None, numTrees=20,
featureSubsetStrategy="auto"):
"""
__init__(self, featuresCol="features", labelCol="label", predictionCol="prediction", \
maxDepth=5, maxBins=32, minInstancesPerNode=1, minInfoGain=0.0, \
maxMemoryInMB=256, cacheNodeIds=False, checkpointInterval=10, \
impurity="variance", subsamplingRate=1.0, seed=None, numTrees=20, \
featureSubsetStrategy="auto")
"""
super(RandomForestRegressor, self).__init__()
self._java_obj = self._new_java_obj(
"org.apache.spark.ml.regression.RandomForestRegressor", self.uid)
self._setDefault(maxDepth=5, maxBins=32, minInstancesPerNode=1, minInfoGain=0.0,
maxMemoryInMB=256, cacheNodeIds=False, checkpointInterval=10,
impurity="variance", subsamplingRate=1.0, numTrees=20,
featureSubsetStrategy="auto")
kwargs = self._input_kwargs
self.setParams(**kwargs)
@keyword_only
@since("1.4.0")
def setParams(self, featuresCol="features", labelCol="label", predictionCol="prediction",
maxDepth=5, maxBins=32, minInstancesPerNode=1, minInfoGain=0.0,
maxMemoryInMB=256, cacheNodeIds=False, checkpointInterval=10,
impurity="variance", subsamplingRate=1.0, seed=None, numTrees=20,
featureSubsetStrategy="auto"):
"""
setParams(self, featuresCol="features", labelCol="label", predictionCol="prediction", \
maxDepth=5, maxBins=32, minInstancesPerNode=1, minInfoGain=0.0, \
maxMemoryInMB=256, cacheNodeIds=False, checkpointInterval=10, \
impurity="variance", subsamplingRate=1.0, seed=None, numTrees=20, \
featureSubsetStrategy="auto")
        Sets params for RandomForestRegressor.
"""
kwargs = self._input_kwargs
return self._set(**kwargs)
def _create_model(self, java_model):
return RandomForestRegressionModel(java_model)
@since("2.4.0")
def setFeatureSubsetStrategy(self, value):
"""
Sets the value of :py:attr:`featureSubsetStrategy`.
"""
return self._set(featureSubsetStrategy=value)
class RandomForestRegressionModel(TreeEnsembleModel, JavaPredictionModel, JavaMLWritable,
JavaMLReadable):
"""
Model fitted by :class:`RandomForestRegressor`.
.. versionadded:: 1.4.0
"""
@property
@since("2.0.0")
def trees(self):
"""Trees in this ensemble. Warning: These have null parent Estimators."""
return [DecisionTreeRegressionModel(m) for m in list(self._call_java("trees"))]
@property
@since("2.0.0")
def featureImportances(self):
"""
Estimate of the importance of each feature.
        Each feature's importance is the average of its importance across all trees in the ensemble.
The importance vector is normalized to sum to 1. This method is suggested by Hastie et al.
(Hastie, Tibshirani, Friedman. "The Elements of Statistical Learning, 2nd Edition." 2001.)
and follows the implementation from scikit-learn.
.. seealso:: :py:attr:`DecisionTreeRegressionModel.featureImportances`
"""
return self._call_java("featureImportances")
@inherit_doc
class GBTRegressor(JavaEstimator, HasFeaturesCol, HasLabelCol, HasPredictionCol, HasMaxIter,
GBTParams, HasCheckpointInterval, HasStepSize, HasSeed, JavaMLWritable,
JavaMLReadable, TreeRegressorParams):
"""
`Gradient-Boosted Trees (GBTs) <http://en.wikipedia.org/wiki/Gradient_boosting>`_
learning algorithm for regression.
It supports both continuous and categorical features.
>>> from numpy import allclose
>>> from pyspark.ml.linalg import Vectors
>>> df = spark.createDataFrame([
... (1.0, Vectors.dense(1.0)),
... (0.0, Vectors.sparse(1, [], []))], ["label", "features"])
>>> gbt = GBTRegressor(maxIter=5, maxDepth=2, seed=42)
>>> print(gbt.getImpurity())
variance
>>> print(gbt.getFeatureSubsetStrategy())
all
>>> model = gbt.fit(df)
>>> model.featureImportances
SparseVector(1, {0: 1.0})
>>> model.numFeatures
1
>>> allclose(model.treeWeights, [1.0, 0.1, 0.1, 0.1, 0.1])
True
>>> test0 = spark.createDataFrame([(Vectors.dense(-1.0),)], ["features"])
>>> model.transform(test0).head().prediction
0.0
>>> test1 = spark.createDataFrame([(Vectors.sparse(1, [0], [1.0]),)], ["features"])
>>> model.transform(test1).head().prediction
1.0
>>> gbtr_path = temp_path + "gbtr"
>>> gbt.save(gbtr_path)
>>> gbt2 = GBTRegressor.load(gbtr_path)
>>> gbt2.getMaxDepth()
2
>>> model_path = temp_path + "gbtr_model"
>>> model.save(model_path)
>>> model2 = GBTRegressionModel.load(model_path)
>>> model.featureImportances == model2.featureImportances
True
>>> model.treeWeights == model2.treeWeights
True
>>> model.trees
[DecisionTreeRegressionModel (uid=...) of depth..., DecisionTreeRegressionModel...]
>>> validation = spark.createDataFrame([(0.0, Vectors.dense(-1.0))],
... ["label", "features"])
>>> model.evaluateEachIteration(validation, "squared")
[0.0, 0.0, 0.0, 0.0, 0.0]
.. versionadded:: 1.4.0
"""
lossType = Param(Params._dummy(), "lossType",
"Loss function which GBT tries to minimize (case-insensitive). " +
"Supported options: " + ", ".join(GBTParams.supportedLossTypes),
typeConverter=TypeConverters.toString)
stepSize = Param(Params._dummy(), "stepSize",
"Step size (a.k.a. learning rate) in interval (0, 1] for shrinking " +
"the contribution of each estimator.",
typeConverter=TypeConverters.toFloat)
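    # Shrinkage in brief (a hedged note, not from the original source): each
    # boosting stage adds F_m(x) = F_{m-1}(x) + stepSize * h_m(x), so with the
    # default stepSize=0.1 the fitted tree weights come out as [1.0, 0.1, 0.1, ...],
    # matching the treeWeights doctest in the class docstring above.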
@keyword_only
def __init__(self, featuresCol="features", labelCol="label", predictionCol="prediction",
maxDepth=5, maxBins=32, minInstancesPerNode=1, minInfoGain=0.0,
maxMemoryInMB=256, cacheNodeIds=False, subsamplingRate=1.0,
checkpointInterval=10, lossType="squared", maxIter=20, stepSize=0.1, seed=None,
impurity="variance", featureSubsetStrategy="all"):
"""
__init__(self, featuresCol="features", labelCol="label", predictionCol="prediction", \
maxDepth=5, maxBins=32, minInstancesPerNode=1, minInfoGain=0.0, \
maxMemoryInMB=256, cacheNodeIds=False, subsamplingRate=1.0, \
checkpointInterval=10, lossType="squared", maxIter=20, stepSize=0.1, seed=None, \
impurity="variance", featureSubsetStrategy="all")
"""
super(GBTRegressor, self).__init__()
self._java_obj = self._new_java_obj("org.apache.spark.ml.regression.GBTRegressor", self.uid)
self._setDefault(maxDepth=5, maxBins=32, minInstancesPerNode=1, minInfoGain=0.0,
maxMemoryInMB=256, cacheNodeIds=False, subsamplingRate=1.0,
checkpointInterval=10, lossType="squared", maxIter=20, stepSize=0.1,
impurity="variance", featureSubsetStrategy="all")
kwargs = self._input_kwargs
self.setParams(**kwargs)
@keyword_only
@since("1.4.0")
def setParams(self, featuresCol="features", labelCol="label", predictionCol="prediction",
maxDepth=5, maxBins=32, minInstancesPerNode=1, minInfoGain=0.0,
maxMemoryInMB=256, cacheNodeIds=False, subsamplingRate=1.0,
checkpointInterval=10, lossType="squared", maxIter=20, stepSize=0.1, seed=None,
impuriy="variance", featureSubsetStrategy="all"):
"""
setParams(self, featuresCol="features", labelCol="label", predictionCol="prediction", \
maxDepth=5, maxBins=32, minInstancesPerNode=1, minInfoGain=0.0, \
maxMemoryInMB=256, cacheNodeIds=False, subsamplingRate=1.0, \
checkpointInterval=10, lossType="squared", maxIter=20, stepSize=0.1, seed=None, \
impurity="variance", featureSubsetStrategy="all")
Sets params for Gradient Boosted Tree Regression.
"""
kwargs = self._input_kwargs
return self._set(**kwargs)
def _create_model(self, java_model):
return GBTRegressionModel(java_model)
@since("1.4.0")
def setLossType(self, value):
"""
Sets the value of :py:attr:`lossType`.
"""
return self._set(lossType=value)
@since("1.4.0")
def getLossType(self):
"""
Gets the value of lossType or its default value.
"""
return self.getOrDefault(self.lossType)
@since("2.4.0")
def setFeatureSubsetStrategy(self, value):
"""
Sets the value of :py:attr:`featureSubsetStrategy`.
"""
return self._set(featureSubsetStrategy=value)
class GBTRegressionModel(TreeEnsembleModel, JavaPredictionModel, JavaMLWritable, JavaMLReadable):
"""
Model fitted by :class:`GBTRegressor`.
.. versionadded:: 1.4.0
"""
@property
@since("2.0.0")
def featureImportances(self):
"""
Estimate of the importance of each feature.
        Each feature's importance is the average of its importance across all trees in the ensemble.
The importance vector is normalized to sum to 1. This method is suggested by Hastie et al.
(Hastie, Tibshirani, Friedman. "The Elements of Statistical Learning, 2nd Edition." 2001.)
and follows the implementation from scikit-learn.
.. seealso:: :py:attr:`DecisionTreeRegressionModel.featureImportances`
"""
return self._call_java("featureImportances")
@property
@since("2.0.0")
def trees(self):
"""Trees in this ensemble. Warning: These have null parent Estimators."""
return [DecisionTreeRegressionModel(m) for m in list(self._call_java("trees"))]
@since("2.4.0")
def evaluateEachIteration(self, dataset, loss):
"""
Method to compute error or loss for every iteration of gradient boosting.
:param dataset:
Test dataset to evaluate model on, where dataset is an
instance of :py:class:`pyspark.sql.DataFrame`
:param loss:
The loss function used to compute error.
Supported options: squared, absolute
"""
return self._call_java("evaluateEachIteration", dataset, loss)
@inherit_doc
class AFTSurvivalRegression(JavaEstimator, HasFeaturesCol, HasLabelCol, HasPredictionCol,
HasFitIntercept, HasMaxIter, HasTol, HasAggregationDepth,
JavaMLWritable, JavaMLReadable):
"""
.. note:: Experimental
Accelerated Failure Time (AFT) Model Survival Regression
Fit a parametric AFT survival regression model based on the Weibull distribution
of the survival time.
.. seealso:: `AFT Model <https://en.wikipedia.org/wiki/Accelerated_failure_time_model>`_
>>> from pyspark.ml.linalg import Vectors
>>> df = spark.createDataFrame([
... (1.0, Vectors.dense(1.0), 1.0),
... (1e-40, Vectors.sparse(1, [], []), 0.0)], ["label", "features", "censor"])
>>> aftsr = AFTSurvivalRegression()
>>> model = aftsr.fit(df)
>>> model.predict(Vectors.dense(6.3))
1.0
>>> model.predictQuantiles(Vectors.dense(6.3))
DenseVector([0.0101, 0.0513, 0.1054, 0.2877, 0.6931, 1.3863, 2.3026, 2.9957, 4.6052])
>>> model.transform(df).show()
+-------+---------+------+----------+
| label| features|censor|prediction|
+-------+---------+------+----------+
| 1.0| [1.0]| 1.0| 1.0|
|1.0E-40|(1,[],[])| 0.0| 1.0|
+-------+---------+------+----------+
...
>>> aftsr_path = temp_path + "/aftsr"
>>> aftsr.save(aftsr_path)
>>> aftsr2 = AFTSurvivalRegression.load(aftsr_path)
>>> aftsr2.getMaxIter()
100
>>> model_path = temp_path + "/aftsr_model"
>>> model.save(model_path)
>>> model2 = AFTSurvivalRegressionModel.load(model_path)
>>> model.coefficients == model2.coefficients
True
>>> model.intercept == model2.intercept
True
>>> model.scale == model2.scale
True
.. versionadded:: 1.6.0
"""
censorCol = Param(Params._dummy(), "censorCol",
"censor column name. The value of this column could be 0 or 1. " +
"If the value is 1, it means the event has occurred i.e. " +
"uncensored; otherwise censored.", typeConverter=TypeConverters.toString)
quantileProbabilities = \
Param(Params._dummy(), "quantileProbabilities",
"quantile probabilities array. Values of the quantile probabilities array " +
"should be in the range (0, 1) and the array should be non-empty.",
typeConverter=TypeConverters.toListFloat)
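    # Note (hedged observation): the nine default probabilities above are exactly
    # what the predictQuantiles doctest in the class docstring evaluates -- one
    # predicted quantile per probability, in the same order.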
quantilesCol = Param(Params._dummy(), "quantilesCol",
"quantiles column name. This column will output quantiles of " +
"corresponding quantileProbabilities if it is set.",
typeConverter=TypeConverters.toString)
@keyword_only
def __init__(self, featuresCol="features", labelCol="label", predictionCol="prediction",
fitIntercept=True, maxIter=100, tol=1E-6, censorCol="censor",
quantileProbabilities=list([0.01, 0.05, 0.1, 0.25, 0.5, 0.75, 0.9, 0.95, 0.99]),
quantilesCol=None, aggregationDepth=2):
"""
__init__(self, featuresCol="features", labelCol="label", predictionCol="prediction", \
fitIntercept=True, maxIter=100, tol=1E-6, censorCol="censor", \
quantileProbabilities=[0.01, 0.05, 0.1, 0.25, 0.5, 0.75, 0.9, 0.95, 0.99], \
quantilesCol=None, aggregationDepth=2)
"""
super(AFTSurvivalRegression, self).__init__()
self._java_obj = self._new_java_obj(
"org.apache.spark.ml.regression.AFTSurvivalRegression", self.uid)
self._setDefault(censorCol="censor",
quantileProbabilities=[0.01, 0.05, 0.1, 0.25, 0.5, 0.75, 0.9, 0.95, 0.99],
maxIter=100, tol=1E-6)
kwargs = self._input_kwargs
self.setParams(**kwargs)
@keyword_only
@since("1.6.0")
def setParams(self, featuresCol="features", labelCol="label", predictionCol="prediction",
fitIntercept=True, maxIter=100, tol=1E-6, censorCol="censor",
quantileProbabilities=list([0.01, 0.05, 0.1, 0.25, 0.5, 0.75, 0.9, 0.95, 0.99]),
quantilesCol=None, aggregationDepth=2):
"""
setParams(self, featuresCol="features", labelCol="label", predictionCol="prediction", \
fitIntercept=True, maxIter=100, tol=1E-6, censorCol="censor", \
quantileProbabilities=[0.01, 0.05, 0.1, 0.25, 0.5, 0.75, 0.9, 0.95, 0.99], \
                  quantilesCol=None, aggregationDepth=2)
        Sets params for AFTSurvivalRegression.
        """
kwargs = self._input_kwargs
return self._set(**kwargs)
def _create_model(self, java_model):
return AFTSurvivalRegressionModel(java_model)
@since("1.6.0")
def setCensorCol(self, value):
"""
Sets the value of :py:attr:`censorCol`.
"""
return self._set(censorCol=value)
@since("1.6.0")
def getCensorCol(self):
"""
Gets the value of censorCol or its default value.
"""
return self.getOrDefault(self.censorCol)
@since("1.6.0")
def setQuantileProbabilities(self, value):
"""
Sets the value of :py:attr:`quantileProbabilities`.
"""
return self._set(quantileProbabilities=value)
@since("1.6.0")
def getQuantileProbabilities(self):
"""
Gets the value of quantileProbabilities or its default value.
"""
return self.getOrDefault(self.quantileProbabilities)
@since("1.6.0")
def setQuantilesCol(self, value):
"""
Sets the value of :py:attr:`quantilesCol`.
"""
return self._set(quantilesCol=value)
@since("1.6.0")
def getQuantilesCol(self):
"""
Gets the value of quantilesCol or its default value.
"""
return self.getOrDefault(self.quantilesCol)
class AFTSurvivalRegressionModel(JavaModel, JavaMLWritable, JavaMLReadable):
"""
.. note:: Experimental
Model fitted by :class:`AFTSurvivalRegression`.
.. versionadded:: 1.6.0
"""
@property
@since("2.0.0")
def coefficients(self):
"""
Model coefficients.
"""
return self._call_java("coefficients")
@property
@since("1.6.0")
def intercept(self):
"""
Model intercept.
"""
return self._call_java("intercept")
@property
@since("1.6.0")
def scale(self):
"""
        Model scale parameter.
"""
return self._call_java("scale")
@since("2.0.0")
def predictQuantiles(self, features):
"""
        Predicted quantiles for the given features.
"""
return self._call_java("predictQuantiles", features)
@since("2.0.0")
def predict(self, features):
"""
        Predicted value for the given features.
"""
return self._call_java("predict", features)
@inherit_doc
class GeneralizedLinearRegression(JavaEstimator, HasLabelCol, HasFeaturesCol, HasPredictionCol,
HasFitIntercept, HasMaxIter, HasTol, HasRegParam, HasWeightCol,
HasSolver, JavaMLWritable, JavaMLReadable):
"""
.. note:: Experimental
Generalized Linear Regression.
Fit a Generalized Linear Model specified by giving a symbolic description of the linear
predictor (link function) and a description of the error distribution (family). It supports
"gaussian", "binomial", "poisson", "gamma" and "tweedie" as family. Valid link functions for
each family is listed below. The first link function of each family is the default one.
* "gaussian" -> "identity", "log", "inverse"
* "binomial" -> "logit", "probit", "cloglog"
* "poisson" -> "log", "identity", "sqrt"
* "gamma" -> "inverse", "identity", "log"
* "tweedie" -> power link function specified through "linkPower". \
The default link power in the tweedie family is 1 - variancePower.
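    For example (illustrative values, not from the original docs), a tweedie model
    with variancePower=1.6 uses the default linkPower 1 - 1.6 = -0.6 unless
    linkPower is set explicitly.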
.. seealso:: `GLM <https://en.wikipedia.org/wiki/Generalized_linear_model>`_
>>> from pyspark.ml.linalg import Vectors
>>> df = spark.createDataFrame([
... (1.0, Vectors.dense(0.0, 0.0)),
... (1.0, Vectors.dense(1.0, 2.0)),
... (2.0, Vectors.dense(0.0, 0.0)),
... (2.0, Vectors.dense(1.0, 1.0)),], ["label", "features"])
>>> glr = GeneralizedLinearRegression(family="gaussian", link="identity", linkPredictionCol="p")
>>> model = glr.fit(df)
>>> transformed = model.transform(df)
>>> abs(transformed.head().prediction - 1.5) < 0.001
True
>>> abs(transformed.head().p - 1.5) < 0.001
True
>>> model.coefficients
DenseVector([1.5..., -1.0...])
>>> model.numFeatures
2
>>> abs(model.intercept - 1.5) < 0.001
True
>>> glr_path = temp_path + "/glr"
>>> glr.save(glr_path)
>>> glr2 = GeneralizedLinearRegression.load(glr_path)
>>> glr.getFamily() == glr2.getFamily()
True
>>> model_path = temp_path + "/glr_model"
>>> model.save(model_path)
>>> model2 = GeneralizedLinearRegressionModel.load(model_path)
>>> model.intercept == model2.intercept
True
>>> model.coefficients[0] == model2.coefficients[0]
True
.. versionadded:: 2.0.0
"""
family = Param(Params._dummy(), "family", "The name of family which is a description of " +
"the error distribution to be used in the model. Supported options: " +
"gaussian (default), binomial, poisson, gamma and tweedie.",
typeConverter=TypeConverters.toString)
link = Param(Params._dummy(), "link", "The name of link function which provides the " +
"relationship between the linear predictor and the mean of the distribution " +
"function. Supported options: identity, log, inverse, logit, probit, cloglog " +
"and sqrt.", typeConverter=TypeConverters.toString)
linkPredictionCol = Param(Params._dummy(), "linkPredictionCol", "link prediction (linear " +
"predictor) column name", typeConverter=TypeConverters.toString)
variancePower = Param(Params._dummy(), "variancePower", "The power in the variance function " +
"of the Tweedie distribution which characterizes the relationship " +
"between the variance and mean of the distribution. Only applicable " +
"for the Tweedie family. Supported values: 0 and [1, Inf).",
typeConverter=TypeConverters.toFloat)
linkPower = Param(Params._dummy(), "linkPower", "The index in the power link function. " +
"Only applicable to the Tweedie family.",
typeConverter=TypeConverters.toFloat)
solver = Param(Params._dummy(), "solver", "The solver algorithm for optimization. Supported " +
"options: irls.", typeConverter=TypeConverters.toString)
offsetCol = Param(Params._dummy(), "offsetCol", "The offset column name. If this is not set " +
"or empty, we treat all instance offsets as 0.0",
typeConverter=TypeConverters.toString)
@keyword_only
def __init__(self, labelCol="label", featuresCol="features", predictionCol="prediction",
family="gaussian", link=None, fitIntercept=True, maxIter=25, tol=1e-6,
regParam=0.0, weightCol=None, solver="irls", linkPredictionCol=None,
variancePower=0.0, linkPower=None, offsetCol=None):
"""
__init__(self, labelCol="label", featuresCol="features", predictionCol="prediction", \
family="gaussian", link=None, fitIntercept=True, maxIter=25, tol=1e-6, \
regParam=0.0, weightCol=None, solver="irls", linkPredictionCol=None, \
variancePower=0.0, linkPower=None, offsetCol=None)
"""
super(GeneralizedLinearRegression, self).__init__()
self._java_obj = self._new_java_obj(
"org.apache.spark.ml.regression.GeneralizedLinearRegression", self.uid)
self._setDefault(family="gaussian", maxIter=25, tol=1e-6, regParam=0.0, solver="irls",
variancePower=0.0)
kwargs = self._input_kwargs
self.setParams(**kwargs)
@keyword_only
@since("2.0.0")
def setParams(self, labelCol="label", featuresCol="features", predictionCol="prediction",
family="gaussian", link=None, fitIntercept=True, maxIter=25, tol=1e-6,
regParam=0.0, weightCol=None, solver="irls", linkPredictionCol=None,
variancePower=0.0, linkPower=None, offsetCol=None):
"""
setParams(self, labelCol="label", featuresCol="features", predictionCol="prediction", \
family="gaussian", link=None, fitIntercept=True, maxIter=25, tol=1e-6, \
regParam=0.0, weightCol=None, solver="irls", linkPredictionCol=None, \
variancePower=0.0, linkPower=None, offsetCol=None)
Sets params for generalized linear regression.
"""
kwargs = self._input_kwargs
return self._set(**kwargs)
def _create_model(self, java_model):
return GeneralizedLinearRegressionModel(java_model)
@since("2.0.0")
def setFamily(self, value):
"""
Sets the value of :py:attr:`family`.
"""
return self._set(family=value)
@since("2.0.0")
def getFamily(self):
"""
Gets the value of family or its default value.
"""
return self.getOrDefault(self.family)
@since("2.0.0")
def setLinkPredictionCol(self, value):
"""
Sets the value of :py:attr:`linkPredictionCol`.
"""
return self._set(linkPredictionCol=value)
@since("2.0.0")
def getLinkPredictionCol(self):
"""
Gets the value of linkPredictionCol or its default value.
"""
return self.getOrDefault(self.linkPredictionCol)
@since("2.0.0")
def setLink(self, value):
"""
Sets the value of :py:attr:`link`.
"""
return self._set(link=value)
@since("2.0.0")
def getLink(self):
"""
Gets the value of link or its default value.
"""
return self.getOrDefault(self.link)
@since("2.2.0")
def setVariancePower(self, value):
"""
Sets the value of :py:attr:`variancePower`.
"""
return self._set(variancePower=value)
@since("2.2.0")
def getVariancePower(self):
"""
Gets the value of variancePower or its default value.
"""
return self.getOrDefault(self.variancePower)
@since("2.2.0")
def setLinkPower(self, value):
"""
Sets the value of :py:attr:`linkPower`.
"""
return self._set(linkPower=value)
@since("2.2.0")
def getLinkPower(self):
"""
Gets the value of linkPower or its default value.
"""
return self.getOrDefault(self.linkPower)
@since("2.3.0")
def setOffsetCol(self, value):
"""
Sets the value of :py:attr:`offsetCol`.
"""
return self._set(offsetCol=value)
@since("2.3.0")
def getOffsetCol(self):
"""
Gets the value of offsetCol or its default value.
"""
return self.getOrDefault(self.offsetCol)
class GeneralizedLinearRegressionModel(JavaModel, JavaPredictionModel, JavaMLWritable,
JavaMLReadable):
"""
.. note:: Experimental
Model fitted by :class:`GeneralizedLinearRegression`.
.. versionadded:: 2.0.0
"""
@property
@since("2.0.0")
def coefficients(self):
"""
Model coefficients.
"""
return self._call_java("coefficients")
@property
@since("2.0.0")
def intercept(self):
"""
Model intercept.
"""
return self._call_java("intercept")
@property
@since("2.0.0")
def summary(self):
"""
Gets summary (e.g. residuals, deviance, pValues) of model on
training set. An exception is thrown if
`trainingSummary is None`.
"""
if self.hasSummary:
java_glrt_summary = self._call_java("summary")
return GeneralizedLinearRegressionTrainingSummary(java_glrt_summary)
else:
raise RuntimeError("No training summary available for this %s" %
self.__class__.__name__)
@property
@since("2.0.0")
def hasSummary(self):
"""
Indicates whether a training summary exists for this model
instance.
"""
return self._call_java("hasSummary")
@since("2.0.0")
def evaluate(self, dataset):
"""
Evaluates the model on a test dataset.
:param dataset:
Test dataset to evaluate model on, where dataset is an
instance of :py:class:`pyspark.sql.DataFrame`
"""
if not isinstance(dataset, DataFrame):
raise ValueError("dataset must be a DataFrame but got %s." % type(dataset))
java_glr_summary = self._call_java("evaluate", dataset)
return GeneralizedLinearRegressionSummary(java_glr_summary)
class GeneralizedLinearRegressionSummary(JavaWrapper):
"""
.. note:: Experimental
Generalized linear regression results evaluated on a dataset.
.. versionadded:: 2.0.0
"""
@property
@since("2.0.0")
def predictions(self):
"""
Predictions output by the model's `transform` method.
"""
return self._call_java("predictions")
@property
@since("2.0.0")
def predictionCol(self):
"""
Field in :py:attr:`predictions` which gives the predicted value of each instance.
This is set to a new column name if the original model's `predictionCol` is not set.
"""
return self._call_java("predictionCol")
@property
@since("2.2.0")
def numInstances(self):
"""
Number of instances in DataFrame predictions.
"""
return self._call_java("numInstances")
@property
@since("2.0.0")
def rank(self):
"""
The numeric rank of the fitted linear model.
"""
return self._call_java("rank")
@property
@since("2.0.0")
def degreesOfFreedom(self):
"""
Degrees of freedom.
"""
return self._call_java("degreesOfFreedom")
@property
@since("2.0.0")
def residualDegreeOfFreedom(self):
"""
The residual degrees of freedom.
"""
return self._call_java("residualDegreeOfFreedom")
@property
@since("2.0.0")
def residualDegreeOfFreedomNull(self):
"""
The residual degrees of freedom for the null model.
"""
return self._call_java("residualDegreeOfFreedomNull")
@since("2.0.0")
def residuals(self, residualsType="deviance"):
"""
Get the residuals of the fitted model by type.
:param residualsType: The type of residuals which should be returned.
Supported options: deviance (default), pearson, working, and response.
"""
return self._call_java("residuals", residualsType)
@property
@since("2.0.0")
def nullDeviance(self):
"""
The deviance for the null model.
"""
return self._call_java("nullDeviance")
@property
@since("2.0.0")
def deviance(self):
"""
The deviance for the fitted model.
"""
return self._call_java("deviance")
@property
@since("2.0.0")
def dispersion(self):
"""
The dispersion of the fitted model.
It is taken as 1.0 for the "binomial" and "poisson" families, and otherwise
estimated by the residual Pearson's Chi-Squared statistic (which is defined as
sum of the squares of the Pearson residuals) divided by the residual degrees of freedom.
"""
return self._call_java("dispersion")
@property
@since("2.0.0")
def aic(self):
"""
        Akaike's "An Information Criterion" (AIC) for the fitted model.
"""
return self._call_java("aic")
@inherit_doc
class GeneralizedLinearRegressionTrainingSummary(GeneralizedLinearRegressionSummary):
"""
.. note:: Experimental
Generalized linear regression training results.
.. versionadded:: 2.0.0
"""
@property
@since("2.0.0")
def numIterations(self):
"""
Number of training iterations.
"""
return self._call_java("numIterations")
@property
@since("2.0.0")
def solver(self):
"""
The numeric solver used for training.
"""
return self._call_java("solver")
@property
@since("2.0.0")
def coefficientStandardErrors(self):
"""
Standard error of estimated coefficients and intercept.
If :py:attr:`GeneralizedLinearRegression.fitIntercept` is set to True,
then the last element returned corresponds to the intercept.
"""
return self._call_java("coefficientStandardErrors")
@property
@since("2.0.0")
def tValues(self):
"""
T-statistic of estimated coefficients and intercept.
If :py:attr:`GeneralizedLinearRegression.fitIntercept` is set to True,
then the last element returned corresponds to the intercept.
"""
return self._call_java("tValues")
@property
@since("2.0.0")
def pValues(self):
"""
Two-sided p-value of estimated coefficients and intercept.
If :py:attr:`GeneralizedLinearRegression.fitIntercept` is set to True,
then the last element returned corresponds to the intercept.
"""
return self._call_java("pValues")
def __repr__(self):
return self._call_java("toString")
if __name__ == "__main__":
    import doctest
    import sys
import pyspark.ml.regression
from pyspark.sql import SparkSession
globs = pyspark.ml.regression.__dict__.copy()
# The small batch size here ensures that we see multiple batches,
# even in these small test examples:
spark = SparkSession.builder\
.master("local[2]")\
.appName("ml.regression tests")\
.getOrCreate()
sc = spark.sparkContext
globs['sc'] = sc
globs['spark'] = spark
import tempfile
temp_path = tempfile.mkdtemp()
globs['temp_path'] = temp_path
try:
(failure_count, test_count) = doctest.testmod(globs=globs, optionflags=doctest.ELLIPSIS)
spark.stop()
finally:
from shutil import rmtree
try:
rmtree(temp_path)
except OSError:
pass
if failure_count:
sys.exit(-1)
|
apache-2.0
|
johnzeringue/ET_Redux
|
src/main/java/org/earthtime/UPb_Redux/initialPbModels/InitialPbModelI.java
|
1986
|
/*
* InitialPbModelI.java
*
* Created on August 5, 2007, 10:26 AM
*
*
* Copyright 2006-2015 James F. Bowring and www.Earth-Time.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.earthtime.UPb_Redux.initialPbModels;
import java.math.BigDecimal;
import org.earthtime.UPb_Redux.valueModels.ValueModel;
/**
*
* @author James F. Bowring
*/
public interface InitialPbModelI {
// accessors
    /**
     * @return the name of this initial Pb model
     */
    abstract String getName();
    /**
     * @param name the name of this initial Pb model
     */
    abstract void setName(String name);
    /**
     * @return the literature reference for this initial Pb model
     */
    abstract String getReference();
    /**
     * @param reference the literature reference for this initial Pb model
     */
    abstract void setReference(String reference);
    /**
     * @return the array of initial Pb isotope ratios
     */
    abstract ValueModel[] getRatios();
    /**
     * @param ratios the array of initial Pb isotope ratios
     */
    abstract void setRatios(ValueModel[] ratios);
    /**
     * @param ratioName the name of the ratio to look up
     * @return the ratio with the given name
     */
    abstract ValueModel getRatioByName(String ratioName);
    // methods
    /**
     * @return a copy of this initial Pb model
     */
    abstract InitialPbModel copy();
    /**
     * Calculates the initial Pb ratios for an estimated age using the supplied
     * decay constants.
     *
     * @param estimatedAgeInMA the estimated age in Ma
     * @param lambda238 the U-238 decay constant
     * @param lambda235 the U-235 decay constant
     * @param lambda232 the Th-232 decay constant
     */
    abstract void calculateRatios(
            BigDecimal estimatedAgeInMA,
            BigDecimal lambda238,
            BigDecimal lambda235,
            BigDecimal lambda232);
}
|
apache-2.0
|
sshcherbakov/incubator-geode
|
gemfire-core/src/main/java/com/gemstone/gemfire/cache/operations/ExecuteCQOperationContext.java
|
2185
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.gemstone.gemfire.cache.operations;
import java.util.Set;
/**
 * Encapsulates a continuous query registration operation for both the
* pre-operation and post-operation cases.
*
* @author Sumedh Wale
* @since 5.5
*/
public class ExecuteCQOperationContext extends QueryOperationContext {
/** The name of the continuous query being registered. */
private String cqName;
/**
* Constructor for the EXECUTE_CQ operation.
*
* @param cqName
* the name of the continuous query being registered
* @param queryString
* the query string for this operation
* @param regionNames
* names of regions that are part of the query string
* @param postOperation
* true to set the post-operation flag
*/
public ExecuteCQOperationContext(String cqName, String queryString,
Set regionNames, boolean postOperation) {
super(queryString, regionNames, postOperation);
this.cqName = cqName;
}
/**
* Return the operation associated with the <code>OperationContext</code>
* object.
*
* @return the <code>OperationCode</code> of this operation
*/
@Override
public OperationCode getOperationCode() {
return OperationCode.EXECUTE_CQ;
}
/** Return the name of the continuous query. */
public String getName() {
return this.cqName;
}
}
|
apache-2.0
|
arborworkflows/Visomics
|
ThirdParty/CTK/ctkErrorLogStreamMessageHandler.cpp
|
6532
|
/*=========================================================================
Library: CTK
Copyright (c) Kitware Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0.txt
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=========================================================================*/
// CTK includes
#include "ctkErrorLogStreamMessageHandler.h"
// STD includes
#include <iostream>
#include <streambuf>
#include <string>
namespace
{
// --------------------------------------------------------------------------
// ctkStreamHandler
//
// See http://lists.trolltech.com/qt-interest/2005-06/thread00166-0.html
//
// --------------------------------------------------------------------------
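// In brief (a summary of the implementation below, not from the original source):
// while enabled, the handler installs itself as the stream's buffer via rdbuf(),
// accumulates characters until a newline, and forwards each complete line to the
// error-log model; disabling restores the saved buffer and flushes leftover text.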
class ctkStreamHandler : public std::streambuf
{
public:
ctkStreamHandler(ctkErrorLogStreamMessageHandler* messageHandler,
ctkErrorLogModel::LogLevel logLevel,
std::ostream& stream);
void setEnabled(bool value);
protected:
virtual int_type overflow(int_type v);
virtual std::streamsize xsputn(const char *p, std::streamsize n);
private:
ctkErrorLogStreamMessageHandler * MessageHandler;
ctkErrorLogModel::LogLevel LogLevel;
bool Enabled;
std::ostream& Stream;
std::streambuf* SavedBuffer;
std::string StringBuffer;
};
// --------------------------------------------------------------------------
// ctkStreamHandler methods
// --------------------------------------------------------------------------
ctkStreamHandler::ctkStreamHandler(ctkErrorLogStreamMessageHandler* messageHandler,
ctkErrorLogModel::LogLevel logLevel,
std::ostream& stream) :
MessageHandler(messageHandler), LogLevel(logLevel), Stream(stream)
{
this->Enabled = false;
}
// --------------------------------------------------------------------------
void ctkStreamHandler::setEnabled(bool value)
{
if (this->Enabled == value)
{
return;
}
if (value)
{
this->SavedBuffer = this->Stream.rdbuf();
this->Stream.rdbuf(this);
}
else
{
// Output anything that is left
if (!this->StringBuffer.empty())
{
Q_ASSERT(this->MessageHandler->errorLogModel());
this->MessageHandler->errorLogModel()->addEntry(
this->LogLevel, this->MessageHandler->handlerPrettyName(), this->StringBuffer.c_str());
}
this->Stream.rdbuf(this->SavedBuffer);
}
this->Enabled = value;
}
// --------------------------------------------------------------------------
std::streambuf::int_type ctkStreamHandler::overflow(std::streambuf::int_type v)
{
if (v == '\n')
{
Q_ASSERT(this->MessageHandler->errorLogModel());
this->MessageHandler->errorLogModel()->addEntry(
this->LogLevel, this->MessageHandler->handlerPrettyName(), this->StringBuffer.c_str());
this->StringBuffer.erase(this->StringBuffer.begin(), this->StringBuffer.end());
}
else
{
this->StringBuffer += v;
}
return v;
}
// --------------------------------------------------------------------------
std::streamsize ctkStreamHandler::xsputn(const char *p, std::streamsize n)
{
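  // Append the chunk, then emit one log entry per complete line below; any
  // trailing partial line stays in StringBuffer until a later write or the
  // flush in setEnabled(false).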
this->StringBuffer.append(p, p + n);
std::string::size_type pos = 0;
while (pos != std::string::npos)
{
pos = this->StringBuffer.find('\n');
if (pos != std::string::npos)
{
std::string tmp(this->StringBuffer.begin(), this->StringBuffer.begin() + pos);
Q_ASSERT(this->MessageHandler->errorLogModel());
this->MessageHandler->errorLogModel()->addEntry(
this->LogLevel, this->MessageHandler->handlerPrettyName(), tmp.c_str());
this->StringBuffer.erase(this->StringBuffer.begin(), this->StringBuffer.begin() + pos + 1);
}
}
return n;
}
}
// --------------------------------------------------------------------------
// ctkErrorLogStreamMessageHandlerPrivate
// --------------------------------------------------------------------------
class ctkErrorLogStreamMessageHandlerPrivate
{
public:
ctkErrorLogStreamMessageHandlerPrivate();
~ctkErrorLogStreamMessageHandlerPrivate();
ctkStreamHandler * CoutStreamHandler;
ctkStreamHandler * CerrStreamHandler;
};
// --------------------------------------------------------------------------
// ctkErrorLogStreamMessageHandlerPrivate methods
//------------------------------------------------------------------------------
ctkErrorLogStreamMessageHandlerPrivate::ctkErrorLogStreamMessageHandlerPrivate()
{
}
//------------------------------------------------------------------------------
ctkErrorLogStreamMessageHandlerPrivate::~ctkErrorLogStreamMessageHandlerPrivate()
{
delete this->CoutStreamHandler;
delete this->CerrStreamHandler;
}
//------------------------------------------------------------------------------
// ctkErrorLogStreamMessageHandler methods
//------------------------------------------------------------------------------
QString ctkErrorLogStreamMessageHandler::HandlerName = QLatin1String("Stream");
// --------------------------------------------------------------------------
ctkErrorLogStreamMessageHandler::ctkErrorLogStreamMessageHandler() :
Superclass(), d_ptr(new ctkErrorLogStreamMessageHandlerPrivate())
{
Q_D(ctkErrorLogStreamMessageHandler);
d->CoutStreamHandler = new ctkStreamHandler(this, ctkErrorLogModel::Info, std::cout);
d->CerrStreamHandler = new ctkStreamHandler(this, ctkErrorLogModel::Critical, std::cerr);
}
// --------------------------------------------------------------------------
ctkErrorLogStreamMessageHandler::~ctkErrorLogStreamMessageHandler()
{
}
// --------------------------------------------------------------------------
QString ctkErrorLogStreamMessageHandler::handlerName()const
{
return ctkErrorLogStreamMessageHandler::HandlerName;
}
// --------------------------------------------------------------------------
void ctkErrorLogStreamMessageHandler::setEnabledInternal(bool value)
{
Q_D(ctkErrorLogStreamMessageHandler);
d->CoutStreamHandler->setEnabled(value);
d->CerrStreamHandler->setEnabled(value);
}
|
apache-2.0
|
shantanusharma/closure-compiler
|
test/com/google/javascript/jscomp/JsMessageTest.java
|
2119
|
/*
* Copyright 2009 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import static com.google.common.truth.Truth.assertThat;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/** @author anatol@google.com (Anatol Pomazau) */
@RunWith(JUnit4.class)
public final class JsMessageTest {
@Test
public void testIsEmpty() {
assertThat(new JsMessage.Builder().build().isEmpty()).isTrue();
assertThat(new JsMessage.Builder().appendStringPart("").build().isEmpty()).isTrue();
assertThat(new JsMessage.Builder().appendStringPart("").appendStringPart("").build().isEmpty())
.isTrue();
assertThat(new JsMessage.Builder().appendStringPart("s").appendStringPart("").build().isEmpty())
.isFalse();
assertThat(new JsMessage.Builder().appendPlaceholderReference("3").build().isEmpty()).isFalse();
}
@Test
public void testMeaningChangesId() {
String id1 = new JsMessage.Builder()
.appendStringPart("foo").build().getId();
String id2 = new JsMessage.Builder()
.appendStringPart("foo").setMeaning("bar").build().getId();
assertThat(id1.equals(id2)).isFalse();
}
@Test
public void testHashValues() {
final String EMPTY = "";
final String VAL = "Hello, world";
final long ANSWER_STRING_64 = 0x43ec5d9731515874L;
final long ANSWER_EMPTY_64 = 0x468d9ea2c42361aaL;
assertThat(JsMessage.Hash.hash64(VAL)).isEqualTo(ANSWER_STRING_64);
assertThat(JsMessage.Hash.hash64(EMPTY)).isEqualTo(ANSWER_EMPTY_64);
}
}
|
apache-2.0
|
yao-matrix/mProto
|
auditory/package/voicelock/java/src/com/intel/awareness/voicelock/VoiceLockService.java
|
15102
|
package com.intel.awareness.voicelock;
import java.nio.ByteBuffer;
import android.app.Service;
import android.content.Context;
import android.content.Intent;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.os.Handler;
import android.os.Handler.Callback;
import android.os.HandlerThread;
import android.os.IBinder;
import android.os.Message;
import android.os.RemoteCallbackList;
import android.os.RemoteException;
import android.util.Log;
import android.view.Gravity;
import android.view.LayoutInflater;
import android.view.View;
import android.view.WindowManager;
import android.view.WindowManager.LayoutParams;
import android.widget.ImageButton;
import android.widget.ProgressBar;
import android.widget.TextView;
import com.android.internal.policy.IFaceLockCallback;
import com.android.internal.policy.IFaceLockInterface;
public class VoiceLockService extends Service implements Callback {
static final String TAG = "VoiceLock";
private static final String TMP_USER = "test";
private static final int DEFAULT_AWAKE_INTERVAL_MS = 10000;
private static final int NO_RESPONSE_DELAY = 6000;
private static final int SHOW_MSG_DELAY = 1000;
private static final int DEF_BUF_SIZE = 16000; // 0.5 sec
private static final int SAMPLE_RATE = 16000; //Hz
private static final int MAX_PROG = 1000;
private static final int BUF_SIZE = 512; //Bytes
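    // Sizing arithmetic (assuming 16-bit mono PCM, which these constants imply):
    // 16000 samples/s * 2 bytes/sample * 0.5 s = 16000 bytes for DEF_BUF_SIZE,
    // and each read in the verifier loop pulls BUF_SIZE = 512 bytes (~16 ms).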
private static final int UNLOCK = 0;
private static final int CANCEL = 1;
private static final int REPORT_FAILED_ATTEMPT = 2;
private static final int EXPOSE_FALLBACK = 3;
private static final int POKE_WAKELOCK = 4;
private static final int MESSAGE_SETVIEW = 100;
private static final int MESSAGE_REMOVEVIEW = 101;
private static final int MESSAGE_CANCEL = 102;
private static final int MESSAGE_SUCCESS = 103;
private static final int MESSAGE_FAILED = 104;
private static final int MESSAGE_TIMEOUT = 105;
private static final int LOOP_EXIT_NONE = 0;
private static final int LOOP_EXIT_SUCCESS = 1;
private static final int LOOP_EXIT_FAILURE = 2;
private static final int LOOP_EXIT_TIMEOUT = 3;
private static final int LOOP_EXIT_USER = 4;
private static final int LOOP_EXIT_SYSTEM = 5;
final RemoteCallbackList<IFaceLockCallback> mCallbacks = new RemoteCallbackList<IFaceLockCallback>();
private WindowManager.LayoutParams mLayoutParams;
private WindowManager mWM;
private VoiceLockRecorder mRecorder;
private int mBufSizeInBytes;
private ByteBuffer mBuf;
private int mExit = LOOP_EXIT_NONE;
private int mCurrFrmNum = 0;
private View mSoundUnlockView;
private TextView mStatus;
private TextView mKeepTalking;
private Context mContext;
private ImageButton mCancelBtn;
private ProgressBar mProgBar;
class VerifierThread extends Thread {
private LibSVJNI mJNI = new LibSVJNI(1);
public void run() {
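            // Verification loop (behavior as read from the code below): initialize
            // the JNI engine, start listening for TMP_USER, pull BUF_SIZE-byte PCM
            // chunks from the recorder, push them to the engine, update the progress
            // bar as frames accumulate, and exit once the engine reports success or
            // failure (or the timeout runnable flips mExit).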
try {
if (mJNI.init() != 0) {
mExit = LOOP_EXIT_FAILURE;
return;
}
if (mJNI.start_listen(TMP_USER) != 0) {
mExit = LOOP_EXIT_FAILURE;
return;
}
mRecorder.startRecording();
while (mExit == LOOP_EXIT_NONE) {
int len = mRecorder.read(mBuf, BUF_SIZE);
if (len == AudioRecord.ERROR_BAD_VALUE ||
len == AudioRecord.ERROR_INVALID_OPERATION) {
Log.e(TAG, "AudioRecord read Error");
break;
}
if (mJNI.push_data(mBuf.array()) == 1) {
mJNI.process_listen();
int frmNum = mJNI.get_frm_num();
if (frmNum > mCurrFrmNum) {
mCurrFrmNum = frmNum;
mProgBar.post(new Runnable() {
public void run() {
float r = (float)mCurrFrmNum / LibSVJNI.FULL_LISTEN_FRM_NUM;
mProgBar.setProgress((int)(r * (float)MAX_PROG));
}
});
}
if (mJNI.get_result() == LibSVJNI.LIBSV_CB_LISTEN_SUCCESS) {
mExit = LOOP_EXIT_SUCCESS;
} else if (mJNI.get_result() == LibSVJNI.LIBSV_CB_LISTEN_FAILURE) {
mExit = LOOP_EXIT_FAILURE;
}
}
mBuf.clear();
}
} catch (Exception exp) {
exp.printStackTrace();
mExit = LOOP_EXIT_FAILURE;
} finally {
try {
mRecorder.stop();
mBuf.clear();
mJNI.stop_listen();
mJNI.uninit();
switch (mExit) {
case LOOP_EXIT_SUCCESS:
mMainThreadHandler.obtainMessage(MESSAGE_SUCCESS).sendToTarget();
break;
case LOOP_EXIT_FAILURE:
mMainThreadHandler.obtainMessage(MESSAGE_FAILED).sendToTarget();
break;
case LOOP_EXIT_TIMEOUT:
mMainThreadHandler.obtainMessage(MESSAGE_TIMEOUT).sendToTarget();
break;
case LOOP_EXIT_USER:
case LOOP_EXIT_SYSTEM:
break;
}
mExit = LOOP_EXIT_NONE;
} catch (Exception exp) {
exp.printStackTrace();
}
}
}
}
private VerifierThread mVerifierThread;
private Handler mMainThreadHandler;
private Runnable mStopListenRunnable = new Runnable() {
public void run() {
mExit = LOOP_EXIT_TIMEOUT;
}
};
private IFaceLockInterface.Stub mBinder = new IFaceLockInterface.Stub() {
@Override
public void registerCallback(IFaceLockCallback paramIFaceLockCallback) {
if (paramIFaceLockCallback != null)
mCallbacks.register(paramIFaceLockCallback);
}
@Override
public void startUi(IBinder paramIBinder, int paramInt1, int paramInt2,
int paramInt3, int paramInt4, boolean paramBoolean) {
if (paramIBinder != null) {
mLayoutParams.flags = LayoutParams.FLAG_NOT_TOUCH_MODAL
| LayoutParams.FLAG_NOT_FOCUSABLE;
mLayoutParams.type = LayoutParams.TYPE_APPLICATION_PANEL;
mLayoutParams.gravity = Gravity.LEFT | Gravity.TOP;
mLayoutParams.token = paramIBinder;
mLayoutParams.x = paramInt1;
mLayoutParams.y = paramInt2;
mLayoutParams.width = paramInt3;
mLayoutParams.height = paramInt4;
mLayoutParams.packageName = "VoiceUnlock";
mMainThreadHandler.obtainMessage(MESSAGE_SETVIEW,
mLayoutParams).sendToTarget();
}
}
@Override
public void stopUi() {
mMainThreadHandler.obtainMessage(MESSAGE_REMOVEVIEW).sendToTarget();
}
@Override
public void unregisterCallback(IFaceLockCallback paramIFaceLockCallback) {
if (paramIFaceLockCallback != null)
mCallbacks.unregister(paramIFaceLockCallback);
}
};
public void doCallback(int paramInt) {
int i = mCallbacks.beginBroadcast();
for (int j = 0; j < i; j++) {
switch (paramInt) {
case UNLOCK:
try {
((IFaceLockCallback) mCallbacks.getBroadcastItem(j))
.unlock();
} catch (RemoteException localRemoteException) {
Log.e(TAG, "Remote exception during UNLOCK");
}
break;
case CANCEL:
try {
((IFaceLockCallback) mCallbacks.getBroadcastItem(j))
.cancel();
} catch (RemoteException e) {
Log.e(TAG, "Remote exception during CANCEL");
}
break;
case REPORT_FAILED_ATTEMPT:
try {
((IFaceLockCallback) mCallbacks.getBroadcastItem(j))
.reportFailedAttempt();
} catch (RemoteException e) {
Log.e(TAG, "Remote exception during REPORT_FAILED_ATTEMPT");
}
break;
case POKE_WAKELOCK:
try {
((IFaceLockCallback) mCallbacks.getBroadcastItem(j))
.pokeWakelock(DEFAULT_AWAKE_INTERVAL_MS);
} catch (RemoteException e) {
Log.e(TAG, "Remote exception during POKE_WAKELOCK");
}
break;
default:
break;
}
}
mCallbacks.finishBroadcast();
}
@Override
public IBinder onBind(Intent arg0) {
return mBinder;
}
private void showStartUnlock() {
String userName = getString(R.string.this_user);
mStatus.setText(String.format(getString(R.string.unlocking), userName));
mKeepTalking.setVisibility(View.VISIBLE);
mSoundUnlockView.invalidate();
mWM.updateViewLayout(mSoundUnlockView, mLayoutParams);
}
private void showUnlockFailed() {
String userName = getString(R.string.this_user);
mStatus.setText(String.format(getString(R.string.unlock_denine), userName));
mKeepTalking.setVisibility(View.INVISIBLE);
mSoundUnlockView.invalidate();
mWM.updateViewLayout(mSoundUnlockView, mLayoutParams);
}
private void showUnlockSucceed() {
String userName = getString(R.string.this_user);
mStatus.setText(String.format(getString(R.string.unlock_accept), userName));
mKeepTalking.setVisibility(View.INVISIBLE);
mSoundUnlockView.invalidate();
mWM.updateViewLayout(mSoundUnlockView, mLayoutParams);
}
private void showUnlockTimeout() {
mStatus.setText(getString(R.string.unlock_fail));
mKeepTalking.setVisibility(View.INVISIBLE);
mSoundUnlockView.invalidate();
mWM.updateViewLayout(mSoundUnlockView, mLayoutParams);
}
@Override
public void onCreate() {
super.onCreate();
try {
mContext = this;
mMainThreadHandler = new Handler(this);
mWM = (WindowManager) getApplicationContext()
.getSystemService(WINDOW_SERVICE);
mLayoutParams = new LayoutParams();
mSoundUnlockView = LayoutInflater.from(mContext).inflate(
R.layout.sound_unlock, null);
            // For Klocwork scan
if (mWM == null || mSoundUnlockView == null)
return;
mStatus = (TextView) mSoundUnlockView.findViewById(R.id.voice_unlock_text);
mKeepTalking = (TextView) mSoundUnlockView
.findViewById(R.id.keep_talking);
mCancelBtn = (ImageButton) mSoundUnlockView.findViewById(R.id.cancel_button);
mProgBar = (ProgressBar) mSoundUnlockView.findViewById(R.id.progressBar1);
            // For Klocwork scan
if (mStatus == null || mKeepTalking == null || mCancelBtn == null || mProgBar == null)
return;
mCancelBtn.setOnClickListener(new View.OnClickListener() {
public void onClick(View v) {
mMainThreadHandler.obtainMessage(MESSAGE_CANCEL)
.sendToTarget();
}
});
mProgBar.setMax(MAX_PROG);
mRecorder = VoiceLockRecorder.obtain();
if (mRecorder == null) {
Log.e(TAG, "AudioRecord Initialize Failed");
}
mBuf = ByteBuffer.allocateDirect(BUF_SIZE);
mVerifierThread = new VerifierThread();
mVerifierThread.setName("verify");
mVerifierThread.setPriority(Thread.MAX_PRIORITY);
mCurrFrmNum = 0;
} catch (Exception exp) {
exp.printStackTrace();
}
}
@Override
public void onDestroy() {
mExit = LOOP_EXIT_NONE;
mVerifierThread = null;
mBuf = null;
mMainThreadHandler = null;
VoiceLockRecorder.recycle();
mRecorder = null;
mCancelBtn.setOnClickListener(null);
super.onDestroy();
}
@Override
public boolean handleMessage(Message msg) {
switch (msg.what) {
case MESSAGE_SETVIEW: {
// UI update
mWM.addView(mSoundUnlockView, mLayoutParams);
showStartUnlock();
// Speaker recognition part
mExit = LOOP_EXIT_NONE;
mVerifierThread.start();
// Listening stops after NO_RESPONSE_DELAY milliseconds
mMainThreadHandler.postDelayed(mStopListenRunnable,
NO_RESPONSE_DELAY);
break;
}
case MESSAGE_REMOVEVIEW: {
mExit = LOOP_EXIT_SYSTEM;
try {
mVerifierThread.join();
} catch (Exception exp) {
exp.printStackTrace();
}
mMainThreadHandler.removeCallbacks(mStopListenRunnable);
// UI update
mWM.removeView(mSoundUnlockView);
break;
}
case MESSAGE_CANCEL: {
doCallback(CANCEL);
break;
}
case MESSAGE_SUCCESS: {
// UI update
showUnlockSucceed();
// Unlock the screen
mMainThreadHandler.postDelayed(new Runnable() {
public void run() {
doCallback(UNLOCK);
}
}, SHOW_MSG_DELAY);
break;
}
case MESSAGE_FAILED: {
// UI update
showUnlockFailed();
// Show fallback pattern 1 second later
mMainThreadHandler.postDelayed(new Runnable() {
@Override
public void run() {
doCallback(CANCEL);
}
}, SHOW_MSG_DELAY);
break;
}
case MESSAGE_TIMEOUT: {
// UI update
showUnlockTimeout();
// Show fallback pattern 1 second later
mMainThreadHandler.postDelayed(new Runnable() {
@Override
public void run() {
doCallback(CANCEL);
}
}, SHOW_MSG_DELAY);
break;
}
default:
break;
}
return false;
}
}
|
apache-2.0
|
apache/avro
|
lang/java/tools/src/main/java/org/apache/avro/tool/ToTextTool.java
|
2914
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.avro.tool;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.InputStream;
import java.io.PrintStream;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.List;
import joptsimple.OptionParser;
import joptsimple.OptionSet;
import org.apache.avro.Schema;
import org.apache.avro.file.DataFileStream;
import org.apache.avro.generic.GenericDatumReader;
/** Reads an avro data file into a plain text file. */
public class ToTextTool implements Tool {
private static final String TEXT_FILE_SCHEMA = "\"bytes\"";
private static final byte[] LINE_SEPARATOR = System.getProperty("line.separator").getBytes(StandardCharsets.UTF_8);
@Override
public String getName() {
return "totext";
}
@Override
public String getShortDescription() {
return "Converts an Avro data file to a text file.";
}
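// Usage note (illustrative, not part of the original file): the tool is
// normally run through the avro-tools launcher, e.g.
//   java -jar avro-tools.jar totext input.avro output.txt
// where either filename may be '-' for stdin/stdout.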
@Override
public int run(InputStream stdin, PrintStream out, PrintStream err, List<String> args) throws Exception {
OptionParser p = new OptionParser();
OptionSet opts = p.parse(args.toArray(new String[0]));
if (opts.nonOptionArguments().size() != 2) {
err.println("Expected 2 args: from_file to_file (filenames or '-' for stdin/stdout");
p.printHelpOn(err);
return 1;
}
BufferedInputStream inStream = Util.fileOrStdin(args.get(0), stdin);
BufferedOutputStream outStream = Util.fileOrStdout(args.get(1), out);
GenericDatumReader<Object> reader = new GenericDatumReader<>();
DataFileStream<Object> fileReader = new DataFileStream<>(inStream, reader);
if (!fileReader.getSchema().equals(new Schema.Parser().parse(TEXT_FILE_SCHEMA))) {
err.println("Avro file is not generic text schema");
p.printHelpOn(err);
fileReader.close();
return 1;
}
while (fileReader.hasNext()) {
ByteBuffer outBuff = (ByteBuffer) fileReader.next();
outStream.write(outBuff.array());
outStream.write(LINE_SEPARATOR);
}
fileReader.close();
Util.close(inStream);
Util.close(outStream);
return 0;
}
}
|
apache-2.0
|
AubinMahe/AubinMahe.github.io
|
doxygen-cpp/html/search/functions_13.js
|
1887
|
var searchData=
[
['_7earguments',['~Arguments',['../db/df5/classdcrud_1_1_arguments.html#a9a08f8f7637c56dac7715c06a8470a1f',1,'dcrud::Arguments']]],
['_7ebytebuffer',['~ByteBuffer',['../dd/dd8/classio_1_1_byte_buffer.html#ade55e28be5925cb8c20dbc9a9857b712',1,'io::ByteBuffer']]],
['_7eicache',['~ICache',['../d8/d86/classdcrud_1_1_i_cache.html#aa520e3ae1af6d51feefebb925082a0bd',1,'dcrud::ICache']]],
['_7eicallback',['~ICallback',['../d2/dfa/classdcrud_1_1_i_callback.html#a35e79411f4bac82d167022dddf7abd8c',1,'dcrud::ICallback']]],
['_7eicrud',['~ICRUD',['../d9/d9a/classdcrud_1_1_i_c_r_u_d.html#aad20cc1ef1a5e0c908c75d98930afbfa',1,'dcrud::ICRUD']]],
['_7eidispatcher',['~IDispatcher',['../d4/de7/classdcrud_1_1_i_dispatcher.html#ac4a475c30f0e08de9d82d04a76134854',1,'dcrud::IDispatcher']]],
['_7eioperation',['~IOperation',['../db/d8b/classdcrud_1_1_i_operation.html#a18f584f89eb0e9ae44585096926c1f7f',1,'dcrud::IOperation']]],
['_7eiparticipant',['~IParticipant',['../d4/d0f/classdcrud_1_1_i_participant.html#afc278b379c7cc1e59caddee5f82dcb84',1,'dcrud::IParticipant']]],
['_7eiprovided',['~IProvided',['../d9/d4f/classdcrud_1_1_i_provided.html#a935c678504f75d643362283bcb031600',1,'dcrud::IProvided']]],
['_7eiregistry',['~IRegistry',['../d7/dad/classdcrud_1_1_i_registry.html#afdb06a5ee08e59b9d8c11b50d16b4fde',1,'dcrud::IRegistry']]],
['_7eirequired',['~IRequired',['../d6/d7b/classdcrud_1_1_i_required.html#aaaed7d0613024835eea745889a805b18',1,'dcrud::IRequired']]],
['_7emutex',['~Mutex',['../d9/d67/classos_1_1_mutex.html#a205e2c334b25cb96e4f1303a4fde6b0c',1,'os::Mutex']]],
['_7eshareable',['~Shareable',['../d6/d8a/classdcrud_1_1_shareable.html#ab800a40a64482e153bcde7e6b808260c',1,'dcrud::Shareable']]],
['_7esynchronized',['~Synchronized',['../db/d0e/classos_1_1_synchronized.html#a1a6fddee5c1cd1b92b88d587e1870ad4',1,'os::Synchronized']]]
];
|
apache-2.0
|
miniway/presto
|
presto-main/src/main/java/io/prestosql/sql/planner/optimizations/LimitPushDown.java
|
9344
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prestosql.sql.planner.optimizations;
import com.google.common.collect.ImmutableList;
import io.prestosql.Session;
import io.prestosql.execution.warnings.WarningCollector;
import io.prestosql.sql.planner.PlanNodeIdAllocator;
import io.prestosql.sql.planner.SymbolAllocator;
import io.prestosql.sql.planner.TypeProvider;
import io.prestosql.sql.planner.plan.AggregationNode;
import io.prestosql.sql.planner.plan.DistinctLimitNode;
import io.prestosql.sql.planner.plan.LimitNode;
import io.prestosql.sql.planner.plan.MarkDistinctNode;
import io.prestosql.sql.planner.plan.PlanNode;
import io.prestosql.sql.planner.plan.ProjectNode;
import io.prestosql.sql.planner.plan.SemiJoinNode;
import io.prestosql.sql.planner.plan.SimplePlanRewriter;
import io.prestosql.sql.planner.plan.SortNode;
import io.prestosql.sql.planner.plan.TopNNode;
import io.prestosql.sql.planner.plan.UnionNode;
import io.prestosql.sql.planner.plan.ValuesNode;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import static com.google.common.base.MoreObjects.toStringHelper;
import static java.util.Objects.requireNonNull;
public class LimitPushDown
implements PlanOptimizer
{
@Override
public PlanNode optimize(PlanNode plan, Session session, TypeProvider types, SymbolAllocator symbolAllocator, PlanNodeIdAllocator idAllocator, WarningCollector warningCollector)
{
requireNonNull(plan, "plan is null");
requireNonNull(session, "session is null");
requireNonNull(types, "types is null");
requireNonNull(symbolAllocator, "symbolAllocator is null");
requireNonNull(idAllocator, "idAllocator is null");
return SimplePlanRewriter.rewriteWith(new Rewriter(idAllocator), plan, null);
}
private static class LimitContext
{
private final long count;
private final boolean partial;
public LimitContext(long count, boolean partial)
{
this.count = count;
this.partial = partial;
}
public long getCount()
{
return count;
}
public boolean isPartial()
{
return partial;
}
@Override
public String toString()
{
return toStringHelper(this)
.add("count", count)
.add("partial", partial)
.toString();
}
}
private static class Rewriter
extends SimplePlanRewriter<LimitContext>
{
private final PlanNodeIdAllocator idAllocator;
private Rewriter(PlanNodeIdAllocator idAllocator)
{
this.idAllocator = requireNonNull(idAllocator, "idAllocator is null");
}
@Override
public PlanNode visitPlan(PlanNode node, RewriteContext<LimitContext> context)
{
PlanNode rewrittenNode = context.defaultRewrite(node);
LimitContext limit = context.get();
if (limit != null) {
// Drop in a LimitNode b/c we cannot push our limit down any further
rewrittenNode = new LimitNode(idAllocator.getNextId(), rewrittenNode, limit.getCount(), limit.isPartial());
}
return rewrittenNode;
}
@Override
public PlanNode visitLimit(LimitNode node, RewriteContext<LimitContext> context)
{
long count = node.getCount();
if (context.get() != null) {
count = Math.min(count, context.get().getCount());
}
// return empty ValuesNode in case of limit 0
if (count == 0) {
return new ValuesNode(idAllocator.getNextId(),
node.getOutputSymbols(),
ImmutableList.of());
}
// default visitPlan logic will insert the limit node
return context.rewrite(node.getSource(), new LimitContext(count, false));
}
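// Worked example (illustrative, not from the source): for a plan shaped
// Limit(5) -> Limit(10) -> X, the outer visit pushes LimitContext(5) into
// the inner limit, which combines counts as min(10, 5) = 5 and pushes the
// context toward X; the fallback visitPlan re-materializes a single
// LimitNode once the limit can be pushed no further.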
@Override
@Deprecated
public PlanNode visitAggregation(AggregationNode node, RewriteContext<LimitContext> context)
{
LimitContext limit = context.get();
if (limit != null &&
node.getAggregations().isEmpty() &&
node.getOutputSymbols().size() == node.getGroupingKeys().size() &&
node.getOutputSymbols().containsAll(node.getGroupingKeys())) {
PlanNode rewrittenSource = context.rewrite(node.getSource());
return new DistinctLimitNode(idAllocator.getNextId(), rewrittenSource, limit.getCount(), false, rewrittenSource.getOutputSymbols(), Optional.empty());
}
PlanNode rewrittenNode = context.defaultRewrite(node);
if (limit != null) {
// Drop in a LimitNode b/c limits cannot be pushed through aggregations
rewrittenNode = new LimitNode(idAllocator.getNextId(), rewrittenNode, limit.getCount(), limit.isPartial());
}
return rewrittenNode;
}
@Override
public PlanNode visitMarkDistinct(MarkDistinctNode node, RewriteContext<LimitContext> context)
{
// the fallback logic (in visitPlan) for node types we don't know about introduces a limit node,
// so we need this here to push the limit through this trivial node type
return context.defaultRewrite(node, context.get());
}
@Override
public PlanNode visitProject(ProjectNode node, RewriteContext<LimitContext> context)
{
// the fallback logic (in visitPlan) for node types we don't know about introduces a limit node,
// so we need this here to push the limit through this trivial node type
return context.defaultRewrite(node, context.get());
}
@Override
public PlanNode visitTopN(TopNNode node, RewriteContext<LimitContext> context)
{
LimitContext limit = context.get();
PlanNode rewrittenSource = context.rewrite(node.getSource());
if (rewrittenSource == node.getSource() && limit == null) {
return node;
}
long count = node.getCount();
if (limit != null) {
count = Math.min(count, limit.getCount());
}
return new TopNNode(node.getId(), rewrittenSource, count, node.getOrderingScheme(), node.getStep());
}
@Override
@Deprecated
public PlanNode visitSort(SortNode node, RewriteContext<LimitContext> context)
{
LimitContext limit = context.get();
PlanNode rewrittenSource = context.rewrite(node.getSource());
if (limit != null) {
return new TopNNode(node.getId(), rewrittenSource, limit.getCount(), node.getOrderingScheme(), TopNNode.Step.SINGLE);
}
else if (rewrittenSource != node.getSource()) {
return new SortNode(node.getId(), rewrittenSource, node.getOrderingScheme());
}
return node;
}
@Override
public PlanNode visitUnion(UnionNode node, RewriteContext<LimitContext> context)
{
LimitContext limit = context.get();
LimitContext childLimit = null;
if (limit != null) {
childLimit = new LimitContext(limit.getCount(), true);
}
List<PlanNode> sources = new ArrayList<>();
for (int i = 0; i < node.getSources().size(); i++) {
sources.add(context.rewrite(node.getSources().get(i), childLimit));
}
PlanNode output = new UnionNode(node.getId(), sources, node.getSymbolMapping(), node.getOutputSymbols());
if (limit != null) {
output = new LimitNode(idAllocator.getNextId(), output, limit.getCount(), limit.isPartial());
}
return output;
}
@Override
public PlanNode visitSemiJoin(SemiJoinNode node, RewriteContext<LimitContext> context)
{
PlanNode source = context.rewrite(node.getSource(), context.get());
if (source != node.getSource()) {
return new SemiJoinNode(
node.getId(),
source,
node.getFilteringSource(),
node.getSourceJoinSymbol(),
node.getFilteringSourceJoinSymbol(),
node.getSemiJoinOutput(),
node.getSourceHashSymbol(),
node.getFilteringSourceHashSymbol(),
node.getDistributionType());
}
return node;
}
}
}
|
apache-2.0
|
bshaffer/google-api-php-client-services
|
src/Google/Service/Sheets/EmbeddedChart.php
|
2059
|
<?php
/*
* Copyright 2014 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
class Google_Service_Sheets_EmbeddedChart extends Google_Model
{
protected $borderType = 'Google_Service_Sheets_EmbeddedObjectBorder';
protected $borderDataType = '';
public $chartId;
protected $positionType = 'Google_Service_Sheets_EmbeddedObjectPosition';
protected $positionDataType = '';
protected $specType = 'Google_Service_Sheets_ChartSpec';
protected $specDataType = '';
/**
* @param Google_Service_Sheets_EmbeddedObjectBorder
*/
public function setBorder(Google_Service_Sheets_EmbeddedObjectBorder $border)
{
$this->border = $border;
}
/**
* @return Google_Service_Sheets_EmbeddedObjectBorder
*/
public function getBorder()
{
return $this->border;
}
public function setChartId($chartId)
{
$this->chartId = $chartId;
}
public function getChartId()
{
return $this->chartId;
}
/**
* @param Google_Service_Sheets_EmbeddedObjectPosition
*/
public function setPosition(Google_Service_Sheets_EmbeddedObjectPosition $position)
{
$this->position = $position;
}
/**
* @return Google_Service_Sheets_EmbeddedObjectPosition
*/
public function getPosition()
{
return $this->position;
}
/**
* @param Google_Service_Sheets_ChartSpec
*/
public function setSpec(Google_Service_Sheets_ChartSpec $spec)
{
$this->spec = $spec;
}
/**
* @return Google_Service_Sheets_ChartSpec
*/
public function getSpec()
{
return $this->spec;
}
}
|
apache-2.0
|
ChosenGlobal/bbs
|
src/main/java/cn/jfinalbbs/index/IndexClientController.java
|
617
|
package cn.jfinalbbs.index;
import cn.jfinalbbs.common.BaseController;
import cn.jfinalbbs.topic.Topic;
import com.jfinal.kit.PropKit;
import com.jfinal.plugin.activerecord.Page;
/**
* Created by Tomoya on 15/6/9.
*/
public class IndexClientController extends BaseController {
public void index() {
String tab = getPara("tab");
String q = getPara("q");
if(tab == null) tab = "all";
Page<Topic> page = Topic.me.paginate(getParaToInt("p", 1),
getParaToInt("size", PropKit.use("config.properties").getInt("page_size")), tab, q, 1);
success(page);
}
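// Illustrative (not from the source): a request carrying tab=good&q=foo&p=2
// returns page 2 of matching topics, using page_size from config.properties
// as the default page length.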
}
|
apache-2.0
|
omribahumi/redshift_console
|
redshift_console/redshift.py
|
12883
|
from itertools import chain
import psycopg2
import datetime
import logging
import yaml
import os
import toro
import settings
import re
from tornado.ioloop import IOLoop, PeriodicCallback
from tornado.gen import coroutine, Return
from tornado.concurrent import run_on_executor
from concurrent.futures import ThreadPoolExecutor
from psycopg2.extras import RealDictCursor
from contextlib import contextmanager
with open(os.path.join(os.path.dirname(__file__), './queries.yaml')) as f:
sql_queries = yaml.load(f)
def _concat_query_text(rows_items, key='id', text_col='query'):
"""
Query-related tables tend to split the query text over several lines.
This function concatenates those fragments back into a single string.
"""
queries = {}
ret = []
for row in rows_items:
queries.setdefault(row[key], []).append(row)
for query in queries.values():
query_text = reduce(lambda text, query_data: "{}{}".format(text, query_data[text_col].strip()), query, "")
query[0][text_col] = query_text.strip()
ret.append(query[0])
return ret
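# Illustrative example (not in the original source): rows such as
#   [{'id': 1, 'query': 'SELECT * '}, {'id': 1, 'query': 'FROM events'}]
# collapse into one row whose 'query' is 'SELECT *FROM events' -- each
# fragment is stripped before being joined, so whitespace at fragment
# boundaries is not preserved.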
class DataFetcher(object):
# Only one query per connection is possible in psycopg2 async mode
# so there is no point to have a ThreadPoolExecutor bigger than the
# connection pool.
executor = ThreadPoolExecutor(settings.REDSHIFT['connection_pool_max_size'])
def __init__(self, connection_pool, refresh_interval, io_loop=None, executor=None):
self.io_loop = io_loop or IOLoop.instance()
self.executor = executor or self.executor
self.connection_pool = connection_pool
self.lock = toro.Lock()
self.name = self.__class__.__name__
self.status = "Started"
self.runtime = 0
self.refresh_finished_at = None
self.refresh_started_at = None
self.refresh_interval = refresh_interval.seconds
self.periodic_callback = PeriodicCallback(self._refresh, refresh_interval.seconds*1000)
@contextmanager
def _get_connection(self):
"""
Context manager for pool handling
"""
conn = self.connection_pool.getconn()
try:
yield conn
finally:
self.connection_pool.putconn(conn)
@coroutine
def start(self):
self.periodic_callback.start()
yield self._refresh()
def stop(self):
self.periodic_callback.stop()
def get_status(self):
return {
'name': self.name,
'state': self.status,
'refresh_started_at': self.refresh_started_at,
'refresh_finished_at': self.refresh_finished_at,
'runtime': self.runtime,
'refresh_interval': self.refresh_interval
}
@coroutine
def _refresh(self):
# Because Tornado runs in a single thread, we can safely use locked() here
# to check whether this fetcher is already running.
if self.lock.locked():
logging.info("%s skipping because already locked.", self.name)
return
with (yield self.lock.acquire()):
self.status = "Fetching data"
self.refresh_started_at = datetime.datetime.utcnow()
logging.info("%s starting refresh", self.name)
yield self.refresh()
self.status = "Waiting"
self.refresh_finished_at = datetime.datetime.utcnow()
delta = self.refresh_finished_at - self.refresh_started_at
self.runtime = delta.seconds + delta.microseconds/1000000
self.refresh_started_at = None
logging.info("%s finished refreshing.", self.name)
@run_on_executor
def execute_query(self, query, *args):
with self._get_connection() as connection:
with connection.cursor(cursor_factory=RealDictCursor) as cursor:
cursor.execute(query, *args)
return cursor.fetchall()
@coroutine
def refresh(self):
pass
class Queries(DataFetcher):
def __init__(self, session, refresh_interval):
super(Queries, self).__init__(session, refresh_interval)
self.inflight_queries = {}
self.inflight_queries_updated_at = None
self.queries_queue = []
self.queries_queue_updated_at = None
self.alerts_updated_at = None
def _set_cancellation_in_progress(self, pid):
for q in chain(self.inflight_queries.values(), self.queries_queue):
if q.get('pid') == int(pid):
q['cancellation_in_progress'] = True
@run_on_executor
def cancel_query(self, pid):
"""
Since multiple attempts may be required to cancel a query,
the connection is held until the query has actually been
canceled.
"""
with self._get_connection() as connection:
with connection.cursor(cursor_factory=RealDictCursor) as cursor:
tries = 1
while tries < int(settings.DATA['max_query_cancelation_retries']):
try:
self._set_cancellation_in_progress(pid)
cursor.execute("CANCEL %s;", (int(pid), ))
logging.info("cancelation retry {} out of {} for PID {}".format(tries, settings.DATA['max_query_cancelation_retries'], pid))
tries += 1
except psycopg2.InternalError:
logging.info('successfully canceled {}'.format(pid))
return True
logging.warning('failed to cancel {}'.format(pid))
return False
@coroutine
def refresh(self):
yield self._fetch_inflight_queries()
yield self._fetch_query_alerts()
yield self._fetch_queries_queue()
@coroutine
def _fetch_inflight_queries(self):
inflight_queries = yield self.execute_query(sql_queries['inflight_queries'])
previous_inflight_queries = self.inflight_queries
self.inflight_queries = {q['id']: q for q in _concat_query_text(inflight_queries)}
# copy previous alerts, until we fetch new ones:
for id, query in self.inflight_queries.iteritems():
if id in previous_inflight_queries and previous_inflight_queries[id].get('alert', None) is not None:
query['alert'] = previous_inflight_queries[id]['alert']
query['cancellation_in_progress'] = False
self.inflight_queries_updated_at = datetime.datetime.utcnow()
@coroutine
def _fetch_query_alerts(self):
if not self.inflight_queries:
logging.info("skipping fetching of query alerts, as there are no inflight queries.")
return
ids = tuple(self.inflight_queries.keys())
end_time = datetime.datetime.utcnow()
start_time = end_time - datetime.timedelta(seconds=3600*3)
alerts_list = yield self.execute_query(sql_queries['query_alerts'], (start_time, end_time, ids))
alerts = {}
# assuming events are in ascending order
# overriding early event with recent per query
for row in alerts_list:
alerts.setdefault(row['id'], {'event': row['event'], 'solution': row['solution']})
for id, alert in alerts.iteritems():
if id in self.inflight_queries:
self.inflight_queries[id]['alert'] = {
'problem': alert['event'],
'solution': alert['solution']
}
self.alerts_updated_at = datetime.datetime.utcnow()
@coroutine
def _fetch_queries_queue(self):
queries_queue = yield self.execute_query(sql_queries['queries_queue'])
self.queries_queue = _concat_query_text(queries_queue)
self.queries_queue_updated_at = datetime.datetime.utcnow()
class Tables(DataFetcher):
def __init__(self, session, refresh_interval):
super(Tables, self).__init__(session, refresh_interval)
self.updated_at = None
self.schemas = {}
self.load_errors = {}
self.tables_rows_sort_status = {}
self.table_id_mapping = {}
self._db_id = None
self.load_errors_updated_at = None
def get(self, namespace, table):
return self.schemas.get(namespace, {}).get(table, None)
@coroutine
def _fetch_current_db_id(self):
"""
db_id is used to limit the querying of STV_TBL_PERM.
pg_class and pg_namespace are limited to the scope of
the current connection (and therefore to a single database)
while STV_TBL_PERM contains info on tables from all databases
in the cluster.
"""
with self._get_connection() as connection:
with connection.cursor() as cursor:
cursor.execute("SELECT current_database()")
dbname = cursor.fetchone()[0]
with connection.cursor() as cursor:
cursor.execute('SELECT OID FROM pg_database WHERE datname=%s', (dbname, ))
self._db_id = cursor.fetchone()[0]
def get_schemas(self):
schemas = {}
for schema_name, schema in self.schemas.iteritems():
schemas.setdefault(schema_name, [])
for name, table in schema.iteritems():
table_data = table.get('metadata', {})
table_data.setdefault('total_rows', 0)
table_data['name'] = name
schemas[schema_name].append(table_data)
schemas[schema_name] = list(reversed(sorted(schemas[schema_name], key=lambda t: t.get('size_in_mb', 0))))
return schemas
@coroutine
def refresh(self):
if not self._db_id:
yield self._fetch_current_db_id()
yield self._fetch_schema()
yield self._fetch_tables_rows_sort_status()
yield self._fetch_design_status()
yield self._fetch_load_errors()
@coroutine
def _fetch_schema(self):
results = yield self.execute_query(sql_queries['table_id_mapping'])
namespaces = list(set(map(lambda r: "'%s'" % r['schema_name'], results)))
tables_ids = {row.pop('table_id'): row for row in results}
self.table_id_mapping = tables_ids
namespaces.insert(0, "'$user'")
namespaces.insert(1, "'public'")
search_path_query = 'set search_path to {};'.format(', '.join(namespaces))
columns = yield self.execute_query(search_path_query + "SELECT * FROM pg_table_def WHERE schemaname NOT IN ('pg_catalog', 'pg_toast', 'information_schema');")
schema = {}
for col in columns:
namespace = schema.setdefault(col['schemaname'], {})
table = namespace.setdefault(col['tablename'], {})
columns = table.setdefault('columns', [])
table.setdefault('metadata', self.schemas.get(col['schemaname'], {}).get(col['tablename'], {}).get('metadata', {}))
columns.append({'name': col['column'],
'type': col['type'],
'encoding': col['encoding'],
'distkey': col['distkey'],
'sortkey': col['sortkey']})
self.schemas = schema
self.updated_at = datetime.datetime.utcnow()
@coroutine
def _fetch_load_errors(self):
query = sql_queries['table_load_errors']
load_errors = yield self.execute_query(query)
for row in load_errors:
row['table'] = self.table_id_mapping[row['table_id']]['table_name']
row['schema'] = self.table_id_mapping[row['table_id']]['schema_name']
self.load_errors = load_errors
self.load_errors_updated_at = datetime.datetime.utcnow()
@coroutine
def _fetch_tables_rows_sort_status(self):
query = sql_queries['tables_rows_sort_status']
res = yield self.execute_query(query, (self._db_id,))
for row in res:
schema, table_name = self.table_id_mapping[row['table_id']]['schema_name'], \
self.table_id_mapping[row['table_id']]['table_name']
table = self.get(schema, table_name)
if table is not None:
table.setdefault('metadata', {})
table['metadata']['total_rows'] = row['total_rows']
table['metadata']['sorted_rows'] = row['sorted_rows']
table['metadata']['percent_sorted'] = row['percent_sorted']
@coroutine
def _fetch_design_status(self):
result = yield self.execute_query(sql_queries['table_design_status'])
for row in result:
table = self.get(row['schemaname'], row['tablename'])
if table is not None:
table.setdefault('metadata', {})
for key in ('has_col_encoding', 'pct_slices_populated', 'size_in_mb', 'pct_skew_across_slices', 'has_sort_key', 'has_dist_key'):
table['metadata'][key] = row[key]
|
apache-2.0
|
Terminator-Aaron/Katana
|
aspnetwebsrc/System.Web.Http.WebHost/SeekableBufferedRequestStream.cs
|
5549
|
// Copyright (c) Microsoft Open Technologies, Inc. All rights reserved. See License.txt in the project root for license information.
using System.ComponentModel;
using System.Diagnostics;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace System.Web.Http.WebHost
{
internal class SeekableBufferedRequestStream : NonOwnedStream
{
private const int ReadBufferSize = 1024;
private readonly HttpRequestBase _request;
private bool _isReadToEndComplete;
public SeekableBufferedRequestStream(HttpRequestBase request)
{
if (request == null)
{
throw new ArgumentNullException("request");
}
_request = request;
InnerStream = request.GetBufferedInputStream();
}
public override bool CanSeek
{
get
{
return !IsDisposed;
}
}
public override long Position
{
get
{
ThrowIfDisposed();
return InnerStream.Position;
}
set
{
ThrowIfDisposed();
Seek(value, SeekOrigin.Begin);
}
}
public override int EndRead(IAsyncResult asyncResult)
{
ThrowIfDisposed();
int bytesRead = InnerStream.EndRead(asyncResult);
if (bytesRead == 0 && !_isReadToEndComplete)
{
SwapToSeekableStream();
}
return bytesRead;
}
public override int Read(byte[] buffer, int offset, int count)
{
ThrowIfDisposed();
int bytesRead = InnerStream.Read(buffer, offset, count);
if (bytesRead == 0 && !_isReadToEndComplete)
{
SwapToSeekableStream();
}
return bytesRead;
}
public async override Task<int> ReadAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
{
ThrowIfDisposed();
int bytesRead = await InnerStream.ReadAsync(buffer, offset, count, cancellationToken);
if (bytesRead == 0 && !_isReadToEndComplete)
{
SwapToSeekableStream();
}
return bytesRead;
}
public override int ReadByte()
{
ThrowIfDisposed();
int result = InnerStream.ReadByte();
if (result == -1 && !_isReadToEndComplete)
{
SwapToSeekableStream();
}
return result;
}
public override long Seek(long offset, SeekOrigin origin)
{
ThrowIfDisposed();
long currentPosition = InnerStream.Position;
long? newPosition = null;
switch (origin)
{
case SeekOrigin.Begin:
newPosition = offset;
break;
case SeekOrigin.Current:
newPosition = currentPosition + offset;
break;
case SeekOrigin.End:
// We have to check Length here because we might not know the length in some scenarios.
// If we don't know, then we just do the safe thing and force a read to end.
if (Length >= 0)
{
newPosition = Length + offset;
}
break;
default:
throw new InvalidEnumArgumentException("origin", (int)origin, typeof(SeekOrigin));
}
if (newPosition == currentPosition)
{
// This is a no-op, we want to short circuit because we do significant work on a seek.
return currentPosition;
}
if (!_isReadToEndComplete)
{
// The current stream is the one returned from GetBufferedInputStream(), and it's not
// seekable in the web host case.
//
// We need to read the non-seekable stream to the end, which will populate the seekable stream
// that's provided by .InputStream. This is only done for the side-effect, and we just ignore the
// data, it's already being buffered for us.
//
// This is done synchronously, because we need to block the calling thread so that the result of
// Seek can be returned.
byte[] buffer = new byte[ReadBufferSize];
while (InnerStream.Read(buffer, 0, buffer.Length) > 0)
{
}
SwapToSeekableStream();
}
return InnerStream.Seek(offset, origin);
}
private void SwapToSeekableStream()
{
// At this point we've actually read the non-seekable stream to the end, and we're about to swap streams
// and toggle the value of _isReadToEndComplete. Reading the non-seekable stream to the end will populate
// InnerStream with the buffered data, so we can use it for all future operations.
Debug.Assert(!_isReadToEndComplete);
Stream seekableStream = _request.InputStream;
seekableStream.Position = InnerStream.Position;
InnerStream = seekableStream;
_isReadToEndComplete = true;
}
}
}
|
apache-2.0
|
ernestp/consulo
|
platform/graph-impl/src/com/mxgraph/view/mxSwimlaneManager.java
|
8595
|
/**
* $Id: mxSwimlaneManager.java,v 1.2 2013/08/29 09:19:41 gaudenz Exp $
* Copyright (c) 2007, Gaudenz Alder
*/
package com.mxgraph.view;
import com.mxgraph.model.mxGeometry;
import com.mxgraph.model.mxIGraphModel;
import com.mxgraph.util.*;
import java.util.Map;
/**
* Manager for swimlanes and nested swimlanes that sets the size of newly added
* swimlanes to that of their siblings (if addEnabled is true), and propagates
* size changes of a swimlane to its sibling and child swimlanes (if
* resizeEnabled is true).
*/
public class mxSwimlaneManager extends mxEventSource {
/**
* Reference to the enclosing graph.
*/
protected mxGraph graph;
/**
* Specifies if the manager is enabled; if false, the add and resize
* handlers are ignored.
*/
protected boolean enabled;
/**
* Specifies the default orientation used for cells whose style does not
* define one; see isCellHorizontal.
*/
protected boolean horizontal;
/**
* Specifies if newly added cells should be resized to match the size of their
* existing siblings. Default is true.
*/
protected boolean addEnabled;
/**
* Specifies if resizing of swimlanes should be handled. Default is true.
*/
protected boolean resizeEnabled;
/**
*
*/
protected mxIEventListener addHandler = new mxIEventListener() {
public void invoke(Object source, mxEventObject evt) {
if (isEnabled() && isAddEnabled()) {
cellsAdded((Object[])evt.getProperty("cells"));
}
}
};
/**
*
*/
protected mxIEventListener resizeHandler = new mxIEventListener() {
public void invoke(Object source, mxEventObject evt) {
if (isEnabled() && isResizeEnabled()) {
cellsResized((Object[])evt.getProperty("cells"));
}
}
};
/**
*
*/
public mxSwimlaneManager(mxGraph graph) {
setGraph(graph);
}
/**
* @return the enabled
*/
public boolean isEnabled() {
return enabled;
}
/**
* @param value the enabled to set
*/
public void setEnabled(boolean value) {
enabled = value;
}
/**
* @return the horizontal
*/
public boolean isHorizontal() {
return horizontal;
}
/**
* @param value the horizontal to set
*/
public void setHorizontal(boolean value) {
horizontal = value;
}
/**
* @return the addEnabled
*/
public boolean isAddEnabled() {
return addEnabled;
}
/**
* @param value the addEnabled to set
*/
public void setAddEnabled(boolean value) {
addEnabled = value;
}
/**
* @return the resizeEnabled
*/
public boolean isResizeEnabled() {
return resizeEnabled;
}
/**
* @param value the resizeEnabled to set
*/
public void setResizeEnabled(boolean value) {
resizeEnabled = value;
}
/**
* @return the graph
*/
public mxGraph getGraph() {
return graph;
}
/**
* @param graph the graph to set
*/
public void setGraph(mxGraph graph) {
if (this.graph != null) {
this.graph.removeListener(addHandler);
this.graph.removeListener(resizeHandler);
}
this.graph = graph;
if (this.graph != null) {
this.graph.addListener(mxEvent.ADD_CELLS, addHandler);
this.graph.addListener(mxEvent.CELLS_RESIZED, resizeHandler);
}
}
/**
* Returns true if the given swimlane should be ignored.
*/
protected boolean isSwimlaneIgnored(Object swimlane) {
return !getGraph().isSwimlane(swimlane);
}
/**
* Returns true if the given cell is horizontal. If the given cell is not a
* swimlane, then the inverse of the <horizontal> value is returned.
*/
protected boolean isCellHorizontal(Object cell) {
if (graph.isSwimlane(cell)) {
mxCellState state = graph.getView().getState(cell);
Map<String, Object> style = (state != null) ? state.getStyle() : graph.getCellStyle(cell);
return mxUtils.isTrue(style, mxConstants.STYLE_HORIZONTAL, true);
}
return !isHorizontal();
}
/**
* Called if any cells have been added. Calls swimlaneAdded for all swimlanes
* where isSwimlaneIgnored returns false.
*/
protected void cellsAdded(Object[] cells) {
if (cells != null) {
mxIGraphModel model = getGraph().getModel();
model.beginUpdate();
try {
for (int i = 0; i < cells.length; i++) {
if (!isSwimlaneIgnored(cells[i])) {
swimlaneAdded(cells[i]);
}
}
}
finally {
model.endUpdate();
}
}
}
/**
* Called for each swimlane which has been added. This finds a reference
* sibling swimlane and applies its size to the newly added swimlane. If no
* sibling with a geometry can be found, the new swimlane is left unchanged.
*/
protected void swimlaneAdded(Object swimlane) {
mxIGraphModel model = getGraph().getModel();
Object parent = model.getParent(swimlane);
int childCount = model.getChildCount(parent);
mxGeometry geo = null;
// Finds the first valid sibling swimlane as reference
for (int i = 0; i < childCount; i++) {
Object child = model.getChildAt(parent, i);
if (child != swimlane && !this.isSwimlaneIgnored(child)) {
geo = model.getGeometry(child);
if (geo != null) {
break;
}
}
}
// Applies the size of the reference to the newly added swimlane
if (geo != null) {
boolean parentHorizontal = (parent != null) ? isCellHorizontal(parent) : horizontal;
resizeSwimlane(swimlane, geo.getWidth(), geo.getHeight(), parentHorizontal);
}
}
/**
* Called if any cells have been resized. Resizes the corresponding top-level
* swimlanes, for all cells where isSwimlaneIgnored returns false.
*/
protected void cellsResized(Object[] cells) {
if (cells != null) {
mxIGraphModel model = this.getGraph().getModel();
model.beginUpdate();
try {
// Finds the top-level swimlanes and adds offsets
for (int i = 0; i < cells.length; i++) {
if (!this.isSwimlaneIgnored(cells[i])) {
mxGeometry geo = model.getGeometry(cells[i]);
if (geo != null) {
mxRectangle size = new mxRectangle(0, 0, geo.getWidth(), geo.getHeight());
Object top = cells[i];
Object current = top;
while (current != null) {
top = current;
current = model.getParent(current);
mxRectangle tmp = (graph.isSwimlane(current)) ? graph.getStartSize(current) : new mxRectangle();
size.setWidth(size.getWidth() + tmp.getWidth());
size.setHeight(size.getHeight() + tmp.getHeight());
}
boolean parentHorizontal = (current != null) ? isCellHorizontal(current) : horizontal;
resizeSwimlane(top, size.getWidth(), size.getHeight(), parentHorizontal);
}
}
}
}
finally {
model.endUpdate();
}
}
}
/**
* Sets the width or height of the given swimlane to the given value depending
* on parentHorizontal: if parentHorizontal is true, the height is set,
* otherwise the width is set; the change is then propagated to child swimlanes.
*/
protected void resizeSwimlane(Object swimlane, double w, double h, boolean parentHorizontal) {
mxIGraphModel model = getGraph().getModel();
model.beginUpdate();
try {
boolean horizontal = this.isCellHorizontal(swimlane);
if (!this.isSwimlaneIgnored(swimlane)) {
mxGeometry geo = model.getGeometry(swimlane);
if (geo != null) {
if ((parentHorizontal && geo.getHeight() != h) || (!parentHorizontal && geo.getWidth() != w)) {
geo = (mxGeometry)geo.clone();
if (parentHorizontal) {
geo.setHeight(h);
}
else {
geo.setWidth(w);
}
model.setGeometry(swimlane, geo);
}
}
}
mxRectangle tmp = (graph.isSwimlane(swimlane)) ? graph.getStartSize(swimlane) : new mxRectangle();
w -= tmp.getWidth();
h -= tmp.getHeight();
int childCount = model.getChildCount(swimlane);
for (int i = 0; i < childCount; i++) {
Object child = model.getChildAt(swimlane, i);
resizeSwimlane(child, w, h, horizontal);
}
}
finally {
model.endUpdate();
}
}
/**
*
*/
public void destroy() {
setGraph(null);
}
}
|
apache-2.0
|
Saw85/DibikeManagement
|
src/com/Dibike/controller/PushController.java
|
8097
|
package com.Dibike.controller;
import java.io.IOException;
import java.math.BigDecimal;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import javax.annotation.Resource;
import org.apache.log4j.Logger;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseBody;
import com.Dibike.common.BaseController;
import com.Dibike.common.JsonFormat;
import com.Dibike.common.MapDistance;
import com.Dibike.common.Result;
import com.Dibike.common.Tests;
import com.Dibike.common.UtilDate;
import com.Dibike.entity.Bike;
import com.Dibike.entity.Dictionaries_table;
import com.Dibike.entity.Journey;
import com.Dibike.entity.Journey_details;
import com.Dibike.entity.MemberInfo;
import com.Dibike.entity.Point;
import com.Dibike.entity.Report_illegal_stop;
import com.Dibike.service.BikeService;
import com.Dibike.service.Dictionaries_tableService;
import com.Dibike.service.JourneyService;
import com.Dibike.service.Journey_detailsService;
import com.Dibike.service.MemberInfoService;
import com.Dibike.service.PointService;
import com.Dibike.service.Report_illegal_stopService;
import com.google.gson.Gson;
import com.google.gson.JsonObject;
/**
* @author wuxiang
* @version Created: 2017-01-07 17:49:57
*
*/
@Controller
@RequestMapping("/push")
public class PushController extends BaseController {
private static Logger logger = Logger.getLogger(PushController.class);
private Gson gson = new Gson();
private JsonFormat jsonFormat = new JsonFormat();
@Resource
private BikeService bikeService;
@Resource
private JourneyService journeyService;
@Resource
private PointService pointService;
@Resource
private Journey_detailsService journey_detailsService;
@Resource
private MemberInfoService memberInfoService;
@Resource
private Dictionaries_tableService dictionaries_tableService;
@Resource
private Report_illegal_stopService report_illegal_stopService;
@RequestMapping(value = "/pushMessage")
@ResponseBody
public Result pushMessage(String bikeNO) throws ParseException, IOException{
logger.info("-------------后台开始推送消息----------------------------");
Result result = new Result();
Bike bike=bikeService.findByBikeNO(bikeNO);
if(bike == null){
result.setStatus("1");
result.setMsg("推送失败");
return result;
}
// Bike bike = bikeService.findDeviceid(deviceid);
Report_illegal_stop report=report_illegal_stopService.findByBikeNO(bikeNO);
if(report == null){
result.setStatus("1");
result.setMsg("推送失败");
return result;
}
Journey journey = journeyService.findMB(bike.getMemberID(), bike.getBikeNO());
if(journey == null){
result.setStatus("1");
result.setMsg("推送失败");
return result;
}
// record the current time
journey.setEndDate(UtilDate.getDateFormatter());
String message=IpStatus(bike, UtilDate.getDateFormatter(),report.getLng(),report.getLat());
MemberInfo memberInfo = memberInfoService.findByMemberID(bike.getMemberID());
if(memberInfo == null){
result.setStatus("1");
result.setMsg("推送失败");
return result;
}
Bike mbikes = bikeService.findByMemberID(bike.getMemberID());
if(mbikes == null){
result.setStatus("1");
result.setMsg("推送失败");
return result;
}
mbikes.setMemberID("");
mbikes.setBank2("");
bikeService.upBike(mbikes);
if (memberInfo != null) {
Tests tests = new Tests();
tests.testSendPush(message, bike.getMemberID());
logger.info(message);
System.out.println(message);
response.getWriter().println(message);
result.setStatus("0");
result.setMsg("推送成功");
return result;
}
result.setStatus("1");
result.setMsg("推送失败");
return result;
}
public String IpStatus(Bike bikes, String time, String strlng, String strlat) throws ParseException, IOException {
bikes.setStatus("O");
bikeService.upBike(bikes);
Journey journey = journeyService.findMB(bikes.getMemberID(), bikes.getBikeNO());
logger.info(time);
journey.setEndDate(time);
journey.setClose_lng(strlng);
journey.setClose_lat(strlat);
journey.setJourneyID(String.valueOf(System.currentTimeMillis()));
journey.setBank1("");
journeyService.updateJourney(journey);
Journey_details journey_details = journey_detailsService.findMB(bikes.getMemberID(), bikes.getBikeNO());
journey_details.setClose_lng(strlng);
journey_details.setClose_lat(strlat);
journey_details.setJourneyID(String.valueOf(journey.getJourneyID()));
journey_details.setBank1("");
journey_detailsService.updateJourney_details(journey_details);
SimpleDateFormat dfs = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
Date begin = dfs.parse(journey.getStartDate());
Date end = dfs.parse(journey.getEndDate());
long between = (end.getTime() - begin.getTime()) / 1000; // divide by 1000 to convert milliseconds to seconds
int m = 0;
long min = 0;
if (between < 60) {
m = 1;
} else {
min = between / 60;
m = (int) min;
}
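// Billing sketch (illustrative, inferred from the code below): with rate
// taken from dictionaries_table.getMoney(), the cost is (full hours) * rate,
// plus one extra rate unit for any leftover partial hour; e.g. m = 90 and
// rate = 1 yields 2. As written, summoney stays 0 when m is an exact
// multiple of 60.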
int summoney = 0;
Dictionaries_table dictionaries_table = dictionaries_tableService.findAlls();
if (dictionaries_table.getMoney().equals("0")) {
summoney = 0;
Point point = new Point();
point.setMemberID(bikes.getMemberID());
point.setTime(UtilDate.getDateFormatter());
point.setNumber(Integer.toString(summoney));
point.setPoint_name("骑行完毕加" + summoney + "分");
pointService.savePoint(point);
} else {
int money = m / 60 * Integer.parseInt(dictionaries_table.getMoney());
int y = m % 60;
if (y != 0) {
summoney = money += Integer.parseInt(dictionaries_table.getMoney());
}
Point point = new Point();
point.setMemberID(bikes.getMemberID());
point.setTime(UtilDate.getDateFormatter());
point.setNumber(Integer.toString(summoney));
point.setPoint_name("骑行完毕加" + summoney + "分");
pointService.savePoint(point);
}
String KM = MapDistance.getDistance(journey.getOpen_lng(), journey.getOpen_lat(), journey.getClose_lng(),
journey.getClose_lat());
String carbonEmissions = Integer.toString((m * 140));
String calorie = Integer.toString((m * 49));
journey.setRidingCost(Integer.toString(summoney));
journey.setRidingTime(Integer.toString(m));
journeyService.updateJourney(journey);
journey_details.setBank1(KM);
journey_details.setCalorie(calorie);
journey_details.setCarbonEmissions(carbonEmissions);
journey_details.setRidingTime(Integer.toString(m));
journey_detailsService.updateJourney_details(journey_details);
MemberInfo memberInfo = memberInfoService.findByMemberID(bikes.getMemberID());
int p = Integer.parseInt(memberInfo.getPoint());
BigDecimal bigDecimal = new BigDecimal(memberInfo.getMoney());
BigDecimal bigDecimal2 = new BigDecimal(Integer.toString(summoney));
int sum = p + summoney;
String summy = bigDecimal.subtract(bigDecimal2).toString();
memberInfo.setPoint(Integer.toString(sum));
memberInfo.setMoney(summy);
memberInfoService.updateMemberInfo(memberInfo);
bikes.setStatus("O");
bikeService.upBike(bikes);
JsonObject jsonObj = new JsonObject();
jsonObj.add("journeyID", gson.toJsonTree(journey.getJourneyID()));
jsonObj.add("time", gson.toJsonTree(Integer.toString(m)));
jsonObj.add("carbonEmissions", gson.toJsonTree(carbonEmissions));
jsonObj.add("calorie", gson.toJsonTree(calorie));
jsonObj.add("km", gson.toJsonTree(KM));
jsonObj.add("hf", gson.toJsonTree(Double.toString(summoney)));
jsonObj.add("point", gson.toJsonTree(memberInfo.getPoint()));
jsonObj.add("money", gson.toJsonTree(memberInfo.getMoney()));
jsonObj.add("open_lng", gson.toJsonTree(journey.getOpen_lng()));
jsonObj.add("open_lat", gson.toJsonTree(journey.getOpen_lat()));
jsonObj.add("close_lng", gson.toJsonTree(journey.getClose_lng()));
jsonObj.add("close_lat", gson.toJsonTree(journey.getClose_lat()));
jsonObj.add("cmd", gson.toJsonTree("close"));
String str = jsonFormat.ElementFormat("01001", "Request succeeded", jsonObj);
logger.info(str);
return str;
}
}
|
apache-2.0
|
masonmei/apm-agent
|
thrift/src/main/java/com/baidu/oped/apm/thrift/io/TBaseLocator.java
|
1130
|
/*
* Copyright 2014 NAVER Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.baidu.oped.apm.thrift.io;
import org.apache.thrift.TBase;
import org.apache.thrift.TException;
/**
* @author emeroad
* @author koo.taejin
*/
public interface TBaseLocator {
TBase<?, ?> tBaseLookup(short type) throws TException;
// short typeLookup(TBase<?, ?> tbase) throws TException;
Header headerLookup(TBase<?, ?> dto) throws TException;
boolean isSupport(short type);
boolean isSupport(Class<? extends TBase> clazz);
Header getChunkHeader();
boolean isChunkHeader(short type);
}
|
apache-2.0
|
giggsey/libphonenumber-for-php
|
src/carrier/data/en/253.php
|
425
|
<?php
/**
* This file has been @generated by a phing task by {@link GeneratePhonePrefixData}.
* See [README.md](README.md#generating-data) for more information.
*
* Pull requests changing data in these files will not be accepted. See the
* [FAQ in the README](README.md#problems-with-invalid-numbers) on how to make
* metadata changes.
*
* Do not modify this file directly!
*/
return array (
2537 => 'Evatis',
);
|
apache-2.0
|
CraigHarris/gpdb
|
src/test/tinc/tincrepo/mpp/gpdb/tests/package/procedural_language/pljava/test_pljava.py
|
2461
|
#!/usr/bin/env python
"""
Copyright (C) 2004-2015 Pivotal Software, Inc. All rights reserved.
This program and the accompanying materials are made available under
the terms of the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
@summary: Test class for installing pljava language in a given GPDB system
"""
from mpp.gpdb.tests.package.procedural_language import ProceduralLanguage
from mpp.lib.gppkg.gppkg import Gppkg
from mpp.models import MPPTestCase
from tinctest.lib import run_shell_command
cmd = 'gpssh --version'
res = {'rc':0, 'stderr':'', 'stdout':''}
run_shell_command (cmd, 'check product version', res)
product_version = res['stdout'].split('gpssh version ')[1].split(' build ')[0]
class PljavaMPPTestCase(MPPTestCase):
def __init__(self, methodName):
self.pl = ProceduralLanguage()
self.language = 'pljava'
super(PljavaMPPTestCase, self).__init__(methodName)
@classmethod
def setUpClass(self):
super(PljavaMPPTestCase, self).setUpClass()
gppkg = Gppkg()
gppkg.gppkg_install(product_version, 'pljava')
def setUp(self):
"""
@summary: Overrides setUp for gptest to check if current OS is supported for gppkg. If not, test is skipped.
"""
if self.pl.gppkg_os.find('rhel') < 0 and self.pl.gppkg_os.find('suse') < 0:
self.skipTest('TEST SKIPPED: pljava is only supported on RHEL and SuSE. Skipping test.')
def tearDown(self):
pass
def test_install_Pljava(self):
"""Install pljava"""
if self.pl.language_in_db(self.language) == True:
self.pl.drop_lanaguage_from_db(self.language)
self.assertTrue(self.pl.create_language_in_db(self.language))
def test_uninstall_Pljava(self):
"""uninstall pljava language"""
if self.pl.language_in_db(self.language) == False:
self.pl.create_language_in_db(self.language)
self.assertTrue(self.pl.drop_lanaguage_from_db(self.language))
|
apache-2.0
|
TremoloSecurity/MyVirtualDirectory
|
server/src/main/java/net/sourceforge/myvd/types/Bool.java
|
846
|
/*
* Copyright 2008 Marc Boorshtein
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.sourceforge.myvd.types;
public class Bool {
boolean val;
public Bool(boolean val) {
this.val = val;
}
public void setValue(boolean val) {
this.val = val;
}
public boolean getValue() {
return this.val;
}
}
|
apache-2.0
|
mikebrow/containerd
|
pkg/process/init.go
|
12025
|
//go:build !windows
// +build !windows
/*
Copyright The containerd Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package process
import (
"context"
"encoding/json"
"fmt"
"io"
"os"
"path/filepath"
"strings"
"sync"
"time"
"github.com/containerd/console"
"github.com/containerd/containerd/log"
"github.com/containerd/containerd/mount"
"github.com/containerd/containerd/pkg/stdio"
"github.com/containerd/fifo"
runc "github.com/containerd/go-runc"
google_protobuf "github.com/gogo/protobuf/types"
specs "github.com/opencontainers/runtime-spec/specs-go"
"github.com/pkg/errors"
"golang.org/x/sys/unix"
)
// Init represents an initial process for a container
type Init struct {
wg sync.WaitGroup
initState initState
// mu is used to ensure that `Start()` and `Exited()` calls return in
// the right order when invoked in separate go routines.
// This is the case within the shim implementation as it makes use of
// the reaper interface.
mu sync.Mutex
waitBlock chan struct{}
WorkDir string
id string
Bundle string
console console.Console
Platform stdio.Platform
io *processIO
runtime *runc.Runc
// pausing preserves the pausing state.
pausing *atomicBool
status int
exited time.Time
pid int
closers []io.Closer
stdin io.Closer
stdio stdio.Stdio
Rootfs string
IoUID int
IoGID int
NoPivotRoot bool
NoNewKeyring bool
CriuWorkPath string
}
// NewRunc returns a new runc instance for a process
func NewRunc(root, path, namespace, runtime, criu string, systemd bool) *runc.Runc {
if root == "" {
root = RuncRoot
}
return &runc.Runc{
Command: runtime,
Log: filepath.Join(path, "log.json"),
LogFormat: runc.JSON,
PdeathSignal: unix.SIGKILL,
Root: filepath.Join(root, namespace),
Criu: criu,
SystemdCgroup: systemd,
}
}
// New returns a new process
func New(id string, runtime *runc.Runc, stdio stdio.Stdio) *Init {
p := &Init{
id: id,
runtime: runtime,
pausing: new(atomicBool),
stdio: stdio,
status: 0,
waitBlock: make(chan struct{}),
}
p.initState = &createdState{p: p}
return p
}
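// Note (descriptive, inferred from this file): Init delegates its lifecycle
// methods (Start, Pause, Resume, Delete, Kill, Exec, SetExited) to the
// internal initState state machine, starting in createdState; the per-call
// locking on mu keeps those transitions ordered.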
// Create the process with the provided config
func (p *Init) Create(ctx context.Context, r *CreateConfig) error {
var (
err error
socket *runc.Socket
pio *processIO
pidFile = newPidFile(p.Bundle)
)
if r.Terminal {
if socket, err = runc.NewTempConsoleSocket(); err != nil {
return errors.Wrap(err, "failed to create OCI runtime console socket")
}
defer socket.Close()
} else {
if pio, err = createIO(ctx, p.id, p.IoUID, p.IoGID, p.stdio); err != nil {
return errors.Wrap(err, "failed to create init process I/O")
}
p.io = pio
}
if r.Checkpoint != "" {
return p.createCheckpointedState(r, pidFile)
}
opts := &runc.CreateOpts{
PidFile: pidFile.Path(),
NoPivot: p.NoPivotRoot,
NoNewKeyring: p.NoNewKeyring,
}
if p.io != nil {
opts.IO = p.io.IO()
}
if socket != nil {
opts.ConsoleSocket = socket
}
if err := p.runtime.Create(ctx, r.ID, r.Bundle, opts); err != nil {
return p.runtimeError(err, "OCI runtime create failed")
}
if r.Stdin != "" {
if err := p.openStdin(r.Stdin); err != nil {
return err
}
}
ctx, cancel := context.WithTimeout(ctx, 30*time.Second)
defer cancel()
if socket != nil {
console, err := socket.ReceiveMaster()
if err != nil {
return errors.Wrap(err, "failed to retrieve console master")
}
console, err = p.Platform.CopyConsole(ctx, console, p.id, r.Stdin, r.Stdout, r.Stderr, &p.wg)
if err != nil {
return errors.Wrap(err, "failed to start console copy")
}
p.console = console
} else {
if err := pio.Copy(ctx, &p.wg); err != nil {
return errors.Wrap(err, "failed to start io pipe copy")
}
}
pid, err := pidFile.Read()
if err != nil {
return errors.Wrap(err, "failed to retrieve OCI runtime container pid")
}
p.pid = pid
return nil
}
func (p *Init) openStdin(path string) error {
sc, err := fifo.OpenFifo(context.Background(), path, unix.O_WRONLY|unix.O_NONBLOCK, 0)
if err != nil {
return errors.Wrapf(err, "failed to open stdin fifo %s", path)
}
p.stdin = sc
p.closers = append(p.closers, sc)
return nil
}
func (p *Init) createCheckpointedState(r *CreateConfig, pidFile *pidFile) error {
opts := &runc.RestoreOpts{
CheckpointOpts: runc.CheckpointOpts{
ImagePath: r.Checkpoint,
WorkDir: p.CriuWorkPath,
ParentPath: r.ParentCheckpoint,
},
PidFile: pidFile.Path(),
NoPivot: p.NoPivotRoot,
Detach: true,
NoSubreaper: true,
}
if p.io != nil {
opts.IO = p.io.IO()
}
p.initState = &createdCheckpointState{
p: p,
opts: opts,
}
return nil
}
// Wait for the process to exit
func (p *Init) Wait() {
<-p.waitBlock
}
// ID of the process
func (p *Init) ID() string {
return p.id
}
// Pid of the process
func (p *Init) Pid() int {
return p.pid
}
// ExitStatus of the process
func (p *Init) ExitStatus() int {
p.mu.Lock()
defer p.mu.Unlock()
return p.status
}
// ExitedAt at time when the process exited
func (p *Init) ExitedAt() time.Time {
p.mu.Lock()
defer p.mu.Unlock()
return p.exited
}
// Status of the process
func (p *Init) Status(ctx context.Context) (string, error) {
if p.pausing.get() {
return "pausing", nil
}
p.mu.Lock()
defer p.mu.Unlock()
return p.initState.Status(ctx)
}
// Start the init process
func (p *Init) Start(ctx context.Context) error {
p.mu.Lock()
defer p.mu.Unlock()
return p.initState.Start(ctx)
}
func (p *Init) start(ctx context.Context) error {
err := p.runtime.Start(ctx, p.id)
return p.runtimeError(err, "OCI runtime start failed")
}
// SetExited of the init process with the next status
func (p *Init) SetExited(status int) {
p.mu.Lock()
defer p.mu.Unlock()
p.initState.SetExited(status)
}
func (p *Init) setExited(status int) {
p.exited = time.Now()
p.status = status
p.Platform.ShutdownConsole(context.Background(), p.console)
close(p.waitBlock)
}
// Delete the init process
func (p *Init) Delete(ctx context.Context) error {
p.mu.Lock()
defer p.mu.Unlock()
return p.initState.Delete(ctx)
}
func (p *Init) delete(ctx context.Context) error {
waitTimeout(ctx, &p.wg, 2*time.Second)
err := p.runtime.Delete(ctx, p.id, nil)
// ignore errors if a runtime has already deleted the process
// but we still hold metadata and pipes
//
// this is common during a checkpoint, runc will delete the container state
// after a checkpoint and the container will no longer exist within runc
if err != nil {
if strings.Contains(err.Error(), "does not exist") {
err = nil
} else {
err = p.runtimeError(err, "failed to delete task")
}
}
if p.io != nil {
for _, c := range p.closers {
c.Close()
}
p.io.Close()
}
if err2 := mount.UnmountAll(p.Rootfs, 0); err2 != nil {
log.G(ctx).WithError(err2).Warn("failed to cleanup rootfs mount")
if err == nil {
err = errors.Wrap(err2, "failed rootfs umount")
}
}
return err
}
// Resize the init processes console
func (p *Init) Resize(ws console.WinSize) error {
p.mu.Lock()
defer p.mu.Unlock()
if p.console == nil {
return nil
}
return p.console.Resize(ws)
}
// Pause the init process and all its child processes
func (p *Init) Pause(ctx context.Context) error {
p.mu.Lock()
defer p.mu.Unlock()
return p.initState.Pause(ctx)
}
// Resume the init process and all its child processes
func (p *Init) Resume(ctx context.Context) error {
p.mu.Lock()
defer p.mu.Unlock()
return p.initState.Resume(ctx)
}
// Kill the init process
func (p *Init) Kill(ctx context.Context, signal uint32, all bool) error {
p.mu.Lock()
defer p.mu.Unlock()
return p.initState.Kill(ctx, signal, all)
}
func (p *Init) kill(ctx context.Context, signal uint32, all bool) error {
err := p.runtime.Kill(ctx, p.id, int(signal), &runc.KillOpts{
All: all,
})
return checkKillError(err)
}
// KillAll processes belonging to the init process
func (p *Init) KillAll(ctx context.Context) error {
p.mu.Lock()
defer p.mu.Unlock()
err := p.runtime.Kill(ctx, p.id, int(unix.SIGKILL), &runc.KillOpts{
All: true,
})
return p.runtimeError(err, "OCI runtime killall failed")
}
// Stdin of the process
func (p *Init) Stdin() io.Closer {
return p.stdin
}
// Runtime returns the OCI runtime configured for the init process
func (p *Init) Runtime() *runc.Runc {
return p.runtime
}
// Exec returns a new child process
func (p *Init) Exec(ctx context.Context, path string, r *ExecConfig) (Process, error) {
p.mu.Lock()
defer p.mu.Unlock()
return p.initState.Exec(ctx, path, r)
}
// exec returns a new exec'd process
func (p *Init) exec(ctx context.Context, path string, r *ExecConfig) (Process, error) {
// process exec request
var spec specs.Process
if err := json.Unmarshal(r.Spec.Value, &spec); err != nil {
return nil, err
}
spec.Terminal = r.Terminal
e := &execProcess{
id: r.ID,
path: path,
parent: p,
spec: spec,
stdio: stdio.Stdio{
Stdin: r.Stdin,
Stdout: r.Stdout,
Stderr: r.Stderr,
Terminal: r.Terminal,
},
waitBlock: make(chan struct{}),
}
e.execState = &execCreatedState{p: e}
return e, nil
}
// Checkpoint the init process
func (p *Init) Checkpoint(ctx context.Context, r *CheckpointConfig) error {
p.mu.Lock()
defer p.mu.Unlock()
return p.initState.Checkpoint(ctx, r)
}
func (p *Init) checkpoint(ctx context.Context, r *CheckpointConfig) error {
var actions []runc.CheckpointAction
if !r.Exit {
actions = append(actions, runc.LeaveRunning)
}
	// keep the CRIU work directory only when the caller supplied one;
	// otherwise use a temporary directory and remove it afterwards
work := r.WorkDir
if work == "" {
work = filepath.Join(p.WorkDir, "criu-work")
defer os.RemoveAll(work)
}
if err := p.runtime.Checkpoint(ctx, p.id, &runc.CheckpointOpts{
WorkDir: work,
ImagePath: r.Path,
AllowOpenTCP: r.AllowOpenTCP,
AllowExternalUnixSockets: r.AllowExternalUnixSockets,
AllowTerminal: r.AllowTerminal,
FileLocks: r.FileLocks,
EmptyNamespaces: r.EmptyNamespaces,
}, actions...); err != nil {
dumpLog := filepath.Join(p.Bundle, "criu-dump.log")
if cerr := copyFile(dumpLog, filepath.Join(work, "dump.log")); cerr != nil {
log.G(ctx).WithError(cerr).Error("failed to copy dump.log to criu-dump.log")
}
return fmt.Errorf("%s path= %s", criuError(err), dumpLog)
}
return nil
}
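// Editorial note, grounded in checkpoint() above: when no WorkDir is supplied
// in the CheckpointConfig, a temporary <WorkDir>/criu-work directory is used
// and removed afterwards; on failure the CRIU dump.log is copied into the
// bundle as criu-dump.log so the returned error can point at it.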
// Update the processes resource configuration
func (p *Init) Update(ctx context.Context, r *google_protobuf.Any) error {
p.mu.Lock()
defer p.mu.Unlock()
return p.initState.Update(ctx, r)
}
func (p *Init) update(ctx context.Context, r *google_protobuf.Any) error {
var resources specs.LinuxResources
if err := json.Unmarshal(r.Value, &resources); err != nil {
return err
}
return p.runtime.Update(ctx, p.id, &resources)
}
// Stdio of the process
func (p *Init) Stdio() stdio.Stdio {
return p.stdio
}
func (p *Init) runtimeError(rErr error, msg string) error {
if rErr == nil {
return nil
}
rMsg, err := getLastRuntimeError(p.runtime)
switch {
case err != nil:
return errors.Wrapf(rErr, "%s: %s (%s)", msg, "unable to retrieve OCI runtime error", err.Error())
case rMsg == "":
return errors.Wrap(rErr, msg)
default:
return errors.Errorf("%s: %s", msg, rMsg)
}
}
func withConditionalIO(c stdio.Stdio) runc.IOOpt {
return func(o *runc.IOOption) {
o.OpenStdin = c.Stdin != ""
o.OpenStdout = c.Stdout != ""
o.OpenStderr = c.Stderr != ""
}
}
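// Editorial sketch: withConditionalIO is typically handed to the go-runc pipe
// constructor so that only the streams the caller actually wired up are
// opened. The surrounding caller names here are assumptions:
//
//	pio, err := runc.NewPipeIO(ioUID, ioGID, withConditionalIO(stdio))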
|
apache-2.0
|
apache/sis
|
core/sis-referencing/src/main/java/org/apache/sis/internal/referencing/provider/Geographic3Dto2D.java
|
7953
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.sis.internal.referencing.provider;
import javax.xml.bind.annotation.XmlTransient;
import org.opengis.util.FactoryException;
import org.opengis.parameter.ParameterValueGroup;
import org.opengis.parameter.ParameterDescriptorGroup;
import org.opengis.referencing.operation.MathTransform;
import org.opengis.referencing.operation.MathTransformFactory;
import org.opengis.referencing.operation.NoninvertibleTransformException;
import org.apache.sis.referencing.operation.matrix.Matrices;
import org.apache.sis.referencing.operation.matrix.MatrixSIS;
import org.apache.sis.referencing.operation.transform.MathTransforms;
import org.apache.sis.parameter.Parameterized;
import org.apache.sis.io.wkt.Formatter;
import org.apache.sis.io.wkt.FormattableObject;
import org.apache.sis.internal.referencing.WKTKeywords;
import org.apache.sis.internal.referencing.WKTUtilities;
/**
* The provider for <cite>"Geographic 3D to 2D conversion"</cite> (EPSG:9659).
 * This is a trivial operation that just drops the height in a geographic coordinate.
* The inverse operation arbitrarily sets the ellipsoidal height to zero.
*
* @author Martin Desruisseaux (Geomatys)
* @version 0.8
*
* @see Geographic2Dto3D
*
* @since 0.7
* @module
*/
@XmlTransient
public final class Geographic3Dto2D extends GeographicRedimension {
/**
* Serial number for inter-operability with different versions.
*/
private static final long serialVersionUID = -9103595336196565505L;
/**
* The group of all parameters expected by this coordinate operation (in this case, none).
*/
public static final ParameterDescriptorGroup PARAMETERS = builder()
.addIdentifier("9659").addName("Geographic3D to 2D conversion").createGroup();
/**
* The unique instance, created when first needed.
*/
private transient MathTransform transform;
/**
* Constructs a provider with default parameters.
*/
public Geographic3Dto2D() {
this(new GeodeticOperation[4]);
redimensioned[0] = new GeographicRedimension(2, redimensioned);
redimensioned[1] = new Geographic2Dto3D(redimensioned);
redimensioned[2] = this;
redimensioned[3] = new GeographicRedimension(3, redimensioned);
}
/**
* Constructs a provider that can be resized.
*/
private Geographic3Dto2D(GeodeticOperation[] redimensioned) {
super(3, 2, PARAMETERS, redimensioned);
}
/**
* Returns the inverse of this operation.
*/
@Override
public AbstractProvider inverse() {
return getMethod(Geographic2Dto3D.PARAMETERS);
}
/**
* Workaround while waiting for JDK 9. After migration to Jigsaw, {@link #inverse()}
* should return directly the unique provider instance.
*/
static AbstractProvider getMethod(final ParameterDescriptorGroup desc) {
try {
return (AbstractProvider) org.apache.sis.internal.system.DefaultFactories.forBuildin(MathTransformFactory.class,
org.apache.sis.referencing.operation.transform.DefaultMathTransformFactory.class)
.getOperationMethod(desc.getName().getCode());
} catch (FactoryException e) {
throw new org.apache.sis.util.collection.BackingStoreException(e);
}
}
/**
* Returns the transform.
*
* <div class="note"><b>Implementation note:</b>
     * creating a transform that drops a dimension is trivial. We even have a helper method for that:
     * {@link Matrices#createDimensionSelect}. The difficulty is that the inverse of that transform
     * will set the height to NaN, while we want zero. The trick is to first create the transform for
     * the inverse transform with the zero that we want, then get the inverse of that inverse transform.
     * The transform that we get will remember where it comes from (its inverse).
     *
     * <p>This works with the SIS implementation, but is not guaranteed to work with other implementations.
     * For that reason, this method does not use the given {@code factory}.</p></div>
*
* @param factory ignored (can be null).
* @param values ignored.
* @return the math transform.
* @throws FactoryException should never happen.
*/
@Override
public synchronized MathTransform createMathTransform(MathTransformFactory factory, ParameterValueGroup values)
throws FactoryException
{
if (transform == null) try {
final MatrixSIS m = Matrices.createDiagonal(4, 3);
m.setElement(2, 2, 0); // Here is the height value that we want.
m.setElement(3, 2, 1);
transform = MathTransforms.linear(m).inverse();
} catch (NoninvertibleTransformException e) {
throw new FactoryException(e); // Should never happen.
}
return transform;
}
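    // Illustration (editorial, not part of the class contract): the 4×3 matrix
    // built above is the affine 2D→3D map (λ,φ,1) ↦ (λ,φ,0,1):
    //
    //     ┌ 1 0 0 ┐
    //     │ 0 1 0 │   (the zeroed row is the ellipsoidal height,
    //     │ 0 0 0 │    the last row is the usual affine term)
    //     └ 0 0 1 ┘
    //
    // Inverting it yields the desired 3D→2D transform, whose own inverse then
    // restores a zero height instead of NaN.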
/**
* A temporary placeholder used for formatting a {@code PARAM_MT["Geographic 3D to 2D conversion"]} element in
* Well-Known Text format. This placeholder is needed because there is no {@link MathTransform} implementation
* for the Geographic 3D to 2D conversion, since we use affine transform instead.
*/
public static final class WKT extends FormattableObject implements Parameterized {
/**
* {@code true} if this placeholder is for the inverse transform instead of the direct one.
*/
private final boolean inverse;
/**
* Creates a new object to be formatted.
*
* @param inverse {@code false} for the "Geographic3D to 2D" operation, or {@code true} for its inverse.
*/
public WKT(final boolean inverse) {
this.inverse = inverse;
}
/**
* Returns the parameters descriptor.
*/
@Override
public ParameterDescriptorGroup getParameterDescriptors() {
return PARAMETERS;
}
/**
* Returns the parameter values.
*/
@Override
public ParameterValueGroup getParameterValues() {
return PARAMETERS.createValue();
}
/**
* Formats a <cite>Well Known Text</cite> version 1 (WKT 1) element for a transform using this group of parameters.
*
* <div class="note"><b>Compatibility note:</b>
* {@code Param_MT} is defined in the WKT 1 specification only.
* If the {@linkplain Formatter#getConvention() formatter convention} is set to WKT 2,
* then this method silently uses the WKT 1 convention without raising an error.</div>
*
* @return {@code "Param_MT"} or {@code "Inverse_MT"}.
*/
@Override
protected String formatTo(final Formatter formatter) {
if (inverse) {
formatter.append(new WKT(false));
return WKTKeywords.Inverse_MT;
} else {
WKTUtilities.appendParamMT(getParameterValues(), formatter);
return WKTKeywords.Param_MT;
}
}
}
}
|
apache-2.0
|
alibaba/fastjson
|
src/test/java/com/alibaba/json/bvt/parser/DefaultExtJSONParserTest_4.java
|
3392
|
package com.alibaba.json.bvt.parser;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.junit.Assert;
import junit.framework.TestCase;
import com.alibaba.fastjson.JSONObject;
import com.alibaba.fastjson.parser.DefaultJSONParser;
import com.alibaba.fastjson.parser.Feature;
public class DefaultExtJSONParserTest_4 extends TestCase {
public void test_0() throws Exception {
List<?> res = Arrays.asList(1, 2, 3);
String[] tests = { "[1,2,3]", "[1,,2,3]", "[1,2,,,3]", "[1 2,,,3]", "[1 2 3]", "[1, 2, 3,,]", "[,,1, 2, 3,,]", };
for (String t : tests) {
DefaultJSONParser ext = new DefaultJSONParser(t);
ext.config(Feature.AllowArbitraryCommas, true);
List<Object> extRes = ext.parseArray(Object.class);
Assert.assertEquals(res, extRes);
DefaultJSONParser basic = new DefaultJSONParser(t);
basic.config(Feature.AllowArbitraryCommas, true);
List<Object> basicRes = new ArrayList<Object>();
basic.parseArray(basicRes);
Assert.assertEquals(res, basicRes);
}
}
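    // Note (editorial): with Feature.AllowArbitraryCommas enabled, redundant
    // commas such as in "[1,,2,3]" are tolerated, and per these fixtures every
    // variant above parses to [1, 2, 3]; both parseArray(Class) and
    // parseArray(Collection) must agree on the result.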
public void test_1() throws Exception {
JSONObject res = new JSONObject();
res.put("a", 1);
res.put("b", 2);
res.put("c", 3);
String[] tests = { "{ 'a':1, 'b':2, 'c':3 }", "{ 'a':1,,'b':2, 'c':3 }", "{,'a':1, 'b':2, 'c':3 }", "{'a':1, 'b':2, 'c':3,,}",
"{,,'a':1,,,,'b':2,'c':3,,,,,}", };
for (String t : tests) {
DefaultJSONParser ext = new DefaultJSONParser(t);
ext.config(Feature.AllowArbitraryCommas, true);
JSONObject extRes = ext.parseObject();
Assert.assertEquals(res, extRes);
DefaultJSONParser basic = new DefaultJSONParser(t);
basic.config(Feature.AllowArbitraryCommas, true);
JSONObject basicRes = basic.parseObject();
Assert.assertEquals(res, basicRes);
}
}
public void test_2() throws Exception {
A res = new A();
res.setA(1);
res.setB(2);
res.setC(3);
String[] tests = { "{ 'a':1, 'b':2, 'c':3 }", "{ 'a':1,,'b':2, 'c':3 }", "{,'a':1, 'b':2, 'c':3 }", "{'a':1, 'b':2, 'c':3,,}",
"{,,'a':1,,,,'b':2,,'c':3,,,,,}", };
for (String t : tests) {
DefaultJSONParser ext = new DefaultJSONParser(t);
ext.config(Feature.AllowArbitraryCommas, true);
A extRes = ext.parseObject(A.class);
Assert.assertEquals(res, extRes);
}
}
public static class A {
private int a, b, c;
public A(){
}
public int getA() {
return a;
}
public void setA(int a) {
this.a = a;
}
public int getB() {
return b;
}
public void setB(int b) {
this.b = b;
}
public int getC() {
return c;
}
public void setC(int c) {
this.c = c;
}
@Override
public boolean equals(Object obj) {
A o = (A) obj;
return a == o.a && b == o.b && c == o.c;
}
}
}
|
apache-2.0
|
Dominator008/buck
|
test/com/facebook/buck/cli/BuildCommandTest.java
|
5971
|
/*
* Copyright 2014-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.cli;
import static com.facebook.buck.rules.BuildRuleSuccessType.BUILT_LOCALLY;
import static com.facebook.buck.rules.BuildRuleSuccessType.FETCHED_FROM_CACHE;
import static org.junit.Assert.assertEquals;
import com.facebook.buck.artifact_cache.CacheResult;
import com.facebook.buck.command.BuildExecutionResult;
import com.facebook.buck.command.BuildReport;
import com.facebook.buck.model.BuildTargetFactory;
import com.facebook.buck.rules.BuildResult;
import com.facebook.buck.rules.BuildRule;
import com.facebook.buck.rules.BuildRuleResolver;
import com.facebook.buck.rules.DefaultTargetNodeToBuildRuleTransformer;
import com.facebook.buck.rules.FakeBuildRule;
import com.facebook.buck.rules.SourcePathResolver;
import com.facebook.buck.rules.TargetGraph;
import com.facebook.buck.testutil.TestConsole;
import com.facebook.buck.util.Ansi;
import com.facebook.buck.util.CapturingPrintStream;
import com.facebook.buck.util.Console;
import com.facebook.buck.util.Verbosity;
import com.google.common.base.Joiner;
import com.google.common.base.Optional;
import com.google.common.collect.ImmutableSet;
import org.junit.Before;
import org.junit.Test;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.LinkedHashMap;
import javax.annotation.Nullable;
public class BuildCommandTest {
private BuildExecutionResult buildExecutionResult;
@Before
public void setUp() {
BuildRuleResolver ruleResolver =
new BuildRuleResolver(TargetGraph.EMPTY, new DefaultTargetNodeToBuildRuleTransformer());
SourcePathResolver resolver = new SourcePathResolver(ruleResolver);
LinkedHashMap<BuildRule, Optional<BuildResult>> ruleToResult = new LinkedHashMap<>();
BuildRule rule1 = new FakeBuildRule(
BuildTargetFactory.newInstance("//fake:rule1"),
resolver) {
@Override
@Nullable
public Path getPathToOutput() {
return Paths.get("buck-out/gen/fake/rule1.txt");
}
};
ruleToResult.put(
rule1,
Optional.of(BuildResult.success(rule1, BUILT_LOCALLY, CacheResult.miss())));
BuildRule rule2 = new FakeBuildRule(
BuildTargetFactory.newInstance("//fake:rule2"),
resolver);
BuildResult rule2Failure = BuildResult.failure(rule2, new RuntimeException("some"));
ruleToResult.put(rule2, Optional.of(rule2Failure));
BuildRule rule3 = new FakeBuildRule(
BuildTargetFactory.newInstance("//fake:rule3"),
resolver);
ruleToResult.put(
rule3,
Optional.of(BuildResult.success(rule3, FETCHED_FROM_CACHE, CacheResult.hit("dir"))));
BuildRule rule4 = new FakeBuildRule(
BuildTargetFactory.newInstance("//fake:rule4"),
resolver);
ruleToResult.put(rule4, Optional.<BuildResult>absent());
buildExecutionResult = BuildExecutionResult.builder()
.setResults(ruleToResult)
.setFailures(ImmutableSet.of(rule2Failure))
.build();
}
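  // The four fake rules above cover every report state exercised by the tests
  // below: rule1 built locally with an output path, rule2 failed with an
  // exception, rule3 was fetched from cache, and rule4 produced no result at
  // all (also rendered as FAIL).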
@Test
public void testGenerateBuildReportForConsole() {
String expectedReport =
"\u001B[1m\u001B[42m\u001B[30mOK \u001B[0m //fake:rule1 " +
"BUILT_LOCALLY buck-out/gen/fake/rule1.txt\n" +
"\u001B[1m\u001B[41m\u001B[37mFAIL\u001B[0m //fake:rule2\n" +
"\u001B[1m\u001B[42m\u001B[30mOK \u001B[0m //fake:rule3 FETCHED_FROM_CACHE\n" +
"\u001B[1m\u001B[41m\u001B[37mFAIL\u001B[0m //fake:rule4\n";
String observedReport = new BuildReport(buildExecutionResult).generateForConsole(
new Console(
Verbosity.STANDARD_INFORMATION,
new CapturingPrintStream(),
new CapturingPrintStream(),
Ansi.forceTty()));
assertEquals(expectedReport, observedReport);
}
@Test
public void testGenerateVerboseBuildReportForConsole() {
String expectedReport =
"OK //fake:rule1 BUILT_LOCALLY buck-out/gen/fake/rule1.txt\n" +
"FAIL //fake:rule2\n" +
"OK //fake:rule3 FETCHED_FROM_CACHE\n" +
"FAIL //fake:rule4\n\n" +
" ** Summary of failures encountered during the build **\n" +
"Rule //fake:rule2 FAILED because some.\n";
String observedReport = new BuildReport(buildExecutionResult).generateForConsole(
new TestConsole(Verbosity.COMMANDS));
assertEquals(expectedReport, observedReport);
}
@Test
public void testGenerateJsonBuildReport() throws IOException {
String expectedReport = Joiner.on('\n').join(
"{",
" \"success\" : false,",
" \"results\" : {",
" \"//fake:rule1\" : {",
" \"success\" : true,",
" \"type\" : \"BUILT_LOCALLY\",",
" \"output\" : \"buck-out/gen/fake/rule1.txt\"",
" },",
" \"//fake:rule2\" : {",
" \"success\" : false",
" },",
" \"//fake:rule3\" : {",
" \"success\" : true,",
" \"type\" : \"FETCHED_FROM_CACHE\",",
" \"output\" : null",
" },",
" \"//fake:rule4\" : {",
" \"success\" : false",
" }",
" },",
" \"failures\" : {",
" \"//fake:rule2\" : \"some\"",
" }",
"}");
String observedReport = new BuildReport(buildExecutionResult).generateJsonBuildReport();
assertEquals(expectedReport, observedReport);
}
}
|
apache-2.0
|
whumph/sakai
|
lessonbuilder/tool/src/java/org/sakaiproject/lessonbuildertool/service/BltiEntity.java
|
19054
|
/**********************************************************************************
* $URL: $
* $Id: $
***********************************************************************************
*
* Author: Charles Hedrick, hedrick@rutgers.edu
*
* Copyright (c) 2010 Rutgers, the State University of New Jersey
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
**********************************************************************************/
package org.sakaiproject.lessonbuildertool.service;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;
import java.util.Comparator;
import java.util.Date;
import java.util.Map;
import java.util.Iterator;
import java.util.Properties;
import java.net.URLEncoder;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.sakaiproject.lessonbuildertool.service.LessonSubmission;
import org.sakaiproject.lessonbuildertool.tool.beans.SimplePageBean;
import org.sakaiproject.lessonbuildertool.tool.beans.SimplePageBean.UrlItem;
import org.sakaiproject.tool.api.Session;
import org.sakaiproject.tool.cover.ToolManager;
import org.sakaiproject.tool.api.SessionManager;
import org.sakaiproject.site.api.Site;
import org.sakaiproject.site.cover.SiteService;
import org.sakaiproject.site.api.ToolConfiguration;
import org.sakaiproject.component.cover.ServerConfigurationService;
import org.sakaiproject.component.cover.ComponentManager;
import org.sakaiproject.memory.api.Cache;
import org.sakaiproject.memory.api.CacheRefresher;
import org.sakaiproject.memory.api.MemoryService;
import uk.org.ponder.messageutil.MessageLocator;
import org.sakaiproject.lti.api.LTIService;
// import org.sakaiproject.lti.impl.DBLTIService; // HACK
/**
* Interface to Assignment
*
* @author Charles Hedrick <hedrick@rutgers.edu>
*
*/
// NOTE: almost no other class should import this. We want to be able
// to support both forums and jforum. So typically there will be a
// forumEntity, but it's injected, and it can be either forum or jforum.
// Hence it has to be declared LessonEntity. That leads to a lot of
// declarations like LessonEntity forumEntity. In this case forumEntity
// means either a ForumEntity or a JForumEntity. We can't just call the
// variables lessonEntity because the same module will probably have an
// injected class to handle tests and quizzes as well. That will eventually
// be converted to be a LessonEntity.
public class BltiEntity implements LessonEntity, BltiInterface {
private static Log log = LogFactory.getLog(BltiEntity.class);
private static Cache bltiCache = null;
protected static final int DEFAULT_EXPIRATION = 10 * 60;
// 2.8 doesn't define this, so put it here
static final String LTI_PAGETITLE = "pagetitle";
private SimplePageBean simplePageBean;
protected static LTIService ltiService = null;
public void setSimplePageBean(SimplePageBean simplePageBean) {
this.simplePageBean = simplePageBean;
}
private LessonEntity nextEntity = null;
public void setNextEntity(LessonEntity e) {
nextEntity = e;
}
public LessonEntity getNextEntity() {
return nextEntity;
}
static MemoryService memoryService = null;
public void setMemoryService(MemoryService m) {
memoryService = m;
}
static MessageLocator messageLocator = null;
public void setMessageLocator(MessageLocator m) {
messageLocator = m;
}
static String returnUrl = null;
public void setReturnUrl(String m) {
returnUrl = m;
}
public void init () {
log.info("init()");
bltiCache = memoryService
.newCache("org.sakaiproject.lessonbuildertool.service.BltiEntity.cache");
/* Hack to avoid a restart to get a new version of DBLTIService
if ( ltiService == null ) {
ltiService = (LTIService) new DBLTIService();
((org.sakaiproject.lti.impl.DBLTIService) ltiService).setAutoDdl("true");
((org.sakaiproject.lti.impl.DBLTIService) ltiService).init();
}
*/
if ( ltiService == null ) {
Object service = ComponentManager.get("org.sakaiproject.lti.api.LTIService");
if (service == null) {
log.info("can't find LTI Service -- disabling LTI support");
return;
}
ltiService = (LTIService)service;
log.info("LTI initialized");
}
}
public void destroy()
{
// bltiCache.destroy();
// bltiCache = null;
log.info("destroy()");
}
    // To create the bean: the bean is used only to call the pseudo-static
    // methods such as getEntitiesInSite, so type, id, etc. are left uninitialized.
public boolean servicePresent() {
return ltiService != null;
}
protected BltiEntity() {
}
protected BltiEntity(int type, String id) {
this.type = type;
this.id = id;
}
public String getToolId() {
return "sakai.blti";
}
// the underlying object, something Sakaiish
protected String id;
protected int type;
// not required fields. If we need to look up
// the actual objects, lets us cache them
protected Map<String,Object> content;
protected Map<String,Object> tool;
/*
public Blti getBlti(String ref, boolean nocache) {
Blti ret = (Blti)bltiCache.get(ref);
if (!nocache && ret != null)
return ret;
try {
// ret = BltiService.getBlti(ref);
} catch (Exception e) {
ret = null;
}
if (ret != null)
bltiCache.put(ref, ret, DEFAULT_EXPIRATION);
return ret;
}
*/
// type of the underlying object
public int getType() {
return type;
}
public int getLevel() {
return 0;
}
public int getTypeOfGrade() {
return 1;
}
// hack for forums. not used for assessments, so always ok
public boolean isUsable() {
return true;
}
public String getReference() {
return "/" + BLTI + "/" + id;
}
public List<LessonEntity> getEntitiesInSite() {
return getEntitiesInSite(null, null);
}
public List<LessonEntity> getEntitiesInSite(SimplePageBean bean) {
return getEntitiesInSite(bean, null);
}
// find topics in site, but organized by forum
public List<LessonEntity> getEntitiesInSite(SimplePageBean bean, Integer bltiToolId) {
List<LessonEntity> ret = new ArrayList<LessonEntity>();
if (ltiService == null)
return ret;
String search = null;
if (bltiToolId != null)
search = "tool_id=" + bltiToolId;
List<Map<String,Object>> contents = ltiService.getContents(search,null,0,0);
for (Map<String, Object> content : contents ) {
Long id = getLong(content.get(LTIService.LTI_ID));
if ( id == -1 ) continue;
BltiEntity entity = new BltiEntity(TYPE_BLTI, id.toString());
entity.content = content;
ret.add(entity);
}
return ret;
}
public LessonEntity getEntity(String ref, SimplePageBean o) {
return getEntity(ref);
}
public LessonEntity getEntity(String ref) {
int i = ref.indexOf("/",1);
String typeString = ref.substring(1, i);
String idString = ref.substring(i+1);
String id = "";
try {
id = idString;
} catch (Exception ignore) {
return null;
}
if (typeString.equals(BLTI)) {
return new BltiEntity(TYPE_BLTI, id);
} else if (nextEntity != null) {
// in case we chain to a different implementation. Not likely for BLTI
return nextEntity.getEntity(ref);
} else
return null;
}
protected void loadContent() {
if ( content != null ) return;
if ( id == null ) return; // Likely a failure
if ( ltiService == null) return; // not basiclti or old
Long key = getLong(id);
content = ltiService.getContent(key);
if ( content == null ) return;
Long toolKey = getLongNull(content.get("tool_id"));
if (toolKey != null ) tool = ltiService.getTool(toolKey);
}
// properties of entities
public String getTitle() {
loadContent();
if ( content == null ) return null;
return (String) content.get(LTIService.LTI_TITLE);
}
private String getErrorUrl() {
return "javascript:document.write('" + messageLocator.getMessage("simplepage.format.item_removed").replace("'", "\\'") + "')";
}
// TODO: Concern regarding the lack of the returnUrl when this is called
public String getUrl() {
loadContent();
// If I return null here, it appears that I cause an NPE in LB
if ( content == null ) return getErrorUrl();
String ret = (String) content.get("launch_url");
if ( ltiService != null && tool != null && ltiService.isMaintain()
&& LTIService.LTI_SECRET_INCOMPLETE.equals((String) tool.get(LTIService.LTI_SECRET))
&& LTIService.LTI_SECRET_INCOMPLETE.equals((String) tool.get(LTIService.LTI_CONSUMERKEY)) ) {
String toolId = getCurrentTool("sakai.siteinfo");
if ( toolId != null ) {
ret = editItemUrl(toolId);
return ret;
}
}
ret = ServerConfigurationService.getServerUrl() + ret;
return ret;
}
public Date getDueDate() {
return null;
}
// the following methods all take references. So they're in effect static.
// They ignore the entity from which they're called.
// The reason for not making them a normal method is that many of the
// implementations seem to let you set access control and find submissions
// from a reference, without needing the actual object. So doing it this
// way could save some database activity
// access control
public boolean addEntityControl(String siteId, String groupId) throws IOException {
// not used for BLTI, control is done entirely within LB
return false;
}
public boolean removeEntityControl(String siteId, String groupId) throws IOException {
return false;
}
// submission
// do we need the data from submission?
public boolean needSubmission(){
return false;
}
public LessonSubmission getSubmission(String userId) {
// students don't have submissions to BLTI
return null;
}
// we can do this for real, but the API will cause us to get all the submissions in full, not just a count.
// I think it's cheaper to get the best assessment, since we don't actually care whether it's 1 or >= 1.
public int getSubmissionCount(String user) {
return 0;
}
// URL to create a new item. Normally called from the generic entity, not a specific one
// can't be null
public List<UrlItem> createNewUrls(SimplePageBean bean) {
return createNewUrls(bean, null);
}
public List<UrlItem> createNewUrls(SimplePageBean bean, Integer bltiToolId) {
ArrayList<UrlItem> list = new ArrayList<UrlItem>();
String toolId = bean.getCurrentTool("sakai.siteinfo");
if ( ltiService == null || toolId == null || returnUrl == null ) return list;
// Retrieve all tools
String search = null;
if (bltiToolId != null)
search = "lti_tools.id=" + bltiToolId;
List<Map<String,Object>> tools = ltiService.getTools(search,null,0,0);
for ( Map<String,Object> tool : tools ) {
String url = ServerConfigurationService.getToolUrl() + "/" + toolId + "/sakai.basiclti.admin.helper.helper?panel=ContentConfig&tool_id="
+ tool.get(LTIService.LTI_ID) + "&returnUrl=" + URLEncoder.encode(returnUrl);
list.add(new UrlItem(url, (String) tool.get(LTIService.LTI_TITLE)));
}
String url = ServerConfigurationService.getToolUrl() + "/" + toolId + "/sakai.basiclti.admin.helper.helper?panel=Main" +
"&returnUrl=" + URLEncoder.encode(returnUrl);
list.add(new UrlItem(url, messageLocator.getMessage("simplepage.create_blti")));
return list;
}
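	// Example of the helper URL shape built above (placement and tool_id values
	// are hypothetical; the prefix comes from ServerConfigurationService.getToolUrl()):
	//   <toolUrl>/<placementId>/sakai.basiclti.admin.helper.helper?panel=ContentConfig&tool_id=42&returnUrl=<encoded>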
public boolean isPopUp() {
loadContent();
if (content == null)
return false;
Long newPage = getLong(content.get(LTIService.LTI_NEWPAGE));
return (newPage == 1) ;
}
public int frameSize() {
loadContent();
if ( content == null ) return -1;
Long newPage = getLong(content.get(LTIService.LTI_FRAMEHEIGHT));
return newPage.intValue();
}
// URL to edit an existing entity.
// Can be null if we can't get one or it isn't needed
public String editItemUrl(SimplePageBean bean) {
String toolId = bean.getCurrentTool("sakai.siteinfo");
if ( toolId == null ) return null;
return editItemUrl(toolId);
}
public String editItemUrl(String toolId) {
if ( toolId == null ) return null;
loadContent();
if (content == null)
return null;
String url = ServerConfigurationService.getToolUrl() + "/" + toolId + "/sakai.basiclti.admin.helper.helper?panel=ContentConfig&id=" +
content.get(LTIService.LTI_ID);
if ( returnUrl != null ) {
url = url + "&returnUrl=" + URLEncoder.encode(returnUrl);
} else {
url = url + "&returnUrl=about:blank";
}
return url;
}
// for most entities editItem is enough, however tests allow separate editing of
// contents and settings. This will be null except in that situation
public String editItemSettingsUrl(SimplePageBean bean) {
return null;
}
public boolean objectExists() {
loadContent();
return content != null;
}
public boolean notPublished(String ref) {
return false;
}
public boolean notPublished() {
return !objectExists();
}
// return the list of groups if the item is only accessible to specific groups
// null if it's accessible to the whole site.
public Collection<String> getGroups(boolean nocache) {
// done entirely within LB, this item type is not group-aware
return null;
}
// set the item to be accessible only to the specific groups.
// null to make it accessible to the whole site
public void setGroups(Collection<String> groups) {
// not group aware
}
public String doImportTool(String launchUrl, String bltiTitle, String strXml, String custom)
{
if ( ltiService == null ) return null;
String toolBaseUrl = launchUrl;
int pos = launchUrl.indexOf('?');
if ( pos > 1 ) {
toolBaseUrl = launchUrl.substring(0,pos);
}
// Look for a tool that is a perfect match, and fall back
//
Map<String,Object> theTool = null;
Map<String,Object> theBaseTool = null;
List<Map<String,Object>> tools = ltiService.getTools(null,null,0,0);
for ( Map<String,Object> tool : tools ) {
String toolLaunch = (String) tool.get(LTIService.LTI_LAUNCH);
if ( toolLaunch.equals(launchUrl) ) {
theTool = tool;
break;
}
if ( theBaseTool == null && toolLaunch.equals(toolBaseUrl) ) {
theBaseTool = tool;
}
}
if ( theTool == null && theBaseTool != null ) theTool = theBaseTool;
if ( theTool == null ) {
Properties props = new Properties ();
props.setProperty(LTIService.LTI_LAUNCH,toolBaseUrl);
if ( toolBaseUrl.equals(launchUrl) ) {
props.setProperty(LTIService.LTI_TITLE, bltiTitle);
} else {
props.setProperty(LTIService.LTI_TITLE, toolBaseUrl);
}
props.setProperty(LTI_PAGETITLE, bltiTitle);
props.setProperty(LTIService.LTI_CONSUMERKEY, LTIService.LTI_SECRET_INCOMPLETE);
props.setProperty(LTIService.LTI_SECRET, LTIService.LTI_SECRET_INCOMPLETE);
props.setProperty(LTIService.LTI_ALLOWLAUNCH, "1");
props.setProperty(LTIService.LTI_ALLOWCUSTOM, "1");
props.setProperty(LTIService.LTI_ALLOWTITLE, "1");
props.setProperty(LTIService.LTI_ALLOWPAGETITLE, "1");
props.setProperty(LTIService.LTI_ALLOWOUTCOMES, "1");
props.setProperty(LTIService.LTI_SENDNAME, "1");
props.setProperty(LTIService.LTI_SENDEMAILADDR, "1");
props.setProperty(LTIService.LTI_XMLIMPORT,strXml);
if (custom != null)
props.setProperty(LTIService.LTI_CUSTOM, custom);
Object result = ltiService.insertTool(props);
if ( result instanceof String ) {
log.info("Could not insert tool - "+result);
}
if ( result instanceof Long ) theTool = ltiService.getTool((Long) result);
}
Map<String,Object> theContent = null;
Long contentKey = null;
if ( theTool != null ) {
Properties props = new Properties ();
props.setProperty(LTIService.LTI_TOOL_ID,getLong(theTool.get(LTIService.LTI_ID)).toString());
props.setProperty(LTIService.LTI_TITLE, bltiTitle);
props.setProperty( LTI_PAGETITLE, bltiTitle);
props.setProperty(LTIService.LTI_LAUNCH,launchUrl);
props.setProperty(LTIService.LTI_XMLIMPORT,strXml);
if ( custom != null ) props.setProperty(LTIService.LTI_CUSTOM,custom);
Object result = ltiService.insertContent(props);
if ( result instanceof String ) {
log.info("Could not insert content - "+result);
}
if ( result instanceof Long ) theContent = ltiService.getContent((Long) result);
}
String sakaiId = null;
if ( theContent != null ) {
sakaiId = "/blti/" + theContent.get(LTIService.LTI_ID);
log.info("Adding LTI content "+sakaiId);
}
return sakaiId;
}
	// TODO: Could we get simplePageBean populated here and not build our own getter?
public String getCurrentTool(String commonToolId) {
try {
String currentSiteId = ToolManager.getCurrentPlacement().getContext();
Site site = SiteService.getSite(currentSiteId);
ToolConfiguration toolConfig = site.getToolForCommonId(commonToolId);
if (toolConfig == null) return null;
return toolConfig.getId();
} catch (Exception e) {
return null;
}
}
public Long getLong(Object key) {
Long retval = getLongNull(key);
if (retval != null)
return retval;
return new Long(-1);
}
public Long getLongNull(Object key) {
if (key == null)
return null;
if (key instanceof Number)
return new Long(((Number) key).longValue());
if (key instanceof String) {
try {
return new Long((String) key);
} catch (Exception e) {
return null;
}
}
return null;
}
public String getObjectId(){
return null;
}
public String findObject(String objectid, Map<String,String>objectMap, String siteid) {
if (nextEntity != null)
return nextEntity.findObject(objectid, objectMap, siteid);
return null;
}
public String getSiteId() {
loadContent();
if ( content == null ) return null;
return (String) content.get(LTIService.LTI_SITE_ID);
}
}
|
apache-2.0
|
joachimhs/Montric
|
Montric.Api/src/main/java/org/eurekaj/api/util/ListToString.java
|
3284
|
package org.eurekaj.api.util;
import java.util.ArrayList;
import java.util.List;
public class ListToString {
public static String convertFromArray(Object[] array, String delimeter) {
if (array.length == 0) {
return "";
}
StringBuffer sb = new StringBuffer();
sb.append("[");
for (int i = 0; i < array.length; i++) {
if (i > 0) {
sb.append(delimeter);
}
if (array[i] instanceof String) {
sb.append("'").append(array[i]).append("'");
} else {
sb.append(array[i]);
}
}
sb.append("]");
return sb.toString();
}
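    // Example (derived from the code above): convertFromArray(new Object[]{"a", 1}, ",")
    // returns "['a',1]"; strings are single-quoted, other values use toString().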
public static String convertFromListToJsonString(List<String> list, String delimeter) {
if (list.size() == 0) {
return "";
}
StringBuffer sb = new StringBuffer();
sb.append("[");
for (int i = 0; i < list.size(); i++) {
if (i > 0) {
sb.append(delimeter);
}
if (list.get(i) instanceof String) {
sb.append("\"").append(list.get(i)).append("\"");
} else {
sb.append(list.get(i));
}
}
sb.append("]");
return sb.toString();
}
public static String convertToCassandraMap(Object[] array) {
if (array.length == 0) {
return "";
}
StringBuffer sb = new StringBuffer();
sb.append("{");
for (int i = 0; i < array.length; i++) {
if (array[i] != null) {
if (i > 0) {
sb.append(",");
}
sb.append(i).append(":");
if (array[i] instanceof String) {
sb.append("'").append(array[i]).append("'");
} else {
sb.append(array[i]);
}
}
}
sb.append("}");
return sb.toString();
}
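    // Example: convertToCassandraMap(new Object[]{"x", null, 2}) returns
    // "{0:'x',2:2}"; null slots are skipped while indices are preserved. Note a
    // quirk: if index 0 is null, the first appended entry still gets a leading comma.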
public static String[] convertToArray(String listString, String delimeter) {
return listString.split(delimeter);
}
public static String convertFromList(List<String> list, String delimeter) {
if (list == null || list.isEmpty()) {
return "";
}
StringBuffer sb = new StringBuffer();
for (int i = 0; i < list.size(); i++) {
if (i > 0) {
sb.append(delimeter);
}
sb.append(list.get(i));
}
return sb.toString();
}
public static List<String> convertToList(String listString, String delimeter) {
List<String> retList = new ArrayList<String>();
for (String item : listString.split(delimeter)) {
retList.add(item);
}
return retList;
}
public static String convertFromCassandraList(List<String> list, String delimeter) {
if (list == null || list.isEmpty()) {
return "[]";
}
StringBuffer sb = new StringBuffer();
sb.append("[");
for (int i = 0; i < list.size(); i++) {
if (i > 0) {
sb.append(delimeter);
}
sb.append("'").append(list.get(i)).append("'");
}
sb.append("]");
return sb.toString();
}
}
|
apache-2.0
|
grundprinzip/Impala
|
thirdparty/hive-0.13.1-cdh5.4.0-SNAPSHOT/src/metastore/src/test/org/apache/hadoop/hive/metastore/txn/TestCompactionTxnHandler.java
|
16638
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.metastore.txn;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.*;
import org.apache.log4j.Level;
import org.apache.log4j.LogManager;
import org.junit.After;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import static junit.framework.Assert.*;
/**
* Tests for TxnHandler.
*/
public class TestCompactionTxnHandler {
private HiveConf conf = new HiveConf();
private CompactionTxnHandler txnHandler;
public TestCompactionTxnHandler() throws Exception {
TxnDbUtil.setConfValues(conf);
LogManager.getLogger(TxnHandler.class.getName()).setLevel(Level.DEBUG);
tearDown();
}
@Test
public void testFindNextToCompact() throws Exception {
CompactionRequest rqst = new CompactionRequest("foo", "bar", CompactionType.MINOR);
rqst.setPartitionname("ds=today");
txnHandler.compact(rqst);
long now = System.currentTimeMillis();
CompactionInfo ci = txnHandler.findNextToCompact("fred");
assertNotNull(ci);
assertEquals("foo", ci.dbname);
assertEquals("bar", ci.tableName);
assertEquals("ds=today", ci.partName);
assertEquals(CompactionType.MINOR, ci.type);
assertNull(ci.runAs);
assertNull(txnHandler.findNextToCompact("fred"));
txnHandler.setRunAs(ci.id, "bob");
ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
List<ShowCompactResponseElement> compacts = rsp.getCompacts();
assertEquals(1, compacts.size());
ShowCompactResponseElement c = compacts.get(0);
assertEquals("foo", c.getDbname());
assertEquals("bar", c.getTablename());
assertEquals("ds=today", c.getPartitionname());
assertEquals(CompactionType.MINOR, c.getType());
assertEquals("working", c.getState());
assertTrue(c.getStart() - 5000 < now && c.getStart() + 5000 > now);
assertEquals("fred", c.getWorkerid());
assertEquals("bob", c.getRunAs());
}
@Test
public void testFindNextToCompact2() throws Exception {
CompactionRequest rqst = new CompactionRequest("foo", "bar", CompactionType.MINOR);
rqst.setPartitionname("ds=today");
txnHandler.compact(rqst);
rqst = new CompactionRequest("foo", "bar", CompactionType.MINOR);
rqst.setPartitionname("ds=yesterday");
txnHandler.compact(rqst);
long now = System.currentTimeMillis();
boolean expectToday = false;
CompactionInfo ci = txnHandler.findNextToCompact("fred");
assertNotNull(ci);
assertEquals("foo", ci.dbname);
assertEquals("bar", ci.tableName);
if ("ds=today".equals(ci.partName)) expectToday = false;
else if ("ds=yesterday".equals(ci.partName)) expectToday = true;
else fail("partition name should have been today or yesterday but was " + ci.partName);
assertEquals(CompactionType.MINOR, ci.type);
ci = txnHandler.findNextToCompact("fred");
assertNotNull(ci);
assertEquals("foo", ci.dbname);
assertEquals("bar", ci.tableName);
if (expectToday) assertEquals("ds=today", ci.partName);
else assertEquals("ds=yesterday", ci.partName);
assertEquals(CompactionType.MINOR, ci.type);
assertNull(txnHandler.findNextToCompact("fred"));
ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
List<ShowCompactResponseElement> compacts = rsp.getCompacts();
assertEquals(2, compacts.size());
for (ShowCompactResponseElement e : compacts) {
assertEquals("working", e.getState());
assertTrue(e.getStart() - 5000 < now && e.getStart() + 5000 > now);
assertEquals("fred", e.getWorkerid());
}
}
@Test
public void testFindNextToCompactNothingToCompact() throws Exception {
assertNull(txnHandler.findNextToCompact("fred"));
}
@Test
public void testMarkCompacted() throws Exception {
CompactionRequest rqst = new CompactionRequest("foo", "bar", CompactionType.MINOR);
rqst.setPartitionname("ds=today");
txnHandler.compact(rqst);
CompactionInfo ci = txnHandler.findNextToCompact("fred");
assertNotNull(ci);
txnHandler.markCompacted(ci);
assertNull(txnHandler.findNextToCompact("fred"));
ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
List<ShowCompactResponseElement> compacts = rsp.getCompacts();
assertEquals(1, compacts.size());
ShowCompactResponseElement c = compacts.get(0);
assertEquals("foo", c.getDbname());
assertEquals("bar", c.getTablename());
assertEquals("ds=today", c.getPartitionname());
assertEquals(CompactionType.MINOR, c.getType());
assertEquals("ready for cleaning", c.getState());
assertNull(c.getWorkerid());
}
@Test
public void testFindNextToClean() throws Exception {
CompactionRequest rqst = new CompactionRequest("foo", "bar", CompactionType.MINOR);
rqst.setPartitionname("ds=today");
txnHandler.compact(rqst);
assertEquals(0, txnHandler.findReadyToClean().size());
CompactionInfo ci = txnHandler.findNextToCompact("fred");
assertNotNull(ci);
assertEquals(0, txnHandler.findReadyToClean().size());
txnHandler.markCompacted(ci);
assertNull(txnHandler.findNextToCompact("fred"));
List<CompactionInfo> toClean = txnHandler.findReadyToClean();
assertEquals(1, toClean.size());
assertNull(txnHandler.findNextToCompact("fred"));
ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
List<ShowCompactResponseElement> compacts = rsp.getCompacts();
assertEquals(1, compacts.size());
ShowCompactResponseElement c = compacts.get(0);
assertEquals("foo", c.getDbname());
assertEquals("bar", c.getTablename());
assertEquals("ds=today", c.getPartitionname());
assertEquals(CompactionType.MINOR, c.getType());
assertEquals("ready for cleaning", c.getState());
assertNull(c.getWorkerid());
}
@Test
public void testMarkCleaned() throws Exception {
CompactionRequest rqst = new CompactionRequest("foo", "bar", CompactionType.MINOR);
rqst.setPartitionname("ds=today");
txnHandler.compact(rqst);
assertEquals(0, txnHandler.findReadyToClean().size());
CompactionInfo ci = txnHandler.findNextToCompact("fred");
assertNotNull(ci);
assertEquals(0, txnHandler.findReadyToClean().size());
txnHandler.markCompacted(ci);
assertNull(txnHandler.findNextToCompact("fred"));
List<CompactionInfo> toClean = txnHandler.findReadyToClean();
assertEquals(1, toClean.size());
assertNull(txnHandler.findNextToCompact("fred"));
txnHandler.markCleaned(ci);
assertNull(txnHandler.findNextToCompact("fred"));
assertEquals(0, txnHandler.findReadyToClean().size());
ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
assertNull(rsp.getCompacts());
}
@Test
public void testRevokeFromLocalWorkers() throws Exception {
CompactionRequest rqst = new CompactionRequest("foo", "bar", CompactionType.MINOR);
txnHandler.compact(rqst);
rqst = new CompactionRequest("foo", "baz", CompactionType.MINOR);
txnHandler.compact(rqst);
rqst = new CompactionRequest("foo", "bazzoo", CompactionType.MINOR);
txnHandler.compact(rqst);
assertNotNull(txnHandler.findNextToCompact("fred-193892"));
assertNotNull(txnHandler.findNextToCompact("bob-193892"));
assertNotNull(txnHandler.findNextToCompact("fred-193893"));
txnHandler.revokeFromLocalWorkers("fred");
ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
List<ShowCompactResponseElement> compacts = rsp.getCompacts();
assertEquals(3, compacts.size());
boolean sawWorkingBob = false;
int initiatedCount = 0;
for (ShowCompactResponseElement c : compacts) {
if (c.getState().equals("working")) {
assertEquals("bob-193892", c.getWorkerid());
sawWorkingBob = true;
} else if (c.getState().equals("initiated")) {
initiatedCount++;
} else {
fail("Unexpected state");
}
}
assertTrue(sawWorkingBob);
assertEquals(2, initiatedCount);
}
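  // As exercised above, revokeFromLocalWorkers("fred") returns every entry
  // claimed by a worker id carrying the "fred-" host prefix to the "initiated"
  // state, while workers on other hosts ("bob-193892") keep their entries.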
@Test
public void testRevokeTimedOutWorkers() throws Exception {
CompactionRequest rqst = new CompactionRequest("foo", "bar", CompactionType.MINOR);
txnHandler.compact(rqst);
rqst = new CompactionRequest("foo", "baz", CompactionType.MINOR);
txnHandler.compact(rqst);
assertNotNull(txnHandler.findNextToCompact("fred-193892"));
Thread.sleep(200);
assertNotNull(txnHandler.findNextToCompact("fred-193892"));
txnHandler.revokeTimedoutWorkers(100);
ShowCompactResponse rsp = txnHandler.showCompact(new ShowCompactRequest());
List<ShowCompactResponseElement> compacts = rsp.getCompacts();
assertEquals(2, compacts.size());
boolean sawWorking = false, sawInitiated = false;
for (ShowCompactResponseElement c : compacts) {
if (c.getState().equals("working")) sawWorking = true;
else if (c.getState().equals("initiated")) sawInitiated = true;
else fail("Unexpected state");
}
assertTrue(sawWorking);
assertTrue(sawInitiated);
}
@Test
public void testLockNoWait() throws Exception {
// Test that we can acquire the lock alone
LockComponent comp = new LockComponent(LockType.EXCLUSIVE, LockLevel.DB,
"mydb");
comp.setTablename("mytable");
comp.setPartitionname("mypartition");
List<LockComponent> components = new ArrayList<LockComponent>(1);
components.add(comp);
LockRequest req = new LockRequest(components, "me", "localhost");
LockResponse res = txnHandler.lockNoWait(req);
assertTrue(res.getState() == LockState.ACQUIRED);
txnHandler.unlock(new UnlockRequest(res.getLockid()));
// test that another lock blocks it
comp = new LockComponent(LockType.SHARED_READ, LockLevel.DB,
"mydb");
comp.setTablename("mytable");
comp.setPartitionname("mypartition");
components.clear();
components.add(comp);
req = new LockRequest(components, "me", "localhost");
res = txnHandler.lock(req);
assertEquals(LockState.ACQUIRED, res.getState());
comp = new LockComponent(LockType.EXCLUSIVE, LockLevel.DB,
"mydb");
comp.setTablename("mytable");
comp.setPartitionname("mypartition");
components.clear();
components.add(comp);
req = new LockRequest(components, "me", "localhost");
res = txnHandler.lockNoWait(req);
assertEquals(LockState.NOT_ACQUIRED, res.getState());
assertEquals(1, TxnDbUtil.findNumCurrentLocks());
}
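  // As shown above: an EXCLUSIVE lock acquired alone succeeds, but once a
  // SHARED_READ lock covers the same db/table/partition, lockNoWait() reports
  // NOT_ACQUIRED immediately rather than leaving the request waiting.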
@Test
public void testFindPotentialCompactions() throws Exception {
// Test that committing unlocks
long txnid = openTxn();
LockComponent comp = new LockComponent(LockType.SHARED_WRITE, LockLevel.DB,
"mydb");
comp.setTablename("mytable");
List<LockComponent> components = new ArrayList<LockComponent>(1);
components.add(comp);
comp = new LockComponent(LockType.SHARED_WRITE, LockLevel.DB,
"mydb");
comp.setTablename("yourtable");
comp.setPartitionname("mypartition");
components.add(comp);
LockRequest req = new LockRequest(components, "me", "localhost");
req.setTxnid(txnid);
LockResponse res = txnHandler.lock(req);
assertTrue(res.getState() == LockState.ACQUIRED);
txnHandler.commitTxn(new CommitTxnRequest(txnid));
assertEquals(0, txnHandler.numLocksInLockTable());
Set<CompactionInfo> potentials = txnHandler.findPotentialCompactions(100);
assertEquals(2, potentials.size());
boolean sawMyTable = false, sawYourTable = false;
for (CompactionInfo ci : potentials) {
sawMyTable |= (ci.dbname.equals("mydb") && ci.tableName.equals("mytable") &&
ci.partName == null);
sawYourTable |= (ci.dbname.equals("mydb") && ci.tableName.equals("yourtable") &&
ci.partName.equals("mypartition"));
}
assertTrue(sawMyTable);
assertTrue(sawYourTable);
}
  // Tests that markCleaned() also cleans up the corresponding TXNS and TXN_COMPONENTS rows.
@Test
public void testMarkCleanedCleansTxnsAndTxnComponents()
throws Exception {
long txnid = openTxn();
LockComponent comp = new LockComponent(LockType.SHARED_WRITE, LockLevel.DB,
"mydb");
comp.setTablename("mytable");
List<LockComponent> components = new ArrayList<LockComponent>(1);
components.add(comp);
LockRequest req = new LockRequest(components, "me", "localhost");
req.setTxnid(txnid);
LockResponse res = txnHandler.lock(req);
assertTrue(res.getState() == LockState.ACQUIRED);
txnHandler.abortTxn(new AbortTxnRequest(txnid));
txnid = openTxn();
comp = new LockComponent(LockType.SHARED_WRITE, LockLevel.DB, "mydb");
comp.setTablename("yourtable");
components = new ArrayList<LockComponent>(1);
components.add(comp);
req = new LockRequest(components, "me", "localhost");
req.setTxnid(txnid);
res = txnHandler.lock(req);
assertTrue(res.getState() == LockState.ACQUIRED);
txnHandler.abortTxn(new AbortTxnRequest(txnid));
txnid = openTxn();
comp = new LockComponent(LockType.SHARED_WRITE, LockLevel.DB, "mydb");
comp.setTablename("foo");
comp.setPartitionname("bar");
components = new ArrayList<LockComponent>(1);
components.add(comp);
req = new LockRequest(components, "me", "localhost");
req.setTxnid(txnid);
res = txnHandler.lock(req);
assertTrue(res.getState() == LockState.ACQUIRED);
comp = new LockComponent(LockType.SHARED_WRITE, LockLevel.DB, "mydb");
comp.setTablename("foo");
comp.setPartitionname("baz");
components = new ArrayList<LockComponent>(1);
components.add(comp);
req = new LockRequest(components, "me", "localhost");
req.setTxnid(txnid);
res = txnHandler.lock(req);
assertTrue(res.getState() == LockState.ACQUIRED);
txnHandler.abortTxn(new AbortTxnRequest(txnid));
CompactionInfo ci = new CompactionInfo();
// Now clean them and check that they are removed from the count.
CompactionRequest rqst = new CompactionRequest("mydb", "mytable", CompactionType.MAJOR);
txnHandler.compact(rqst);
assertEquals(0, txnHandler.findReadyToClean().size());
ci = txnHandler.findNextToCompact("fred");
assertNotNull(ci);
txnHandler.markCompacted(ci);
List<CompactionInfo> toClean = txnHandler.findReadyToClean();
assertEquals(1, toClean.size());
txnHandler.markCleaned(ci);
// Check that we are cleaning up the empty aborted transactions
GetOpenTxnsResponse txnList = txnHandler.getOpenTxns();
assertEquals(3, txnList.getOpen_txnsSize());
txnHandler.cleanEmptyAbortedTxns();
txnList = txnHandler.getOpenTxns();
assertEquals(2, txnList.getOpen_txnsSize());
rqst = new CompactionRequest("mydb", "foo", CompactionType.MAJOR);
rqst.setPartitionname("bar");
txnHandler.compact(rqst);
assertEquals(0, txnHandler.findReadyToClean().size());
ci = txnHandler.findNextToCompact("fred");
assertNotNull(ci);
txnHandler.markCompacted(ci);
toClean = txnHandler.findReadyToClean();
assertEquals(1, toClean.size());
txnHandler.markCleaned(ci);
txnHandler.openTxns(new OpenTxnRequest(1, "me", "localhost"));
txnHandler.cleanEmptyAbortedTxns();
txnList = txnHandler.getOpenTxns();
assertEquals(3, txnList.getOpen_txnsSize());
}
@Before
public void setUp() throws Exception {
TxnDbUtil.prepDb();
txnHandler = new CompactionTxnHandler(conf);
}
@After
public void tearDown() throws Exception {
TxnDbUtil.cleanDb();
}
private long openTxn() throws MetaException {
List<Long> txns = txnHandler.openTxns(new OpenTxnRequest(1, "me", "localhost")).getTxn_ids();
return txns.get(0);
}
}
|
apache-2.0
|
Photobucket/Solbase-Lucene
|
src/test/org/apache/lucene/search/spans/TestBasics.java
|
16139
|
package org.apache.lucene.search.spans;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import org.apache.lucene.analysis.SimpleAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.CheckHits;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.PhraseQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.QueryUtils;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.English;
import org.apache.lucene.util.LuceneTestCase;
/**
* Tests basic search capabilities.
*
* <p>Uses a collection of 1000 documents, each the english rendition of their
* document number. For example, the document numbered 333 has text "three
* hundred thirty three".
*
* <p>Tests are each a single query, and its hits are checked to ensure that
* all and only the correct documents are returned, thus providing end-to-end
* testing of the indexing and search code.
*
*/
public class TestBasics extends LuceneTestCase {
private IndexSearcher searcher;
public void setUp() throws Exception {
super.setUp();
RAMDirectory directory = new RAMDirectory();
IndexWriter writer = new IndexWriter(directory, new SimpleAnalyzer(), true,
IndexWriter.MaxFieldLength.LIMITED);
//writer.infoStream = System.out;
for (int i = 0; i < 1000; i++) {
Document doc = new Document();
doc.add(new Field("field", English.intToEnglish(i), Field.Store.YES, Field.Index.ANALYZED));
writer.addDocument(doc);
}
writer.close();
searcher = new IndexSearcher(directory);
}
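  // Illustrative sketch, not part of the original suite: the class javadoc's
  // example document 333 has the text "three hundred thirty three", so an exact
  // phrase query over the full rendition should match that document and no other.
  public void testPhraseThreeHundredThirtyThree() throws Exception {
    PhraseQuery query = new PhraseQuery();
    query.add(new Term("field", "three"));
    query.add(new Term("field", "hundred"));
    query.add(new Term("field", "thirty"));
    query.add(new Term("field", "three"));
    checkHits(query, new int[] {333});
  }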
public void testTerm() throws Exception {
Query query = new TermQuery(new Term("field", "seventy"));
checkHits(query, new int[]
{70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 170, 171, 172, 173, 174, 175,
176, 177, 178, 179, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279,
370, 371, 372, 373, 374, 375, 376, 377, 378, 379, 470, 471, 472, 473,
474, 475, 476, 477, 478, 479, 570, 571, 572, 573, 574, 575, 576, 577,
578, 579, 670, 671, 672, 673, 674, 675, 676, 677, 678, 679, 770, 771,
772, 773, 774, 775, 776, 777, 778, 779, 870, 871, 872, 873, 874, 875,
876, 877, 878, 879, 970, 971, 972, 973, 974, 975, 976, 977, 978, 979});
}
public void testTerm2() throws Exception {
Query query = new TermQuery(new Term("field", "seventish"));
checkHits(query, new int[] {});
}
public void testPhrase() throws Exception {
PhraseQuery query = new PhraseQuery();
query.add(new Term("field", "seventy"));
query.add(new Term("field", "seven"));
checkHits(query, new int[]
{77, 177, 277, 377, 477, 577, 677, 777, 877, 977});
}
public void testPhrase2() throws Exception {
PhraseQuery query = new PhraseQuery();
query.add(new Term("field", "seventish"));
query.add(new Term("field", "sevenon"));
checkHits(query, new int[] {});
}
public void testBoolean() throws Exception {
BooleanQuery query = new BooleanQuery();
query.add(new TermQuery(new Term("field", "seventy")), BooleanClause.Occur.MUST);
query.add(new TermQuery(new Term("field", "seven")), BooleanClause.Occur.MUST);
checkHits(query, new int[]
{77, 777, 177, 277, 377, 477, 577, 677, 770, 771, 772, 773, 774, 775,
776, 778, 779, 877, 977});
}
public void testBoolean2() throws Exception {
BooleanQuery query = new BooleanQuery();
query.add(new TermQuery(new Term("field", "sevento")), BooleanClause.Occur.MUST);
query.add(new TermQuery(new Term("field", "sevenly")), BooleanClause.Occur.MUST);
checkHits(query, new int[] {});
}
public void testSpanNearExact() throws Exception {
SpanTermQuery term1 = new SpanTermQuery(new Term("field", "seventy"));
SpanTermQuery term2 = new SpanTermQuery(new Term("field", "seven"));
SpanNearQuery query = new SpanNearQuery(new SpanQuery[] {term1, term2},
0, true);
checkHits(query, new int[]
{77, 177, 277, 377, 477, 577, 677, 777, 877, 977});
assertTrue(searcher.explain(query, 77).getValue() > 0.0f);
assertTrue(searcher.explain(query, 977).getValue() > 0.0f);
QueryUtils.check(term1);
QueryUtils.check(term2);
QueryUtils.checkUnequal(term1,term2);
}
public void testSpanNearUnordered() throws Exception {
SpanTermQuery term1 = new SpanTermQuery(new Term("field", "nine"));
SpanTermQuery term2 = new SpanTermQuery(new Term("field", "six"));
SpanNearQuery query = new SpanNearQuery(new SpanQuery[] {term1, term2},
4, false);
checkHits(query, new int[]
{609, 629, 639, 649, 659, 669, 679, 689, 699,
906, 926, 936, 946, 956, 966, 976, 986, 996});
}
public void testSpanNearOrdered() throws Exception {
SpanTermQuery term1 = new SpanTermQuery(new Term("field", "nine"));
SpanTermQuery term2 = new SpanTermQuery(new Term("field", "six"));
SpanNearQuery query = new SpanNearQuery(new SpanQuery[] {term1, term2},
4, true);
checkHits(query, new int[]
{906, 926, 936, 946, 956, 966, 976, 986, 996});
}
public void testSpanNot() throws Exception {
SpanTermQuery term1 = new SpanTermQuery(new Term("field", "eight"));
SpanTermQuery term2 = new SpanTermQuery(new Term("field", "one"));
SpanNearQuery near = new SpanNearQuery(new SpanQuery[] {term1, term2},
4, true);
SpanTermQuery term3 = new SpanTermQuery(new Term("field", "forty"));
SpanNotQuery query = new SpanNotQuery(near, term3);
checkHits(query, new int[]
{801, 821, 831, 851, 861, 871, 881, 891});
assertTrue(searcher.explain(query, 801).getValue() > 0.0f);
assertTrue(searcher.explain(query, 891).getValue() > 0.0f);
}
public void testSpanWithMultipleNotSingle() throws Exception {
SpanTermQuery term1 = new SpanTermQuery(new Term("field", "eight"));
SpanTermQuery term2 = new SpanTermQuery(new Term("field", "one"));
SpanNearQuery near = new SpanNearQuery(new SpanQuery[] {term1, term2},
4, true);
SpanTermQuery term3 = new SpanTermQuery(new Term("field", "forty"));
SpanOrQuery or = new SpanOrQuery(new SpanQuery[] {term3});
SpanNotQuery query = new SpanNotQuery(near, or);
checkHits(query, new int[]
{801, 821, 831, 851, 861, 871, 881, 891});
assertTrue(searcher.explain(query, 801).getValue() > 0.0f);
assertTrue(searcher.explain(query, 891).getValue() > 0.0f);
}
public void testSpanWithMultipleNotMany() throws Exception {
SpanTermQuery term1 = new SpanTermQuery(new Term("field", "eight"));
SpanTermQuery term2 = new SpanTermQuery(new Term("field", "one"));
SpanNearQuery near = new SpanNearQuery(new SpanQuery[] {term1, term2},
4, true);
SpanTermQuery term3 = new SpanTermQuery(new Term("field", "forty"));
SpanTermQuery term4 = new SpanTermQuery(new Term("field", "sixty"));
SpanTermQuery term5 = new SpanTermQuery(new Term("field", "eighty"));
SpanOrQuery or = new SpanOrQuery(new SpanQuery[] {term3, term4, term5});
SpanNotQuery query = new SpanNotQuery(near, or);
checkHits(query, new int[]
{801, 821, 831, 851, 871, 891});
assertTrue(searcher.explain(query, 801).getValue() > 0.0f);
assertTrue(searcher.explain(query, 891).getValue() > 0.0f);
}
public void testNpeInSpanNearWithSpanNot() throws Exception {
SpanTermQuery term1 = new SpanTermQuery(new Term("field", "eight"));
SpanTermQuery term2 = new SpanTermQuery(new Term("field", "one"));
SpanNearQuery near = new SpanNearQuery(new SpanQuery[] {term1, term2},
4, true);
SpanTermQuery hun = new SpanTermQuery(new Term("field", "hundred"));
SpanTermQuery term3 = new SpanTermQuery(new Term("field", "forty"));
SpanNearQuery exclude = new SpanNearQuery(new SpanQuery[] {hun, term3},
1, true);
SpanNotQuery query = new SpanNotQuery(near, exclude);
checkHits(query, new int[]
{801, 821, 831, 851, 861, 871, 881, 891});
assertTrue(searcher.explain(query, 801).getValue() > 0.0f);
assertTrue(searcher.explain(query, 891).getValue() > 0.0f);
}
public void testNpeInSpanNearInSpanFirstInSpanNot() throws Exception {
int n = 5;
SpanTermQuery hun = new SpanTermQuery(new Term("field", "hundred"));
SpanTermQuery term40 = new SpanTermQuery(new Term("field", "forty"));
SpanTermQuery term40c = (SpanTermQuery)term40.clone();
SpanFirstQuery include = new SpanFirstQuery(term40, n);
SpanNearQuery near = new SpanNearQuery(new SpanQuery[]{hun, term40c},
n-1, true);
SpanFirstQuery exclude = new SpanFirstQuery(near, n-1);
SpanNotQuery q = new SpanNotQuery(include, exclude);
checkHits(q, new int[]{40,41,42,43,44,45,46,47,48,49});
}
public void testSpanFirst() throws Exception {
SpanTermQuery term1 = new SpanTermQuery(new Term("field", "five"));
SpanFirstQuery query = new SpanFirstQuery(term1, 1);
checkHits(query, new int[]
{5, 500, 501, 502, 503, 504, 505, 506, 507, 508, 509, 510, 511, 512, 513,
514, 515, 516, 517, 518, 519, 520, 521, 522, 523, 524, 525, 526, 527,
528, 529, 530, 531, 532, 533, 534, 535, 536, 537, 538, 539, 540, 541,
542, 543, 544, 545, 546, 547, 548, 549, 550, 551, 552, 553, 554, 555,
556, 557, 558, 559, 560, 561, 562, 563, 564, 565, 566, 567, 568, 569,
570, 571, 572, 573, 574, 575, 576, 577, 578, 579, 580, 581, 582, 583,
584, 585, 586, 587, 588, 589, 590, 591, 592, 593, 594, 595, 596, 597,
598, 599});
assertTrue(searcher.explain(query, 5).getValue() > 0.0f);
assertTrue(searcher.explain(query, 599).getValue() > 0.0f);
}
public void testSpanOr() throws Exception {
SpanTermQuery term1 = new SpanTermQuery(new Term("field", "thirty"));
SpanTermQuery term2 = new SpanTermQuery(new Term("field", "three"));
SpanNearQuery near1 = new SpanNearQuery(new SpanQuery[] {term1, term2},
0, true);
SpanTermQuery term3 = new SpanTermQuery(new Term("field", "forty"));
SpanTermQuery term4 = new SpanTermQuery(new Term("field", "seven"));
SpanNearQuery near2 = new SpanNearQuery(new SpanQuery[] {term3, term4},
0, true);
SpanOrQuery query = new SpanOrQuery(new SpanQuery[] {near1, near2});
checkHits(query, new int[]
{33, 47, 133, 147, 233, 247, 333, 347, 433, 447, 533, 547, 633, 647, 733,
747, 833, 847, 933, 947});
assertTrue(searcher.explain(query, 33).getValue() > 0.0f);
assertTrue(searcher.explain(query, 947).getValue() > 0.0f);
}
public void testSpanExactNested() throws Exception {
SpanTermQuery term1 = new SpanTermQuery(new Term("field", "three"));
SpanTermQuery term2 = new SpanTermQuery(new Term("field", "hundred"));
SpanNearQuery near1 = new SpanNearQuery(new SpanQuery[] {term1, term2},
0, true);
SpanTermQuery term3 = new SpanTermQuery(new Term("field", "thirty"));
SpanTermQuery term4 = new SpanTermQuery(new Term("field", "three"));
SpanNearQuery near2 = new SpanNearQuery(new SpanQuery[] {term3, term4},
0, true);
SpanNearQuery query = new SpanNearQuery(new SpanQuery[] {near1, near2},
0, true);
checkHits(query, new int[] {333});
assertTrue(searcher.explain(query, 333).getValue() > 0.0f);
}
public void testSpanNearOr() throws Exception {
SpanTermQuery t1 = new SpanTermQuery(new Term("field","six"));
SpanTermQuery t3 = new SpanTermQuery(new Term("field","seven"));
SpanTermQuery t5 = new SpanTermQuery(new Term("field","seven"));
SpanTermQuery t6 = new SpanTermQuery(new Term("field","six"));
SpanOrQuery to1 = new SpanOrQuery(new SpanQuery[] {t1, t3});
SpanOrQuery to2 = new SpanOrQuery(new SpanQuery[] {t5, t6});
SpanNearQuery query = new SpanNearQuery(new SpanQuery[] {to1, to2},
10, true);
checkHits(query, new int[]
{606, 607, 626, 627, 636, 637, 646, 647,
656, 657, 666, 667, 676, 677, 686, 687, 696, 697,
706, 707, 726, 727, 736, 737, 746, 747,
756, 757, 766, 767, 776, 777, 786, 787, 796, 797});
}
public void testSpanComplex1() throws Exception {
SpanTermQuery t1 = new SpanTermQuery(new Term("field","six"));
SpanTermQuery t2 = new SpanTermQuery(new Term("field","hundred"));
SpanNearQuery tt1 = new SpanNearQuery(new SpanQuery[] {t1, t2}, 0,true);
SpanTermQuery t3 = new SpanTermQuery(new Term("field","seven"));
SpanTermQuery t4 = new SpanTermQuery(new Term("field","hundred"));
SpanNearQuery tt2 = new SpanNearQuery(new SpanQuery[] {t3, t4}, 0,true);
SpanTermQuery t5 = new SpanTermQuery(new Term("field","seven"));
SpanTermQuery t6 = new SpanTermQuery(new Term("field","six"));
SpanOrQuery to1 = new SpanOrQuery(new SpanQuery[] {tt1, tt2});
SpanOrQuery to2 = new SpanOrQuery(new SpanQuery[] {t5, t6});
SpanNearQuery query = new SpanNearQuery(new SpanQuery[] {to1, to2},
100, true);
checkHits(query, new int[]
{606, 607, 626, 627, 636, 637, 646, 647,
656, 657, 666, 667, 676, 677, 686, 687, 696, 697,
706, 707, 726, 727, 736, 737, 746, 747,
756, 757, 766, 767, 776, 777, 786, 787, 796, 797});
}
public void testSpansSkipTo() throws Exception {
SpanTermQuery t1 = new SpanTermQuery(new Term("field", "seventy"));
SpanTermQuery t2 = new SpanTermQuery(new Term("field", "seventy"));
Spans s1 = t1.getSpans(searcher.getIndexReader());
Spans s2 = t2.getSpans(searcher.getIndexReader());
assertTrue(s1.next());
assertTrue(s2.next());
boolean hasMore = true;
do {
      hasMore = skipToAccordingToJavaDocs(s1, s1.doc());
assertEquals(hasMore, s2.skipTo(s2.doc()));
assertEquals(s1.doc(), s2.doc());
} while (hasMore);
}
/** Skips to the first match beyond the current, whose document number is
* greater than or equal to <i>target</i>. <p>Returns true iff there is such
* a match. <p>Behaves as if written: <pre>
* boolean skipTo(int target) {
* do {
* if (!next())
* return false;
* } while (target > doc());
* return true;
* }
* </pre>
*/
  private boolean skipToAccordingToJavaDocs(Spans s, int target)
      throws Exception {
do {
if (!s.next())
return false;
} while (target > s.doc());
return true;
}
private void checkHits(Query query, int[] results) throws IOException {
CheckHits.checkHits(query, "field", searcher, results);
}
}
|
apache-2.0
|
ened/ExoPlayer
|
library/smoothstreaming/src/main/java/com/google/android/exoplayer2/source/smoothstreaming/SsMediaPeriod.java
|
10634
|
/*
* Copyright (C) 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.source.smoothstreaming;
import androidx.annotation.Nullable;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.SeekParameters;
import com.google.android.exoplayer2.drm.DrmSessionEventListener;
import com.google.android.exoplayer2.drm.DrmSessionManager;
import com.google.android.exoplayer2.offline.StreamKey;
import com.google.android.exoplayer2.source.CompositeSequenceableLoaderFactory;
import com.google.android.exoplayer2.source.MediaPeriod;
import com.google.android.exoplayer2.source.MediaSourceEventListener;
import com.google.android.exoplayer2.source.SampleStream;
import com.google.android.exoplayer2.source.SequenceableLoader;
import com.google.android.exoplayer2.source.TrackGroup;
import com.google.android.exoplayer2.source.TrackGroupArray;
import com.google.android.exoplayer2.source.chunk.ChunkSampleStream;
import com.google.android.exoplayer2.source.smoothstreaming.manifest.SsManifest;
import com.google.android.exoplayer2.trackselection.ExoTrackSelection;
import com.google.android.exoplayer2.upstream.Allocator;
import com.google.android.exoplayer2.upstream.LoadErrorHandlingPolicy;
import com.google.android.exoplayer2.upstream.LoaderErrorThrower;
import com.google.android.exoplayer2.upstream.TransferListener;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.checkerframework.checker.nullness.compatqual.NullableType;
/** A SmoothStreaming {@link MediaPeriod}. */
/* package */ final class SsMediaPeriod
implements MediaPeriod, SequenceableLoader.Callback<ChunkSampleStream<SsChunkSource>> {
private final SsChunkSource.Factory chunkSourceFactory;
@Nullable private final TransferListener transferListener;
private final LoaderErrorThrower manifestLoaderErrorThrower;
private final DrmSessionManager drmSessionManager;
private final DrmSessionEventListener.EventDispatcher drmEventDispatcher;
private final LoadErrorHandlingPolicy loadErrorHandlingPolicy;
private final MediaSourceEventListener.EventDispatcher mediaSourceEventDispatcher;
private final Allocator allocator;
private final TrackGroupArray trackGroups;
private final CompositeSequenceableLoaderFactory compositeSequenceableLoaderFactory;
@Nullable private Callback callback;
private SsManifest manifest;
private ChunkSampleStream<SsChunkSource>[] sampleStreams;
private SequenceableLoader compositeSequenceableLoader;
public SsMediaPeriod(
SsManifest manifest,
SsChunkSource.Factory chunkSourceFactory,
@Nullable TransferListener transferListener,
CompositeSequenceableLoaderFactory compositeSequenceableLoaderFactory,
DrmSessionManager drmSessionManager,
DrmSessionEventListener.EventDispatcher drmEventDispatcher,
LoadErrorHandlingPolicy loadErrorHandlingPolicy,
MediaSourceEventListener.EventDispatcher mediaSourceEventDispatcher,
LoaderErrorThrower manifestLoaderErrorThrower,
Allocator allocator) {
this.manifest = manifest;
this.chunkSourceFactory = chunkSourceFactory;
this.transferListener = transferListener;
this.manifestLoaderErrorThrower = manifestLoaderErrorThrower;
this.drmSessionManager = drmSessionManager;
this.drmEventDispatcher = drmEventDispatcher;
this.loadErrorHandlingPolicy = loadErrorHandlingPolicy;
this.mediaSourceEventDispatcher = mediaSourceEventDispatcher;
this.allocator = allocator;
this.compositeSequenceableLoaderFactory = compositeSequenceableLoaderFactory;
trackGroups = buildTrackGroups(manifest, drmSessionManager);
sampleStreams = newSampleStreamArray(0);
compositeSequenceableLoader =
compositeSequenceableLoaderFactory.createCompositeSequenceableLoader(sampleStreams);
}
public void updateManifest(SsManifest manifest) {
this.manifest = manifest;
for (ChunkSampleStream<SsChunkSource> sampleStream : sampleStreams) {
sampleStream.getChunkSource().updateManifest(manifest);
}
callback.onContinueLoadingRequested(this);
}
public void release() {
for (ChunkSampleStream<SsChunkSource> sampleStream : sampleStreams) {
sampleStream.release();
}
callback = null;
}
// MediaPeriod implementation.
@Override
public void prepare(Callback callback, long positionUs) {
this.callback = callback;
callback.onPrepared(this);
}
@Override
public void maybeThrowPrepareError() throws IOException {
manifestLoaderErrorThrower.maybeThrowError();
}
@Override
public TrackGroupArray getTrackGroups() {
return trackGroups;
}
@Override
public long selectTracks(
@NullableType ExoTrackSelection[] selections,
boolean[] mayRetainStreamFlags,
@NullableType SampleStream[] streams,
boolean[] streamResetFlags,
long positionUs) {
ArrayList<ChunkSampleStream<SsChunkSource>> sampleStreamsList = new ArrayList<>();
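    // For each selection: release streams whose selection went away, keep and update
    // streams that may be retained, and build a new stream wherever a selection
    // exists without one.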
for (int i = 0; i < selections.length; i++) {
if (streams[i] != null) {
@SuppressWarnings("unchecked")
ChunkSampleStream<SsChunkSource> stream = (ChunkSampleStream<SsChunkSource>) streams[i];
if (selections[i] == null || !mayRetainStreamFlags[i]) {
stream.release();
streams[i] = null;
} else {
stream.getChunkSource().updateTrackSelection(selections[i]);
sampleStreamsList.add(stream);
}
}
if (streams[i] == null && selections[i] != null) {
ChunkSampleStream<SsChunkSource> stream = buildSampleStream(selections[i], positionUs);
sampleStreamsList.add(stream);
streams[i] = stream;
streamResetFlags[i] = true;
}
}
sampleStreams = newSampleStreamArray(sampleStreamsList.size());
sampleStreamsList.toArray(sampleStreams);
compositeSequenceableLoader =
compositeSequenceableLoaderFactory.createCompositeSequenceableLoader(sampleStreams);
return positionUs;
}
@Override
public List<StreamKey> getStreamKeys(List<ExoTrackSelection> trackSelections) {
List<StreamKey> streamKeys = new ArrayList<>();
for (int selectionIndex = 0; selectionIndex < trackSelections.size(); selectionIndex++) {
ExoTrackSelection trackSelection = trackSelections.get(selectionIndex);
int streamElementIndex = trackGroups.indexOf(trackSelection.getTrackGroup());
for (int i = 0; i < trackSelection.length(); i++) {
streamKeys.add(new StreamKey(streamElementIndex, trackSelection.getIndexInTrackGroup(i)));
}
}
return streamKeys;
}
@Override
public void discardBuffer(long positionUs, boolean toKeyframe) {
for (ChunkSampleStream<SsChunkSource> sampleStream : sampleStreams) {
sampleStream.discardBuffer(positionUs, toKeyframe);
}
}
@Override
public void reevaluateBuffer(long positionUs) {
compositeSequenceableLoader.reevaluateBuffer(positionUs);
}
@Override
public boolean continueLoading(long positionUs) {
return compositeSequenceableLoader.continueLoading(positionUs);
}
@Override
public boolean isLoading() {
return compositeSequenceableLoader.isLoading();
}
@Override
public long getNextLoadPositionUs() {
return compositeSequenceableLoader.getNextLoadPositionUs();
}
@Override
public long readDiscontinuity() {
return C.TIME_UNSET;
}
@Override
public long getBufferedPositionUs() {
return compositeSequenceableLoader.getBufferedPositionUs();
}
@Override
public long seekToUs(long positionUs) {
for (ChunkSampleStream<SsChunkSource> sampleStream : sampleStreams) {
sampleStream.seekToUs(positionUs);
}
return positionUs;
}
@Override
public long getAdjustedSeekPositionUs(long positionUs, SeekParameters seekParameters) {
for (ChunkSampleStream<SsChunkSource> sampleStream : sampleStreams) {
if (sampleStream.primaryTrackType == C.TRACK_TYPE_VIDEO) {
return sampleStream.getAdjustedSeekPositionUs(positionUs, seekParameters);
}
}
return positionUs;
}
// SequenceableLoader.Callback implementation.
@Override
public void onContinueLoadingRequested(ChunkSampleStream<SsChunkSource> sampleStream) {
callback.onContinueLoadingRequested(this);
}
// Private methods.
private ChunkSampleStream<SsChunkSource> buildSampleStream(
ExoTrackSelection selection, long positionUs) {
int streamElementIndex = trackGroups.indexOf(selection.getTrackGroup());
SsChunkSource chunkSource =
chunkSourceFactory.createChunkSource(
manifestLoaderErrorThrower, manifest, streamElementIndex, selection, transferListener);
return new ChunkSampleStream<>(
manifest.streamElements[streamElementIndex].type,
null,
null,
chunkSource,
this,
allocator,
positionUs,
drmSessionManager,
drmEventDispatcher,
loadErrorHandlingPolicy,
mediaSourceEventDispatcher);
}
private static TrackGroupArray buildTrackGroups(
SsManifest manifest, DrmSessionManager drmSessionManager) {
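    // Expose one TrackGroup per manifest stream element, annotating each format with
    // the crypto type reported for it by the DRM session manager.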
TrackGroup[] trackGroups = new TrackGroup[manifest.streamElements.length];
for (int i = 0; i < manifest.streamElements.length; i++) {
Format[] manifestFormats = manifest.streamElements[i].formats;
Format[] exposedFormats = new Format[manifestFormats.length];
for (int j = 0; j < manifestFormats.length; j++) {
Format manifestFormat = manifestFormats[j];
exposedFormats[j] =
manifestFormat.copyWithCryptoType(drmSessionManager.getCryptoType(manifestFormat));
}
trackGroups[i] = new TrackGroup(exposedFormats);
}
return new TrackGroupArray(trackGroups);
}
// We won't assign the array to a variable that erases the generic type, and then write into it.
@SuppressWarnings({"unchecked", "rawtypes"})
private static ChunkSampleStream<SsChunkSource>[] newSampleStreamArray(int length) {
return new ChunkSampleStream[length];
}
}
|
apache-2.0
|
8u1a/plaso
|
plaso/parsers/winreg_plugins/userassist.py
|
7898
|
# -*- coding: utf-8 -*-
"""This file contains the UserAssist Windows Registry plugin."""
import logging
import construct
from plaso.events import windows_events
from plaso.parsers import winreg
from plaso.parsers.winreg_plugins import interface
from plaso.winnt import environ_expand
from plaso.winnt import known_folder_ids
class UserAssistPlugin(interface.WindowsRegistryPlugin):
"""Plugin that parses an UserAssist key."""
NAME = u'userassist'
DESCRIPTION = u'Parser for User Assist Registry data.'
REG_TYPE = u'NTUSER'
REG_KEYS = [
(u'\\Software\\Microsoft\\Windows\\CurrentVersion\\Explorer'
u'\\UserAssist\\{{FA99DFC7-6AC2-453A-A5E2-5E2AFF4507BD}}'),
(u'\\Software\\Microsoft\\Windows\\CurrentVersion\\Explorer'
u'\\UserAssist\\{{F4E57C4B-2036-45F0-A9AB-443BCFE33D9F}}'),
(u'\\Software\\Microsoft\\Windows\\CurrentVersion\\Explorer'
u'\\UserAssist\\{{F2A1CB5A-E3CC-4A2E-AF9D-505A7009D442}}'),
(u'\\Software\\Microsoft\\Windows\\CurrentVersion\\Explorer'
u'\\UserAssist\\{{CEBFF5CD-ACE2-4F4F-9178-9926F41749EA}}'),
(u'\\Software\\Microsoft\\Windows\\CurrentVersion\\Explorer'
u'\\UserAssist\\{{CAA59E3C-4792-41A5-9909-6A6A8D32490E}}'),
(u'\\Software\\Microsoft\\Windows\\CurrentVersion\\Explorer'
u'\\UserAssist\\{{B267E3AD-A825-4A09-82B9-EEC22AA3B847}}'),
(u'\\Software\\Microsoft\\Windows\\CurrentVersion\\Explorer'
u'\\UserAssist\\{{A3D53349-6E61-4557-8FC7-0028EDCEEBF6}}'),
(u'\\Software\\Microsoft\\Windows\\CurrentVersion\\Explorer'
u'\\UserAssist\\{{9E04CAB2-CC14-11DF-BB8C-A2F1DED72085}}'),
(u'\\Software\\Microsoft\\Windows\\CurrentVersion\\Explorer'
u'\\UserAssist\\{{75048700-EF1F-11D0-9888-006097DEACF9}}'),
(u'\\Software\\Microsoft\\Windows\\CurrentVersion\\Explorer'
u'\\UserAssist\\{{5E6AB780-7743-11CF-A12B-00AA004AE837}}'),
(u'\\Software\\Microsoft\\Windows\\CurrentVersion\\Explorer'
u'\\UserAssist\\{{0D6D4F41-2994-4BA0-8FEF-620E43CD2812}}'),
(u'\\Software\\Microsoft\\Windows\\CurrentVersion\\Explorer'
u'\\UserAssist\\{{BCB48336-4DDD-48FF-BB0B-D3190DACB3E2}}')]
URL = [
u'http://blog.didierstevens.com/programs/userassist/',
u'https://code.google.com/p/winreg-kb/wiki/UserAssistKeys',
u'http://intotheboxes.files.wordpress.com/2010/04'
u'/intotheboxes_2010_q1.pdf']
# UserAssist format version used in Windows 2000, XP, 2003, Vista.
USERASSIST_V3_STRUCT = construct.Struct(
u'userassist_entry',
construct.Padding(4),
construct.ULInt32(u'count'),
construct.ULInt64(u'timestamp'))
# UserAssist format version used in Windows 2008, 7, 8.
USERASSIST_V5_STRUCT = construct.Struct(
u'userassist_entry',
construct.Padding(4),
construct.ULInt32(u'count'),
construct.ULInt32(u'app_focus_count'),
construct.ULInt32(u'focus_duration'),
construct.Padding(44),
construct.ULInt64(u'timestamp'),
construct.Padding(4))
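  # Size note (added for clarity): with the definitions above,
  # USERASSIST_V3_STRUCT.sizeof() is 16 bytes (4 padding + 4 count + 8 timestamp)
  # and USERASSIST_V5_STRUCT.sizeof() is 72 bytes (4 + 4 + 4 + 4 + 44 + 8 + 4);
  # GetEntries checks value data sizes against these totals before parsing.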
def GetEntries(
self, parser_mediator, registry_key, registry_file_type=None, **kwargs):
"""Parses a UserAssist Registry key.
Args:
parser_mediator: A parser mediator object (instance of ParserMediator).
registry_key: A Windows Registry key (instance of
dfwinreg.WinRegistryKey).
registry_file_type: Optional string containing the Windows Registry file
type, e.g. NTUSER, SOFTWARE. The default is None.
"""
version_value = registry_key.GetValueByName(u'Version')
count_subkey = registry_key.GetSubkeyByName(u'Count')
if not version_value:
parser_mediator.ProduceParseError(u'Missing version value')
elif not version_value.DataIsInteger():
parser_mediator.ProduceParseError(u'Unsupported version value data type')
elif version_value.data not in [3, 5]:
parser_mediator.ProduceParseError(
u'Unsupported version: {0:d}'.format(version_value.data))
elif not count_subkey:
parser_mediator.ProduceParseError(u'Missing count subkey')
else:
userassist_entry_index = 0
for value in count_subkey.GetValues():
try:
value_name = value.name.decode(u'rot-13')
except UnicodeEncodeError as exception:
logging.debug((
u'Unable to decode UserAssist string: {0:s} with error: {1:s}.\n'
u'Attempting piecewise decoding.').format(
value.name, exception))
characters = []
for char in value.name:
if ord(char) < 128:
try:
characters.append(char.decode(u'rot-13'))
except UnicodeEncodeError:
characters.append(char)
else:
characters.append(char)
value_name = u''.join(characters)
if version_value.data == 5:
path_segments = value_name.split(u'\\')
for segment_index in range(0, len(path_segments)):
# Remove the { } from the path segment to get the GUID.
guid = path_segments[segment_index][1:-1]
path_segments[segment_index] = known_folder_ids.PATHS.get(
guid, path_segments[segment_index])
value_name = u'\\'.join(path_segments)
# Check if we might need to substitute values.
if u'%' in value_name:
# TODO: deprecate direct use of pre_obj.
value_name = environ_expand.ExpandWindowsEnvironmentVariables(
value_name, parser_mediator.knowledge_base.pre_obj)
value_data_size = len(value.data)
if not value.DataIsBinaryData():
parser_mediator.ProduceParseError(
u'Unsupported value data type: {0:s}'.format(
value.data_type_string))
elif value_name == u'UEME_CTLSESSION':
pass
elif version_value.data == 3:
if value_data_size != self.USERASSIST_V3_STRUCT.sizeof():
parser_mediator.ProduceParseError(
u'Unsupported value data size: {0:d}'.format(value_data_size))
else:
parsed_data = self.USERASSIST_V3_STRUCT.parse(value.data)
filetime = parsed_data.get(u'timestamp', 0)
count = parsed_data.get(u'count', 0)
if count > 5:
count -= 5
values_dict = {}
values_dict[value_name] = u'[Count: {0:d}]'.format(count)
event_object = windows_events.WindowsRegistryEvent(
filetime, count_subkey.path, values_dict, offset=value.offset,
registry_file_type=registry_file_type)
parser_mediator.ProduceEvent(event_object)
elif version_value.data == 5:
          if value_data_size != self.USERASSIST_V5_STRUCT.sizeof():
            parser_mediator.ProduceParseError(
                u'Unsupported value data size: {0:d}'.format(value_data_size))
            continue
parsed_data = self.USERASSIST_V5_STRUCT.parse(value.data)
userassist_entry_index += 1
count = parsed_data.get(u'count', None)
app_focus_count = parsed_data.get(u'app_focus_count', None)
focus_duration = parsed_data.get(u'focus_duration', None)
filetime = parsed_data.get(u'timestamp', 0)
values_dict = {}
values_dict[value_name] = (
u'[UserAssist entry: {0:d}, Count: {1:d}, '
u'Application focus count: {2:d}, Focus duration: {3:d}]').format(
userassist_entry_index, count, app_focus_count,
focus_duration)
event_object = windows_events.WindowsRegistryEvent(
filetime, count_subkey.path, values_dict,
offset=count_subkey.offset, registry_file_type=registry_file_type)
parser_mediator.ProduceEvent(event_object)
winreg.WinRegistryParser.RegisterPlugin(UserAssistPlugin)
|
apache-2.0
|
mattbertolini/liquibase
|
liquibase-core/src/main/java/liquibase/dbdoc/ChangeLogWriter.java
|
1497
|
package liquibase.dbdoc;
import liquibase.GlobalConfiguration;
import liquibase.resource.ResourceAccessor;
import liquibase.util.StreamUtil;
import java.io.*;
public class ChangeLogWriter {
protected File outputDir;
private ResourceAccessor resourceAccessor;
public ChangeLogWriter(ResourceAccessor resourceAccessor, File rootOutputDir) {
this.outputDir = new File(rootOutputDir, "changelogs");
this.resourceAccessor = resourceAccessor;
}
public void writeChangeLog(String changeLog, String physicalFilePath) throws IOException {
String changeLogOutFile = changeLog.replace(":", "_");
File xmlFile = new File(outputDir, changeLogOutFile.toLowerCase() + ".html");
xmlFile.getParentFile().mkdirs();
BufferedWriter changeLogStream = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(xmlFile,
false), GlobalConfiguration.OUTPUT_FILE_ENCODING.getCurrentValue()));
try (InputStream stylesheet = resourceAccessor.openStream(null, physicalFilePath)) {
if (stylesheet == null) {
throw new IOException("Can not find " + changeLog);
}
changeLogStream.write("<html><body><pre>\n");
            changeLogStream.write(StreamUtil.readStreamAsString(stylesheet).replace("<", "&lt;").replace(">", "&gt;"));
changeLogStream.write("\n</pre></body></html>");
} finally {
changeLogStream.close();
}
}
}
|
apache-2.0
|
palantir/giraffe
|
core/src/main/java/com/palantir/giraffe/command/ExecutionSystemNotFoundException.java
|
1046
|
/**
* Copyright 2015 Palantir Technologies, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.palantir.giraffe.command;
/**
* Runtime exception thrown when an execution system cannot be found.
*
* @author bkeyes
*/
public final class ExecutionSystemNotFoundException extends RuntimeException {
public ExecutionSystemNotFoundException() {}
public ExecutionSystemNotFoundException(String message) {
super(message);
}
private static final long serialVersionUID = -378596008898522613L;
}
|
apache-2.0
|
cftorres/runtimes-common
|
iDiff/differs/pipDiff_test.go
|
2284
|
package differs
import (
"reflect"
"testing"
"github.com/GoogleCloudPlatform/runtimes-common/iDiff/utils"
)
func TestGetPythonVersion(t *testing.T) {
testCases := []struct {
layerPath string
expectedVersion string
expectedSuccess bool
}{
{
layerPath: "testDirs/pipTests/pythonVersionTests/noLibLayer",
expectedVersion: "",
expectedSuccess: false,
},
{
layerPath: "testDirs/pipTests/pythonVersionTests/noPythonLayer",
expectedVersion: "",
expectedSuccess: false,
},
{
layerPath: "testDirs/pipTests/pythonVersionTests/version2.7Layer",
expectedVersion: "python2.7",
expectedSuccess: true,
},
{
layerPath: "testDirs/pipTests/pythonVersionTests/version3.6Layer",
expectedVersion: "python3.6",
expectedSuccess: true,
},
}
for _, test := range testCases {
version, success := getPythonVersion(test.layerPath)
if success != test.expectedSuccess {
if test.expectedSuccess {
t.Error("Expected success finding version but got none")
} else {
t.Errorf("Expected failure finding version but found one: %s", version)
}
} else if version != test.expectedVersion {
t.Errorf("Expected: %s. Got: %s", test.expectedVersion, version)
}
}
}
func TestGetPythonPackages(t *testing.T) {
testCases := []struct {
path string
expectedPackages map[string]utils.PackageInfo
}{
{
path: "testDirs/pipTests/noPackagesTest",
expectedPackages: map[string]utils.PackageInfo{},
},
{
path: "testDirs/pipTests/packagesManyLayers",
expectedPackages: map[string]utils.PackageInfo{
"packageone": {Version: "3.6.9", Size: "0"},
"packagetwo": {Version: "4.6.2", Size: "0"},
"packagethree": {Version: "2.4.5", Size: "0"},
"packagefour": {Version: "2.4.6", Size: "0"},
},
},
{
path: "testDirs/pipTests/packagesOneLayer",
expectedPackages: map[string]utils.PackageInfo{
"packageone": {Version: "3.6.9", Size: "0"},
"packagetwo": {Version: "4.6.2", Size: "0"},
},
},
}
for _, test := range testCases {
d := PipDiffer{}
packages, _ := d.getPackages(test.path)
if !reflect.DeepEqual(packages, test.expectedPackages) {
t.Errorf("Expected: %s but got: %s", test.expectedPackages, packages)
}
}
}
|
apache-2.0
|
sverkera/camel
|
camel-core/src/test/java/org/apache/camel/runtimecatalog/RuntimeCamelCatalogTest.java
|
15932
|
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.runtimecatalog;
import java.util.HashMap;
import java.util.Map;
import org.apache.camel.impl.DefaultCamelContext;
import org.junit.BeforeClass;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
public class RuntimeCamelCatalogTest {
static RuntimeCamelCatalog catalog;
private static final Logger LOG = LoggerFactory.getLogger(RuntimeCamelCatalogTest.class);
@BeforeClass
public static void createCamelCatalog() {
catalog = new DefaultRuntimeCamelCatalog(new DefaultCamelContext());
}
@Test
public void testFromCamelContext() throws Exception {
String schema = new DefaultCamelContext().getRuntimeCamelCatalog().modelJSonSchema("choice");
assertNotNull(schema);
}
@Test
public void testJsonSchema() throws Exception {
String schema = catalog.modelJSonSchema("aggregate");
assertNotNull(schema);
        // let's make it possible to find bean/method using both names
schema = catalog.modelJSonSchema("method");
assertNotNull(schema);
schema = catalog.modelJSonSchema("bean");
assertNotNull(schema);
}
@Test
public void testAsEndpointUriMapFile() throws Exception {
Map<String, String> map = new HashMap<>();
map.put("directoryName", "src/data/inbox");
map.put("noop", "true");
map.put("delay", "5000");
String uri = catalog.asEndpointUri("file", map, true);
assertEquals("file:src/data/inbox?delay=5000&noop=true", uri);
String uri2 = catalog.asEndpointUriXml("file", map, true);
assertEquals("file:src/data/inbox?delay=5000&noop=true", uri2);
}
@Test
public void testAsEndpointUriTimer() throws Exception {
Map<String, String> map = new HashMap<>();
map.put("timerName", "foo");
map.put("period", "5000");
String uri = catalog.asEndpointUri("timer", map, true);
assertEquals("timer:foo?period=5000", uri);
}
@Test
public void testAsEndpointUriPropertiesPlaceholders() throws Exception {
Map<String, String> map = new HashMap<>();
map.put("timerName", "foo");
map.put("period", "{{howoften}}");
map.put("repeatCount", "5");
String uri = catalog.asEndpointUri("timer", map, true);
assertEquals("timer:foo?period=%7B%7Bhowoften%7D%7D&repeatCount=5", uri);
uri = catalog.asEndpointUri("timer", map, false);
assertEquals("timer:foo?period={{howoften}}&repeatCount=5", uri);
}
@Test
public void testAsEndpointUriBeanLookup() throws Exception {
Map<String, String> map = new HashMap<>();
map.put("resourceUri", "foo.xslt");
map.put("converter", "#myConverter");
String uri = catalog.asEndpointUri("xslt", map, true);
assertEquals("xslt:foo.xslt?converter=%23myConverter", uri);
uri = catalog.asEndpointUri("xslt", map, false);
assertEquals("xslt:foo.xslt?converter=#myConverter", uri);
}
@Test
public void testEndpointPropertiesPlaceholders() throws Exception {
Map<String, String> map = catalog.endpointProperties("timer:foo?period={{howoften}}&repeatCount=5");
assertNotNull(map);
assertEquals(3, map.size());
assertEquals("foo", map.get("timerName"));
assertEquals("{{howoften}}", map.get("period"));
assertEquals("5", map.get("repeatCount"));
}
@Test
public void testAsEndpointUriLog() throws Exception {
Map<String, String> map = new HashMap<>();
map.put("loggerName", "foo");
map.put("loggerLevel", "WARN");
map.put("multiline", "true");
map.put("showAll", "true");
map.put("showBody", "false");
map.put("showBodyType", "false");
map.put("showExchangePattern", "false");
map.put("style", "Tab");
assertEquals("log:foo?loggerLevel=WARN&multiline=true&showAll=true&style=Tab", catalog.asEndpointUri("log", map, false));
}
@Test
public void testAsEndpointUriLogShort() throws Exception {
Map<String, String> map = new HashMap<>();
map.put("loggerName", "foo");
map.put("loggerLevel", "DEBUG");
assertEquals("log:foo?loggerLevel=DEBUG", catalog.asEndpointUri("log", map, false));
}
@Test
    public void testAsEndpointUriWithPlaceholder() throws Exception {
Map<String, String> map = new HashMap<>();
map.put("name", "foo");
map.put("blockWhenFull", "{{block}}");
assertEquals("seda:foo?blockWhenFull={{block}}", catalog.asEndpointUri("seda", map, false));
}
@Test
public void testEndpointPropertiesSedaRequired() throws Exception {
Map<String, String> map = catalog.endpointProperties("seda:foo");
assertNotNull(map);
assertEquals(1, map.size());
assertEquals("foo", map.get("name"));
map = catalog.endpointProperties("seda:foo?blockWhenFull=true");
assertNotNull(map);
assertEquals(2, map.size());
assertEquals("foo", map.get("name"));
assertEquals("true", map.get("blockWhenFull"));
}
@Test
public void validateProperties() throws Exception {
// valid
EndpointValidationResult result = catalog.validateEndpointProperties("log:mylog");
assertTrue(result.isSuccess());
// unknown
result = catalog.validateEndpointProperties("log:mylog?level=WARN&foo=bar");
assertFalse(result.isSuccess());
assertTrue(result.getUnknown().contains("foo"));
assertEquals(1, result.getNumberOfErrors());
// enum
result = catalog.validateEndpointProperties("seda:foo?waitForTaskToComplete=blah");
assertFalse(result.isSuccess());
assertEquals("blah", result.getInvalidEnum().get("waitForTaskToComplete"));
assertEquals(1, result.getNumberOfErrors());
// reference okay
result = catalog.validateEndpointProperties("seda:foo?queue=#queue");
assertTrue(result.isSuccess());
assertEquals(0, result.getNumberOfErrors());
// unknown component
result = catalog.validateEndpointProperties("foo:bar?me=you");
assertFalse(result.isSuccess());
assertTrue(result.getUnknownComponent().equals("foo"));
assertEquals(1, result.getNumberOfErrors());
// invalid boolean but default value
result = catalog.validateEndpointProperties("log:output?showAll=ggg");
assertFalse(result.isSuccess());
assertEquals("ggg", result.getInvalidBoolean().get("showAll"));
assertEquals(1, result.getNumberOfErrors());
// dataset
result = catalog.validateEndpointProperties("dataset:foo?minRate=50");
assertTrue(result.isSuccess());
// time pattern
result = catalog.validateEndpointProperties("timer://foo?fixedRate=true&delay=0&period=2s");
assertTrue(result.isSuccess());
// reference lookup
result = catalog.validateEndpointProperties("timer://foo?fixedRate=#fixed&delay=#myDelay");
assertTrue(result.isSuccess());
// optional consumer. prefix
result = catalog.validateEndpointProperties("file:inbox?consumer.delay=5000&consumer.greedy=true");
assertTrue(result.isSuccess());
// optional without consumer. prefix
result = catalog.validateEndpointProperties("file:inbox?delay=5000&greedy=true");
assertTrue(result.isSuccess());
// mixed optional without consumer. prefix
result = catalog.validateEndpointProperties("file:inbox?delay=5000&consumer.greedy=true");
assertTrue(result.isSuccess());
// prefix
result = catalog.validateEndpointProperties("file:inbox?delay=5000&scheduler.foo=123&scheduler.bar=456");
assertTrue(result.isSuccess());
// stub
result = catalog.validateEndpointProperties("stub:foo?me=123&you=456");
assertTrue(result.isSuccess());
// lenient on
result = catalog.validateEndpointProperties("dataformat:string:marshal?foo=bar");
assertTrue(result.isSuccess());
// lenient off
result = catalog.validateEndpointProperties("dataformat:string:marshal?foo=bar", true);
assertFalse(result.isSuccess());
assertTrue(result.getUnknown().contains("foo"));
// data format
result = catalog.validateEndpointProperties("dataformat:string:marshal?charset=utf-8", true);
assertTrue(result.isSuccess());
// incapable to parse
result = catalog.validateEndpointProperties("{{getFtpUrl}}?recursive=true");
assertFalse(result.isSuccess());
assertTrue(result.getIncapable() != null);
}
@Test
public void validatePropertiesSummary() throws Exception {
EndpointValidationResult result = catalog.validateEndpointProperties("yammer:MESSAGES?blah=yada&accessToken=aaa&consumerKey=&useJson=no&initialDelay=five&pollStrategy=myStrategy");
assertFalse(result.isSuccess());
String reason = result.summaryErrorMessage(true);
LOG.info(reason);
result = catalog.validateEndpointProperties("jms:unknown:myqueue");
assertFalse(result.isSuccess());
reason = result.summaryErrorMessage(false);
LOG.info(reason);
}
@Test
public void validateTimePattern() throws Exception {
assertTrue(catalog.validateTimePattern("0"));
assertTrue(catalog.validateTimePattern("500"));
assertTrue(catalog.validateTimePattern("10000"));
assertTrue(catalog.validateTimePattern("5s"));
assertTrue(catalog.validateTimePattern("5sec"));
assertTrue(catalog.validateTimePattern("5secs"));
assertTrue(catalog.validateTimePattern("3m"));
assertTrue(catalog.validateTimePattern("3min"));
assertTrue(catalog.validateTimePattern("3minutes"));
assertTrue(catalog.validateTimePattern("5m15s"));
assertTrue(catalog.validateTimePattern("1h"));
assertTrue(catalog.validateTimePattern("1hour"));
assertTrue(catalog.validateTimePattern("2hours"));
assertFalse(catalog.validateTimePattern("bla"));
assertFalse(catalog.validateTimePattern("2year"));
assertFalse(catalog.validateTimePattern("60darn"));
}
@Test
public void testEndpointComponentName() throws Exception {
String name = catalog.endpointComponentName("jms:queue:foo");
assertEquals("jms", name);
}
@Test
public void testSimpleExpression() throws Exception {
SimpleValidationResult result = catalog.validateSimpleExpression(null, "${body}");
assertTrue(result.isSuccess());
assertEquals("${body}", result.getSimple());
result = catalog.validateSimpleExpression(null, "${body");
assertFalse(result.isSuccess());
assertEquals("${body", result.getSimple());
LOG.info(result.getError());
assertTrue(result.getError().startsWith("expected symbol functionEnd but was eol at location 5"));
assertEquals("expected symbol functionEnd but was eol", result.getShortError());
assertEquals(5, result.getIndex());
}
@Test
public void testSimplePredicate() throws Exception {
SimpleValidationResult result = catalog.validateSimplePredicate(null, "${body} == 'abc'");
assertTrue(result.isSuccess());
assertEquals("${body} == 'abc'", result.getSimple());
result = catalog.validateSimplePredicate(null, "${body} > ${header.size");
assertFalse(result.isSuccess());
assertEquals("${body} > ${header.size", result.getSimple());
LOG.info(result.getError());
assertTrue(result.getError().startsWith("expected symbol functionEnd but was eol at location 22"));
assertEquals("expected symbol functionEnd but was eol", result.getShortError());
assertEquals(22, result.getIndex());
}
@Test
public void testSimplePredicatePlaceholder() throws Exception {
SimpleValidationResult result = catalog.validateSimplePredicate(null, "${body} contains '{{danger}}'");
assertTrue(result.isSuccess());
assertEquals("${body} contains '{{danger}}'", result.getSimple());
result = catalog.validateSimplePredicate(null, "${bdy} contains '{{danger}}'");
assertFalse(result.isSuccess());
assertEquals("${bdy} contains '{{danger}}'", result.getSimple());
LOG.info(result.getError());
assertTrue(result.getError().startsWith("Unknown function: bdy at location 0"));
assertTrue(result.getError().contains("'{{danger}}'"));
assertEquals("Unknown function: bdy", result.getShortError());
assertEquals(0, result.getIndex());
}
@Test
public void testValidateLanguage() throws Exception {
LanguageValidationResult result = catalog.validateLanguageExpression(null, "simple", "${body}");
assertTrue(result.isSuccess());
assertEquals("${body}", result.getText());
result = catalog.validateLanguageExpression(null, "header", "foo");
assertTrue(result.isSuccess());
assertEquals("foo", result.getText());
result = catalog.validateLanguagePredicate(null, "simple", "${body} > 10");
assertTrue(result.isSuccess());
assertEquals("${body} > 10", result.getText());
result = catalog.validateLanguagePredicate(null, "header", "bar");
assertTrue(result.isSuccess());
assertEquals("bar", result.getText());
result = catalog.validateLanguagePredicate(null, "foobar", "bar");
assertFalse(result.isSuccess());
assertEquals("Unknown language foobar", result.getError());
}
@Test
public void testValidateEndpointConsumerOnly() throws Exception {
String uri = "file:inbox?bufferSize=4096&readLock=changed&delete=true";
EndpointValidationResult result = catalog.validateEndpointProperties(uri, false, true, false);
assertTrue(result.isSuccess());
uri = "file:inbox?bufferSize=4096&readLock=changed&delete=true&fileExist=Append";
result = catalog.validateEndpointProperties(uri, false, true, false);
assertFalse(result.isSuccess());
assertEquals("fileExist", result.getNotConsumerOnly().iterator().next());
}
@Test
public void testValidateEndpointProducerOnly() throws Exception {
String uri = "file:outbox?bufferSize=4096&fileExist=Append";
EndpointValidationResult result = catalog.validateEndpointProperties(uri, false, false, true);
assertTrue(result.isSuccess());
uri = "file:outbox?bufferSize=4096&fileExist=Append&delete=true";
result = catalog.validateEndpointProperties(uri, false, false, true);
assertFalse(result.isSuccess());
assertEquals("delete", result.getNotProducerOnly().iterator().next());
}
}
|
apache-2.0
|
barnyard/p2p-app
|
src/main/java/com/bt/pi/app/common/net/SubnetAllocationResult.java
|
920
|
package com.bt.pi.app.common.net;
import org.apache.commons.lang.builder.ToStringBuilder;
public class SubnetAllocationResult {
private long subnetBaseAddress;
private long subnetMask;
private String dnsAddress;
public SubnetAllocationResult(long aSubnetBaseAddress, long aSubnetMask, String aDnsAddress) {
super();
this.subnetBaseAddress = aSubnetBaseAddress;
this.subnetMask = aSubnetMask;
this.dnsAddress = aDnsAddress;
}
public long getSubnetBaseAddress() {
return subnetBaseAddress;
}
public long getSubnetMask() {
return subnetMask;
}
public String getDnsAddress() {
return dnsAddress;
}
@Override
public String toString() {
return new ToStringBuilder(this).append("subnetBaseAddress", subnetBaseAddress).append("subnetMask", subnetMask).append("dnsAddress", dnsAddress).toString();
}
}
|
apache-2.0
|
eurosecom/projekt1
|
faktury/nacitajsluzbycsv.php
|
3833
|
<HTML>
<?php
// overall start of the document
do
{
$sys = 'FAK';
$urov = 2000;
$clsm = 820;
$cslm=404200;
$uziv = include("../uziv.php");
if( !$uziv ) exit;
// operation number
$copern = 1*$_REQUEST['copern'];
$drupoh = 1*$_REQUEST['drupoh'];
$cislo_dok = 1*$_REQUEST['cislo_dok'];
$odeslano = 1*$_REQUEST['odeslano'];
require_once("../pswd/password.php");
@$spojeni = mysql_connect($mysqlhost, $mysqluser, $mysqlpasswd);
if (!$spojeni):
echo "Spojenie so serverom nedostupne.";
exit;
endif;
mysql_select_db($mysqldb);
$pole = explode(".", $kli_vume);
$kli_vmes=$pole[0];
$kli_vrok=$pole[1];
$citfir = include("../cis/citaj_fir.php");
$mena1 = $fir_mena1;
$mena2 = $fir_mena2;
$kurz12 = $fir_kurz12;
// date functions
$sDat = include("../funkcie/dat_sk_us.php");
?>
<HEAD>
<META http-equiv="Content-Type" content="text/html; charset=cp1250">
<link type="text/css" rel="stylesheet" href="../css/styl.css">
<title> </title>
<style type="text/css">
</style>
<script type="text/javascript">
</script>
</HEAD>
<BODY class="white" >
<table class="h2" width="100%" >
<tr>
<td>EuroSecom
</td>
<td align="right"><span class="login"><?php echo "UME $kli_vume FIR$kli_vxcf-$kli_nxcf login: $kli_uzmeno $kli_uzprie / $kli_uzid ";?></span></td>
</tr>
</table>
<br />
<?php
if ( $odeslano == 1 )
{
if (File_Exists ("../import/FIR$kli_vxcf/sluzbycis.csv")) { $soubor = unlink("../import/FIR$kli_vxcf/sluzbycis.csv"); }
if (move_uploaded_file($_FILES['original']['tmp_name'], "../import/FIR$kli_vxcf/sluzbycis.csv"))
{
$sqlttt = "DELETE FROM F$kli_vxcf"."_sluzby WHERE slu >= 0 ";
$ulozene = mysql_query("$sqlttt");
$i=0;
$subor = fopen("../import/FIR$kli_vxcf/sluzbycis.csv", "r");
while (! feof($subor))
{
$i=$i+1;
$riadok = fgets($subor, 500);
//print "$riadok<br />";
$pole = explode(";", $riadok);
//CSV columns: ID;polozka;dph;cenabez;cenas;mj (ID; item; VAT rate; price excl. VAT; price incl. VAT; unit)
//sample line: 1011;produkcia;20;1;1,2;ks
//columns of the sluzby (services) table:
//slu nsl nslp nslz mer dph cep ced
//tl1 tl2 tl3 labh1 labh2 kat01h kat02h kat03h kat04h webtx1 webtx2 datm
$x_cis = 1*$pole[0];
$x_nsl = $pole[1];
$x_dph = $pole[2];
$x_cep = $pole[3];
$x_ced = $pole[4];
$x_mer = $pole[5];
$ccis=1*$x_cis;
$x_ced=str_replace(",",".",$x_ced);
$x_cep=str_replace(",",".",$x_cep);
$x_nsl=str_replace("\"","",$x_nsl);
$sqlttt = "INSERT INTO F$kli_vxcf"."_sluzby ( slu, nsl, mer, dph, cep, ced ) VALUES ".
" ( '$x_cis', '$x_nsl', '$x_mer', '$x_dph', '$x_cep', '$x_ced' )";
if( $ccis > 0 ) { $ulozene = mysql_query("$sqlttt"); }
}
//end of while loop
//labh1 labh2 kat01h kat02h kat03h kat04h webtx1 webtx2
$sqlttt = "UPDATE F$kli_vxcf"."_sluzby SET ".
" labh1='', labh2='', kat01h='', kat02h='', kat03h='', kat04h='', webtx1='', webtx2='' ";
$ulozene = mysql_query("$sqlttt");
?>
<script type="text/javascript" >
var okno = window.open("cslu.php?copern=1&drupoh=1&page=1&zmtz=1","_self");
</script>
<?php
}
//end of the upload-success branch
}
//end of if odeslano == 1
if ( $odeslano == 0 )
{
?>
<form method="POST" ENCTYPE="multipart/form-data" action="<?php echo $_SERVER["PHP_SELF"]?>?cislo_dok=<?php echo $cislo_dok; ?>
&drupoh=<?php echo $drupoh; ?>&copern=<?php echo $copern; ?>&odeslano=1">
<table class="vstup" width="100%" height="50px">
<tr>
<td width="35%" align="right" >Súbor:</td>
<td width="30%" align="center" >
<input type="HIDDEN" name="MAX_FILE_SIZE" VALUE=2097152>
<input type="file" name="original" >
</td>
<td width="35%" align="left" >(max. 2 MB)</td>
</tr>
<tr>
<td colspan="3">
<input type="hidden" name="odeslano" value="1">
<p align="center"><input type="submit" value="Odosla"></td>
</tr>
</table>
</form>
<?php
}
// overall end of the document
} while (false);
?>
</BODY>
</HTML>
|
apache-2.0
|
jcfr/Midas
|
modules/ldap/models/AppModel.php
|
984
|
<?php
/*=========================================================================
Midas Server
Copyright Kitware SAS, 26 rue Louis Guérin, 69100 Villeurbanne, France.
All rights reserved.
For more information visit http://www.kitware.com/.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0.txt
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=========================================================================*/
/** Base model class for the ldap module */
class Ldap_AppModel extends MIDASModel
{
public $moduleName = 'ldap';
}
|
apache-2.0
|
punkhorn/camel-upstream
|
components/camel-sql/src/main/java/org/apache/camel/component/sql/SqlConsumer.java
|
16018
|
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.sql;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Queue;
import org.apache.camel.Exchange;
import org.apache.camel.ExchangePattern;
import org.apache.camel.Message;
import org.apache.camel.Processor;
import org.apache.camel.RollbackExchangeException;
import org.apache.camel.RuntimeCamelException;
import org.apache.camel.support.ScheduledBatchPollingConsumer;
import org.apache.camel.util.CastUtils;
import org.apache.camel.util.ObjectHelper;
import org.springframework.dao.DataAccessException;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.core.PreparedStatementCallback;
import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate;
import org.springframework.jdbc.core.namedparam.SqlParameterSource;
import static org.springframework.jdbc.support.JdbcUtils.closeResultSet;
public class SqlConsumer extends ScheduledBatchPollingConsumer {
private final String query;
private String resolvedQuery;
private final JdbcTemplate jdbcTemplate;
private final NamedParameterJdbcTemplate namedJdbcTemplate;
private final SqlParameterSource parameterSource;
private final SqlPrepareStatementStrategy sqlPrepareStatementStrategy;
private final SqlProcessingStrategy sqlProcessingStrategy;
private String onConsume;
private String onConsumeFailed;
private String onConsumeBatchComplete;
private boolean useIterator = true;
private boolean routeEmptyResultSet;
private int expectedUpdateCount = -1;
private boolean breakBatchOnConsumeFail;
private int parametersCount;
private boolean alwaysPopulateStatement;
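    // Pairs an exchange with the raw row data it was created from while query
    // results are queued up for processBatch.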
private static final class DataHolder {
private Exchange exchange;
private Object data;
private DataHolder() {
}
}
public SqlConsumer(DefaultSqlEndpoint endpoint, Processor processor, JdbcTemplate jdbcTemplate, String query, SqlPrepareStatementStrategy sqlPrepareStatementStrategy,
SqlProcessingStrategy sqlProcessingStrategy) {
super(endpoint, processor);
this.jdbcTemplate = jdbcTemplate;
this.namedJdbcTemplate = null;
this.query = query;
this.parameterSource = null;
this.sqlPrepareStatementStrategy = sqlPrepareStatementStrategy;
this.sqlProcessingStrategy = sqlProcessingStrategy;
}
public SqlConsumer(DefaultSqlEndpoint endpoint, Processor processor, NamedParameterJdbcTemplate namedJdbcTemplate, String query, SqlParameterSource parameterSource,
SqlPrepareStatementStrategy sqlPrepareStatementStrategy, SqlProcessingStrategy sqlProcessingStrategy) {
super(endpoint, processor);
this.jdbcTemplate = null;
this.namedJdbcTemplate = namedJdbcTemplate;
this.query = query;
this.parameterSource = parameterSource;
this.sqlPrepareStatementStrategy = sqlPrepareStatementStrategy;
this.sqlProcessingStrategy = sqlProcessingStrategy;
}
@Override
public DefaultSqlEndpoint getEndpoint() {
return (DefaultSqlEndpoint) super.getEndpoint();
}
@Override
protected void doStart() throws Exception {
super.doStart();
String placeholder = getEndpoint().isUsePlaceholder() ? getEndpoint().getPlaceholder() : null;
resolvedQuery = SqlHelper.resolveQuery(getEndpoint().getCamelContext(), query, placeholder);
}
@Override
protected int poll() throws Exception {
// must reset for each poll
shutdownRunningTask = null;
pendingExchanges = 0;
final Exchange dummy = getEndpoint().createExchange();
final String preparedQuery = sqlPrepareStatementStrategy.prepareQuery(resolvedQuery, getEndpoint().isAllowNamedParameters(), dummy);
log.trace("poll: {}", preparedQuery);
final PreparedStatementCallback<Integer> callback = new PreparedStatementCallback<Integer>() {
@Override
public Integer doInPreparedStatement(PreparedStatement ps) throws SQLException, DataAccessException {
Queue<DataHolder> answer = new LinkedList<>();
int expected = parametersCount > 0 ? parametersCount : ps.getParameterMetaData().getParameterCount();
// only populate if really needed
if (alwaysPopulateStatement || expected > 0) {
Iterator<?> i = sqlPrepareStatementStrategy.createPopulateIterator(resolvedQuery, preparedQuery, expected, dummy, null);
sqlPrepareStatementStrategy.populateStatement(ps, i, expected);
}
log.debug("Executing query: {}", preparedQuery);
ResultSet rs = ps.executeQuery();
SqlOutputType outputType = getEndpoint().getOutputType();
boolean closeEager = true;
try {
log.trace("Got result list from query: {}, outputType={}", rs, outputType);
if (outputType == SqlOutputType.StreamList) {
ResultSetIterator data = getEndpoint().queryForStreamList(ps.getConnection(), ps, rs);
// only process if we have data
if (data.hasNext()) {
addListToQueue(data, answer);
closeEager = false;
}
} else if (outputType == SqlOutputType.SelectList) {
List<?> data = getEndpoint().queryForList(rs, true);
addListToQueue(data, answer);
} else if (outputType == SqlOutputType.SelectOne) {
Object data = getEndpoint().queryForObject(rs);
if (data != null) {
addListToQueue(data, answer);
}
} else {
throw new IllegalArgumentException("Invalid outputType=" + outputType);
}
} finally {
if (closeEager) {
closeResultSet(rs);
}
}
// process all the exchanges in this batch
try {
if (answer.isEmpty()) {
// no data
return 0;
} else {
int rows = processBatch(CastUtils.cast(answer));
return rows;
}
} catch (Exception e) {
throw RuntimeCamelException.wrapRuntimeCamelException(e);
} finally {
closeResultSet(rs);
}
}
};
Integer messagePolled;
if (namedJdbcTemplate != null) {
messagePolled = namedJdbcTemplate.execute(preparedQuery, parameterSource, callback);
} else {
messagePolled = jdbcTemplate.execute(preparedQuery, callback);
}
return messagePolled;
}
private void addListToQueue(Object data, Queue<DataHolder> answer) {
if (data instanceof List) {
// create a list of exchange objects with the data
List<?> list = (List)data;
if (useIterator) {
for (Object item : list) {
addItemToQueue(item, answer);
}
} else if (!list.isEmpty() || routeEmptyResultSet) {
addItemToQueue(list, answer);
}
} else {
// create single object as data
addItemToQueue(data, answer);
}
}
private void addItemToQueue(Object item, Queue<DataHolder> answer) {
Exchange exchange = createExchange(item);
DataHolder holder = new DataHolder();
holder.exchange = exchange;
holder.data = item;
answer.add(holder);
}
protected Exchange createExchange(Object data) {
final Exchange exchange = getEndpoint().createExchange(ExchangePattern.InOnly);
Message msg = exchange.getIn();
if (getEndpoint().getOutputHeader() != null) {
msg.setHeader(getEndpoint().getOutputHeader(), data);
} else {
msg.setBody(data);
}
return exchange;
}
@Override
public int processBatch(Queue<Object> exchanges) throws Exception {
int total = exchanges.size();
if (maxMessagesPerPoll > 0 && total == maxMessagesPerPoll) {
log.debug("Maximum messages to poll is {} and there were exactly {} messages in this poll.", maxMessagesPerPoll, total);
}
for (int index = 0; index < total && isBatchAllowed(); index++) {
// only loop if we are started (allowed to run)
DataHolder holder = ObjectHelper.cast(DataHolder.class, exchanges.poll());
Exchange exchange = holder.exchange;
Object data = holder.data;
// add current index and total as properties
exchange.setProperty(Exchange.BATCH_INDEX, index);
exchange.setProperty(Exchange.BATCH_SIZE, total);
exchange.setProperty(Exchange.BATCH_COMPLETE, index == total - 1);
// update pending number of exchanges
pendingExchanges = total - index - 1;
// process the current exchange
try {
getProcessor().process(exchange);
} catch (Exception e) {
exchange.setException(e);
}
if (getEndpoint().isTransacted() && exchange.isFailed()) {
// break out as we are transacted and should rollback
Exception cause = exchange.getException();
if (cause != null) {
throw cause;
} else {
throw new RollbackExchangeException("Rollback transaction due to error processing exchange", exchange);
}
}
// pick the on consume to use
String sql = exchange.isFailed() ? onConsumeFailed : onConsume;
try {
// we can only run on consume if there was data
if (data != null && sql != null) {
int updateCount;
if (namedJdbcTemplate != null && sqlProcessingStrategy instanceof SqlNamedProcessingStrategy) {
SqlNamedProcessingStrategy namedProcessingStrategy = (SqlNamedProcessingStrategy) sqlProcessingStrategy;
updateCount = namedProcessingStrategy.commit(getEndpoint(), exchange, data, namedJdbcTemplate, parameterSource, sql);
} else {
updateCount = sqlProcessingStrategy.commit(getEndpoint(), exchange, data, jdbcTemplate, sql);
}
if (expectedUpdateCount > -1 && updateCount != expectedUpdateCount) {
String msg = "Expected update count " + expectedUpdateCount + " but was " + updateCount + " executing query: " + sql;
throw new SQLException(msg);
}
}
} catch (Exception e) {
if (breakBatchOnConsumeFail) {
throw e;
} else {
handleException("Error executing onConsume/onConsumeFailed query " + sql, e);
}
}
}
try {
if (onConsumeBatchComplete != null) {
int updateCount;
if (namedJdbcTemplate != null && sqlProcessingStrategy instanceof SqlNamedProcessingStrategy) {
SqlNamedProcessingStrategy namedProcessingStrategy = (SqlNamedProcessingStrategy) sqlProcessingStrategy;
updateCount = namedProcessingStrategy.commitBatchComplete(getEndpoint(), namedJdbcTemplate, parameterSource, onConsumeBatchComplete);
} else {
updateCount = sqlProcessingStrategy.commitBatchComplete(getEndpoint(), jdbcTemplate, onConsumeBatchComplete);
}
log.debug("onConsumeBatchComplete update count {}", updateCount);
}
} catch (Exception e) {
if (breakBatchOnConsumeFail) {
throw e;
} else {
handleException("Error executing onConsumeBatchComplete query " + onConsumeBatchComplete, e);
}
}
return total;
}
public String getOnConsume() {
return onConsume;
}
/**
 * Sets the SQL statement to execute after a row has been successfully processed.
*/
public void setOnConsume(String onConsume) {
this.onConsume = onConsume;
}
public String getOnConsumeFailed() {
return onConsumeFailed;
}
/**
 * Sets the SQL statement to execute when processing of a row failed.
*/
public void setOnConsumeFailed(String onConsumeFailed) {
this.onConsumeFailed = onConsumeFailed;
}
public String getOnConsumeBatchComplete() {
return onConsumeBatchComplete;
}
public void setOnConsumeBatchComplete(String onConsumeBatchComplete) {
this.onConsumeBatchComplete = onConsumeBatchComplete;
}
public boolean isUseIterator() {
return useIterator;
}
/**
 * Sets how the result set should be delivered to the route:
 * either as a single list or as one exchange per row.
 * Defaults to true (one exchange per row).
*/
public void setUseIterator(boolean useIterator) {
this.useIterator = useIterator;
}
public boolean isRouteEmptyResultSet() {
return routeEmptyResultSet;
}
/**
 * Sets whether an empty result set is allowed to be sent to the next hop.
 * Defaults to false, so empty result sets are filtered out.
*/
public void setRouteEmptyResultSet(boolean routeEmptyResultSet) {
this.routeEmptyResultSet = routeEmptyResultSet;
}
public int getExpectedUpdateCount() {
return expectedUpdateCount;
}
/**
* Sets an expected update count to validate when using onConsume.
*/
public void setExpectedUpdateCount(int expectedUpdateCount) {
this.expectedUpdateCount = expectedUpdateCount;
}
public boolean isBreakBatchOnConsumeFail() {
return breakBatchOnConsumeFail;
}
/**
* Sets whether to break batch if onConsume failed.
*/
public void setBreakBatchOnConsumeFail(boolean breakBatchOnConsumeFail) {
this.breakBatchOnConsumeFail = breakBatchOnConsumeFail;
}
@Override
public void setMaxMessagesPerPoll(int maxMessagesPerPoll) {
super.setMaxMessagesPerPoll(maxMessagesPerPoll);
if (jdbcTemplate != null) {
jdbcTemplate.setMaxRows(maxMessagesPerPoll);
}
}
public void setParametersCount(int parametersCount) {
this.parametersCount = parametersCount;
}
public void setAlwaysPopulateStatement(boolean alwaysPopulateStatement) {
this.alwaysPopulateStatement = alwaysPopulateStatement;
}
}
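// --- Illustrative usage sketch (added; not part of the original file) ---
// A minimal example of a route that would drive this consumer; the endpoint
// URI options map onto the setters above (onConsume, useIterator, ...).
// Table and column names here are hypothetical.
class SqlConsumerUsageSketch extends org.apache.camel.builder.RouteBuilder {
@Override
public void configure() {
// each matching row becomes its own exchange (useIterator=true is the
// default), and :#id is bound from the current row when onConsume runs
from("sql:select * from orders where processed = 0"
+ "?onConsume=update orders set processed = 1 where id = :#id")
.to("log:processed-order");
}
}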
|
apache-2.0
|
FieldDB/fielddb-spreadsheet-sikuli
|
select-corpus.sikuli/select-corpus.py
|
239
|
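# Sikuli UI-automation script (descriptive comments added; not in the original):
# the PNG names are screenshots captured at recording time. The script appears
# to fill a two-field login form ("jenkins" / "phoneme"), wait for the corpus
# selection screen, and click through to select a corpus.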
find("1416542452932.png")
click("1416542464884.png")
type("jenkins\tphoneme\n")
wait("1416543208055.png")
click("1416543208055.png")
wait("1416543251929.png")
wait("1416542595201.png")
click("1416542595201.png")
wait("1416542452932.png")
|
apache-2.0
|
chrisdunelm/google-cloud-dotnet
|
apis/Google.Cloud.EntityFrameworkCore.Spanner/Google.Cloud.EntityFrameworkCore.Spanner/Infrastructure/Internal/SpannerOptionsExtension.cs
|
1711
|
// Copyright 2017 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
using Google.Api.Gax;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.Extensions.DependencyInjection;
namespace Google.Cloud.EntityFrameworkCore.Spanner.Infrastructure.Internal
{
/// <summary>
/// Spanner-specific relational options for Entity Framework Core.
/// </summary>
public class SpannerOptionsExtension : RelationalOptionsExtension
{
/// <summary>
/// Creates a new <see cref="SpannerOptionsExtension"/> with default options.
/// </summary>
public SpannerOptionsExtension()
{
}
/// <summary>
/// Copies the options of the given extension into a new instance.
/// </summary>
/// <param name="original">The extension to copy.</param>
protected SpannerOptionsExtension(SpannerOptionsExtension original)
: base(original)
{
}
/// <summary>
/// Creates a copy of this options extension.
/// </summary>
/// <returns>The cloned extension.</returns>
protected override RelationalOptionsExtension Clone()
=> new SpannerOptionsExtension(this);
/// <inheritdoc />
public override bool ApplyServices(IServiceCollection services)
{
GaxPreconditions.CheckNotNull(services, nameof(services));
services.AddEntityFrameworkSpanner();
return true;
}
}
}
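// Illustrative note (added; not part of the original file): RelationalOptionsExtension
// subclasses like this one are normally attached through a DbContextOptionsBuilder
// extension method; a hypothetical registration might look like:
//
//   optionsBuilder.UseSpanner("Data Source=projects/p/instances/i/databases/d");
//
// ApplyServices then registers the provider's services (AddEntityFrameworkSpanner)
// into EF Core's internal service collection.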
|
apache-2.0
|
google/binnavi
|
src/main/java/com/google/security/zynamics/binnavi/API/disassembly/ViewListenerAdapter.java
|
2111
|
// Copyright 2011-2016 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.security.zynamics.binnavi.API.disassembly;
import java.util.Date;
// / Adapter class for views
/**
 * Adapter class that can be used by objects that want to listen on views but only need to process
 * a few events.
*/
public class ViewListenerAdapter implements IViewListener {
@Override
public void addedEdge(final View view, final ViewEdge edge) {
// Adapter method
}
@Override
public void addedNode(final View view, final ViewNode node) {
// Adapter method
}
@Override
public void changedDescription(final View view, final String description) {
// Adapter method
}
@Override
public void changedGraphType(final View view, final GraphType type) {
// Adapter method
}
@Override
public void changedModificationDate(final View view, final Date date) {
// Adapter method
}
@Override
public void changedName(final View view, final String name) {
// Adapter method
}
@Override
public void closedView(final View view) {
// Adapter method
}
@Override
public boolean closingView(final View view) {
return true;
}
@Override
public void deletedEdge(final View view, final ViewEdge edge) {
// Adapter method
}
@Override
public void deletedNode(final View view, final ViewNode node) {
// Adapter method
}
@Override
public void taggedView(final View view, final Tag tag) {
// Adapter method
}
@Override
public void untaggedView(final View view, final Tag tag) {
// Adapter method
}
}
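// --- Illustrative usage sketch (added; not part of the original file) ---
// Adapters like this are typically subclassed so callers override only the
// events they care about; all other callbacks remain no-ops.
class NameChangeLogger extends ViewListenerAdapter {
@Override
public void changedName(final View view, final String name) {
System.out.println("View renamed to: " + name);
}
}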
|
apache-2.0
|
csharpfritz/NuGet.VisualStudioExtension
|
src/PackageManagement.PowerShellCmdlets/Cmdlets/GetProjectCommand.cs
|
2360
|
using EnvDTE;
using NuGet.ProjectManagement;
using System;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.Linq;
using System.Management.Automation;
namespace NuGet.PackageManagement.PowerShellCmdlets
{
/// <summary>
/// This cmdlet returns the project(s) in the current solution. The list of
/// project names is also used for tab expansion.
/// </summary>
[Cmdlet(VerbsCommon.Get, "Project", DefaultParameterSetName = ParameterSetByName)]
[OutputType(typeof(Project))]
public class GetProjectCommand : NuGetPowerShellBaseCommand
{
private const string ParameterSetByName = "ByName";
private const string ParameterSetAllProjects = "AllProjects";
public GetProjectCommand()
: base()
{
}
[Parameter(Mandatory = false, Position = 0, ParameterSetName = ParameterSetByName, ValueFromPipelineByPropertyName = true)]
[ValidateNotNullOrEmpty]
[SuppressMessage("Microsoft.Performance", "CA1819:PropertiesShouldNotReturnArrays", Justification = "PowerShell API requirement")]
public string[] Name { get; set; }
[Parameter(Mandatory = true, ParameterSetName = ParameterSetAllProjects)]
public SwitchParameter All { get; set; }
protected override void Preprocess()
{
base.Preprocess();
CheckForSolutionOpen();
GetNuGetProject();
}
protected override void ProcessRecordCore()
{
Preprocess();
if (All.IsPresent)
{
var projects = DTE.Solution.GetAllProjects();
WriteObject(projects, enumerateCollection: true);
}
else
{
// No name specified; return default project (if not null)
if (Name == null)
{
Project defaultProject = GetDefaultProject();
if (defaultProject != null)
{
WriteObject(defaultProject);
}
}
else
{
// get all projects matching name(s) - handles wildcards
WriteObject(GetProjectsByName(Name), enumerateCollection: true);
}
}
}
}
}
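// Illustrative usage from the NuGet Package Manager Console (added; not part
// of the original file); these invocations follow from the parameter sets above:
//
//   PM> Get-Project                # default project (ParameterSetByName, no Name)
//   PM> Get-Project MyApp.Web      # project(s) by name, wildcards supported
//   PM> Get-Project -All           # every project in the solution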
|
apache-2.0
|
cloudnautique/cloud-cattle
|
code/framework/eventing/src/main/java/io/cattle/platform/eventing/memory/InMemoryEventService.java
|
1623
|
package io.cattle.platform.eventing.memory;
import io.cattle.platform.eventing.EventListener;
import io.cattle.platform.eventing.impl.AbstractThreadPoolingEventService;
import io.cattle.platform.eventing.model.Event;
import java.io.IOException;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.util.concurrent.SettableFuture;
public class InMemoryEventService extends AbstractThreadPoolingEventService {
private static final Logger log = LoggerFactory.getLogger(InMemoryEventService.class);
@Override
protected boolean doPublish(final String name, final Event event, final String eventString) throws IOException {
getDefaultExecutor().execute(new Runnable() {
@Override
public void run() {
/* Don't send events we know there are no listeners for.
* This emulates the behavior of endpoints only getting what they've subscribed to.
*/
List<EventListener> listeners = getEventListeners(event);
if ( listeners != null && listeners.size() > 0 ) {
onEvent(null, name, eventString);
}
}
});
return true;
}
@Override
protected void doSubscribe(String eventName, SettableFuture<?> future) {
log.debug("Subscribing to [{}]", eventName);
future.set(null);
}
@Override
protected void doUnsubscribe(String eventName) {
log.debug("Unsubscribing from [{}]", eventName);
}
@Override
protected void disconnect() {
}
}
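// Design note (added; not in the original file): because everything is
// in-memory, doSubscribe can complete its future immediately, since there is
// no remote broker to register with, and doPublish hands the event to the
// default executor so publishing never blocks the caller.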
|
apache-2.0
|
flofreud/aws-sdk-java
|
aws-java-sdk-workspaces/src/main/java/com/amazonaws/services/workspaces/model/transform/ComputeTypeJsonMarshaller.java
|
2158
|
/*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights
* Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.workspaces.model.transform;
import java.util.Map;
import java.util.List;
import com.amazonaws.AmazonClientException;
import com.amazonaws.services.workspaces.model.*;
import com.amazonaws.transform.Marshaller;
import com.amazonaws.util.BinaryUtils;
import com.amazonaws.util.StringUtils;
import com.amazonaws.util.IdempotentUtils;
import com.amazonaws.util.StringInputStream;
import com.amazonaws.protocol.json.*;
/**
 * ComputeTypeJsonMarshaller
*/
public class ComputeTypeJsonMarshaller {
/**
 * Marshall the given parameter object and output to the given StructuredJsonGenerator.
*/
public void marshall(ComputeType computeType,
StructuredJsonGenerator jsonGenerator) {
if (computeType == null) {
throw new AmazonClientException(
"Invalid argument passed to marshall(...)");
}
try {
jsonGenerator.writeStartObject();
if (computeType.getName() != null) {
jsonGenerator.writeFieldName("Name").writeValue(
computeType.getName());
}
jsonGenerator.writeEndObject();
} catch (Throwable t) {
throw new AmazonClientException(
"Unable to marshall request to JSON: " + t.getMessage(), t);
}
}
private static ComputeTypeJsonMarshaller instance;
public static ComputeTypeJsonMarshaller getInstance() {
if (instance == null)
instance = new ComputeTypeJsonMarshaller();
return instance;
}
}
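// Illustrative usage (added; not part of the original file): marshallers like
// this are invoked from the enclosing request marshaller, e.g.
//   ComputeTypeJsonMarshaller.getInstance().marshall(computeType, jsonGenerator);
// which writes {"Name": "..."} for a populated ComputeType.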
|
apache-2.0
|
apache/zest-qi4j
|
core/runtime/src/test/java/org/apache/polygene/runtime/mixin/PrivateMixinTest.java
|
3076
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
*/
package org.apache.polygene.runtime.mixin;
import org.apache.polygene.test.AbstractPolygeneTest;
import org.junit.Test;
import org.apache.polygene.api.composite.TransientComposite;
import org.apache.polygene.api.injection.scope.This;
import org.apache.polygene.api.mixin.Mixins;
import org.apache.polygene.bootstrap.AssemblyException;
import org.apache.polygene.bootstrap.ModuleAssembly;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertThat;
/**
* Unit tests related to private mixins.
*/
public class PrivateMixinTest
extends AbstractPolygeneTest
{
@Override
public void assemble( ModuleAssembly module )
throws AssemblyException
{
module.transients( SpeakComposite.class );
}
/**
* Tests that private mixins are injected correctly.
*/
@Test
public void privateMixinFieldAndConstructorInjection()
{
SpeakComposite test = transientBuilderFactory.newTransient( SpeakComposite.class );
assertThat( "Speak", test.speak(), is( equalTo( "I say it works!" ) ) );
}
@Mixins( SpeakMixin.class )
public interface Speak
{
String speak();
}
public static class SpeakMixin
implements Speak
{
private final Word word;
@This Punctuation punctuation;
public SpeakMixin( @This Word word )
{
this.word = word;
}
@Override
public String speak()
{
return "I say " + word.get() + punctuation.punctuate();
}
}
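// Note (added; not in the original file): SpeakMixin demonstrates both private
// mixin injection styles exercised by the test above: Word is injected through
// the constructor and Punctuation through the @This-annotated field.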
public interface SpeakComposite
extends Speak, TransientComposite
{
}
@Mixins( WordMixin.class )
public interface Word
{
String get();
}
public static class WordMixin
implements Word
{
@Override
public String get()
{
return "it works";
}
}
@Mixins( PunctuationMixin.class )
public interface Punctuation
{
String punctuate();
}
public static class PunctuationMixin
implements Punctuation
{
@Override
public String punctuate()
{
return "!";
}
}
}
|
apache-2.0
|
apache/portals-pluto
|
portlet-tck_3.0/V2WrapperTests/src/main/java/javax/portlet/tck/portlets/WrapperTests_ActionRequestWrapper_ApiAction.java
|
14705
|
/* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package javax.portlet.tck.portlets;
import static javax.portlet.tck.beans.JSR286ApiTestCaseDetails.V2WRAPPERTESTS_ACTIONREQUESTWRAPPER_APIACTION_GETCHARACTERENCODING;
import static javax.portlet.tck.beans.JSR286ApiTestCaseDetails.V2WRAPPERTESTS_ACTIONREQUESTWRAPPER_APIACTION_GETCONTENTLENGTH;
import static javax.portlet.tck.beans.JSR286ApiTestCaseDetails.V2WRAPPERTESTS_ACTIONREQUESTWRAPPER_APIACTION_GETCONTENTTYPE;
import static javax.portlet.tck.beans.JSR286ApiTestCaseDetails.V2WRAPPERTESTS_ACTIONREQUESTWRAPPER_APIACTION_GETPORTLETINPUTSTREAM;
import static javax.portlet.tck.beans.JSR286ApiTestCaseDetails.V2WRAPPERTESTS_ACTIONREQUESTWRAPPER_APIACTION_GETREADER;
import static javax.portlet.tck.beans.JSR286ApiTestCaseDetails.V2WRAPPERTESTS_ACTIONREQUESTWRAPPER_APIACTION_SETCHARACTERENCODING;
import static javax.portlet.tck.beans.JSR286ApiTestCaseDetails.V2WRAPPERTESTS_ACTIONREQUESTWRAPPER_APIACTION_GETREQUEST;
import static javax.portlet.tck.beans.JSR286ApiTestCaseDetails.V2WRAPPERTESTS_ACTIONREQUESTWRAPPER_APIACTION_SETREQUEST;
import static javax.portlet.tck.beans.JSR286ApiTestCaseDetails.V2WRAPPERTESTS_ACTIONREQUESTWRAPPER_APIACTION_GETMETHOD;
import static javax.portlet.PortletSession.APPLICATION_SCOPE;
import static javax.portlet.tck.constants.Constants.RESULT_ATTR_PREFIX;
import static javax.portlet.tck.constants.Constants.THREADID_ATTR;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
import javax.portlet.ActionRequest;
import javax.portlet.ActionResponse;
import javax.portlet.Portlet;
import javax.portlet.PortletConfig;
import javax.portlet.PortletException;
import javax.portlet.PortletSession;
import javax.portlet.PortletURL;
import javax.portlet.RenderRequest;
import javax.portlet.RenderResponse;
import javax.portlet.filter.ActionRequestWrapper;
import javax.portlet.tck.beans.ActionRequestWrapperChecker;
import javax.portlet.tck.beans.JSR286ApiTestCaseDetails;
import javax.portlet.tck.beans.TestButton;
import javax.portlet.tck.beans.TestResult;
import javax.portlet.tck.constants.Constants;
/**
* This portlet implements several test cases for the JSR 362 TCK. The test case
* names are defined in the /src/main/resources/xml-resources/additionalTCs.xml
* file. The build process will integrate the test case names defined in the
* additionalTCs.xml file into the complete list of test case names for
* execution by the driver.
*
* This is the main portlet for the test cases. If the test cases call for
* events, this portlet will initiate the events, but not process them. The
* processing is done in the companion portlet
* WrapperTests_ActionRequestWrapper_ApiAction_event
*
*/
public class WrapperTests_ActionRequestWrapper_ApiAction implements Portlet {
@Override
public void init(PortletConfig config) throws PortletException {
}
@Override
public void destroy() {
}
@Override
public void processAction(ActionRequest portletReq,
ActionResponse portletResp) throws PortletException, IOException {
portletResp.setRenderParameters(portletReq.getParameterMap());
long tid = Thread.currentThread().getId();
portletReq.setAttribute(THREADID_ATTR, tid);
StringWriter writer = new StringWriter();
ActionRequestWrapperChecker wc = new ActionRequestWrapperChecker(
portletReq);
ActionRequestWrapper wpr = new ActionRequestWrapper(portletReq);
wpr.setRequest(wc);
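// Note (added; not in the original file): the checker is installed as the
// wrapped request, so each test below first records the expected method name
// and arguments via wc.prepare(...) and then verifies, via wc.checkRetval(...),
// that the wrapper delegated the call to the wrapped object.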
JSR286ApiTestCaseDetails tcd = new JSR286ApiTestCaseDetails();
// Create result objects for the tests
/*
* TestCase:
* V2WrapperTests_ActionRequestWrapper_ApiAction_getCharacterEncoding
*/
/* Details: "Method getCharacterEncoding(): Calls wrapped method" */
TestResult tr0 = tcd.getTestResultFailed(
V2WRAPPERTESTS_ACTIONREQUESTWRAPPER_APIACTION_GETCHARACTERENCODING);
try {
Object[] args = {};
wc.prepare(tr0, "getCharacterEncoding", args);
wc.checkRetval(wpr.getCharacterEncoding());
} catch (Exception e) {
tr0.appendTcDetail(e.toString());
}
tr0.writeTo(writer);
/*
* TestCase:
* V2WrapperTests_ActionRequestWrapper_ApiAction_getContentLength
*/
/* Details: "Method getContentLength(): Calls wrapped method" */
TestResult tr1 = tcd.getTestResultFailed(
V2WRAPPERTESTS_ACTIONREQUESTWRAPPER_APIACTION_GETCONTENTLENGTH);
try {
Object[] args = {};
wc.prepare(tr1, "getContentLength", args);
wc.checkRetval(wpr.getContentLength());
} catch (Exception e) {
tr1.appendTcDetail(e.toString());
}
tr1.writeTo(writer);
/*
* TestCase: V2WrapperTests_ActionRequestWrapper_ApiAction_getContentType
*/
/* Details: "Method getContentType(): Calls wrapped method" */
TestResult tr2 = tcd.getTestResultFailed(
V2WRAPPERTESTS_ACTIONREQUESTWRAPPER_APIACTION_GETCONTENTTYPE);
try {
Object[] args = {};
wc.prepare(tr2, "getContentType", args);
wc.checkRetval(wpr.getContentType());
} catch (Exception e) {
tr2.appendTcDetail(e.toString());
}
tr2.writeTo(writer);
/*
* TestCase:
* V2WrapperTests_ActionRequestWrapper_ApiAction_getPortletInputStream
*/
/* Details: "Method getPortletInputStream(): Calls wrapped method" */
TestResult tr3 = tcd.getTestResultFailed(
V2WRAPPERTESTS_ACTIONREQUESTWRAPPER_APIACTION_GETPORTLETINPUTSTREAM);
try {
Object[] args = {};
wc.prepare(tr3, "getPortletInputStream", args);
wc.checkRetval(wpr.getPortletInputStream());
} catch (Exception e) {
tr3.appendTcDetail(e.toString());
}
tr3.writeTo(writer);
/* TestCase: V2WrapperTests_ActionRequestWrapper_ApiAction_getReader */
/* Details: "Method getReader(): Calls wrapped method" */
TestResult tr4 = tcd.getTestResultFailed(
V2WRAPPERTESTS_ACTIONREQUESTWRAPPER_APIACTION_GETREADER);
try {
Object[] args = {};
wc.prepare(tr4, "getReader", args);
wc.checkRetval(wpr.getReader());
} catch (Exception e) {
tr4.appendTcDetail(e.toString());
}
tr4.writeTo(writer);
/*
* TestCase:
* V2WrapperTests_ActionRequestWrapper_ApiAction_setCharacterEncoding
*/
/* Details: "Method setCharacterEncoding(String): Calls wrapped */
/* method" */
TestResult tr5 = tcd.getTestResultFailed(
V2WRAPPERTESTS_ACTIONREQUESTWRAPPER_APIACTION_SETCHARACTERENCODING);
try {
String parm1 = "val1";
Object[] args = { parm1 };
wc.prepare(tr5, "setCharacterEncoding", args);
wpr.setCharacterEncoding(parm1);
} catch (Exception e) {
tr5.appendTcDetail(e.toString());
}
tr5.writeTo(writer);
/* TestCase: V2WrapperTests_ActionRequestWrapper_ApiAction_getRequest */
/* Details: "Method getRequest(): Returns wrapped ActionRequest */
/* object" */
TestResult tr6 = tcd.getTestResultFailed(
V2WRAPPERTESTS_ACTIONREQUESTWRAPPER_APIACTION_GETREQUEST);
try {
// The retrieved request / response object should be the wrapper
// checker instance
ActionRequest r = wpr.getRequest();
tr6.setTcSuccess(r == wc);
} catch (Exception e) {
tr6.appendTcDetail(e.toString());
}
tr6.writeTo(writer);
/* TestCase: V2WrapperTests_ActionRequestWrapper_ApiAction_setRequest */
/* Details: "Method setRequest(ActionRequest): Allows wrapped */
/* ActionRequest object to be set" */
TestResult tr7 = tcd.getTestResultFailed(
V2WRAPPERTESTS_ACTIONREQUESTWRAPPER_APIACTION_SETREQUEST);
try {
// tested by method set up
tr7.setTcSuccess(true);
} catch (Exception e) {
tr7.appendTcDetail(e.toString());
}
tr7.writeTo(writer);
/* TestCase: V2WrapperTests_ActionRequestWrapper_ApiAction_getMethod */
/* Details: "Method getMethod(): Calls wrapped method" */
TestResult tr8 = tcd.getTestResultFailed(
V2WRAPPERTESTS_ACTIONREQUESTWRAPPER_APIACTION_GETMETHOD);
try {
Object[] args = {};
wc.prepare(tr8, "getMethod", args);
wc.checkRetval(wpr.getMethod());
} catch (Exception e) {
tr8.appendTcDetail(e.toString());
}
tr8.writeTo(writer);
portletReq.getPortletSession().setAttribute(
Constants.RESULT_ATTR_PREFIX
+ "WrapperTests_ActionRequestWrapper_ApiAction",
writer.toString(), APPLICATION_SCOPE);
}
@Override
public void render(RenderRequest portletReq, RenderResponse portletResp)
throws PortletException, IOException {
long tid = Thread.currentThread().getId();
portletReq.setAttribute(THREADID_ATTR, tid);
PrintWriter writer = portletResp.getWriter();
PortletSession ps = portletReq.getPortletSession();
String msg = (String) ps.getAttribute(
RESULT_ATTR_PREFIX + "WrapperTests_ActionRequestWrapper_ApiAction",
APPLICATION_SCOPE);
if (msg != null) {
writer.write("<p>" + msg + "</p><br/>\n");
ps.removeAttribute(
RESULT_ATTR_PREFIX
+ "WrapperTests_ActionRequestWrapper_ApiAction",
APPLICATION_SCOPE);
}
/*
* TestCase:
* V2WrapperTests_ActionRequestWrapper_ApiAction_getCharacterEncoding
*/
/* Details: "Method getCharacterEncoding(): Calls wrapped method" */
{
PortletURL aurl = portletResp.createActionURL();
aurl.setParameters(portletReq.getPrivateParameterMap());
TestButton tb = new TestButton(
"V2WrapperTests_ActionRequestWrapper_ApiAction_getCharacterEncoding",
aurl);
tb.writeTo(writer);
}
/*
* TestCase:
* V2WrapperTests_ActionRequestWrapper_ApiAction_getContentLength
*/
/* Details: "Method getContentLength(): Calls wrapped method" */
{
PortletURL aurl = portletResp.createActionURL();
aurl.setParameters(portletReq.getPrivateParameterMap());
TestButton tb = new TestButton(
"V2WrapperTests_ActionRequestWrapper_ApiAction_getContentLength",
aurl);
tb.writeTo(writer);
}
/*
* TestCase: V2WrapperTests_ActionRequestWrapper_ApiAction_getContentType
*/
/* Details: "Method getContentType(): Calls wrapped method" */
{
PortletURL aurl = portletResp.createActionURL();
aurl.setParameters(portletReq.getPrivateParameterMap());
TestButton tb = new TestButton(
"V2WrapperTests_ActionRequestWrapper_ApiAction_getContentType",
aurl);
tb.writeTo(writer);
}
/*
* TestCase:
* V2WrapperTests_ActionRequestWrapper_ApiAction_getPortletInputStream
*/
/* Details: "Method getPortletInputStream(): Calls wrapped method" */
{
PortletURL aurl = portletResp.createActionURL();
aurl.setParameters(portletReq.getPrivateParameterMap());
TestButton tb = new TestButton(
"V2WrapperTests_ActionRequestWrapper_ApiAction_getPortletInputStream",
aurl);
tb.writeTo(writer);
}
/* TestCase: V2WrapperTests_ActionRequestWrapper_ApiAction_getReader */
/* Details: "Method getReader(): Calls wrapped method" */
{
PortletURL aurl = portletResp.createActionURL();
aurl.setParameters(portletReq.getPrivateParameterMap());
TestButton tb = new TestButton(
"V2WrapperTests_ActionRequestWrapper_ApiAction_getReader", aurl);
tb.writeTo(writer);
}
/*
* TestCase:
* V2WrapperTests_ActionRequestWrapper_ApiAction_setCharacterEncoding
*/
/* Details: "Method setCharacterEncoding(String): Calls wrapped */
/* method" */
{
PortletURL aurl = portletResp.createActionURL();
aurl.setParameters(portletReq.getPrivateParameterMap());
TestButton tb = new TestButton(
"V2WrapperTests_ActionRequestWrapper_ApiAction_setCharacterEncoding",
aurl);
tb.writeTo(writer);
}
/* TestCase: V2WrapperTests_ActionRequestWrapper_ApiAction_getRequest */
/* Details: "Method getRequest(): Returns wrapped ActionRequest */
/* object" */
{
PortletURL aurl = portletResp.createActionURL();
aurl.setParameters(portletReq.getPrivateParameterMap());
TestButton tb = new TestButton(
"V2WrapperTests_ActionRequestWrapper_ApiAction_getRequest",
aurl);
tb.writeTo(writer);
}
/* TestCase: V2WrapperTests_ActionRequestWrapper_ApiAction_setRequest */
/* Details: "Method setRequest(ActionRequest): Allows wrapped */
/* ActionRequest object to be set" */
{
PortletURL aurl = portletResp.createActionURL();
aurl.setParameters(portletReq.getPrivateParameterMap());
TestButton tb = new TestButton(
"V2WrapperTests_ActionRequestWrapper_ApiAction_setRequest",
aurl);
tb.writeTo(writer);
}
/* TestCase: V2WrapperTests_ActionRequestWrapper_ApiAction_getMethod */
/* Details: "Method getMethod(): Calls wrapped method" */
{
PortletURL aurl = portletResp.createActionURL();
aurl.setParameters(portletReq.getPrivateParameterMap());
TestButton tb = new TestButton(
"V2WrapperTests_ActionRequestWrapper_ApiAction_getMethod", aurl);
tb.writeTo(writer);
}
}
}
|
apache-2.0
|
alexkuk/elasticsearch
|
core/src/main/java/org/elasticsearch/gateway/GatewayAllocator.java
|
30497
|
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.gateway;
import com.carrotsearch.hppc.ObjectLongHashMap;
import com.carrotsearch.hppc.cursors.ObjectCursor;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import org.apache.lucene.util.CollectionUtil;
import org.elasticsearch.action.support.nodes.BaseNodeResponse;
import org.elasticsearch.action.support.nodes.BaseNodesResponse;
import org.elasticsearch.cluster.*;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.routing.MutableShardRouting;
import org.elasticsearch.cluster.routing.RoutingNode;
import org.elasticsearch.cluster.routing.RoutingNodes;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.routing.allocation.AllocationService;
import org.elasticsearch.cluster.routing.allocation.FailedRerouteAllocation;
import org.elasticsearch.cluster.routing.allocation.RoutingAllocation;
import org.elasticsearch.cluster.routing.allocation.StartedRerouteAllocation;
import org.elasticsearch.cluster.routing.allocation.decider.Decision;
import org.elasticsearch.common.Priority;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
import org.elasticsearch.index.settings.IndexSettings;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.store.StoreFileMetaData;
import org.elasticsearch.indices.store.TransportNodesListShardStoreMetaData;
import java.util.*;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicBoolean;
/**
 * Allocates unassigned shards by reusing existing on-disk copies discovered
 * through the gateway (started-shard state and store metadata fetches).
 */
public class GatewayAllocator extends AbstractComponent {
public static final String INDEX_RECOVERY_INITIAL_SHARDS = "index.recovery.initial_shards";
private final String initialShards;
private final TransportNodesListGatewayStartedShards startedAction;
private final TransportNodesListShardStoreMetaData storeAction;
private ClusterService clusterService;
private AllocationService allocationService;
private final ConcurrentMap<ShardId, AsyncShardFetch<TransportNodesListGatewayStartedShards.NodeGatewayStartedShards>> asyncFetchStarted = ConcurrentCollections.newConcurrentMap();
private final ConcurrentMap<ShardId, AsyncShardFetch<TransportNodesListShardStoreMetaData.NodeStoreFilesMetaData>> asyncFetchStore = ConcurrentCollections.newConcurrentMap();
@Inject
public GatewayAllocator(Settings settings, TransportNodesListGatewayStartedShards startedAction, TransportNodesListShardStoreMetaData storeAction) {
super(settings);
this.startedAction = startedAction;
this.storeAction = storeAction;
this.initialShards = settings.get("gateway.initial_shards", settings.get("gateway.local.initial_shards", "quorum"));
logger.debug("using initial_shards [{}]", initialShards);
}
public void setReallocation(final ClusterService clusterService, final AllocationService allocationService) {
this.clusterService = clusterService;
this.allocationService = allocationService;
clusterService.add(new ClusterStateListener() {
@Override
public void clusterChanged(ClusterChangedEvent event) {
boolean cleanCache = false;
DiscoveryNode localNode = event.state().nodes().localNode();
if (localNode != null) {
if (localNode.masterNode() == true && event.localNodeMaster() == false) {
cleanCache = true;
}
} else {
cleanCache = true;
}
if (cleanCache) {
Releasables.close(asyncFetchStarted.values());
asyncFetchStarted.clear();
Releasables.close(asyncFetchStore.values());
asyncFetchStore.clear();
}
}
});
}
public int getNumberOfInFlightFetch() {
int count = 0;
for (AsyncShardFetch<TransportNodesListGatewayStartedShards.NodeGatewayStartedShards> fetch : asyncFetchStarted.values()) {
count += fetch.getNumberOfInFlightFetches();
}
for (AsyncShardFetch<TransportNodesListShardStoreMetaData.NodeStoreFilesMetaData> fetch : asyncFetchStore.values()) {
count += fetch.getNumberOfInFlightFetches();
}
return count;
}
public void applyStartedShards(StartedRerouteAllocation allocation) {
for (ShardRouting shard : allocation.startedShards()) {
Releasables.close(asyncFetchStarted.remove(shard.shardId()));
Releasables.close(asyncFetchStore.remove(shard.shardId()));
}
}
public void applyFailedShards(FailedRerouteAllocation allocation) {
for (FailedRerouteAllocation.FailedShard shard : allocation.failedShards()) {
Releasables.close(asyncFetchStarted.remove(shard.shard.shardId()));
Releasables.close(asyncFetchStore.remove(shard.shard.shardId()));
}
}
/**
* Return {@code true} if the index is configured to allow shards to be
* recovered on any node
*/
private boolean recoverOnAnyNode(@IndexSettings Settings idxSettings) {
return IndexMetaData.isOnSharedFilesystem(idxSettings) &&
idxSettings.getAsBoolean(IndexMetaData.SETTING_SHARED_FS_ALLOW_RECOVERY_ON_ANY_NODE, false);
}
public boolean allocateUnassigned(RoutingAllocation allocation) {
boolean changed = false;
DiscoveryNodes nodes = allocation.nodes();
RoutingNodes routingNodes = allocation.routingNodes();
// First, handle primaries, they must find a place to be allocated on here
MetaData metaData = routingNodes.metaData();
Iterator<MutableShardRouting> unassignedIterator = routingNodes.unassigned().iterator();
while (unassignedIterator.hasNext()) {
MutableShardRouting shard = unassignedIterator.next();
if (!shard.primary()) {
continue;
}
// this is an API allocation, ignore since we know there is no data...
if (!routingNodes.routingTable().index(shard.index()).shard(shard.id()).primaryAllocatedPostApi()) {
continue;
}
AsyncShardFetch<TransportNodesListGatewayStartedShards.NodeGatewayStartedShards> fetch = asyncFetchStarted.get(shard.shardId());
if (fetch == null) {
fetch = new InternalAsyncFetch<>(logger, "shard_started", shard.shardId(), startedAction);
asyncFetchStarted.put(shard.shardId(), fetch);
}
AsyncShardFetch.FetchResult<TransportNodesListGatewayStartedShards.NodeGatewayStartedShards> shardState = fetch.fetchData(nodes, metaData, allocation.getIgnoreNodes(shard.shardId()));
if (shardState.hasData() == false) {
logger.trace("{}: ignoring allocation, still fetching shard started state", shard);
unassignedIterator.remove();
routingNodes.ignoredUnassigned().add(shard);
continue;
}
shardState.processAllocation(allocation);
IndexMetaData indexMetaData = metaData.index(shard.getIndex());
/**
* Build a map of DiscoveryNodes to shard state number for the given shard.
* A state of -1 means the shard does not exist on the node, where any
* shard state >= 0 is the state version of the shard on that node's disk.
*
* A shard on shared storage will return at least shard state 0 for all
* nodes, indicating that the shard can be allocated to any node.
*/
ObjectLongHashMap<DiscoveryNode> nodesState = new ObjectLongHashMap<>();
for (TransportNodesListGatewayStartedShards.NodeGatewayStartedShards nodeShardState : shardState.getData().values()) {
long version = nodeShardState.version();
// a version of -1 means the shard does not exist on the node, which is what the API returns, and what we expect too
logger.trace("[{}] on node [{}] has version [{}] of shard", shard, nodeShardState.getNode(), version);
nodesState.put(nodeShardState.getNode(), version);
}
int numberOfAllocationsFound = 0;
long highestVersion = -1;
final Map<DiscoveryNode, Long> nodesWithVersion = Maps.newHashMap();
assert !nodesState.containsKey(null);
final Object[] keys = nodesState.keys;
final long[] values = nodesState.values;
Settings idxSettings = indexMetaData.settings();
for (int i = 0; i < keys.length; i++) {
if (keys[i] == null) {
continue;
}
DiscoveryNode node = (DiscoveryNode) keys[i];
long version = values[i];
// since we don't check in NO allocation, we need to double check here
if (allocation.shouldIgnoreShardForNode(shard.shardId(), node.id())) {
continue;
}
if (recoverOnAnyNode(idxSettings)) {
numberOfAllocationsFound++;
if (version > highestVersion) {
highestVersion = version;
}
// We always put the node without clearing the map
nodesWithVersion.put(node, version);
} else if (version != -1) {
numberOfAllocationsFound++;
// If we've found a new "best" candidate, clear the
// current candidates and add it
if (version > highestVersion) {
highestVersion = version;
nodesWithVersion.clear();
nodesWithVersion.put(node, version);
} else if (version == highestVersion) {
// If the candidate is the same, add it to the
// list, but keep the current candidate
nodesWithVersion.put(node, version);
}
}
}
// Now that we have a map of nodes to versions along with the
// number of allocations found (and not ignored), we need to sort
// it so the node with the highest version is at the beginning
List<DiscoveryNode> nodesWithHighestVersion = Lists.newArrayList();
nodesWithHighestVersion.addAll(nodesWithVersion.keySet());
CollectionUtil.timSort(nodesWithHighestVersion, new Comparator<DiscoveryNode>() {
@Override
public int compare(DiscoveryNode o1, DiscoveryNode o2) {
return Long.compare(nodesWithVersion.get(o2), nodesWithVersion.get(o1));
}
});
if (logger.isDebugEnabled()) {
logger.debug("[{}][{}] found {} allocations of {}, highest version: [{}]",
shard.index(), shard.id(), numberOfAllocationsFound, shard, highestVersion);
}
if (logger.isTraceEnabled()) {
StringBuilder sb = new StringBuilder("[");
for (DiscoveryNode n : nodesWithHighestVersion) {
sb.append("[");
sb.append(n.getName());
sb.append("]");
sb.append(" -> ");
sb.append(nodesWithVersion.get(n));
sb.append(", ");
}
sb.append("]");
logger.trace("{} candidates for allocation: {}", shard, sb.toString());
}
// check if the counts meets the minimum set
int requiredAllocation = 1;
// if we restore from a repository, one copy is more than enough
if (shard.restoreSource() == null) {
try {
String initialShards = indexMetaData.settings().get(INDEX_RECOVERY_INITIAL_SHARDS, settings.get(INDEX_RECOVERY_INITIAL_SHARDS, this.initialShards));
if ("quorum".equals(initialShards)) {
if (indexMetaData.numberOfReplicas() > 1) {
requiredAllocation = ((1 + indexMetaData.numberOfReplicas()) / 2) + 1;
}
} else if ("quorum-1".equals(initialShards) || "half".equals(initialShards)) {
if (indexMetaData.numberOfReplicas() > 2) {
requiredAllocation = ((1 + indexMetaData.numberOfReplicas()) / 2);
}
} else if ("one".equals(initialShards)) {
requiredAllocation = 1;
} else if ("full".equals(initialShards) || "all".equals(initialShards)) {
requiredAllocation = indexMetaData.numberOfReplicas() + 1;
} else if ("full-1".equals(initialShards) || "all-1".equals(initialShards)) {
if (indexMetaData.numberOfReplicas() > 1) {
requiredAllocation = indexMetaData.numberOfReplicas();
}
} else {
requiredAllocation = Integer.parseInt(initialShards);
}
} catch (Exception e) {
logger.warn("[{}][{}] failed to derived initial_shards from value {}, ignore allocation for {}", shard.index(), shard.id(), initialShards, shard);
}
}
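// Worked example (added; not in the original file): with the default
// "quorum" setting and 2 replicas, requiredAllocation = ((1 + 2) / 2) + 1 = 2,
// so at least two on-disk copies must have been found for the primary
// to be allocated below.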
// not enough found for this shard, continue...
if (numberOfAllocationsFound < requiredAllocation) {
// if we are restoring this shard we still can allocate
if (shard.restoreSource() == null) {
// we can't really allocate, so ignore it and continue
unassignedIterator.remove();
routingNodes.ignoredUnassigned().add(shard);
if (logger.isDebugEnabled()) {
logger.debug("[{}][{}]: not allocating, number_of_allocated_shards_found [{}], required_number [{}]", shard.index(), shard.id(), numberOfAllocationsFound, requiredAllocation);
}
} else if (logger.isDebugEnabled()) {
logger.debug("[{}][{}]: missing local data, will restore from [{}]", shard.index(), shard.id(), shard.restoreSource());
}
continue;
}
Set<DiscoveryNode> throttledNodes = Sets.newHashSet();
Set<DiscoveryNode> noNodes = Sets.newHashSet();
for (DiscoveryNode discoNode : nodesWithHighestVersion) {
RoutingNode node = routingNodes.node(discoNode.id());
if (node == null) {
continue;
}
Decision decision = allocation.deciders().canAllocate(shard, node, allocation);
if (decision.type() == Decision.Type.THROTTLE) {
throttledNodes.add(discoNode);
} else if (decision.type() == Decision.Type.NO) {
noNodes.add(discoNode);
} else {
if (logger.isDebugEnabled()) {
logger.debug("[{}][{}]: allocating [{}] to [{}] on primary allocation", shard.index(), shard.id(), shard, discoNode);
}
// we found a match
changed = true;
// make sure we create one with the version from the recovered state
allocation.routingNodes().assign(new MutableShardRouting(shard, highestVersion), node.nodeId());
unassignedIterator.remove();
// found a node, so no throttling, no "no", and break out of the loop
throttledNodes.clear();
noNodes.clear();
break;
}
}
if (throttledNodes.isEmpty()) {
// if we have a node that we "can't" allocate to, force allocation, since this is our master data!
if (!noNodes.isEmpty()) {
DiscoveryNode discoNode = noNodes.iterator().next();
RoutingNode node = routingNodes.node(discoNode.id());
if (logger.isDebugEnabled()) {
logger.debug("[{}][{}]: forcing allocating [{}] to [{}] on primary allocation", shard.index(), shard.id(), shard, discoNode);
}
// we found a match
changed = true;
// make sure we create one with the version from the recovered state
allocation.routingNodes().assign(new MutableShardRouting(shard, highestVersion), node.nodeId());
unassignedIterator.remove();
}
} else {
if (logger.isDebugEnabled()) {
logger.debug("[{}][{}]: throttling allocation [{}] to [{}] on primary allocation", shard.index(), shard.id(), shard, throttledNodes);
}
// we are throttling this, but we have enough to allocate to this node, ignore it for now
unassignedIterator.remove();
routingNodes.ignoredUnassigned().add(shard);
}
}
if (!routingNodes.hasUnassigned()) {
return changed;
}
// Now, handle replicas, try to assign them to nodes that are similar to the one the primary was allocated on
unassignedIterator = routingNodes.unassigned().iterator();
while (unassignedIterator.hasNext()) {
MutableShardRouting shard = unassignedIterator.next();
if (shard.primary()) {
continue;
}
// pre-check if it can be allocated to any node that currently exists, so we won't list the store for it for nothing
boolean canBeAllocatedToAtLeastOneNode = false;
for (ObjectCursor<DiscoveryNode> cursor : nodes.dataNodes().values()) {
RoutingNode node = routingNodes.node(cursor.value.id());
if (node == null) {
continue;
}
// if we can't allocate it on a node, ignore it, for example, this handles
// cases for only allocating a replica after a primary
Decision decision = allocation.deciders().canAllocate(shard, node, allocation);
if (decision.type() == Decision.Type.YES) {
canBeAllocatedToAtLeastOneNode = true;
break;
}
}
if (!canBeAllocatedToAtLeastOneNode) {
logger.trace("{}: ignoring allocation, can't be allocated on any node", shard);
unassignedIterator.remove();
routingNodes.ignoredUnassigned().add(shard);
continue;
}
AsyncShardFetch<TransportNodesListShardStoreMetaData.NodeStoreFilesMetaData> fetch = asyncFetchStore.get(shard.shardId());
if (fetch == null) {
fetch = new InternalAsyncFetch<>(logger, "shard_store", shard.shardId(), storeAction);
asyncFetchStore.put(shard.shardId(), fetch);
}
AsyncShardFetch.FetchResult<TransportNodesListShardStoreMetaData.NodeStoreFilesMetaData> shardStores = fetch.fetchData(nodes, metaData, allocation.getIgnoreNodes(shard.shardId()));
if (shardStores.hasData() == false) {
logger.trace("{}: ignoring allocation, still fetching shard stores", shard);
unassignedIterator.remove();
routingNodes.ignoredUnassigned().add(shard);
continue; // still fetching
}
shardStores.processAllocation(allocation);
long lastSizeMatched = 0;
DiscoveryNode lastDiscoNodeMatched = null;
RoutingNode lastNodeMatched = null;
boolean hasReplicaData = false;
IndexMetaData indexMetaData = metaData.index(shard.getIndex());
for (Map.Entry<DiscoveryNode, TransportNodesListShardStoreMetaData.NodeStoreFilesMetaData> nodeStoreEntry : shardStores.getData().entrySet()) {
DiscoveryNode discoNode = nodeStoreEntry.getKey();
TransportNodesListShardStoreMetaData.StoreFilesMetaData storeFilesMetaData = nodeStoreEntry.getValue().storeFilesMetaData();
logger.trace("{}: checking node [{}]", shard, discoNode);
if (storeFilesMetaData == null) {
// already allocated on that node...
continue;
}
RoutingNode node = routingNodes.node(discoNode.id());
if (node == null) {
continue;
}
// check if we can allocate on that node...
// we only check for NO, since if this node is THROTTLING and it has enough "same data"
// then we will try and assign it next time
Decision decision = allocation.deciders().canAllocate(shard, node, allocation);
if (decision.type() == Decision.Type.NO) {
continue;
}
// if it is already allocated, we can't assign to it...
if (storeFilesMetaData.allocated()) {
continue;
}
if (!shard.primary()) {
hasReplicaData |= storeFilesMetaData.iterator().hasNext();
MutableShardRouting primaryShard = routingNodes.activePrimary(shard);
if (primaryShard != null) {
assert primaryShard.active();
DiscoveryNode primaryNode = nodes.get(primaryShard.currentNodeId());
if (primaryNode != null) {
TransportNodesListShardStoreMetaData.NodeStoreFilesMetaData primaryNodeFilesStore = shardStores.getData().get(primaryNode);
if (primaryNodeFilesStore != null) {
TransportNodesListShardStoreMetaData.StoreFilesMetaData primaryNodeStore = primaryNodeFilesStore.storeFilesMetaData();
if (primaryNodeStore != null && primaryNodeStore.allocated()) {
long sizeMatched = 0;
String primarySyncId = primaryNodeStore.syncId();
String replicaSyncId = storeFilesMetaData.syncId();
// see if we have a sync id we can make use of
if (replicaSyncId != null && replicaSyncId.equals(primarySyncId)) {
logger.trace("{}: node [{}] has same sync id {} as primary", shard, discoNode.name(), replicaSyncId);
lastNodeMatched = node;
lastSizeMatched = Long.MAX_VALUE;
lastDiscoNodeMatched = discoNode;
} else {
for (StoreFileMetaData storeFileMetaData : storeFilesMetaData) {
String metaDataFileName = storeFileMetaData.name();
if (primaryNodeStore.fileExists(metaDataFileName) && primaryNodeStore.file(metaDataFileName).isSame(storeFileMetaData)) {
sizeMatched += storeFileMetaData.length();
}
}
logger.trace("{}: node [{}] has [{}/{}] bytes of re-usable data",
shard, discoNode.name(), new ByteSizeValue(sizeMatched), sizeMatched);
if (sizeMatched > lastSizeMatched) {
lastSizeMatched = sizeMatched;
lastDiscoNodeMatched = discoNode;
lastNodeMatched = node;
}
}
}
}
}
}
}
}
if (lastNodeMatched != null) {
// we only check on THROTTLE since we already checked on NO above
Decision decision = allocation.deciders().canAllocate(shard, lastNodeMatched, allocation);
if (decision.type() == Decision.Type.THROTTLE) {
if (logger.isDebugEnabled()) {
logger.debug("[{}][{}]: throttling allocation [{}] to [{}] in order to reuse its unallocated persistent store with total_size [{}]", shard.index(), shard.id(), shard, lastDiscoNodeMatched, new ByteSizeValue(lastSizeMatched));
}
// we are throttling this, but we have enough to allocate to this node, ignore it for now
unassignedIterator.remove();
routingNodes.ignoredUnassigned().add(shard);
} else {
if (logger.isDebugEnabled()) {
logger.debug("[{}][{}]: allocating [{}] to [{}] in order to reuse its unallocated persistent store with total_size [{}]", shard.index(), shard.id(), shard, lastDiscoNodeMatched, new ByteSizeValue(lastSizeMatched));
}
// we found a match
changed = true;
allocation.routingNodes().assign(shard, lastNodeMatched.nodeId());
unassignedIterator.remove();
}
} else if (hasReplicaData == false) {
// if we didn't manage to find *any* data (regardless of matching sizes), check if the allocation
// of the replica shard needs to be delayed, and if so, add it to the ignore unassigned list
// note: we only care about replica in delayed allocation, since if we have an unassigned primary it
// will anyhow wait to find an existing copy of the shard to be allocated
// note: the other side of the equation is scheduling a reroute in a timely manner, which happens in the RoutingService
long delay = shard.unassignedInfo().getDelayAllocationExpirationIn(settings, indexMetaData.getSettings());
if (delay > 0) {
logger.debug("[{}][{}]: delaying allocation of [{}] for [{}]", shard.index(), shard.id(), shard, TimeValue.timeValueMillis(delay));
/**
* mark it as changed, since we want to kick a publishing to schedule future allocation,
* see {@link org.elasticsearch.cluster.routing.RoutingService#clusterChanged(ClusterChangedEvent)}).
*/
changed = true;
unassignedIterator.remove();
routingNodes.ignoredUnassigned().add(shard);
}
}
}
return changed;
}
private final AtomicBoolean rerouting = new AtomicBoolean();
class InternalAsyncFetch<T extends BaseNodeResponse> extends AsyncShardFetch<T> {
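// Note (added; not in the original file): the two-parameter List in the
// constructor below appears to be the nested AsyncShardFetch.List callback
// interface, which shadows java.util.List from the wildcard import above.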
public InternalAsyncFetch(ESLogger logger, String type, ShardId shardId, List<? extends BaseNodesResponse<T>, T> action) {
super(logger, type, shardId, action);
}
@Override
protected void reroute(ShardId shardId, String reason) {
if (rerouting.compareAndSet(false, true) == false) {
logger.trace("{} already has pending reroute, ignoring {}", shardId, reason);
return;
}
clusterService.submitStateUpdateTask("async_shard_fetch", Priority.HIGH, new ClusterStateUpdateTask() {
@Override
public ClusterState execute(ClusterState currentState) throws Exception {
rerouting.set(false);
if (currentState.nodes().masterNode() == null) {
return currentState;
}
RoutingAllocation.Result routingResult = allocationService.reroute(currentState);
if (!routingResult.changed()) {
return currentState;
}
return ClusterState.builder(currentState).routingResult(routingResult).build();
}
@Override
public void onFailure(String source, Throwable t) {
rerouting.set(false);
logger.warn("failed to perform reroute post async fetch for {}", t, source);
}
});
}
}
}
|
apache-2.0
|
RodrigoSilv4/web
|
web%200.2/paginas/ranking1.php
|
7244
|
<?php
include 'rodrigosilvaconfig.php';
?>
<!doctype html>
<html>
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, maximum-scale=1">
<title><?=$title?></title>
<link rel="icon" href="../PBSuicida/favicon.png" type="image/png">
<link rel="shortcut icon" href="#" type="image/x-icon">
<meta itemprop="name" content="<?= $pg_title; ?>"/>
<meta itemprop="description" content="#"/>
<meta property="og:description" content="#">
<meta itemprop="image" content="img/logo-branca.png"/>
<!-- GOOGLE -->
<meta name="google-site-verification" content="GOo0-in0bq4CHUR8m_eFhpSDpZWJXPR2Tm6TA64nsn8" />
<link href='http://fonts.googleapis.com/css?family=Montserrat:400,700' rel='stylesheet' type='text/css'>
<link href='http://fonts.googleapis.com/css?family=Open+Sans:400,300,800italic,700italic,600italic,400italic,300italic,800,700,600' rel='stylesheet' type='text/css'>
<link href="../css/bootstrap.css" rel="stylesheet" type="text/css">
<link href="../css/style.css" rel="stylesheet" type="text/css">
<link href="../css/font-awesome.css" rel="stylesheet" type="text/css">
<link href="../css/responsive.css" rel="stylesheet" type="text/css">
<link href="../css/animate.css" rel="stylesheet" type="text/css">
<link href="../css/remodal.css" rel="stylesheet" type="text/css">
<link href="../css/remodal-default-theme.css" rel="stylesheet" type="text/css">
<!--[if IE]><style type="text/css">.pie {behavior:url(PIE.htc);}</style><![endif]-->
<script type="text/javascript" src="../PBSuicida/js/jquery.1.8.3.min.js"></script>
<script type="text/javascript" src="../PBSuicida/js/bootstrap.js"></script>
<script type="text/javascript" src="../PBSuicida/js/jquery-scrolltofixed.js"></script>
<script type="text/javascript" src="../PBSuicida/js/jquery.easing.1.3.js"></script>
<script type="text/javascript" src="../PBSuicida/js/jquery.isotope.js"></script>
<script type="text/javascript" src="../PBSuicida/js/wow.js"></script>
<script type="text/javascript" src="../PBSuicida/js/classie.js"></script>
<script type="text/javascript" src="../PBSuicida/js/geral.js"></script>
<script type="text/javascript" src="../PBSuicida/js/remodal.min.js"></script>
<!--[if lt IE 9]>
<script src="js/respond-1.1.0.min.js"></script>
<script src="js/html5shiv.js"></script>
<script src="js/html5element.js"></script>
<![endif]-->
<script src="http://www.w3schools.com/lib/w3data.js"></script>
<div w3-include-html="WebConfigR.php"></div>
</head>
<body>
<div class="floating-alert-messages"></div>
<div style="overflow:hidden;">
<header class="header" id="header"><!--header-start-->
<div class="container">
<figure class="logo animated fadeInDown delay-07s">
<a href="#"><img src="../img/logo.png" alt="" width="126" height="101"></a>
</figure>
<!--
<h1 class="animated fadeInDown delay-07s">SITES QUE SE ADAPTAM A TODOS OS APARELHOS</h1>
<ul class="we-create animated fadeInUp delay-1s">
<li>ALCANCE CLIENTES EM TODAS AS PLATAFORMAS, COMO SMARTPHONE, TABLET, NOTEBOOK E DESKTOP.</li>
</ul>
<h1 class="animated fadeInDown delay-07s">O SEU SITE FEITO A MÃO</h1>
<ul class="we-create animated fadeInUp delay-1s">
<li>UM SITE BEM FEITO É COMO UMA OBRA DE ARTE ÚNICA E EXCLUSIVA.</li>
</ul>
<h1 class="animated fadeInDown delay-07s">MARKETING DIGITAL</h1>
<ul class="we-create animated fadeInUp delay-1s">
<li>DIVULGUE SUA MARCA NA INTERNET E TENHA RESULTADOS EXPRESSIVOS EM SEU NEGÓCIO.</li>
</ul>
-->
<section class="slide">
<article class="slide_item first">
<h1 class="animated fadeInDown delay-07s"><?=$title?></h1>
</article>
<article class="slide_item">
<h1 class="animated fadeInDown delay-07s">O seu site feito a mão</h1>
<ul class="we-create animated fadeInUp delay-1s">
<li>UM SITE BEM FEITO É COMO UMA OBRA DE ARTE ÚNICA E EXCLUSIVA.</li>
</ul>
</article>
<article class="slide_item">
<h1 class="animated fadeInDown delay-07s">Marketing Digital</h1>
<ul class="we-create animated fadeInUp delay-1s">
<li>DIVULGUE SUA MARCA NA INTERNET E TENHA RESULTADOS EXPRESSIVOS EM SEU NEGÓCIO.</li>
</ul>
</article>
</section>
<a data-remodal-target="modal" class="link animated fadeInUp delay-1s" href="#">RC:<?=$rc?></a>
</div>
</header><!--header-end-->
<nav class="main-nav-outer" id="test"><!--main-nav-start-->
<div class="container">
<ul class="main-nav">
<li><a href="index.html">Home</a></li>
<li><a href="../paginas/download.php">Download</a></li>
<li class="small-logo"><a href="#"><img src="../img/logo.png" alt=""></a></li>
<li><a href="../paginas/php">Nossa Team</a></li>
<li><a href="../paginas/ranking.php">Ranking</a></li>
</ul>
<a class="res-nav_click" href="#"><i class="fa-bars"></i></a>
</div>
</nav><!--main-nav-end-->
<!--main-section team-start-->
<section class="main-section team" id="team">
<div class="container"><br>
<br>
<h2>RANKING<br>
<iframe width='20' height='20' frameborder='0' src='http://127.0.0.1/WebConfigR.php'></iframe></h2>
<div class="team-leader-block clearfix">
<iframe width='700' height='500' frameborder='0' src='http://127.0.0.1/paginas/ranking.php'></iframe>
</div>
<div class="team-leader-block clearfix">
<div class="team-leader-box"></div>
</div>
</div>
</section><!--main-section team-end-->
</div>
<div class="container">
<section class="main-section contact" id="contact">
<div class="row"></div>
</section>
</div>
<footer class="footer">
<div class="container">
<div class="footer-logo"><a href="#"><img src="img/logo.png" alt=""></a></div>
<p>By RodrigoSilva@06/10/2016</p>
</div>
</footer>
<script>
(function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){
(i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o),
m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m)
})(window,document,'script','https://www.google-analytics.com/analytics.js','ga');
ga('create', 'UA-84861322-1', 'auto');
ga('send', 'pageview');
</script>
</body>
</html>
|
apache-2.0
|
apache/portals-pluto
|
portlet-tck_3.0/V3PortletHubTests/src/main/webapp/javascript/PortletHubTests_SPEC_23_JSListener.js
|
10391
|
/* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
(function() {
'use strict';
var portletName = 'PortletHubTests_SPEC_23_JSListener', msg;
function setSuccess (tc, fail) {
var el;
el = document.getElementById(tc + '-async');
if (el !== null) {
if (fail) {
el.innerHTML = fail;
} else {
el.innerHTML = 'Test Succeeded';
}
el.id = tc + '-result';
}
}
function testException (func, type) {
var ok = null;
try {
func();
ok = 'Function did not throw exception';
} catch (e) {
if (e.name !== type) {
ok = 'Invalid exception type: ' + e.name;
}
}
return ok;
}
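// Deep-compares two portlet parameter objects (string -> string[] maps); returns null when equal, else a failure message.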
function checkParams (p1, p2) {
var fail = null, key, keys = Object.keys(p1), ii, jj;
if (keys.length !== Object.keys(p2).length) {
fail = 'Cloned parameters have different numbers of keys';
} else {
for (ii = 0; ii < keys.length && fail === null; ii++) {
key = keys[ii];
if (!Array.isArray(p1[key]) || !Array.isArray(p2[key])) {
fail = 'Parameter value is not array for key: ' + key;
break;
}
if (p1[key].length !== p2[key].length) {
fail = 'Value arrays not equal for key: ' + key;
break;
}
for (jj = 0; jj < p1[key].length; jj++) {
if (p1[key][jj] !== p2[key][jj]) {
fail = 'Value arrays not equal for key: ' + key;
break;
}
}
}
}
return fail;
}
function execute () {
var update, testFunction, hub, handle, errorHandle,
pid = tck.PortletHubTests_SPEC_23_JSListener.pid;
update = function (type, state) {
var msg;
/* TestCase: V3PortletHubTests_SPEC_23_JSListener_onStateChange1 */
/* Details: "After an onStateChange listener is added, the portlet hub calls */
/* the onStateChange function" */
msg = null;
msg = (type) ? msg : 'Type is null or undefined.';
msg = (state) ? msg : msg + ' State is null or undefined.';
setSuccess('V3PortletHubTests_SPEC_23_JSListener_onStateChange1', msg);
/* TestCase: V3PortletHubTests_SPEC_23_JSListener_onStateChange2 */
/* Details: "The onStateChange listener type argument has the value */
/* 'portlet.onStateChange'" */
msg = null;
if (!type || (typeof type !== 'string')){
msg = 'Type is not a string, but instead: ' + typeof type;
} else if (type !== 'portlet.onStateChange') {
msg = 'type is not equal to portlet.onStateChange, but to: ' + type;
}
setSuccess('V3PortletHubTests_SPEC_23_JSListener_onStateChange2', msg);
/* TestCase: V3PortletHubTests_SPEC_23_JSListener_onStateChange3 */
/* Details: "The onStateChange listener renderState argument is a valid */
/* RenderState object" */
msg = null;
if (typeof state !== 'object') {
msg = 'State is not of type object';
} else if (typeof state.clone !== 'function') {
msg = 'State object clone function is not a function';
} else if (typeof state.setPortletMode !== 'function') {
msg = 'State object setPortletMode function is not a function';
} else if (typeof state.getPortletMode !== 'function') {
msg = 'State object getPortletMode function is not a function';
} else if (typeof state.setWindowState !== 'function') {
msg = 'State object setWindowState function is not a function';
} else if (typeof state.getWindowState !== 'function') {
msg = 'State object getWindowState function is not a function';
} else if (typeof state.setValue !== 'function') {
msg = 'State object setValue function is not a function';
} else if (typeof state.getValue !== 'function') {
msg = 'State object getValue function is not a function';
} else if (typeof state.getValues !== 'function') {
msg = 'State object getValues function is not a function';
} else if (typeof state.remove !== 'function') {
msg = 'State object remove function is not a function';
} else if (typeof state.parameters !== 'object') {
msg = 'State object parameters is not of type object';
} else if (typeof state.portletMode !== 'string') {
msg = 'State object portletMode is not of type string';
} else if (typeof state.windowState !== 'string') {
msg = 'State object windowState is not of type string';
}
setSuccess('V3PortletHubTests_SPEC_23_JSListener_onStateChange3', msg);
}
portlet.register(pid).then(function (pi) {
hub = pi;
handle = hub.addEventListener('portlet.onStateChange', update);
});
/* TestCase: V3PortletHubTests_SPEC_23_JSListener_onStateChange4 */
/* Details: "A previously added onStateChangeListener can be removed" */
document.getElementById('V3PortletHubTests_SPEC_23_JSListener_onStateChange4-clickme').onclick = function () {
msg = null;
if (!handle) {
msg = 'onStateChange event listener is null or undefined.';
} else {
try {
hub.removeEventListener(handle);
} catch (e) {
msg = 'Error was thrown removing the listener: ' + e.message;
}
}
if (!msg) {
handle = hub.addEventListener('portlet.onStateChange', update);
}
setSuccess('V3PortletHubTests_SPEC_23_JSListener_onStateChange4', msg);
}
document.getElementById('V3PortletHubTests_SPEC_23_JSListener_onStateChange4-notready').id='ready';
/* TestCase: V3PortletHubTests_SPEC_23_JSListener_onStateChange5 */
/* Details: "The portlet hub throws a TypeError if a previously added */
/* onStateChange listener is removed twice" */
document.getElementById('V3PortletHubTests_SPEC_23_JSListener_onStateChange5-clickme').onclick = function () {
msg = null;
hub.removeEventListener(handle);
testFunction = function () {
hub.removeEventListener(handle);
}
msg = testException(testFunction, 'TypeError');
if (!msg) {
handle = hub.addEventListener('portlet.onStateChange', update);
}
setSuccess('V3PortletHubTests_SPEC_23_JSListener_onStateChange5', msg);
}
document.getElementById('V3PortletHubTests_SPEC_23_JSListener_onStateChange5-notready').id='ready';
function onTestError () {}
/* TestCase: V3PortletHubTests_SPEC_23_JSListener_onError1 */
/* Details: "An onError listener can be added" */
document.getElementById('V3PortletHubTests_SPEC_23_JSListener_onError1-clickme').onclick = function () {
msg = null;
try {
errorHandle = hub.addEventListener('portlet.onError', onTestError);
} catch (e) {
msg = 'Error was thrown adding the onError listener: ' + e.message;
errorHandle = null;
}
setSuccess('V3PortletHubTests_SPEC_23_JSListener_onError1', msg);
}
document.getElementById('V3PortletHubTests_SPEC_23_JSListener_onError1-notready').id='ready';
/* TestCase: V3PortletHubTests_SPEC_23_JSListener_onError2 */
/* Details: "A previously added onError listener can be removed" */
document.getElementById('V3PortletHubTests_SPEC_23_JSListener_onError2-clickme').onclick = function () {
if (!errorHandle) {
errorHandle = hub.addEventListener('portlet.onError', onTestError);
}
msg = null;
try {
hub.removeEventListener(errorHandle);
} catch (e) {
msg = 'Error was thrown removing the listener: ' + e.message;
}
errorHandle = null;
setSuccess('V3PortletHubTests_SPEC_23_JSListener_onError2', msg);
}
document.getElementById('V3PortletHubTests_SPEC_23_JSListener_onError2-notready').id='ready';
/* TestCase: V3PortletHubTests_SPEC_23_JSListener_onError3 */
/* Details: "The portlet hub throws a TypeError if a previously added onError */
/* listener is removed twice" */
document.getElementById('V3PortletHubTests_SPEC_23_JSListener_onError3-clickme').onclick = function () {
if (!errorHandle) {
errorHandle = hub.addEventListener('portlet.onError', onTestError);
}
msg = null;
hub.removeEventListener(errorHandle);
testFunction = function () {
hub.removeEventListener(errorHandle);
}
msg = testException(testFunction, 'TypeError');
setSuccess('V3PortletHubTests_SPEC_23_JSListener_onError3', msg);
}
document.getElementById('V3PortletHubTests_SPEC_23_JSListener_onError3-notready').id='ready';
}
window.addEventListener('load', execute);
}());
|
apache-2.0
|
enovance/infra-virt
|
tests/test_dumpelastic.py
|
1561
|
# -*- coding: utf-8 -*-
#
# Copyright 2015 eNovance SAS <licensing@enovance.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import dumpelastic
from tests.data.elasticdatas import mockdata
import mock
import testtools
class TestDumpElastic(testtools.TestCase):
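# 'dumpelastic.requests' is patched in each test, so no live Elasticsearch endpoint is required.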
@mock.patch("dumpelastic.requests")
def test_get_indices(self, m_request):
m_return_value = mock.Mock()
m_return_value.json.return_value = ['logstash-2015.02.09', 'noop']
m_request.get.return_value = m_return_value
indices = dumpelastic._get_indices("url")
self.assertEqual(['logstash-2015.02.09'], indices)
@mock.patch("dumpelastic.requests")
def test_dump_elasticsearch(self, m_request):
m_return_value = mock.Mock()
m_return_value.json = mock.MagicMock(side_effect=[mockdata.data1,
mockdata.data2])
m_request.get.return_value = m_return_value
dumpelastic._save_docs = mock.Mock()
dumpelastic._dump_elasticsearch("url", "output_dir")
|
apache-2.0
|
kapilt/cloud-custodian
|
tools/c7n_azure/tests_azure/test_dependency_manager.py
|
4279
|
# Copyright 2015-2018 Capital One Services, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import os
import re
import shutil
import tempfile
from .azure_common import BaseTest
from c7n_azure.dependency_manager import DependencyManager
class DependencyManagerTest(BaseTest):
test_zip = os.path.join(os.path.dirname(__file__), 'data', 'test_cache', 'cache.zip')
test_metadata = os.path.join(os.path.dirname(__file__), 'data', 'test_cache', 'metadata.json')
test_zip_wrong = os.path.join(os.path.dirname(__file__), 'data', 'cache', 'wrong.zip')
test_metadata_wrong = os.path.join(os.path.dirname(__file__), 'data', 'cache', 'wrong.json')
test_packages = ['package1', 'package2']
def test_get_file_hash(self):
self.assertEqual(DependencyManager._get_file_hash(self.test_zip),
'EqAMFyrJXL+/+kEgji2hHQESjSHDTm4/SQZjwVdVcgg=')
def test_get_string_hash(self):
self.assertEqual(DependencyManager._get_string_hash(' '.join(self.test_packages)),
'1189b389ffc75d3a3174b6c63dee03fc')
def test_create_cache_metadata(self):
bench = tempfile.mkdtemp()
self.addCleanup(lambda: shutil.rmtree(bench))
tmp_metadata = os.path.join(bench, 'metadata.json')
DependencyManager.create_cache_metadata(tmp_metadata,
self.test_zip,
self.test_packages)
with open(self.test_metadata, 'rt') as f:
test_json = json.load(f)
with open(tmp_metadata, 'rt') as f:
tmp_json = json.load(f)
self.assertTrue(test_json == tmp_json)
def test_check_hash(self):
self.assertFalse(DependencyManager.check_cache(self.test_metadata_wrong,
self.test_zip,
self.test_packages))
self.assertFalse(DependencyManager.check_cache(self.test_metadata,
self.test_zip_wrong,
self.test_packages))
self.assertFalse(DependencyManager.check_cache(self.test_metadata,
self.test_zip,
['wrong', 'wrong2']))
self.assertFalse(DependencyManager.check_cache(self.test_metadata,
self.test_metadata,
self.test_packages))
self.assertTrue(DependencyManager.check_cache(self.test_metadata,
self.test_zip,
self.test_packages))
def test_get_installed_distributions(self):
d = DependencyManager.get_dependency_packages_list(
['c7n-azure', 'c7n-azure'],
['azure-cli-core'])
# expected dependencies
self.assertTrue('adal' in d)
# excluded packages are missing
self.assertTrue('azure-cli-core' not in d)
# dependencies that are substrings of another are included
self.assertTrue('applicationinsights' in d)
self.assertTrue('azure-mgmt-applicationinsights' in d)
# dependencies are sorted
self.assertEqual(sorted(d), d)
# Remove versions from all packages & make sure there is no duplicates in the list
regex = "^[^<>~=]*"
d_no_versions = [re.match(regex, p).group(0) for p in d]
self.assertEqual(len(d), len(set(d_no_versions)))
|
apache-2.0
|
copasi/COPASI
|
copasi/sbml/unittests/test000002.hpp
|
1151
|
// Copyright (C) 2017 by Pedro Mendes, Virginia Tech Intellectual
// Properties, Inc., University of Heidelberg, and University
// of Connecticut School of Medicine.
// All rights reserved.
// Copyright (C) 2010 - 2016 by Pedro Mendes, Virginia Tech Intellectual
// Properties, Inc., University of Heidelberg, and The University
// of Manchester.
// All rights reserved.
// Copyright (C) 2008 - 2009 by Pedro Mendes, Virginia Tech Intellectual
// Properties, Inc., EML Research, gGmbH, University of Heidelberg,
// and The University of Manchester.
// All rights reserved.
#ifndef TEST_000002_HPP__
#define TEST_000002_HPP__
#include <cppunit/TestFixture.h>
#include <cppunit/TestSuite.h>
#include <cppunit/TestResult.h>
#include <cppunit/extensions/HelperMacros.h>
class CDataModel;
class test000002 : public CppUnit::TestFixture
{
CPPUNIT_TEST_SUITE(test000002);
CPPUNIT_TEST(test_references_to_species);
CPPUNIT_TEST_SUITE_END();
protected:
static const char* MODEL_STRING;
static CDataModel* pCOPASIDATAMODEL;
public:
void setUp();
void tearDown();
void test_references_to_species();
};
#endif /* TEST_000002_HPP__ */
|
artistic-2.0
|
brunolauze/pegasus-providers
|
UNIXProviders/iSCSICapabilities/UNIX_iSCSICapabilitiesMain.cpp
|
2204
|
//%LICENSE////////////////////////////////////////////////////////////////
//
// Licensed to The Open Group (TOG) under one or more contributor license
// agreements. Refer to the OpenPegasusNOTICE.txt file distributed with
// this work for additional information regarding copyright ownership.
// Each contributor licenses this file to you under the OpenPegasus Open
// Source License; you may not use this file except in compliance with the
// License.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the "Software"),
// to deal in the Software without restriction, including without limitation
// the rights to use, copy, modify, merge, publish, distribute, sublicense,
// and/or sell copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
// IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
// TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
// SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
//////////////////////////////////////////////////////////////////////////
//
//%/////////////////////////////////////////////////////////////////////////
#include <Pegasus/Common/Config.h>
#include <Pegasus/Common/String.h>
#include <Pegasus/Common/PegasusVersion.h>
PEGASUS_USING_PEGASUS;
PEGASUS_USING_STD;
#include "UNIX_iSCSICapabilitiesProvider.h"
extern "C"
PEGASUS_EXPORT CIMProvider* PegasusCreateProvider(const String& providerName)
{
if (String::equalNoCase(providerName, CIMHelper::EmptyString)) return NULL;
else if (String::equalNoCase(providerName, "UNIX_iSCSICapabilitiesProvider")) return new UNIX_iSCSICapabilitiesProvider();
return NULL;
}
|
bsd-2-clause
|
CI-WATER/tethys
|
tests/unit_tests/test_tethys_compute/test_models/test_CondorWorkflow.py
|
8793
|
from tethys_sdk.testing import TethysTestCase
from tethys_compute.models.condor.condor_scheduler import CondorScheduler
from tethys_compute.models.condor.condor_py_workflow import CondorPyWorkflow
from tethys_compute.models.condor.condor_workflow_job_node import CondorWorkflowJobNode
from tethys_compute.models.condor.condor_workflow import CondorWorkflow
from django.contrib.auth.models import User
from django.utils import timezone as tz
from unittest import mock
import os
import shutil
import os.path
class CondorWorkflowTest(TethysTestCase):
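# Class-level mocks: the @mock.patch property decorators below reference these at class definition time.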
mock_nodes = mock.MagicMock()
mock_nodes.name = 'test_job1'
mock_condor_workflow = mock.MagicMock()
mock_condor_workflow._execute.return_value = 'out', 'err'
def set_up(self):
test_models_dir = os.path.dirname(__file__)
self.workspace_dir = os.path.join(test_models_dir, 'workspace')
self.user = User.objects.create_user('tethys_super', 'user@example.com', 'pass')
files_dir = os.path.join(os.path.dirname(test_models_dir), 'files')
self.private_key = os.path.join(files_dir, 'keys', 'testkey')
self.private_key_pass = 'password'
self.scheduler = CondorScheduler(
name='test_scheduler',
host='localhost',
username='tethys_super',
password='pass',
private_key_path=self.private_key,
private_key_pass=self.private_key_pass
)
self.scheduler.save()
self.condorworkflow = CondorWorkflow(
_max_jobs={'foo': 10},
_config='test_config',
name='test name',
workspace=self.workspace_dir,
user=self.user,
scheduler=self.scheduler,
)
self.condorworkflow.save()
self.id_value = CondorWorkflow.objects.get(name='test name').condorpyworkflow_ptr_id
self.condorpyworkflow = CondorPyWorkflow.objects.get(condorpyworkflow_id=self.id_value)
self.condorworkflowjobnode_child = CondorWorkflowJobNode(
name='Node_child',
workflow=self.condorpyworkflow,
_attributes={'test': 'one'},
_num_jobs=1,
_remote_input_files=['test1.txt'],
)
self.condorworkflowjobnode_child.save()
self.condorworkflowjobnode = CondorWorkflowJobNode(
name='Node_1',
workflow=self.condorpyworkflow,
_attributes={'test': 'one'},
_num_jobs=1,
_remote_input_files=['test1.txt'],
)
self.condorworkflowjobnode.save()
# Django model many to many relationship add method
self.condorworkflowjobnode.parent_nodes.add(self.condorworkflowjobnode_child)
self.condorbase_id = CondorWorkflow.objects.get(name='test name').condorbase_ptr_id
self.condorpyworkflow_id = CondorWorkflow.objects.get(name='test name').condorpyworkflow_ptr_id
def tear_down(self):
self.scheduler.delete()
if self.condorworkflow.condorbase_ptr_id == self.condorbase_id:
self.condorworkflow.delete()
if os.path.exists(self.workspace_dir):
shutil.rmtree(self.workspace_dir)
def test_type(self):
ret = self.condorworkflow.type
self.assertEqual('CondorWorkflow', ret)
def test_condor_object_prop(self):
ret = self.condorworkflow._condor_object
# Check workflow return
self.assertEqual({'foo': 10}, ret.max_jobs)
self.assertEqual('test_config', ret.config)
self.assertEqual('<DAG: test_name>', repr(ret))
@mock.patch('tethys_compute.models.condor.condor_workflow.CondorPyWorkflow.load_nodes')
@mock.patch('tethys_compute.models.condor.condor_workflow.CondorBase.condor_object')
def test_execute(self, mock_co, mock_ln):
# Mock submit to return a 111 cluster id
mock_co.submit.return_value = 111
# Execute
self.condorworkflow._execute(options=['foo'])
# We already tested load_nodes in CondorPyWorkflow, just mocked to make sure it's called here.
mock_ln.assert_called()
mock_co.submit.assert_called_with(options=['foo'])
# Check cluster_id from _execute in condorbase
self.assertEqual(111, self.condorworkflow.cluster_id)
def test_get_job(self):
ret = self.condorworkflow.get_job(job_name='Node_1')
# Check result
self.assertIsInstance(ret, CondorWorkflowJobNode)
self.assertEqual('Node_1', ret.name)
def test_get_job_does_not_exist(self):
ret = self.condorworkflow.get_job(job_name='Node_2')
# Check result
self.assertIsNone(ret)
@mock.patch('tethys_compute.models.condor.condor_workflow.CondorBase.update_database_fields')
@mock.patch('tethys_compute.models.condor.condor_workflow.CondorPyWorkflow.update_database_fields')
def test_update_database_fields(self, mock_pw_update, mock_ba_update):
# Execute
self.condorworkflow.update_database_fields()
# Check if mock is called
mock_pw_update.assert_called()
mock_ba_update.assert_called()
@mock.patch('tethys_compute.models.condor.condor_workflow.CondorWorkflow.nodes',
new_callable=mock.PropertyMock(return_value=[mock_nodes]))
def test_log_files(self, _):
expected_ret = {'workflow': 'test_name.dag.dagman.out',
'test_job1': {'log': 'test_job1/logs/*.log',
'error': 'test_job1/logs/*.err',
'output': 'test_job1/logs/*.out'}}
# Execute
ret = self.condorworkflow._log_files()
self.assertEqual(expected_ret, ret)
@mock.patch('tethys_compute.models.condor.condor_workflow.CondorWorkflow.update_database_fields')
def test_condor_workflow_presave(self, mock_update):
# Execute
self.condorworkflow.save()
# Check if update_database_fields is called
mock_update.assert_called()
@mock.patch('tethys_compute.models.condor.condor_workflow.CondorWorkflow.condor_object')
def test_condor_job_pre_delete(self, mock_co):
if not os.path.exists(self.workspace_dir):
os.makedirs(self.workspace_dir)
file_path = os.path.join(self.workspace_dir, 'test_file.txt')
open(file_path, 'a').close()
self.condorworkflow.delete()
# Check if close_remote is called
mock_co.close_remote.assert_called()
# Check if file has been removed
self.assertFalse(os.path.isfile(file_path))
@mock.patch('tethys_compute.models.condor.condor_workflow.log')
@mock.patch('tethys_compute.models.condor.condor_workflow.CondorWorkflow.condor_object')
def test_condor_job_pre_delete_exception(self, mock_co, mock_log):
mock_co.close_remote.side_effect = Exception('test error')
self.condorworkflow.delete()
# Check if close_remote is called
mock_log.exception.assert_called_with('test error')
def test__update_status_no_execute_time(self):
self.condorworkflow.execute_time = None
ret = self.condorworkflow._update_status()
self.assertEqual('SUB', ret)
@mock.patch('tethys_compute.models.condor.condor_workflow.CondorBase.condor_object')
def test__update_status_not_Running(self, mock_co):
self.condorworkflow.execute_time = tz.now()
mock_co.status = 'Completed'
self.condorworkflow._update_status()
self.assertEqual('COM', self.condorworkflow._status)
@mock.patch('tethys_compute.models.condor.condor_workflow.CondorBase.condor_object')
def test__update_status_Running_not_running_statuses(self, mock_co):
self.condorworkflow.execute_time = tz.now()
mock_co.status = 'Running'
mock_co.statuses = {'Unexpanded': 0, 'Idle': 0, 'Running': 0, 'Completed': 1}
self.condorworkflow._update_status()
self.assertEqual('VCP', self.condorworkflow._status)
@mock.patch('tethys_compute.models.condor.condor_workflow.CondorBase.condor_object')
def test__update_status_Running_no_statuses(self, mock_co):
self.condorworkflow.execute_time = tz.now()
mock_co.status = 'Running'
mock_co.statuses = {'Unexpanded': 0, 'Idle': 0, 'Running': 0, 'Completed': 0}
self.condorworkflow._update_status()
self.assertEqual('SUB', self.condorworkflow._status)
@mock.patch('tethys_compute.models.condor.condor_workflow.CondorBase.condor_object')
def test__update_status_exception(self, mock_co):
self.condorworkflow.execute_time = tz.now()
type(mock_co).status = mock.PropertyMock(side_effect=Exception)
self.condorworkflow._update_status()
self.assertEqual('ERR', self.condorworkflow._status)
|
bsd-2-clause
|
pcastanha/frame
|
src/softframe/classification/utils.py
|
2236
|
import os
import codecs
import requests, json
from os import walk
from sklearn.datasets import load_svmlight_file
import pickle
from text_unidecode import unidecode
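# Posts raw text to an external dependency-parser service (hardcoded LAN address) and returns its JSON tags.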
def TextToDep(text):
dados = {"doc": text }
jsonArray = json.dumps(dados)
response = requests.post("http://172.23.227.172:4567/parse", data=jsonArray)
tags = response.json()
return tags
def ensure_dir(directory):
if not os.path.exists(directory):
os.makedirs(directory)
def write_file(filename, doc):
file = codecs.open(filename, "w", "utf-8")
file.write(doc)
file.close()
def load_file(filename):
X, y = load_svmlight_file(filename)
return X, y
def FastSerialize(obj, file):
    f = open(file, 'wb')
    pickle.dump(obj, f)
    f.close()
    return 0
def FastLoad(obj, file):
    f = open(file, 'rb')
    obj = pickle.load(f)
    f.close()
    return obj
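# Illustrative round trip (hypothetical 'model.pkl' path; FastLoad's first argument is unused):
#   FastSerialize(model, 'model.pkl')
#   model = FastLoad(None, 'model.pkl')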
def load_file_as_list(filename):
with open(filename) as f:
l = f.read().splitlines()
return l
def tokenize(text):
return([text.split('#', 1)[0].strip()])
def carregar_texto_de_lista_documentos(filenames, tag=False):
docs = {}
for filename in filenames:
if "svmlight" not in filename:
# TODO: implement a file size check
with codecs.open(filename=filename, encoding='utf8') as f:
if tag is False:
text = f.read().strip()
else:
text = ' '.join(TextToDep(f.read().strip()))
docs[filename] = text
return docs
def separar_texto_em_tokens(docs, stopwords=set([])):
corpus = {}
for key, val in docs.items():
corpus[key] = [word for word in val.split() if word not in stopwords]
return corpus
def checar_retorno(arg, message):
if arg is None:
raise ValueError(message)
return arg
def montar_corpus(documento):
saida = []
for i in documento.keys():
label = i.split('/')[1].split('_')[0]
saida.append([unidecode(documento.get(i)), label, i])
return saida
def obter_lista_documentos(path):
f = []
for (dirpath, dirnames, filenames) in walk(path):
for filename in filenames:
f.append(os.path.join(dirpath, filename))
return f
|
bsd-2-clause
|
eHealthAfrica/onadata
|
onadata/apps/api/tests/models/test_organization_profile.py
|
1060
|
from onadata.apps.main.tests.test_base import TestBase
from onadata.apps.api import tools
from onadata.apps.api.models.organization_profile import OrganizationProfile
from onadata.apps.api.models.team import Team
class TestOrganizationProfile(TestBase):
def test_create_organization_creates_team_and_perms(self):
# create a user - bob
profile = tools.create_organization("modilabs", self.user)
self.assertIsInstance(profile, OrganizationProfile)
organization_profile = OrganizationProfile.objects.get(
user__username="modilabs")
# check organization was created
self.assertTrue(organization_profile.is_organization)
# check that the default team was created
team_name = "modilabs#%s" % Team.OWNER_TEAM_NAME
team = Team.objects.get(
organization=organization_profile.user, name=team_name)
self.assertIsInstance(team, Team)
self.assertIn(team.group_ptr, self.user.groups.all())
self.assertTrue(self.user.has_perm('api.is_org_owner'))
|
bsd-2-clause
|
yuany/ghostdriver
|
test/java/src/test/java/ghostdriver/server/CallbackServlet.java
|
2116
|
/*
This file is part of the GhostDriver by Ivan De Marino <http://ivandemarino.me>.
Copyright (c) 2012, Ivan De Marino <http://ivandemarino.me>
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package ghostdriver.server;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
public class CallbackServlet extends HttpServlet {
private CallbackHttpServer server;
CallbackServlet(CallbackHttpServer server) {
this.server = server;
}
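// Delegates GET requests to the test-supplied handler when one is registered; otherwise falls back to HttpServlet's default response.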
protected void doGet(HttpServletRequest req, HttpServletResponse res) throws ServletException, IOException {
if (server.getGetHandler() != null) {
server.getGetHandler().call(req, res);
} else {
super.doGet(req, res);
}
}
}
|
bsd-2-clause
|
makinacorpus/Geotrek
|
geotrek/maintenance/migrations/0009_auto_20200211_1011.py
|
14867
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.27 on 2020-02-11 10:11
from __future__ import unicode_literals
import datetime
from django.db import migrations, models
import django.db.models.deletion
import geotrek.authent.models
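# Generated migration: the AlterField operations mostly re-declare fields (verbose names, help texts), and AlterModelTable(None) resets custom table names to Django defaults.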
class Migration(migrations.Migration):
dependencies = [
('maintenance', '0008_auto_20191210_0921'),
]
operations = [
migrations.AlterField(
model_name='contractor',
name='contractor',
field=models.CharField(max_length=128, verbose_name='Contractor'),
),
migrations.AlterField(
model_name='contractor',
name='structure',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='authent.Structure', verbose_name='Related structure'),
),
migrations.AlterField(
model_name='funding',
name='amount',
field=models.FloatField(default=0.0, verbose_name='Amount'),
),
migrations.AlterField(
model_name='funding',
name='organism',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='common.Organism', verbose_name='Organism'),
),
migrations.AlterField(
model_name='funding',
name='project',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='maintenance.Project', verbose_name='Project'),
),
migrations.AlterField(
model_name='intervention',
name='area',
field=models.FloatField(blank=True, default=0, editable=False, null=True, verbose_name='Area'),
),
migrations.AlterField(
model_name='intervention',
name='ascent',
field=models.IntegerField(blank=True, default=0, editable=False, null=True, verbose_name='Ascent'),
),
migrations.AlterField(
model_name='intervention',
name='date',
field=models.DateField(default=datetime.datetime.now, help_text='When ?', verbose_name='Date'),
),
migrations.AlterField(
model_name='intervention',
name='descent',
field=models.IntegerField(blank=True, default=0, editable=False, null=True, verbose_name='Descent'),
),
migrations.AlterField(
model_name='intervention',
name='description',
field=models.TextField(blank=True, help_text='Remarks and notes', verbose_name='Description'),
),
migrations.AlterField(
model_name='intervention',
name='disorders',
field=models.ManyToManyField(blank=True, related_name='interventions', to='maintenance.InterventionDisorder', verbose_name='Disorders'),
),
migrations.AlterField(
model_name='intervention',
name='height',
field=models.FloatField(blank=True, default=0.0, null=True, verbose_name='Height'),
),
migrations.AlterField(
model_name='intervention',
name='heliport_cost',
field=models.FloatField(blank=True, default=0.0, null=True, verbose_name='Heliport cost'),
),
migrations.AlterField(
model_name='intervention',
name='length',
field=models.FloatField(blank=True, default=0.0, null=True, verbose_name='3D Length'),
),
migrations.AlterField(
model_name='intervention',
name='material_cost',
field=models.FloatField(blank=True, default=0.0, null=True, verbose_name='Material cost'),
),
migrations.AlterField(
model_name='intervention',
name='max_elevation',
field=models.IntegerField(blank=True, default=0, editable=False, null=True, verbose_name='Maximum elevation'),
),
migrations.AlterField(
model_name='intervention',
name='min_elevation',
field=models.IntegerField(blank=True, default=0, editable=False, null=True, verbose_name='Minimum elevation'),
),
migrations.AlterField(
model_name='intervention',
name='name',
field=models.CharField(help_text='Brief summary', max_length=128, verbose_name='Name'),
),
migrations.AlterField(
model_name='intervention',
name='project',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='interventions', to='maintenance.Project', verbose_name='Project'),
),
migrations.AlterField(
model_name='intervention',
name='slope',
field=models.FloatField(blank=True, default=0.0, editable=False, null=True, verbose_name='Slope'),
),
migrations.AlterField(
model_name='intervention',
name='stake',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='interventions', to='core.Stake', verbose_name='Stake'),
),
migrations.AlterField(
model_name='intervention',
name='status',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='maintenance.InterventionStatus', verbose_name='Status'),
),
migrations.AlterField(
model_name='intervention',
name='structure',
field=models.ForeignKey(default=geotrek.authent.models.default_structure_pk, on_delete=django.db.models.deletion.CASCADE, to='authent.Structure', verbose_name='Related structure'),
),
migrations.AlterField(
model_name='intervention',
name='subcontract_cost',
field=models.FloatField(blank=True, default=0.0, null=True, verbose_name='Subcontract cost'),
),
migrations.AlterField(
model_name='intervention',
name='subcontracting',
field=models.BooleanField(default=False, verbose_name='Subcontracting'),
),
migrations.AlterField(
model_name='intervention',
name='type',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='maintenance.InterventionType', verbose_name='Type'),
),
migrations.AlterField(
model_name='intervention',
name='width',
field=models.FloatField(blank=True, default=0.0, null=True, verbose_name='Width'),
),
migrations.AlterField(
model_name='interventiondisorder',
name='disorder',
field=models.CharField(max_length=128, verbose_name='Disorder'),
),
migrations.AlterField(
model_name='interventiondisorder',
name='structure',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='authent.Structure', verbose_name='Related structure'),
),
migrations.AlterField(
model_name='interventionjob',
name='cost',
field=models.DecimalField(decimal_places=2, default=1.0, max_digits=8, verbose_name='Cost'),
),
migrations.AlterField(
model_name='interventionjob',
name='job',
field=models.CharField(max_length=128, verbose_name='Job'),
),
migrations.AlterField(
model_name='interventionjob',
name='structure',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='authent.Structure', verbose_name='Related structure'),
),
migrations.AlterField(
model_name='interventionstatus',
name='order',
field=models.PositiveSmallIntegerField(blank=True, default=None, null=True, verbose_name='Display order'),
),
migrations.AlterField(
model_name='interventionstatus',
name='status',
field=models.CharField(max_length=128, verbose_name='Status'),
),
migrations.AlterField(
model_name='interventionstatus',
name='structure',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='authent.Structure', verbose_name='Related structure'),
),
migrations.AlterField(
model_name='interventiontype',
name='structure',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='authent.Structure', verbose_name='Related structure'),
),
migrations.AlterField(
model_name='interventiontype',
name='type',
field=models.CharField(max_length=128, verbose_name='Type'),
),
migrations.AlterField(
model_name='manday',
name='intervention',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='maintenance.Intervention'),
),
migrations.AlterField(
model_name='manday',
name='job',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='maintenance.InterventionJob', verbose_name='Job'),
),
migrations.AlterField(
model_name='manday',
name='nb_days',
field=models.DecimalField(decimal_places=2, max_digits=6, verbose_name='Mandays'),
),
migrations.AlterField(
model_name='project',
name='begin_year',
field=models.IntegerField(verbose_name='Begin year'),
),
migrations.AlterField(
model_name='project',
name='comments',
field=models.TextField(blank=True, help_text='Remarks and notes', verbose_name='Comments'),
),
migrations.AlterField(
model_name='project',
name='constraint',
field=models.TextField(blank=True, help_text='Specific conditions, ...', verbose_name='Constraint'),
),
migrations.AlterField(
model_name='project',
name='contractors',
field=models.ManyToManyField(blank=True, related_name='projects', to='maintenance.Contractor', verbose_name='Contractors'),
),
migrations.AlterField(
model_name='project',
name='domain',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='maintenance.ProjectDomain', verbose_name='Domain'),
),
migrations.AlterField(
model_name='project',
name='end_year',
field=models.IntegerField(blank=True, null=True, verbose_name='End year'),
),
migrations.AlterField(
model_name='project',
name='global_cost',
field=models.FloatField(blank=True, default=0, help_text='€', null=True, verbose_name='Global cost'),
),
migrations.AlterField(
model_name='project',
name='name',
field=models.CharField(max_length=128, verbose_name='Name'),
),
migrations.AlterField(
model_name='project',
name='project_manager',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='manage', to='common.Organism', verbose_name='Project manager'),
),
migrations.AlterField(
model_name='project',
name='project_owner',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='own', to='common.Organism', verbose_name='Project owner'),
),
migrations.AlterField(
model_name='project',
name='structure',
field=models.ForeignKey(default=geotrek.authent.models.default_structure_pk, on_delete=django.db.models.deletion.CASCADE, to='authent.Structure', verbose_name='Related structure'),
),
migrations.AlterField(
model_name='project',
name='type',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='maintenance.ProjectType', verbose_name='Type'),
),
migrations.AlterField(
model_name='projectdomain',
name='domain',
field=models.CharField(max_length=128, verbose_name='Domain'),
),
migrations.AlterField(
model_name='projectdomain',
name='structure',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='authent.Structure', verbose_name='Related structure'),
),
migrations.AlterField(
model_name='projecttype',
name='structure',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='authent.Structure', verbose_name='Related structure'),
),
migrations.AlterField(
model_name='projecttype',
name='type',
field=models.CharField(max_length=128, verbose_name='Type'),
),
migrations.AlterField(
model_name='intervention',
name='deleted',
field=models.BooleanField(default=False, editable=False, verbose_name='Deleted'),
),
migrations.AlterField(
model_name='project',
name='deleted',
field=models.BooleanField(default=False, editable=False, verbose_name='Deleted'),
),
migrations.AlterModelTable(
name='contractor',
table=None,
),
migrations.AlterModelTable(
name='funding',
table=None,
),
migrations.AlterModelTable(
name='intervention',
table=None,
),
migrations.AlterModelTable(
name='interventiondisorder',
table=None,
),
migrations.AlterModelTable(
name='interventionjob',
table=None,
),
migrations.AlterModelTable(
name='interventionstatus',
table=None,
),
migrations.AlterModelTable(
name='interventiontype',
table=None,
),
migrations.AlterModelTable(
name='manday',
table=None,
),
migrations.AlterModelTable(
name='project',
table=None,
),
migrations.AlterModelTable(
name='projectdomain',
table=None,
),
migrations.AlterModelTable(
name='projecttype',
table=None,
),
]
|
bsd-2-clause
|