repo_name stringlengths 4–116 | path stringlengths 4–379 | size stringlengths 1–7 | content stringlengths 3–1.05M | license stringclasses 15 values
---|---|---|---|---|
imshashank/aws-doc-sdk-examples | cpp/example_code/sqs/dead_letter_queue.cpp | 2537 |
/*
Copyright 2010-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
This file is licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License. A copy of
the License is located at
http://aws.amazon.com/apache2.0/
This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
*/
#include <aws/core/Aws.h>
#include <aws/core/utils/json/JsonSerializer.h>
#include <aws/sqs/SQSClient.h>
#include <aws/sqs/model/SetQueueAttributesRequest.h>
#include <iostream>
Aws::String MakeRedrivePolicy(const Aws::String& dlq_arn, int max_msg)
{
Aws::Utils::Json::JsonValue redrive_arn_entry;
redrive_arn_entry.AsString(dlq_arn);
Aws::Utils::Json::JsonValue max_msg_entry;
max_msg_entry.AsInteger(max_msg);
Aws::Utils::Json::JsonValue policy_map;
policy_map.WithObject("deadLetterTargetArn", redrive_arn_entry);
policy_map.WithObject("maxReceiveCount", max_msg_entry);
return policy_map.WriteReadable();
}
/**
* Connects an SQS queue to an associated dead-letter queue based on
* command-line input.
*/
int main(int argc, char** argv)
{
if (argc != 4) {
std::cout << "Usage: dead_letter_queue <source_queue_url> " <<
"<dead_letter_queue_arn> <max_messages>" << std::endl;
return 1;
}
Aws::String src_queue_url = argv[1];
Aws::String dlq_arn = argv[2];
Aws::StringStream ss(argv[3]);
int max_msg = 1;
ss >> max_msg;
Aws::SDKOptions options;
Aws::InitAPI(options);
{
Aws::SQS::SQSClient sqs;
Aws::String redrivePolicy = MakeRedrivePolicy(dlq_arn, max_msg);
Aws::SQS::Model::SetQueueAttributesRequest sqa_req;
sqa_req.SetQueueUrl(src_queue_url);
sqa_req.AddAttributes(
Aws::SQS::Model::QueueAttributeName::RedrivePolicy,
redrivePolicy);
auto sqa_out = sqs.SetQueueAttributes(sqa_req);
if (sqa_out.IsSuccess()) {
std::cout << "Successfully set dead letter queue for queue " <<
src_queue_url << " to " << dlq_arn << std::endl;
} else {
std::cout << "Error setting dead letter queue for queue " <<
src_queue_url << ": " << sqa_out.GetError().GetMessage() <<
std::endl;
}
}
Aws::ShutdownAPI(options);
return 0;
}
| apache-2.0 |
seoj/herd | herd-code/herd-service/src/main/java/org/finra/herd/service/impl/EmrClusterDefinitionServiceImpl.java | 10736 |
/*
* Copyright 2015 herd contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.finra.herd.service.impl;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
import org.apache.log4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.finra.herd.dao.HerdDao;
import org.finra.herd.dao.config.DaoSpringModuleConfig;
import org.finra.herd.model.AlreadyExistsException;
import org.finra.herd.model.jpa.EmrClusterDefinitionEntity;
import org.finra.herd.model.jpa.NamespaceEntity;
import org.finra.herd.model.api.xml.EmrClusterDefinition;
import org.finra.herd.model.api.xml.EmrClusterDefinitionCreateRequest;
import org.finra.herd.model.api.xml.EmrClusterDefinitionInformation;
import org.finra.herd.model.api.xml.EmrClusterDefinitionKey;
import org.finra.herd.model.api.xml.EmrClusterDefinitionUpdateRequest;
import org.finra.herd.service.EmrClusterDefinitionService;
import org.finra.herd.service.helper.HerdDaoHelper;
import org.finra.herd.service.helper.HerdHelper;
import org.finra.herd.dao.helper.XmlHelper;
/**
* The EMR cluster definition service implementation.
*/
@Service
@Transactional(value = DaoSpringModuleConfig.HERD_TRANSACTION_MANAGER_BEAN_NAME)
@SuppressFBWarnings(value = "VA_FORMAT_STRING_USES_NEWLINE", justification = "We will use the standard carriage return character.")
public class EmrClusterDefinitionServiceImpl implements EmrClusterDefinitionService
{
private static final Logger LOGGER = Logger.getLogger(EmrClusterDefinitionServiceImpl.class);
@Autowired
private HerdHelper herdHelper;
@Autowired
protected XmlHelper xmlHelper;
@Autowired
private HerdDao herdDao;
@Autowired
private HerdDaoHelper herdDaoHelper;
/**
* Creates a new EMR cluster definition.
*
* @param request the information needed to create an EMR cluster definition
*
* @return the newly created EMR cluster definition
*/
@Override
public EmrClusterDefinitionInformation createEmrClusterDefinition(EmrClusterDefinitionCreateRequest request) throws Exception
{
// Validate and trim the EMR cluster definition key.
herdHelper.validateEmrClusterDefinitionKey(request.getEmrClusterDefinitionKey());
// Validate the EMR cluster definition configuration.
herdHelper.validateEmrClusterDefinitionConfiguration(request.getEmrClusterDefinition());
// Get the namespace and ensure it exists.
NamespaceEntity namespaceEntity = herdDaoHelper.getNamespaceEntity(request.getEmrClusterDefinitionKey().getNamespace());
// Ensure an EMR cluster definition with the specified name doesn't already exist.
EmrClusterDefinitionEntity emrClusterDefinitionEntity = herdDao.getEmrClusterDefinitionByAltKey(request.getEmrClusterDefinitionKey());
if (emrClusterDefinitionEntity != null)
{
throw new AlreadyExistsException(String
.format("Unable to create EMR cluster definition with name \"%s\" for namespace \"%s\" because it already exists.",
request.getEmrClusterDefinitionKey().getEmrClusterDefinitionName(), request.getEmrClusterDefinitionKey().getNamespace()));
}
// Create an EMR cluster definition entity from the request information.
emrClusterDefinitionEntity = createEmrClusterDefinitionEntity(namespaceEntity, request);
// Persist the new entity.
emrClusterDefinitionEntity = herdDao.saveAndRefresh(emrClusterDefinitionEntity);
// Create and return the EMR cluster definition object from the persisted entity.
return createEmrClusterDefinitionFromEntity(emrClusterDefinitionEntity);
}
/**
* Gets an existing EMR cluster definition by key.
*
* @param emrClusterDefinitionKey the EMR cluster definition key
*
* @return the EMR cluster definition
*/
@Override
public EmrClusterDefinitionInformation getEmrClusterDefinition(EmrClusterDefinitionKey emrClusterDefinitionKey) throws Exception
{
// Validate and trim the EMR cluster definition key.
herdHelper.validateEmrClusterDefinitionKey(emrClusterDefinitionKey);
// Retrieve and ensure that an EMR cluster definition exists with the specified key.
EmrClusterDefinitionEntity emrClusterDefinitionEntity = herdDaoHelper.getEmrClusterDefinitionEntity(emrClusterDefinitionKey);
// Create and return the EMR cluster definition object from the persisted entity.
return createEmrClusterDefinitionFromEntity(emrClusterDefinitionEntity);
}
/**
* Updates an existing EMR cluster definition.
*
* @param emrClusterDefinitionKey the EMR cluster definition key
* @param request the information needed to update the EMR cluster definition
*
* @return the updated EMR cluster definition
*/
@Override
public EmrClusterDefinitionInformation updateEmrClusterDefinition(EmrClusterDefinitionKey emrClusterDefinitionKey,
EmrClusterDefinitionUpdateRequest request) throws Exception
{
// Validate and trim the EMR cluster definition key.
herdHelper.validateEmrClusterDefinitionKey(emrClusterDefinitionKey);
// Validate the EMR cluster definition configuration.
herdHelper.validateEmrClusterDefinitionConfiguration(request.getEmrClusterDefinition());
// Retrieve and ensure that an EMR cluster definition already exists with the specified name.
EmrClusterDefinitionEntity emrClusterDefinitionEntity = herdDaoHelper.getEmrClusterDefinitionEntity(emrClusterDefinitionKey);
// Log the existing EMR cluster definition before the update.
LOGGER.info(String.format("EMR cluster definition before the update:\n%s",
xmlHelper.objectToXml(createEmrClusterDefinitionFromEntity(emrClusterDefinitionEntity), true)));
// Convert EMR cluster configuration to the XML representation.
String emrClusterConfiguration = xmlHelper.objectToXml(request.getEmrClusterDefinition());
// Update the EMR cluster definition entity.
emrClusterDefinitionEntity.setConfiguration(emrClusterConfiguration);
// Persist and refresh the entity.
emrClusterDefinitionEntity = herdDao.saveAndRefresh(emrClusterDefinitionEntity);
// Create and return the EMR cluster definition object from the persisted entity.
return createEmrClusterDefinitionFromEntity(emrClusterDefinitionEntity);
}
/**
* Deletes an existing EMR cluster definition by key.
*
* @param emrClusterDefinitionKey the EMR cluster definition key
*
* @return the deleted EMR cluster definition
*/
@Override
public EmrClusterDefinitionInformation deleteEmrClusterDefinition(EmrClusterDefinitionKey emrClusterDefinitionKey) throws Exception
{
// Validate and trim the EMR cluster definition key.
herdHelper.validateEmrClusterDefinitionKey(emrClusterDefinitionKey);
// Retrieve and ensure that an EMR cluster definition already exists with the specified key.
EmrClusterDefinitionEntity emrClusterDefinitionEntity = herdDaoHelper.getEmrClusterDefinitionEntity(emrClusterDefinitionKey);
// Log the existing EMR cluster definition.
LOGGER.info(String.format("EMR cluster definition being deleted:\n%s",
xmlHelper.objectToXml(createEmrClusterDefinitionFromEntity(emrClusterDefinitionEntity), true)));
// Delete the EMR cluster definition.
herdDao.delete(emrClusterDefinitionEntity);
// Create and return the EMR cluster definition object from the deleted entity.
return createEmrClusterDefinitionFromEntity(emrClusterDefinitionEntity);
}
/**
* Creates a new EMR cluster definition entity from the request information.
*
* @param namespaceEntity the namespace entity
* @param request the EMR cluster definition create request
*
* @return the newly created EMR cluster definition entity
*/
private EmrClusterDefinitionEntity createEmrClusterDefinitionEntity(NamespaceEntity namespaceEntity, EmrClusterDefinitionCreateRequest request)
throws Exception
{
// Convert EMR cluster configuration to the XML representation.
String emrClusterConfiguration = xmlHelper.objectToXml(request.getEmrClusterDefinition());
// Create a new entity.
EmrClusterDefinitionEntity emrClusterDefinitionEntity = new EmrClusterDefinitionEntity();
emrClusterDefinitionEntity.setNamespace(namespaceEntity);
emrClusterDefinitionEntity.setName(request.getEmrClusterDefinitionKey().getEmrClusterDefinitionName());
emrClusterDefinitionEntity.setConfiguration(emrClusterConfiguration);
return emrClusterDefinitionEntity;
}
/**
* Creates the EMR cluster definition information from the persisted entity.
*
* @param emrClusterDefinitionEntity the EMR cluster definition entity
*
* @return the EMR cluster definition information
*/
private EmrClusterDefinitionInformation createEmrClusterDefinitionFromEntity(EmrClusterDefinitionEntity emrClusterDefinitionEntity) throws Exception
{
// Unmarshal EMR cluster definition XML into JAXB object.
EmrClusterDefinition emrClusterDefinition = xmlHelper.unmarshallXmlToObject(EmrClusterDefinition.class, emrClusterDefinitionEntity.getConfiguration());
// Create a new instance of EMR cluster definition information.
EmrClusterDefinitionInformation emrClusterDefinitionInformation = new EmrClusterDefinitionInformation();
emrClusterDefinitionInformation.setId(emrClusterDefinitionEntity.getId());
emrClusterDefinitionInformation
.setEmrClusterDefinitionKey(new EmrClusterDefinitionKey(emrClusterDefinitionEntity.getNamespace().getCode(), emrClusterDefinitionEntity.getName()));
emrClusterDefinitionInformation.setEmrClusterDefinition(emrClusterDefinition);
return emrClusterDefinitionInformation;
}
}
| apache-2.0 |
SSEHUB/EASyProducer | Plugins/Instantiation/de.uni-hildesheim.sse.easy.instantiatorCore.tests/src/net/ssehub/easy/instantiation/core/model/templateModel/TemplateLangTests.java | 354 |
package net.ssehub.easy.instantiation.core.model.templateModel;
import org.junit.runner.RunWith;
import org.junit.runners.Suite;
/**
* The test suite for the template language.
*
* @author Holger Eichelberger
*/
@RunWith(Suite.class)
@Suite.SuiteClasses({IndentationUtilsTests.class, SerializationTest.class })
public class TemplateLangTests {
}
| apache-2.0 |
seava/seava.mod.ad | seava.mod.ad.i18n/src/main/resources/webapp/en/seava/ad/i18n/ds/ViewState_Ds.js | 116 |
Ext.define("seava.ad.i18n.ds.ViewState_Ds", {
cmpType__lbl: "Cmp Type",
cmp__lbl: "Cmp",
value__lbl: "Value"
});
| apache-2.0 |
HewlettPackard/oneview-puppet | spec/integration/provider/oneview_logical_interconnect_c7000_spec.rb | 3288 |
################################################################################
# (C) Copyright 2016-2017 Hewlett Packard Enterprise Development LP
#
# Licensed under the Apache License, Version 2.0 (the "License");
# You may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################
require 'spec_helper'
provider_class = Puppet::Type.type(:oneview_logical_interconnect).provider(:c7000)
describe provider_class, integration: true do
let(:resource) do
Puppet::Type.type(:oneview_logical_interconnect).new(
name: 'Test Logical Interconnect',
ensure: 'present',
data:
{
'name' => 'Encl2-my enclosure logical interconnect group',
'internalNetworks' => ['NET'],
'snmpConfiguration' =>
{
'enabled' => true
},
'firmware' =>
{
'command' => 'Stage',
'isoFileName' => 'fake_firmware.iso',
'force' => false
}
},
provider: 'c7000'
)
end
let(:provider) { resource.provider }
let(:instance) { provider.class.instances.first }
before(:each) do
provider.exists?
end
it 'should be an instance of the provider c7000' do
expect(provider).to be_an_instance_of Puppet::Type.type(:oneview_logical_interconnect).provider(:c7000)
end
it 'should find the interconnect' do
expect(provider.found).to be
end
it 'should get the igmp settings from the logical interconnect' do
expect(provider.get_igmp_settings).to be
end
it 'should update the igmp settings' do
expect(provider.set_igmp_settings).to be
end
it 'should get the snmp configuration from the logical interconnect' do
expect(provider.get_snmp_configuration).to be
end
it 'should get the firmware from the logical interconnect' do
expect(provider.get_firmware).to be
end
it 'should get the port monitor from the logical interconnect' do
expect(provider.get_port_monitor).to be
end
it 'should get the list of internal networks from the logical interconnect' do
expect(provider.get_internal_vlans).to be
end
it 'should get the qos configuration from the logical interconnect' do
expect(provider.get_qos_aggregated_configuration).to be
end
it 'should get the logical interconnect compliant' do
expect(provider.set_compliance).to be
end
it 'should update the snmp configuration' do
expect(provider.set_snmp_configuration).to be
end
it 'should set the firmware configuration' do
expect(provider.set_firmware).to be
end
it 'should set the LI configuration' do
expect(provider.set_configuration).to be
end
it 'should update the internal networks' do
expect(provider.set_internal_networks).to be
end
end
| apache-2.0 |
occidere/MMDownloader | src/util/UserAgent.java | 3334 |
package util;
import java.util.Random;
/**
* User-Agent related class
* 2017.10.24
* @author occidere
*/
public class UserAgent {
private static final String USER_AGENTS[] = {
/* Chrome */
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2062.103 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/40.0.2214.38 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/46.0.2490.71 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.103 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/62.0.3202.62 Safari/537.36",
/* Firefox */
"Mozilla/5.0 (Windows NT 6.1; rv:28.0) Gecko/20100101 Firefox/28.0",
"Mozilla/5.0 (X11; Linux i686; rv:30.0) Gecko/20100101 Firefox/30.0",
"Mozilla/5.0 (Windows NT 5.1; rv:31.0) Gecko/20100101 Firefox/31.0",
"Mozilla/5.0 (Windows NT 6.1; WOW64; rv:33.0) Gecko/20100101 Firefox/33.0",
"Mozilla/5.0 (Windows NT 10.0; WOW64; rv:40.0) Gecko/20100101 Firefox/40.0",
/* Internet Explorer */
"Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/6.0)",
"Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; WOW64; Trident/6.0)",
"Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.2; Win64; x64; Trident/6.0)",
"Mozilla/5.0 (IE 11.0; Windows NT 6.3; Trident/7.0; .NET4.0E; .NET4.0C; rv:11.0) like Gecko",
"Mozilla/5.0 (IE 11.0; Windows NT 6.3; WOW64; Trident/7.0; Touch; rv:11.0) like Gecko",
/* Microsoft Edge */
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.135 Safari/537.36 Edge/12.9600",
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.135 Safari/537.36 Edge/12.10240",
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/46.0.2486.0 Safari/537.36 Edge/13.10547",
"Mozilla/5.0 (Windows NT 10.0; Win64; x64; Xbox; Xbox One) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/46.0.2486.0 Safari/537.36 Edge/13.10586",
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/48.0.2564.82 Safari/537.36 Edge/14.14359",
/* Safari */
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_2) AppleWebKit/536.26.17 (KHTML, like Gecko) Version/6.0.2 Safari/536.26.17",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_5) AppleWebKit/537.75.14 (KHTML, like Gecko) Version/6.1.3 Safari/537.75.14",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_0) AppleWebKit/600.3.10 (KHTML, like Gecko) Version/8.0.3 Safari/600.3.10",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11) AppleWebKit/601.1.39 (KHTML, like Gecko) Version/9.0 Safari/601.1.39",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13) AppleWebKit/603.1.13 (KHTML, like Gecko) Version/10.1 Safari/603.1.13"
};
/**
* Randomly selects and returns a User-Agent string.
* <b>Note: do not use Chinese browser agents such as Baidu, since they result in a 403!</b>
* @return a randomly selected User-Agent string
*/
public static String getUserAgent() {
return USER_AGENTS[new Random().nextInt(USER_AGENTS.length)];
}
}
| apache-2.0 |
Warglaive/TechModuleSeptember2017 | Methods.DebuggingandLab/06. Calculate Triangle Area/Properties/AssemblyInfo.cs | 1425 |
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("06. Calculate Triangle Area")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("")]
[assembly: AssemblyProduct("06. Calculate Triangle Area")]
[assembly: AssemblyCopyright("Copyright © 2017")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]
// Setting ComVisible to false makes the types in this assembly not visible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]
// The following GUID is for the ID of the typelib if this project is exposed to COM
[assembly: Guid("ed690880-0759-4cef-88ac-e2a41e813b6b")]
// Version information for an assembly consists of the following four values:
//
// Major Version
// Minor Version
// Build Number
// Revision
//
// You can specify all the values or you can default the Build and Revision Numbers
// by using the '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
[assembly: AssemblyVersion("1.0.0.0")]
[assembly: AssemblyFileVersion("1.0.0.0")]
| apache-2.0 |
AfricaRegex/SjcProduct | SjcProject/src/com/sjc/cc/activity/vo/ServiceActivityVo.java | 1663 |
package com.sjc.cc.activity.vo;
public class ServiceActivityVo {
/** Processing result */
private String actResult;
/** Task description */
private String actDesc;
/** Follow-up task handling group */
private String actGrp;
/** Follow-up task handler */
private String actUser;
/** Postponement reason */
private String postponeDesc;
/** Ticket return reason */
private String errRetType;
/**
* Task version
*/
private Long actPriority;
public Long getActPriority() {
return actPriority;
}
public void setActPriority(Long actPriority) {
this.actPriority = actPriority;
}
public String getActDesc() {
return actDesc;
}
public void setActDesc(String actDesc) {
this.actDesc = actDesc;
}
public String getActGrp() {
return actGrp;
}
public void setActGrp(String actGrp) {
this.actGrp = actGrp;
}
public String getActUser() {
return actUser;
}
public String getErrRetType() {
return errRetType;
}
public void setErrRetType(String errRetType) {
this.errRetType = errRetType;
}
public void setActUser(String actUser) {
this.actUser = actUser;
}
public String getPostponeDesc() {
return postponeDesc;
}
public void setPostponeDesc(String postponeDesc) {
this.postponeDesc = postponeDesc;
}
public String getActResult() {
return actResult;
}
public void setActResult(String actResult) {
this.actResult = actResult;
}
}
| apache-2.0 |
djpflager/reversa | app/src/androidTest/java/com/pflagers/reversa/ApplicationTest.java | 351 |
package com.pflagers.reversa;
import android.app.Application;
import android.test.ApplicationTestCase;
/**
* <a href="http://d.android.com/tools/testing/testing_android.html">Testing Fundamentals</a>
*/
public class ApplicationTest extends ApplicationTestCase<Application> {
public ApplicationTest() {
super(Application.class);
}
}
| apache-2.0 |
googleapis/google-api-java-client-services | clients/google-api-services-abusiveexperiencereport/v1/1.26.0/com/google/api/services/abusiveexperiencereport/v1/AbusiveExperienceReport.java | 20936 |
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.abusiveexperiencereport.v1;
/**
* Service definition for AbusiveExperienceReport (v1).
*
* <p>
* Views Abusive Experience Report data, and gets a list of sites that have a significant number of abusive experiences.
* </p>
*
* <p>
* For more information about this service, see the
* <a href="https://developers.google.com/abusive-experience-report/" target="_blank">API Documentation</a>
* </p>
*
* <p>
* This service uses {@link AbusiveExperienceReportRequestInitializer} to initialize global parameters via its
* {@link Builder}.
* </p>
*
* @since 1.3
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public class AbusiveExperienceReport extends com.google.api.client.googleapis.services.json.AbstractGoogleJsonClient {
// Note: Leave this static initializer at the top of the file.
static {
com.google.api.client.util.Preconditions.checkState(
com.google.api.client.googleapis.GoogleUtils.MAJOR_VERSION == 1 &&
com.google.api.client.googleapis.GoogleUtils.MINOR_VERSION >= 15,
"You are currently running with version %s of google-api-client. " +
"You need at least version 1.15 of google-api-client to run version " +
"1.26.0 of the Abusive Experience Report API library.", com.google.api.client.googleapis.GoogleUtils.VERSION);
}
/**
* The default encoded root URL of the service. This is determined when the library is generated
* and normally should not be changed.
*
* @since 1.7
*/
public static final String DEFAULT_ROOT_URL = "https://abusiveexperiencereport.googleapis.com/";
/**
* The default encoded service path of the service. This is determined when the library is
* generated and normally should not be changed.
*
* @since 1.7
*/
public static final String DEFAULT_SERVICE_PATH = "";
/**
* The default encoded batch path of the service. This is determined when the library is
* generated and normally should not be changed.
*
* @since 1.23
*/
public static final String DEFAULT_BATCH_PATH = "batch";
/**
* The default encoded base URL of the service. This is determined when the library is generated
* and normally should not be changed.
*/
public static final String DEFAULT_BASE_URL = DEFAULT_ROOT_URL + DEFAULT_SERVICE_PATH;
/**
* Constructor.
*
* <p>
* Use {@link Builder} if you need to specify any of the optional parameters.
* </p>
*
* @param transport HTTP transport, which should normally be:
* <ul>
* <li>Google App Engine:
* {@code com.google.api.client.extensions.appengine.http.UrlFetchTransport}</li>
* <li>Android: {@code newCompatibleTransport} from
* {@code com.google.api.client.extensions.android.http.AndroidHttp}</li>
* <li>Java: {@link com.google.api.client.googleapis.javanet.GoogleNetHttpTransport#newTrustedTransport()}
* </li>
* </ul>
* @param jsonFactory JSON factory, which may be:
* <ul>
* <li>Jackson: {@code com.google.api.client.json.jackson2.JacksonFactory}</li>
* <li>Google GSON: {@code com.google.api.client.json.gson.GsonFactory}</li>
* <li>Android Honeycomb or higher:
* {@code com.google.api.client.extensions.android.json.AndroidJsonFactory}</li>
* </ul>
* @param httpRequestInitializer HTTP request initializer or {@code null} for none
* @since 1.7
*/
public AbusiveExperienceReport(com.google.api.client.http.HttpTransport transport, com.google.api.client.json.JsonFactory jsonFactory,
com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) {
this(new Builder(transport, jsonFactory, httpRequestInitializer));
}
/**
* @param builder builder
*/
AbusiveExperienceReport(Builder builder) {
super(builder);
}
@Override
protected void initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest<?> httpClientRequest) throws java.io.IOException {
super.initialize(httpClientRequest);
}
/**
* An accessor for creating requests from the Sites collection.
*
* <p>The typical use is:</p>
* <pre>
* {@code AbusiveExperienceReport abusiveexperiencereport = new AbusiveExperienceReport(...);}
* {@code AbusiveExperienceReport.Sites.List request = abusiveexperiencereport.sites().list(parameters ...)}
* </pre>
*
* @return the resource collection
*/
public Sites sites() {
return new Sites();
}
/**
* The "sites" collection of methods.
*/
public class Sites {
/**
* Gets a summary of the abusive experience rating of a site.
*
* Create a request for the method "sites.get".
*
* This request holds the parameters needed by the abusiveexperiencereport server. After setting
* any optional parameters, call the {@link Get#execute()} method to invoke the remote operation.
*
* @param name The required site name. This is the site property whose abusive
experiences have been reviewed, and
* it must be URL-encoded. For example,
sites/https%3A%2F%2Fwww.google.com. The server will
* return an error of
BAD_REQUEST if this field is not filled in. Note that if the site
* property
is not yet verified in Search Console, the reportUrl field
returned by the API
* will lead to the verification page, prompting the user
to go through that process before
* they can gain access to the Abusive
Experience Report.
* @return the request
*/
public Get get(java.lang.String name) throws java.io.IOException {
Get result = new Get(name);
initialize(result);
return result;
}
public class Get extends AbusiveExperienceReportRequest<com.google.api.services.abusiveexperiencereport.v1.model.SiteSummaryResponse> {
private static final String REST_PATH = "v1/{+name}";
private final java.util.regex.Pattern NAME_PATTERN =
java.util.regex.Pattern.compile("^sites/[^/]+$");
/**
* Gets a summary of the abusive experience rating of a site.
*
* Create a request for the method "sites.get".
*
* This request holds the parameters needed by the abusiveexperiencereport server. After
* setting any optional parameters, call the {@link Get#execute()} method to invoke the remote
* operation. <p> {@link
* Get#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)} must be
* called to initialize this instance immediately after invoking the constructor. </p>
*
* @param name The required site name. This is the site property whose abusive
experiences have been reviewed, and
* it must be URL-encoded. For example,
sites/https%3A%2F%2Fwww.google.com. The server will
* return an error of
BAD_REQUEST if this field is not filled in. Note that if the site
* property
is not yet verified in Search Console, the reportUrl field
returned by the API
* will lead to the verification page, prompting the user
to go through that process before
* they can gain access to the Abusive
Experience Report.
* @since 1.13
*/
protected Get(java.lang.String name) {
super(AbusiveExperienceReport.this, "GET", REST_PATH, null, com.google.api.services.abusiveexperiencereport.v1.model.SiteSummaryResponse.class);
this.name = com.google.api.client.util.Preconditions.checkNotNull(name, "Required parameter name must be specified.");
if (!getSuppressPatternChecks()) {
com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(),
"Parameter name must conform to the pattern " +
"^sites/[^/]+$");
}
}
@Override
public com.google.api.client.http.HttpResponse executeUsingHead() throws java.io.IOException {
return super.executeUsingHead();
}
@Override
public com.google.api.client.http.HttpRequest buildHttpRequestUsingHead() throws java.io.IOException {
return super.buildHttpRequestUsingHead();
}
@Override
public Get set$Xgafv(java.lang.String $Xgafv) {
return (Get) super.set$Xgafv($Xgafv);
}
@Override
public Get setAccessToken(java.lang.String accessToken) {
return (Get) super.setAccessToken(accessToken);
}
@Override
public Get setAlt(java.lang.String alt) {
return (Get) super.setAlt(alt);
}
@Override
public Get setCallback(java.lang.String callback) {
return (Get) super.setCallback(callback);
}
@Override
public Get setFields(java.lang.String fields) {
return (Get) super.setFields(fields);
}
@Override
public Get setKey(java.lang.String key) {
return (Get) super.setKey(key);
}
@Override
public Get setOauthToken(java.lang.String oauthToken) {
return (Get) super.setOauthToken(oauthToken);
}
@Override
public Get setPrettyPrint(java.lang.Boolean prettyPrint) {
return (Get) super.setPrettyPrint(prettyPrint);
}
@Override
public Get setQuotaUser(java.lang.String quotaUser) {
return (Get) super.setQuotaUser(quotaUser);
}
@Override
public Get setUploadType(java.lang.String uploadType) {
return (Get) super.setUploadType(uploadType);
}
@Override
public Get setUploadProtocol(java.lang.String uploadProtocol) {
return (Get) super.setUploadProtocol(uploadProtocol);
}
/**
* The required site name. This is the site property whose abusive experiences have been
* reviewed, and it must be URL-encoded. For example, sites/https%3A%2F%2Fwww.google.com. The
* server will return an error of BAD_REQUEST if this field is not filled in. Note that if the
* site property is not yet verified in Search Console, the reportUrl field returned by the
* API will lead to the verification page, prompting the user to go through that process
* before they can gain access to the Abusive Experience Report.
*/
@com.google.api.client.util.Key
private java.lang.String name;
/** The required site name. This is the site property whose abusive experiences have been reviewed, and
it must be URL-encoded. For example, sites/https%3A%2F%2Fwww.google.com. The server will return an
error of BAD_REQUEST if this field is not filled in. Note that if the site property is not yet
verified in Search Console, the reportUrl field returned by the API will lead to the verification
page, prompting the user to go through that process before they can gain access to the Abusive
Experience Report.
*/
public java.lang.String getName() {
return name;
}
/**
* The required site name. This is the site property whose abusive experiences have been
* reviewed, and it must be URL-encoded. For example, sites/https%3A%2F%2Fwww.google.com. The
* server will return an error of BAD_REQUEST if this field is not filled in. Note that if the
* site property is not yet verified in Search Console, the reportUrl field returned by the
* API will lead to the verification page, prompting the user to go through that process
* before they can gain access to the Abusive Experience Report.
*/
public Get setName(java.lang.String name) {
if (!getSuppressPatternChecks()) {
com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(),
"Parameter name must conform to the pattern " +
"^sites/[^/]+$");
}
this.name = name;
return this;
}
@Override
public Get set(String parameterName, Object value) {
return (Get) super.set(parameterName, value);
}
}
}
/**
* An accessor for creating requests from the ViolatingSites collection.
*
* <p>The typical use is:</p>
* <pre>
* {@code AbusiveExperienceReport abusiveexperiencereport = new AbusiveExperienceReport(...);}
* {@code AbusiveExperienceReport.ViolatingSites.List request = abusiveexperiencereport.violatingSites().list(parameters ...)}
* </pre>
*
* @return the resource collection
*/
public ViolatingSites violatingSites() {
return new ViolatingSites();
}
/**
* The "violatingSites" collection of methods.
*/
public class ViolatingSites {
/**
* Lists sites with Abusive Experience Report statuses of "Failing".
*
* Create a request for the method "violatingSites.list".
*
* This request holds the parameters needed by the abusiveexperiencereport server. After setting
* any optional parameters, call the {@link List#execute()} method to invoke the remote operation.
*
* @return the request
*/
public List list() throws java.io.IOException {
List result = new List();
initialize(result);
return result;
}
public class List extends AbusiveExperienceReportRequest<com.google.api.services.abusiveexperiencereport.v1.model.ViolatingSitesResponse> {
private static final String REST_PATH = "v1/violatingSites";
/**
* Lists sites with Abusive Experience Report statuses of "Failing".
*
* Create a request for the method "violatingSites.list".
*
* This request holds the parameters needed by the abusiveexperiencereport server. After
* setting any optional parameters, call the {@link List#execute()} method to invoke the remote
* operation. <p> {@link
* List#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)} must be
* called to initialize this instance immediately after invoking the constructor. </p>
*
* @since 1.13
*/
protected List() {
super(AbusiveExperienceReport.this, "GET", REST_PATH, null, com.google.api.services.abusiveexperiencereport.v1.model.ViolatingSitesResponse.class);
}
@Override
public com.google.api.client.http.HttpResponse executeUsingHead() throws java.io.IOException {
return super.executeUsingHead();
}
@Override
public com.google.api.client.http.HttpRequest buildHttpRequestUsingHead() throws java.io.IOException {
return super.buildHttpRequestUsingHead();
}
@Override
public List set$Xgafv(java.lang.String $Xgafv) {
return (List) super.set$Xgafv($Xgafv);
}
@Override
public List setAccessToken(java.lang.String accessToken) {
return (List) super.setAccessToken(accessToken);
}
@Override
public List setAlt(java.lang.String alt) {
return (List) super.setAlt(alt);
}
@Override
public List setCallback(java.lang.String callback) {
return (List) super.setCallback(callback);
}
@Override
public List setFields(java.lang.String fields) {
return (List) super.setFields(fields);
}
@Override
public List setKey(java.lang.String key) {
return (List) super.setKey(key);
}
@Override
public List setOauthToken(java.lang.String oauthToken) {
return (List) super.setOauthToken(oauthToken);
}
@Override
public List setPrettyPrint(java.lang.Boolean prettyPrint) {
return (List) super.setPrettyPrint(prettyPrint);
}
@Override
public List setQuotaUser(java.lang.String quotaUser) {
return (List) super.setQuotaUser(quotaUser);
}
@Override
public List setUploadType(java.lang.String uploadType) {
return (List) super.setUploadType(uploadType);
}
@Override
public List setUploadProtocol(java.lang.String uploadProtocol) {
return (List) super.setUploadProtocol(uploadProtocol);
}
@Override
public List set(String parameterName, Object value) {
return (List) super.set(parameterName, value);
}
}
}
/**
* Builder for {@link AbusiveExperienceReport}.
*
* <p>
* Implementation is not thread-safe.
* </p>
*
* @since 1.3.0
*/
public static final class Builder extends com.google.api.client.googleapis.services.json.AbstractGoogleJsonClient.Builder {
/**
* Returns an instance of a new builder.
*
* @param transport HTTP transport, which should normally be:
* <ul>
* <li>Google App Engine:
* {@code com.google.api.client.extensions.appengine.http.UrlFetchTransport}</li>
* <li>Android: {@code newCompatibleTransport} from
* {@code com.google.api.client.extensions.android.http.AndroidHttp}</li>
* <li>Java: {@link com.google.api.client.googleapis.javanet.GoogleNetHttpTransport#newTrustedTransport()}
* </li>
* </ul>
* @param jsonFactory JSON factory, which may be:
* <ul>
* <li>Jackson: {@code com.google.api.client.json.jackson2.JacksonFactory}</li>
* <li>Google GSON: {@code com.google.api.client.json.gson.GsonFactory}</li>
* <li>Android Honeycomb or higher:
* {@code com.google.api.client.extensions.android.json.AndroidJsonFactory}</li>
* </ul>
* @param httpRequestInitializer HTTP request initializer or {@code null} for none
* @since 1.7
*/
public Builder(com.google.api.client.http.HttpTransport transport, com.google.api.client.json.JsonFactory jsonFactory,
com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) {
super(
transport,
jsonFactory,
DEFAULT_ROOT_URL,
DEFAULT_SERVICE_PATH,
httpRequestInitializer,
false);
setBatchPath(DEFAULT_BATCH_PATH);
}
/** Builds a new instance of {@link AbusiveExperienceReport}. */
@Override
public AbusiveExperienceReport build() {
return new AbusiveExperienceReport(this);
}
@Override
public Builder setRootUrl(String rootUrl) {
return (Builder) super.setRootUrl(rootUrl);
}
@Override
public Builder setServicePath(String servicePath) {
return (Builder) super.setServicePath(servicePath);
}
@Override
public Builder setBatchPath(String batchPath) {
return (Builder) super.setBatchPath(batchPath);
}
@Override
public Builder setHttpRequestInitializer(com.google.api.client.http.HttpRequestInitializer httpRequestInitializer) {
return (Builder) super.setHttpRequestInitializer(httpRequestInitializer);
}
@Override
public Builder setApplicationName(String applicationName) {
return (Builder) super.setApplicationName(applicationName);
}
@Override
public Builder setSuppressPatternChecks(boolean suppressPatternChecks) {
return (Builder) super.setSuppressPatternChecks(suppressPatternChecks);
}
@Override
public Builder setSuppressRequiredParameterChecks(boolean suppressRequiredParameterChecks) {
return (Builder) super.setSuppressRequiredParameterChecks(suppressRequiredParameterChecks);
}
@Override
public Builder setSuppressAllChecks(boolean suppressAllChecks) {
return (Builder) super.setSuppressAllChecks(suppressAllChecks);
}
/**
* Set the {@link AbusiveExperienceReportRequestInitializer}.
*
* @since 1.12
*/
public Builder setAbusiveExperienceReportRequestInitializer(
AbusiveExperienceReportRequestInitializer abusiveexperiencereportRequestInitializer) {
return (Builder) super.setGoogleClientRequestInitializer(abusiveexperiencereportRequestInitializer);
}
@Override
public Builder setGoogleClientRequestInitializer(
com.google.api.client.googleapis.services.GoogleClientRequestInitializer googleClientRequestInitializer) {
return (Builder) super.setGoogleClientRequestInitializer(googleClientRequestInitializer);
}
}
}
| apache-2.0 |
yamamoto-febc/usacloud | vendor/github.com/sacloud/libsacloud/v2/helper/service/sim/find_service.go | 1193 |
// Copyright 2016-2021 The Libsacloud Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package sim
import (
"context"
"github.com/sacloud/libsacloud/v2/sacloud"
)
func (s *Service) Find(req *FindRequest) ([]*sacloud.SIM, error) {
return s.FindWithContext(context.Background(), req)
}
func (s *Service) FindWithContext(ctx context.Context, req *FindRequest) ([]*sacloud.SIM, error) {
if err := req.Validate(); err != nil {
return nil, err
}
params, err := req.ToRequestParameter()
if err != nil {
return nil, err
}
client := sacloud.NewSIMOp(s.caller)
found, err := client.Find(ctx, params)
if err != nil {
return nil, err
}
return found.SIMs, nil
}
| apache-2.0 |
alfredeperjesi/cucumber-blueprint | scala/cucumber-blueprint-scala-example/src/test/scala/cucumber/blueprint/jvm/example/integration/cucumber/step/common/BaseStepDefinition.scala | 663 |
package cucumber.blueprint.jvm.example.integration.cucumber.step.common
import org.apache.camel.{CamelContext, ProducerTemplate}
import cucumber.blueprint.jvm.example.infrastructure.integration.camel.ExampleOsgiService
import javax.inject.Inject
import cucumber.runtime.java.blueprint.BlueprintDescriptorLocation
@BlueprintDescriptorLocation(value = "/OSGI-INF/blueprint/blueprintContext.xml")
class BaseStepDefinition {
@Inject
protected var producerTemplate: ProducerTemplate = null
@Inject
protected var camelContext: CamelContext = null
@Inject
protected var exampleOsgiService: ExampleOsgiService = null
protected var result: String = _
}
| apache-2.0 |
timvandermeij/pdf.js | src/scripting_api/aform.js | 13871 |
/* Copyright 2020 Mozilla Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { GlobalConstants } from "./constants.js";
class AForm {
constructor(document, app, util, color) {
this._document = document;
this._app = app;
this._util = util;
this._color = color;
this._dateFormats = [
"m/d",
"m/d/yy",
"mm/dd/yy",
"mm/yy",
"d-mmm",
"d-mmm-yy",
"dd-mmm-yy",
"yy-mm-dd",
"mmm-yy",
"mmmm-yy",
"mmm d, yyyy",
"mmmm d, yyyy",
"m/d/yy h:MM tt",
"m/d/yy HH:MM",
];
this._timeFormats = ["HH:MM", "h:MM tt", "HH:MM:ss", "h:MM:ss tt"];
// The e-mail address regex below originates from:
// https://html.spec.whatwg.org/multipage/input.html#valid-e-mail-address
this._emailRegex = new RegExp(
"^[a-zA-Z0-9.!#$%&'*+\\/=?^_`{|}~-]+" +
"@[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?" +
"(?:\\.[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)*$"
);
}
_mkTargetName(event) {
return event.target ? `[ ${event.target.name} ]` : "";
}
_parseDate(cFormat, cDate) {
const ddate = Date.parse(cDate);
if (isNaN(ddate)) {
try {
return this._util.scand(cFormat, cDate);
} catch (error) {
return null;
}
} else {
return new Date(ddate);
}
}
AFMergeChange(event = globalThis.event) {
if (event.willCommit) {
return event.value.toString();
}
return this._app._eventDispatcher.mergeChange(event);
}
AFParseDateEx(cString, cOrder) {
return this._parseDate(cOrder, cString);
}
AFExtractNums(str) {
if (typeof str === "number") {
return [str];
}
if (!str || typeof str !== "string") {
return null;
}
const first = str.charAt(0);
if (first === "." || first === ",") {
str = `0${str}`;
}
const numbers = str.match(/(\d+)/g);
// str.match returns null when no digits are found, so guard against that.
if (!numbers || numbers.length === 0) {
return null;
}
return numbers;
}
AFMakeNumber(str) {
if (typeof str === "number") {
return str;
}
if (typeof str !== "string") {
return null;
}
str = str.trim().replace(",", ".");
const number = parseFloat(str);
if (isNaN(number) || !isFinite(number)) {
return null;
}
return number;
}
AFMakeArrayFromList(string) {
if (typeof string === "string") {
return string.split(/, ?/g);
}
return string;
}
AFNumber_Format(
nDec,
sepStyle,
negStyle,
currStyle /* unused */,
strCurrency,
bCurrencyPrepend
) {
const event = globalThis.event;
if (!event.value) {
return;
}
let value = this.AFMakeNumber(event.value);
if (value === null) {
event.value = "";
return;
}
const sign = Math.sign(value);
const buf = [];
let hasParen = false;
if (sign === -1 && bCurrencyPrepend && negStyle === 0) {
buf.push("-");
}
if ((negStyle === 2 || negStyle === 3) && sign === -1) {
buf.push("(");
hasParen = true;
}
if (bCurrencyPrepend) {
buf.push(strCurrency);
}
// sepStyle is an integer in [0;4]
sepStyle = Math.min(Math.max(0, Math.floor(sepStyle)), 4);
buf.push("%,", sepStyle, ".", nDec.toString(), "f");
if (!bCurrencyPrepend) {
buf.push(strCurrency);
}
if (hasParen) {
buf.push(")");
}
if (negStyle === 1 || negStyle === 3) {
event.target.textColor = sign === 1 ? this._color.black : this._color.red;
}
if ((negStyle !== 0 || bCurrencyPrepend) && sign === -1) {
value = -value;
}
const formatStr = buf.join("");
event.value = this._util.printf(formatStr, value);
}
AFNumber_Keystroke(
nDec /* unused */,
sepStyle,
negStyle /* unused */,
currStyle /* unused */,
strCurrency /* unused */,
bCurrencyPrepend /* unused */
) {
const event = globalThis.event;
let value = this.AFMergeChange(event);
if (!value) {
return;
}
value = value.trim();
let pattern;
if (sepStyle > 1) {
// comma sep
pattern = event.willCommit
? /^[+-]?(\d+(,\d*)?|,\d+)$/
: /^[+-]?\d*,?\d*$/;
} else {
// dot sep
pattern = event.willCommit
? /^[+-]?(\d+(\.\d*)?|\.\d+)$/
: /^[+-]?\d*\.?\d*$/;
}
if (!pattern.test(value)) {
if (event.willCommit) {
const err = `${GlobalConstants.IDS_INVALID_VALUE} ${this._mkTargetName(
event
)}`;
this._app.alert(err);
}
event.rc = false;
}
if (event.willCommit && sepStyle > 1) {
event.value = parseFloat(value.replace(",", "."));
}
}
AFPercent_Format(nDec, sepStyle, percentPrepend = false) {
if (typeof nDec !== "number") {
return;
}
if (typeof sepStyle !== "number") {
return;
}
if (nDec < 0) {
throw new Error("Invalid nDec value in AFPercent_Format");
}
const event = globalThis.event;
if (nDec > 512) {
event.value = "%";
return;
}
nDec = Math.floor(nDec);
// sepStyle is an integer in [0;4]
sepStyle = Math.min(Math.max(0, Math.floor(sepStyle)), 4);
let value = this.AFMakeNumber(event.value);
if (value === null) {
event.value = "%";
return;
}
const formatStr = `%,${sepStyle}.${nDec}f`;
value = this._util.printf(formatStr, value * 100);
if (percentPrepend) {
event.value = `%${value}`;
} else {
event.value = `${value}%`;
}
}
AFPercent_Keystroke(nDec, sepStyle) {
this.AFNumber_Keystroke(nDec, sepStyle, 0, 0, "", true);
}
AFDate_FormatEx(cFormat) {
const event = globalThis.event;
const value = event.value;
if (!value) {
return;
}
const date = this._parseDate(cFormat, value);
if (date !== null) {
event.value = this._util.printd(cFormat, date);
}
}
AFDate_Format(pdf) {
if (pdf >= 0 && pdf < this._dateFormats.length) {
this.AFDate_FormatEx(this._dateFormats[pdf]);
}
}
AFDate_KeystrokeEx(cFormat) {
const event = globalThis.event;
if (!event.willCommit) {
return;
}
const value = this.AFMergeChange(event);
if (!value) {
return;
}
if (this._parseDate(cFormat, value) === null) {
const invalid = GlobalConstants.IDS_INVALID_DATE;
const invalid2 = GlobalConstants.IDS_INVALID_DATE2;
const err = `${invalid} ${this._mkTargetName(
event
)}${invalid2}${cFormat}`;
this._app.alert(err);
event.rc = false;
}
}
AFDate_Keystroke(pdf) {
if (pdf >= 0 && pdf < this._dateFormats.length) {
this.AFDate_KeystrokeEx(this._dateFormats[pdf]);
}
}
AFRange_Validate(bGreaterThan, nGreaterThan, bLessThan, nLessThan) {
const event = globalThis.event;
if (!event.value) {
return;
}
const value = this.AFMakeNumber(event.value);
if (value === null) {
return;
}
bGreaterThan = !!bGreaterThan;
bLessThan = !!bLessThan;
if (bGreaterThan) {
nGreaterThan = this.AFMakeNumber(nGreaterThan);
if (nGreaterThan === null) {
return;
}
}
if (bLessThan) {
nLessThan = this.AFMakeNumber(nLessThan);
if (nLessThan === null) {
return;
}
}
let err = "";
if (bGreaterThan && bLessThan) {
if (value < nGreaterThan || value > nLessThan) {
err = this._util.printf(
GlobalConstants.IDS_GT_AND_LT,
nGreaterThan,
nLessThan
);
}
} else if (bGreaterThan) {
if (value < nGreaterThan) {
err = this._util.printf(GlobalConstants.IDS_GREATER_THAN, nGreaterThan);
}
} else if (value > nLessThan) {
err = this._util.printf(GlobalConstants.IDS_LESS_THAN, nLessThan);
}
if (err) {
this._app.alert(err);
event.rc = false;
}
}
AFSimple(cFunction, nValue1, nValue2) {
const value1 = this.AFMakeNumber(nValue1);
if (value1 === null) {
throw new Error("Invalid nValue1 in AFSimple");
}
const value2 = this.AFMakeNumber(nValue2);
if (value2 === null) {
throw new Error("Invalid nValue2 in AFSimple");
}
switch (cFunction) {
case "AVG":
return (value1 + value2) / 2;
case "SUM":
return value1 + value2;
case "PRD":
return value1 * value2;
case "MIN":
return Math.min(value1, value2);
case "MAX":
return Math.max(value1, value2);
}
throw new Error("Invalid cFunction in AFSimple");
}
AFSimple_Calculate(cFunction, cFields) {
const actions = {
AVG: args => args.reduce((acc, value) => acc + value, 0) / args.length,
SUM: args => args.reduce((acc, value) => acc + value, 0),
PRD: args => args.reduce((acc, value) => acc * value, 1),
MIN: args =>
args.reduce((acc, value) => Math.min(acc, value), Number.MAX_VALUE),
MAX: args =>
args.reduce((acc, value) => Math.max(acc, value), Number.MIN_VALUE),
};
if (!(cFunction in actions)) {
throw new TypeError("Invalid function in AFSimple_Calculate");
}
const event = globalThis.event;
const values = [];
for (const cField of cFields) {
const field = this._document.getField(cField);
const number = this.AFMakeNumber(field.value);
if (number !== null) {
values.push(number);
}
}
if (values.length === 0) {
event.value = cFunction === "PRD" ? 1 : 0;
return;
}
const res = actions[cFunction](values);
event.value = Math.round(1e6 * res) / 1e6;
}
AFSpecial_Format(psf) {
const event = globalThis.event;
if (!event.value) {
return;
}
psf = this.AFMakeNumber(psf);
let formatStr;
switch (psf) {
case 0:
formatStr = "99999";
break;
case 1:
formatStr = "99999-9999";
break;
case 2:
if (this._util.printx("9999999999", event.value).length >= 10) {
formatStr = "(999) 999-9999";
} else {
formatStr = "999-9999";
}
break;
case 3:
formatStr = "999-99-9999";
break;
default:
throw new Error("Invalid psf in AFSpecial_Format");
}
event.value = this._util.printx(formatStr, event.value);
}
AFSpecial_KeystrokeEx(cMask) {
if (!cMask) {
return;
}
const event = globalThis.event;
const value = this.AFMergeChange(event);
const checkers = new Map([
["9", char => char >= "0" && char <= "9"],
[
"A",
char => ("a" <= char && char <= "z") || ("A" <= char && char <= "Z"),
],
[
"O",
char =>
("a" <= char && char <= "z") ||
("A" <= char && char <= "Z") ||
("0" <= char && char <= "9"),
],
["X", char => true],
]);
function _checkValidity(_value, _cMask) {
for (let i = 0, ii = _value.length; i < ii; i++) {
const mask = _cMask.charAt(i);
const char = _value.charAt(i);
const checker = checkers.get(mask);
if (checker) {
if (!checker(char)) {
return false;
}
} else if (mask !== char) {
return false;
}
}
return true;
}
if (!value) {
return;
}
const err = `${GlobalConstants.IDS_INVALID_VALUE} = "${cMask}"`;
if (value.length > cMask.length) {
this._app.alert(err);
event.rc = false;
return;
}
if (event.willCommit) {
if (value.length < cMask.length) {
this._app.alert(err);
event.rc = false;
return;
}
if (!_checkValidity(value, cMask)) {
this._app.alert(err);
event.rc = false;
return;
}
event.value += cMask.substring(value.length);
return;
}
if (value.length < cMask.length) {
cMask = cMask.substring(0, value.length);
}
if (!_checkValidity(value, cMask)) {
this._app.alert(err);
event.rc = false;
}
}
AFSpecial_Keystroke(psf) {
const event = globalThis.event;
if (!event.value) {
return;
}
psf = this.AFMakeNumber(psf);
let formatStr;
switch (psf) {
case 0:
formatStr = "99999";
break;
case 1:
formatStr = "99999-9999";
break;
case 2:
const value = this.AFMergeChange(event);
if (value.length > 8 || value.startsWith("(")) {
formatStr = "(999) 999-9999";
} else {
formatStr = "999-9999";
}
break;
case 3:
formatStr = "999-99-9999";
break;
default:
throw new Error("Invalid psf in AFSpecial_Keystroke");
}
this.AFSpecial_KeystrokeEx(formatStr);
}
AFTime_FormatEx(cFormat) {
this.AFDate_FormatEx(cFormat);
}
AFTime_Format(pdf) {
if (pdf >= 0 && pdf < this._timeFormats.length) {
this.AFDate_FormatEx(this._timeFormats[pdf]);
}
}
AFTime_KeystrokeEx(cFormat) {
this.AFDate_KeystrokeEx(cFormat);
}
AFTime_Keystroke(pdf) {
if (pdf >= 0 && pdf < this._timeFormats.length) {
this.AFDate_KeystrokeEx(this._timeFormats[pdf]);
}
}
eMailValidate(str) {
return this._emailRegex.test(str);
}
}
export { AForm };
| apache-2.0 |
fran98moreno/Outfit | plantillaWEB/controller/Utils.php | 1672 |
<?php
// Swift_Message documentation [http://goo.gl/Z12Bo]
class Utilidades {
const ENTORNO_DESARROLLO = 'localhost';
const ENTORNO_PRODUCCION = 'dictados.wesped.es';
// Returns the current environment
public static function getEntorno() {
switch ($_SERVER['SERVER_NAME']) {
case self::ENTORNO_DESARROLLO:
return self::ENTORNO_DESARROLLO;
break;
case self::ENTORNO_PRODUCCION:
return self::ENTORNO_PRODUCCION;
break;
default:
throw new Exception("Entorno de servidor desconocido. Compruebe que '".$_SERVER['SERVER_NAME']."' se encuentra entre los nombres de servidor indicados como entornos.");
break;
}
}
// Indicates whether we are running in the specified environment
public static function isEntorno($entorno) {
return self::getEntorno()==$entorno;
}
// Gets the http[s] URL of the current server
public static function getCurrentUrl($full = true) {
return "http" . (($_SERVER['SERVER_PORT']==443) ? "s://" : "://") . $_SERVER['HTTP_HOST'];
}
// Gets the current session ID
public static function getSessionID() {
return session_id();
}
public static function generateUUID() {
return sprintf( '%04x%04x-%04x-%04x-%04x-%04x%04x%04x',
mt_rand( 0, 0xffff ), mt_rand( 0, 0xffff ),
mt_rand( 0, 0xffff ),
mt_rand( 0, 0x0fff ) | 0x4000,
mt_rand( 0, 0x3fff ) | 0x8000,
mt_rand( 0, 0xffff ), mt_rand( 0, 0xffff ), mt_rand( 0, 0xffff )
);
}
public static function mostrarSessionID(){
$app = \Slim\Slim::getInstance();
global $twig;
$valores=array(
'id_sesion'=>self::getSessionID()
);
echo $twig->render('session.php',$valores);
}
}
?>
| apache-2.0 |
vlzl/HealthXuhui | app/src/main/java/com/wondersgroup/healthxuhui/util/NetUtil.java | 1105 |
package com.wondersgroup.healthxuhui.util;
import android.content.Context;
import android.net.ConnectivityManager;
import android.net.NetworkInfo;
import android.support.v4.net.ConnectivityManagerCompat;
/**
* Created by yang on 16/7/10.
*/
public class NetUtil {
public static final int MOBILE = 0; // mobile network
public static final int WIFI = 1; // Wi-Fi network
public static final int EMPTY = 2; // neither
/**
* Gets the current network type.
* @param context
* @return
*/
public static int getNetType(Context context){
ConnectivityManager manager = (ConnectivityManager) context.getSystemService(Context.CONNECTIVITY_SERVICE);
NetworkInfo networkInfo = manager.getActiveNetworkInfo();
// getActiveNetworkInfo() returns null when no network is active.
if (networkInfo == null) {
return EMPTY;
}
int type = networkInfo.getType();
switch (type){
case ConnectivityManager.TYPE_WIFI:
LogUtil.i("wifi");
return WIFI;
case ConnectivityManager.TYPE_MOBILE:
LogUtil.i("mobile");
return MOBILE;
default:
return EMPTY;
}
}
}
|
apache-2.0
|
freeVM/freeVM
|
enhanced/buildtest/tests/functional/src/test/functional/org/apache/harmony/test/func/api/java/beans/persistence/beans/Bean2.java
|
2628
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.harmony.test.func.api.java.beans.persistence.beans;
import java.io.Serializable;
import java.lang.reflect.Field;
import java.util.Arrays;
/**
*/
public class Bean2 implements Serializable {
private Bean3 title;
private Bean3 subTitle;
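    // Reflection-based equality: compares every declared field of the two instances, using Arrays.equals for array-typed fields.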
public boolean equals(Object obj) {
if (obj == null)
return false;
Class class1 = obj.getClass();
if (class1 != this.getClass())
return false;
Field[] fields = class1.getDeclaredFields();
for (int i = 0; i < fields.length; i++) {
try {
Object field = fields[i].get(obj);
if (field == null) {
if (fields[i].get(this) == null)
continue;
else
return false;
}
if (field.getClass().isArray()) {
if (int[].class.equals(field.getClass())) {
if (!Arrays.equals((int[])field, (int[])fields[i]
.get(this)))
return false;
} else if (!Arrays.equals((Object[])field,
(Object[])fields[i].get(this)))
return false;
continue;
}
if (!field.equals(fields[i].get(this)))
return false;
} catch (Exception e) {
e.printStackTrace();
}
}
return true;
}
public Bean3 getSubTitle() {
return subTitle;
}
public void setSubTitle(Bean3 subTitle) {
this.subTitle = subTitle;
}
public Bean3 getTitle() {
return title;
}
public void setTitle(Bean3 title) {
this.title = title;
}
}
|
apache-2.0
|
appshaper/appshaper
|
test/spec/collection.spec.js
|
7057
|
describe('collection', function () {
var Model,
Collection;
it('should be able to import the Model and Collection modules', function (done) {
require([
'appshaper/mvc/model',
'appshaper/mvc/collection'
], function (
ModelModule,
CollectionModule
) {
expect(ModelModule).toEqual(jasmine.any(Function));
expect(CollectionModule).toEqual(jasmine.any(Function));
Model = ModelModule;
Collection = CollectionModule;
done();
});
});
it('should be able to create a basic collection', function () {
var basicModel = Model('basic', {
fields: {
id: 'number',
title: 'string',
greeting: 'string'
}
}),
data1 = {
id: 1,
title: 'title 1',
greeting: 'hi'
},
data2 = {
id: 2,
title: 'title 2',
greeting: 'hello'
},
data3 = {
id: 1,
title: 'title 1',
greeting: 'hello there!'
},
data4 = {
id: 2,
title: 'title 2',
greeting: 'hey'
},
basicCollection = Collection('basic', {
model: basicModel
}),
collection1 = basicCollection([
basicModel(data1),
basicModel(data2)
]),
collection2 = basicCollection([
basicModel(data3),
basicModel(data4)
]);
expect(collection1.getFlatData()).toEqual([data1, data2]);
expect(collection2.getFlatData()).toEqual([data3, data4]);
});
it('should be able to create a basic collection and use custom methods', function () {
var basicModel = Model('basic', {
fields: {
id: 'number',
title: 'string',
greeting: 'string'
}
}),
basicCollection = Collection('basic', {
model: basicModel,
methods: {
toString: function () {
var string = '',
flatData = this.getFlatData();
flatData.forEach(function (item) {
                        string += item.id + ', ' + item.title + ', ' + item.greeting + ' | ';
});
return string;
}
}
}),
collection = basicCollection([
basicModel({
id: 20,
title: 'Some title here',
greeting: 'hey dude!'
}),
basicModel({
id: 21,
title: 'Some other title here',
greeting: 'hey there!'
})
]);
expect(collection.toString()).toEqual(
'20, Some title here, hey dude! | 21, Some other title here, hey there! | ');
});
it('should be able to retrieve specific models by using the get and getAll methods', function () {
var basicModel = Model('basic', {
fields: {
id: 'number',
type: 'string',
model: 'string',
title: 'string',
greeting: 'string'
}
}),
basicCollection = Collection('basic', {
model: basicModel
}),
model1 = basicModel({
id: 1,
type: 'a',
model: 'standard',
title: 'The title',
greeting: 'I salute you!'
}),
model2 = basicModel({
id: 2,
type: 'b',
model: 'advanced',
title: 'Some other title here',
greeting: 'Reporting!'
}),
model3 = basicModel({
id: 3,
type: 'c',
model: 'standard',
title: 'And yet some other title here',
greeting: 'Yes sir!'
}),
model4 = basicModel({
id: 4,
type: 'a',
model: 'advanced',
title: 'Some title here',
greeting: 'I salute you!'
}),
model5 = basicModel({
id: 5,
type: 'a',
model: 'standard',
title: 'The title',
greeting: 'Hello!'
}),
collection = basicCollection([
model1,
model2,
model3,
model4,
model5
]);
var allModels = collection.getAll({
type: 'a',
model: 'standard',
title: 'The title'
}),
allModelsFlat = collection.getAll({
type: 'a',
model: 'standard',
title: 'The title'
}, true),
model = collection.get({
id: 2,
type: 'b',
greeting: 'Reporting!'
}),
modelFlat = collection.get({
id: 2,
type: 'b',
greeting: 'Reporting!'
}, true);
expect(allModels).toEqual([
model1,
model5
]);
expect(allModelsFlat).toEqual([
model1.getData(),
model5.getData()
]);
expect(model).toEqual(model2);
expect(modelFlat).toEqual(model2.getData());
});
it('should be able to remove a model from the collection using the remove method', function () {
var basicModel = Model('basic', {
fields: {
id: 'number',
title: 'string',
greeting: 'string'
}
}),
basicCollection = Collection('basic', {
model: basicModel
}),
model1 = basicModel({
id: 20,
title: 'Some title here',
greeting: 'hey dude!'
}),
model2 = basicModel({
id: 21,
title: 'Some other title here',
greeting: 'hey there!'
}),
model3 = basicModel({
id: 22,
title: 'And yet some other title here',
greeting: 'hey!'
            }),
collection = basicCollection([
model1,
model2,
model3
]);
collection.remove(model2);
expect(collection.getData()).toEqual([model1, model3]);
});
});
|
apache-2.0
|
GoogleCloudPlatform/datanucleus-appengine
|
tests/com/google/appengine/datanucleus/jdo/JDOSubclassTest.java
|
29099
|
/**********************************************************************
Copyright (c) 2009 Google Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
**********************************************************************/
package com.google.appengine.datanucleus.jdo;
import com.google.appengine.api.datastore.Entity;
import com.google.appengine.api.datastore.EntityNotFoundException;
import com.google.appengine.api.datastore.Key;
import com.google.appengine.api.datastore.KeyFactory;
import com.google.appengine.datanucleus.DatastoreManager;
import com.google.appengine.datanucleus.test.jdo.SubclassesJDO;
import com.google.appengine.datanucleus.test.jdo.SubclassesJDO.CompleteTableParentNoChildStrategy;
import com.google.appengine.datanucleus.test.jdo.SubclassesJDO.CompleteTableParentWithCompleteTableChild;
import com.google.appengine.datanucleus.test.jdo.SubclassesJDO.CompleteTableParentWithEmbedded;
import com.google.appengine.datanucleus.test.jdo.SubclassesJDO.CompleteTableParentWithNewTableChild;
import com.google.appengine.datanucleus.test.jdo.SubclassesJDO.CompleteTableParentWithSubclassTableChild;
import com.google.appengine.datanucleus.test.jdo.SubclassesJDO.DurableChild;
import com.google.appengine.datanucleus.test.jdo.SubclassesJDO.Grandchild;
import com.google.appengine.datanucleus.test.jdo.SubclassesJDO.NewTableParentWithCompleteTableChild;
import com.google.appengine.datanucleus.test.jdo.SubclassesJDO.NewTableParentWithNewTableChild;
import com.google.appengine.datanucleus.test.jdo.SubclassesJDO.NewTableParentWithSubclassTableChild;
import com.google.appengine.datanucleus.test.jdo.SubclassesJDO.OverrideParent;
import com.google.appengine.datanucleus.test.jdo.SubclassesJDO.Parent;
import com.google.appengine.datanucleus.test.jdo.SubclassesJDO.SubclassTableParentWithCompleteTableChild;
import com.google.appengine.datanucleus.test.jdo.SubclassesJDO.SubclassTableParentWithNewTableChild;
import com.google.appengine.datanucleus.test.jdo.SubclassesJDO.SubclassTableParentWithSubclassTableChild;
import org.datanucleus.api.jdo.exceptions.NoPersistenceInformationException;
import java.util.List;
import javax.jdo.JDOFatalUserException;
import javax.jdo.Query;
/**
* There's something flaky here that will probably show up as a real bug at
* some point. If the Parent class gets used first, the subclass
* tests fail. To get around this I'm just running the subclass tests
* first. There's definitely something funny going on though.
*
* @author Max Ross <maxr@google.com>
*/
// TODO(maxr): non-transactional tests
public class JDOSubclassTest extends JDOTestCase {
public void testGrandchildren() throws Exception {
testGrandchild(new CompleteTableParentWithCompleteTableChild.Child.Grandchild());
testGrandchild(new CompleteTableParentNoChildStrategy.Child.Grandchild());
testGrandchild(new SubclassTableParentWithCompleteTableChild.Child.Grandchild());
}
public void testChildren() throws Exception {
testChild(new CompleteTableParentWithCompleteTableChild.Child());
testChild(new CompleteTableParentNoChildStrategy.Child());
testChild(new SubclassTableParentWithCompleteTableChild.Child());
testChild(new SubclassTableParentWithNewTableChild.Child());
testChild(new NewTableParentWithCompleteTableChild.Child());
}
public void testUnsupportedStrategies_GAE() {
assertUnsupportedByGAE(new NewTableParentWithNewTableChild.Child());
assertUnsupportedByGAE(new CompleteTableParentWithNewTableChild.Child());
assertUnsupportedByGAE(new SubclassTableParentWithNewTableChild.Child.Grandchild());
}
public void testUnsupportedStrategies_DataNuc() throws Exception {
assertUnsupportedByDataNuc(new SubclassTableParentWithSubclassTableChild.Child());
assertUnsupportedByDataNuc(new SubclassTableParentWithCompleteTableChild());
}
public void testParents() throws Exception {
testParent(new CompleteTableParentWithCompleteTableChild());
testParent(new CompleteTableParentWithNewTableChild());
testParent(new CompleteTableParentWithSubclassTableChild());
testParent(new CompleteTableParentNoChildStrategy());
testParent(new NewTableParentWithCompleteTableChild());
testParent(new NewTableParentWithSubclassTableChild());
testParent(new NewTableParentWithNewTableChild());
}
public void testOverride() throws Exception {
OverrideParent.Child child = new OverrideParent.Child();
child.setOverriddenString("blarg");
beginTxn();
pm.makePersistent(child);
commitTxn();
Entity e = ds.get(KeyFactory.createKey(kindForClass(child.getClass()), child.getId()));
assertEquals("blarg", e.getProperty("overridden_string"));
assertFalse(e.hasProperty("overriddenProperty"));
}
public void testEmbedded_Child() throws Exception {
CompleteTableParentWithEmbedded.Child child = new CompleteTableParentWithEmbedded.Child();
child.setAString("aString");
child.setBString("bString");
SubclassesJDO.IsEmbeddedOnly embedded = new SubclassesJDO.IsEmbeddedOnly();
embedded.setVal0("embedded val 0");
embedded.setVal1("embedded val 1");
child.setEmbedded(embedded);
SubclassesJDO.IsEmbeddedOnlyBase embeddedBase = new SubclassesJDO.IsEmbeddedOnlyBase();
embeddedBase.setVal0("embedded base val 0");
child.setEmbeddedBase(embeddedBase);
SubclassesJDO.IsEmbeddedOnly2 embedded2 = new SubclassesJDO.IsEmbeddedOnly2();
embedded2.setVal2("embedded val 2");
embedded2.setVal3("embedded val 3");
child.setEmbedded2(embedded2);
SubclassesJDO.IsEmbeddedOnlyBase2 embeddedBase2 = new SubclassesJDO.IsEmbeddedOnlyBase2();
embeddedBase2.setVal2("embedded base val 2");
child.setEmbeddedBase2(embeddedBase2);
beginTxn();
pm.makePersistent(child);
commitTxn();
Key key = KeyFactory.createKey(kindForClass(child.getClass()), child.getId());
Entity e = ds.get(key);
assertEquals("aString", e.getProperty("aString"));
assertEquals("bString", e.getProperty("bString"));
assertEquals("embedded val 0", e.getProperty("val0"));
assertEquals("embedded val 1", e.getProperty("val1"));
assertEquals("embedded base val 0", e.getProperty("VAL0"));
assertEquals("embedded val 2", e.getProperty("val2"));
assertEquals("embedded val 3", e.getProperty("val3"));
assertEquals("embedded base val 2", e.getProperty("VAL2"));
pm.close();
pm = pmf.getPersistenceManager();
beginTxn();
child = pm.getObjectById(child.getClass(), child.getId());
assertEmbeddedChildContents(child);
commitTxn();
pm.close();
pm = pmf.getPersistenceManager();
beginTxn();
Query q = pm.newQuery("select from " + child.getClass().getName() + " where embedded.val1 == :p "
+ "order by embedded.val1 desc, embedded.val0 desc, embeddedBase.val0 desc, "
+ "embedded2.val2 desc, embedded2.val3 desc, embeddedBase2.val2");
q.setUnique(true);
child = (CompleteTableParentWithEmbedded.Child) q.execute("embedded val 1");
assertEmbeddedChildContents(child);
q = pm.newQuery("select from " + child.getClass().getName() + " where embedded.val0 == :p "
+ "order by embedded.val1 desc, embedded.val0 desc, embeddedBase.val0 desc, "
+ "embedded2.val2 desc, embedded2.val3 desc, embeddedBase2.val2");
q.setUnique(true);
child = (CompleteTableParentWithEmbedded.Child) q.execute("embedded val 0");
assertEmbeddedChildContents(child);
q = pm.newQuery("select from " + child.getClass().getName() + " where embeddedBase.val0 == :p "
+ "order by embedded.val1 desc, embedded.val0 desc, embeddedBase.val0 desc, "
+ "embedded2.val2 desc, embedded2.val3 desc, embeddedBase2.val2");
q.setUnique(true);
child = (CompleteTableParentWithEmbedded.Child) q.execute("embedded base val 0");
assertEmbeddedChildContents(child);
q = pm.newQuery("select from " + child.getClass().getName() + " where embedded2.val2 == :p "
+ "order by embedded.val1 desc, embedded.val0 desc, embeddedBase.val0 desc, "
+ "embedded2.val2 desc, embedded2.val3 desc, embeddedBase2.val2");
q.setUnique(true);
child = (CompleteTableParentWithEmbedded.Child) q.execute("embedded val 2");
assertEmbeddedChildContents(child);
q = pm.newQuery("select from " + child.getClass().getName() + " where embedded2.val3 == :p "
+ "order by embedded.val1 desc, embedded.val0 desc, embeddedBase.val0 desc, "
+ "embedded2.val2 desc, embedded2.val3 desc, embeddedBase2.val2");
q.setUnique(true);
child = (CompleteTableParentWithEmbedded.Child) q.execute("embedded val 3");
assertEmbeddedChildContents(child);
q = pm.newQuery("select from " + child.getClass().getName() + " where embeddedBase2.val2 == :p "
+ "order by embedded.val1 desc, embedded.val0 desc, embeddedBase.val0 desc, "
+ "embedded2.val2 desc, embedded2.val3 desc, embeddedBase2.val2");
q.setUnique(true);
child = (CompleteTableParentWithEmbedded.Child) q.execute("embedded base val 2");
assertEmbeddedChildContents(child);
q = pm.newQuery("select embedded.val1, embedded.val0, embeddedBase.val0, embedded2.val2, embedded2.val3, embeddedBase2.val2 from " +
child.getClass().getName() + " where embeddedBase2.val2 == :p");
q.setUnique(true);
Object[] result = (Object[]) q.execute("embedded base val 2");
assertEquals("embedded val 1", result[0]);
assertEquals("embedded val 0", result[1]);
assertEquals("embedded base val 0", result[2]);
assertEquals("embedded val 2", result[3]);
assertEquals("embedded val 3", result[4]);
assertEquals("embedded base val 2", result[5]);
pm.deletePersistent(child);
commitTxn();
try {
ds.get(key);
fail("expected enfe");
} catch (EntityNotFoundException enfe) {
// good
}
}
public void testEmbedded_Parent() throws Exception {
CompleteTableParentWithEmbedded parent = new CompleteTableParentWithEmbedded();
parent.setAString("aString");
SubclassesJDO.IsEmbeddedOnly embedded = new SubclassesJDO.IsEmbeddedOnly();
embedded.setVal0("embedded val 0");
embedded.setVal1("embedded val 1");
parent.setEmbedded(embedded);
SubclassesJDO.IsEmbeddedOnlyBase embeddedBase = new SubclassesJDO.IsEmbeddedOnlyBase();
embeddedBase.setVal0("embedded base val 0");
parent.setEmbeddedBase(embeddedBase);
beginTxn();
pm.makePersistent(parent);
commitTxn();
Key key = KeyFactory.createKey(kindForClass(parent.getClass()), parent.getId());
Entity e = ds.get(key);
assertEquals("aString", e.getProperty("aString"));
assertEquals("embedded val 0", e.getProperty("val0"));
assertEquals("embedded val 1", e.getProperty("val1"));
assertEquals("embedded base val 0", e.getProperty("VAL0"));
pm.close();
pm = pmf.getPersistenceManager();
beginTxn();
parent = pm.getObjectById(parent.getClass(), parent.getId());
assertEmbeddedParentContents(parent);
commitTxn();
pm.close();
pm = pmf.getPersistenceManager();
beginTxn();
Query q = pm.newQuery(
"select from " + parent.getClass().getName() + " where embedded.val1 == :p "
+ "order by embedded.val1 desc, embedded.val0 asc, embeddedBase.val0 desc");
q.setUnique(true);
parent = (CompleteTableParentWithEmbedded) q.execute("embedded val 1");
assertEmbeddedParentContents(parent);
q = pm.newQuery(
"select from " + parent.getClass().getName() + " where embedded.val0 == :p "
+ "order by embedded.val1 desc, embedded.val0 asc, embeddedBase.val0 desc");
q.setUnique(true);
parent = (CompleteTableParentWithEmbedded) q.execute("embedded val 0");
assertEmbeddedParentContents(parent);
q = pm.newQuery(
"select from " + parent.getClass().getName() + " where embeddedBase.val0 == :p "
+ "order by embedded.val1 desc, embedded.val0 asc, embeddedBase.val0 desc");
q.setUnique(true);
parent = (CompleteTableParentWithEmbedded) q.execute("embedded base val 0");
assertEmbeddedParentContents(parent);
q = pm.newQuery("select embedded.val1, embedded.val0, embeddedBase.val0 from " +
parent.getClass().getName() + " where embeddedBase.val0 == :p "
+ "order by embedded.val1 desc, embedded.val0 asc, embeddedBase.val0 desc");
q.setUnique(true);
Object[] result = (Object[]) q.execute("embedded base val 0");
assertEquals("embedded val 1", result[0]);
assertEquals("embedded val 0", result[1]);
assertEquals("embedded base val 0", result[2]);
pm.deletePersistent(parent);
commitTxn();
try {
ds.get(key);
fail("expected enfe");
} catch (EntityNotFoundException enfe) {
// good
}
}
public void testNondurableParent() {
DurableChild dc = new DurableChild();
dc.setStr("yar");
beginTxn();
pm.makePersistent(dc);
commitTxn();
beginTxn();
dc = pm.getObjectById(DurableChild.class, dc.getId());
assertEquals("yar", dc.getStr());
}
  // This is absurd, but if the signatures of this method and the one below
  // refer to the actual type we want, the runtime enhancer gets totally
  // confused.
private void assertEmbeddedChildContents(Object obj) {
CompleteTableParentWithEmbedded.Child child = (CompleteTableParentWithEmbedded.Child) obj;
assertEquals("bString", child.getBString());
assertEquals("embedded val 2", child.getEmbedded2().getVal2());
assertEquals("embedded val 3", child.getEmbedded2().getVal3());
assertEquals("embedded base val 2", child.getEmbeddedBase2().getVal2());
assertEmbeddedParentContents(child);
}
private void assertEmbeddedParentContents(Object obj) {
CompleteTableParentWithEmbedded parentWithEmbedded = (CompleteTableParentWithEmbedded) obj;
assertEquals("aString", parentWithEmbedded.getAString());
assertEquals("embedded val 0", parentWithEmbedded.getEmbedded().getVal0());
assertEquals("embedded val 1", parentWithEmbedded.getEmbedded().getVal1());
assertEquals("embedded base val 0", parentWithEmbedded.getEmbeddedBase().getVal0());
}
private void assertUnsupportedByDataNuc(Object obj) {
switchDatasource(PersistenceManagerFactoryName.transactional);
beginTxn();
try {
pm.makePersistent(obj);
fail("expected exception");
} catch (NoPersistenceInformationException e) {
// good
}
rollbackTxn();
}
private void assertUnsupportedByGAE(Object obj) {
switchDatasource(PersistenceManagerFactoryName.transactional);
beginTxn();
try {
pm.makePersistent(obj);
fail("expected exception");
} catch (JDOFatalUserException e) {
// good
assertTrue(e.getCause().getClass().getName(),
DatastoreManager.UnsupportedInheritanceStrategyException.class.isAssignableFrom(e.getCause().getClass()));
}
rollbackTxn();
}
private void testInsertParent(Parent parent) throws Exception {
parent.setAString("a");
beginTxn();
pm.makePersistent(parent);
commitTxn();
Entity e = ds.get(KeyFactory.createKey(kindForClass(parent.getClass()), parent.getId()));
assertEquals("a", e.getProperty("aString"));
}
private void testInsertChild(SubclassesJDO.Child child) throws Exception {
child.setAString("a");
child.setBString("b");
beginTxn();
pm.makePersistent(child);
commitTxn();
Entity e = ds.get(KeyFactory.createKey(kindForClass(child.getClass()), child.getId()));
assertEquals("a", e.getProperty("aString"));
assertEquals("b", e.getProperty("bString"));
}
private void testInsertGrandchild(com.google.appengine.datanucleus.test.jdo.SubclassesJDO.Grandchild grandchild) throws Exception {
grandchild.setAString("a");
grandchild.setBString("b");
grandchild.setCString("c");
beginTxn();
pm.makePersistent(grandchild);
commitTxn();
Entity e = ds.get(KeyFactory.createKey(kindForClass(grandchild.getClass()), grandchild.getId()));
assertEquals("a", e.getProperty("aString"));
assertEquals("b", e.getProperty("bString"));
assertEquals("c", e.getProperty("cString"));
}
private void testFetchParent(Class<? extends Parent> parentClass) {
Entity e = new Entity(kindForClass(parentClass));
e.setProperty("aString", "a");
ds.put(e);
beginTxn();
Parent parent = pm.getObjectById(parentClass, e.getKey());
assertEquals(parentClass, parent.getClass());
assertEquals("a", parent.getAString());
commitTxn();
}
private void testFetchChild(Class<? extends SubclassesJDO.Child> childClass) {
Entity e = new Entity(kindForClass(childClass));
e.setProperty("aString", "a");
e.setProperty("bString", "b");
ds.put(e);
beginTxn();
SubclassesJDO.Child child = pm.getObjectById(childClass, e.getKey());
assertEquals(childClass, child.getClass());
assertEquals("a", child.getAString());
assertEquals("b", child.getBString());
commitTxn();
}
private void testFetchGrandchild(Class<? extends com.google.appengine.datanucleus.test.jdo.SubclassesJDO.Grandchild> grandchildClass) {
Entity e = new Entity(kindForClass(grandchildClass));
e.setProperty("aString", "a");
e.setProperty("bString", "b");
e.setProperty("cString", "c");
ds.put(e);
beginTxn();
com.google.appengine.datanucleus.test.jdo.SubclassesJDO.Grandchild
grandchild = pm.getObjectById(grandchildClass, e.getKey());
assertEquals(grandchildClass, grandchild.getClass());
assertEquals("a", grandchild.getAString());
assertEquals("b", grandchild.getBString());
assertEquals("c", grandchild.getCString());
commitTxn();
}
private void testQueryParent(Class<? extends Parent> parentClass) {
Entity e = new Entity(kindForClass(parentClass));
e.setProperty("aString", "z8");
ds.put(e);
e = new Entity(kindForClass(parentClass));
e.setProperty("aString", "z9");
ds.put(e);
beginTxn();
Parent parent = ((List<Parent>) pm.newQuery(
"select from " + parentClass.getName() + " where aString == 'z8'").execute()).get(0);
assertEquals(parentClass, parent.getClass());
assertEquals("z8", parent.getAString());
commitTxn();
beginTxn();
List<Parent> parents = ((List<Parent>) pm.newQuery(
"select from " + parentClass.getName() + " where aString >= 'z8' order by aString desc").execute());
assertEquals(2, parents.size());
assertEquals("z9", parents.get(0).getAString());
assertEquals("z8", parents.get(1).getAString());
commitTxn();
beginTxn();
String aString = ((List<String>) pm.newQuery(
"select aString from " + parentClass.getName() + " where aString == 'z8'").execute()).get(0);
assertEquals("z8", aString);
commitTxn();
}
private void testQueryChild(Class<? extends SubclassesJDO.Child> childClass) {
Entity e1 = new Entity(kindForClass(childClass));
e1.setProperty("aString", "a2");
e1.setProperty("bString", "b2");
ds.put(e1);
Entity e2 = new Entity(kindForClass(childClass));
e2.setProperty("aString", "a2");
e2.setProperty("bString", "b3");
ds.put(e2);
Entity e3 = new Entity(kindForClass(childClass));
e3.setProperty("aString", "a2");
e3.setProperty("bString", "b3");
ds.put(e3);
beginTxn();
SubclassesJDO.Child child = ((List<SubclassesJDO.Child>) pm.newQuery(
"select from " + childClass.getName() + " where aString == 'a2'").execute()).get(0);
assertEquals(childClass, child.getClass());
assertEquals("a2", child.getAString());
assertEquals("b2", child.getBString());
child = ((List<SubclassesJDO.Child>) pm.newQuery(
"select from " + childClass.getName() + " where bString == 'b2'").execute()).get(0);
assertEquals(childClass, child.getClass());
assertEquals("a2", child.getAString());
assertEquals("b2", child.getBString());
List<SubclassesJDO.Child> kids = ((List<SubclassesJDO.Child>) pm.newQuery(
"select from " + childClass.getName() + " where aString == 'a2' order by bString desc").execute());
assertEquals(3, kids.size());
assertEquals(e2.getKey().getId(), kids.get(0).getId().longValue());
assertEquals(e3.getKey().getId(), kids.get(1).getId().longValue());
assertEquals(e1.getKey().getId(), kids.get(2).getId().longValue());
kids = ((List<SubclassesJDO.Child>) pm.newQuery("select from " + childClass.getName() + " where aString == 'a2' order by aString desc").execute());
assertEquals(3, kids.size());
assertEquals(e1.getKey().getId(), kids.get(0).getId().longValue());
assertEquals(e2.getKey().getId(), kids.get(1).getId().longValue());
assertEquals(e3.getKey().getId(), kids.get(2).getId().longValue());
Object[] result = ((List<Object[]>) pm.newQuery("select bString, aString from " + childClass.getName() + " where bString == 'b2'").execute()).get(0);
assertEquals(2, result.length);
assertEquals("b2", result[0]);
assertEquals("a2", result[1]);
commitTxn();
}
private void testQueryGrandchild(Class<? extends com.google.appengine.datanucleus.test.jdo.SubclassesJDO.Grandchild> grandchildClass) {
Entity e1 = new Entity(kindForClass(grandchildClass));
e1.setProperty("aString", "a2");
e1.setProperty("bString", "b1");
e1.setProperty("cString", "c2");
ds.put(e1);
Entity e2 = new Entity(kindForClass(grandchildClass));
e2.setProperty("aString", "a2");
e2.setProperty("bString", "b3");
e2.setProperty("cString", "c3");
ds.put(e2);
Entity e3 = new Entity(kindForClass(grandchildClass));
e3.setProperty("aString", "a2");
e3.setProperty("bString", "b2");
e3.setProperty("cString", "c3");
ds.put(e3);
beginTxn();
Grandchild grandchild = ((List<Grandchild>) pm.newQuery(
"select from " + grandchildClass.getName() + " where aString == 'a2'").execute()).get(0);
assertEquals(grandchildClass, grandchild.getClass());
assertEquals("a2", grandchild.getAString());
assertEquals("b1", grandchild.getBString());
assertEquals("c2", grandchild.getCString());
grandchild = ((List<Grandchild>) pm.newQuery(
"select from " + grandchildClass.getName() + " where bString == 'b2'").execute()).get(0);
assertEquals(grandchildClass, grandchild.getClass());
assertEquals("a2", grandchild.getAString());
assertEquals("b2", grandchild.getBString());
assertEquals("c3", grandchild.getCString());
grandchild = ((List<Grandchild>) pm.newQuery(
"select from " + grandchildClass.getName() + " where cString == 'c2'").execute()).get(0);
assertEquals(grandchildClass, grandchild.getClass());
assertEquals("a2", grandchild.getAString());
assertEquals("b1", grandchild.getBString());
assertEquals("c2", grandchild.getCString());
List<Grandchild> grandkids = ((List<Grandchild>) pm.newQuery(
"select from " + grandchildClass.getName() + " where aString == 'a2' order by bString desc").execute());
assertEquals(3, grandkids.size());
assertEquals(e2.getKey().getId(), grandkids.get(0).getId().longValue());
assertEquals(e3.getKey().getId(), grandkids.get(1).getId().longValue());
assertEquals(e1.getKey().getId(), grandkids.get(2).getId().longValue());
grandkids = ((List<Grandchild>) pm.newQuery(
"select from " + grandchildClass.getName() + " where aString == 'a2' order by aString desc").execute());
assertEquals(3, grandkids.size());
assertEquals(e1.getKey().getId(), grandkids.get(0).getId().longValue());
assertEquals(e2.getKey().getId(), grandkids.get(1).getId().longValue());
assertEquals(e3.getKey().getId(), grandkids.get(2).getId().longValue());
grandkids = ((List<Grandchild>) pm.newQuery(
"select from " + grandchildClass.getName() + " where aString == 'a2' order by cString desc").execute());
assertEquals(3, grandkids.size());
assertEquals(e2.getKey().getId(), grandkids.get(0).getId().longValue());
assertEquals(e3.getKey().getId(), grandkids.get(1).getId().longValue());
assertEquals(e1.getKey().getId(), grandkids.get(2).getId().longValue());
Object[] result = ((List<Object[]>) pm.newQuery(
"select bString, aString, cString from " + grandchildClass.getName() + " where cString == 'c2'").execute()).get(0);
assertEquals(3, result.length);
assertEquals("b1", result[0]);
assertEquals("a2", result[1]);
assertEquals("c2", result[2]);
commitTxn();
}
private void testDeleteParent(Class<? extends Parent> parentClass) {
Entity e = new Entity(kindForClass(parentClass));
e.setProperty("aString", "a");
ds.put(e);
beginTxn();
Parent parent = pm.getObjectById(parentClass, e.getKey());
pm.deletePersistent(parent);
commitTxn();
try {
ds.get(e.getKey());
fail("expected exception");
} catch (EntityNotFoundException e1) {
// good
}
}
private void testDeleteChild(Class<? extends SubclassesJDO.Child> childClass) {
Entity e = new Entity(kindForClass(childClass));
e.setProperty("aString", "a");
e.setProperty("bString", "b");
ds.put(e);
beginTxn();
SubclassesJDO.Child child = pm.getObjectById(childClass, e.getKey());
pm.deletePersistent(child);
commitTxn();
try {
ds.get(e.getKey());
fail("expected exception");
} catch (EntityNotFoundException e1) {
// good
}
}
private void testDeleteGrandchild(Class<? extends com.google.appengine.datanucleus.test.jdo.SubclassesJDO.Grandchild> grandchildClass) {
Entity e = new Entity(kindForClass(grandchildClass));
e.setProperty("aString", "a");
e.setProperty("bString", "b");
e.setProperty("cString", "c");
ds.put(e);
beginTxn();
SubclassesJDO.Child child = pm.getObjectById(grandchildClass, e.getKey());
pm.deletePersistent(child);
commitTxn();
try {
ds.get(e.getKey());
fail("expected exception");
} catch (EntityNotFoundException e1) {
// good
}
}
private void testUpdateParent(Class<? extends Parent> parentClass) throws Exception {
Entity e = new Entity(kindForClass(parentClass));
e.setProperty("aString", "a");
ds.put(e);
beginTxn();
Parent parent = pm.getObjectById(parentClass, e.getKey());
parent.setAString("not a");
commitTxn();
e = ds.get(e.getKey());
assertEquals("not a", e.getProperty("aString"));
}
private void testUpdateChild(Class<? extends SubclassesJDO.Child> childClass) throws Exception {
Entity e = new Entity(kindForClass(childClass));
e.setProperty("aString", "a");
e.setProperty("bString", "b");
ds.put(e);
beginTxn();
SubclassesJDO.Child child = pm.getObjectById(childClass, e.getKey());
child.setAString("not a");
child.setBString("not b");
commitTxn();
e = ds.get(e.getKey());
assertEquals("not a", e.getProperty("aString"));
assertEquals("not b", e.getProperty("bString"));
}
private void testUpdateGrandchild(Class<? extends com.google.appengine.datanucleus.test.jdo.SubclassesJDO.Grandchild> grandchildClass) throws Exception {
Entity e = new Entity(kindForClass(grandchildClass));
e.setProperty("aString", "a");
e.setProperty("bString", "b");
ds.put(e);
beginTxn();
com.google.appengine.datanucleus.test.jdo.SubclassesJDO.Grandchild
grandchild = pm.getObjectById(grandchildClass, e.getKey());
grandchild.setAString("not a");
grandchild.setBString("not b");
grandchild.setCString("not c");
commitTxn();
e = ds.get(e.getKey());
assertEquals("not a", e.getProperty("aString"));
assertEquals("not b", e.getProperty("bString"));
assertEquals("not c", e.getProperty("cString"));
}
private void testGrandchild(com.google.appengine.datanucleus.test.jdo.SubclassesJDO.Grandchild grandchild) throws Exception {
testInsertGrandchild(grandchild);
testUpdateGrandchild(grandchild.getClass());
testDeleteGrandchild(grandchild.getClass());
testFetchGrandchild(grandchild.getClass());
testQueryGrandchild(grandchild.getClass());
}
private void testChild(SubclassesJDO.Child child) throws Exception {
testInsertChild(child);
testUpdateChild(child.getClass());
testDeleteChild(child.getClass());
testFetchChild(child.getClass());
testQueryChild(child.getClass());
}
private void testParent(Parent parent) throws Exception {
testInsertParent(parent);
testUpdateParent(parent.getClass());
testDeleteParent(parent.getClass());
testFetchParent(parent.getClass());
testQueryParent(parent.getClass());
}
}
|
apache-2.0
|
davebarnes97/geode
|
geode-gfsh/src/integrationTest/java/org/apache/geode/management/internal/cli/commands/HistoryCommandIntegrationTest.java
|
4263
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.management.internal.cli.commands;
import static org.assertj.core.api.Assertions.assertThat;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.util.List;
import org.junit.After;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.apache.geode.management.internal.i18n.CliStrings;
import org.apache.geode.test.junit.rules.GfshCommandRule;
public class HistoryCommandIntegrationTest {
@ClassRule
public static GfshCommandRule gfsh = new GfshCommandRule();
@Rule
public TemporaryFolder temporaryFolder = new TemporaryFolder();
@After
public void tearDown() throws Exception {
gfsh.getGfsh().clearHistory();
}
@Test
public void testHistoryWithEntry() {
// Generate a line of history
gfsh.executeAndAssertThat("echo --string=string");
gfsh.executeAndAssertThat("connect");
gfsh.executeAndAssertThat("history").statusIsSuccess()
.hasInfoSection()
.hasLines()
.containsExactly("0: echo --string=string", "1: connect");
}
@Test
public void testEmptyHistory() {
gfsh.executeAndAssertThat("history").statusIsSuccess();
assertThat(gfsh.getGfshOutput()).isEqualToIgnoringWhitespace("");
}
@Test
public void testHistoryWithFileName() throws IOException {
gfsh.executeCommand("echo --string=string");
File historyFile = temporaryFolder.newFile("history.txt");
historyFile.delete();
assertThat(historyFile).doesNotExist();
String command = "history --file=" + historyFile.getAbsolutePath();
gfsh.executeAndAssertThat(command).statusIsSuccess()
.containsOutput("Wrote successfully to file");
assertThat(historyFile).exists();
assertThat(historyFile).hasContent("0: echo --string=string");
}
@Test
public void testClearHistory() {
// Generate a line of history
gfsh.executeAndAssertThat("echo --string=string");
gfsh.executeAndAssertThat("history --clear").statusIsSuccess()
.containsOutput(CliStrings.HISTORY__MSG__CLEARED_HISTORY);
// only the history --clear is in the history now.
assertThat(gfsh.getGfsh().getGfshHistory().size()).isEqualTo(1);
}
@Test
public void testHistoryContainsRedactedPasswordWithEquals() throws IOException {
gfsh.executeCommand("connect --password=redacted");
File historyFile = temporaryFolder.newFile("history.txt");
historyFile.delete();
assertThat(historyFile).doesNotExist();
String command = "history --file=" + historyFile.getAbsolutePath();
gfsh.executeAndAssertThat(command).statusIsSuccess();
assertThat(historyFile).exists();
List<String> historyLines = Files.readAllLines(historyFile.toPath());
assertThat(historyLines.get(0)).isEqualTo("0: connect --password=********");
}
@Test
public void testHistoryContainsRedactedPasswordWithoutEquals() throws IOException {
gfsh.executeCommand("connect --password redacted");
File historyFile = temporaryFolder.newFile("history.txt");
historyFile.delete();
assertThat(historyFile).doesNotExist();
String command = "history --file=" + historyFile.getAbsolutePath();
gfsh.executeAndAssertThat(command).statusIsSuccess();
assertThat(historyFile).exists();
List<String> historyLines = Files.readAllLines(historyFile.toPath());
assertThat(historyLines.get(0)).isEqualTo("0: connect --password ********");
}
}
|
apache-2.0
|
erikdw/storm
|
storm-client/src/jvm/org/apache/storm/generated/StormBase.java
|
57866
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Autogenerated by Thrift Compiler (0.11.0)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
*/
package org.apache.storm.generated;
@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked", "unused"})
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.11.0)")
public class StormBase implements org.apache.storm.thrift.TBase<StormBase, StormBase._Fields>, java.io.Serializable, Cloneable, Comparable<StormBase> {
private static final org.apache.storm.thrift.protocol.TStruct STRUCT_DESC = new org.apache.storm.thrift.protocol.TStruct("StormBase");
private static final org.apache.storm.thrift.protocol.TField NAME_FIELD_DESC = new org.apache.storm.thrift.protocol.TField("name", org.apache.storm.thrift.protocol.TType.STRING, (short)1);
private static final org.apache.storm.thrift.protocol.TField STATUS_FIELD_DESC = new org.apache.storm.thrift.protocol.TField("status", org.apache.storm.thrift.protocol.TType.I32, (short)2);
private static final org.apache.storm.thrift.protocol.TField NUM_WORKERS_FIELD_DESC = new org.apache.storm.thrift.protocol.TField("num_workers", org.apache.storm.thrift.protocol.TType.I32, (short)3);
private static final org.apache.storm.thrift.protocol.TField COMPONENT_EXECUTORS_FIELD_DESC = new org.apache.storm.thrift.protocol.TField("component_executors", org.apache.storm.thrift.protocol.TType.MAP, (short)4);
private static final org.apache.storm.thrift.protocol.TField LAUNCH_TIME_SECS_FIELD_DESC = new org.apache.storm.thrift.protocol.TField("launch_time_secs", org.apache.storm.thrift.protocol.TType.I32, (short)5);
private static final org.apache.storm.thrift.protocol.TField OWNER_FIELD_DESC = new org.apache.storm.thrift.protocol.TField("owner", org.apache.storm.thrift.protocol.TType.STRING, (short)6);
private static final org.apache.storm.thrift.protocol.TField TOPOLOGY_ACTION_OPTIONS_FIELD_DESC = new org.apache.storm.thrift.protocol.TField("topology_action_options", org.apache.storm.thrift.protocol.TType.STRUCT, (short)7);
private static final org.apache.storm.thrift.protocol.TField PREV_STATUS_FIELD_DESC = new org.apache.storm.thrift.protocol.TField("prev_status", org.apache.storm.thrift.protocol.TType.I32, (short)8);
private static final org.apache.storm.thrift.protocol.TField COMPONENT_DEBUG_FIELD_DESC = new org.apache.storm.thrift.protocol.TField("component_debug", org.apache.storm.thrift.protocol.TType.MAP, (short)9);
private static final org.apache.storm.thrift.protocol.TField PRINCIPAL_FIELD_DESC = new org.apache.storm.thrift.protocol.TField("principal", org.apache.storm.thrift.protocol.TType.STRING, (short)10);
private static final org.apache.storm.thrift.protocol.TField TOPOLOGY_VERSION_FIELD_DESC = new org.apache.storm.thrift.protocol.TField("topology_version", org.apache.storm.thrift.protocol.TType.STRING, (short)11);
private static final org.apache.storm.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new StormBaseStandardSchemeFactory();
private static final org.apache.storm.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new StormBaseTupleSchemeFactory();
private java.lang.String name; // required
private TopologyStatus status; // required
private int num_workers; // required
private java.util.Map<java.lang.String,java.lang.Integer> component_executors; // optional
private int launch_time_secs; // optional
private java.lang.String owner; // optional
private TopologyActionOptions topology_action_options; // optional
private TopologyStatus prev_status; // optional
private java.util.Map<java.lang.String,DebugOptions> component_debug; // optional
private java.lang.String principal; // optional
private java.lang.String topology_version; // optional
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
public enum _Fields implements org.apache.storm.thrift.TFieldIdEnum {
NAME((short)1, "name"),
/**
*
* @see TopologyStatus
*/
STATUS((short)2, "status"),
NUM_WORKERS((short)3, "num_workers"),
COMPONENT_EXECUTORS((short)4, "component_executors"),
LAUNCH_TIME_SECS((short)5, "launch_time_secs"),
OWNER((short)6, "owner"),
TOPOLOGY_ACTION_OPTIONS((short)7, "topology_action_options"),
/**
*
* @see TopologyStatus
*/
PREV_STATUS((short)8, "prev_status"),
COMPONENT_DEBUG((short)9, "component_debug"),
PRINCIPAL((short)10, "principal"),
TOPOLOGY_VERSION((short)11, "topology_version");
private static final java.util.Map<java.lang.String, _Fields> byName = new java.util.HashMap<java.lang.String, _Fields>();
static {
for (_Fields field : java.util.EnumSet.allOf(_Fields.class)) {
byName.put(field.getFieldName(), field);
}
}
/**
* Find the _Fields constant that matches fieldId, or null if its not found.
*/
public static _Fields findByThriftId(int fieldId) {
switch(fieldId) {
case 1: // NAME
return NAME;
case 2: // STATUS
return STATUS;
case 3: // NUM_WORKERS
return NUM_WORKERS;
case 4: // COMPONENT_EXECUTORS
return COMPONENT_EXECUTORS;
case 5: // LAUNCH_TIME_SECS
return LAUNCH_TIME_SECS;
case 6: // OWNER
return OWNER;
case 7: // TOPOLOGY_ACTION_OPTIONS
return TOPOLOGY_ACTION_OPTIONS;
case 8: // PREV_STATUS
return PREV_STATUS;
case 9: // COMPONENT_DEBUG
return COMPONENT_DEBUG;
case 10: // PRINCIPAL
return PRINCIPAL;
case 11: // TOPOLOGY_VERSION
return TOPOLOGY_VERSION;
default:
return null;
}
}
/**
* Find the _Fields constant that matches fieldId, throwing an exception
* if it is not found.
*/
public static _Fields findByThriftIdOrThrow(int fieldId) {
_Fields fields = findByThriftId(fieldId);
if (fields == null) throw new java.lang.IllegalArgumentException("Field " + fieldId + " doesn't exist!");
return fields;
}
/**
* Find the _Fields constant that matches name, or null if its not found.
*/
public static _Fields findByName(java.lang.String name) {
return byName.get(name);
}
private final short _thriftId;
private final java.lang.String _fieldName;
_Fields(short thriftId, java.lang.String fieldName) {
_thriftId = thriftId;
_fieldName = fieldName;
}
public short getThriftFieldId() {
return _thriftId;
}
public java.lang.String getFieldName() {
return _fieldName;
}
}
// isset id assignments
private static final int __NUM_WORKERS_ISSET_ID = 0;
private static final int __LAUNCH_TIME_SECS_ISSET_ID = 1;
private byte __isset_bitfield = 0;
private static final _Fields optionals[] = {_Fields.COMPONENT_EXECUTORS,_Fields.LAUNCH_TIME_SECS,_Fields.OWNER,_Fields.TOPOLOGY_ACTION_OPTIONS,_Fields.PREV_STATUS,_Fields.COMPONENT_DEBUG,_Fields.PRINCIPAL,_Fields.TOPOLOGY_VERSION};
public static final java.util.Map<_Fields, org.apache.storm.thrift.meta_data.FieldMetaData> metaDataMap;
static {
java.util.Map<_Fields, org.apache.storm.thrift.meta_data.FieldMetaData> tmpMap = new java.util.EnumMap<_Fields, org.apache.storm.thrift.meta_data.FieldMetaData>(_Fields.class);
tmpMap.put(_Fields.NAME, new org.apache.storm.thrift.meta_data.FieldMetaData("name", org.apache.storm.thrift.TFieldRequirementType.REQUIRED,
new org.apache.storm.thrift.meta_data.FieldValueMetaData(org.apache.storm.thrift.protocol.TType.STRING)));
tmpMap.put(_Fields.STATUS, new org.apache.storm.thrift.meta_data.FieldMetaData("status", org.apache.storm.thrift.TFieldRequirementType.REQUIRED,
new org.apache.storm.thrift.meta_data.EnumMetaData(org.apache.storm.thrift.protocol.TType.ENUM, TopologyStatus.class)));
tmpMap.put(_Fields.NUM_WORKERS, new org.apache.storm.thrift.meta_data.FieldMetaData("num_workers", org.apache.storm.thrift.TFieldRequirementType.REQUIRED,
new org.apache.storm.thrift.meta_data.FieldValueMetaData(org.apache.storm.thrift.protocol.TType.I32)));
tmpMap.put(_Fields.COMPONENT_EXECUTORS, new org.apache.storm.thrift.meta_data.FieldMetaData("component_executors", org.apache.storm.thrift.TFieldRequirementType.OPTIONAL,
new org.apache.storm.thrift.meta_data.MapMetaData(org.apache.storm.thrift.protocol.TType.MAP,
new org.apache.storm.thrift.meta_data.FieldValueMetaData(org.apache.storm.thrift.protocol.TType.STRING),
new org.apache.storm.thrift.meta_data.FieldValueMetaData(org.apache.storm.thrift.protocol.TType.I32))));
tmpMap.put(_Fields.LAUNCH_TIME_SECS, new org.apache.storm.thrift.meta_data.FieldMetaData("launch_time_secs", org.apache.storm.thrift.TFieldRequirementType.OPTIONAL,
new org.apache.storm.thrift.meta_data.FieldValueMetaData(org.apache.storm.thrift.protocol.TType.I32)));
tmpMap.put(_Fields.OWNER, new org.apache.storm.thrift.meta_data.FieldMetaData("owner", org.apache.storm.thrift.TFieldRequirementType.OPTIONAL,
new org.apache.storm.thrift.meta_data.FieldValueMetaData(org.apache.storm.thrift.protocol.TType.STRING)));
tmpMap.put(_Fields.TOPOLOGY_ACTION_OPTIONS, new org.apache.storm.thrift.meta_data.FieldMetaData("topology_action_options", org.apache.storm.thrift.TFieldRequirementType.OPTIONAL,
new org.apache.storm.thrift.meta_data.StructMetaData(org.apache.storm.thrift.protocol.TType.STRUCT, TopologyActionOptions.class)));
tmpMap.put(_Fields.PREV_STATUS, new org.apache.storm.thrift.meta_data.FieldMetaData("prev_status", org.apache.storm.thrift.TFieldRequirementType.OPTIONAL,
new org.apache.storm.thrift.meta_data.EnumMetaData(org.apache.storm.thrift.protocol.TType.ENUM, TopologyStatus.class)));
tmpMap.put(_Fields.COMPONENT_DEBUG, new org.apache.storm.thrift.meta_data.FieldMetaData("component_debug", org.apache.storm.thrift.TFieldRequirementType.OPTIONAL,
new org.apache.storm.thrift.meta_data.MapMetaData(org.apache.storm.thrift.protocol.TType.MAP,
new org.apache.storm.thrift.meta_data.FieldValueMetaData(org.apache.storm.thrift.protocol.TType.STRING),
new org.apache.storm.thrift.meta_data.StructMetaData(org.apache.storm.thrift.protocol.TType.STRUCT, DebugOptions.class))));
tmpMap.put(_Fields.PRINCIPAL, new org.apache.storm.thrift.meta_data.FieldMetaData("principal", org.apache.storm.thrift.TFieldRequirementType.OPTIONAL,
new org.apache.storm.thrift.meta_data.FieldValueMetaData(org.apache.storm.thrift.protocol.TType.STRING)));
tmpMap.put(_Fields.TOPOLOGY_VERSION, new org.apache.storm.thrift.meta_data.FieldMetaData("topology_version", org.apache.storm.thrift.TFieldRequirementType.OPTIONAL,
new org.apache.storm.thrift.meta_data.FieldValueMetaData(org.apache.storm.thrift.protocol.TType.STRING)));
metaDataMap = java.util.Collections.unmodifiableMap(tmpMap);
org.apache.storm.thrift.meta_data.FieldMetaData.addStructMetaDataMap(StormBase.class, metaDataMap);
}
public StormBase() {
}
public StormBase(
java.lang.String name,
TopologyStatus status,
int num_workers)
{
this();
this.name = name;
this.status = status;
this.num_workers = num_workers;
set_num_workers_isSet(true);
}
/**
* Performs a deep copy on <i>other</i>.
*/
public StormBase(StormBase other) {
__isset_bitfield = other.__isset_bitfield;
if (other.is_set_name()) {
this.name = other.name;
}
if (other.is_set_status()) {
this.status = other.status;
}
this.num_workers = other.num_workers;
if (other.is_set_component_executors()) {
java.util.Map<java.lang.String,java.lang.Integer> __this__component_executors = new java.util.HashMap<java.lang.String,java.lang.Integer>(other.component_executors);
this.component_executors = __this__component_executors;
}
this.launch_time_secs = other.launch_time_secs;
if (other.is_set_owner()) {
this.owner = other.owner;
}
if (other.is_set_topology_action_options()) {
this.topology_action_options = new TopologyActionOptions(other.topology_action_options);
}
if (other.is_set_prev_status()) {
this.prev_status = other.prev_status;
}
if (other.is_set_component_debug()) {
java.util.Map<java.lang.String,DebugOptions> __this__component_debug = new java.util.HashMap<java.lang.String,DebugOptions>(other.component_debug.size());
for (java.util.Map.Entry<java.lang.String, DebugOptions> other_element : other.component_debug.entrySet()) {
java.lang.String other_element_key = other_element.getKey();
DebugOptions other_element_value = other_element.getValue();
java.lang.String __this__component_debug_copy_key = other_element_key;
DebugOptions __this__component_debug_copy_value = new DebugOptions(other_element_value);
__this__component_debug.put(__this__component_debug_copy_key, __this__component_debug_copy_value);
}
this.component_debug = __this__component_debug;
}
if (other.is_set_principal()) {
this.principal = other.principal;
}
if (other.is_set_topology_version()) {
this.topology_version = other.topology_version;
}
}
public StormBase deepCopy() {
return new StormBase(this);
}
@Override
public void clear() {
this.name = null;
this.status = null;
set_num_workers_isSet(false);
this.num_workers = 0;
this.component_executors = null;
set_launch_time_secs_isSet(false);
this.launch_time_secs = 0;
this.owner = null;
this.topology_action_options = null;
this.prev_status = null;
this.component_debug = null;
this.principal = null;
this.topology_version = null;
}
public java.lang.String get_name() {
return this.name;
}
public void set_name(java.lang.String name) {
this.name = name;
}
public void unset_name() {
this.name = null;
}
/** Returns true if field name is set (has been assigned a value) and false otherwise */
public boolean is_set_name() {
return this.name != null;
}
public void set_name_isSet(boolean value) {
if (!value) {
this.name = null;
}
}
/**
*
* @see TopologyStatus
*/
public TopologyStatus get_status() {
return this.status;
}
/**
*
* @see TopologyStatus
*/
public void set_status(TopologyStatus status) {
this.status = status;
}
public void unset_status() {
this.status = null;
}
/** Returns true if field status is set (has been assigned a value) and false otherwise */
public boolean is_set_status() {
return this.status != null;
}
public void set_status_isSet(boolean value) {
if (!value) {
this.status = null;
}
}
public int get_num_workers() {
return this.num_workers;
}
public void set_num_workers(int num_workers) {
this.num_workers = num_workers;
set_num_workers_isSet(true);
}
public void unset_num_workers() {
__isset_bitfield = org.apache.storm.thrift.EncodingUtils.clearBit(__isset_bitfield, __NUM_WORKERS_ISSET_ID);
}
/** Returns true if field num_workers is set (has been assigned a value) and false otherwise */
public boolean is_set_num_workers() {
return org.apache.storm.thrift.EncodingUtils.testBit(__isset_bitfield, __NUM_WORKERS_ISSET_ID);
}
public void set_num_workers_isSet(boolean value) {
__isset_bitfield = org.apache.storm.thrift.EncodingUtils.setBit(__isset_bitfield, __NUM_WORKERS_ISSET_ID, value);
}
public int get_component_executors_size() {
return (this.component_executors == null) ? 0 : this.component_executors.size();
}
public void put_to_component_executors(java.lang.String key, int val) {
if (this.component_executors == null) {
this.component_executors = new java.util.HashMap<java.lang.String,java.lang.Integer>();
}
this.component_executors.put(key, val);
}
public java.util.Map<java.lang.String,java.lang.Integer> get_component_executors() {
return this.component_executors;
}
public void set_component_executors(java.util.Map<java.lang.String,java.lang.Integer> component_executors) {
this.component_executors = component_executors;
}
public void unset_component_executors() {
this.component_executors = null;
}
/** Returns true if field component_executors is set (has been assigned a value) and false otherwise */
public boolean is_set_component_executors() {
return this.component_executors != null;
}
public void set_component_executors_isSet(boolean value) {
if (!value) {
this.component_executors = null;
}
}
public int get_launch_time_secs() {
return this.launch_time_secs;
}
public void set_launch_time_secs(int launch_time_secs) {
this.launch_time_secs = launch_time_secs;
set_launch_time_secs_isSet(true);
}
public void unset_launch_time_secs() {
__isset_bitfield = org.apache.storm.thrift.EncodingUtils.clearBit(__isset_bitfield, __LAUNCH_TIME_SECS_ISSET_ID);
}
/** Returns true if field launch_time_secs is set (has been assigned a value) and false otherwise */
public boolean is_set_launch_time_secs() {
return org.apache.storm.thrift.EncodingUtils.testBit(__isset_bitfield, __LAUNCH_TIME_SECS_ISSET_ID);
}
public void set_launch_time_secs_isSet(boolean value) {
__isset_bitfield = org.apache.storm.thrift.EncodingUtils.setBit(__isset_bitfield, __LAUNCH_TIME_SECS_ISSET_ID, value);
}
public java.lang.String get_owner() {
return this.owner;
}
public void set_owner(java.lang.String owner) {
this.owner = owner;
}
public void unset_owner() {
this.owner = null;
}
/** Returns true if field owner is set (has been assigned a value) and false otherwise */
public boolean is_set_owner() {
return this.owner != null;
}
public void set_owner_isSet(boolean value) {
if (!value) {
this.owner = null;
}
}
public TopologyActionOptions get_topology_action_options() {
return this.topology_action_options;
}
public void set_topology_action_options(TopologyActionOptions topology_action_options) {
this.topology_action_options = topology_action_options;
}
public void unset_topology_action_options() {
this.topology_action_options = null;
}
/** Returns true if field topology_action_options is set (has been assigned a value) and false otherwise */
public boolean is_set_topology_action_options() {
return this.topology_action_options != null;
}
public void set_topology_action_options_isSet(boolean value) {
if (!value) {
this.topology_action_options = null;
}
}
/**
*
* @see TopologyStatus
*/
public TopologyStatus get_prev_status() {
return this.prev_status;
}
/**
*
* @see TopologyStatus
*/
public void set_prev_status(TopologyStatus prev_status) {
this.prev_status = prev_status;
}
public void unset_prev_status() {
this.prev_status = null;
}
/** Returns true if field prev_status is set (has been assigned a value) and false otherwise */
public boolean is_set_prev_status() {
return this.prev_status != null;
}
public void set_prev_status_isSet(boolean value) {
if (!value) {
this.prev_status = null;
}
}
public int get_component_debug_size() {
return (this.component_debug == null) ? 0 : this.component_debug.size();
}
public void put_to_component_debug(java.lang.String key, DebugOptions val) {
if (this.component_debug == null) {
this.component_debug = new java.util.HashMap<java.lang.String,DebugOptions>();
}
this.component_debug.put(key, val);
}
public java.util.Map<java.lang.String,DebugOptions> get_component_debug() {
return this.component_debug;
}
public void set_component_debug(java.util.Map<java.lang.String,DebugOptions> component_debug) {
this.component_debug = component_debug;
}
public void unset_component_debug() {
this.component_debug = null;
}
/** Returns true if field component_debug is set (has been assigned a value) and false otherwise */
public boolean is_set_component_debug() {
return this.component_debug != null;
}
public void set_component_debug_isSet(boolean value) {
if (!value) {
this.component_debug = null;
}
}
public java.lang.String get_principal() {
return this.principal;
}
public void set_principal(java.lang.String principal) {
this.principal = principal;
}
public void unset_principal() {
this.principal = null;
}
/** Returns true if field principal is set (has been assigned a value) and false otherwise */
public boolean is_set_principal() {
return this.principal != null;
}
public void set_principal_isSet(boolean value) {
if (!value) {
this.principal = null;
}
}
public java.lang.String get_topology_version() {
return this.topology_version;
}
public void set_topology_version(java.lang.String topology_version) {
this.topology_version = topology_version;
}
public void unset_topology_version() {
this.topology_version = null;
}
/** Returns true if field topology_version is set (has been assigned a value) and false otherwise */
public boolean is_set_topology_version() {
return this.topology_version != null;
}
public void set_topology_version_isSet(boolean value) {
if (!value) {
this.topology_version = null;
}
}
public void setFieldValue(_Fields field, java.lang.Object value) {
switch (field) {
case NAME:
if (value == null) {
unset_name();
} else {
set_name((java.lang.String)value);
}
break;
case STATUS:
if (value == null) {
unset_status();
} else {
set_status((TopologyStatus)value);
}
break;
case NUM_WORKERS:
if (value == null) {
unset_num_workers();
} else {
set_num_workers((java.lang.Integer)value);
}
break;
case COMPONENT_EXECUTORS:
if (value == null) {
unset_component_executors();
} else {
set_component_executors((java.util.Map<java.lang.String,java.lang.Integer>)value);
}
break;
case LAUNCH_TIME_SECS:
if (value == null) {
unset_launch_time_secs();
} else {
set_launch_time_secs((java.lang.Integer)value);
}
break;
case OWNER:
if (value == null) {
unset_owner();
} else {
set_owner((java.lang.String)value);
}
break;
case TOPOLOGY_ACTION_OPTIONS:
if (value == null) {
unset_topology_action_options();
} else {
set_topology_action_options((TopologyActionOptions)value);
}
break;
case PREV_STATUS:
if (value == null) {
unset_prev_status();
} else {
set_prev_status((TopologyStatus)value);
}
break;
case COMPONENT_DEBUG:
if (value == null) {
unset_component_debug();
} else {
set_component_debug((java.util.Map<java.lang.String,DebugOptions>)value);
}
break;
case PRINCIPAL:
if (value == null) {
unset_principal();
} else {
set_principal((java.lang.String)value);
}
break;
case TOPOLOGY_VERSION:
if (value == null) {
unset_topology_version();
} else {
set_topology_version((java.lang.String)value);
}
break;
}
}
public java.lang.Object getFieldValue(_Fields field) {
switch (field) {
case NAME:
return get_name();
case STATUS:
return get_status();
case NUM_WORKERS:
return get_num_workers();
case COMPONENT_EXECUTORS:
return get_component_executors();
case LAUNCH_TIME_SECS:
return get_launch_time_secs();
case OWNER:
return get_owner();
case TOPOLOGY_ACTION_OPTIONS:
return get_topology_action_options();
case PREV_STATUS:
return get_prev_status();
case COMPONENT_DEBUG:
return get_component_debug();
case PRINCIPAL:
return get_principal();
case TOPOLOGY_VERSION:
return get_topology_version();
}
throw new java.lang.IllegalStateException();
}
/** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
public boolean isSet(_Fields field) {
if (field == null) {
throw new java.lang.IllegalArgumentException();
}
switch (field) {
case NAME:
return is_set_name();
case STATUS:
return is_set_status();
case NUM_WORKERS:
return is_set_num_workers();
case COMPONENT_EXECUTORS:
return is_set_component_executors();
case LAUNCH_TIME_SECS:
return is_set_launch_time_secs();
case OWNER:
return is_set_owner();
case TOPOLOGY_ACTION_OPTIONS:
return is_set_topology_action_options();
case PREV_STATUS:
return is_set_prev_status();
case COMPONENT_DEBUG:
return is_set_component_debug();
case PRINCIPAL:
return is_set_principal();
case TOPOLOGY_VERSION:
return is_set_topology_version();
}
throw new java.lang.IllegalStateException();
}
@Override
public boolean equals(java.lang.Object that) {
if (that == null)
return false;
if (that instanceof StormBase)
return this.equals((StormBase)that);
return false;
}
public boolean equals(StormBase that) {
if (that == null)
return false;
if (this == that)
return true;
boolean this_present_name = true && this.is_set_name();
boolean that_present_name = true && that.is_set_name();
if (this_present_name || that_present_name) {
if (!(this_present_name && that_present_name))
return false;
if (!this.name.equals(that.name))
return false;
}
boolean this_present_status = true && this.is_set_status();
boolean that_present_status = true && that.is_set_status();
if (this_present_status || that_present_status) {
if (!(this_present_status && that_present_status))
return false;
if (!this.status.equals(that.status))
return false;
}
boolean this_present_num_workers = true;
boolean that_present_num_workers = true;
if (this_present_num_workers || that_present_num_workers) {
if (!(this_present_num_workers && that_present_num_workers))
return false;
if (this.num_workers != that.num_workers)
return false;
}
boolean this_present_component_executors = true && this.is_set_component_executors();
boolean that_present_component_executors = true && that.is_set_component_executors();
if (this_present_component_executors || that_present_component_executors) {
if (!(this_present_component_executors && that_present_component_executors))
return false;
if (!this.component_executors.equals(that.component_executors))
return false;
}
boolean this_present_launch_time_secs = true && this.is_set_launch_time_secs();
boolean that_present_launch_time_secs = true && that.is_set_launch_time_secs();
if (this_present_launch_time_secs || that_present_launch_time_secs) {
if (!(this_present_launch_time_secs && that_present_launch_time_secs))
return false;
if (this.launch_time_secs != that.launch_time_secs)
return false;
}
boolean this_present_owner = true && this.is_set_owner();
boolean that_present_owner = true && that.is_set_owner();
if (this_present_owner || that_present_owner) {
if (!(this_present_owner && that_present_owner))
return false;
if (!this.owner.equals(that.owner))
return false;
}
boolean this_present_topology_action_options = true && this.is_set_topology_action_options();
boolean that_present_topology_action_options = true && that.is_set_topology_action_options();
if (this_present_topology_action_options || that_present_topology_action_options) {
if (!(this_present_topology_action_options && that_present_topology_action_options))
return false;
if (!this.topology_action_options.equals(that.topology_action_options))
return false;
}
boolean this_present_prev_status = true && this.is_set_prev_status();
boolean that_present_prev_status = true && that.is_set_prev_status();
if (this_present_prev_status || that_present_prev_status) {
if (!(this_present_prev_status && that_present_prev_status))
return false;
if (!this.prev_status.equals(that.prev_status))
return false;
}
boolean this_present_component_debug = true && this.is_set_component_debug();
boolean that_present_component_debug = true && that.is_set_component_debug();
if (this_present_component_debug || that_present_component_debug) {
if (!(this_present_component_debug && that_present_component_debug))
return false;
if (!this.component_debug.equals(that.component_debug))
return false;
}
boolean this_present_principal = true && this.is_set_principal();
boolean that_present_principal = true && that.is_set_principal();
if (this_present_principal || that_present_principal) {
if (!(this_present_principal && that_present_principal))
return false;
if (!this.principal.equals(that.principal))
return false;
}
boolean this_present_topology_version = true && this.is_set_topology_version();
boolean that_present_topology_version = true && that.is_set_topology_version();
if (this_present_topology_version || that_present_topology_version) {
if (!(this_present_topology_version && that_present_topology_version))
return false;
if (!this.topology_version.equals(that.topology_version))
return false;
}
return true;
}
@Override
public int hashCode() {
int hashCode = 1;
hashCode = hashCode * 8191 + ((is_set_name()) ? 131071 : 524287);
if (is_set_name())
hashCode = hashCode * 8191 + name.hashCode();
hashCode = hashCode * 8191 + ((is_set_status()) ? 131071 : 524287);
if (is_set_status())
hashCode = hashCode * 8191 + status.getValue();
hashCode = hashCode * 8191 + num_workers;
hashCode = hashCode * 8191 + ((is_set_component_executors()) ? 131071 : 524287);
if (is_set_component_executors())
hashCode = hashCode * 8191 + component_executors.hashCode();
hashCode = hashCode * 8191 + ((is_set_launch_time_secs()) ? 131071 : 524287);
if (is_set_launch_time_secs())
hashCode = hashCode * 8191 + launch_time_secs;
hashCode = hashCode * 8191 + ((is_set_owner()) ? 131071 : 524287);
if (is_set_owner())
hashCode = hashCode * 8191 + owner.hashCode();
hashCode = hashCode * 8191 + ((is_set_topology_action_options()) ? 131071 : 524287);
if (is_set_topology_action_options())
hashCode = hashCode * 8191 + topology_action_options.hashCode();
hashCode = hashCode * 8191 + ((is_set_prev_status()) ? 131071 : 524287);
if (is_set_prev_status())
hashCode = hashCode * 8191 + prev_status.getValue();
hashCode = hashCode * 8191 + ((is_set_component_debug()) ? 131071 : 524287);
if (is_set_component_debug())
hashCode = hashCode * 8191 + component_debug.hashCode();
hashCode = hashCode * 8191 + ((is_set_principal()) ? 131071 : 524287);
if (is_set_principal())
hashCode = hashCode * 8191 + principal.hashCode();
hashCode = hashCode * 8191 + ((is_set_topology_version()) ? 131071 : 524287);
if (is_set_topology_version())
hashCode = hashCode * 8191 + topology_version.hashCode();
return hashCode;
}
@Override
public int compareTo(StormBase other) {
if (!getClass().equals(other.getClass())) {
return getClass().getName().compareTo(other.getClass().getName());
}
int lastComparison = 0;
lastComparison = java.lang.Boolean.valueOf(is_set_name()).compareTo(other.is_set_name());
if (lastComparison != 0) {
return lastComparison;
}
if (is_set_name()) {
lastComparison = org.apache.storm.thrift.TBaseHelper.compareTo(this.name, other.name);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = java.lang.Boolean.valueOf(is_set_status()).compareTo(other.is_set_status());
if (lastComparison != 0) {
return lastComparison;
}
if (is_set_status()) {
lastComparison = org.apache.storm.thrift.TBaseHelper.compareTo(this.status, other.status);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = java.lang.Boolean.valueOf(is_set_num_workers()).compareTo(other.is_set_num_workers());
if (lastComparison != 0) {
return lastComparison;
}
if (is_set_num_workers()) {
lastComparison = org.apache.storm.thrift.TBaseHelper.compareTo(this.num_workers, other.num_workers);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = java.lang.Boolean.valueOf(is_set_component_executors()).compareTo(other.is_set_component_executors());
if (lastComparison != 0) {
return lastComparison;
}
if (is_set_component_executors()) {
lastComparison = org.apache.storm.thrift.TBaseHelper.compareTo(this.component_executors, other.component_executors);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = java.lang.Boolean.valueOf(is_set_launch_time_secs()).compareTo(other.is_set_launch_time_secs());
if (lastComparison != 0) {
return lastComparison;
}
if (is_set_launch_time_secs()) {
lastComparison = org.apache.storm.thrift.TBaseHelper.compareTo(this.launch_time_secs, other.launch_time_secs);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = java.lang.Boolean.valueOf(is_set_owner()).compareTo(other.is_set_owner());
if (lastComparison != 0) {
return lastComparison;
}
if (is_set_owner()) {
lastComparison = org.apache.storm.thrift.TBaseHelper.compareTo(this.owner, other.owner);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = java.lang.Boolean.valueOf(is_set_topology_action_options()).compareTo(other.is_set_topology_action_options());
if (lastComparison != 0) {
return lastComparison;
}
if (is_set_topology_action_options()) {
lastComparison = org.apache.storm.thrift.TBaseHelper.compareTo(this.topology_action_options, other.topology_action_options);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = java.lang.Boolean.valueOf(is_set_prev_status()).compareTo(other.is_set_prev_status());
if (lastComparison != 0) {
return lastComparison;
}
if (is_set_prev_status()) {
lastComparison = org.apache.storm.thrift.TBaseHelper.compareTo(this.prev_status, other.prev_status);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = java.lang.Boolean.valueOf(is_set_component_debug()).compareTo(other.is_set_component_debug());
if (lastComparison != 0) {
return lastComparison;
}
if (is_set_component_debug()) {
lastComparison = org.apache.storm.thrift.TBaseHelper.compareTo(this.component_debug, other.component_debug);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = java.lang.Boolean.valueOf(is_set_principal()).compareTo(other.is_set_principal());
if (lastComparison != 0) {
return lastComparison;
}
if (is_set_principal()) {
lastComparison = org.apache.storm.thrift.TBaseHelper.compareTo(this.principal, other.principal);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = java.lang.Boolean.valueOf(is_set_topology_version()).compareTo(other.is_set_topology_version());
if (lastComparison != 0) {
return lastComparison;
}
if (is_set_topology_version()) {
lastComparison = org.apache.storm.thrift.TBaseHelper.compareTo(this.topology_version, other.topology_version);
if (lastComparison != 0) {
return lastComparison;
}
}
return 0;
}
public _Fields fieldForId(int fieldId) {
return _Fields.findByThriftId(fieldId);
}
public void read(org.apache.storm.thrift.protocol.TProtocol iprot) throws org.apache.storm.thrift.TException {
scheme(iprot).read(iprot, this);
}
public void write(org.apache.storm.thrift.protocol.TProtocol oprot) throws org.apache.storm.thrift.TException {
scheme(oprot).write(oprot, this);
}
@Override
public java.lang.String toString() {
java.lang.StringBuilder sb = new java.lang.StringBuilder("StormBase(");
boolean first = true;
sb.append("name:");
if (this.name == null) {
sb.append("null");
} else {
sb.append(this.name);
}
first = false;
if (!first) sb.append(", ");
sb.append("status:");
if (this.status == null) {
sb.append("null");
} else {
sb.append(this.status);
}
first = false;
if (!first) sb.append(", ");
sb.append("num_workers:");
sb.append(this.num_workers);
first = false;
if (is_set_component_executors()) {
if (!first) sb.append(", ");
sb.append("component_executors:");
if (this.component_executors == null) {
sb.append("null");
} else {
sb.append(this.component_executors);
}
first = false;
}
if (is_set_launch_time_secs()) {
if (!first) sb.append(", ");
sb.append("launch_time_secs:");
sb.append(this.launch_time_secs);
first = false;
}
if (is_set_owner()) {
if (!first) sb.append(", ");
sb.append("owner:");
if (this.owner == null) {
sb.append("null");
} else {
sb.append(this.owner);
}
first = false;
}
if (is_set_topology_action_options()) {
if (!first) sb.append(", ");
sb.append("topology_action_options:");
if (this.topology_action_options == null) {
sb.append("null");
} else {
sb.append(this.topology_action_options);
}
first = false;
}
if (is_set_prev_status()) {
if (!first) sb.append(", ");
sb.append("prev_status:");
if (this.prev_status == null) {
sb.append("null");
} else {
sb.append(this.prev_status);
}
first = false;
}
if (is_set_component_debug()) {
if (!first) sb.append(", ");
sb.append("component_debug:");
if (this.component_debug == null) {
sb.append("null");
} else {
sb.append(this.component_debug);
}
first = false;
}
if (is_set_principal()) {
if (!first) sb.append(", ");
sb.append("principal:");
if (this.principal == null) {
sb.append("null");
} else {
sb.append(this.principal);
}
first = false;
}
if (is_set_topology_version()) {
if (!first) sb.append(", ");
sb.append("topology_version:");
if (this.topology_version == null) {
sb.append("null");
} else {
sb.append(this.topology_version);
}
first = false;
}
sb.append(")");
return sb.toString();
}
public void validate() throws org.apache.storm.thrift.TException {
// check for required fields
if (!is_set_name()) {
throw new org.apache.storm.thrift.protocol.TProtocolException("Required field 'name' is unset! Struct:" + toString());
}
if (!is_set_status()) {
throw new org.apache.storm.thrift.protocol.TProtocolException("Required field 'status' is unset! Struct:" + toString());
}
if (!is_set_num_workers()) {
throw new org.apache.storm.thrift.protocol.TProtocolException("Required field 'num_workers' is unset! Struct:" + toString());
}
// check for sub-struct validity
}
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
try {
write(new org.apache.storm.thrift.protocol.TCompactProtocol(new org.apache.storm.thrift.transport.TIOStreamTransport(out)));
} catch (org.apache.storm.thrift.TException te) {
throw new java.io.IOException(te);
}
}
private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, java.lang.ClassNotFoundException {
try {
// it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
__isset_bitfield = 0;
read(new org.apache.storm.thrift.protocol.TCompactProtocol(new org.apache.storm.thrift.transport.TIOStreamTransport(in)));
} catch (org.apache.storm.thrift.TException te) {
throw new java.io.IOException(te);
}
}
private static class StormBaseStandardSchemeFactory implements org.apache.storm.thrift.scheme.SchemeFactory {
public StormBaseStandardScheme getScheme() {
return new StormBaseStandardScheme();
}
}
private static class StormBaseStandardScheme extends org.apache.storm.thrift.scheme.StandardScheme<StormBase> {
public void read(org.apache.storm.thrift.protocol.TProtocol iprot, StormBase struct) throws org.apache.storm.thrift.TException {
org.apache.storm.thrift.protocol.TField schemeField;
iprot.readStructBegin();
while (true)
{
schemeField = iprot.readFieldBegin();
if (schemeField.type == org.apache.storm.thrift.protocol.TType.STOP) {
break;
}
switch (schemeField.id) {
case 1: // NAME
if (schemeField.type == org.apache.storm.thrift.protocol.TType.STRING) {
struct.name = iprot.readString();
struct.set_name_isSet(true);
} else {
org.apache.storm.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 2: // STATUS
if (schemeField.type == org.apache.storm.thrift.protocol.TType.I32) {
struct.status = org.apache.storm.generated.TopologyStatus.findByValue(iprot.readI32());
struct.set_status_isSet(true);
} else {
org.apache.storm.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 3: // NUM_WORKERS
if (schemeField.type == org.apache.storm.thrift.protocol.TType.I32) {
struct.num_workers = iprot.readI32();
struct.set_num_workers_isSet(true);
} else {
org.apache.storm.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 4: // COMPONENT_EXECUTORS
if (schemeField.type == org.apache.storm.thrift.protocol.TType.MAP) {
{
org.apache.storm.thrift.protocol.TMap _map752 = iprot.readMapBegin();
struct.component_executors = new java.util.HashMap<java.lang.String,java.lang.Integer>(2*_map752.size);
java.lang.String _key753;
int _val754;
for (int _i755 = 0; _i755 < _map752.size; ++_i755)
{
_key753 = iprot.readString();
_val754 = iprot.readI32();
struct.component_executors.put(_key753, _val754);
}
iprot.readMapEnd();
}
struct.set_component_executors_isSet(true);
} else {
org.apache.storm.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 5: // LAUNCH_TIME_SECS
if (schemeField.type == org.apache.storm.thrift.protocol.TType.I32) {
struct.launch_time_secs = iprot.readI32();
struct.set_launch_time_secs_isSet(true);
} else {
org.apache.storm.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 6: // OWNER
if (schemeField.type == org.apache.storm.thrift.protocol.TType.STRING) {
struct.owner = iprot.readString();
struct.set_owner_isSet(true);
} else {
org.apache.storm.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 7: // TOPOLOGY_ACTION_OPTIONS
if (schemeField.type == org.apache.storm.thrift.protocol.TType.STRUCT) {
struct.topology_action_options = new TopologyActionOptions();
struct.topology_action_options.read(iprot);
struct.set_topology_action_options_isSet(true);
} else {
org.apache.storm.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 8: // PREV_STATUS
if (schemeField.type == org.apache.storm.thrift.protocol.TType.I32) {
struct.prev_status = org.apache.storm.generated.TopologyStatus.findByValue(iprot.readI32());
struct.set_prev_status_isSet(true);
} else {
org.apache.storm.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 9: // COMPONENT_DEBUG
if (schemeField.type == org.apache.storm.thrift.protocol.TType.MAP) {
{
org.apache.storm.thrift.protocol.TMap _map756 = iprot.readMapBegin();
struct.component_debug = new java.util.HashMap<java.lang.String,DebugOptions>(2*_map756.size);
java.lang.String _key757;
DebugOptions _val758;
for (int _i759 = 0; _i759 < _map756.size; ++_i759)
{
_key757 = iprot.readString();
_val758 = new DebugOptions();
_val758.read(iprot);
struct.component_debug.put(_key757, _val758);
}
iprot.readMapEnd();
}
struct.set_component_debug_isSet(true);
} else {
org.apache.storm.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 10: // PRINCIPAL
if (schemeField.type == org.apache.storm.thrift.protocol.TType.STRING) {
struct.principal = iprot.readString();
struct.set_principal_isSet(true);
} else {
org.apache.storm.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 11: // TOPOLOGY_VERSION
if (schemeField.type == org.apache.storm.thrift.protocol.TType.STRING) {
struct.topology_version = iprot.readString();
struct.set_topology_version_isSet(true);
} else {
org.apache.storm.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
default:
org.apache.storm.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
iprot.readFieldEnd();
}
iprot.readStructEnd();
struct.validate();
}
public void write(org.apache.storm.thrift.protocol.TProtocol oprot, StormBase struct) throws org.apache.storm.thrift.TException {
struct.validate();
oprot.writeStructBegin(STRUCT_DESC);
if (struct.name != null) {
oprot.writeFieldBegin(NAME_FIELD_DESC);
oprot.writeString(struct.name);
oprot.writeFieldEnd();
}
if (struct.status != null) {
oprot.writeFieldBegin(STATUS_FIELD_DESC);
oprot.writeI32(struct.status.getValue());
oprot.writeFieldEnd();
}
oprot.writeFieldBegin(NUM_WORKERS_FIELD_DESC);
oprot.writeI32(struct.num_workers);
oprot.writeFieldEnd();
if (struct.component_executors != null) {
if (struct.is_set_component_executors()) {
oprot.writeFieldBegin(COMPONENT_EXECUTORS_FIELD_DESC);
{
oprot.writeMapBegin(new org.apache.storm.thrift.protocol.TMap(org.apache.storm.thrift.protocol.TType.STRING, org.apache.storm.thrift.protocol.TType.I32, struct.component_executors.size()));
for (java.util.Map.Entry<java.lang.String, java.lang.Integer> _iter760 : struct.component_executors.entrySet())
{
oprot.writeString(_iter760.getKey());
oprot.writeI32(_iter760.getValue());
}
oprot.writeMapEnd();
}
oprot.writeFieldEnd();
}
}
if (struct.is_set_launch_time_secs()) {
oprot.writeFieldBegin(LAUNCH_TIME_SECS_FIELD_DESC);
oprot.writeI32(struct.launch_time_secs);
oprot.writeFieldEnd();
}
if (struct.owner != null) {
if (struct.is_set_owner()) {
oprot.writeFieldBegin(OWNER_FIELD_DESC);
oprot.writeString(struct.owner);
oprot.writeFieldEnd();
}
}
if (struct.topology_action_options != null) {
if (struct.is_set_topology_action_options()) {
oprot.writeFieldBegin(TOPOLOGY_ACTION_OPTIONS_FIELD_DESC);
struct.topology_action_options.write(oprot);
oprot.writeFieldEnd();
}
}
if (struct.prev_status != null) {
if (struct.is_set_prev_status()) {
oprot.writeFieldBegin(PREV_STATUS_FIELD_DESC);
oprot.writeI32(struct.prev_status.getValue());
oprot.writeFieldEnd();
}
}
if (struct.component_debug != null) {
if (struct.is_set_component_debug()) {
oprot.writeFieldBegin(COMPONENT_DEBUG_FIELD_DESC);
{
oprot.writeMapBegin(new org.apache.storm.thrift.protocol.TMap(org.apache.storm.thrift.protocol.TType.STRING, org.apache.storm.thrift.protocol.TType.STRUCT, struct.component_debug.size()));
for (java.util.Map.Entry<java.lang.String, DebugOptions> _iter761 : struct.component_debug.entrySet())
{
oprot.writeString(_iter761.getKey());
_iter761.getValue().write(oprot);
}
oprot.writeMapEnd();
}
oprot.writeFieldEnd();
}
}
if (struct.principal != null) {
if (struct.is_set_principal()) {
oprot.writeFieldBegin(PRINCIPAL_FIELD_DESC);
oprot.writeString(struct.principal);
oprot.writeFieldEnd();
}
}
if (struct.topology_version != null) {
if (struct.is_set_topology_version()) {
oprot.writeFieldBegin(TOPOLOGY_VERSION_FIELD_DESC);
oprot.writeString(struct.topology_version);
oprot.writeFieldEnd();
}
}
oprot.writeFieldStop();
oprot.writeStructEnd();
}
}
private static class StormBaseTupleSchemeFactory implements org.apache.storm.thrift.scheme.SchemeFactory {
public StormBaseTupleScheme getScheme() {
return new StormBaseTupleScheme();
}
}
private static class StormBaseTupleScheme extends org.apache.storm.thrift.scheme.TupleScheme<StormBase> {
@Override
public void write(org.apache.storm.thrift.protocol.TProtocol prot, StormBase struct) throws org.apache.storm.thrift.TException {
org.apache.storm.thrift.protocol.TTupleProtocol oprot = (org.apache.storm.thrift.protocol.TTupleProtocol) prot;
oprot.writeString(struct.name);
oprot.writeI32(struct.status.getValue());
oprot.writeI32(struct.num_workers);
java.util.BitSet optionals = new java.util.BitSet();
if (struct.is_set_component_executors()) {
optionals.set(0);
}
if (struct.is_set_launch_time_secs()) {
optionals.set(1);
}
if (struct.is_set_owner()) {
optionals.set(2);
}
if (struct.is_set_topology_action_options()) {
optionals.set(3);
}
if (struct.is_set_prev_status()) {
optionals.set(4);
}
if (struct.is_set_component_debug()) {
optionals.set(5);
}
if (struct.is_set_principal()) {
optionals.set(6);
}
if (struct.is_set_topology_version()) {
optionals.set(7);
}
oprot.writeBitSet(optionals, 8);
if (struct.is_set_component_executors()) {
{
oprot.writeI32(struct.component_executors.size());
for (java.util.Map.Entry<java.lang.String, java.lang.Integer> _iter762 : struct.component_executors.entrySet())
{
oprot.writeString(_iter762.getKey());
oprot.writeI32(_iter762.getValue());
}
}
}
if (struct.is_set_launch_time_secs()) {
oprot.writeI32(struct.launch_time_secs);
}
if (struct.is_set_owner()) {
oprot.writeString(struct.owner);
}
if (struct.is_set_topology_action_options()) {
struct.topology_action_options.write(oprot);
}
if (struct.is_set_prev_status()) {
oprot.writeI32(struct.prev_status.getValue());
}
if (struct.is_set_component_debug()) {
{
oprot.writeI32(struct.component_debug.size());
for (java.util.Map.Entry<java.lang.String, DebugOptions> _iter763 : struct.component_debug.entrySet())
{
oprot.writeString(_iter763.getKey());
_iter763.getValue().write(oprot);
}
}
}
if (struct.is_set_principal()) {
oprot.writeString(struct.principal);
}
if (struct.is_set_topology_version()) {
oprot.writeString(struct.topology_version);
}
}
@Override
public void read(org.apache.storm.thrift.protocol.TProtocol prot, StormBase struct) throws org.apache.storm.thrift.TException {
org.apache.storm.thrift.protocol.TTupleProtocol iprot = (org.apache.storm.thrift.protocol.TTupleProtocol) prot;
struct.name = iprot.readString();
struct.set_name_isSet(true);
struct.status = org.apache.storm.generated.TopologyStatus.findByValue(iprot.readI32());
struct.set_status_isSet(true);
struct.num_workers = iprot.readI32();
struct.set_num_workers_isSet(true);
java.util.BitSet incoming = iprot.readBitSet(8);
if (incoming.get(0)) {
{
org.apache.storm.thrift.protocol.TMap _map764 = new org.apache.storm.thrift.protocol.TMap(org.apache.storm.thrift.protocol.TType.STRING, org.apache.storm.thrift.protocol.TType.I32, iprot.readI32());
struct.component_executors = new java.util.HashMap<java.lang.String,java.lang.Integer>(2*_map764.size);
java.lang.String _key765;
int _val766;
for (int _i767 = 0; _i767 < _map764.size; ++_i767)
{
_key765 = iprot.readString();
_val766 = iprot.readI32();
struct.component_executors.put(_key765, _val766);
}
}
struct.set_component_executors_isSet(true);
}
if (incoming.get(1)) {
struct.launch_time_secs = iprot.readI32();
struct.set_launch_time_secs_isSet(true);
}
if (incoming.get(2)) {
struct.owner = iprot.readString();
struct.set_owner_isSet(true);
}
if (incoming.get(3)) {
struct.topology_action_options = new TopologyActionOptions();
struct.topology_action_options.read(iprot);
struct.set_topology_action_options_isSet(true);
}
if (incoming.get(4)) {
struct.prev_status = org.apache.storm.generated.TopologyStatus.findByValue(iprot.readI32());
struct.set_prev_status_isSet(true);
}
if (incoming.get(5)) {
{
org.apache.storm.thrift.protocol.TMap _map768 = new org.apache.storm.thrift.protocol.TMap(org.apache.storm.thrift.protocol.TType.STRING, org.apache.storm.thrift.protocol.TType.STRUCT, iprot.readI32());
struct.component_debug = new java.util.HashMap<java.lang.String,DebugOptions>(2*_map768.size);
java.lang.String _key769;
DebugOptions _val770;
for (int _i771 = 0; _i771 < _map768.size; ++_i771)
{
_key769 = iprot.readString();
_val770 = new DebugOptions();
_val770.read(iprot);
struct.component_debug.put(_key769, _val770);
}
}
struct.set_component_debug_isSet(true);
}
if (incoming.get(6)) {
struct.principal = iprot.readString();
struct.set_principal_isSet(true);
}
if (incoming.get(7)) {
struct.topology_version = iprot.readString();
struct.set_topology_version_isSet(true);
}
}
}
private static <S extends org.apache.storm.thrift.scheme.IScheme> S scheme(org.apache.storm.thrift.protocol.TProtocol proto) {
return (org.apache.storm.thrift.scheme.StandardScheme.class.equals(proto.getScheme()) ? STANDARD_SCHEME_FACTORY : TUPLE_SCHEME_FACTORY).getScheme();
}
}
|
apache-2.0
|
james-horrocks/StratGA
|
Common/Market.cs
|
6158
|
/*
* QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals.
* Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.Collections.Generic;
namespace QuantConnect
{
/// <summary>
/// Markets Collection: Soon to be expanded to a collection of items specifying the market hour, timezones and country codes.
/// </summary>
public static class Market
{
// the upper bound (non-inclusive) for market identifiers
private const int MaxMarketIdentifier = 1000;
private static readonly object _lock = new object();
private static readonly Dictionary<string, int> Markets = new Dictionary<string, int>();
private static readonly Dictionary<int, string> ReverseMarkets = new Dictionary<int, string>();
private static readonly IEnumerable<Tuple<string, int>> HardcodedMarkets = new List<Tuple<string, int>>
{
Tuple.Create("empty", 0),
Tuple.Create(USA, 1),
Tuple.Create(FXCM, 2),
Tuple.Create(Oanda, 3),
Tuple.Create(Dukascopy, 4),
Tuple.Create(Bitfinex, 5),
Tuple.Create(Globex, 6),
Tuple.Create(NYMEX, 7),
Tuple.Create(CBOT, 8),
Tuple.Create(ICE, 9),
Tuple.Create(CBOE, 10)
};
static Market()
{
// initialize our maps
foreach (var market in HardcodedMarkets)
{
Markets[market.Item1] = market.Item2;
ReverseMarkets[market.Item2] = market.Item1;
}
}
/// <summary>
/// USA Market
/// </summary>
public const string USA = "usa";
/// <summary>
/// Oanda Market
/// </summary>
public const string Oanda = "oanda";
/// <summary>
/// FXCM Market Hours
/// </summary>
public const string FXCM = "fxcm";
/// <summary>
/// Dukascopy Market
/// </summary>
public const string Dukascopy = "dukascopy";
/// <summary>
/// Bitfinex market
/// </summary>
public const string Bitfinex = "bitfinex";
// Futures exchanges
/// <summary>
/// CME Globex
/// </summary>
public const string Globex = "cmeglobex";
/// <summary>
/// NYMEX
/// </summary>
public const string NYMEX = "nymex";
/// <summary>
/// CBOT
/// </summary>
public const string CBOT = "cbot";
/// <summary>
/// ICE
/// </summary>
public const string ICE = "ice";
/// <summary>
/// CBOE
/// </summary>
public const string CBOE = "cboe";
/// <summary>
/// Adds the specified market to the map of available markets with the specified identifier.
/// </summary>
/// <param name="market">The market string to add</param>
/// <param name="identifier">The identifier for the market, this value must be positive and less than 1000</param>
public static void Add(string market, int identifier)
{
if (identifier >= MaxMarketIdentifier)
{
var message = string.Format("The market identifier is limited to positive values less than {0}.", MaxMarketIdentifier);
throw new ArgumentOutOfRangeException("identifier", message);
}
market = market.ToLower();
// we lock since we don't want multiple threads getting these two dictionaries out of sync
lock (_lock)
{
int marketIdentifier;
if (Markets.TryGetValue(market, out marketIdentifier) && identifier != marketIdentifier)
{
throw new ArgumentException("Attempted to add an already added market with a different identifier. Market: " + market);
}
string existingMarket;
if (ReverseMarkets.TryGetValue(identifier, out existingMarket))
{
throw new ArgumentException("Attempted to add a market identifier that is already in use. New Market: " + market + " Existing Market: " + existingMarket);
}
// update our maps
Markets[market] = identifier;
ReverseMarkets[identifier] = market;
}
}
/// <summary>
/// Gets the market code for the specified market. Returns <c>null</c> if the market is not found
/// </summary>
/// <param name="market">The market to check for (case sensitive)</param>
/// <returns>The internal code used for the market. Corresponds to the value used when calling <see cref="Add"/></returns>
public static int? Encode(string market)
{
lock (_lock)
{
int code;
return !Markets.TryGetValue(market, out code) ? (int?) null : code;
}
}
/// <summary>
/// Gets the market string for the specified market code.
/// </summary>
/// <param name="code">The market code to be decoded</param>
/// <returns>The string representation of the market, or null if not found</returns>
public static string Decode(int code)
{
lock (_lock)
{
string market;
return !ReverseMarkets.TryGetValue(code, out market) ? null : market;
}
}
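        /// <summary>
        /// Illustrative sketch only (not part of the original source): a hedged example of the
        /// intended Add/Encode/Decode round-trip. The market name "acme" and the identifier 42
        /// are hypothetical values chosen purely for illustration.
        /// </summary>
        private static void ExampleUsage()
        {
            Add("acme", 42);                   // register a custom market below the 1000 limit
            int? code = Encode("acme");        // returns 42
            string market = Decode(42);        // returns "acme"
        }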
}
}
|
apache-2.0
|
sajuptpm/contrail-controller
|
src/bgp/bgp_condition_listener.cc
|
14666
|
/*
* Copyright (c) 2013 Juniper Networks, Inc. All rights reserved.
*/
#include "bgp/bgp_condition_listener.h"
#include <boost/bind.hpp>
#include <utility>
#include "base/task_annotations.h"
#include "base/task_trigger.h"
#include "bgp/bgp_route.h"
#include "bgp/bgp_server.h"
#include "bgp/bgp_table.h"
#include "db/db_table_partition.h"
using std::make_pair;
using std::map;
using std::pair;
using std::set;
//
// Helper class to maintain the WalkRequests
// Contains a map of ConditionMatch object and RequestComplete callback
// pending_walk_list_: list of ConditionMatch objects requesting a walk (walk not yet started)
// current_walk_list_: list of ConditionMatch objects covered by the walk currently in progress
//
class WalkRequest {
public:
typedef map<ConditionMatchPtr,
BgpConditionListener::RequestDoneCb> WalkList;
WalkRequest();
void AddMatchObject(ConditionMatch *obj,
BgpConditionListener::RequestDoneCb cb) {
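        // Note added for clarity: if this match object is already queued for the next walk
        // and the earlier request carried no completion callback, adopt the new callback;
        // otherwise the original pending request is left untouched.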
pair<WalkList::iterator, bool> ret =
pending_walk_list_.insert(make_pair(obj, cb));
if (!ret.second) {
if (ret.first->second.empty()) {
ret.first->second = cb;
}
}
}
//
// When the walk actually starts, pending_walk_list_ entries are moved to
// current_walk_list_.
//
void WalkStarted(DBTableWalker::WalkId id) {
id_ = id;
pending_walk_list_.swap(current_walk_list_);
pending_walk_list_.clear();
}
DBTableWalker::WalkId GetWalkId() const {
return id_;
}
void ResetWalkId() {
id_ = DBTableWalker::kInvalidWalkerId;
}
bool walk_in_progress() {
return (id_ != DBTableWalker::kInvalidWalkerId);
}
//
// Table requires further walk as requests are cached in pending_walk_list_
// during the current table walk
//
bool walk_again() {
return !pending_walk_list_.empty();
}
WalkList *walk_list() {
        return &current_walk_list_;
}
bool is_walk_pending(ConditionMatch *obj) {
if (pending_walk_list_.empty()) {
return false;
} else {
return (pending_walk_list_.find(ConditionMatchPtr(obj)) !=
pending_walk_list_.end());
}
}
private:
WalkList pending_walk_list_;
WalkList current_walk_list_;
DBTableWalker::WalkId id_;
};
//
// ConditionMatchTableState
// State managed by the BgpConditionListener for each of the table it is
// listening to.
// BgpConditionListener registers for a DBTable when application request
// for ConditionMatch
// BgpConditionListener unregisters from the DBTable when application removes
// the ConditionMatch and all table walks have finished
// Holds a table reference to ensure that table with active walk or listener
// is not deleted
//
class ConditionMatchTableState {
public:
typedef set<ConditionMatchPtr> MatchList;
ConditionMatchTableState(BgpTable *table, DBTableBase::ListenerId id);
~ConditionMatchTableState();
void ManagedDelete() {
}
DBTableBase::ListenerId GetListenerId() const {
return id_;
}
MatchList *match_objects() {
return &match_object_list_;
}
void AddMatchObject(ConditionMatch *obj) {
match_object_list_.insert(ConditionMatchPtr(obj));
}
//
    // Mutex required to manage the MatchState list for concurrency
//
tbb::mutex &table_state_mutex() {
return table_state_mutex_;
}
private:
tbb::mutex table_state_mutex_;
DBTableBase::ListenerId id_;
MatchList match_object_list_;
LifetimeRef<ConditionMatchTableState> table_delete_ref_;
DISALLOW_COPY_AND_ASSIGN(ConditionMatchTableState);
};
BgpConditionListener::BgpConditionListener(BgpServer *server) :
server_(server),
walk_trigger_(new TaskTrigger(boost::bind(&BgpConditionListener::StartWalk,
this),
TaskScheduler::GetInstance()->GetTaskId("bgp::Config"), 0)) {
}
//
// AddMatchCondition:
// API to add ConditionMatch object against a table
// All entries present in this table (across all table partitions) will be matched
// against this ConditionMatch object.
// Matching is done either during the table walk or on table entry notification
//
void BgpConditionListener::AddMatchCondition(BgpTable *table,
ConditionMatch *obj,
RequestDoneCb cb) {
CHECK_CONCURRENCY("bgp::Config");
ConditionMatchTableState *ts = NULL;
TableMap::iterator loc = map_.find(table);
if (loc == map_.end()) {
DBTableBase::ListenerId id =
table->Register(
boost::bind(&BgpConditionListener::BgpRouteNotify,
this, server(), _1, _2),
"BgpConditionListener");
ts = new ConditionMatchTableState(table, id);
map_.insert(make_pair(table, ts));
} else {
ts = loc->second;
}
ts->AddMatchObject(obj);
TableWalk(table, obj, cb);
}
//
// RemoveMatchCondition:
// API to Remove ConditionMatch object from a table
// All entries present in this table (across all table partitions) will be matched
// against this ConditionMatch object and notified to the application with the "DELETE" flag
// All Match notifications after invoking this API are made with the "DELETE" flag
//
void BgpConditionListener::RemoveMatchCondition(BgpTable *table,
ConditionMatch *obj,
RequestDoneCb cb) {
CHECK_CONCURRENCY("bgp::Config");
obj->SetDeleted();
TableWalk(table, obj, cb);
}
//
// MatchState
// BgpConditionListener will hold this as DBState for each BgpRoute
//
class MatchState : public DBState {
public:
typedef map<ConditionMatchPtr, ConditionMatchState *> MatchStateList;
private:
friend class BgpConditionListener;
MatchStateList list_;
};
//
// CheckMatchState
// API to check if MatchState is added by module registering ConditionMatch
// object.
//
bool BgpConditionListener::CheckMatchState(BgpTable *table, BgpRoute *route,
ConditionMatch *obj) {
TableMap::iterator loc = map_.find(table);
ConditionMatchTableState *ts = loc->second;
tbb::mutex::scoped_lock lock(ts->table_state_mutex());
// Get the DBState.
MatchState *dbstate =
static_cast<MatchState *>(route->GetState(table, ts->GetListenerId()));
if (dbstate == NULL)
return false;
// Index with ConditionMatch object to check.
MatchState::MatchStateList::iterator it =
dbstate->list_.find(ConditionMatchPtr(obj));
return (it != dbstate->list_.end()) ? true : false;
}
//
// GetMatchState
// API to fetch MatchState added by module registering ConditionMatch object
// MatchState is maintained as a map from ConditionMatch object to state inside the
// DBState added by the BgpConditionListener module
//
ConditionMatchState * BgpConditionListener::GetMatchState(BgpTable *table,
BgpRoute *route,
ConditionMatch *obj) {
TableMap::iterator loc = map_.find(table);
ConditionMatchTableState *ts = loc->second;
tbb::mutex::scoped_lock lock(ts->table_state_mutex());
// Get the DBState
MatchState *dbstate =
static_cast<MatchState *>(route->GetState(table, ts->GetListenerId()));
if (dbstate == NULL) return NULL;
// Index with ConditionMatch object to retrieve the MatchState
MatchState::MatchStateList::iterator it =
dbstate->list_.find(ConditionMatchPtr(obj));
return (it != dbstate->list_.end()) ? it->second : NULL;
}
//
// SetMatchState
// API for module registering ConditionMatch object to add MatchState
//
void BgpConditionListener::SetMatchState(BgpTable *table, BgpRoute *route,
ConditionMatch *obj,
ConditionMatchState *state) {
TableMap::iterator loc = map_.find(table);
ConditionMatchTableState *ts = loc->second;
tbb::mutex::scoped_lock lock(ts->table_state_mutex());
// Get the DBState
MatchState *dbstate =
static_cast<MatchState *>(route->GetState(table, ts->GetListenerId()));
if (!dbstate) {
// Add new DBState when first application requests for MatchState
dbstate = new MatchState();
route->SetState(table, ts->GetListenerId(), dbstate);
} else {
// Add Match to the existing list
MatchState::MatchStateList::iterator it =
dbstate->list_.find(ConditionMatchPtr(obj));
assert(it == dbstate->list_.end());
}
dbstate->list_.insert(make_pair(obj, state));
obj->IncrementNumMatchstate();
}
//
// RemoveMatchState
// Clear the module specific MatchState
//
void BgpConditionListener::RemoveMatchState(BgpTable *table, BgpRoute *route,
ConditionMatch *obj) {
TableMap::iterator loc = map_.find(table);
ConditionMatchTableState *ts = loc->second;
tbb::mutex::scoped_lock lock(ts->table_state_mutex());
// Get the DBState
MatchState *dbstate =
static_cast<MatchState *>(route->GetState(table, ts->GetListenerId()));
MatchState::MatchStateList::iterator it =
dbstate->list_.find(ConditionMatchPtr(obj));
assert(it != dbstate->list_.end());
dbstate->list_.erase(it);
obj->DecrementNumMatchstate();
if (dbstate->list_.empty()) {
// Remove the DBState when last module removes the MatchState
route->ClearState(table, ts->GetListenerId());
delete dbstate;
}
}
//
// Add ConditionMatch object to pending_walk_list_
// and trigger the task to actually start the walk
//
void BgpConditionListener::TableWalk(BgpTable *table, ConditionMatch *obj,
RequestDoneCb cb) {
CHECK_CONCURRENCY("bgp::Config");
WalkRequestMap::iterator loc = walk_map_.find(table);
WalkRequest *walk_req = NULL;
if (loc != walk_map_.end()) {
walk_req = loc->second;
walk_req->AddMatchObject(obj, cb);
} else {
walk_req = new WalkRequest();
walk_map_.insert(make_pair(table, walk_req));
walk_req->AddMatchObject(obj, cb);
}
walk_trigger_->Set();
}
bool BgpConditionListener::StartWalk() {
CHECK_CONCURRENCY("bgp::Config");
DBTableWalker::WalkCompleteFn walk_complete
= boost::bind(&BgpConditionListener::WalkDone, this, _1);
DBTableWalker::WalkFn walker
= boost::bind(&BgpConditionListener::BgpRouteNotify, this, server(),
_1, _2);
for (WalkRequestMap::iterator it = walk_map_.begin();
it != walk_map_.end(); ++it) {
if (it->second->walk_in_progress()) {
continue;
}
DB *db = server()->database();
DBTableWalker::WalkId id =
db->GetWalker()->WalkTable(it->first, NULL, walker, walk_complete);
it->second->WalkStarted(id);
}
return true;
}
// Table listener
bool BgpConditionListener::BgpRouteNotify(BgpServer *server,
DBTablePartBase *root,
DBEntryBase *entry) {
BgpTable *bgptable = static_cast<BgpTable *>(root->parent());
BgpRoute *rt = static_cast<BgpRoute *> (entry);
// Either the route is deleted or no valid path exists
bool del_rt = !rt->IsUsable();
TableMap::iterator loc = map_.find(bgptable);
assert(loc != map_.end());
ConditionMatchTableState *ts = loc->second;
DBTableBase::ListenerId id = ts->GetListenerId();
assert(id != DBTableBase::kInvalidId);
for (ConditionMatchTableState::MatchList::iterator match_obj_it =
ts->match_objects()->begin();
match_obj_it != ts->match_objects()->end(); match_obj_it++) {
bool deleted = false;
if ((*match_obj_it)->deleted() || del_rt) {
deleted = true;
}
(*match_obj_it)->Match(server, bgptable, rt, deleted);
}
return true;
}
//
// WalkComplete function
// At the end of the walk reset the WalkId.
// Invoke the RequestDoneCb for all objects for which walk was started
// Clear the current_walk_list_ and check whether the table needs to be
// walked again.
//
void BgpConditionListener::WalkDone(DBTableBase *table) {
BgpTable *bgptable = static_cast<BgpTable *>(table);
WalkRequestMap::iterator it = walk_map_.find(bgptable);
assert(it != walk_map_.end());
WalkRequest *walk_state = it->second;
walk_state->ResetWalkId();
//
// Invoke the RequestDoneCb after the TableWalk
//
for (WalkRequest::WalkList::iterator walk_it =
walk_state->walk_list()->begin();
walk_it != walk_state->walk_list()->end(); ++walk_it) {
// If application has registered a WalkDone callback, invoke it
if (!walk_it->second.empty())
walk_it->second(bgptable, walk_it->first.get());
}
walk_state->walk_list()->clear();
if (walk_state->walk_again()) {
// More walk requests are pending
walk_trigger_->Set();
} else {
delete walk_state;
walk_map_.erase(it);
}
}
void BgpConditionListener::UnregisterMatchCondition(BgpTable *bgptable,
ConditionMatch *obj) {
TableMap::iterator loc = map_.find(bgptable);
assert(loc != map_.end());
ConditionMatchTableState *ts = loc->second;
WalkRequestMap::iterator it = walk_map_.find(bgptable);
WalkRequest *walk_state = NULL;
if (it != walk_map_.end()) {
walk_state = it->second;
}
//
// Wait for Walk completion of deleted ConditionMatch object
//
if ((!walk_state || !walk_state->is_walk_pending(obj)) &&
obj->deleted()) {
ts->match_objects()->erase(obj);
}
if (ts->match_objects()->empty()) {
bgptable->Unregister(ts->GetListenerId());
map_.erase(bgptable);
delete ts;
}
}
void BgpConditionListener::DisableTableWalkProcessing() {
walk_trigger_->set_disable();
}
void BgpConditionListener::EnableTableWalkProcessing() {
walk_trigger_->set_enable();
}
ConditionMatchTableState::ConditionMatchTableState(BgpTable *table,
DBTableBase::ListenerId id)
: id_(id), table_delete_ref_(this, table->deleter()) {
assert(table->deleter() != NULL);
}
ConditionMatchTableState::~ConditionMatchTableState() {
}
WalkRequest::WalkRequest() : id_(DBTableWalker::kInvalidWalkerId) {
}
|
apache-2.0
|
CMPUT301W16T05/c301_w16
|
app/src/main/java/com/example/c301_w16_g5/c301_w16_g5/GenericModel.java
|
811
|
package com.example.c301_w16_g5.c301_w16_g5;
import java.util.ArrayList;
/**
* Provides the basic framework for all models in the application, such that
* they have views that display them, and ensure these views are updated
* appropriately. This is an integral part of our MVC design.
*
* @author Hailey
* @version 1.4, 03/02/2016
*/
public abstract class GenericModel<V extends GenericView> {
private ArrayList<V> views;
public GenericModel() {
views = new ArrayList<>();
}
public void addView(V view) {
if (!views.contains(view)) {
views.add(view);
}
}
public void deleteView(V view) {
views.remove(view);
}
public void notifyViews() {
for (V view : views) {
view.update(this);
}
}
}
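/*
 * Illustrative sketch only (not part of the original project), assuming a concrete
 * GenericView subtype named CounterView exists; "Counter" and "CounterView" are
 * hypothetical names used purely for illustration.
 *
 *     class Counter extends GenericModel<CounterView> {
 *         private int count = 0;
 *         void increment() {
 *             count++;
 *             notifyViews();   // every registered CounterView receives update(this)
 *         }
 *         int getCount() { return count; }
 *     }
 */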
|
apache-2.0
|
peridotperiod/isis
|
core/metamodel/src/main/java/org/apache/isis/core/metamodel/facets/param/choices/ActionParameterChoicesFacet.java
|
1434
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.isis.core.metamodel.facets.param.choices;
import java.util.List;
import org.apache.isis.core.metamodel.adapter.ObjectAdapter;
import org.apache.isis.core.metamodel.facetapi.Facet;
/**
* Obtain choices for each of the parameters of the action.
*
* <p>
* In the standard Apache Isis Programming Model, corresponds to invoking the
* <tt>choicesNXxx</tt> support method for an action (where N is the 0-based
* parameter number).
*/
public interface ActionParameterChoicesFacet extends Facet {
public Object[] getChoices(ObjectAdapter target, List<ObjectAdapter> arguments);
}
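/*
 * Illustrative sketch only (not part of the original source): under the naming convention
 * described in the Javadoc above, choices for parameter 0 of a hypothetical "placeOrder"
 * action would typically come from a support method such as
 *
 *     public List<Product> choices0PlaceOrder() {
 *         return productRepository.allProducts();   // "productRepository" is hypothetical
 *     }
 *
 * which this facet abstracts over via getChoices(...).
 */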
|
apache-2.0
|
dc3-plaso/dfvfs
|
tests/path/bde_path_spec.py
|
1099
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""Tests for the BDE path specification implementation."""
import unittest
from dfvfs.path import bde_path_spec
from tests.path import test_lib
class BDEPathSpecTest(test_lib.PathSpecTestCase):
"""Tests for the BDE path specification implementation."""
def testInitialize(self):
"""Tests the path specification initialization."""
path_spec = bde_path_spec.BDEPathSpec(parent=self._path_spec)
self.assertIsNotNone(path_spec)
with self.assertRaises(ValueError):
_ = bde_path_spec.BDEPathSpec(parent=None)
with self.assertRaises(ValueError):
_ = bde_path_spec.BDEPathSpec(parent=self._path_spec, bogus=u'BOGUS')
def testComparable(self):
"""Tests the path specification comparable property."""
path_spec = bde_path_spec.BDEPathSpec(parent=self._path_spec)
self.assertIsNotNone(path_spec)
expected_comparable = u'\n'.join([
u'type: TEST',
u'type: BDE',
u''])
self.assertEqual(path_spec.comparable, expected_comparable)
if __name__ == '__main__':
unittest.main()
|
apache-2.0
|
talenguyen/JFlux
|
library/src/test/java/com/tale/jflux/FluxStoreTest.java
|
1383
|
package com.tale.jflux;
import org.mockito.Mock;
/**
* Author tale. Created on 8/1/15.
*/
public class FluxStoreTest {
@Mock OnChangeListener onChangeListener;
Action sampleAction;
FluxStore fluxStore;
//@Before
//public void setUp() throws Exception {
//
// MockitoAnnotations.initMocks(this);
//
// fluxStore = new FluxStore() {
// @Override public void onReceivedAction(Action payload) {
// emitChange();
// }
// };
//
// sampleAction = new Action() {
// @Override
// public int getId() {
// return 0;
// }
// };
//}
//
//@org.junit.Test
//public void testBindView() throws Exception {
// fluxStore.registerForChangeEvent(onChangeListener);
// fluxStore.onReceivedAction(sampleAction);
// Mockito.verify(onChangeListener).onChanged();
//}
//
//@org.junit.Test
//public void testUnBindView() throws Exception {
// // Bind view.
// fluxStore.registerForChangeEvent(onChangeListener);
// // Then unbind view.
// fluxStore.unregisterForChangeEvent(onChangeListener);
// fluxStore.onReceivedAction(sampleAction);
// // Expect onChanged not be call.
// Mockito.verify(onChangeListener, Mockito.never()).onChanged();
//}
}
|
apache-2.0
|
dingjun84/mq-backup
|
rocketmq-example/src/main/java/com/alibaba/rocketmq/example/benchmark/Producer.java
|
8279
|
/**
* Copyright (C) 2010-2013 Alibaba Group Holding Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alibaba.rocketmq.example.benchmark;
import java.util.LinkedList;
import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.atomic.AtomicLong;
import com.alibaba.rocketmq.client.exception.MQBrokerException;
import com.alibaba.rocketmq.client.exception.MQClientException;
import com.alibaba.rocketmq.client.producer.DefaultMQProducer;
import com.alibaba.rocketmq.common.message.Message;
import com.alibaba.rocketmq.remoting.exception.RemotingException;
/**
 * Performance test: sends messages synchronously from multiple threads.
*/
public class Producer {
public static void main(String[] args) throws MQClientException {
final int threadCount = args.length >= 1 ? Integer.parseInt(args[0]) : 32;
final int messageSize = args.length >= 2 ? Integer.parseInt(args[1]) : 256;
System.out.printf("threadCount %d messageSize %d\n", threadCount, messageSize);
final Message msg = buildMessage(messageSize);
final ExecutorService sendThreadPool = Executors.newFixedThreadPool(threadCount);
final StatsBenchmarkProducer statsBenchmark = new StatsBenchmarkProducer();
final Timer timer = new Timer("BenchmarkTimerThread", true);
final LinkedList<Long[]> snapshotList = new LinkedList<Long[]>();
timer.scheduleAtFixedRate(new TimerTask() {
@Override
public void run() {
snapshotList.addLast(statsBenchmark.createSnapshot());
if (snapshotList.size() > 10) {
snapshotList.removeFirst();
}
}
}, 1000, 1000);
timer.scheduleAtFixedRate(new TimerTask() {
private void printStats() {
if (snapshotList.size() >= 10) {
Long[] begin = snapshotList.getFirst();
Long[] end = snapshotList.getLast();
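                    // Snapshot index layout (see StatsBenchmarkProducer.createSnapshot()):
                    // [0] timestamp ms, [1] send success, [2] send failed,
                    // [3] response success, [4] response failed, [5] total send RT ms.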
final long sendTps =
(long) (((end[3] - begin[3]) / (double) (end[0] - begin[0])) * 1000L);
final double averageRT = ((end[5] - begin[5]) / (double) (end[3] - begin[3]));
System.out.printf(
"Send TPS: %d Max RT: %d Average RT: %7.3f Send Failed: %d Response Failed: %d\n"//
, sendTps//
, statsBenchmark.getSendMessageMaxRT().get()//
, averageRT//
, end[2]//
, end[4]//
);
}
}
@Override
public void run() {
try {
this.printStats();
}
catch (Exception e) {
e.printStackTrace();
}
}
}, 10000, 10000);
final DefaultMQProducer producer = new DefaultMQProducer("benchmark_producer");
producer.setInstanceName(Long.toString(System.currentTimeMillis()));
producer.setCompressMsgBodyOverHowmuch(Integer.MAX_VALUE);
producer.start();
for (int i = 0; i < threadCount; i++) {
sendThreadPool.execute(new Runnable() {
@Override
public void run() {
while (true) {
try {
final long beginTimestamp = System.currentTimeMillis();
producer.send(msg);
statsBenchmark.getSendRequestSuccessCount().incrementAndGet();
statsBenchmark.getReceiveResponseSuccessCount().incrementAndGet();
final long currentRT = System.currentTimeMillis() - beginTimestamp;
statsBenchmark.getSendMessageSuccessTimeTotal().addAndGet(currentRT);
long prevMaxRT = statsBenchmark.getSendMessageMaxRT().get();
while (currentRT > prevMaxRT) {
boolean updated =
statsBenchmark.getSendMessageMaxRT().compareAndSet(prevMaxRT,
currentRT);
if (updated)
break;
prevMaxRT = statsBenchmark.getSendMessageMaxRT().get();
}
}
catch (RemotingException e) {
statsBenchmark.getSendRequestFailedCount().incrementAndGet();
e.printStackTrace();
}
catch (InterruptedException e) {
statsBenchmark.getSendRequestFailedCount().incrementAndGet();
e.printStackTrace();
}
catch (MQClientException e) {
statsBenchmark.getSendRequestFailedCount().incrementAndGet();
e.printStackTrace();
}
catch (MQBrokerException e) {
statsBenchmark.getReceiveResponseFailedCount().incrementAndGet();
e.printStackTrace();
}
}
}
});
}
}
private static Message buildMessage(final int messageSize) {
Message msg = new Message();
msg.setTopic("BenchmarkTest");
StringBuilder sb = new StringBuilder();
for (int i = 0; i < messageSize; i += 10) {
sb.append("hello baby");
}
msg.setBody(sb.toString().getBytes());
return msg;
}
}
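/*
 * Usage sketch (added for clarity, not part of the original source): the benchmark takes two
 * optional positional arguments, e.g.
 *
 *     java com.alibaba.rocketmq.example.benchmark.Producer 64 512
 *
 * where 64 is the number of sending threads (default 32) and 512 is the message body size in
 * bytes (default 256), matching the argument parsing in main().
 */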
class StatsBenchmarkProducer {
    // number of send requests issued successfully
    private final AtomicLong sendRequestSuccessCount = new AtomicLong(0L);
    // number of send requests that failed before a broker response
    private final AtomicLong sendRequestFailedCount = new AtomicLong(0L);
    // number of broker responses received successfully
    private final AtomicLong receiveResponseSuccessCount = new AtomicLong(0L);
    // number of broker responses indicating failure
    private final AtomicLong receiveResponseFailedCount = new AtomicLong(0L);
    // accumulated round-trip time (ms) of all successful sends
    private final AtomicLong sendMessageSuccessTimeTotal = new AtomicLong(0L);
    // maximum round-trip time (ms) observed for a single send
    private final AtomicLong sendMessageMaxRT = new AtomicLong(0L);
public Long[] createSnapshot() {
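        // Snapshot layout (consumed by the printStats() timer task in Producer):
        // [0] wall-clock millis, [1] send success, [2] send failed,
        // [3] response success, [4] response failed, [5] total RT (ms) of successful sends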
Long[] snap = new Long[] {//
System.currentTimeMillis(),//
this.sendRequestSuccessCount.get(),//
this.sendRequestFailedCount.get(),//
this.receiveResponseSuccessCount.get(),//
this.receiveResponseFailedCount.get(),//
this.sendMessageSuccessTimeTotal.get(), //
};
return snap;
}
public AtomicLong getSendRequestSuccessCount() {
return sendRequestSuccessCount;
}
public AtomicLong getSendRequestFailedCount() {
return sendRequestFailedCount;
}
public AtomicLong getReceiveResponseSuccessCount() {
return receiveResponseSuccessCount;
}
public AtomicLong getReceiveResponseFailedCount() {
return receiveResponseFailedCount;
}
public AtomicLong getSendMessageSuccessTimeTotal() {
return sendMessageSuccessTimeTotal;
}
public AtomicLong getSendMessageMaxRT() {
return sendMessageMaxRT;
}
}
|
apache-2.0
|
intecap/javase1
|
basics/src/main/java/co/edu/intecap/basics/Constants.java
|
89
|
package co.edu.intecap.basics;
public enum Constants {
ADOLESCENTE,
JOVEN,
ADULTO;
}
|
apache-2.0
|
S0AndS0/PayBit_Forward
|
templates/FaucetBox_S0AndS0/adverts/advert.js
|
198
|
document.write('<div id="tester" style="display:none">an advertisement</div>');
/* The hidden div above is written with JS so the page can later check whether an
   ad blocker removed or hid it (adblock detection).
   Adapted from: http://w3guy.com/detecting-adblock/ */
|
apache-2.0
|
metowa1227/PocketMine
|
src/pocketmine/packs/ResourcePacks.php
|
2567
|
<?php
/*
* ____ _ _ _ _ _
* | _ \| | | | (_) | | | |
* | |_) | |_ _ ___| | _ __ _| |__ | |_
* | _ <| | | | |/ _ \ | | |/ _` | '_ \| __|
* | |_) | | |_| | __/ |____| | (_| | | | | |_
* |____/|_|\__,_|\___|______|_|\__, |_| |_|\__|
* __/ |
* |___/
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* @author BlueLightJapan Team
*
*/
namespace pocketmine\packs;
use pocketmine\network\protocol\ResourcePacksInfoPacket;
use pocketmine\network\protocol\ResourcePackDataInfoPacket;
use pocketmine\network\protocol\ResourcePackStackPacket;
use pocketmine\network\protocol\ResourcePackChunkRequestPacket;
use pocketmine\network\protocol\ResourcePackChunkDataPacket;
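/**
 * Helper for sending resource/behaviour pack packets (packs info, pack stack,
 * data info, chunk data) to a player.
 */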
class ResourcePacks{
public $mustAccept = false;
	/** @var ResourcePackInfoEntry[] */
	public $behaviourPackEntries = [];
	/** @var ResourcePackInfoEntry[] */
	public $resourcePackEntries = [];
	/** @var ResourcePackInfoEntry[] */
	public $packEntries = [];
public function __construct(){
}
public function sendPacksInfo($player){
$info = new ResourcePacksInfoPacket();
$info->mustAccept = $this->mustAccept;
$info->behaviourPackEntries = $this->behaviourPackEntries;
$info->resourcePackEntries = $this->resourcePackEntries;
$player->dataPacket($info);
}
	public function sendPackDataInfo($player, $packid){
		$datainfo = new ResourcePackDataInfoPacket();
		$datainfo->packid = $packid;
		$datainfo->int1 = 0;
		$datainfo->int2 = 1;
		$datainfo->size = $this->packEntries[$packid]->getPackSize();
		$datainfo->pack = $this->packEntries[$packid]->getPackData();
		$player->dataPacket($datainfo);
	}
public function sendPackStack($player){
$stack = new ResourcePackStackPacket();
$stack->mustAccept = $this->mustAccept;
$stack->behaviourPackEntries = $this->behaviourPackEntries;
$stack->resourcePackEntries = $this->resourcePackEntries;
$player->dataPacket($stack);
}
	public function sendPackChunkData($player, $packid){
		$chunkdata = new ResourcePackChunkDataPacket();
		$chunkdata->packid = $packid;
		$chunkdata->int1 = 0;
		$chunkdata->size = $this->packEntries[$packid]->getPackSize();
		$chunkdata->int2 = 1;
		//$chunkdata->payload = $this->packEntries[$packid]->getPackData();
		$chunkdata->byte = 0;
		$player->dataPacket($chunkdata);
	}
}
|
apache-2.0
|
wapalxj/Android_C2_UI
|
C2_UI/c4_13_communicating/src/androidTest/java/com/example/vero1/c4_13_communicating/ApplicationTest.java
|
380
|
package com.example.vero1.c4_13_communicating;
import android.app.Application;
import android.test.ApplicationTestCase;
/**
* <a href="http://d.android.com/tools/testing/testing_android.html">Testing Fundamentals</a>
*/
public class ApplicationTest extends ApplicationTestCase<Application> {
public ApplicationTest() {
super(Application.class);
}
}
|
apache-2.0
|
jhipster/generator-jhipster
|
generators/docker-utils.js
|
4445
|
/**
* Copyright 2013-2022 the original author or authors from the JHipster project.
*
* This file is part of the JHipster project, see https://www.jhipster.tech/
* for more information.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
const shelljs = require('shelljs');
const chalk = require('chalk');
const dockerCLI = require('./docker-cli');
const { GRADLE, MAVEN } = require('../jdl/jhipster/build-tool-types');
/**
 * Docker-related helper methods shared by the generators.
 * The public API methods can be directly utilized as well using commonJS require.
 *
 * The method signatures in public API should not be changed without a major version change
 */
module.exports = {
checkDocker,
checkImageExist,
checkAndBuildImages,
};
/**
 * Check that Docker 1.10.0 or later is installed; sets the abort flag otherwise.
 */
function checkDocker() {
if (this.abort || this.skipChecks) return;
const done = this.async();
shelljs.exec('docker -v', { silent: true }, (code, stdout, stderr) => {
if (stderr) {
this.log(
chalk.red(
'Docker version 1.10.0 or later is not installed on your computer.\n' +
' Read http://docs.docker.com/engine/installation/#installation\n'
)
);
this.abort = true;
} else {
const dockerVersion = stdout.split(' ')[2].replace(/,/g, '');
const dockerVersionMajor = dockerVersion.split('.')[0];
const dockerVersionMinor = dockerVersion.split('.')[1];
if (dockerVersionMajor < 1 || (dockerVersionMajor === 1 && dockerVersionMinor < 10)) {
this.log(
chalk.red(
`Docker version 1.10.0 or later is not installed on your computer.
Docker version found: ${dockerVersion}
Read http://docs.docker.com/engine/installation/#installation`
)
);
this.abort = true;
} else {
this.log.ok('Docker is installed');
}
}
done();
});
}
/**
 * Check that a Docker image exists in a JHipster app.
 *
 * @param opts Options to pass.
 * @property cwd JHipster app directory. default is './'
 * @property appConfig Configuration for the current application
 */
function checkImageExist(opts = { cwd: './', appConfig: null }) {
if (this.abort) return;
let imagePath = '';
this.hasWarning = false;
this.warningMessage = 'To generate the missing Docker image(s), please run:\n';
if (opts.appConfig.buildTool === MAVEN) {
imagePath = this.destinationPath(`${opts.cwd + opts.cwd}/target/docker`);
this.dockerBuildCommand = './mvnw -ntp -Pprod verify jib:dockerBuild';
} else {
imagePath = this.destinationPath(`${opts.cwd + opts.cwd}/build/docker`);
this.dockerBuildCommand = './gradlew bootJar -Pprod jibDockerBuild';
}
if (shelljs.ls(imagePath).length === 0) {
this.hasWarning = true;
this.warningMessage += ` ${chalk.cyan(this.dockerBuildCommand)} in ${this.destinationPath(this.directoryPath + opts.cwd)}\n`;
}
}
/**
 * Check that a Docker image exists (using {@link #checkImageExist}) and build it with the appropriate command.
 * @param opts
 * @property cwd JHipster app directory. default is './'
 * @property forceBuild flag to force the image build.
 * @property appConfig Configuration for the current application
 * @returns {Promise.<TResult>|Promise}
 */
function checkAndBuildImages(opts = { cwd: './', forceBuild: false, appConfig: { buildTool: GRADLE } }) {
if (this.abort) return null;
checkImageExist.call(this, opts);
const pwd = shelljs.pwd();
shelljs.cd(opts.cwd);
return new Promise((resolve, reject) =>
dockerCLI.command(`${opts.cwd}${this.dockerBuildCommand}`, err => {
shelljs.cd(pwd);
if (err) {
this.log.error(chalk.red(`The Docker image build failed. ${err}`));
this.abort = true;
reject();
}
resolve();
})
);
}
|
apache-2.0
|
PoGoDex/PoGoDex
|
src/js/worker/patchMonsterDetail.js
|
923
|
require('regenerator/runtime');
var renderDetailView = require('../shared/renderMonsterDetailView');
var getMonsterDarkTheme = require('../shared/monster/getMonsterDarkTheme');
var dbService = require('./databaseService');
var diff = require('virtual-dom/diff');
var Stopwatch = require('../shared/util/stopwatch');
var fromJson = require('vdom-as-json/fromJson');
var lastDetailView = fromJson(require('../shared/prerendered/bulbasaur'));
module.exports = async nationalId => {
var stopwatch = new Stopwatch('patchMonsterDetail()');
stopwatch.start('getFullMonsterDataById()');
var fullMonsterData = await dbService.getFullMonsterDataById(nationalId);
stopwatch.time('renderDetailView()');
var newDetailView = renderDetailView(fullMonsterData);
stopwatch.time('diff()');
var patch = diff(lastDetailView, newDetailView);
lastDetailView = newDetailView;
stopwatch.totalTime();
return {patch};
};
|
apache-2.0
|
Artezzyan/basis-hadoop
|
hadoop-dfs/src/main/java/com/artezzyan/technology/hdfs/input/NativeAPI.java
|
1337
|
package com.artezzyan.technology.hdfs.input;
import org.apache.hadoop.fs.FsUrlStreamHandlerFactory;
import org.apache.hadoop.io.IOUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URL;
/**
* Java Native API
*
* @author yan.guo
* @version 1.0
*/
public class NativeAPI {
private final static Logger _logger = LogManager.getLogger(NativeAPI.class);
static {
        // Register the FsUrlStreamHandlerFactory so that java.net.URL understands hdfs:// URLs
URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory());
}
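    // Example invocation (the jar name, namenode host, and path below are illustrative placeholders):
    //   hadoop jar hadoop-dfs.jar com.artezzyan.technology.hdfs.input.NativeAPI hdfs://namenode:8020/user/foo/core-site.xml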
public static void main(String[] args) {
InputStream is = null;
OutputStream os = null;
try {
URL url = new URL(args[0]);
// open input stream
is = url.openStream();
// copy bytes to stdout
os = System.out;
// copy bytes to file
// os = new FileOutputStream("W:\\Workspace\\Study\\Data Technology\\Hadoop\\hadoop-dfs\\src\\main\\script\\yarn-site.xml");
IOUtils.copyBytes(is, os, 4096, true);
} catch (IOException ioe) {
_logger.error("An error has occurred while reading data from Hadoop Distributed File System! ", ioe);
}
}
}
|
apache-2.0
|
consulo/consulo-apache-thrift
|
src/main/java/com/intellij/plugins/thrift/ThriftIconProvider.java
|
1851
|
package com.intellij.plugins.thrift;
import javax.annotation.Nonnull;
import com.intellij.icons.AllIcons;
import com.intellij.plugins.thrift.lang.psi.ThriftConst;
import com.intellij.plugins.thrift.lang.psi.ThriftEnum;
import com.intellij.plugins.thrift.lang.psi.ThriftException;
import com.intellij.plugins.thrift.lang.psi.ThriftField;
import com.intellij.plugins.thrift.lang.psi.ThriftFunction;
import com.intellij.plugins.thrift.lang.psi.ThriftService;
import com.intellij.plugins.thrift.lang.psi.ThriftStruct;
import com.intellij.plugins.thrift.lang.psi.ThriftTypedef;
import com.intellij.plugins.thrift.lang.psi.ThriftUnion;
import com.intellij.psi.PsiElement;
import consulo.ide.IconDescriptor;
import consulo.ide.IconDescriptorUpdater;
public class ThriftIconProvider implements IconDescriptorUpdater
{
@Override
public void updateIcon(@Nonnull IconDescriptor iconDescriptor, @Nonnull PsiElement element, int i)
{
if(element instanceof ThriftConst)
{
iconDescriptor.setMainIcon(AllIcons.Nodes.Value);
}
if(element instanceof ThriftEnum)
{
iconDescriptor.setMainIcon(AllIcons.Nodes.Enum);
}
if(element instanceof ThriftException)
{
iconDescriptor.setMainIcon(AllIcons.Nodes.ExceptionClass);
}
if(element instanceof ThriftService)
{
iconDescriptor.setMainIcon(AllIcons.Nodes.Static);
}
if(element instanceof ThriftStruct)
{
iconDescriptor.setMainIcon(AllIcons.Nodes.Struct);
}
if(element instanceof ThriftTypedef)
{
iconDescriptor.setMainIcon(AllIcons.Nodes.TypeAlias);
}
if(element instanceof ThriftUnion)
{
iconDescriptor.setMainIcon(AllIcons.Nodes.AnonymousClass);
}
if(element instanceof ThriftField)
{
iconDescriptor.setMainIcon(AllIcons.Nodes.Field);
}
if(element instanceof ThriftFunction)
{
iconDescriptor.setMainIcon(AllIcons.Nodes.Function);
}
}
}
|
apache-2.0
|
jingax10/kubernetes
|
cmd/kubeadm/app/phases/certs/certs.go
|
16160
|
/*
Copyright 2016 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package certs
import (
"crypto/rsa"
"crypto/x509"
"fmt"
"os"
"path/filepath"
"github.com/pkg/errors"
certutil "k8s.io/client-go/util/cert"
"k8s.io/klog"
kubeadmapi "k8s.io/kubernetes/cmd/kubeadm/app/apis/kubeadm"
kubeadmconstants "k8s.io/kubernetes/cmd/kubeadm/app/constants"
"k8s.io/kubernetes/cmd/kubeadm/app/util/pkiutil"
)
// CreatePKIAssets will create and write to disk all PKI assets necessary to establish the control plane.
// If the PKI assets already exist in the target folder, they are used only if evaluated equal; otherwise an error is returned.
func CreatePKIAssets(cfg *kubeadmapi.InitConfiguration) error {
klog.V(1).Infoln("creating PKI assets")
// This structure cannot handle multilevel CA hierarchies.
// This isn't a problem right now, but may become one in the future.
var certList Certificates
if cfg.Etcd.Local == nil {
certList = GetCertsWithoutEtcd()
} else {
certList = GetDefaultCertList()
}
certTree, err := certList.AsMap().CertTree()
if err != nil {
return err
}
if err := certTree.CreateTree(cfg); err != nil {
return errors.Wrap(err, "error creating PKI assets")
}
fmt.Printf("[certs] valid certificates and keys now exist in %q\n", cfg.CertificatesDir)
// Service accounts are not x509 certs, so handled separately
if err := CreateServiceAccountKeyAndPublicKeyFiles(cfg); err != nil {
return err
}
return nil
}
// CreateServiceAccountKeyAndPublicKeyFiles creates new public/private key files for signing service account users.
// If the sa public/private key files already exist in the target folder, they are used only if evaluated equal; otherwise an error is returned.
func CreateServiceAccountKeyAndPublicKeyFiles(cfg *kubeadmapi.InitConfiguration) error {
klog.V(1).Infoln("creating a new public/private key files for signing service account users")
saSigningKey, err := NewServiceAccountSigningKey()
if err != nil {
return err
}
return writeKeyFilesIfNotExist(
cfg.CertificatesDir,
kubeadmconstants.ServiceAccountKeyBaseName,
saSigningKey,
)
}
// NewServiceAccountSigningKey generates a public/private key pair for signing service account tokens.
func NewServiceAccountSigningKey() (*rsa.PrivateKey, error) {
// The key does NOT exist, let's generate it now
saSigningKey, err := certutil.NewPrivateKey()
if err != nil {
return nil, errors.Wrap(err, "failure while creating service account token signing key")
}
return saSigningKey, nil
}
// NewCACertAndKey will generate a self signed CA.
func NewCACertAndKey(certSpec *certutil.Config) (*x509.Certificate, *rsa.PrivateKey, error) {
caCert, caKey, err := pkiutil.NewCertificateAuthority(certSpec)
if err != nil {
return nil, nil, errors.Wrap(err, "failure while generating CA certificate and key")
}
return caCert, caKey, nil
}
// CreateCACertAndKeyFiles generates and writes out a given certificate authority.
// The certSpec should be one of the variables from this package.
func CreateCACertAndKeyFiles(certSpec *KubeadmCert, cfg *kubeadmapi.InitConfiguration) error {
if certSpec.CAName != "" {
return errors.Errorf("this function should only be used for CAs, but cert %s has CA %s", certSpec.Name, certSpec.CAName)
}
klog.V(1).Infof("creating a new certificate authority for %s", certSpec.Name)
certConfig, err := certSpec.GetConfig(cfg)
if err != nil {
return err
}
caCert, caKey, err := NewCACertAndKey(certConfig)
if err != nil {
return err
}
return writeCertificateAuthorithyFilesIfNotExist(
cfg.CertificatesDir,
certSpec.BaseName,
caCert,
caKey,
)
}
// CreateCertAndKeyFilesWithCA loads the given certificate authority from disk, then generates and writes out the given certificate and key.
// The certSpec and caCertSpec should both be one of the variables from this package.
func CreateCertAndKeyFilesWithCA(certSpec *KubeadmCert, caCertSpec *KubeadmCert, cfg *kubeadmapi.InitConfiguration) error {
if certSpec.CAName != caCertSpec.Name {
return errors.Errorf("expected CAname for %s to be %q, but was %s", certSpec.Name, certSpec.CAName, caCertSpec.Name)
}
caCert, caKey, err := LoadCertificateAuthority(cfg.CertificatesDir, caCertSpec.BaseName)
if err != nil {
return errors.Wrapf(err, "couldn't load CA certificate %s", caCertSpec.Name)
}
return certSpec.CreateFromCA(cfg, caCert, caKey)
}
// LoadCertificateAuthority tries to load a CA in the given directory with the given name.
func LoadCertificateAuthority(pkiDir string, baseName string) (*x509.Certificate, *rsa.PrivateKey, error) {
// Checks if certificate authority exists in the PKI directory
if !pkiutil.CertOrKeyExist(pkiDir, baseName) {
return nil, nil, errors.Errorf("couldn't load %s certificate authority from %s", baseName, pkiDir)
}
// Try to load certificate authority .crt and .key from the PKI directory
caCert, caKey, err := pkiutil.TryLoadCertAndKeyFromDisk(pkiDir, baseName)
if err != nil {
return nil, nil, errors.Wrapf(err, "failure loading %s certificate authority", baseName)
}
// Make sure the loaded CA cert actually is a CA
if !caCert.IsCA {
return nil, nil, errors.Errorf("%s certificate is not a certificate authority", baseName)
}
return caCert, caKey, nil
}
// writeCertificateAuthorithyFilesIfNotExist writes a new certificate authority to the given path.
// If there already is a certificate file at the given path, kubeadm tries to load it and checks whether the values in the
// existing and the expected certificate are equal. If they are, kubeadm just skips writing the file as it's up-to-date;
// otherwise this function returns an error.
func writeCertificateAuthorithyFilesIfNotExist(pkiDir string, baseName string, caCert *x509.Certificate, caKey *rsa.PrivateKey) error {
// If cert or key exists, we should try to load them
if pkiutil.CertOrKeyExist(pkiDir, baseName) {
// Try to load .crt and .key from the PKI directory
caCert, _, err := pkiutil.TryLoadCertAndKeyFromDisk(pkiDir, baseName)
if err != nil {
return errors.Wrapf(err, "failure loading %s certificate", baseName)
}
// Check if the existing cert is a CA
if !caCert.IsCA {
return errors.Errorf("certificate %s is not a CA", baseName)
}
// kubeadm doesn't validate the existing certificate Authority more than this;
// Basically, if we find a certificate file with the same path; and it is a CA
// kubeadm thinks those files are equal and doesn't bother writing a new file
fmt.Printf("[certs] Using the existing %q certificate and key\n", baseName)
} else {
// Write .crt and .key files to disk
fmt.Printf("[certs] Generating %q certificate and key\n", baseName)
if err := pkiutil.WriteCertAndKey(pkiDir, baseName, caCert, caKey); err != nil {
return errors.Wrapf(err, "failure while saving %s certificate and key", baseName)
}
}
return nil
}
// writeCertificateFilesIfNotExist writes a new certificate to the given path.
// If there already is a certificate file at the given path, kubeadm tries to load it and checks whether the values in the
// existing and the expected certificate are equal. If they are, kubeadm just skips writing the file as it's up-to-date;
// otherwise this function returns an error.
func writeCertificateFilesIfNotExist(pkiDir string, baseName string, signingCert *x509.Certificate, cert *x509.Certificate, key *rsa.PrivateKey) error {
// Checks if the signed certificate exists in the PKI directory
if pkiutil.CertOrKeyExist(pkiDir, baseName) {
// Try to load signed certificate .crt and .key from the PKI directory
signedCert, _, err := pkiutil.TryLoadCertAndKeyFromDisk(pkiDir, baseName)
if err != nil {
return errors.Wrapf(err, "failure loading %s certificate", baseName)
}
// Check if the existing cert is signed by the given CA
if err := signedCert.CheckSignatureFrom(signingCert); err != nil {
return errors.Errorf("certificate %s is not signed by corresponding CA", baseName)
}
// kubeadm doesn't validate the existing certificate more than this;
// Basically, if we find a certificate file with the same path; and it is signed by
// the expected certificate authority, kubeadm thinks those files are equal and
// doesn't bother writing a new file
fmt.Printf("[certs] Using the existing %q certificate and key\n", baseName)
} else {
// Write .crt and .key files to disk
fmt.Printf("[certs] Generating %q certificate and key\n", baseName)
if err := pkiutil.WriteCertAndKey(pkiDir, baseName, cert, key); err != nil {
return errors.Wrapf(err, "failure while saving %s certificate and key", baseName)
}
if pkiutil.HasServerAuth(cert) {
fmt.Printf("[certs] %s serving cert is signed for DNS names %v and IPs %v\n", baseName, cert.DNSNames, cert.IPAddresses)
}
}
return nil
}
// writeKeyFilesIfNotExist writes a new key to the given path.
// If there already is a key file at the given path, kubeadm tries to load it and checks whether the values in the
// existing and the expected key are equal. If they are, kubeadm just skips writing the file as it's up-to-date;
// otherwise this function returns an error.
func writeKeyFilesIfNotExist(pkiDir string, baseName string, key *rsa.PrivateKey) error {
// Checks if the key exists in the PKI directory
if pkiutil.CertOrKeyExist(pkiDir, baseName) {
// Try to load .key from the PKI directory
_, err := pkiutil.TryLoadKeyFromDisk(pkiDir, baseName)
if err != nil {
return errors.Wrapf(err, "%s key existed but it could not be loaded properly", baseName)
}
// kubeadm doesn't validate the existing certificate key more than this;
// Basically, if we find a key file with the same path kubeadm thinks those files
// are equal and doesn't bother writing a new file
fmt.Printf("[certs] Using the existing %q key\n", baseName)
} else {
// Write .key and .pub files to disk
fmt.Printf("[certs] Generating %q key and public key\n", baseName)
if err := pkiutil.WriteKey(pkiDir, baseName, key); err != nil {
return errors.Wrapf(err, "failure while saving %s key", baseName)
}
if err := pkiutil.WritePublicKey(pkiDir, baseName, &key.PublicKey); err != nil {
return errors.Wrapf(err, "failure while saving %s public key", baseName)
}
}
return nil
}
type certKeyLocation struct {
pkiDir string
caBaseName string
baseName string
uxName string
}
// SharedCertificateExists verifies whether the shared certificates exist - the certificates that must be
// equal across masters: ca.key, ca.crt, sa.key, sa.pub + etcd/ca.key, etcd/ca.crt if local/stacked etcd
func SharedCertificateExists(cfg *kubeadmapi.InitConfiguration) (bool, error) {
if err := validateCACertAndKey(certKeyLocation{cfg.CertificatesDir, kubeadmconstants.CACertAndKeyBaseName, "", "CA"}); err != nil {
return false, err
}
if err := validatePrivatePublicKey(certKeyLocation{cfg.CertificatesDir, "", kubeadmconstants.ServiceAccountKeyBaseName, "service account"}); err != nil {
return false, err
}
if err := validateCACertAndKey(certKeyLocation{cfg.CertificatesDir, kubeadmconstants.FrontProxyCACertAndKeyBaseName, "", "front-proxy CA"}); err != nil {
return false, err
}
// in case of local/stacked etcd
if cfg.Etcd.External == nil {
if err := validateCACertAndKey(certKeyLocation{cfg.CertificatesDir, kubeadmconstants.EtcdCACertAndKeyBaseName, "", "etcd CA"}); err != nil {
return false, err
}
}
return true, nil
}
// UsingExternalCA determines whether the user is relying on an external CA. We currently implicitly determine this is the case
// when both the CA Cert and the front proxy CA Cert are present but the CA Key and front proxy CA Key are not.
// This allows us to, e.g., skip generating certs or not start the csr signing controller.
func UsingExternalCA(cfg *kubeadmapi.InitConfiguration) (bool, error) {
if err := validateCACert(certKeyLocation{cfg.CertificatesDir, kubeadmconstants.CACertAndKeyBaseName, "", "CA"}); err != nil {
return false, err
}
caKeyPath := filepath.Join(cfg.CertificatesDir, kubeadmconstants.CAKeyName)
if _, err := os.Stat(caKeyPath); !os.IsNotExist(err) {
return false, errors.Errorf("%s exists", kubeadmconstants.CAKeyName)
}
if err := validateSignedCert(certKeyLocation{cfg.CertificatesDir, kubeadmconstants.CACertAndKeyBaseName, kubeadmconstants.APIServerCertAndKeyBaseName, "API server"}); err != nil {
return false, err
}
if err := validateSignedCert(certKeyLocation{cfg.CertificatesDir, kubeadmconstants.CACertAndKeyBaseName, kubeadmconstants.APIServerKubeletClientCertAndKeyBaseName, "API server kubelet client"}); err != nil {
return false, err
}
if err := validatePrivatePublicKey(certKeyLocation{cfg.CertificatesDir, "", kubeadmconstants.ServiceAccountKeyBaseName, "service account"}); err != nil {
return false, err
}
if err := validateCACert(certKeyLocation{cfg.CertificatesDir, kubeadmconstants.FrontProxyCACertAndKeyBaseName, "", "front-proxy CA"}); err != nil {
return false, err
}
frontProxyCAKeyPath := filepath.Join(cfg.CertificatesDir, kubeadmconstants.FrontProxyCAKeyName)
if _, err := os.Stat(frontProxyCAKeyPath); !os.IsNotExist(err) {
return false, errors.Errorf("%s exists", kubeadmconstants.FrontProxyCAKeyName)
}
if err := validateSignedCert(certKeyLocation{cfg.CertificatesDir, kubeadmconstants.FrontProxyCACertAndKeyBaseName, kubeadmconstants.FrontProxyClientCertAndKeyBaseName, "front-proxy client"}); err != nil {
return false, err
}
return true, nil
}
// validateCACert tries to load a x509 certificate from pkiDir and validates that it is a CA
func validateCACert(l certKeyLocation) error {
// Check CA Cert
caCert, err := pkiutil.TryLoadCertFromDisk(l.pkiDir, l.caBaseName)
if err != nil {
return errors.Wrapf(err, "failure loading certificate for %s", l.uxName)
}
// Check if cert is a CA
if !caCert.IsCA {
return errors.Errorf("certificate %s is not a CA", l.uxName)
}
return nil
}
// validateCACertAndKey tries to load a x509 certificate and private key from pkiDir,
// and validates that the cert is a CA
func validateCACertAndKey(l certKeyLocation) error {
if err := validateCACert(l); err != nil {
return err
}
_, err := pkiutil.TryLoadKeyFromDisk(l.pkiDir, l.caBaseName)
if err != nil {
return errors.Wrapf(err, "failure loading key for %s", l.uxName)
}
return nil
}
// validateSignedCert tries to load a x509 certificate and private key from pkiDir and validates
// that the cert is signed by a given CA
func validateSignedCert(l certKeyLocation) error {
// Try to load CA
caCert, err := pkiutil.TryLoadCertFromDisk(l.pkiDir, l.caBaseName)
if err != nil {
return errors.Wrapf(err, "failure loading certificate authority for %s", l.uxName)
}
return validateSignedCertWithCA(l, caCert)
}
// validateSignedCertWithCA tries to load a certificate and validate it with the given caCert
func validateSignedCertWithCA(l certKeyLocation, caCert *x509.Certificate) error {
// Try to load key and signed certificate
signedCert, _, err := pkiutil.TryLoadCertAndKeyFromDisk(l.pkiDir, l.baseName)
if err != nil {
return errors.Wrapf(err, "failure loading certificate for %s", l.uxName)
}
// Check if the cert is signed by the CA
if err := signedCert.CheckSignatureFrom(caCert); err != nil {
return errors.Wrapf(err, "certificate %s is not signed by corresponding CA", l.uxName)
}
return nil
}
// validatePrivatePublicKey tries to load a private key from pkiDir
func validatePrivatePublicKey(l certKeyLocation) error {
// Try to load key
_, _, err := pkiutil.TryLoadPrivatePublicKeyFromDisk(l.pkiDir, l.baseName)
if err != nil {
return errors.Wrapf(err, "failure loading key for %s", l.uxName)
}
return nil
}
|
apache-2.0
|
yurloc/assertj-core
|
src/test/java/org/assertj/core/api/integer_/IntegerAssert_isStrictlyBetween_Integers_Test.java
|
1308
|
/*
* Created on May 5, 2013
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS"
* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language
* governing permissions and limitations under the License.
*
* Copyright @2010-2011 the original author or authors.
*/
package org.assertj.core.api.integer_;
import org.assertj.core.api.IntegerAssert;
import org.assertj.core.api.IntegerAssertBaseTest;
import static org.mockito.Mockito.verify;
/**
* Tests for <code>{@link IntegerAssert#isStrictlyBetween(Integer, Integer)}</code>.
*
* @author William Delanoue
*/
public class IntegerAssert_isStrictlyBetween_Integers_Test extends IntegerAssertBaseTest {
@Override
protected IntegerAssert invoke_api_method() {
return assertions.isStrictlyBetween(6, 8);
}
@Override
protected void verify_internal_effects() {
verify(integers).assertIsStrictlyBetween(getInfo(assertions), getActual(assertions), 6, 8);
}
}
|
apache-2.0
|
eemirtekin/Sakai-10.6-TR
|
metaobj/metaobj-impl/api-impl/src/java/org/sakaiproject/metaobj/shared/mgt/home/XmlElementHome.java
|
12126
|
/**********************************************************************************
* $URL: https://source.sakaiproject.org/svn/metaobj/tags/sakai-10.6/metaobj-impl/api-impl/src/java/org/sakaiproject/metaobj/shared/mgt/home/XmlElementHome.java $
* $Id: XmlElementHome.java 120216 2013-02-18 19:44:04Z ottenhoff@longsight.com $
***********************************************************************************
*
* Copyright (c) 2004, 2005, 2006, 2007, 2008 The Sakai Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
**********************************************************************************/
package org.sakaiproject.metaobj.shared.mgt.home;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.jdom.Document;
import org.jdom.Element;
import org.jdom.input.SAXBuilder;
import org.jdom.output.Format;
import org.jdom.output.XMLOutputter;
import org.sakaiproject.metaobj.shared.mgt.IdManager;
import org.sakaiproject.metaobj.shared.model.Agent;
import org.sakaiproject.metaobj.shared.model.Artifact;
import org.sakaiproject.metaobj.shared.model.FinderException;
import org.sakaiproject.metaobj.shared.model.Id;
import org.sakaiproject.metaobj.shared.model.OspException;
import org.sakaiproject.metaobj.shared.model.PersistenceException;
import org.sakaiproject.metaobj.shared.model.StructuredArtifact;
import org.sakaiproject.metaobj.shared.model.Type;
import org.sakaiproject.metaobj.utils.xml.SchemaFactory;
import org.sakaiproject.metaobj.utils.xml.SchemaInvalidException;
import org.sakaiproject.metaobj.utils.xml.SchemaNode;
import org.sakaiproject.tool.cover.ToolManager;
import org.sakaiproject.content.api.ContentResource;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.context.ResourceLoaderAware;
import org.springframework.core.io.ResourceLoader;
/**
 * Structured artifact home that persists artifacts as XML files on disk and
 * validates them against an XSD schema.
 * Created by John Ellis on Apr 9, 2004.
 */
public class XmlElementHome implements StructuredArtifactHomeInterface, InitializingBean, ResourceLoaderAware {
private SchemaNode schema = null;
private String rootNode = null;
private Date schemaDate = null;
protected final Log logger = LogFactory.getLog(getClass());
private File homeDirectory = null;
private String schemaFileName;
private Type type = null;
private String typeId = null;
private IdManager idManager = null;
public static final String XSD_DIR = "xsd";
public static final String XML_HOME_PATH = "xmlHome";
private ResourceLoader resourceLoader;
/**
* help information supplied to the user when creating an instance of this xmlelement
*/
private String instruction;
public XmlElementHome() {
}
public XmlElementHome(String rootNode) {
this.rootNode = rootNode;
}
public SchemaNode getSchema() {
if (schema == null) {
File schemaFile = getSchemaFile(schemaFileName);
schema = SchemaFactory.getInstance().getSchema(schemaFile);
schemaDate = new Date(schemaFile.lastModified());
}
return schema;
}
public String getDocumentRoot() {
return null;
}
protected File getSchemaFile(String schemaFileName) {
return new File(this.pathToWebInf() + File.separator + XSD_DIR + File.separator + schemaFileName);
}
public void setSchema(SchemaNode schema) {
this.schema = schema;
}
public Artifact store(Artifact object) throws PersistenceException {
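      // A null id means the artifact has not been persisted yet, so a fresh file is created;
      // otherwise the existing file for that id is deleted and rewritten.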
String id = (String) object.getId().getValue();
File objectFile = null;
if (id == null) {
try {
objectFile = File.createTempFile(rootNode, ".xml", homeDirectory);
}
catch (IOException e) {
logger.error("", e);
throw new OspException(e);
}
}
else {
objectFile = new File(homeDirectory, id);
if (objectFile.exists()) {
objectFile.delete();
}
}
XMLOutputter outputter = new XMLOutputter();
StructuredArtifact xmlObject = (StructuredArtifact) object;
xmlObject.setId(objectFile.getName());
FileOutputStream outstream = null;
try {
outstream = new FileOutputStream(objectFile);
Format format = Format.getPrettyFormat();
outputter.setFormat(format);
outputter.output(xmlObject.getBaseElement(), outstream);
}
catch (IOException e) {
logger.error("", e);
throw new OspException(e);
}
finally {
try {
if (outstream != null)
outstream.close();
}
catch (Exception e2) {
logger.warn("Problem closing stream: ", e2);
}
}
return object;
}
public void remove(Artifact object) {
File objectFile = null;
if (object != null && object.getId() != null)
objectFile = new File(homeDirectory, object.getId().getValue());
boolean deleted = false;
if (objectFile != null)
deleted = objectFile.delete();
if (!deleted)
logger.warn("Could not delete file: " + objectFile.getPath());
}
public Artifact store(String displayName, String contentType, Type type,
InputStream in) throws PersistenceException {
// todo complete
return null;
}
public Artifact update(Artifact object, InputStream in) throws PersistenceException {
return null;//todo
}
public Type getType() {
return type;
}
public String getExternalType() {
if (getSchema() == null) {
return "";
}
return getSchema().getTargetNamespace().getURI() + "?" + getRootNode();
}
public void setType(Type type) {
this.type = type;
}
public Artifact load(Id id) throws PersistenceException {
return load(id.getValue());
}
public StructuredArtifact load(ContentResource resource) {
return null;
}
public StructuredArtifact load(ContentResource resource, Id artifactId) {
return null;
}
protected Artifact load(String id) throws PersistenceException {
File objectFile = new File(homeDirectory, id);
SAXBuilder builder = new SAXBuilder();
builder.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true); // SAK-23245
try {
Document doc = builder.build(objectFile);
StructuredArtifact xmlObject =
new StructuredArtifact(doc.getRootElement(), getSchema().getChild(rootNode));
xmlObject.setId(id);
xmlObject.setHome(this);
return xmlObject;
}
catch (Exception e) {
throw new SchemaInvalidException(e);
}
}
public Artifact createInstance() {
StructuredArtifact instance = new StructuredArtifact(rootNode, getSchema().getChild(rootNode));
prepareInstance(instance);
return instance;
}
public void prepareInstance(Artifact object) {
object.setHome(this);
StructuredArtifact xmlObject = (StructuredArtifact) object;
xmlObject.getBaseElement().setName(rootNode);
}
public Artifact createSample() {
return createInstance();
}
public Collection findByOwner(Agent owner) throws FinderException {
// really just list all here for now...
String[] files = homeDirectory.list();
List returnedList = new ArrayList();
for (int i = 0; i < files.length; i++) {
try {
returnedList.add(load(files[i]));
}
catch (PersistenceException e) {
throw new FinderException();
}
}
return returnedList;
}
public boolean isInstance(Artifact testObject) {
return (testObject instanceof StructuredArtifact);
}
public void refresh() {
schema = null;
getSchema();
}
public String getExternalUri(Id artifactId, String name) {
//http://johnellis.rsmart.com:8080/osp/member/viewNode.osp?pid=1107451588272-643&nodeId=48D2AFE5A98453AD673579E14405607C
return "viewNode.osp?pid=" + ToolManager.getCurrentPlacement().getId() +
"&nodeId=" + artifactId.getValue();
}
public InputStream getStream(Id artifactId) {
// todo ... implement this
return null;
}
public boolean isSystemOnly() {
return false;
}
public Class getInterface() {
return StructuredArtifactHomeInterface.class;
}
public String getRootNode() {
return rootNode;
}
public void setRootNode(String rootNode) {
this.rootNode = rootNode;
}
public Date getModified() {
return schemaDate;
}
public void setModified(Date schemaDate) {
this.schemaDate = schemaDate;
}
public String getSchemaFileName() {
return schemaFileName;
}
public void setSchemaFileName(String schemaFileName) {
this.schemaFileName = schemaFileName;
}
/**
* Invoked by a BeanFactory after it has set all bean properties supplied
* (and satisfied BeanFactoryAware and ApplicationContextAware).
* <p>This method allows the bean instance to perform initialization only
* possible when all bean properties have been set and to throw an
* exception in the event of misconfiguration.
*
* @throws SchemaInvalidException in the event of misconfiguration (such
* as failure to set an essential property) or if initialization fails.
*/
public void afterPropertiesSet() throws SchemaInvalidException {
homeDirectory = new File(pathToWebInf(), XML_HOME_PATH + File.separator + rootNode);
if (!homeDirectory.exists()) {
if (!homeDirectory.mkdirs()) {
logger.warn("Couldn't create homeDirectory: " + homeDirectory.getPath());
}
}
getSchema();
getType().setId(getIdManager().getId(getTypeId()));
}
protected String pathToWebInf() {
try {
return resourceLoader.getResource("WEB-INF").getFile().getCanonicalPath();
}
catch (IOException e) {
throw new RuntimeException(e);
}
}
public IdManager getIdManager() {
return idManager;
}
public void setIdManager(IdManager idManager) {
this.idManager = idManager;
}
public String getTypeId() {
return typeId;
}
public byte[] getBytes(StructuredArtifact artifact) {
return new byte[0];
}
public void setTypeId(String typeId) {
this.typeId = typeId;
}
public void setResourceLoader(ResourceLoader resourceLoader) {
this.resourceLoader = resourceLoader;
}
public String getInstruction() {
return instruction;
}
public void setInstruction(String instruction) {
this.instruction = instruction;
}
public SchemaNode getRootSchema() {
return getSchema().getChild(getRootNode());
}
public String getSiteId() {
return null;
}
public Artifact cloneArtifact(Artifact copy, String newName) throws PersistenceException {
return null;
}
public Element getArtifactAsXml(Artifact art) {
return null;
}
public Element getArtifactAsXml(Artifact artifact, String container, String site, String context) {
return null;
}
public StructuredArtifactHomeInterface getParentHome() {
return this;
}
}
|
apache-2.0
|
smjurcak/csm
|
csmserver/restful/api_host.py
|
13078
|
# =============================================================================
# Copyright (c) 2016, Cisco Systems, Inc
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
# THE POSSIBILITY OF SUCH DAMAGE.
# =============================================================================
from flask import jsonify
from flask import g
from sqlalchemy import and_
from database import DBSession
from common import get_host
from common import create_or_update_host
from common import delete_host
from common import get_region
from common import get_region_id_to_name_dict
from common import get_jump_host_id_to_name_dict
from common import get_software_profile_id_to_name_dict
from common import get_region_name_to_id_dict
from common import get_jump_host_name_to_id_dict
from common import get_software_profile_name_to_id_dict
from models import Host
from constants import ConnectionType
from utils import is_empty
from utils import get_acceptable_string
from api_utils import get_total_pages
from api_utils import validate_url_parameters
from api_utils import failed_response
from api_utils import check_none
from api_utils import convert_json_request_to_list
from api_utils import validate_required_keys_in_dict
from api_utils import convert_value_to_list
from api_utils import validate_acceptable_keys_in_dict
from api_constants import HTTP_OK
from api_constants import HTTP_MULTI_STATUS_ERROR
from api_constants import RECORDS_PER_PAGE
from api_constants import RESPONSE_ENVELOPE
from api_constants import RESPONSE_STATUS
from api_constants import RESPONSE_STATUS_MESSAGE
from api_constants import APIStatus
# Acceptable JSON keys
KEY_HOSTNAME = 'hostname'
KEY_REGION = 'region'
KEY_LOCATION = 'location'
KEY_CONNECTION_TYPE = 'connection_type'
KEY_ROLES = 'roles'
KEY_TS_OR_IP = 'ts_or_ip'
KEY_PORT_NUMBER = 'port_number'
KEY_USERNAME = 'username'
KEY_PASSWORD = 'password'
KEY_ENABLE_PASSWORD = 'enable_password'
KEY_JUMP_HOST = 'jump_host'
KEY_SOFTWARE_PROFILE = 'software_profile'
KEY_FAMILY = 'family'
KEY_CHASSIS = 'chassis'
KEY_SOFTWARE_PLATFORM = 'software_platform'
KEY_SOFTWARE_VERSION = 'software_version'
KEY_OS_TYPE = 'os_type'
def api_create_hosts(request):
"""
POST: http://localhost:5000/api/v1/hosts
BODY:
[ {'hostname': 'My Host 1',
'region': 'SJ Labs',
'roles': 'PE',
'connection_type': 'telnet',
           'ts_or_ip': '172.28.98.2',
'username': 'cisco',
'password': 'cisco',
'enable_password': 'cisco',
'location': 'building 20'
} ]
RETURN:
{"api_response": {
"host_list": [ {"status": "SUCCESS", "hostname": "My Host 1"},
{"status": "SUCCESS", "hostname": "My Host 2"} ]
}
}
"""
rows = []
db_session = DBSession()
error_found = False
# Pre-fetched information to speed up bulk host creation.
region_dict = get_region_name_to_id_dict(db_session)
jump_host_dict = get_jump_host_name_to_id_dict(db_session)
software_profile_dict = get_software_profile_name_to_id_dict(db_session)
json_list = convert_json_request_to_list(request)
for data in json_list:
row = dict()
try:
validate_required_keys_in_dict(data, [KEY_HOSTNAME])
hostname = get_acceptable_string(data.get(KEY_HOSTNAME))
row[KEY_HOSTNAME] = hostname
if hostname is None or len(hostname) == 0:
raise ValueError("'{}' is an invalid hostname.".format(data.get(KEY_HOSTNAME)))
validate_acceptable_keys_in_dict(data, [KEY_HOSTNAME, KEY_REGION, KEY_LOCATION, KEY_ROLES,
KEY_SOFTWARE_PROFILE, KEY_CONNECTION_TYPE, KEY_TS_OR_IP,
KEY_PORT_NUMBER, KEY_USERNAME, KEY_PASSWORD,
KEY_ENABLE_PASSWORD, KEY_JUMP_HOST])
host = get_host(db_session, hostname)
if host is None:
# These are the required fields for a new host creation.
validate_required_keys_in_dict(data, [KEY_REGION, KEY_CONNECTION_TYPE, KEY_TS_OR_IP])
value = get_id_from_value('Region', region_dict, data, KEY_REGION)
region_id = value if value is not None else \
(None if host is None else host.region_id)
value = get_id_from_value('Jump host', jump_host_dict, data, KEY_JUMP_HOST)
jump_host_id = value if value is not None else \
(None if host is None else host.connection_param[0].jump_host_id)
value = get_id_from_value('Software profile', software_profile_dict, data, KEY_SOFTWARE_PROFILE)
software_profile_id = value if value is not None else \
(None if host is None else host.software_profile_id)
connection_type = data.get(KEY_CONNECTION_TYPE)
if connection_type is not None:
if connection_type not in [ConnectionType.SSH, ConnectionType.TELNET]:
raise ValueError('Connection Type must be either telnet or ssh')
else:
connection_type = None if host is None else host.connection_param[0].connection_type
roles = convert_value_to_list(data, KEY_ROLES)
roles = ','.join(roles) if roles is not None else \
(None if host is None else host.roles)
host_or_ip = convert_value_to_list(data, KEY_TS_OR_IP)
host_or_ip = ','.join(host_or_ip) if host_or_ip is not None else \
(None if host is None else host.connection_param[0].host_or_ip)
port_number = convert_value_to_list(data, KEY_PORT_NUMBER)
port_number = ','.join(str(p) for p in port_number) if port_number is not None else \
(None if host is None else host.connection_param[0].port_number)
location = data.get(KEY_LOCATION) if data.get(KEY_LOCATION ) is not None else \
(None if host is None else host.location)
username = data.get(KEY_USERNAME) if data.get(KEY_USERNAME) is not None else \
(None if host is None else host.connection_param[0].username)
password = data.get(KEY_PASSWORD) if data.get(KEY_PASSWORD) is not None else \
(None if host is None else host.connection_param[0].password)
enable_password = data.get(KEY_ENABLE_PASSWORD) if data.get(KEY_ENABLE_PASSWORD) is not None else \
(None if host is None else host.connection_param[0].enable_password)
create_or_update_host(db_session=db_session, hostname=hostname, region_id=region_id,
location=location, roles=roles,
software_profile_id=software_profile_id,
connection_type=connection_type,
host_or_ip=host_or_ip, username=username,
password=password, enable_password=enable_password,
port_number=port_number, jump_host_id=jump_host_id,
created_by=g.api_user.username, host=host)
row[RESPONSE_STATUS] = APIStatus.SUCCESS
except Exception as e:
row[RESPONSE_STATUS] = APIStatus.FAILED
row[RESPONSE_STATUS_MESSAGE] = e.message
error_found = True
rows.append(row)
# end loop
return jsonify(**{RESPONSE_ENVELOPE: {'host_list': rows}}), (HTTP_OK if not error_found else HTTP_MULTI_STATUS_ERROR)
def get_id_from_value(item, dictionary, data, key):
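    """Resolve the display name stored in data[key] to its database id using the
    pre-fetched dictionary; raises ValueError if the name is unknown, and returns
    None when the key is absent."""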
id = None
name = data.get(key)
if name:
id = dictionary.get(name)
if not id:
raise ValueError('{} "{}" does not exist in the database.'.format(item, name))
return id
def api_get_hosts(request):
"""
GET:
http://localhost:5000/api/v1/hosts
http://localhost:5000/api/v1/hosts?hostname=Host_1
http://localhost:5000/api/v1/hosts?region=SJ Labs
http://localhost:5000/api/v1/hosts?region=SJ Labs&page=2
http://localhost:5000/api/v1/hosts?region=SJ%20Labs&family=ASR9K
"""
validate_url_parameters(request, [KEY_HOSTNAME, KEY_REGION, KEY_FAMILY, 'page'])
page = 1
clauses = []
    db_session = DBSession()
hostname = request.args.get(KEY_HOSTNAME)
if hostname:
host = get_host(db_session, hostname)
if host is None:
raise ValueError("Host '{}' does not exist in the database.".format(hostname))
hosts = [host]
else:
try:
page = int(request.args.get('page')) if request.args.get('page') else 1
if page <= 0: page = 1
except Exception:
            return failed_response('page must be a numeric value')
region_name = request.args.get(KEY_REGION)
if region_name:
region = get_region(db_session, region_name)
if region:
clauses.append(Host.region_id == region.id)
else:
return failed_response("Region '{}' does not exist in the database.".format(region_name))
family = request.args.get(KEY_FAMILY)
if family:
clauses.append(Host.family == family)
hosts = get_hosts_by_page(db_session, clauses, page)
region_dict = get_region_id_to_name_dict(db_session)
jump_host_dict = get_jump_host_id_to_name_dict(db_session)
software_profile_dict = get_software_profile_id_to_name_dict(db_session)
rows = []
for host in hosts:
row = dict()
row[KEY_HOSTNAME] = host.hostname
row[KEY_REGION] = check_none(region_dict.get(host.region_id))
row[KEY_ROLES] = [] if is_empty(host.roles) else host.roles.split(',')
connection_param = check_none(host.connection_param[0])
row[KEY_FAMILY] = check_none(host.family)
row[KEY_CHASSIS] = check_none(host.platform)
row[KEY_SOFTWARE_PLATFORM] = check_none(host.software_platform)
row[KEY_SOFTWARE_VERSION] = check_none(host.software_version)
row[KEY_OS_TYPE] = check_none(host.os_type)
row[KEY_LOCATION] = check_none(host.location)
row[KEY_SOFTWARE_PROFILE] = check_none(software_profile_dict.get(host.software_profile_id))
if connection_param:
row[KEY_TS_OR_IP] = [] if is_empty(connection_param.host_or_ip) else connection_param.host_or_ip.split(',')
row[KEY_CONNECTION_TYPE] = check_none(connection_param.connection_type)
row[KEY_USERNAME] = check_none(connection_param.username)
row[KEY_PORT_NUMBER] = [] if is_empty(connection_param.port_number) else connection_param.port_number.split(',')
row[KEY_JUMP_HOST] = check_none(jump_host_dict.get(connection_param.jump_host_id))
rows.append(row)
total_pages = get_total_pages(db_session, Host, clauses)
return jsonify(**{RESPONSE_ENVELOPE: {'host_list': rows}, 'current_page': page, 'total_pages': total_pages})
def api_delete_host(hostname):
"""
:param hostname:
:return:
{
"api_response": {
"status": "SUCCESS",
"hostname": "My Host 2"
}
}
or
{
"api_response": {
"status": "FAILED",
"hostname": "My Host 2",
"status_message": "Unable to locate host My Host 2"
}
}
"""
db_session = DBSession()
delete_host(db_session, hostname)
return jsonify(**{RESPONSE_ENVELOPE: {KEY_HOSTNAME: hostname, RESPONSE_STATUS: APIStatus.SUCCESS}})
def get_hosts_by_page(db_session, clauses, page):
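    """Return one page (RECORDS_PER_PAGE rows) of hosts matching the given filter clauses, ordered by hostname."""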
return db_session.query(Host).filter(and_(*clauses)).\
order_by(Host.hostname.asc()).slice((page - 1) * RECORDS_PER_PAGE, page * RECORDS_PER_PAGE).all()
|
apache-2.0
|
jackylk/incubator-carbondata
|
core/src/main/java/org/apache/carbondata/core/datamap/DataMapJob.java
|
1637
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.core.datamap;
import java.io.Serializable;
import java.util.List;
import org.apache.carbondata.core.indexstore.BlockletDataMapIndexWrapper;
import org.apache.carbondata.core.indexstore.ExtendedBlocklet;
import org.apache.carbondata.core.metadata.schema.table.CarbonTable;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
/**
 * Distributable datamap job to execute the #DistributableDataMapFormat in a cluster. It prunes the
 * datamaps in a distributed manner and returns the final blocklet list.
*/
public interface DataMapJob extends Serializable {
void execute(CarbonTable carbonTable, FileInputFormat<Void, BlockletDataMapIndexWrapper> format);
List<ExtendedBlocklet> execute(DistributableDataMapFormat dataMapFormat);
Long executeCountJob(DistributableDataMapFormat dataMapFormat);
}
|
apache-2.0
|
MrVine/encryptor-rsa
|
utils.go
|
480
|
package encryptor_rsa
import (
"os"
"encoding/base64"
)
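// createFile creates (or truncates) the file at filePath and writes content to it.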
func createFile(filePath, content string) error {
file, err := os.Create(filePath)
if err != nil {
return err
}
defer file.Close()
_, err = file.WriteString(content)
return err
}
func toBase64(in []byte) string {
return base64.StdEncoding.EncodeToString(in)
}
func fromBase64(in string) ([]byte, error) {
b, err := base64.StdEncoding.DecodeString(in)
if err != nil {
return nil, err
}
return b, nil
}
|
apache-2.0
|
baoyongan/java-examples
|
java-jackson/src/test/java/com/baoyongan/javajackson/bean/Item2.java
|
280
|
package com.baoyongan.javajackson.bean;
public class Item2 {
public int id;
public String itemName;
public User owner;
public Item2(int id, String itemName, User owner) {
this.id = id;
this.itemName = itemName;
this.owner = owner;
}
}
|
apache-2.0
|
valzevul/thermostat
|
app/src/main/java/com/example/vladimirsinicyn/thermostat/model/ChangeTester.java
|
987
|
package com.example.vladimirsinicyn.thermostat.model;
public class ChangeTester {
public void test() throws Exception {
/*
DaySchedule dp = new DaySchedule();
TemperatureChange tc1 = new TemperatureChange(LightCondition.NIGHT, new Time(100));
ArrayList<TemperatureChange> listA = new ArrayList<TemperatureChange>();
int time = 100;
for (int i = 0; i < 10; i++, time += 100) {
TemperatureChange tc;
if (i % 2 == 0) {
tc = new TemperatureChange(LightCondition.NIGHT, new Time(time));
dp.addChange(tc);
} else {
tc = new TemperatureChange(LightCondition.DAY, new Time(time));
dp.addChange(tc);
}
listA.add(tc);
}
dp.print();
System.out.println("=====================");
dp.deleteTimeChange(tc1);
dp.print();*/
WeekSchedule ws = new WeekSchedule();
}
}
|
apache-2.0
|
sgudupat/force5
|
src/main/webapp/app/blocks/config/alert.config.js
|
355
|
(function() {
'use strict';
angular
.module('facilitymgmtApp')
.config(alertServiceConfig);
alertServiceConfig.$inject = ['AlertServiceProvider'];
function alertServiceConfig(AlertServiceProvider) {
// set below to true to make alerts look like toast
AlertServiceProvider.showAsToast(false);
}
})();
|
apache-2.0
|
akarnokd/RxJava2Extensions
|
src/main/java/hu/akarnokd/rxjava2/consumers/ObservableConsumers.java
|
5692
|
/*
* Copyright 2016-2019 David Karnok
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package hu.akarnokd.rxjava2.consumers;
import io.reactivex.Observable;
import io.reactivex.disposables.*;
import io.reactivex.functions.*;
import io.reactivex.internal.functions.*;
/**
 * Utility methods for creating and using consumers of {@link io.reactivex.Observable}s.
* @since 0.18.0
*/
public final class ObservableConsumers {
/** Utility class. */
private ObservableConsumers() {
throw new IllegalStateException("No instances!");
}
/**
* Wraps the given onXXX callbacks into a {@code Disposable} {@code Observer},
 * adds it to the given {@code CompositeDisposable} and ensures that if the upstream
 * completes or this particular Disposable is disposed, the Observer is removed
* from the given composite.
* <p>
* The Observer will be removed after the callback for the terminal event has been invoked.
* @param <T> the value type
* @param source the source Observable to subscribe to.
* @param composite the composite Disposable to add and remove the created Disposable Observer
* @param onNext the callback for upstream items
* @return the Disposable that allows disposing the particular subscription.
*/
public static <T> Disposable subscribeAutoDispose(
Observable<T> source,
CompositeDisposable composite,
Consumer<? super T> onNext) {
ObjectHelper.requireNonNull(source, "source is null");
ObjectHelper.requireNonNull(composite, "composite is null");
ObjectHelper.requireNonNull(onNext, "onNext is null");
DisposableAutoReleaseObserver<T> observer = new DisposableAutoReleaseObserver<T>(
composite, onNext, null, Functions.EMPTY_ACTION);
composite.add(observer);
source.subscribe(observer);
return observer;
}
/**
* Wraps the given onXXX callbacks into a {@code Disposable} {@code Observer},
 * adds it to the given {@code CompositeDisposable} and ensures that if the upstream
 * completes or this particular Disposable is disposed, the Observer is removed
* from the given composite.
* <p>
* The Observer will be removed after the callback for the terminal event has been invoked.
* @param <T> the value type
* @param source the source Observable to subscribe to.
* @param composite the composite Disposable to add and remove the created Disposable Observer
* @param onNext the callback for upstream items
* @param onError the callback for an upstream error if any
* @return the Disposable that allows disposing the particular subscription.
*/
public static <T> Disposable subscribeAutoDispose(
Observable<T> source,
CompositeDisposable composite,
Consumer<? super T> onNext,
Consumer<? super Throwable> onError) {
ObjectHelper.requireNonNull(source, "source is null");
ObjectHelper.requireNonNull(composite, "composite is null");
ObjectHelper.requireNonNull(onNext, "onNext is null");
ObjectHelper.requireNonNull(onError, "onError is null");
DisposableAutoReleaseObserver<T> observer = new DisposableAutoReleaseObserver<T>(
composite, onNext, onError, Functions.EMPTY_ACTION);
composite.add(observer);
source.subscribe(observer);
return observer;
}
/**
* Wraps the given onXXX callbacks into a {@code Disposable} {@code Observer},
 * adds it to the given {@code CompositeDisposable} and ensures that if the upstream
 * completes or this particular Disposable is disposed, the Observer is removed
* from the given composite.
* <p>
* The Observer will be removed after the callback for the terminal event has been invoked.
* @param <T> the value type
* @param source the source Observable to subscribe to.
* @param composite the composite Disposable to add and remove the created Disposable Observer
* @param onNext the callback for upstream items
* @param onError the callback for an upstream error if any
* @param onComplete the callback for the upstream completion if any
* @return the Disposable that allows disposing the particular subscription.
*/
public static <T> Disposable subscribeAutoDispose(
Observable<T> source,
CompositeDisposable composite,
Consumer<? super T> onNext,
Consumer<? super Throwable> onError,
Action onComplete) {
ObjectHelper.requireNonNull(source, "source is null");
ObjectHelper.requireNonNull(composite, "composite is null");
ObjectHelper.requireNonNull(onNext, "onNext is null");
ObjectHelper.requireNonNull(onError, "onError is null");
ObjectHelper.requireNonNull(onComplete, "onComplete is null");
DisposableAutoReleaseObserver<T> observer = new DisposableAutoReleaseObserver<T>(
composite, onNext, onError, onComplete);
composite.add(observer);
source.subscribe(observer);
return observer;
}
}
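// Illustrative usage sketch (not part of the original source; the observable and
// composite names below are hypothetical). It shows the auto-dispose behavior the
// javadoc above describes: the wrapped observer removes itself from the composite
// once the upstream terminates or the returned Disposable is disposed.
//
//   CompositeDisposable composite = new CompositeDisposable();
//   Observable<String> items = Observable.just("a", "b", "c");
//   Disposable d = ObservableConsumers.subscribeAutoDispose(
//       items,
//       composite,
//       item -> System.out.println("next: " + item),
//       error -> error.printStackTrace(),
//       () -> System.out.println("done"));
//   // After "done" is printed, the observer is no longer held by "composite".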
|
apache-2.0
|
eug48/hapi-fhir
|
examples/src/main/java/example/HttpProxy.java
|
1673
|
package example;
import org.apache.http.HttpHost;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.CredentialsProvider;
import org.apache.http.impl.client.*;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.model.dstu2.resource.Patient;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.rest.client.api.IGenericClient;
public class HttpProxy {
public static void main(String[] args) {
/*
 * This is out of date - just keeping
* it in case it's helpful...
*/
final String authUser = "username";
final String authPassword = "password";
CredentialsProvider credsProvider = new BasicCredentialsProvider();
credsProvider.setCredentials(new AuthScope("10.10.10.10", 8080),
new UsernamePasswordCredentials(authUser, authPassword));
HttpHost myProxy = new HttpHost("10.10.10.10", 8080);
HttpClientBuilder clientBuilder = HttpClientBuilder.create();
clientBuilder
.setProxy(myProxy)
.setProxyAuthenticationStrategy(new ProxyAuthenticationStrategy())
.setDefaultCredentialsProvider(credsProvider)
.disableCookieManagement();
CloseableHttpClient httpClient = clientBuilder.build();
FhirContext ctx = FhirContext.forDstu2();
String serverBase = "http://spark.furore.com/fhir/";
ctx.getRestfulClientFactory().setHttpClient(httpClient);
IGenericClient client = ctx.newRestfulGenericClient(serverBase);
IdDt id = new IdDt("Patient", "123");
client.read(Patient.class, id);
}
}
|
apache-2.0
|
CMPUT404Proj/CMPUT404-project-socialdistribution
|
social_dist/comments/urls.py
|
126
|
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^new/', views.comment_new, name='comment_new'),
]
|
apache-2.0
|
meirfaraj/tiproject
|
finance/src/finance/oblig/ex1.lua
|
1107
|
--------------------------------------------------------------------------
-- Obligation --
--------------------------------------------------------------------------
require("ui/wscreen")
require("finance/oblig/func/obligation")
-- obligation in fine
Oblig_Ex1 = Tmv(EX_OBLIGATION_1_TITLE_ID,EX_OBLIGATION_1_TITLE_HEADER_ID)
function Oblig_Ex1:widgetsInit()
self:add(-1,{"%","value"},"%type")
self:add(0,"Emission oblig in fine au terme de ","T")
self:add(1,"Taux nominal r:","r")
self:add(1,"valeur d'emis. Ve:","Ve")
self:add(2,"Valuer nominal Vn=Vr:","Vn")
end
function Oblig_Ex1:performCalc()
local T = varValue["T"]
local r = varValue["r"]
local Ve = varValue["Ve"]
local Vn = varValue["Vn"]
if varValue["%type"] == "%" then
local calc = tostring(r).."/100"
self:appendToResult(tostring(r).."%="..calc)
r=tostring(tiNspire.approx(calc))
self:appendToResult("="..tostring(r).."\n")
end
Obligation.FluxInFine(self,T,r,Ve,Vn )
Obligation.TABInFine(self,T,r,Ve,Vn )
end
|
apache-2.0
|
uchenm/CnChess
|
src/chess/model/player/ai/Thinking.java
|
770
|
/**
* Copyright 2013 Ming Chen<uchenm@gmail.com>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package chess.model.player.ai;
import chess.model.game.Move;
public interface Thinking {
public Move getBestMove();
public void makeMove(Move move);
}
|
apache-2.0
|
brint/openswan
|
cookbooks/openswan/recipes/default.rb
|
4014
|
#
# Cookbook Name:: openswan
# Recipe:: default
#
# Copyright 2013, Wanelo, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
include_recipe 'ipaddr_extensions'
vpn_group = Array.new
users = []
search(:users, 'groups:vpn AND NOT action:remove').each do |u|
u['username'] ||= u['id']
vpn_group << u['username']
users << {:username => u['username'], :vpn_password => u['vpn_password']}
end
template "#{node['openswan']['ppp_path']}/chap-secrets" do
source "chap-secrets.erb"
variables({
:users => users
})
notifies :restart, "service[xl2tpd]"
notifies :restart, "service[ipsec]"
end
execute "apt-get update" do
command "apt-get update && touch /etc/apt/openswan_update_completed"
not_if "ls /etc/apt/openswan_update_completed"
end
file "/var/log/ppp-ipupdown.log" do
action :touch
not_if { File.exists?("/var/log/ppp-ipupdown.log") } # No touching!
end
package "openswan" do
action :install
end
execute "turn on ipv4 forwarding" do
command "echo 1 > /proc/sys/net/ipv4/ip_forward"
not_if "grep 1 /proc/sys/net/ipv4/ip_forward"
end
bash "turn off redirects" do
code <<-EOH
for redirect in `ls /proc/sys/net/ipv4/conf/*/send_redirects`
do echo 0 > $redirect
done
EOH
not_if "grep 0 /proc/sys/net/ipv4/conf/tunl0/send_redirects"
end
["ppp", "xl2tpd"].each do |p|
package p
end
template "#{node['openswan']['xl2tpd_path']}/xl2tpd.conf" do
source "xl2tpd.conf.erb"
notifies :restart, "service[xl2tpd]"
end
template "#{node['openswan']['ppp_path']}/options.xl2tpd" do
source "options.xl2tpd.erb"
notifies :restart, "service[xl2tpd]"
end
template "/etc/ipsec.secrets" do
source "ipsec.secrets.erb"
notifies :restart, "service[ipsec]"
end
template "/etc/ipsec.conf" do
source "ipsec.conf.erb"
notifies :restart, "service[ipsec]"
end
service "xl2tpd" do
supports :status => true, :restart => true, :start => true, :stop => true
end
service "ipsec" do
supports :status => true, :restart => true, :start => true, :stop => true
end
remote_file "/var/tmp/linux-image-3.8.4-joyent-ubuntu-12-opt_1.0.0_amd64.deb" do
source "http://l03.ryan.net/data/linux-image-3.8.4-joyent-ubuntu-12-opt_1.0.0_amd64.deb"
end
remote_file "/var/tmp/linux-headers-3.8.4-joyent-ubuntu-12-opt_1.0.0_amd64.deb" do
source "http://l03.ryan.net/data/linux-headers-3.8.4-joyent-ubuntu-12-opt_1.0.0_amd64.deb"
end
execute "install custom joyent linux headers" do
command "dpkg --install --force-confnew /var/tmp/linux-headers-3.8.4-joyent-ubuntu-12-opt_1.0.0_amd64.deb && dpkg --install --force-confnew /var/tmp/linux-image-3.8.4-joyent-ubuntu-12-opt_1.0.0_amd64.deb"
not_if "ls /lib/modules/3.8.4-joyent-ubuntu-12-opt/kernel"
end
public_interface = node['network']['interfaces'].detect { |k,v| v['addresses'].keys.include?(node['ipaddress'])}.first
execute "turn on public SNAT" do
command "iptables -t nat -I POSTROUTING -o #{public_interface} -j SNAT --to #{node['ipaddress']}"
not_if "iptables -L -t nat | grep #{node['ipaddress']}"
notifies :restart, "service[xl2tpd]"
notifies :restart, "service[ipsec]"
end
private_interface = node['network']['interfaces'].detect { |k,v| v['addresses'].keys.include?(node['openswan']['private_ip'])}.first
execute "turn on private SNAT" do
command "iptables -t nat -I POSTROUTING -o #{private_interface} -j SNAT --to #{node['openswan']['private_ip']}"
not_if "iptables -L -t nat | grep #{node['openswan']['private_ip']}"
notifies :restart, "service[xl2tpd]"
notifies :restart, "service[ipsec]"
end
|
apache-2.0
|
vivantech/kc_fixes
|
src/main/java/org/kuali/kra/iacuc/committee/print/IacucCommitteeXmlStream.java
|
12493
|
/*
* Copyright 2005-2014 The Kuali Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl1.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.kra.iacuc.committee.print;
import edu.mit.coeus.xml.iacuc.*;
import edu.mit.coeus.xml.iacuc.CommitteeType;
import edu.mit.coeus.xml.iacuc.ProtocolType.Submissions;
import org.apache.xmlbeans.XmlObject;
import org.kuali.kra.bo.KcPerson;
import org.kuali.kra.bo.KraPersistableBusinessObjectBase;
import org.kuali.kra.common.committee.bo.*;
import org.kuali.kra.document.ResearchDocumentBase;
import org.kuali.kra.iacuc.committee.print.service.IacucPrintXmlUtilService;
import org.kuali.kra.printing.xmlstream.PrintBaseXmlStream;
import org.kuali.kra.protocol.personnel.ProtocolPersonRolodexBase;
import java.math.BigInteger;
import java.util.*;
/**
 * This class generates XML that conforms to the XSD related to
* committee reports. The data for XML is derived from
* {@link ResearchDocumentBase} and {@link Map} of details passed to the class.
*/
public class IacucCommitteeXmlStream extends PrintBaseXmlStream {
private IacucScheduleXmlStream scheduleXmlStream;
private IacucPrintXmlUtilService printXmlUtilService;
/**
 * This method generates the XML committee report. It uses data passed in
 * {@link ResearchDocumentBase} for populating the XML nodes. The XML, once
 * generated, is returned as {@link XmlObject}
*
* @param printableBusinessObject
* using which XML is generated
* @param reportParameters
* parameters related to XML generation
* @return {@link XmlObject} representing the XML
*/
public Map<String, XmlObject> generateXmlStream(KraPersistableBusinessObjectBase printableBusinessObject, Map<String, Object> reportParameters) {
CommitteeBase committee = (CommitteeBase)printableBusinessObject;
Map<String, XmlObject> xmlObjectList = new LinkedHashMap<String, XmlObject>();
CommitteeDocument committeeDocumentType = CommitteeDocument.Factory.newInstance();
committeeDocumentType.setCommittee(getCommitteeCompleteDetails(committee));
xmlObjectList.put("CommitteeBase", committeeDocumentType);
return xmlObjectList;
}
public CommitteeType getCommitteeCompleteDetails(CommitteeBase committee) {
CommitteeType committeeType = CommitteeType.Factory.newInstance();
setCommitteeMasterData(committee,committeeType.addNewCommitteeMasterData()) ;
setCommitteeMembers(committee,committeeType);
setScheduleForcommittee(committee,committeeType) ;
setCommitteeResearchArea(committee,committeeType) ;
return committeeType ;
}
private void setCommitteeResearchArea(org.kuali.kra.common.committee.bo.CommitteeBase committee, CommitteeType committeeType) {
List<CommitteeResearchAreaBase> committeeResearchAreas = committee.getCommitteeResearchAreas();
if(committeeResearchAreas.isEmpty()) return;
for (CommitteeResearchAreaBase committeeResearchArea : committeeResearchAreas) {
ResearchAreaType researchArea = committeeType.addNewResearchArea();
researchArea.setResearchAreaCode(committeeResearchArea.getResearchAreaCode()) ;
researchArea.setResearchAreaDescription(committeeResearchArea.getResearchArea().getDescription()) ;
}
}
private void setScheduleForcommittee(org.kuali.kra.common.committee.bo.CommitteeBase committee, CommitteeType committeeType) {
Date currentDate = new Date();
Boolean isRooster=committee.getPrintRooster();
List<CommitteeScheduleBase> vecSchedule = committee.getCommitteeSchedules();
if (vecSchedule.isEmpty()) return;
for (CommitteeScheduleBase scheduleDetailsBean : vecSchedule) {
Date scheduleDate = scheduleDetailsBean.getScheduledDate();
int dateCount = scheduleDate.compareTo(currentDate);
if(isRooster){
ScheduleType scheduleType = committeeType.addNewSchedule();
getScheduleXmlStream().setScheduleMasterData(scheduleDetailsBean,scheduleType.addNewScheduleMasterData()) ;
ScheduleSummaryType nextSchedule = scheduleType.addNewNextSchedule();
getScheduleXmlStream().setNextSchedule(scheduleDetailsBean,nextSchedule.addNewScheduleMasterData());
}
if(!isRooster){
if(dateCount>0){
ScheduleType scheduleType = committeeType.addNewSchedule();
getScheduleXmlStream().setScheduleMasterData(scheduleDetailsBean,scheduleType.addNewScheduleMasterData()) ;
ScheduleSummaryType nextSchedule = scheduleType.addNewNextSchedule();
getScheduleXmlStream().setNextSchedule(scheduleDetailsBean,nextSchedule.addNewScheduleMasterData());
}}}
}
public void setCommitteeMembers(org.kuali.kra.common.committee.bo.CommitteeBase committee, CommitteeType committeeType) {
List<CommitteeMembershipBase> committeeMemberships = committee.getCommitteeMemberships();
if(committeeMemberships.isEmpty()) return;
for (CommitteeMembershipBase membershipBean : committeeMemberships) {
CommitteeMemberType committeeMember = committeeType.addNewCommitteeMember();
setCommitteeMembershipType(membershipBean, committeeMember);
}
}
public void setCommitteeMembers(org.kuali.kra.common.committee.bo.CommitteeBase committee, Submissions committeeType) {
List<CommitteeMembershipBase> committeeMemberships = committee.getCommitteeMemberships();
if(committeeMemberships.isEmpty()) return;
for (CommitteeMembershipBase membershipBean : committeeMemberships) {
CommitteeMemberType committeeMember = committeeType.addNewCommitteeMember();
setCommitteeMembershipType(membershipBean, committeeMember);
}
}
/**
 * Populates the given committee member element from the committee membership bean.
* @param membershipBean
* @param committeeMember
*/
private void setCommitteeMembershipType(CommitteeMembershipBase membershipBean, CommitteeMemberType committeeMember) {
membershipBean.refreshNonUpdateableReferences();
setPersonType(membershipBean,committeeMember);
committeeMember.setMemberStatus(membershipBean.isActive()?"active":"inactive") ;
committeeMember.setMemberStatusStartDt(Calendar.getInstance());
committeeMember.setMemberStatusEndDt(Calendar.getInstance()) ;
if (membershipBean.getTermEndDate() != null){
committeeMember.setTermEnd(getDateTimeService().getCalendar(membershipBean.getTermEndDate()));
}
if (membershipBean.getTermStartDate() != null){
committeeMember.setTermStart(getDateTimeService().getCalendar(membershipBean.getTermStartDate())) ;
}
if (membershipBean.getMembershipType() != null){
committeeMember.setMemberType(membershipBean.getMembershipType().getDescription()) ;
}
committeeMember.setPaidMemberFlag(membershipBean.getPaidMember());
List<CommitteeMembershipExpertiseBase> committeeMemResearchArea = membershipBean.getMembershipExpertise();
if (committeeMemResearchArea != null){
for (CommitteeMembershipExpertiseBase committeeMemberExpertise : committeeMemResearchArea) {
ResearchAreaType researchArea = committeeMember.addNewResearchArea();
researchArea.setResearchAreaCode(committeeMemberExpertise.getResearchAreaCode()) ;
if (committeeMemberExpertise.getResearchArea()!=null){
researchArea.setResearchAreaDescription(committeeMemberExpertise.getResearchArea().getDescription()) ;
}
}
}
List<CommitteeMembershipRole> vecMemRoles = membershipBean.getMembershipRoles();
if ( vecMemRoles != null){
for (CommitteeMembershipRole committeeMembershipRole : vecMemRoles) {
CommitteeMemberRoleType committeeMemRole = committeeMember.addNewCommitteeMemberRole();
committeeMemRole.setMemberRoleCode(new BigInteger(String.valueOf(committeeMembershipRole.getMembershipRoleCode()))) ;
if (committeeMembershipRole.getMembershipRole()!= null){
committeeMemRole.setMemberRoleDesc(committeeMembershipRole.getMembershipRole().getDescription()) ;
}
if (committeeMembershipRole.getStartDate() != null){
committeeMemRole.setMemberRoleStartDt(getDateTimeService().getCalendar(committeeMembershipRole.getStartDate())) ;
}
if (committeeMembershipRole.getEndDate() != null){
committeeMemRole.setMemberRoleEndDt(getDateTimeService().getCalendar(committeeMembershipRole.getEndDate())) ;
}
}
}
}
private void setPersonType(CommitteeMembershipBase membershipBean,CommitteeMemberType committeeMember) {
PersonType person = committeeMember.addNewPerson();
boolean employeeFlag = membershipBean.getPerson()!=null;
person.setFacultyFlag(false) ;
person.setEmployeeFlag(!employeeFlag) ;
if (employeeFlag ){
KcPerson personBean = membershipBean.getPerson();
getPrintXmlUtilService().setPersonXml(personBean, person);
}else{
ProtocolPersonRolodexBase rolodexBean = membershipBean.getRolodex();
getPrintXmlUtilService().setPersonXml(rolodexBean, person);
}
}
public void setCommitteeMasterData(CommitteeBase committee,CommitteeMasterDataType committeeMasterDataType){
committeeMasterDataType.setCommitteeId(committee.getCommitteeId()) ;
committeeMasterDataType.setCommitteeName(committee.getCommitteeName()) ;
committeeMasterDataType.setHomeUnitNumber(committee.getHomeUnitNumber()) ;
committeeMasterDataType.setHomeUnitName(committee.getUnitName()) ;
committeeMasterDataType.setCommitteeTypeCode(new BigInteger(String.valueOf(committee.getCommitteeTypeCode()))) ;
committeeMasterDataType.setCommitteeTypeDesc(committee.getCommitteeType().getDescription()) ;
committeeMasterDataType.setScheduleDescription(committee.getScheduleDescription()) ;
committeeMasterDataType.setMinimumMembersRequired(new BigInteger(String.valueOf(committee.getMinimumMembersRequired()))) ;
committeeMasterDataType.setMaxProtocols(new BigInteger(String.valueOf(committee.getMaxProtocols()))) ;
committeeMasterDataType.setAdvSubmissionDays(new BigInteger(String.valueOf(committee.getAdvancedSubmissionDaysRequired()))) ;
if(committee.getReviewType()!=null){
committeeMasterDataType.setDefaultReviewTypeCode(new BigInteger(String.valueOf(committee.getReviewTypeCode()))) ;
committeeMasterDataType.setDefaultReviewTypeDesc(committee.getReviewType().getDescription()) ;
}
}
/**
* Sets the scheduleXmlStream attribute value.
* @param scheduleXmlStream The scheduleXmlStream to set.
*/
public void setScheduleXmlStream(IacucScheduleXmlStream scheduleXmlStream) {
this.scheduleXmlStream = scheduleXmlStream;
}
/**
* Gets the scheduleXmlStream attribute.
* @return Returns the scheduleXmlStream.
*/
public IacucScheduleXmlStream getScheduleXmlStream() {
return scheduleXmlStream;
}
/**
 * Sets the printXmlUtilService attribute value.
 * @param printXmlUtilService The printXmlUtilService to set.
*/
public void setPrintXmlUtilService(IacucPrintXmlUtilService printXmlUtilService) {
this.printXmlUtilService = printXmlUtilService;
}
/**
 * Gets the printXmlUtilService attribute.
 * @return Returns the printXmlUtilService.
*/
public IacucPrintXmlUtilService getPrintXmlUtilService() {
return printXmlUtilService;
}
}
|
apache-2.0
|
NakedObjectsGroup/NakedObjectsFramework
|
NakedFramework/NakedFramework.Architecture/SpecImmutable/ITypeSpecImmutable.cs
|
2223
|
// Copyright Naked Objects Group Ltd, 45 Station Road, Henley on Thames, UK, RG9 1AT
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License.
// You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0.
// Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and limitations under the License.
using System;
using System.Collections.Generic;
using NakedFramework.Architecture.Menu;
using NakedFramework.Architecture.Spec;
namespace NakedFramework.Architecture.SpecImmutable;
/// <summary>
/// This is the immutable or 'static' core of the IObjectSpec. It is created by the reflector during start-up, but can
/// also be serialized/deserialized and hence persisted. However, it needs to be wrapped as an IObjectSpec at run-time
/// in order to provide various run-time behaviors required of the Spec, which depend upon the run-time framework services.
/// </summary>
public interface ITypeSpecImmutable : ISpecificationBuilder {
Type Type { get; }
string FullName { get; }
string ShortName { get; }
IMenuImmutable ObjectMenu { get; }
IReadOnlyList<IActionSpecImmutable> OrderedObjectActions { get; }
IReadOnlyList<IActionSpecImmutable> OrderedContributedActions { get; }
IReadOnlyList<IActionSpecImmutable> OrderedCollectionContributedActions { get; }
IReadOnlyList<IActionSpecImmutable> OrderedFinderActions { get; }
IReadOnlyList<IAssociationSpecImmutable> OrderedFields { get; }
IReadOnlyList<ITypeSpecImmutable> Interfaces { get; }
IReadOnlyList<ITypeSpecImmutable> Subclasses { get; }
ITypeSpecImmutable Superclass { get; }
bool IsObject { get; }
bool IsCollection { get; }
bool IsQueryable { get; }
bool IsParseable { get; }
//Will return true if this is a sub-type of the passed-in spec
bool IsOfType(ITypeSpecImmutable otherSpecification);
}
// Copyright (c) Naked Objects Group Ltd.
|
apache-2.0
|
wangzhengbo/Linenoise4Java
|
test/cn/com/linenoise/HistoryAddTest.java
|
2641
|
package cn.com.linenoise;
import java.io.File;
import org.junit.Assert;
import org.junit.Test;
public class HistoryAddTest extends BaseTest {
@Test
public void historyAdd() {
String hello = "你好";
String fileName = "test/historyAdd.txt";
File file = new File(fileName);
if (file.exists()) {
Assert.assertTrue(file.delete());
}
try {
Assert.assertTrue(lineNoise.historyAdd("one"));
Assert.assertTrue(lineNoise.historyAdd("two"));
// Can't add duplicated lines
Assert.assertFalse(lineNoise.historyAdd("two"));
Assert.assertTrue(lineNoise.historyAdd(null));
Assert.assertTrue(lineNoise.historyAdd(" "));
Assert.assertTrue(lineNoise.historyAdd("three"));
Assert.assertTrue(lineNoise.historySave(fileName));
Assert.assertArrayEquals(new String[] { "one", "two", "", " ",
"three" }, readLines(fileName));
Assert.assertTrue(lineNoise.historyAdd(hello));
Assert.assertArrayEquals(new String[] { "one", "two", "", " ",
"three" }, readLines(fileName));
Assert.assertTrue(lineNoise.historySave(fileName));
Assert.assertArrayEquals(new String[] { "one", "two", "", " ",
"three", hello }, readLines(fileName));
// Set max history length to 9
Assert.assertEquals(Linenoise.DEFAULT_HISTORY_MAX_LEN,
lineNoise.historyGetMaxLen());
Assert.assertTrue(lineNoise.historySetMaxLen(9));
Assert.assertEquals(9, lineNoise.historyGetMaxLen());
Assert.assertArrayEquals(new String[] { "one", "two", "", " ",
"three", hello }, readLines(fileName));
Assert.assertTrue(lineNoise.historySave(fileName));
Assert.assertArrayEquals(new String[] { "one", "two", "", " ",
"three", hello }, readLines(fileName));
Assert.assertTrue(lineNoise.historyAdd("four"));
Assert.assertTrue(lineNoise.historyAdd("five"));
Assert.assertTrue(lineNoise.historySave(fileName));
Assert.assertArrayEquals(new String[] { "one", "two", "", " ",
"three", hello, "four", "five" }, readLines(fileName));
Assert.assertTrue(lineNoise.historyAdd("six"));
Assert.assertTrue(lineNoise.historySave(fileName));
Assert.assertArrayEquals(new String[] { "one", "two", "", " ",
"three", hello, "four", "five", "six" },
readLines(fileName));
Assert.assertTrue(lineNoise.historyAdd("seven"));
Assert.assertTrue(lineNoise.historySave(fileName));
Assert.assertArrayEquals(new String[] { "two", "", " ", "three",
hello, "four", "five", "six", "seven" },
readLines(fileName));
} finally {
if (file.exists()) {
Assert.assertTrue(file.delete());
}
}
}
}
|
apache-2.0
|
projectVenco/Venco
|
app/src/main/java/ua/kpi/venco/util/LogUtils.java
|
2785
|
/*
* Copyright 2014 Project Venco
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ua.kpi.venco.util;
import android.util.Log;
public class LogUtils {
private static final String LOG_PREFIX = "iosched_";
private static final int LOG_PREFIX_LENGTH = LOG_PREFIX.length();
private static final int MAX_LOG_TAG_LENGTH = 23;
public static String makeLogTag(String str) {
if (str.length() > MAX_LOG_TAG_LENGTH - LOG_PREFIX_LENGTH) {
return LOG_PREFIX + str.substring(0, MAX_LOG_TAG_LENGTH - LOG_PREFIX_LENGTH - 1);
}
return LOG_PREFIX + str;
}
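    // Worked example (illustrative, not part of the original source), derived from the
    // constants above: LOG_PREFIX is "iosched_" (8 chars) and MAX_LOG_TAG_LENGTH is 23,
    // so any name longer than 15 characters is truncated to its first 14 characters,
    // e.g. makeLogTag("SessionDetailActivity") returns "iosched_SessionDetailA".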
/**
* Don't use this when obfuscating class names!
*/
public static String makeLogTag(Class cls) {
return makeLogTag(cls.getSimpleName());
}
public static void LOGD(final String tag, String message) {
//noinspection PointlessBooleanExpression,ConstantConditions
if (Log.isLoggable(tag, Log.DEBUG)) {
Log.d(tag, message);
}
}
public static void LOGD(final String tag, String message, Throwable cause) {
//noinspection PointlessBooleanExpression,ConstantConditions
if (Log.isLoggable(tag, Log.DEBUG)) {
Log.d(tag, message, cause);
}
}
public static void LOGV(final String tag, String message) {
//noinspection PointlessBooleanExpression,ConstantConditions
if (Log.isLoggable(tag, Log.VERBOSE)) {
Log.v(tag, message);
}
}
public static void LOGV(final String tag, String message, Throwable cause) {
//noinspection PointlessBooleanExpression,ConstantConditions
if (Log.isLoggable(tag, Log.VERBOSE)) {
Log.v(tag, message, cause);
}
}
public static void LOGI(final String tag, String message) {
Log.i(tag, message);
}
public static void LOGI(final String tag, String message, Throwable cause) {
Log.i(tag, message, cause);
}
public static void LOGW(final String tag, String message) {
Log.w(tag, message);
}
public static void LOGW(final String tag, String message, Throwable cause) {
Log.w(tag, message, cause);
}
public static void LOGE(final String tag, String message) {
Log.e(tag, message);
}
public static void LOGE(final String tag, String message, Throwable cause) {
Log.e(tag, message, cause);
}
private LogUtils() {
}
}
|
apache-2.0
|
pniekamp/datum-studio
|
src/datumstudio.cpp
|
4414
|
//
// Datum Studio
//
//
// Copyright (C) 2016 Peter Niekamp
//
#include "datumstudio.h"
#include "dialogfactory.h"
#include "forms/ui_about.h"
#include <leap/pathstring.h>
#include <QMessageBox>
#include <QCloseEvent>
#include <QSettings>
#include <QScreen>
#include <QFile>
#include <QtDebug>
using namespace std;
using namespace leap;
const char *VersionString = "0.0.1";
//|---------------------- DatumStudio ---------------------------------------
//|--------------------------------------------------------------------------
///////////////////////// DatumStudio::Constructor //////////////////////////
DatumStudio::DatumStudio()
{
ui.setupUi(this);
auto actionmanager = Studio::Core::instance()->find_object<Studio::ActionManager>();
actionmanager->register_container("Studio.Menu", ui.MenuBar);
actionmanager->register_container("Studio.Menu.File", ui.FileMenu);
actionmanager->register_container("Studio.Menu.Help", ui.HelpMenu);
Studio::Core::instance()->add_object(new MainWindow(this));
actionmanager->register_container("Studio.Meta.Bar", ui.SideMetaBar);
actionmanager->register_container("Studio.Meta.Box", ui.SideMetaBox);
actionmanager->register_container("Studio.Main.StatusBar", ui.MainStatusBar);
actionmanager->register_container("Studio.Main.StatusBox", ui.MainStatusBox);
actionmanager->register_container("Studio.Main.StatusReport", ui.MainStatusReport);
Studio::Core::instance()->add_object(new ModeManager(ui.SideMetaBar, ui.SideMetaBox, ui.Container));
Studio::Core::instance()->add_object(new StatusManager(ui.MainStatusBar, ui.MainStatusBox, ui.MainStatusReport, ui.MainStatusView));
Studio::Core::instance()->add_object(new ViewFactory);
QFile theme(pathstring("theme.css").c_str());
if (theme.open(QIODevice::ReadOnly))
{
setStyleSheet(theme.readAll());
}
QSettings settings;
move(settings.value("mainwindow/pos", pos()).toPoint());
resize(settings.value("mainwindow/size", size()).toSize());
restoreState(settings.value("mainwindow/state", QByteArray()).toByteArray());
ui.Splitter->restoreState(settings.value("mainwindow/splitter", QByteArray()).toByteArray());
}
///////////////////////// DatumStudio::Destructor ///////////////////////////
DatumStudio::~DatumStudio()
{
}
///////////////////////// DatumStudio::set_screen_geometry //////////////////
// WIDTHxHEIGHT+XOFF+YOFF
void DatumStudio::set_screen_geometry(std::string const &geometry)
{
QScreen *screen = QGuiApplication::primaryScreen();
if (geometry.substr(0, 3) == "fs=")
{
int screenid = atoi(geometry.substr(3).c_str());
if (screenid >= 0 && screenid < QGuiApplication::screens().size())
screen = QGuiApplication::screens()[screenid];
}
int x = screen->geometry().left();
int y = screen->geometry().top();
int w = screen->geometry().width();
int h = screen->geometry().height();
QRegExp rx(R"((\d+)x(\d+)([+-]\d+)*([+-]\d+)*)");
if (rx.indexIn(geometry.c_str()) == 0)
{
x = rx.cap(3).toInt();
y = rx.cap(4).toInt();
w = rx.cap(1).toInt();
h = rx.cap(2).toInt();
if (x < 0)
x += screen->geometry().width();
if (y < 0)
y += screen->geometry().height();
}
// setWindowFlags(Qt::FramelessWindowHint);
// setGeometry(x, y, w, h);
// QTBUG-41883
setWindowFlags(Qt::FramelessWindowHint | Qt::WindowStaysOnTopHint);
setGeometry(x, y, w, h+1);
}
///////////////////////// DatumStudio::closeEvent ///////////////////////////
void DatumStudio::closeEvent(QCloseEvent *event)
{
auto mainwindow = Studio::Core::instance()->find_object<MainWindow>();
if (mainwindow->close())
{
QSettings settings;
if (!(windowFlags() & Qt::FramelessWindowHint) && !isMaximized())
{
settings.setValue("mainwindow/pos", pos());
settings.setValue("mainwindow/size", size());
}
settings.setValue("mainwindow/state", saveState());
settings.setValue("mainwindow/splitter", ui.Splitter->saveState());
}
else
{
event->ignore();
}
}
///////////////////////// DatumStudio::on_About_triggered ///////////////////
void DatumStudio::on_About_triggered()
{
DialogFactory<Ui::About> dlg(this);
dlg.ui.Version->setText(QString("Version %1").arg(VersionString));
dlg.exec();
}
|
apache-2.0
|
SLAsticSPE/slastic
|
src-gen/kieker/tools/slastic/metamodel/adaptation/impl/PerformancePredictionImpl.java
|
5475
|
/**
* <copyright>
* </copyright>
*
* $Id$
*/
package kieker.tools.slastic.metamodel.adaptation.impl;
import kieker.tools.slastic.metamodel.adaptation.AdaptationPackage;
import kieker.tools.slastic.metamodel.adaptation.Control;
import kieker.tools.slastic.metamodel.adaptation.PerformancePrediction;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.notify.NotificationChain;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;
import org.eclipse.emf.ecore.impl.ENotificationImpl;
import org.eclipse.emf.ecore.impl.EObjectImpl;
import org.eclipse.emf.ecore.util.EcoreUtil;
/**
* <!-- begin-user-doc -->
* An implementation of the model object '<em><b>Performance Prediction</b></em>'.
* <!-- end-user-doc -->
* <p>
* The following features are implemented:
* <ul>
* <li>{@link kieker.tools.slastic.metamodel.adaptation.impl.PerformancePredictionImpl#getControl <em>Control</em>}</li>
* </ul>
* </p>
*
* @generated
*/
public class PerformancePredictionImpl extends EObjectImpl implements PerformancePrediction {
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected PerformancePredictionImpl() {
super();
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
protected EClass eStaticClass() {
return AdaptationPackage.Literals.PERFORMANCE_PREDICTION;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public Control getControl() {
if (eContainerFeatureID() != AdaptationPackage.PERFORMANCE_PREDICTION__CONTROL) return null;
return (Control)eContainer();
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public NotificationChain basicSetControl(Control newControl, NotificationChain msgs) {
msgs = eBasicSetContainer((InternalEObject)newControl, AdaptationPackage.PERFORMANCE_PREDICTION__CONTROL, msgs);
return msgs;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public void setControl(Control newControl) {
if (newControl != eInternalContainer() || (eContainerFeatureID() != AdaptationPackage.PERFORMANCE_PREDICTION__CONTROL && newControl != null)) {
if (EcoreUtil.isAncestor(this, newControl))
throw new IllegalArgumentException("Recursive containment not allowed for " + toString());
NotificationChain msgs = null;
if (eInternalContainer() != null)
msgs = eBasicRemoveFromContainer(msgs);
if (newControl != null)
msgs = ((InternalEObject)newControl).eInverseAdd(this, AdaptationPackage.CONTROL__PERFORMANCE_PREDICTOR, Control.class, msgs);
msgs = basicSetControl(newControl, msgs);
if (msgs != null) msgs.dispatch();
}
else if (eNotificationRequired())
eNotify(new ENotificationImpl(this, Notification.SET, AdaptationPackage.PERFORMANCE_PREDICTION__CONTROL, newControl, newControl));
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public NotificationChain eInverseAdd(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
switch (featureID) {
case AdaptationPackage.PERFORMANCE_PREDICTION__CONTROL:
if (eInternalContainer() != null)
msgs = eBasicRemoveFromContainer(msgs);
return basicSetControl((Control)otherEnd, msgs);
}
return super.eInverseAdd(otherEnd, featureID, msgs);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
switch (featureID) {
case AdaptationPackage.PERFORMANCE_PREDICTION__CONTROL:
return basicSetControl(null, msgs);
}
return super.eInverseRemove(otherEnd, featureID, msgs);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public NotificationChain eBasicRemoveFromContainerFeature(NotificationChain msgs) {
switch (eContainerFeatureID()) {
case AdaptationPackage.PERFORMANCE_PREDICTION__CONTROL:
return eInternalContainer().eInverseRemove(this, AdaptationPackage.CONTROL__PERFORMANCE_PREDICTOR, Control.class, msgs);
}
return super.eBasicRemoveFromContainerFeature(msgs);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public Object eGet(int featureID, boolean resolve, boolean coreType) {
switch (featureID) {
case AdaptationPackage.PERFORMANCE_PREDICTION__CONTROL:
return getControl();
}
return super.eGet(featureID, resolve, coreType);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public void eSet(int featureID, Object newValue) {
switch (featureID) {
case AdaptationPackage.PERFORMANCE_PREDICTION__CONTROL:
setControl((Control)newValue);
return;
}
super.eSet(featureID, newValue);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public void eUnset(int featureID) {
switch (featureID) {
case AdaptationPackage.PERFORMANCE_PREDICTION__CONTROL:
setControl((Control)null);
return;
}
super.eUnset(featureID);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public boolean eIsSet(int featureID) {
switch (featureID) {
case AdaptationPackage.PERFORMANCE_PREDICTION__CONTROL:
return getControl() != null;
}
return super.eIsSet(featureID);
}
} //PerformancePredictionImpl
|
apache-2.0
|
zkidkid/elasticsearch
|
core/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java
|
11744
|
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.rest.action.search;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.rest.BaseRestHandler;
import org.elasticsearch.rest.RestChannel;
import org.elasticsearch.rest.RestController;
import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.rest.action.support.RestActions;
import org.elasticsearch.rest.action.support.RestStatusToXContentListener;
import org.elasticsearch.search.Scroll;
import org.elasticsearch.search.aggregations.AggregatorParsers;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.search.suggest.SuggestBuilder;
import org.elasticsearch.search.suggest.Suggesters;
import org.elasticsearch.search.suggest.term.TermSuggestionBuilder.SuggestMode;
import java.io.IOException;
import java.util.Arrays;
import static org.elasticsearch.common.unit.TimeValue.parseTimeValue;
import static org.elasticsearch.rest.RestRequest.Method.GET;
import static org.elasticsearch.rest.RestRequest.Method.POST;
import static org.elasticsearch.search.suggest.SuggestBuilders.termSuggestion;
/**
*
*/
public class RestSearchAction extends BaseRestHandler {
private final IndicesQueriesRegistry queryRegistry;
private final AggregatorParsers aggParsers;
private final Suggesters suggesters;
@Inject
public RestSearchAction(Settings settings, RestController controller, IndicesQueriesRegistry queryRegistry,
AggregatorParsers aggParsers, Suggesters suggesters) {
super(settings);
this.queryRegistry = queryRegistry;
this.aggParsers = aggParsers;
this.suggesters = suggesters;
controller.registerHandler(GET, "/_search", this);
controller.registerHandler(POST, "/_search", this);
controller.registerHandler(GET, "/{index}/_search", this);
controller.registerHandler(POST, "/{index}/_search", this);
controller.registerHandler(GET, "/{index}/{type}/_search", this);
controller.registerHandler(POST, "/{index}/{type}/_search", this);
}
@Override
public void handleRequest(final RestRequest request, final RestChannel channel, final NodeClient client) throws IOException {
SearchRequest searchRequest = new SearchRequest();
BytesReference restContent = RestActions.hasBodyContent(request) ? RestActions.getRestContent(request) : null;
parseSearchRequest(searchRequest, queryRegistry, request, parseFieldMatcher, aggParsers, suggesters, restContent);
client.search(searchRequest, new RestStatusToXContentListener<>(channel));
}
/**
* Parses the rest request on top of the SearchRequest, preserving values
* that are not overridden by the rest request.
*
* @param restContent
* override body content to use for the request. If null body
* content is read from the request using
* RestAction.hasBodyContent.
*/
public static void parseSearchRequest(SearchRequest searchRequest, IndicesQueriesRegistry indicesQueriesRegistry, RestRequest request,
ParseFieldMatcher parseFieldMatcher, AggregatorParsers aggParsers, Suggesters suggesters, BytesReference restContent)
throws IOException {
if (searchRequest.source() == null) {
searchRequest.source(new SearchSourceBuilder());
}
searchRequest.indices(Strings.splitStringByCommaToArray(request.param("index")));
if (restContent != null) {
try (XContentParser parser = XContentFactory.xContent(restContent).createParser(restContent)) {
QueryParseContext context = new QueryParseContext(indicesQueriesRegistry, parser, parseFieldMatcher);
searchRequest.source().parseXContent(context, aggParsers, suggesters);
}
}
// do not allow 'query_and_fetch' or 'dfs_query_and_fetch' search types
// from the REST layer. these modes are an internal optimization and should
// not be specified explicitly by the user.
String searchType = request.param("search_type");
if (SearchType.fromString(searchType, parseFieldMatcher).equals(SearchType.QUERY_AND_FETCH) ||
SearchType.fromString(searchType, parseFieldMatcher).equals(SearchType.DFS_QUERY_AND_FETCH)) {
throw new IllegalArgumentException("Unsupported search type [" + searchType + "]");
} else {
searchRequest.searchType(searchType);
}
parseSearchSource(searchRequest.source(), request);
searchRequest.requestCache(request.paramAsBoolean("request_cache", null));
String scroll = request.param("scroll");
if (scroll != null) {
searchRequest.scroll(new Scroll(parseTimeValue(scroll, null, "scroll")));
}
searchRequest.types(Strings.splitStringByCommaToArray(request.param("type")));
searchRequest.routing(request.param("routing"));
searchRequest.preference(request.param("preference"));
searchRequest.indicesOptions(IndicesOptions.fromRequest(request, searchRequest.indicesOptions()));
}
/**
* Parses the rest request on top of the SearchSourceBuilder, preserving
* values that are not overridden by the rest request.
*/
private static void parseSearchSource(final SearchSourceBuilder searchSourceBuilder, RestRequest request) {
QueryBuilder queryBuilder = RestActions.urlParamsToQueryBuilder(request);
if (queryBuilder != null) {
searchSourceBuilder.query(queryBuilder);
}
int from = request.paramAsInt("from", -1);
if (from != -1) {
searchSourceBuilder.from(from);
}
int size = request.paramAsInt("size", -1);
if (size != -1) {
searchSourceBuilder.size(size);
}
if (request.hasParam("explain")) {
searchSourceBuilder.explain(request.paramAsBoolean("explain", null));
}
if (request.hasParam("version")) {
searchSourceBuilder.version(request.paramAsBoolean("version", null));
}
if (request.hasParam("timeout")) {
searchSourceBuilder.timeout(request.paramAsTime("timeout", null));
}
if (request.hasParam("terminate_after")) {
int terminateAfter = request.paramAsInt("terminate_after",
SearchContext.DEFAULT_TERMINATE_AFTER);
if (terminateAfter < 0) {
throw new IllegalArgumentException("terminateAfter must be > 0");
} else if (terminateAfter > 0) {
searchSourceBuilder.terminateAfter(terminateAfter);
}
}
if (request.param("fields") != null) {
throw new IllegalArgumentException("The parameter [" +
SearchSourceBuilder.FIELDS_FIELD + "] is no longer supported, please use [" +
SearchSourceBuilder.STORED_FIELDS_FIELD + "] to retrieve stored fields or _source filtering " +
"if the field is not stored");
}
String sField = request.param("stored_fields");
if (sField != null) {
if (!Strings.hasText(sField)) {
searchSourceBuilder.noStoredFields();
} else {
String[] sFields = Strings.splitStringByCommaToArray(sField);
if (sFields != null) {
for (String field : sFields) {
searchSourceBuilder.storedField(field);
}
}
}
}
String sDocValueFields = request.param("docvalue_fields");
if (sDocValueFields == null) {
sDocValueFields = request.param("fielddata_fields");
}
if (sDocValueFields != null) {
if (Strings.hasText(sDocValueFields)) {
String[] sFields = Strings.splitStringByCommaToArray(sDocValueFields);
for (String field : sFields) {
searchSourceBuilder.docValueField(field);
}
}
}
FetchSourceContext fetchSourceContext = FetchSourceContext.parseFromRestRequest(request);
if (fetchSourceContext != null) {
searchSourceBuilder.fetchSource(fetchSourceContext);
}
if (request.hasParam("track_scores")) {
searchSourceBuilder.trackScores(request.paramAsBoolean("track_scores", false));
}
String sSorts = request.param("sort");
if (sSorts != null) {
String[] sorts = Strings.splitStringByCommaToArray(sSorts);
for (String sort : sorts) {
int delimiter = sort.lastIndexOf(":");
if (delimiter != -1) {
String sortField = sort.substring(0, delimiter);
String reverse = sort.substring(delimiter + 1);
if ("asc".equals(reverse)) {
searchSourceBuilder.sort(sortField, SortOrder.ASC);
} else if ("desc".equals(reverse)) {
searchSourceBuilder.sort(sortField, SortOrder.DESC);
}
} else {
searchSourceBuilder.sort(sort);
}
}
}
String sStats = request.param("stats");
if (sStats != null) {
searchSourceBuilder.stats(Arrays.asList(Strings.splitStringByCommaToArray(sStats)));
}
String suggestField = request.param("suggest_field");
if (suggestField != null) {
String suggestText = request.param("suggest_text", request.param("q"));
int suggestSize = request.paramAsInt("suggest_size", 5);
String suggestMode = request.param("suggest_mode");
searchSourceBuilder.suggest(new SuggestBuilder().addSuggestion(suggestField,
termSuggestion(suggestField)
.text(suggestText).size(suggestSize)
.suggestMode(SuggestMode.resolve(suggestMode))));
}
}
}
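// Illustrative request sketch (not part of the original source; the index and field
// names are hypothetical). A request such as
//
//   GET /my-index/_search?q=user:kimchy&from=10&size=20&sort=timestamp:desc&stored_fields=title,body
//
// is handled by parseSearchSource above: "q" is turned into a query, "from"/"size"
// set paging, each "sort" entry is split on its last ':' into field name and order,
// and "stored_fields" is split on commas into individual stored-field requests.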
|
apache-2.0
|
googleapis/google-api-php-client-services
|
src/Directory/Groups.php
|
1913
|
<?php
/*
* Copyright 2014 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
namespace Google\Service\Directory;
class Groups extends \Google\Collection
{
protected $collection_key = 'groups';
/**
* @var string
*/
public $etag;
protected $groupsType = Group::class;
protected $groupsDataType = 'array';
/**
* @var string
*/
public $kind;
/**
* @var string
*/
public $nextPageToken;
/**
* @param string
*/
public function setEtag($etag)
{
$this->etag = $etag;
}
/**
* @return string
*/
public function getEtag()
{
return $this->etag;
}
/**
* @param Group[]
*/
public function setGroups($groups)
{
$this->groups = $groups;
}
/**
* @return Group[]
*/
public function getGroups()
{
return $this->groups;
}
/**
* @param string
*/
public function setKind($kind)
{
$this->kind = $kind;
}
/**
* @return string
*/
public function getKind()
{
return $this->kind;
}
/**
* @param string
*/
public function setNextPageToken($nextPageToken)
{
$this->nextPageToken = $nextPageToken;
}
/**
* @return string
*/
public function getNextPageToken()
{
return $this->nextPageToken;
}
}
// Adding a class alias for backwards compatibility with the previous class name.
class_alias(Groups::class, 'Google_Service_Directory_Groups');
|
apache-2.0
|
BryceBrown/LinkstrDjango
|
rest_framework/tests/negotiation.py
|
1501
|
from __future__ import unicode_literals
from django.test import TestCase
from django.test.client import RequestFactory
from rest_framework.negotiation import DefaultContentNegotiation
from rest_framework.request import Request
factory = RequestFactory()
class MockJSONRenderer(object):
media_type = 'application/json'
class MockHTMLRenderer(object):
media_type = 'text/html'
class TestAcceptedMediaType(TestCase):
def setUp(self):
self.renderers = [MockJSONRenderer(), MockHTMLRenderer()]
self.negotiator = DefaultContentNegotiation()
def select_renderer(self, request):
return self.negotiator.select_renderer(request, self.renderers)
def test_client_without_accept_use_renderer(self):
request = Request(factory.get('/'))
accepted_renderer, accepted_media_type = self.select_renderer(request)
self.assertEqual(accepted_media_type, 'application/json')
def test_client_underspecifies_accept_use_renderer(self):
request = Request(factory.get('/', HTTP_ACCEPT='*/*'))
accepted_renderer, accepted_media_type = self.select_renderer(request)
self.assertEqual(accepted_media_type, 'application/json')
def test_client_overspecifies_accept_use_client(self):
request = Request(factory.get('/', HTTP_ACCEPT='application/json; indent=8'))
accepted_renderer, accepted_media_type = self.select_renderer(request)
self.assertEqual(accepted_media_type, 'application/json; indent=8')
|
apache-2.0
|
eHarmony/seeking
|
seeking-hibernate/src/main/java/com/eharmony/matching/seeking/handler/hibernate/ContainsExpressionNonHandler.java
|
1040
|
/*
* Copyright 2012 eHarmony, Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.eharmony.matching.seeking.handler.hibernate;
/**
* A Contains Expression Handler that doesn't handle the translation and throws
* an exception
*
* @param <Q>
*/
public class ContainsExpressionNonHandler<Q> implements ContainsExpressionHandler<Q> {
@Override
public Q contains(String fieldName, Object[] values) {
throw new UnsupportedOperationException("contains is not implemented");
}
}
|
apache-2.0
|
beatific/daram
|
daram-web/src/main/java/org/beatific/daram/web/config/MybatisConfig.java
|
1618
|
package org.beatific.daram.web.config;
import javax.inject.Inject;
import org.apache.ibatis.session.SqlSession;
import org.apache.ibatis.session.SqlSessionFactory;
import org.beatific.daram.web.common.DaoRepository;
import org.mybatis.spring.SqlSessionFactoryBean;
import org.mybatis.spring.SqlSessionTemplate;
import org.mybatis.spring.annotation.MapperScan;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.core.io.Resource;
@Configuration
@MapperScan(value="org.beatific.daram.web", annotationClass=DaoRepository.class)
@Import(DatabaseConfig.class)
public class MybatisConfig {
@Inject DatabaseConfig dataConfig;
@Value("classpath*:META-INF/mapper/oracle/**/*.xml")
Resource[] mapperLocation;
@Value("classpath:META-INF/mybatis/mybatis-config.xml")
Resource configLocation;
@Bean
public SqlSessionFactory sqlSessionFactory() {
SqlSessionFactoryBean sessionFactory = new SqlSessionFactoryBean();
sessionFactory.setDataSource(dataConfig.dataSource());
sessionFactory.setConfigLocation(configLocation);
sessionFactory.setMapperLocations(mapperLocation);
try {
return (SqlSessionFactory) sessionFactory.getObject();
} catch (Exception e) {
throw new RuntimeException(e);
}
}
@Bean
public SqlSession sqlSessionTemplate() {
return new SqlSessionTemplate(sqlSessionFactory());
}
}
|
apache-2.0
|
trasa/aws-sdk-java
|
aws-java-sdk-iam/src/main/java/com/amazonaws/services/identitymanagement/model/transform/GetSAMLProviderResultStaxUnmarshaller.java
|
3131
|
/*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.identitymanagement.model.transform;
import java.util.Map;
import java.util.Map.Entry;
import javax.xml.stream.events.XMLEvent;
import com.amazonaws.services.identitymanagement.model.*;
import com.amazonaws.transform.Unmarshaller;
import com.amazonaws.transform.MapEntry;
import com.amazonaws.transform.StaxUnmarshallerContext;
import com.amazonaws.transform.SimpleTypeStaxUnmarshallers.*;
/**
* GetSAMLProviderResult StAX Unmarshaller
*/
public class GetSAMLProviderResultStaxUnmarshaller implements
Unmarshaller<GetSAMLProviderResult, StaxUnmarshallerContext> {
public GetSAMLProviderResult unmarshall(StaxUnmarshallerContext context)
throws Exception {
GetSAMLProviderResult getSAMLProviderResult = new GetSAMLProviderResult();
int originalDepth = context.getCurrentDepth();
int targetDepth = originalDepth + 1;
if (context.isStartOfDocument())
targetDepth += 2;
while (true) {
XMLEvent xmlEvent = context.nextEvent();
if (xmlEvent.isEndDocument())
return getSAMLProviderResult;
if (xmlEvent.isAttribute() || xmlEvent.isStartElement()) {
if (context.testExpression("SAMLMetadataDocument", targetDepth)) {
getSAMLProviderResult
.setSAMLMetadataDocument(StringStaxUnmarshaller
.getInstance().unmarshall(context));
continue;
}
if (context.testExpression("CreateDate", targetDepth)) {
getSAMLProviderResult.setCreateDate(DateStaxUnmarshaller
.getInstance().unmarshall(context));
continue;
}
if (context.testExpression("ValidUntil", targetDepth)) {
getSAMLProviderResult.setValidUntil(DateStaxUnmarshaller
.getInstance().unmarshall(context));
continue;
}
} else if (xmlEvent.isEndElement()) {
if (context.getCurrentDepth() < originalDepth) {
return getSAMLProviderResult;
}
}
}
}
private static GetSAMLProviderResultStaxUnmarshaller instance;
public static GetSAMLProviderResultStaxUnmarshaller getInstance() {
if (instance == null)
instance = new GetSAMLProviderResultStaxUnmarshaller();
return instance;
}
}
|
apache-2.0
|
delebash/orientdb-parent
|
tests/src/test/java/com/orientechnologies/orient/test/internal/index/MVRBTreeInsertionSpeedTest.java
|
2162
|
package com.orientechnologies.orient.test.internal.index;
import org.testng.annotations.Test;
import com.orientechnologies.common.test.SpeedTestMonoThread;
import com.orientechnologies.common.util.MersenneTwisterFast;
import com.orientechnologies.orient.core.config.OGlobalConfiguration;
import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx;
import com.orientechnologies.orient.core.id.OClusterPositionLong;
import com.orientechnologies.orient.core.id.ORecordId;
import com.orientechnologies.orient.core.index.OIndexUnique;
import com.orientechnologies.orient.core.index.OSimpleKeyIndexDefinition;
import com.orientechnologies.orient.core.metadata.schema.OType;
/**
* @author Andrey Lomakin
* @author Luca Garulli
* @since 30.01.13
*/
public class MVRBTreeInsertionSpeedTest extends SpeedTestMonoThread {
private ODatabaseDocumentTx databaseDocumentTx;
private OIndexUnique index;
private MersenneTwisterFast random = new MersenneTwisterFast();
public MVRBTreeInsertionSpeedTest() {
super(5000000);
}
@Override
@Test(enabled = false)
public void init() throws Exception {
OGlobalConfiguration.NON_TX_CLUSTERS_SYNC_IMMEDIATELY.setValue("");
OGlobalConfiguration.INDEX_MANUAL_LAZY_UPDATES.setValue(10000);
String buildDirectory = System.getProperty("buildDirectory", ".");
if (buildDirectory == null)
buildDirectory = ".";
databaseDocumentTx = new ODatabaseDocumentTx("local:" + buildDirectory + "/uniqueHashIndexTest");
if (databaseDocumentTx.exists()) {
databaseDocumentTx.open("admin", "admin");
databaseDocumentTx.drop();
}
databaseDocumentTx.create();
index = (OIndexUnique) databaseDocumentTx.getMetadata().getIndexManager()
.createIndex("mvrbtreeIndexTest", "UNIQUE", new OSimpleKeyIndexDefinition(OType.STRING), new int[0], null, null);
}
@Override
@Test(enabled = false)
public void cycle() throws Exception {
String key = "bsadfasfas" + random.nextInt();
index.put(key, new ORecordId(0, new OClusterPositionLong(0)));
}
@Override
public void deinit() throws Exception {
databaseDocumentTx.close();
}
}
|
apache-2.0
|
zzyakenzz/3000-words
|
app/src/main/java/qlearn/com/quang/english/customview/TextRoboThin.java
|
556
|
package qlearn.com.quang.english.customview;
import android.content.Context;
import android.graphics.Color;
import android.util.AttributeSet;
import android.widget.TextView;
import com.elmargomez.typer.Font;
import com.elmargomez.typer.Typer;
/**
* Created by Administrator on 10/02/2016.
*/
public class TextRoboThin extends TextView {
public TextRoboThin(Context context, AttributeSet attrs) {
super(context, attrs);
this.setTypeface(Typer.set(context).getFont(Font.ROBOTO_THIN));
this.setTextColor(Color.BLACK);
}
}
|
apache-2.0
|
Laurawly/tvm-1
|
tests/python/contrib/test_cmsisnn/utils.py
|
6780
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""CMSIS-NN functions for testing networks"""
import platform
import math
import numpy as np
import pytest
from typing import List, Dict, Optional, Any, Union, Tuple
import tvm
from tvm import relay
def skip_if_no_reference_system(func):
return pytest.mark.skipif(
platform.machine() == "i686", reason="Reference system unavailable in i386 container"
)(func)
def count_num_calls(mod):
"""Counts number of CallNode(s) in the IRModule"""
class CallCounter(relay.ExprVisitor):
def __init__(self):
super().__init__()
self.count = 0
def visit_call(self, call):
if isinstance(call.op, tvm.ir.Op):
self.count += 1
super().visit_call(call)
counter = CallCounter()
for var in mod.get_global_vars():
counter.visit(mod[var.name_hint])
return counter.count
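# Illustrative usage sketch (not part of the original file): the Relay builder calls
# below are assumptions used only for demonstration. A module that wraps a single
# `add` operator should yield a call count of 1.
def _example_count_num_calls():
    """Hypothetical example, never invoked by the tests themselves."""
    x = relay.var("x", shape=(1, 4), dtype="int8")
    mod = make_module(relay.add(x, x))
    return count_num_calls(mod)  # expected: 1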
def get_range_for_dtype_str(dtype):
"""
    Produces the min and max for a given data type.
Parameters
----------
dtype : str
a type string (e.g., int8)
Returns
-------
type_info.min : int
the minimum of the range
type_info.max : int
the maximum of the range
"""
try:
type_info = np.iinfo(dtype)
except ValueError:
type_info = np.finfo(dtype)
return type_info.min, type_info.max
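# Illustrative note (not part of the original file): the helper simply reflects numpy's
# type info, e.g. get_range_for_dtype_str("int8") returns (-128, 127) and
# get_range_for_dtype_str("uint8") returns (0, 255).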
def make_module(func):
"""Creates IRModule from Function"""
func = relay.Function(relay.analysis.free_vars(func), func)
mod = tvm.IRModule.from_expr(func)
mod = relay.transform.InferType()(mod)
return mod
def get_same_padding(in_shape, kernel, dilation, stride):
"""
Provides CMSIS-NN padding when output dim == input dim.
This is TFLu's "SAME" padding case.
"""
dilated_kernel_h = dilation[0] * (kernel[0] - 1) + 1
out = int(math.ceil(float(in_shape[0]) / float(stride[0])))
pad = max(0, (out - 1) * stride[0] + dilated_kernel_h - in_shape[0])
pad_top = pad // 2
pad_bottom = pad - pad_top
dilated_kernel_w = dilation[1] * (kernel[1] - 1) + 1
out = int(math.ceil(float(in_shape[1]) / float(stride[1])))
pad = max(0, (out - 1) * stride[1] + dilated_kernel_w - in_shape[1])
pad_left = pad // 2
pad_right = pad - pad_left
return [pad_top, pad_left, pad_bottom, pad_right]
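# Worked example (not part of the original file): for an 8x8 input, a 3x3 kernel,
# dilation (1, 1) and stride (1, 1), the dilated kernel size is 3, the output size is
# ceil(8 / 1) = 8 and the total padding per axis is max(0, 7 * 1 + 3 - 8) = 2, split 1/1,
# so get_same_padding((8, 8), (3, 3), (1, 1), (1, 1)) == [1, 1, 1, 1].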
def get_conv2d_qnn_params(
weight_shape: List[int],
input_scale: float,
input_zp: int,
weights_scale: Union[float, List[float]],
weights_zp: int,
input_dtype: str = "int8",
weights_dtype: str = "int8",
output_dtype: str = "int8",
is_depthwise: bool = False,
) -> Tuple[float, int]:
"""
    Calculate the output quantization parameters for convolution based on the input and
    weights quantization parameters and the data types.
Parameters
----------
weight_shape : List[int]
shape of the weights
input_scale : float
scale of the input tensor
input_zp : int
zero point of the input tensor
weights_scale : Union[float, List[float]]
scale(s) of the weights tensor
weights_zp : int
zero point of the weights tensor
is_depthwise : bool
whether it is a depthwise convolution
input_dtype : str
data type of the input tensor
weights_dtype : str
data type of the weights tensor
output_dtype : str
data type of the output tensor
Returns
-------
output_scale : float
scale of the output tensor
output_zp : int
zero point of the output tensor
"""
input_dtype_min, input_dtype_max = get_range_for_dtype_str(input_dtype)
input_max = input_scale * (input_dtype_max - input_zp)
input_min = input_scale * (input_dtype_min - input_zp)
weights_dtype_min, weights_dtype_max = get_range_for_dtype_str(weights_dtype)
weights_sc_max = np.max(weights_scale)
weights_max = weights_sc_max * (weights_dtype_max - weights_zp)
weights_sc_min = np.min(weights_scale)
weights_min = weights_sc_min * (weights_dtype_min - weights_zp)
weights_h = weight_shape[1]
weights_w = weight_shape[2]
channels = weight_shape[3]
num_elements = weights_h * weights_w * channels
# Adjust the result if it is a depthwise convolution
if is_depthwise:
num_elements = num_elements / channels
# The smallest and largest possible values in the unquantized output tensor
output_limits = [
weights_max * input_max * num_elements,
weights_min * input_max * num_elements,
weights_min * input_min * num_elements,
weights_max * input_min * num_elements,
]
output_max = max(output_limits)
output_min = min(output_limits)
output_dtype_min, output_dtype_max = get_range_for_dtype_str(output_dtype)
output_scale = (output_max - output_min) / (output_dtype_max - output_dtype_min)
output_zp = int(output_dtype_min - (output_min / output_scale))
return output_scale, output_zp
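# Illustrative usage sketch (not part of the original file). The quantization numbers
# below are made up; the helper derives an output (scale, zero_point) pair wide enough
# to represent any accumulation over the given int8 input/weight ranges.
def _example_conv2d_qnn_params():
    """Hypothetical example, never invoked by the tests themselves."""
    return get_conv2d_qnn_params(
        weight_shape=[8, 3, 3, 3],  # OHWI-style layout assumed purely for illustration
        input_scale=0.0235,
        input_zp=-128,
        weights_scale=0.005,
        weights_zp=0,
    )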
def make_qnn_relu(expr, fused_activation_fn, scale, zero_point, dtype):
"""Mimics convert_qnn_fused_activation_function from TFLite frontend"""
quantize = lambda x: float(int(round(x / scale)) + zero_point)
# Get min/max of the output dtype. This will be used to ensure that clip a_min/a_max are not
# beyond the dtype range.
qmin, qmax = get_range_for_dtype_str(dtype)
# The input expr is a quantized tensor with its scale and zero point. We calculate the
# suitable clip off points based on these scale and zero point.
if fused_activation_fn == "NONE":
return expr
if fused_activation_fn == "RELU6":
return tvm.relay.op.clip(expr, a_min=max(qmin, quantize(0)), a_max=min(qmax, quantize(6.0)))
if fused_activation_fn == "RELU_N1_TO_1":
return tvm.relay.op.clip(
expr, a_min=max(qmin, quantize(-1.0)), a_max=min(qmax, quantize(1.0))
)
if fused_activation_fn == "RELU":
return tvm.relay.op.clip(expr, a_min=max(qmin, quantize(0.0)), a_max=qmax)
|
apache-2.0
|
mattxia/spring-2.5-analysis
|
samples/petclinic/src/org/springframework/samples/petclinic/web/EditPetForm.java
|
1957
|
package org.springframework.samples.petclinic.web;
import java.util.Collection;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.samples.petclinic.Clinic;
import org.springframework.samples.petclinic.Pet;
import org.springframework.samples.petclinic.PetType;
import org.springframework.samples.petclinic.validation.PetValidator;
import org.springframework.stereotype.Controller;
import org.springframework.ui.ModelMap;
import org.springframework.validation.BindingResult;
import org.springframework.web.bind.annotation.ModelAttribute;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.SessionAttributes;
import org.springframework.web.bind.support.SessionStatus;
/**
* JavaBean Form controller that is used to edit an existing <code>Pet</code>.
*
* @author Juergen Hoeller
* @author Ken Krebs
*/
@Controller
@RequestMapping("/editPet.do")
@SessionAttributes("pet")
public class EditPetForm {
private final Clinic clinic;
@Autowired
public EditPetForm(Clinic clinic) {
this.clinic = clinic;
}
@ModelAttribute("types")
public Collection<PetType> populatePetTypes() {
return this.clinic.getPetTypes();
}
@RequestMapping(method = RequestMethod.GET)
public String setupForm(@RequestParam("petId") int petId, ModelMap model) {
Pet pet = this.clinic.loadPet(petId);
model.addAttribute("pet", pet);
return "petForm";
}
@RequestMapping(method = RequestMethod.POST)
public String processSubmit(@ModelAttribute("pet") Pet pet, BindingResult result, SessionStatus status) {
new PetValidator().validate(pet, result);
if (result.hasErrors()) {
return "petForm";
}
else {
this.clinic.storePet(pet);
status.setComplete();
return "redirect:owner.do?ownerId=" + pet.getOwner().getId();
}
}
}
|
apache-2.0
|
xiaobao-zhaoyiyang/coolweather
|
app/src/main/java/com/coolweather/receiver/AutoUpdateReceiver.java
|
542
|
package com.coolweather.receiver;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import com.coolweather.service.AutoUpdateService;
/**
* Created by yo on 2016/6/17.
*/
public class AutoUpdateReceiver extends BroadcastReceiver {
@Override
public void onReceive(Context context, Intent intent) {
if (!intent.getAction().equals("Weather"))
return;
Intent i = new Intent(context, AutoUpdateService.class);
context.startService(i);
}
}
|
apache-2.0
|
huggingface/pytorch-transformers
|
src/transformers/models/bigbird_pegasus/modeling_bigbird_pegasus.py
|
140300
|
# coding=utf-8
# Copyright 2021 Google Research The HuggingFace Inc. team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" PyTorch BigBirdPegasus model. """
import copy
import math
import random
from typing import Optional, Tuple
import numpy as np
import torch
import torch.nn.functional as F
from torch import nn
from torch.nn import CrossEntropyLoss, MSELoss
from ...activations import ACT2FN
from ...file_utils import (
add_code_sample_docstrings,
add_end_docstrings,
add_start_docstrings,
add_start_docstrings_to_model_forward,
replace_return_docstrings,
)
from ...modeling_outputs import (
BaseModelOutput,
BaseModelOutputWithPastAndCrossAttentions,
CausalLMOutputWithCrossAttentions,
Seq2SeqLMOutput,
Seq2SeqModelOutput,
Seq2SeqQuestionAnsweringModelOutput,
Seq2SeqSequenceClassifierOutput,
)
from ...modeling_utils import PreTrainedModel
from ...utils import logging
from .configuration_bigbird_pegasus import BigBirdPegasusConfig
logger = logging.get_logger(__name__)
_CHECKPOINT_FOR_DOC = "google/bigbird-pegasus-large-arxiv"
_CONFIG_FOR_DOC = "BigBirdPegasusConfig"
_TOKENIZER_FOR_DOC = "PegasusTokenizer"
BIGBIRD_PEGASUS_PRETRAINED_MODEL_ARCHIVE_LIST = [
"google/bigbird-pegasus-large-arxiv",
"google/bigbird-pegasus-large-pubmed",
"google/bigbird-pegasus-large-bigpatent",
# See all BigBirdPegasus models at https://huggingface.co/models?filter=bigbird_pegasus
]
def shift_tokens_right(input_ids: torch.Tensor, pad_token_id: int, decoder_start_token_id: int):
"""
Shift input ids one token to the right.
"""
shifted_input_ids = input_ids.new_zeros(input_ids.shape)
shifted_input_ids[:, 1:] = input_ids[:, :-1].clone()
shifted_input_ids[:, 0] = decoder_start_token_id
assert pad_token_id is not None, "self.model.config.pad_token_id has to be defined."
# replace possible -100 values in labels by `pad_token_id`
shifted_input_ids.masked_fill_(shifted_input_ids == -100, pad_token_id)
return shifted_input_ids
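# Illustrative note (not part of the original file): with pad_token_id=0 and
# decoder_start_token_id=2, the row [5, 6, 7] becomes [2, 5, 6]; any -100 label values
# carried over into the shifted tensor are replaced by the pad token.
# shift_tokens_right(torch.tensor([[5, 6, 7]]), 0, 2) -> tensor([[2, 5, 6]])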
def _make_causal_mask(input_ids_shape: torch.Size, dtype: torch.dtype, past_key_values_length: int = 0):
"""
    Make causal mask used for uni-directional (decoder) self-attention.
"""
bsz, tgt_len = input_ids_shape
mask = torch.full((tgt_len, tgt_len), float("-inf"))
mask_cond = torch.arange(mask.size(-1))
mask.masked_fill_(mask_cond < (mask_cond + 1).view(mask.size(-1), 1), 0)
mask = mask.to(dtype)
if past_key_values_length > 0:
mask = torch.cat([torch.zeros(tgt_len, past_key_values_length, dtype=dtype), mask], dim=-1)
return mask[None, None, :, :].expand(bsz, 1, tgt_len, tgt_len + past_key_values_length)
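# Illustrative note (not part of the original file): for tgt_len = 3 and no cached keys,
# the underlying 3x3 mask lets position i attend only to positions <= i (0 kept,
# -inf masked):
#   [[0, -inf, -inf],
#    [0,    0, -inf],
#    [0,    0,    0]]
# before being expanded to [bsz, 1, 3, 3].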
def _expand_mask(mask: torch.Tensor, dtype: torch.dtype, tgt_len: Optional[int] = None):
"""
Expands attention_mask from `[bsz, seq_len]` to `[bsz, 1, tgt_seq_len, src_seq_len]`.
"""
bsz, src_len = mask.size()
tgt_len = tgt_len if tgt_len is not None else src_len
expanded_mask = mask[:, None, None, :].expand(bsz, 1, tgt_len, src_len).to(dtype)
inverted_mask = 1.0 - expanded_mask
return inverted_mask.masked_fill(inverted_mask.bool(), torch.finfo(dtype).min)
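# Illustrative note (not part of the original file): a padding mask [[1, 1, 0]] is
# expanded to [1, 1, tgt_len, 3] and inverted, so the padded position receives the most
# negative value representable in `dtype` while attended positions stay at 0.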
class BigBirdPegasusLearnedPositionalEmbedding(nn.Embedding):
"""
This module learns positional embeddings up to a fixed maximum size.
"""
def __init__(self, num_embeddings: int, embedding_dim: int):
super().__init__(num_embeddings, embedding_dim)
def forward(self, input_ids_shape: torch.Size, past_key_values_length: int = 0):
"""`input_ids_shape` is expected to be [bsz x seqlen]."""
bsz, seq_len = input_ids_shape[:2]
positions = torch.arange(
past_key_values_length, past_key_values_length + seq_len, dtype=torch.long, device=self.weight.device
)
return super().forward(positions)
# Copied from transformers.models.big_bird.modeling_big_bird.BigBirdSelfAttention with BigBird->BigBirdPegasus
class BigBirdPegasusSelfAttention(nn.Module):
def __init__(self, config):
super().__init__()
if config.hidden_size % config.num_attention_heads != 0 and not hasattr(config, "embedding_size"):
raise ValueError(
f"The hidden size ({config.hidden_size}) is not a multiple of the number of attention "
f"heads ({config.num_attention_heads})"
)
self.num_attention_heads = config.num_attention_heads
self.attention_head_size = int(config.hidden_size / config.num_attention_heads)
self.all_head_size = self.num_attention_heads * self.attention_head_size
self.query = nn.Linear(config.hidden_size, self.all_head_size, bias=config.use_bias)
self.key = nn.Linear(config.hidden_size, self.all_head_size, bias=config.use_bias)
self.value = nn.Linear(config.hidden_size, self.all_head_size, bias=config.use_bias)
self.dropout = nn.Dropout(config.attention_probs_dropout_prob)
self.is_decoder = config.is_decoder
def transpose_for_scores(self, x):
new_x_shape = x.size()[:-1] + (self.num_attention_heads, self.attention_head_size)
x = x.view(*new_x_shape)
return x.permute(0, 2, 1, 3)
def forward(
self,
hidden_states,
attention_mask=None,
head_mask=None,
encoder_hidden_states=None,
encoder_attention_mask=None,
past_key_value=None,
output_attentions=False,
):
mixed_query_layer = self.query(hidden_states)
# If this is instantiated as a cross-attention module, the keys
# and values come from an encoder; the attention mask needs to be
# such that the encoder's padding tokens are not attended to.
is_cross_attention = encoder_hidden_states is not None
if is_cross_attention and past_key_value is not None:
# reuse k,v, cross_attentions
key_layer = past_key_value[0]
value_layer = past_key_value[1]
attention_mask = encoder_attention_mask
elif is_cross_attention:
key_layer = self.transpose_for_scores(self.key(encoder_hidden_states))
value_layer = self.transpose_for_scores(self.value(encoder_hidden_states))
attention_mask = encoder_attention_mask
elif past_key_value is not None:
key_layer = self.transpose_for_scores(self.key(hidden_states))
value_layer = self.transpose_for_scores(self.value(hidden_states))
key_layer = torch.cat([past_key_value[0], key_layer], dim=2)
value_layer = torch.cat([past_key_value[1], value_layer], dim=2)
else:
key_layer = self.transpose_for_scores(self.key(hidden_states))
value_layer = self.transpose_for_scores(self.value(hidden_states))
query_layer = self.transpose_for_scores(mixed_query_layer)
if self.is_decoder:
# if cross_attention save Tuple(torch.Tensor, torch.Tensor) of all cross attention key/value_states.
# Further calls to cross_attention layer can then reuse all cross-attention
# key/value_states (first "if" case)
# if uni-directional self-attention (decoder) save Tuple(torch.Tensor, torch.Tensor) of
# all previous decoder key/value_states. Further calls to uni-directional self-attention
# can concat previous decoder key/value_states to current projected key/value_states (third "elif" case)
# if encoder bi-directional self-attention `past_key_value` is always `None`
past_key_value = (key_layer, value_layer)
# Take the dot product between "query" and "key" to get the raw attention scores.
attention_scores = torch.matmul(query_layer, key_layer.transpose(-1, -2))
attention_scores = attention_scores / math.sqrt(self.attention_head_size)
if attention_mask is not None:
# Apply the attention mask is (precomputed for all layers in BigBirdPegasusModel forward() function)
attention_scores = attention_scores + attention_mask
# Normalize the attention scores to probabilities.
attention_probs = F.softmax(attention_scores, dim=-1)
# This is actually dropping out entire tokens to attend to, which might
# seem a bit unusual, but is taken from the original Transformer paper.
attention_probs = self.dropout(attention_probs)
# Mask heads if we want to
if head_mask is not None:
attention_probs = attention_probs * head_mask
context_layer = torch.matmul(attention_probs, value_layer)
context_layer = context_layer.permute(0, 2, 1, 3).contiguous()
new_context_layer_shape = context_layer.size()[:-2] + (self.all_head_size,)
context_layer = context_layer.view(*new_context_layer_shape)
outputs = (context_layer, attention_probs) if output_attentions else (context_layer,)
if self.is_decoder:
outputs = outputs + (past_key_value,)
return outputs
# Copied from transformers.models.big_bird.modeling_big_bird.BigBirdBlockSparseAttention with BigBird->BigBirdPegasus
class BigBirdPegasusBlockSparseAttention(nn.Module):
def __init__(self, config, seed=None):
super().__init__()
self.max_seqlen = config.max_position_embeddings
self.seed = seed
if config.hidden_size % config.num_attention_heads != 0:
raise ValueError(
f"The hidden size {config.hidden_size} is not a multiple of the number of attention "
f"heads {config.num_attention_heads}."
)
self.num_attention_heads = config.num_attention_heads
self.num_random_blocks = config.num_random_blocks
self.block_size = config.block_size
self.attention_head_size = int(config.hidden_size / config.num_attention_heads)
self.all_head_size = self.num_attention_heads * self.attention_head_size
self.query = nn.Linear(config.hidden_size, self.all_head_size, bias=config.use_bias)
self.key = nn.Linear(config.hidden_size, self.all_head_size, bias=config.use_bias)
self.value = nn.Linear(config.hidden_size, self.all_head_size, bias=config.use_bias)
def transpose_for_scores(self, x):
new_x_shape = x.size()[:-1] + (self.num_attention_heads, self.attention_head_size)
x = x.view(*new_x_shape)
return x.permute(0, 2, 1, 3)
def forward(
self,
hidden_states,
band_mask=None,
from_mask=None,
to_mask=None,
from_blocked_mask=None,
to_blocked_mask=None,
output_attentions=None,
):
# Currently this `class` can't be used in decoder.
batch_size, seqlen, _ = hidden_states.size()
to_seq_length = from_seq_length = seqlen
from_block_size = to_block_size = self.block_size
assert from_seq_length % from_block_size == 0, "Query sided sequence length must be multiple of block size"
assert to_seq_length % to_block_size == 0, "Key/Value sided sequence length must be multiple of block size"
query_layer = self.transpose_for_scores(self.query(hidden_states))
key_layer = self.transpose_for_scores(self.key(hidden_states))
value_layer = self.transpose_for_scores(self.value(hidden_states))
context_layer, attention_probs = self.bigbird_block_sparse_attention(
query_layer,
key_layer,
value_layer,
band_mask,
from_mask,
to_mask,
from_blocked_mask,
to_blocked_mask,
self.num_attention_heads,
self.num_random_blocks,
self.attention_head_size,
from_block_size,
to_block_size,
batch_size,
from_seq_length,
to_seq_length,
seed=self.seed,
plan_from_length=None,
plan_num_rand_blocks=None,
output_attentions=output_attentions,
)
context_layer = context_layer.contiguous().view(batch_size, from_seq_length, -1)
outputs = (context_layer, attention_probs) if output_attentions else (context_layer,)
return outputs
@staticmethod
def torch_bmm_nd(inp_1, inp_2, ndim=None):
"""Fast nd matrix multiplication"""
# faster replacement of torch.einsum ("bhqk,bhkd->bhqd")
return torch.bmm(inp_1.reshape((-1,) + inp_1.shape[-2:]), inp_2.reshape((-1,) + inp_2.shape[-2:])).view(
inp_1.shape[: ndim - 2] + (inp_1.shape[ndim - 2], inp_2.shape[ndim - 1])
)
@staticmethod
def torch_bmm_nd_transpose(inp_1, inp_2, ndim=None):
"""Fast nd matrix multiplication with transpose"""
# faster replacement of torch.einsum (bhqd,bhkd->bhqk)
return torch.bmm(
inp_1.reshape((-1,) + inp_1.shape[-2:]), inp_2.reshape((-1,) + inp_2.shape[-2:]).transpose(1, 2)
).view(inp_1.shape[: ndim - 2] + (inp_1.shape[ndim - 2], inp_2.shape[ndim - 2]))
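    # Illustrative note (not part of the original file): both helpers above flatten the
    # leading batch/head dimensions so a single torch.bmm call can replace an einsum.
    # With inp_1 of shape [b, h, q, d] and inp_2 of shape [b, h, k, d],
    # torch_bmm_nd_transpose(inp_1, inp_2, ndim=4) returns a [b, h, q, k] tensor,
    # matching torch.einsum("bhqd,bhkd->bhqk", inp_1, inp_2).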
def bigbird_block_sparse_attention(
self,
query_layer,
key_layer,
value_layer,
band_mask,
from_mask,
to_mask,
from_blocked_mask,
to_blocked_mask,
n_heads,
n_rand_blocks,
attention_head_size,
from_block_size,
to_block_size,
batch_size,
from_seq_len,
to_seq_len,
seed,
plan_from_length,
plan_num_rand_blocks,
output_attentions,
):
# BigBirdPegasus block-sparse attention as suggested in paper
# ITC:
# global tokens: 2 x block_size
# window tokens: 3 x block_size
# random tokens: num_rand_tokens x block_size
# ETC:
# global tokens: extra_globals_tokens + 2 x block_size
# window tokens: 3 x block_size
# random tokens: num_rand_tokens x block_size
# Note:
# 1) Currently, ETC is not supported.
# 2) Window size is fixed to 3 blocks & it can be changed only by
# changing `block_size`.
        # 3) The number of global blocks is fixed (2 blocks here) & global tokens can be
        # controlled only by `block_size`.
        # attention is calculated separately for q[0], q[1], q[2:-2], q[-2], q[-1] in order to use a special trick of shifting tokens (for calculating sliding attention)
        # hence the following code can be divided into 5 parts.
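        # Worked token-budget example (not part of the original file): with block_size = 64
        # and num_random_blocks = 3, each middle query block attends to 2 * 64 = 128 global
        # tokens, 3 * 64 = 192 sliding-window tokens and 3 * 64 = 192 random tokens,
        # i.e. 512 keys instead of the full sequence.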
if from_seq_len // from_block_size != to_seq_len // to_block_size:
raise ValueError("Error the number of blocks needs to be same!")
rsqrt_d = 1 / math.sqrt(attention_head_size)
bsz = batch_size
attn_mask_penalty = -10000.0
# generate random attention and corresponding masks
np.random.seed(seed)
if from_seq_len in [1024, 3072, 4096]: # old plans used in paper
rand_attn = [
self._bigbird_block_rand_mask(
self.max_seqlen, self.max_seqlen, from_block_size, to_block_size, n_rand_blocks, last_idx=1024
)[: (from_seq_len // from_block_size - 2)]
for _ in range(n_heads)
]
else:
if plan_from_length is None:
plan_from_length, plan_num_rand_blocks = self._get_rand_attn_plan(
from_seq_len, from_block_size, n_rand_blocks
)
rand_attn = self._bigbird_block_rand_mask_with_head(
from_seq_length=from_seq_len,
to_seq_length=to_seq_len,
from_block_size=from_block_size,
to_block_size=to_block_size,
num_heads=n_heads,
plan_from_length=plan_from_length,
plan_num_rand_blocks=plan_num_rand_blocks,
)
rand_attn = np.stack(rand_attn, axis=0)
rand_attn = torch.tensor(rand_attn, device=query_layer.device, dtype=torch.long)
rand_attn.unsqueeze_(0)
rand_attn = torch.cat([rand_attn for _ in range(batch_size)], dim=0)
rand_mask = self._create_rand_mask_from_inputs(
from_blocked_mask, to_blocked_mask, rand_attn, n_heads, n_rand_blocks, bsz, from_seq_len, from_block_size
)
blocked_query_matrix = query_layer.view(bsz, n_heads, from_seq_len // from_block_size, from_block_size, -1)
blocked_key_matrix = key_layer.view(bsz, n_heads, to_seq_len // to_block_size, to_block_size, -1)
blocked_value_matrix = value_layer.view(bsz, n_heads, to_seq_len // to_block_size, to_block_size, -1)
# preparing block for randn attn
gathered_key = self.torch_gather_b2(blocked_key_matrix, rand_attn)
gathered_key = gathered_key.view(
bsz, n_heads, to_seq_len // to_block_size - 2, n_rand_blocks * to_block_size, -1
) # [bsz, n_heads, to_seq_len//to_block_size-2, n_rand_blocks, to_block_size, -1]
gathered_value = self.torch_gather_b2(blocked_value_matrix, rand_attn)
gathered_value = gathered_value.view(
bsz, n_heads, to_seq_len // to_block_size - 2, n_rand_blocks * to_block_size, -1
) # [bsz, n_heads, to_seq_len//to_block_size-2, n_rand_blocks, to_block_size, -1]
# 1st PART
# 1st block (global block) attention scores
# q[0] x (k[0], k[1], k[2], k[3], k[4] .... )
# [bsz, n_heads, from_block_size, -1] x [bsz, n_heads, to_seq_len, -1] ==> [bsz, n_heads, from_block_size, to_seq_len]
first_product = self.torch_bmm_nd_transpose(blocked_query_matrix[:, :, 0], key_layer, ndim=4)
first_product = first_product * rsqrt_d
first_product += (1.0 - to_mask) * attn_mask_penalty
first_attn_weights = F.softmax(first_product, dim=-1) # [bsz, n_heads, from_block_size, to_seq_len]
# [bsz, n_heads, from_block_size, to_seq_len] x [bsz, n_heads, to_seq_len, -1] ==> [bsz, n_heads, from_block_size, -1]
first_context_layer = self.torch_bmm_nd(first_attn_weights, value_layer, ndim=4)
first_context_layer.unsqueeze_(2)
# 2nd PART
# 2nd block attention scores
# q[1] x (sliding_keys, random_keys, global_keys)
# sliding key blocks -> 2nd, 3rd blocks
# global key blocks -> 1st block
second_key_mat = torch.cat(
[
blocked_key_matrix[:, :, 0],
blocked_key_matrix[:, :, 1],
blocked_key_matrix[:, :, 2],
blocked_key_matrix[:, :, -1],
gathered_key[:, :, 0],
],
dim=2,
) # [bsz, n_heads, (4+n_rand_blocks)*to_block_size, -1]
second_value_mat = torch.cat(
[
blocked_value_matrix[:, :, 0],
blocked_value_matrix[:, :, 1],
blocked_value_matrix[:, :, 2],
blocked_value_matrix[:, :, -1],
gathered_value[:, :, 0],
],
dim=2,
) # [bsz, n_heads, (4+n_rand_blocks)*to_block_size, -1]
# [bsz, n_heads, from_block_size, -1] x [bsz, n_heads, (4+n_rand_blocks)*to_block_size, -1] ==> [bsz, n_heads, from_block_size, (4+n_rand_blocks)*to_block_size]
second_product = self.torch_bmm_nd_transpose(blocked_query_matrix[:, :, 1], second_key_mat, ndim=4)
second_seq_pad = torch.cat(
[
to_mask[:, :, :, : 3 * to_block_size],
to_mask[:, :, :, -to_block_size:],
to_mask.new_ones([bsz, 1, 1, n_rand_blocks * to_block_size]),
],
dim=3,
)
second_rand_pad = torch.cat(
[
rand_mask.new_ones([bsz, n_heads, from_block_size, 4 * to_block_size]),
rand_mask[:, :, 0],
],
dim=3,
)
second_product = second_product * rsqrt_d
second_product += (1.0 - torch.minimum(second_seq_pad, second_rand_pad)) * attn_mask_penalty
second_attn_weights = F.softmax(
second_product, dim=-1
) # [bsz, n_heads, from_block_size, (4+n_rand_blocks)*to_block_size]
# [bsz, n_heads, from_block_size, (4+n_rand_blocks)*to_block_size] x [bsz, n_heads, (4+n_rand_blocks)*to_block_size, -1] ==> [bsz, n_heads, from_block_size, -1]
second_context_layer = self.torch_bmm_nd(second_attn_weights, second_value_mat, ndim=4)
second_context_layer.unsqueeze_(2)
# 3rd PART
# Middle blocks attention scores
# q[-2:2] x (sliding_keys, random_keys, global_keys)
# sliding attn is calculated using special trick of shifting tokens as discussed in paper
# random keys are generated by taking random indices as per `rand_attn`
# global keys -> 1st & last block
exp_blocked_key_matrix = torch.cat(
[blocked_key_matrix[:, :, 1:-3], blocked_key_matrix[:, :, 2:-2], blocked_key_matrix[:, :, 3:-1]], dim=3
) # [bsz, n_heads, from_seq_len//from_block_size-4, 3*to_block_size, -1]
exp_blocked_value_matrix = torch.cat(
[blocked_value_matrix[:, :, 1:-3], blocked_value_matrix[:, :, 2:-2], blocked_value_matrix[:, :, 3:-1]],
dim=3,
) # [bsz, n_heads, from_seq_len//from_block_size-4, 3*to_block_size, -1]
middle_query_matrix = blocked_query_matrix[:, :, 2:-2]
# sliding attention scores for q[-2:2]
# [bsz, n_heads, from_seq_len//from_block_size-4, from_block_size, -1] x [b, n_heads, from_seq_len//from_block_size-4, 3*to_block_size, -1]
inner_band_product = self.torch_bmm_nd_transpose(middle_query_matrix, exp_blocked_key_matrix, ndim=5)
# ==> [bsz, n_heads, from_seq_len//from_block_size-4, from_block_size, 3*to_block_size]
inner_band_product = inner_band_product * rsqrt_d
# randn attention scores for q[-2:2]
# [bsz, n_heads, from_seq_len//from_block_size-4, from_block_size, -1] x [bsz, n_heads, from_seq_len//from_block_size-4, n_rand_blocks*to_block_size, -1]
rand_band_product = self.torch_bmm_nd_transpose(middle_query_matrix, gathered_key[:, :, 1:-1], ndim=5)
# ==> [bsz, n_heads, from_seq_len//from_block_size-4, from_block_size, n_rand_blocks*to_block_size]
rand_band_product = rand_band_product * rsqrt_d
# Including 1st block (since it's global)
first_band_product = torch.einsum(
"bhlqd,bhkd->bhlqk", middle_query_matrix, blocked_key_matrix[:, :, 0]
) # [bsz, n_heads, from_seq_len//from_block_size-4, from_block_size, -1] x [bsz, n_heads, to_block_size, -1] ==> [bsz, n_heads, from_seq_len//from_block_size-4, from_block_size, to_block_size]
first_band_product = first_band_product * rsqrt_d
# Including last block (since it's global)
last_band_product = torch.einsum(
"bhlqd,bhkd->bhlqk", middle_query_matrix, blocked_key_matrix[:, :, -1]
) # [bsz, n_heads, from_seq_len//from_block_size-4, from_block_size, -1] x [bsz, n_heads, to_block_size, -1] ==> [bsz, n_heads, from_seq_len//from_block_size-4, from_block_size, to_block_size]
last_band_product = last_band_product * rsqrt_d
# masking padded tokens
inner_band_product += (1.0 - band_mask) * attn_mask_penalty
first_band_product += (1.0 - to_mask[:, :, :, :to_block_size].unsqueeze(3)) * attn_mask_penalty
last_band_product += (1.0 - to_mask[:, :, :, -to_block_size:].unsqueeze(3)) * attn_mask_penalty
rand_band_product += (1.0 - rand_mask[:, :, 1:-1]) * attn_mask_penalty
# completing attention scores matrix for all q[-2:2]
band_product = torch.cat(
[first_band_product, inner_band_product, rand_band_product, last_band_product], dim=-1
) # [bsz, n_heads, from_seq_len//from_block_size-4, from_block_size, (5+n_rand_blocks)*to_block_size]
# safely doing softmax since attention matrix is completed
attn_weights = F.softmax(
band_product, dim=-1
) # [bsz, n_heads, from_seq_len//from_block_size-4, from_block_size, (5+n_rand_blocks)*to_block_size]
# contribution of sliding keys
# [bsz, n_heads, m//from_block_size-4, from_block_size, 3*to_block_size] x [bsz, n_heads, from_seq_len//from_block_size-4, 3*to_block_size, -1]
context_layer = self.torch_bmm_nd(
attn_weights[:, :, :, :, to_block_size : 4 * to_block_size], exp_blocked_value_matrix, ndim=5
)
# ==> [bsz, n_heads, from_seq_len//from_block_size-4, from_block_size, -1]
# adding contribution of random keys
# [bsz, n_heads, from_seq_len//from_block_size-4, from_block_size, n_rand_blocks*to_block_size] x [bsz, n_heads, from_seq_len//from_block_size-4, n_rand_blocks*to_block_size, -1]
context_layer += self.torch_bmm_nd(
attn_weights[:, :, :, :, 4 * to_block_size : -to_block_size], gathered_value[:, :, 1:-1], ndim=5
)
# ==> [bsz, n_heads, from_seq_len//from_block_size-4, from_block_size, -1]
# adding contribution of global keys
context_layer += torch.einsum(
"bhlqk,bhkd->bhlqd", attn_weights[:, :, :, :, :to_block_size], blocked_value_matrix[:, :, 0]
) # [bsz, n_heads, from_seq_len//from_block_size-4, from_block_size, to_block_size] x [bsz, n_heads, to_block_size, -1] ==> [bsz, n_heads, from_seq_len//from_block_size-4, from_block_size, -1]
context_layer += torch.einsum(
"bhlqk,bhkd->bhlqd", attn_weights[:, :, :, :, -to_block_size:], blocked_value_matrix[:, :, -1]
) # [bsz, n_heads, from_seq_len//from_block_size-4, from_block_size, to_block_size] x [bsz, n_heads, to_block_size, -1] ==> [bsz, n_heads, from_seq_len//from_block_size-4, from_block_size, -1]
# 4th PART
# last 2nd token attention scores
# q[-2] x (sliding_keys, random_keys, global_keys)
# sliding key blocks -> last 3 blocks
# global key block -> 1st block
# random key block -> based on indices stored in `randn_attn`
second_last_key_mat = torch.cat(
[
blocked_key_matrix[:, :, 0],
blocked_key_matrix[:, :, -3],
blocked_key_matrix[:, :, -2],
blocked_key_matrix[:, :, -1],
gathered_key[:, :, -1],
],
dim=2,
) # [bsz, n_heads, (4+n_random_blocks)*to_block_size, -1]
second_last_value_mat = torch.cat(
[
blocked_value_matrix[:, :, 0],
blocked_value_matrix[:, :, -3],
blocked_value_matrix[:, :, -2],
blocked_value_matrix[:, :, -1],
gathered_value[:, :, -1],
],
dim=2,
) # [bsz, n_heads, (4+r)*to_block_size, -1]
# [bsz, n_heads, from_block_size, -1] x [bsz, n_heads, (4+n_rand_blocks)*to_block_size, -1] ==> [bsz, n_heads, from_block_size, (4+n_rand_blocks)*to_block_size]
second_last_product = self.torch_bmm_nd_transpose(blocked_query_matrix[:, :, -2], second_last_key_mat, ndim=4)
second_last_seq_pad = torch.cat(
[
to_mask[:, :, :, :to_block_size],
to_mask[:, :, :, -3 * to_block_size :],
to_mask.new_ones([bsz, 1, 1, n_rand_blocks * to_block_size]),
],
dim=3,
)
second_last_rand_pad = torch.cat(
[
rand_mask.new_ones([bsz, n_heads, from_block_size, 4 * to_block_size]),
rand_mask[:, :, -1],
],
dim=3,
)
second_last_product = second_last_product * rsqrt_d
second_last_product += (1.0 - torch.minimum(second_last_seq_pad, second_last_rand_pad)) * attn_mask_penalty
second_last_attn_weights = F.softmax(
second_last_product, dim=-1
) # [bsz, n_heads, from_block_size, (4+n_rand_blocks)*to_block_size]
# [bsz, n_heads, from_block_size, (4+n_rand_blocks)*to_block_size] x [bsz, n_heads, (4+n_rand_blocks)*to_block_size, -1] ==> [bsz, n_heads, from_block_size, -1]
second_last_context_layer = self.torch_bmm_nd(second_last_attn_weights, second_last_value_mat, ndim=4)
second_last_context_layer.unsqueeze_(2)
# 5th PART
# last block (global) attention scores
# q[-1] x (k[0], k[1], k[2], k[3], .... )
# [bsz, n_heads, from_block_size, -1] x [bsz, n_heads, to_seq_len, -1] ==> [bsz, n_heads, from_block_size, to_seq_len]
last_product = self.torch_bmm_nd_transpose(blocked_query_matrix[:, :, -1], key_layer, ndim=4)
last_product = last_product * rsqrt_d
last_product += (1.0 - to_mask) * attn_mask_penalty
last_attn_weights = F.softmax(last_product, dim=-1) # [bsz, n_heads, from_block_size, n]
# [bsz, n_heads, from_block_size, to_seq_len] x [bsz, n_heads, to_seq_len, -1] ==> [bsz, n_heads, from_block_size, -1]
last_context_layer = self.torch_bmm_nd(last_attn_weights, value_layer, ndim=4)
last_context_layer.unsqueeze_(2)
# combining representations of all tokens
context_layer = torch.cat(
[first_context_layer, second_context_layer, context_layer, second_last_context_layer, last_context_layer],
dim=2,
)
context_layer = context_layer.view((bsz, n_heads, from_seq_len, -1)) * from_mask
context_layer = torch.transpose(context_layer, 1, 2)
# this is just for visualizing; forward pass doesn't depend on following code
if output_attentions:
# TODO(PVP): need to verify if below code is correct
attention_probs = torch.zeros(
bsz, n_heads, from_seq_len, to_seq_len, dtype=torch.float, device=context_layer.device
)
# 1st query block
# corresponding to `first_context_layer`
attention_probs[:, :, :from_block_size, :] = first_attn_weights # all keys global
# 2nd query block
# corresponding to `second_context_layer`
attention_probs[:, :, from_block_size : 2 * from_block_size, : 3 * to_block_size] = second_attn_weights[
:, :, :, : 3 * to_block_size
] # 1st three key blocks (global + sliding)
attention_probs[:, :, from_block_size : 2 * from_block_size, -to_block_size:] = second_attn_weights[
:, :, :, 3 * to_block_size : 4 * to_block_size
] # last key block (global)
# random keys
for p1, i1, w1 in zip(range(bsz), rand_attn, second_attn_weights):
# p1, i1, w1 corresponds to batch_dim i.e. following operation is done for each sequence in batch
for p2, i2, w2 in zip(range(n_heads), i1, w1):
                    # p2, i2, w2 corresponds to head_dim i.e. following operation is done for each head
attn_probs_view = attention_probs.view(
bsz,
n_heads,
from_seq_len // from_block_size,
from_block_size,
to_seq_len // to_block_size,
to_block_size,
)
right_slice = w2[:, 4 * to_block_size :]
attn_probs_view[p1, p2, 1, :, i2[0]] = right_slice.view(
from_block_size, n_rand_blocks, to_block_size
)
# Middle query blocks
# corresponding to `context_layer`
# sliding keys
for q_idx in range(from_seq_len // from_block_size - 4):
attn_probs_view = attention_probs.view(
bsz,
n_heads,
from_seq_len // from_block_size,
from_block_size,
to_seq_len // to_block_size,
to_block_size,
)[:, :, 2:-2, :, 1:-1, :]
right_slice = attn_weights[:, :, q_idx, :, to_block_size : 4 * to_block_size]
attn_probs_view[:, :, q_idx, :, q_idx : q_idx + 3, :] = right_slice.view(
bsz, n_heads, from_block_size, 3, to_block_size
) # inner_band_product
# global keys (corresponding to 1st key block)
attention_probs[:, :, 2 * from_block_size : -2 * from_block_size, :to_block_size] = attn_weights[
:, :, :, :, :to_block_size
].view(
bsz, n_heads, -1, to_block_size
) # first_band_product
# global keys (corresponding to last key block)
attention_probs[:, :, 2 * from_block_size : -2 * from_block_size, -to_block_size:] = attn_weights[
:, :, :, :, -to_block_size:
].view(
bsz, n_heads, -1, to_block_size
) # last_band_product
# random keys
for p1, i1, w1 in zip(range(bsz), rand_attn, attn_weights):
# p1, i1, w1 corresponds to batch_dim i.e. following operation is done for each sequence in batch
for p2, i2, w2 in zip(range(n_heads), i1, w1):
                    # p2, i2, w2 corresponds to head_dim i.e. following operation is done for each head
for q_idx in range(1, len(i2) - 1):
attn_probs_view = attention_probs.view(
bsz,
n_heads,
from_seq_len // from_block_size,
from_block_size,
to_seq_len // to_block_size,
to_block_size,
)
right_slice = w2[q_idx - 1, :, 4 * to_block_size : -to_block_size]
attn_probs_view[p1, p2, q_idx + 1, :, i2[q_idx]] = right_slice.view(
from_block_size, n_rand_blocks, to_block_size
)
# Second-last query block
# corresponding to `second_last_context_layer`
attention_probs[:, :, -2 * from_block_size : -from_block_size, :to_block_size] = second_last_attn_weights[
:, :, :, :to_block_size
] # 1st key block (global)
attention_probs[
:, :, -2 * from_block_size : -from_block_size, -3 * to_block_size :
] = second_last_attn_weights[
:, :, :, to_block_size : 4 * to_block_size
] # last three blocks (global + sliding)
# random keys
for p1, i1, w1 in zip(range(bsz), rand_attn, second_last_attn_weights):
# p1, i1, w1 corresponds to batch_dim i.e. following operation is done for each sequence in batch
for p2, i2, w2 in zip(range(n_heads), i1, w1):
                    # p2, i2, w2 corresponds to head_dim i.e. following operation is done for each head
attn_probs_view = attention_probs.view(
bsz,
n_heads,
from_seq_len // from_block_size,
from_block_size,
to_seq_len // to_block_size,
to_block_size,
)
right_slice = w2[:, 4 * to_block_size :]
attn_probs_view[p1, p2, -2, :, i2[-1]] = right_slice.view(
from_block_size, n_rand_blocks, to_block_size
)
# last query block
# corresponding to `last_context_layer`
attention_probs[:, :, -from_block_size:, :] = last_attn_weights # all keys global
else:
attention_probs = None
return context_layer, attention_probs
@staticmethod
def torch_gather_b2(params, indices):
# this operation is equivalent to tf.gather when batch_dims=2
if params.shape[:2] != indices.shape[:2]:
raise ValueError(
f"Make sure that the first two dimensions of params and indices are identical, \
but they are params: {params.shape[:2]} vs. indices: {params.shape[:2]}"
)
num_indices_to_gather = indices.shape[-2] * indices.shape[-1]
num_indices_to_pick_from = params.shape[2]
indices_shift = (
torch.arange(indices.shape[0] * indices.shape[1] * num_indices_to_gather, device=indices.device)
// num_indices_to_gather
* num_indices_to_pick_from
)
flattened_indices = indices.view(-1) + indices_shift
flattened_params = params.reshape(-1, params.shape[-2], params.shape[-1])
out_flattened = flattened_params.index_select(0, flattened_indices)
out = out_flattened.reshape(params.shape[:2] + (num_indices_to_gather,) + params.shape[3:])
return out
@staticmethod
def _create_rand_mask_from_inputs(
from_blocked_mask,
to_blocked_mask,
rand_attn,
num_attention_heads,
num_rand_blocks,
batch_size,
from_seq_length,
from_block_size,
):
"""
Create 3D attention mask from a 2D tensor mask.
Args:
from_blocked_mask: 2D Tensor of shape [batch_size,
from_seq_length//from_block_size, from_block_size].
to_blocked_mask: int32 Tensor of shape [batch_size,
to_seq_length//to_block_size, to_block_size].
rand_attn: [batch_size, num_attention_heads,
from_seq_length//from_block_size-2, num_rand_blocks]
num_attention_heads: int. Number of attention heads.
num_rand_blocks: int. Number of random chunks per row.
batch_size: int. Batch size for computation.
from_seq_length: int. length of from sequence.
from_block_size: int. size of block in from sequence.
Returns:
float Tensor of shape [batch_size, num_attention_heads, from_seq_length//from_block_size-2,
from_block_size, num_rand_blocks*to_block_size].
"""
num_windows = from_seq_length // from_block_size - 2
rand_mask = torch.stack([p1[i1.flatten()] for p1, i1 in zip(to_blocked_mask, rand_attn)])
rand_mask = rand_mask.view(batch_size, num_attention_heads, num_windows, num_rand_blocks * from_block_size)
rand_mask = torch.einsum("blq,bhlk->bhlqk", from_blocked_mask[:, 1:-1], rand_mask)
return rand_mask
@staticmethod
def _get_rand_attn_plan(from_seq_length, from_block_size, num_rand_blocks):
"""
Gives the plan of where to put random attention.
Args:
from_seq_length: int. length of from sequence.
from_block_size: int. size of block in from sequence.
num_rand_blocks: int. Number of random chunks per row.
Returns:
            plan_from_length: ending location of from block
            plan_num_rand_blocks: number of random ending location for each block
"""
plan_from_length = []
plan_num_rand_blocks = []
if (2 * num_rand_blocks + 5) < (from_seq_length // from_block_size):
plan_from_length.append(int((2 * num_rand_blocks + 5) * from_block_size))
plan_num_rand_blocks.append(num_rand_blocks)
plan_from_length.append(from_seq_length)
plan_num_rand_blocks.append(0)
elif (num_rand_blocks + 5) < (from_seq_length // from_block_size):
plan_from_length.append(int((num_rand_blocks + 5) * from_block_size))
plan_num_rand_blocks.append(num_rand_blocks // 2)
plan_from_length.append(from_seq_length)
plan_num_rand_blocks.append(num_rand_blocks - (num_rand_blocks // 2))
else:
plan_from_length.append(from_seq_length)
plan_num_rand_blocks.append(num_rand_blocks)
return plan_from_length, plan_num_rand_blocks
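    # Worked example (not part of the original file): for from_seq_length = 2048,
    # from_block_size = 64 and num_rand_blocks = 3, there are 32 blocks and
    # 2 * 3 + 5 = 11 < 32, so the method returns plan_from_length = [704, 2048] and
    # plan_num_rand_blocks = [3, 0].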
@staticmethod
def _bigbird_block_rand_mask(
from_seq_length, to_seq_length, from_block_size, to_block_size, num_rand_blocks, last_idx=-1
):
"""
Create adjacency list of random attention.
Args:
from_seq_length: int. length of from sequence.
to_seq_length: int. length of to sequence.
from_block_size: int. size of block in from sequence.
to_block_size: int. size of block in to sequence.
num_rand_blocks: int. Number of random chunks per row.
last_idx: if -1 then num_rand_blocks blocks chosen anywhere in to sequence,
if positive then num_rand_blocks blocks chosen only up to last_idx.
Returns:
adjacency list of size from_seq_length//from_block_size-2 by num_rand_blocks
"""
# using this method when from_seq_length in [1024, 3072, 4096]
assert (
from_seq_length // from_block_size == to_seq_length // to_block_size
), "Error the number of blocks needs to be same!"
rand_attn = np.zeros((from_seq_length // from_block_size - 2, num_rand_blocks), dtype=np.int32)
middle_seq = np.arange(1, to_seq_length // to_block_size - 1, dtype=np.int32)
last = to_seq_length // to_block_size - 1
if last_idx > (2 * to_block_size):
last = (last_idx // to_block_size) - 1
r = num_rand_blocks # shorthand
for i in range(1, from_seq_length // from_block_size - 1):
start = i - 2
end = i
if i == 1:
rand_attn[i - 1, :] = np.random.permutation(middle_seq[2:last])[:r]
elif i == 2:
rand_attn[i - 1, :] = np.random.permutation(middle_seq[3:last])[:r]
elif i == from_seq_length // from_block_size - 3:
rand_attn[i - 1, :] = np.random.permutation(middle_seq[:last])[:r]
# Missing -3: should have been sliced till last-3
elif i == from_seq_length // from_block_size - 2:
rand_attn[i - 1, :] = np.random.permutation(middle_seq[:last])[:r]
# Missing -4: should have been sliced till last-4
else:
if start > last:
start = last
rand_attn[i - 1, :] = np.random.permutation(middle_seq[:start])[:r]
elif (end + 1) == last:
rand_attn[i - 1, :] = np.random.permutation(middle_seq[:start])[:r]
else:
rand_attn[i - 1, :] = np.random.permutation(
np.concatenate((middle_seq[:start], middle_seq[end + 1 : last]))
)[:r]
return rand_attn
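    # Illustrative note (not part of the original file): the returned array has one row per
    # middle query block (the first and last blocks are global and excluded) and each row
    # holds `num_rand_blocks` randomly chosen key-block indices that avoid the block's own
    # sliding window and the global blocks.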
def _bigbird_block_rand_mask_with_head(
self,
from_seq_length,
to_seq_length,
from_block_size,
to_block_size,
num_heads,
plan_from_length,
plan_num_rand_blocks,
window_block_left=1,
window_block_right=1,
global_block_top=1,
global_block_bottom=1,
global_block_left=1,
global_block_right=1,
):
"""
Create adjacency list of random attention.
Args:
from_seq_length: int. length of from sequence.
to_seq_length: int. length of to sequence.
from_block_size: int. size of block in from sequence.
to_block_size: int. size of block in to sequence.
num_heads: int. total number of heads.
            plan_from_length: list. plan from length where num_random_blocks are chosen from.
plan_num_rand_blocks: list. number of rand blocks within the plan.
window_block_left: int. number of blocks of window to left of a block.
window_block_right: int. number of blocks of window to right of a block.
global_block_top: int. number of blocks at the top.
global_block_bottom: int. number of blocks at the bottom.
global_block_left: int. Number of blocks globally used to the left.
global_block_right: int. Number of blocks globally used to the right.
Returns:
adjacency list of size num_head where each element is of size from_seq_length//from_block_size-2 by
num_rand_blocks
"""
# using this method when from_seq_length not in [1024, 3072, 4096]
assert (
from_seq_length // from_block_size == to_seq_length // to_block_size
), "Error the number of blocks needs to be same!"
assert from_seq_length in plan_from_length, "Error from sequence length not in plan!"
        # Total number of blocks in the mask
num_blocks = from_seq_length // from_block_size
# Number of blocks per plan
plan_block_length = np.array(plan_from_length) // from_block_size
# till when to follow plan
max_plan_idx = plan_from_length.index(from_seq_length)
# Random Attention adjacency list
rand_attn = [
np.zeros((num_blocks, np.sum(plan_num_rand_blocks[: max_plan_idx + 1])), dtype=np.int32)
for i in range(num_heads)
]
        # We will iterate over the plan blocks and pick a random number of
        # attention blocks from the legally allowed blocks
for plan_idx in range(max_plan_idx + 1):
rnd_r_cnt = 0
if plan_idx > 0:
# set the row for all from_blocks starting from 0 to
# plan_block_length[plan_idx-1]
                # column index starts from plan_block_length[plan_idx-1] and ends at
# plan_block_length[plan_idx]
if plan_num_rand_blocks[plan_idx] > 0:
rnd_r_cnt = int(np.sum(plan_num_rand_blocks[:plan_idx]))
curr_r_cnt = int(np.sum(plan_num_rand_blocks[: plan_idx + 1]))
for blk_rw_idx in range(global_block_top, plan_block_length[plan_idx - 1]):
for h in range(num_heads):
rand_attn[h][blk_rw_idx, rnd_r_cnt:curr_r_cnt] = self._get_single_block_row_attention(
block_id=blk_rw_idx,
to_start_block_id=plan_block_length[plan_idx - 1],
to_end_block_id=plan_block_length[plan_idx],
num_rand_blocks=plan_num_rand_blocks[plan_idx],
window_block_left=window_block_left,
window_block_right=window_block_right,
global_block_left=global_block_left,
global_block_right=global_block_right,
)
for pl_id in range(plan_idx):
if plan_num_rand_blocks[pl_id] == 0:
continue
for blk_rw_idx in range(plan_block_length[plan_idx - 1], plan_block_length[plan_idx]):
rnd_r_cnt = 0
to_start_block_id = 0
if pl_id > 0:
rnd_r_cnt = int(np.sum(plan_num_rand_blocks[:pl_id]))
to_start_block_id = plan_block_length[pl_id - 1]
curr_r_cnt = int(np.sum(plan_num_rand_blocks[: pl_id + 1]))
for h in range(num_heads):
rand_attn[h][blk_rw_idx, rnd_r_cnt:curr_r_cnt] = self._get_single_block_row_attention(
block_id=blk_rw_idx,
to_start_block_id=to_start_block_id,
to_end_block_id=plan_block_length[pl_id],
num_rand_blocks=plan_num_rand_blocks[pl_id],
window_block_left=window_block_left,
window_block_right=window_block_right,
global_block_left=global_block_left,
global_block_right=global_block_right,
)
if plan_num_rand_blocks[plan_idx] == 0:
continue
curr_r_cnt = int(np.sum(plan_num_rand_blocks[: plan_idx + 1]))
from_start_block_id = global_block_top
to_start_block_id = 0
if plan_idx > 0:
rnd_r_cnt = int(np.sum(plan_num_rand_blocks[:plan_idx]))
from_start_block_id = plan_block_length[plan_idx - 1]
to_start_block_id = plan_block_length[plan_idx - 1]
for blk_rw_idx in range(from_start_block_id, plan_block_length[plan_idx]):
for h in range(num_heads):
rand_attn[h][blk_rw_idx, rnd_r_cnt:curr_r_cnt] = self._get_single_block_row_attention(
block_id=blk_rw_idx,
to_start_block_id=to_start_block_id,
to_end_block_id=plan_block_length[plan_idx],
num_rand_blocks=plan_num_rand_blocks[plan_idx],
window_block_left=window_block_left,
window_block_right=window_block_right,
global_block_left=global_block_left,
global_block_right=global_block_right,
)
for nh in range(num_heads):
rand_attn[nh] = rand_attn[nh][global_block_top : num_blocks - global_block_bottom, :]
return rand_attn
@staticmethod
def _get_single_block_row_attention(
block_id,
to_start_block_id,
to_end_block_id,
num_rand_blocks,
window_block_left=1,
window_block_right=1,
global_block_left=1,
global_block_right=1,
):
"""
For a single row block get random row attention.
Args:
block_id: int. block id of row.
to_start_block_id: int. random attention column start id.
to_end_block_id: int. random attention column end id.
num_rand_blocks: int. number of random blocks to be selected.
window_block_left: int. number of blocks of window to left of a block.
window_block_right: int. number of blocks of window to right of a block.
global_block_left: int. Number of blocks globally used to the left.
global_block_right: int. Number of blocks globally used to the right.
Returns:
row containing the random attention vector of size num_rand_blocks.
"""
# list of to_blocks from which to choose random attention
to_block_list = np.arange(to_start_block_id, to_end_block_id, dtype=np.int32)
# permute the blocks
perm_block = np.random.permutation(to_block_list)
# illegal blocks for the current block id, using window
illegal_blocks = list(range(block_id - window_block_left, block_id + window_block_right + 1))
# Add blocks at the start and at the end
illegal_blocks.extend(list(range(global_block_left)))
illegal_blocks.extend(list(range(to_end_block_id - global_block_right, to_end_block_id)))
# The second from_block cannot choose random attention on second last to_block
if block_id == 1:
illegal_blocks.append(to_end_block_id - 2)
# The second last from_block cannot choose random attention on second to_block
if block_id == to_end_block_id - 2:
illegal_blocks.append(1)
        selected_random_blocks = []
        for i in range(to_end_block_id - to_start_block_id):
            if perm_block[i] not in illegal_blocks:
                selected_random_blocks.append(perm_block[i])
            if len(selected_random_blocks) == num_rand_blocks:
                break
        return np.array(selected_random_blocks, dtype=np.int32)
class BigBirdPegasusEncoderAttention(nn.Module):
def __init__(self, config, seed=None):
super().__init__()
self.config = config
self.seed = seed
self.attention_type = config.attention_type
if self.attention_type == "original_full":
self.self = BigBirdPegasusSelfAttention(config)
elif self.attention_type == "block_sparse":
self.self = BigBirdPegasusBlockSparseAttention(config, seed)
else:
raise ValueError(
f"attention_type can either be original_full or block_sparse, but is {self.config.attention_type}"
)
self.output = nn.Linear(config.hidden_size, config.hidden_size, bias=config.use_bias)
def set_attention_type(self, value: str):
if value not in ["original_full", "block_sparse"]:
raise ValueError(
f"attention_type can only be set to either 'original_full' or 'block_sparse', but is {value}"
)
# attention type is already correctly set
if value == self.attention_type:
return
self.attention_type = value
if value == "original_full":
# copy all weights to new full attention class
attn_weights = BigBirdPegasusSelfAttention(self.config)
else:
# copy all weights to new sparse attention class
attn_weights = BigBirdPegasusBlockSparseAttention(self.config, self.seed)
attn_weights.query = self.self.query
attn_weights.value = self.self.value
attn_weights.key = self.self.key
self.self = attn_weights
self.attention_type = value
if not self.training:
self.self.eval()
def forward(
self,
hidden_states,
attention_mask=None,
head_mask=None,
past_key_value=None,
output_attentions=False,
band_mask=None,
from_mask=None,
to_mask=None,
from_blocked_mask=None,
to_blocked_mask=None,
):
if self.config.attention_type == "original_full":
self_outputs = self.self(
hidden_states,
attention_mask,
head_mask,
past_key_value=past_key_value,
output_attentions=output_attentions,
)
else:
self_outputs = self.self(
hidden_states, band_mask, from_mask, to_mask, from_blocked_mask, to_blocked_mask, output_attentions
)
attention_output = self.output(self_outputs[0])
outputs = (attention_output,) + self_outputs[1:] # add attentions if we output them
return outputs
# Copied from transformers.models.bart.modeling_bart.BartAttention with Bart->BigBirdPegasusDecoder
class BigBirdPegasusDecoderAttention(nn.Module):
"""Multi-headed attention from 'Attention Is All You Need' paper"""
def __init__(
self,
embed_dim: int,
num_heads: int,
dropout: float = 0.0,
is_decoder: bool = False,
bias: bool = True,
):
super().__init__()
self.embed_dim = embed_dim
self.num_heads = num_heads
self.dropout = dropout
self.head_dim = embed_dim // num_heads
assert (
self.head_dim * num_heads == self.embed_dim
), f"embed_dim must be divisible by num_heads (got `embed_dim`: {self.embed_dim} and `num_heads`: {num_heads})."
self.scaling = self.head_dim ** -0.5
self.is_decoder = is_decoder
self.k_proj = nn.Linear(embed_dim, embed_dim, bias=bias)
self.v_proj = nn.Linear(embed_dim, embed_dim, bias=bias)
self.q_proj = nn.Linear(embed_dim, embed_dim, bias=bias)
self.out_proj = nn.Linear(embed_dim, embed_dim, bias=bias)
def _shape(self, tensor: torch.Tensor, seq_len: int, bsz: int):
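        # reshape [bsz, seq_len, embed_dim] -> [bsz, num_heads, seq_len, head_dim]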
return tensor.view(bsz, seq_len, self.num_heads, self.head_dim).transpose(1, 2).contiguous()
def forward(
self,
hidden_states: torch.Tensor,
key_value_states: Optional[torch.Tensor] = None,
past_key_value: Optional[Tuple[torch.Tensor]] = None,
attention_mask: Optional[torch.Tensor] = None,
layer_head_mask: Optional[torch.Tensor] = None,
output_attentions: bool = False,
) -> Tuple[torch.Tensor, Optional[torch.Tensor], Optional[Tuple[torch.Tensor]]]:
"""Input shape: Batch x Time x Channel"""
# if key_value_states are provided this layer is used as a cross-attention layer
# for the decoder
is_cross_attention = key_value_states is not None
bsz, tgt_len, embed_dim = hidden_states.size()
# get query proj
query_states = self.q_proj(hidden_states) * self.scaling
# get key, value proj
if is_cross_attention and past_key_value is not None:
# reuse k,v, cross_attentions
key_states = past_key_value[0]
value_states = past_key_value[1]
elif is_cross_attention:
# cross_attentions
key_states = self._shape(self.k_proj(key_value_states), -1, bsz)
value_states = self._shape(self.v_proj(key_value_states), -1, bsz)
elif past_key_value is not None:
# reuse k, v, self_attention
key_states = self._shape(self.k_proj(hidden_states), -1, bsz)
value_states = self._shape(self.v_proj(hidden_states), -1, bsz)
key_states = torch.cat([past_key_value[0], key_states], dim=2)
value_states = torch.cat([past_key_value[1], value_states], dim=2)
else:
# self_attention
key_states = self._shape(self.k_proj(hidden_states), -1, bsz)
value_states = self._shape(self.v_proj(hidden_states), -1, bsz)
if self.is_decoder:
# if cross_attention save Tuple(torch.Tensor, torch.Tensor) of all cross attention key/value_states.
# Further calls to cross_attention layer can then reuse all cross-attention
# key/value_states (first "if" case)
# if uni-directional self-attention (decoder) save Tuple(torch.Tensor, torch.Tensor) of
# all previous decoder key/value_states. Further calls to uni-directional self-attention
# can concat previous decoder key/value_states to current projected key/value_states (third "elif" case)
# if encoder bi-directional self-attention `past_key_value` is always `None`
past_key_value = (key_states, value_states)
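            # e.g. during incremental decoding each step appends one position, so the cached
            # key/value states grow to [bsz, num_heads, past_length + 1, head_dim]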
proj_shape = (bsz * self.num_heads, -1, self.head_dim)
query_states = self._shape(query_states, tgt_len, bsz).view(*proj_shape)
key_states = key_states.view(*proj_shape)
value_states = value_states.view(*proj_shape)
src_len = key_states.size(1)
attn_weights = torch.bmm(query_states, key_states.transpose(1, 2))
if attn_weights.size() != (bsz * self.num_heads, tgt_len, src_len):
raise ValueError(
f"Attention weights should be of size {(bsz * self.num_heads, tgt_len, src_len)}, but is {attn_weights.size()}"
)
if attention_mask is not None:
if attention_mask.size() != (bsz, 1, tgt_len, src_len):
raise ValueError(
f"Attention mask should be of size {(bsz, 1, tgt_len, src_len)}, but is {attention_mask.size()}"
)
attn_weights = attn_weights.view(bsz, self.num_heads, tgt_len, src_len) + attention_mask
attn_weights = attn_weights.view(bsz * self.num_heads, tgt_len, src_len)
attn_weights = F.softmax(attn_weights, dim=-1)
if layer_head_mask is not None:
if layer_head_mask.size() != (self.num_heads,):
raise ValueError(
f"Head mask for a single layer should be of size {(self.num_heads,)}, but is {layer_head_mask.size()}"
)
attn_weights = layer_head_mask.view(1, -1, 1, 1) * attn_weights.view(bsz, self.num_heads, tgt_len, src_len)
attn_weights = attn_weights.view(bsz * self.num_heads, tgt_len, src_len)
if output_attentions:
# this operation is a bit awkward, but it's required to
# make sure that attn_weights keeps its gradient.
# In order to do so, attn_weights have to be reshaped
# twice and have to be reused in the following
attn_weights_reshaped = attn_weights.view(bsz, self.num_heads, tgt_len, src_len)
attn_weights = attn_weights_reshaped.view(bsz * self.num_heads, tgt_len, src_len)
else:
attn_weights_reshaped = None
attn_probs = F.dropout(attn_weights, p=self.dropout, training=self.training)
attn_output = torch.bmm(attn_probs, value_states)
if attn_output.size() != (bsz * self.num_heads, tgt_len, self.head_dim):
raise ValueError(
f"`attn_output` should be of size {(bsz, self.num_heads, tgt_len, self.head_dim)}, but is {attn_output.size()}"
)
attn_output = attn_output.view(bsz, self.num_heads, tgt_len, self.head_dim)
attn_output = attn_output.transpose(1, 2)
attn_output = attn_output.reshape(bsz, tgt_len, embed_dim)
attn_output = self.out_proj(attn_output)
return attn_output, attn_weights_reshaped, past_key_value
class BigBirdPegasusEncoderLayer(nn.Module):
def __init__(self, config: BigBirdPegasusConfig, seed=None):
super().__init__()
self.attention_type = config.attention_type
self.embed_dim = config.d_model
self.self_attn = BigBirdPegasusEncoderAttention(config, seed=seed)
self.self_attn_layer_norm = nn.LayerNorm(self.embed_dim)
self.dropout = config.dropout
self.activation_fn = ACT2FN[config.activation_function]
self.activation_dropout = config.activation_dropout
self.fc1 = nn.Linear(self.embed_dim, config.encoder_ffn_dim)
self.fc2 = nn.Linear(config.encoder_ffn_dim, self.embed_dim)
self.final_layer_norm = nn.LayerNorm(self.embed_dim)
def forward(
self,
hidden_states: torch.Tensor,
attention_mask: torch.Tensor,
layer_head_mask: torch.Tensor,
band_mask=None,
from_mask=None,
to_mask=None,
from_blocked_mask=None,
to_blocked_mask=None,
output_attentions: bool = False,
):
"""
Args:
hidden_states (:obj:`torch.FloatTensor`): input to the layer of shape :obj:`(seq_len, batch, embed_dim)`
attention_mask (:obj:`torch.FloatTensor`): attention mask of size
:obj:`(batch, 1, tgt_len, src_len)` where padding elements are indicated by very large negative values.
output_attentions (:obj:`bool`, `optional`):
Whether or not to return the attentions tensors of all attention layers. See ``attentions`` under
returned tensors for more detail.
"""
residual = hidden_states
hidden_states = self.self_attn_layer_norm(hidden_states)
self_attention_outputs = self.self_attn(
hidden_states=hidden_states,
attention_mask=attention_mask,
output_attentions=output_attentions,
band_mask=band_mask,
from_mask=from_mask,
to_mask=to_mask,
from_blocked_mask=from_blocked_mask,
to_blocked_mask=to_blocked_mask,
)
hidden_states = self_attention_outputs[0]
hidden_states = F.dropout(hidden_states, p=self.dropout, training=self.training)
hidden_states = residual + hidden_states
residual = hidden_states
hidden_states = self.final_layer_norm(hidden_states)
hidden_states = self.activation_fn(self.fc1(hidden_states))
hidden_states = self.fc2(hidden_states)
hidden_states = F.dropout(hidden_states, p=self.dropout, training=self.training)
hidden_states = residual + hidden_states
if hidden_states.dtype == torch.float16 and (
torch.isinf(hidden_states).any() or torch.isnan(hidden_states).any()
):
clamp_value = torch.finfo(hidden_states.dtype).max - 1000
hidden_states = torch.clamp(hidden_states, min=-clamp_value, max=clamp_value)
outputs = (hidden_states,)
if output_attentions:
outputs += (self_attention_outputs[1],)
return outputs
def set_attention_type(self, value: str):
if value not in ["original_full", "block_sparse"]:
raise ValueError(
f"attention_type can only be set to either 'original_full' or 'block_sparse', but is {value}"
)
# attention type is already correctly set
if value == self.attention_type:
return
self.attention_type = value
self.self_attn.set_attention_type(value)
class BigBirdPegasusDecoderLayer(nn.Module):
def __init__(self, config: BigBirdPegasusConfig):
super().__init__()
self.embed_dim = config.d_model
self.self_attn = BigBirdPegasusDecoderAttention(
embed_dim=self.embed_dim,
num_heads=config.decoder_attention_heads,
dropout=config.attention_dropout,
is_decoder=True,
bias=config.use_bias,
)
self.dropout = config.dropout
self.activation_fn = ACT2FN[config.activation_function]
self.activation_dropout = config.activation_dropout
self.self_attn_layer_norm = nn.LayerNorm(self.embed_dim)
self.encoder_attn = BigBirdPegasusDecoderAttention(
self.embed_dim,
config.decoder_attention_heads,
dropout=config.attention_dropout,
is_decoder=True,
bias=config.use_bias,
)
self.encoder_attn_layer_norm = nn.LayerNorm(self.embed_dim)
self.fc1 = nn.Linear(self.embed_dim, config.decoder_ffn_dim)
self.fc2 = nn.Linear(config.decoder_ffn_dim, self.embed_dim)
self.final_layer_norm = nn.LayerNorm(self.embed_dim)
# Copied from transformers.models.mbart.modeling_mbart.MBartDecoderLayer.forward
def forward(
self,
hidden_states: torch.Tensor,
attention_mask: Optional[torch.Tensor] = None,
encoder_hidden_states: Optional[torch.Tensor] = None,
encoder_attention_mask: Optional[torch.Tensor] = None,
layer_head_mask: Optional[torch.Tensor] = None,
cross_attn_layer_head_mask: Optional[torch.Tensor] = None,
past_key_value: Optional[Tuple[torch.Tensor]] = None,
output_attentions: Optional[bool] = False,
use_cache: Optional[bool] = True,
):
"""
Args:
hidden_states (:obj:`torch.FloatTensor`): input to the layer of shape `(seq_len, batch, embed_dim)`
attention_mask (:obj:`torch.FloatTensor`): attention mask of size
`(batch, 1, tgt_len, src_len)` where padding elements are indicated by very large negative values.
encoder_hidden_states (:obj:`torch.FloatTensor`): cross attention input to the layer of shape `(seq_len, batch, embed_dim)`
encoder_attention_mask (:obj:`torch.FloatTensor`): encoder attention mask of size
`(batch, 1, tgt_len, src_len)` where padding elements are indicated by very large negative values.
layer_head_mask (:obj:`torch.FloatTensor`): mask for attention heads in a given layer of size
`(encoder_attention_heads,)`.
cross_attn_layer_head_mask (:obj:`torch.FloatTensor`): mask for cross-attention heads in a given layer of
size `(decoder_attention_heads,)`.
past_key_value (:obj:`Tuple(torch.FloatTensor)`): cached past key and value projection states
output_attentions (:obj:`bool`, `optional`):
Whether or not to return the attentions tensors of all attention layers. See ``attentions`` under
returned tensors for more detail.
"""
residual = hidden_states
hidden_states = self.self_attn_layer_norm(hidden_states)
# Self Attention
# decoder uni-directional self-attention cached key/values tuple is at positions 1,2
self_attn_past_key_value = past_key_value[:2] if past_key_value is not None else None
# add present self-attn cache to positions 1,2 of present_key_value tuple
hidden_states, self_attn_weights, present_key_value = self.self_attn(
hidden_states=hidden_states,
past_key_value=self_attn_past_key_value,
attention_mask=attention_mask,
layer_head_mask=layer_head_mask,
output_attentions=output_attentions,
)
hidden_states = F.dropout(hidden_states, p=self.dropout, training=self.training)
hidden_states = residual + hidden_states
# Cross-Attention Block
cross_attn_present_key_value = None
cross_attn_weights = None
if encoder_hidden_states is not None:
residual = hidden_states
hidden_states = self.encoder_attn_layer_norm(hidden_states)
# cross_attn cached key/values tuple is at positions 3,4 of present_key_value tuple
cross_attn_past_key_value = past_key_value[-2:] if past_key_value is not None else None
hidden_states, cross_attn_weights, cross_attn_present_key_value = self.encoder_attn(
hidden_states=hidden_states,
key_value_states=encoder_hidden_states,
attention_mask=encoder_attention_mask,
layer_head_mask=cross_attn_layer_head_mask,
past_key_value=cross_attn_past_key_value,
output_attentions=output_attentions,
)
hidden_states = F.dropout(hidden_states, p=self.dropout, training=self.training)
hidden_states = residual + hidden_states
# add cross-attn to positions 3,4 of present_key_value tuple
present_key_value = present_key_value + cross_attn_present_key_value
# Fully Connected
residual = hidden_states
hidden_states = self.final_layer_norm(hidden_states)
hidden_states = self.activation_fn(self.fc1(hidden_states))
hidden_states = F.dropout(hidden_states, p=self.activation_dropout, training=self.training)
hidden_states = self.fc2(hidden_states)
hidden_states = F.dropout(hidden_states, p=self.dropout, training=self.training)
hidden_states = residual + hidden_states
outputs = (hidden_states,)
if output_attentions:
outputs += (self_attn_weights, cross_attn_weights)
if use_cache:
outputs += (present_key_value,)
return outputs
# Copied from transformers.models.bart.modeling_bart.BartClassificationHead with Bart->BigBirdPegasus
class BigBirdPegasusClassificationHead(nn.Module):
"""Head for sentence-level classification tasks."""
def __init__(
self,
input_dim: int,
inner_dim: int,
num_classes: int,
pooler_dropout: float,
):
super().__init__()
self.dense = nn.Linear(input_dim, inner_dim)
self.dropout = nn.Dropout(p=pooler_dropout)
self.out_proj = nn.Linear(inner_dim, num_classes)
def forward(self, hidden_states: torch.Tensor):
hidden_states = self.dropout(hidden_states)
hidden_states = self.dense(hidden_states)
hidden_states = torch.tanh(hidden_states)
hidden_states = self.dropout(hidden_states)
hidden_states = self.out_proj(hidden_states)
return hidden_states
class BigBirdPegasusPreTrainedModel(PreTrainedModel):
config_class = BigBirdPegasusConfig
base_model_prefix = "model"
def _init_weights(self, module):
std = self.config.init_std
if isinstance(module, nn.Linear):
module.weight.data.normal_(mean=0.0, std=std)
if module.bias is not None:
module.bias.data.zero_()
elif isinstance(module, nn.Embedding):
module.weight.data.normal_(mean=0.0, std=std)
if module.padding_idx is not None:
module.weight.data[module.padding_idx].zero_()
@property
def dummy_inputs(self):
pad_token = self.config.pad_token_id
input_ids = torch.tensor([[0, 6, 10, 4, 2], [0, 8, 12, 2, pad_token]], device=self.device)
dummy_inputs = {
"attention_mask": input_ids.ne(pad_token),
"input_ids": input_ids,
}
return dummy_inputs
BIGBIRD_PEGASUS_START_DOCSTRING = r"""
    This model inherits from :class:`~transformers.PreTrainedModel`. Check the superclass documentation for the
    generic methods the library implements for all its models (such as downloading or saving, resizing the input
    embeddings, etc.).
This model is also a PyTorch `torch.nn.Module <https://pytorch.org/docs/stable/nn.html#torch.nn.Module>`__
subclass. Use it as a regular PyTorch Module and refer to the PyTorch documentation for all matter related to
general usage and behavior.
Parameters:
config (:class:`~transformers.BigBirdPegasusConfig`):
Model configuration class with all the parameters of the model. Initializing with a config file does not
load the weights associated with the model, only the configuration. Check out the
:meth:`~transformers.PreTrainedModel.from_pretrained` method to load the model weights.
"""
BIGBIRD_PEGASUS_GENERATION_EXAMPLE = r"""
Summarization example::
>>> from transformers import PegasusTokenizer, BigBirdPegasusForConditionalGeneration, BigBirdPegasusConfig
>>> model = BigBirdPegasusForConditionalGeneration.from_pretrained('bigbird-pegasus-large-arxiv')
>>> tokenizer = PegasusTokenizer.from_pretrained('bigbird-pegasus-large-arxiv')
>>> ARTICLE_TO_SUMMARIZE = "My friends are cool but they eat too many carbs."
>>> inputs = tokenizer([ARTICLE_TO_SUMMARIZE], max_length=4096, return_tensors='pt', truncation=True)
>>> # Generate Summary
>>> summary_ids = model.generate(inputs['input_ids'], num_beams=4, max_length=5, early_stopping=True)
>>> print([tokenizer.decode(g, skip_special_tokens=True, clean_up_tokenization_spaces=False) for g in summary_ids])
"""
BIGBIRD_PEGASUS_INPUTS_DOCSTRING = r"""
Args:
input_ids (:obj:`torch.LongTensor` of shape :obj:`(batch_size, sequence_length)`):
Indices of input sequence tokens in the vocabulary. Padding will be ignored by default should you provide
it.
Indices can be obtained using :class:`~transformers.PegasusTokenizer`. See
:meth:`transformers.PreTrainedTokenizer.encode` and :meth:`transformers.PreTrainedTokenizer.__call__` for
details.
`What are input IDs? <../glossary.html#input-ids>`__
attention_mask (:obj:`torch.Tensor` of shape :obj:`(batch_size, sequence_length)`, `optional`):
Mask to avoid performing attention on padding token indices. Mask values selected in ``[0, 1]``:
- 1 for tokens that are **not masked**,
- 0 for tokens that are **masked**.
`What are attention masks? <../glossary.html#attention-mask>`__
decoder_input_ids (:obj:`torch.LongTensor` of shape :obj:`(batch_size, target_sequence_length)`, `optional`):
Provide for translation and summarization training. By default, the model will create this tensor by
shifting the :obj:`input_ids` to the right, following the paper.
decoder_attention_mask (:obj:`torch.LongTensor` of shape :obj:`(batch_size, target_sequence_length)`, `optional`):
Default behavior: generate a tensor that ignores pad tokens in :obj:`decoder_input_ids`. Causal mask will
also be used by default.
If you want to change padding behavior, you should read
:func:`modeling_bigbird_pegasus._prepare_decoder_inputs` and modify to your needs. See diagram 1 in `the
paper <https://arxiv.org/abs/1910.13461>`__ for more information on the default strategy.
decoder_head_mask (:obj:`torch.Tensor` of shape :obj:`(num_layers, num_heads)`, `optional`):
Mask to nullify selected heads of the attention modules in the decoder. Mask values selected in ``[0, 1]``:
- 1 indicates the head is **not masked**,
- 0 indicates the head is **masked**.
encoder_outputs (:obj:`tuple(tuple(torch.FloatTensor)`, `optional`):
Tuple consists of (:obj:`last_hidden_state`, `optional`: :obj:`hidden_states`, `optional`:
:obj:`attentions`) :obj:`last_hidden_state` of shape :obj:`(batch_size, sequence_length, hidden_size)`,
`optional`) is a sequence of hidden-states at the output of the last layer of the encoder. Used in the
cross-attention of the decoder.
past_key_values (:obj:`Tuple[Tuple[torch.Tensor]]` of length :obj:`config.n_layers` with each tuple having 2 tuples each of which has 2 tensors of shape :obj:`(batch_size, num_heads, sequence_length - 1, embed_size_per_head)`):
Contains precomputed key and value hidden-states of the attention blocks. Can be used to speed up decoding.
If :obj:`past_key_values` are used, the user can optionally input only the last :obj:`decoder_input_ids`
(those that don't have their past key value states given to this model) of shape :obj:`(batch_size, 1)`
            instead of all :obj:`decoder_input_ids` of shape :obj:`(batch_size, sequence_length)`.
inputs_embeds (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, sequence_length, hidden_size)`, `optional`):
Optionally, instead of passing :obj:`input_ids` you can choose to directly pass an embedded representation.
This is useful if you want more control over how to convert :obj:`input_ids` indices into associated
vectors than the model's internal embedding lookup matrix.
decoder_inputs_embeds (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, target_sequence_length, hidden_size)`, `optional`):
Optionally, instead of passing :obj:`decoder_input_ids` you can choose to directly pass an embedded
representation. If :obj:`past_key_values` is used, optionally only the last :obj:`decoder_inputs_embeds`
have to be input (see :obj:`past_key_values`). This is useful if you want more control over how to convert
:obj:`decoder_input_ids` indices into associated vectors than the model's internal embedding lookup matrix.
If :obj:`decoder_input_ids` and :obj:`decoder_inputs_embeds` are both unset, :obj:`decoder_inputs_embeds`
takes the value of :obj:`inputs_embeds`.
use_cache (:obj:`bool`, `optional`):
If set to :obj:`True`, :obj:`past_key_values` key value states are returned and can be used to speed up
decoding (see :obj:`past_key_values`).
output_attentions (:obj:`bool`, `optional`):
Whether or not to return the attentions tensors of all attention layers. See ``attentions`` under returned
tensors for more detail.
output_hidden_states (:obj:`bool`, `optional`):
Whether or not to return the hidden states of all layers. See ``hidden_states`` under returned tensors for
more detail.
return_dict (:obj:`bool`, `optional`):
Whether or not to return a :class:`~transformers.file_utils.ModelOutput` instead of a plain tuple.
"""
BIGBIRD_PEGASUS_STANDALONE_INPUTS_DOCSTRING = r"""
Args:
input_ids (:obj:`torch.LongTensor` of shape :obj:`(batch_size, sequence_length)`):
Indices of input sequence tokens in the vocabulary. Padding will be ignored by default should you provide
it.
Indices can be obtained using :class:`~transformers.ProphetNetTokenizer`. See
:meth:`transformers.PreTrainedTokenizer.encode` and :meth:`transformers.PreTrainedTokenizer.__call__` for
details.
`What are input IDs? <../glossary.html#input-ids>`__
attention_mask (:obj:`torch.Tensor` of shape :obj:`(batch_size, sequence_length)`, `optional`):
Mask to avoid performing attention on padding token indices. Mask values selected in ``[0, 1]``:
- 1 for tokens that are **not masked**,
- 0 for tokens that are **masked**.
`What are attention masks? <../glossary.html#attention-mask>`__
output_attentions (:obj:`bool`, `optional`):
Whether or not to return the attentions tensors of all attention layers. See ``attentions`` under returned
tensors for more detail.
output_hidden_states (:obj:`bool`, `optional`):
Whether or not to return the hidden states of all layers. See ``hidden_states`` under returned tensors for
more detail.
return_dict (:obj:`bool`, `optional`):
Whether or not to return a :class:`~transformers.file_utils.ModelOutput` instead of a plain tuple.
"""
class BigBirdPegasusEncoder(BigBirdPegasusPreTrainedModel):
"""
Transformer encoder consisting of *config.encoder_layers* self attention layers. Each layer is a
:class:`BigBirdPegasusEncoderLayer`.
Args:
config: BigBirdPegasusConfig
embed_tokens (torch.nn.Embedding): output embedding
"""
def __init__(self, config: BigBirdPegasusConfig, embed_tokens: Optional[nn.Embedding] = None):
super().__init__(config)
self.attention_type = config.attention_type
self.block_size = config.block_size
self.dropout = config.dropout
self.layerdrop = config.encoder_layerdrop
embed_dim = config.d_model
self.padding_idx = config.pad_token_id
self.max_source_positions = config.max_position_embeddings
self.embed_scale = math.sqrt(embed_dim) if config.scale_embedding else 1.0
if embed_tokens is not None:
self.embed_tokens = embed_tokens
else:
self.embed_tokens = nn.Embedding(config.vocab_size, embed_dim, self.padding_idx)
self.embed_positions = BigBirdPegasusLearnedPositionalEmbedding(
config.max_position_embeddings,
embed_dim,
)
self.layers = nn.ModuleList([BigBirdPegasusEncoderLayer(config, seed=i) for i in range(config.encoder_layers)])
self.layernorm_embedding = nn.LayerNorm(embed_dim)
self.init_weights()
def forward(
self,
input_ids=None,
attention_mask=None,
head_mask=None,
inputs_embeds=None,
output_attentions=None,
output_hidden_states=None,
return_dict=None,
):
r"""
Args:
input_ids (:obj:`torch.LongTensor` of shape :obj:`(batch_size, sequence_length)`):
Indices of input sequence tokens in the vocabulary. Padding will be ignored by default should you
provide it.
Indices can be obtained using :class:`~transformers.PegasusTokenizer`. See
:meth:`transformers.PreTrainedTokenizer.encode` and :meth:`transformers.PreTrainedTokenizer.__call__`
for details.
`What are input IDs? <../glossary.html#input-ids>`__
attention_mask (:obj:`torch.Tensor` of shape :obj:`(batch_size, sequence_length)`, `optional`):
Mask to avoid performing attention on padding token indices. Mask values selected in ``[0, 1]``:
- 1 for tokens that are **not masked**,
- 0 for tokens that are **masked**.
`What are attention masks? <../glossary.html#attention-mask>`__
inputs_embeds (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, sequence_length, hidden_size)`, `optional`):
Optionally, instead of passing :obj:`input_ids` you can choose to directly pass an embedded
representation. This is useful if you want more control over how to convert :obj:`input_ids` indices
into associated vectors than the model's internal embedding lookup matrix.
output_attentions (:obj:`bool`, `optional`):
Whether or not to return the attentions tensors of all attention layers. See ``attentions`` under
returned tensors for more detail.
output_hidden_states (:obj:`bool`, `optional`):
Whether or not to return the hidden states of all layers. See ``hidden_states`` under returned tensors
for more detail.
return_dict (:obj:`bool`, `optional`):
Whether or not to return a :class:`~transformers.file_utils.ModelOutput` instead of a plain tuple.
"""
output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
output_hidden_states = (
output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
)
return_dict = return_dict if return_dict is not None else self.config.use_return_dict
# retrieve input_ids and inputs_embeds
if input_ids is not None and inputs_embeds is not None:
raise ValueError("You cannot specify both input_ids and inputs_embeds at the same time")
elif input_ids is not None:
input_shape = input_ids.size()
input_ids = input_ids.view(-1, input_shape[-1])
elif inputs_embeds is not None:
input_shape = inputs_embeds.size()[:-1]
else:
raise ValueError("You have to specify either input_ids or inputs_embeds")
if inputs_embeds is None:
inputs_embeds = self.embed_tokens(input_ids) * self.embed_scale
embed_pos = self.embed_positions(input_shape)
hidden_states = inputs_embeds + embed_pos
hidden_states = F.dropout(hidden_states, p=self.dropout, training=self.training)
if attention_mask is None:
attention_mask = torch.ones(input_shape, device=hidden_states.device)
attention_mask = attention_mask.long()
# in order to use block_sparse attention, sequence_length has to be at least
# bigger than all global attentions: 2 * block_size
# + sliding tokens: 3 * block_size
# + random tokens: 2 * num_random_blocks * block_size
max_tokens_to_attend = (5 + 2 * self.config.num_random_blocks) * self.config.block_size
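        # e.g. with config.block_size = 64 and config.num_random_blocks = 3 this threshold is
        # (5 + 2 * 3) * 64 = 704 tokens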
if self.attention_type == "block_sparse" and input_shape[1] <= max_tokens_to_attend:
# change attention_type from block_sparse to original_full
sequence_length = input_shape[1]
logger.warning(
"Attention type 'block_sparse' is not possible if sequence_length: "
f"{sequence_length} <= num global tokens: 2 * config.block_size "
"+ min. num sliding tokens: 3 * config.block_size "
"+ config.num_random_blocks * config.block_size "
"+ additional buffer: config.num_random_blocks * config.block_size "
f"= {max_tokens_to_attend} with config.block_size "
f"= {self.config.block_size}, config.num_random_blocks "
f"= {self.config.num_random_blocks}."
"Changing attention type to 'original_full'..."
)
self.set_attention_type("original_full")
if self.attention_type == "block_sparse":
padding_len, hidden_states, attention_mask = self._pad_to_block_size(hidden_states, attention_mask)
else:
padding_len = 0
# expand attention_mask
if self.attention_type == "original_full":
# [bsz, seq_len] -> [bsz, 1, tgt_seq_len, src_seq_len]
attention_mask = _expand_mask(attention_mask, inputs_embeds.dtype)
blocked_encoder_mask = band_mask = from_mask = to_mask = None
elif self.attention_type == "block_sparse":
blocked_encoder_mask, band_mask, from_mask, to_mask = self.create_masks_for_block_sparse_attn(
attention_mask, self.block_size
)
attention_mask = None
else:
raise ValueError(
f"attention_type can either be original_full or block_sparse, but is {self.attention_type}"
)
encoder_states = () if output_hidden_states else None
all_attentions = () if output_attentions else None
# check if head_mask has a correct number of layers specified if desired
if head_mask is not None:
assert head_mask.size()[0] == (
len(self.layers)
), f"The head_mask should be specified for {len(self.layers)} layers, but it is for {head_mask.size()[0]}."
for idx, encoder_layer in enumerate(self.layers):
if output_hidden_states:
encoder_states = encoder_states + (hidden_states,)
# add LayerDrop (see https://arxiv.org/abs/1909.11556 for description)
dropout_probability = random.uniform(0, 1)
if self.training and (dropout_probability < self.layerdrop): # skip the layer
layer_outputs = (None, None)
else:
if getattr(self.config, "gradient_checkpointing", False) and self.training:
def create_custom_forward(module):
def custom_forward(*inputs):
return module(*inputs, output_attentions)
return custom_forward
layer_outputs = torch.utils.checkpoint.checkpoint(
create_custom_forward(encoder_layer),
hidden_states,
attention_mask,
(head_mask[idx] if head_mask is not None else None),
band_mask,
from_mask,
to_mask,
blocked_encoder_mask,
blocked_encoder_mask,
)
else:
layer_outputs = encoder_layer(
hidden_states,
attention_mask,
layer_head_mask=(head_mask[idx] if head_mask is not None else None),
band_mask=band_mask,
from_mask=from_mask,
to_mask=to_mask,
from_blocked_mask=blocked_encoder_mask,
to_blocked_mask=blocked_encoder_mask,
output_attentions=output_attentions,
)
hidden_states = layer_outputs[0]
if output_attentions:
all_attentions = all_attentions + (layer_outputs[1],)
hidden_states = self.layernorm_embedding(hidden_states)
if output_hidden_states:
encoder_states = encoder_states + (hidden_states,)
if padding_len > 0:
# unpad `sequence_output` because the calling function is expecting a length == input_ids.size(1)
hidden_states = hidden_states[:, :-padding_len]
if not return_dict:
return tuple(v for v in [hidden_states, encoder_states, all_attentions] if v is not None)
self.encoder_o = hidden_states
return BaseModelOutput(
last_hidden_state=hidden_states, hidden_states=encoder_states, attentions=all_attentions
)
def set_attention_type(self, value: str):
if value not in ["original_full", "block_sparse"]:
raise ValueError(
f"attention_type can only be set to either 'original_full' or 'block_sparse', but is {value}"
)
# attention type is already correctly set
if value == self.attention_type:
return
self.attention_type = value
for layer in self.layers:
layer.set_attention_type(value)
@staticmethod # Copied from transformers.models.big_bird.modeling_big_bird.BigBirdModel.create_masks_for_block_sparse_attn
def create_masks_for_block_sparse_attn(attention_mask: torch.Tensor, block_size: int):
batch_size, seq_length = attention_mask.size()
assert (
seq_length % block_size == 0
), f"Sequence length must be multiple of block size, but sequence length is {seq_length}, while block size is {block_size}."
def create_band_mask_from_inputs(from_blocked_mask, to_blocked_mask):
"""
Create 3D attention mask from a 2D tensor mask.
Args:
from_blocked_mask: 2D Tensor of shape [batch_size,
from_seq_length//from_block_size, from_block_size].
to_blocked_mask: int32 Tensor of shape [batch_size,
to_seq_length//to_block_size, to_block_size].
Returns:
float Tensor of shape [batch_size, 1, from_seq_length//from_block_size-4, from_block_size,
3*to_block_size].
"""
exp_blocked_to_pad = torch.cat(
[to_blocked_mask[:, 1:-3], to_blocked_mask[:, 2:-2], to_blocked_mask[:, 3:-1]], dim=2
)
band_mask = torch.einsum("blq,blk->blqk", from_blocked_mask[:, 2:-2], exp_blocked_to_pad)
band_mask.unsqueeze_(1)
return band_mask
blocked_encoder_mask = attention_mask.view(batch_size, seq_length // block_size, block_size)
band_mask = create_band_mask_from_inputs(blocked_encoder_mask, blocked_encoder_mask)
from_mask = attention_mask.view(batch_size, 1, seq_length, 1)
to_mask = attention_mask.view(batch_size, 1, 1, seq_length)
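        # resulting shapes: blocked_encoder_mask [bsz, seq_len // block_size, block_size],
        # band_mask [bsz, 1, seq_len // block_size - 4, block_size, 3 * block_size],
        # from_mask [bsz, 1, seq_len, 1], to_mask [bsz, 1, 1, seq_len]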
return blocked_encoder_mask, band_mask, from_mask, to_mask
def _pad_to_block_size(self, hidden_states: torch.Tensor, attention_mask: torch.Tensor):
"""A helper function to pad tokens and mask to work with implementation of BigBird block-sparse attention."""
# padding
block_size = self.config.block_size
batch_size, seq_len = hidden_states.shape[:2]
padding_len = (block_size - seq_len % block_size) % block_size
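        # e.g. seq_len = 1000 with block_size = 64 gives padding_len = (64 - 1000 % 64) % 64 = 24,
        # so the sequence is padded to 1024 (a multiple of the block size)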
if padding_len > 0:
logger.info(
f"Input ids are automatically padded from {seq_len} to {seq_len + padding_len} to be a multiple of "
f"`config.block_size`: {block_size}"
)
pad_id = self.config.pad_token_id
device = hidden_states.device
input_ids_padding = torch.ones((batch_size, padding_len), dtype=torch.long, device=device) * pad_id
inputs_embeds_padding = self.embed_tokens(input_ids_padding)
hidden_states = torch.cat([hidden_states, inputs_embeds_padding], dim=-2)
attention_mask = F.pad(attention_mask, (0, padding_len), value=0) # no attention on the padding tokens
return padding_len, hidden_states, attention_mask
class BigBirdPegasusDecoder(BigBirdPegasusPreTrainedModel):
"""
Transformer decoder consisting of *config.decoder_layers* layers. Each layer is a
:class:`BigBirdPegasusDecoderLayer`
Args:
config: BigBirdPegasusConfig
embed_tokens (torch.nn.Embedding): output embedding
"""
def __init__(self, config: BigBirdPegasusConfig, embed_tokens: Optional[nn.Embedding] = None):
super().__init__(config)
self.dropout = config.dropout
self.layerdrop = config.decoder_layerdrop
self.padding_idx = config.pad_token_id
self.max_target_positions = config.max_position_embeddings
self.embed_scale = math.sqrt(config.d_model) if config.scale_embedding else 1.0
if embed_tokens is not None:
self.embed_tokens = embed_tokens
else:
self.embed_tokens = nn.Embedding(config.vocab_size, config.d_model, self.padding_idx)
self.embed_positions = BigBirdPegasusLearnedPositionalEmbedding(
config.max_position_embeddings,
config.d_model,
)
self.layers = nn.ModuleList([BigBirdPegasusDecoderLayer(config) for _ in range(config.decoder_layers)])
self.layernorm_embedding = nn.LayerNorm(config.d_model)
self.init_weights()
def get_input_embeddings(self):
return self.embed_tokens
def set_input_embeddings(self, value):
self.embed_tokens = value
# Copied from transformers.models.bart.modeling_bart.BartDecoder._prepare_decoder_attention_mask
def _prepare_decoder_attention_mask(self, attention_mask, input_shape, inputs_embeds, past_key_values_length):
# create causal mask
# [bsz, seq_len] -> [bsz, 1, tgt_seq_len, src_seq_len]
combined_attention_mask = None
if input_shape[-1] > 1:
combined_attention_mask = _make_causal_mask(
input_shape, inputs_embeds.dtype, past_key_values_length=past_key_values_length
).to(self.device)
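        # the causal mask is lower-triangular (large negative values above the diagonal), with extra
        # all-zero columns prepended for any cached (`past_key_values_length`) positions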
if attention_mask is not None:
# [bsz, seq_len] -> [bsz, 1, tgt_seq_len, src_seq_len]
expanded_attn_mask = _expand_mask(attention_mask, inputs_embeds.dtype, tgt_len=input_shape[-1])
combined_attention_mask = (
expanded_attn_mask if combined_attention_mask is None else expanded_attn_mask + combined_attention_mask
)
return combined_attention_mask
def forward(
self,
input_ids=None,
attention_mask=None,
encoder_hidden_states=None,
encoder_attention_mask=None,
head_mask=None,
cross_attn_head_mask=None,
past_key_values=None,
inputs_embeds=None,
use_cache=None,
output_attentions=None,
output_hidden_states=None,
return_dict=None,
):
r"""
Args:
input_ids (:obj:`torch.LongTensor` of shape :obj:`(batch_size, sequence_length)`):
Indices of input sequence tokens in the vocabulary. Padding will be ignored by default should you
provide it.
Indices can be obtained using :class:`~transformers.BigBirdPegasusTokenizer`. See
:meth:`transformers.PreTrainedTokenizer.encode` and :meth:`transformers.PreTrainedTokenizer.__call__`
for details.
`What are input IDs? <../glossary.html#input-ids>`__
attention_mask (:obj:`torch.Tensor` of shape :obj:`(batch_size, sequence_length)`, `optional`):
Mask to avoid performing attention on padding token indices. Mask values selected in ``[0, 1]``:
- 1 for tokens that are **not masked**,
- 0 for tokens that are **masked**.
`What are attention masks? <../glossary.html#attention-mask>`__
encoder_hidden_states (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, encoder_sequence_length, hidden_size)`, `optional`):
Sequence of hidden-states at the output of the last layer of the encoder. Used in the cross-attention
of the decoder.
encoder_attention_mask (:obj:`torch.LongTensor` of shape :obj:`(batch_size, encoder_sequence_length)`, `optional`):
Mask to avoid performing cross-attention on padding tokens indices of encoder input_ids. Mask values
selected in ``[0, 1]``:
- 1 for tokens that are **not masked**,
- 0 for tokens that are **masked**.
`What are attention masks? <../glossary.html#attention-mask>`__
head_mask (:obj:`torch.Tensor` of shape :obj:`(decoder_layers, decoder_attention_heads)`, `optional`):
Mask to nullify selected heads of the attention modules. Mask values selected in ``[0, 1]``:
- 1 indicates the head is **not masked**,
- 0 indicates the head is **masked**.
cross_attn_head_mask (:obj:`torch.Tensor` of shape :obj:`(decoder_layers, decoder_attention_heads)`, `optional`):
Mask to nullify selected heads of the cross-attention modules in decoder to avoid performing
cross-attention on hidden heads. Mask values selected in ``[0, 1]``:
- 1 indicates the head is **not masked**,
- 0 indicates the head is **masked**.
past_key_values (:obj:`Tuple[Tuple[torch.Tensor]]` of length :obj:`config.n_layers` with each tuple having 2 tuples each of which has 2 tensors of shape :obj:`(batch_size, num_heads, sequence_length - 1, embed_size_per_head)`):
Contains precomputed key and value hidden-states of the attention blocks. Can be used to speed up
decoding.
If :obj:`past_key_values` are used, the user can optionally input only the last
:obj:`decoder_input_ids` (those that don't have their past key value states given to this model) of
                shape :obj:`(batch_size, 1)` instead of all :obj:`decoder_input_ids` of shape :obj:`(batch_size,
sequence_length)`.
inputs_embeds (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, sequence_length, hidden_size)`, `optional`):
Optionally, instead of passing :obj:`input_ids` you can choose to directly pass an embedded
representation. This is useful if you want more control over how to convert :obj:`input_ids` indices
into associated vectors than the model's internal embedding lookup matrix.
output_attentions (:obj:`bool`, `optional`):
Whether or not to return the attentions tensors of all attention layers. See ``attentions`` under
returned tensors for more detail.
output_hidden_states (:obj:`bool`, `optional`):
Whether or not to return the hidden states of all layers. See ``hidden_states`` under returned tensors
for more detail.
return_dict (:obj:`bool`, `optional`):
Whether or not to return a :class:`~transformers.file_utils.ModelOutput` instead of a plain tuple.
"""
output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
output_hidden_states = (
output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
)
use_cache = use_cache if use_cache is not None else self.config.use_cache
return_dict = return_dict if return_dict is not None else self.config.use_return_dict
# retrieve input_ids and inputs_embeds
if input_ids is not None and inputs_embeds is not None:
raise ValueError("You cannot specify both decoder_input_ids and decoder_inputs_embeds at the same time")
elif input_ids is not None:
input_shape = input_ids.size()
input_ids = input_ids.view(-1, input_shape[-1])
elif inputs_embeds is not None:
input_shape = inputs_embeds.size()[:-1]
else:
raise ValueError("You have to specify either decoder_input_ids or decoder_inputs_embeds")
# past_key_values_length
past_key_values_length = past_key_values[0][0].shape[2] if past_key_values is not None else 0
if inputs_embeds is None:
inputs_embeds = self.embed_tokens(input_ids) * self.embed_scale
attention_mask = self._prepare_decoder_attention_mask(
attention_mask, input_shape, inputs_embeds, past_key_values_length
)
# expand encoder attention mask
if encoder_hidden_states is not None and encoder_attention_mask is not None:
# [bsz, seq_len] -> [bsz, 1, tgt_seq_len, src_seq_len]
encoder_attention_mask = _expand_mask(encoder_attention_mask, inputs_embeds.dtype, tgt_len=input_shape[-1])
# embed positions
positions = self.embed_positions(input_shape, past_key_values_length)
hidden_states = inputs_embeds + positions
hidden_states = F.dropout(hidden_states, p=self.dropout, training=self.training)
# decoder layers
all_hidden_states = () if output_hidden_states else None
all_self_attns = () if output_attentions else None
all_cross_attentions = () if (output_attentions and encoder_hidden_states is not None) else None
next_decoder_cache = () if use_cache else None
# check if head_mask/cross_attn_head_mask has a correct number of layers specified if desired
for attn_mask, mask_name in zip([head_mask, cross_attn_head_mask], ["head_mask", "cross_attn_head_mask"]):
if attn_mask is not None:
assert attn_mask.size()[0] == (
len(self.layers)
), f"The `{mask_name}` should be specified for {len(self.layers)} layers, but it is for {head_mask.size()[0]}."
for idx, decoder_layer in enumerate(self.layers):
# add LayerDrop (see https://arxiv.org/abs/1909.11556 for description)
if output_hidden_states:
all_hidden_states += (hidden_states,)
dropout_probability = random.uniform(0, 1)
if self.training and (dropout_probability < self.layerdrop):
continue
past_key_value = past_key_values[idx] if past_key_values is not None else None
if getattr(self.config, "gradient_checkpointing", False) and self.training:
if use_cache:
logger.warning(
"`use_cache=True` is incompatible with `config.gradient_checkpointing=True`. Setting "
"`use_cache=False`..."
)
use_cache = False
def create_custom_forward(module):
def custom_forward(*inputs):
# None for past_key_value
return module(*inputs, output_attentions, use_cache)
return custom_forward
layer_outputs = torch.utils.checkpoint.checkpoint(
create_custom_forward(decoder_layer),
hidden_states,
attention_mask,
encoder_hidden_states,
encoder_attention_mask,
head_mask[idx] if head_mask is not None else None,
cross_attn_head_mask[idx] if cross_attn_head_mask is not None else None,
None,
)
else:
layer_outputs = decoder_layer(
hidden_states,
attention_mask=attention_mask,
encoder_hidden_states=encoder_hidden_states,
encoder_attention_mask=encoder_attention_mask,
layer_head_mask=(head_mask[idx] if head_mask is not None else None),
cross_attn_layer_head_mask=(
cross_attn_head_mask[idx] if cross_attn_head_mask is not None else None
),
past_key_value=past_key_value,
output_attentions=output_attentions,
use_cache=use_cache,
)
hidden_states = layer_outputs[0]
if use_cache:
next_decoder_cache += (layer_outputs[3 if output_attentions else 1],)
if output_attentions:
all_self_attns += (layer_outputs[1],)
if encoder_hidden_states is not None:
all_cross_attentions += (layer_outputs[2],)
hidden_states = self.layernorm_embedding(hidden_states)
# add hidden states from the last decoder layer
if output_hidden_states:
all_hidden_states += (hidden_states,)
next_cache = next_decoder_cache if use_cache else None
if not return_dict:
return tuple(
v
for v in [hidden_states, next_cache, all_hidden_states, all_self_attns, all_cross_attentions]
if v is not None
)
return BaseModelOutputWithPastAndCrossAttentions(
last_hidden_state=hidden_states,
past_key_values=next_cache,
hidden_states=all_hidden_states,
attentions=all_self_attns,
cross_attentions=all_cross_attentions,
)
@add_start_docstrings(
"The bare BigBirdPegasus Model outputting raw hidden-states without any specific head on top.",
BIGBIRD_PEGASUS_START_DOCSTRING,
)
# Copied from transformers.models.bart.modeling_bart.BartModel with Bart->BigBirdPegasus, BART->BIGBIRD_PEGASUS
class BigBirdPegasusModel(BigBirdPegasusPreTrainedModel):
def __init__(self, config: BigBirdPegasusConfig):
super().__init__(config)
padding_idx, vocab_size = config.pad_token_id, config.vocab_size
self.shared = nn.Embedding(vocab_size, config.d_model, padding_idx)
self.encoder = BigBirdPegasusEncoder(config, self.shared)
self.decoder = BigBirdPegasusDecoder(config, self.shared)
self.init_weights()
def get_input_embeddings(self):
return self.shared
def set_input_embeddings(self, value):
self.shared = value
self.encoder.embed_tokens = self.shared
self.decoder.embed_tokens = self.shared
def get_encoder(self):
return self.encoder
def get_decoder(self):
return self.decoder
@add_start_docstrings_to_model_forward(BIGBIRD_PEGASUS_INPUTS_DOCSTRING)
@add_code_sample_docstrings(
tokenizer_class=_TOKENIZER_FOR_DOC,
checkpoint=_CHECKPOINT_FOR_DOC,
output_type=Seq2SeqModelOutput,
config_class=_CONFIG_FOR_DOC,
)
def forward(
self,
input_ids=None,
attention_mask=None,
decoder_input_ids=None,
decoder_attention_mask=None,
head_mask=None,
decoder_head_mask=None,
cross_attn_head_mask=None,
encoder_outputs=None,
past_key_values=None,
inputs_embeds=None,
decoder_inputs_embeds=None,
use_cache=None,
output_attentions=None,
output_hidden_states=None,
return_dict=None,
):
# different to other models, BigBirdPegasus automatically creates decoder_input_ids from
# input_ids if no decoder_input_ids are provided
if decoder_input_ids is None and decoder_inputs_embeds is None:
decoder_input_ids = shift_tokens_right(
input_ids, self.config.pad_token_id, self.config.decoder_start_token_id
)
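        # e.g. input_ids [[t1, t2, t3, eos]] becomes decoder_input_ids [[decoder_start, t1, t2, t3]]
        # (shift_tokens_right prepends the decoder start token and drops the last position)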
output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
output_hidden_states = (
output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
)
use_cache = use_cache if use_cache is not None else self.config.use_cache
return_dict = return_dict if return_dict is not None else self.config.use_return_dict
if encoder_outputs is None:
encoder_outputs = self.encoder(
input_ids=input_ids,
attention_mask=attention_mask,
head_mask=head_mask,
inputs_embeds=inputs_embeds,
output_attentions=output_attentions,
output_hidden_states=output_hidden_states,
return_dict=return_dict,
)
# If the user passed a tuple for encoder_outputs, we wrap it in a BaseModelOutput when return_dict=True
elif return_dict and not isinstance(encoder_outputs, BaseModelOutput):
encoder_outputs = BaseModelOutput(
last_hidden_state=encoder_outputs[0],
hidden_states=encoder_outputs[1] if len(encoder_outputs) > 1 else None,
attentions=encoder_outputs[2] if len(encoder_outputs) > 2 else None,
)
# decoder outputs consists of (dec_features, past_key_value, dec_hidden, dec_attn)
decoder_outputs = self.decoder(
input_ids=decoder_input_ids,
attention_mask=decoder_attention_mask,
encoder_hidden_states=encoder_outputs[0],
encoder_attention_mask=attention_mask,
head_mask=decoder_head_mask,
cross_attn_head_mask=cross_attn_head_mask,
past_key_values=past_key_values,
inputs_embeds=decoder_inputs_embeds,
use_cache=use_cache,
output_attentions=output_attentions,
output_hidden_states=output_hidden_states,
return_dict=return_dict,
)
if not return_dict:
return decoder_outputs + encoder_outputs
return Seq2SeqModelOutput(
last_hidden_state=decoder_outputs.last_hidden_state,
past_key_values=decoder_outputs.past_key_values,
decoder_hidden_states=decoder_outputs.hidden_states,
decoder_attentions=decoder_outputs.attentions,
cross_attentions=decoder_outputs.cross_attentions,
encoder_last_hidden_state=encoder_outputs.last_hidden_state,
encoder_hidden_states=encoder_outputs.hidden_states,
encoder_attentions=encoder_outputs.attentions,
)
@add_start_docstrings(
"The BigBirdPegasus Model with a language modeling head. Can be used for summarization.",
BIGBIRD_PEGASUS_START_DOCSTRING,
)
# Copied from transformers.models.bart.modeling_bart.BartForConditionalGeneration with Bart->BigBirdPegasus, BART->BIGBIRD_PEGASUS
class BigBirdPegasusForConditionalGeneration(BigBirdPegasusPreTrainedModel):
base_model_prefix = "model"
_keys_to_ignore_on_load_missing = [r"final_logits_bias", r"lm_head\.weight"]
def __init__(self, config: BigBirdPegasusConfig):
super().__init__(config)
self.model = BigBirdPegasusModel(config)
self.register_buffer("final_logits_bias", torch.zeros((1, self.model.shared.num_embeddings)))
self.lm_head = nn.Linear(config.d_model, self.model.shared.num_embeddings, bias=False)
self.init_weights()
def get_encoder(self):
return self.model.get_encoder()
def get_decoder(self):
return self.model.get_decoder()
def resize_token_embeddings(self, new_num_tokens: int) -> nn.Embedding:
new_embeddings = super().resize_token_embeddings(new_num_tokens)
self._resize_final_logits_bias(new_num_tokens)
return new_embeddings
def _resize_final_logits_bias(self, new_num_tokens: int) -> None:
old_num_tokens = self.final_logits_bias.shape[-1]
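        # existing bias entries are kept; newly added vocabulary entries start with a zero logit bias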
if new_num_tokens <= old_num_tokens:
new_bias = self.final_logits_bias[:, :new_num_tokens]
else:
extra_bias = torch.zeros((1, new_num_tokens - old_num_tokens), device=self.final_logits_bias.device)
new_bias = torch.cat([self.final_logits_bias, extra_bias], dim=1)
self.register_buffer("final_logits_bias", new_bias)
def get_output_embeddings(self):
return self.lm_head
def set_output_embeddings(self, new_embeddings):
self.lm_head = new_embeddings
@add_start_docstrings_to_model_forward(BIGBIRD_PEGASUS_INPUTS_DOCSTRING)
@replace_return_docstrings(output_type=Seq2SeqLMOutput, config_class=_CONFIG_FOR_DOC)
@add_end_docstrings(BIGBIRD_PEGASUS_GENERATION_EXAMPLE)
def forward(
self,
input_ids=None,
attention_mask=None,
decoder_input_ids=None,
decoder_attention_mask=None,
head_mask=None,
decoder_head_mask=None,
cross_attn_head_mask=None,
encoder_outputs=None,
past_key_values=None,
inputs_embeds=None,
decoder_inputs_embeds=None,
labels=None,
use_cache=None,
output_attentions=None,
output_hidden_states=None,
return_dict=None,
):
r"""
labels (:obj:`torch.LongTensor` of shape :obj:`(batch_size, sequence_length)`, `optional`):
Labels for computing the masked language modeling loss. Indices should either be in ``[0, ...,
config.vocab_size]`` or -100 (see ``input_ids`` docstring). Tokens with indices set to ``-100`` are ignored
(masked), the loss is only computed for the tokens with labels in ``[0, ..., config.vocab_size]``.
Returns:
"""
return_dict = return_dict if return_dict is not None else self.config.use_return_dict
if labels is not None:
if decoder_input_ids is None:
decoder_input_ids = shift_tokens_right(
labels, self.config.pad_token_id, self.config.decoder_start_token_id
)
outputs = self.model(
input_ids,
attention_mask=attention_mask,
decoder_input_ids=decoder_input_ids,
encoder_outputs=encoder_outputs,
decoder_attention_mask=decoder_attention_mask,
head_mask=head_mask,
decoder_head_mask=decoder_head_mask,
cross_attn_head_mask=cross_attn_head_mask,
past_key_values=past_key_values,
inputs_embeds=inputs_embeds,
decoder_inputs_embeds=decoder_inputs_embeds,
use_cache=use_cache,
output_attentions=output_attentions,
output_hidden_states=output_hidden_states,
return_dict=return_dict,
)
lm_logits = self.lm_head(outputs[0]) + self.final_logits_bias
masked_lm_loss = None
if labels is not None:
loss_fct = CrossEntropyLoss()
masked_lm_loss = loss_fct(lm_logits.view(-1, self.config.vocab_size), labels.view(-1))
if not return_dict:
output = (lm_logits,) + outputs[1:]
return ((masked_lm_loss,) + output) if masked_lm_loss is not None else output
return Seq2SeqLMOutput(
loss=masked_lm_loss,
logits=lm_logits,
past_key_values=outputs.past_key_values,
decoder_hidden_states=outputs.decoder_hidden_states,
decoder_attentions=outputs.decoder_attentions,
cross_attentions=outputs.cross_attentions,
encoder_last_hidden_state=outputs.encoder_last_hidden_state,
encoder_hidden_states=outputs.encoder_hidden_states,
encoder_attentions=outputs.encoder_attentions,
)
def prepare_inputs_for_generation(
self,
decoder_input_ids,
past=None,
attention_mask=None,
head_mask=None,
decoder_head_mask=None,
cross_attn_head_mask=None,
use_cache=None,
encoder_outputs=None,
**kwargs
):
# cut decoder_input_ids if past is used
if past is not None:
decoder_input_ids = decoder_input_ids[:, -1:]
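            # earlier positions are already encoded in `past`, so only the newest token id is fed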
return {
"input_ids": None, # encoder_outputs is defined. input_ids not needed
"encoder_outputs": encoder_outputs,
"past_key_values": past,
"decoder_input_ids": decoder_input_ids,
"attention_mask": attention_mask,
"head_mask": head_mask,
"decoder_head_mask": decoder_head_mask,
"cross_attn_head_mask": cross_attn_head_mask,
"use_cache": use_cache, # change this to avoid caching (presumably for debugging)
}
def prepare_decoder_input_ids_from_labels(self, labels: torch.Tensor):
return shift_tokens_right(labels, self.config.pad_token_id, self.config.decoder_start_token_id)
@staticmethod
def _reorder_cache(past, beam_idx):
reordered_past = ()
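        # during beam search the batch dimension is laid out as (batch * beams); index_select along dim 0
        # keeps each layer's cached self-attention states aligned with the surviving beams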
for layer_past in past:
# cached cross_attention states don't have to be reordered -> they are always the same
reordered_past += (
tuple(past_state.index_select(0, beam_idx) for past_state in layer_past[:2]) + layer_past[2:],
)
return reordered_past
@add_start_docstrings(
"""
    BigBirdPegasus model with a sequence classification head on top (a linear layer on top of the pooled output) e.g.
for GLUE tasks.
""",
BIGBIRD_PEGASUS_START_DOCSTRING,
)
# Copied from transformers.models.bart.modeling_bart.BartForSequenceClassification with Bart->BigBirdPegasus, BART->BIGBIRD_PEGASUS
class BigBirdPegasusForSequenceClassification(BigBirdPegasusPreTrainedModel):
def __init__(self, config: BigBirdPegasusConfig, **kwargs):
super().__init__(config, **kwargs)
self.model = BigBirdPegasusModel(config)
self.classification_head = BigBirdPegasusClassificationHead(
config.d_model,
config.d_model,
config.num_labels,
config.classifier_dropout,
)
self.model._init_weights(self.classification_head.dense)
self.model._init_weights(self.classification_head.out_proj)
@add_start_docstrings_to_model_forward(BIGBIRD_PEGASUS_INPUTS_DOCSTRING)
@add_code_sample_docstrings(
tokenizer_class=_TOKENIZER_FOR_DOC,
checkpoint=_CHECKPOINT_FOR_DOC,
output_type=Seq2SeqSequenceClassifierOutput,
config_class=_CONFIG_FOR_DOC,
)
def forward(
self,
input_ids=None,
attention_mask=None,
decoder_input_ids=None,
decoder_attention_mask=None,
head_mask=None,
decoder_head_mask=None,
cross_attn_head_mask=None,
encoder_outputs=None,
inputs_embeds=None,
decoder_inputs_embeds=None,
labels=None,
use_cache=None,
output_attentions=None,
output_hidden_states=None,
return_dict=None,
):
r"""
labels (:obj:`torch.LongTensor` of shape :obj:`(batch_size,)`, `optional`):
Labels for computing the sequence classification/regression loss. Indices should be in :obj:`[0, ...,
config.num_labels - 1]`. If :obj:`config.num_labels > 1` a classification loss is computed (Cross-Entropy).
"""
return_dict = return_dict if return_dict is not None else self.config.use_return_dict
if labels is not None:
use_cache = False
if input_ids is None and inputs_embeds is not None:
raise NotImplementedError(
f"Passing input embeddings is currently not supported for {self.__class__.__name__}"
)
outputs = self.model(
input_ids,
attention_mask=attention_mask,
decoder_input_ids=decoder_input_ids,
decoder_attention_mask=decoder_attention_mask,
head_mask=head_mask,
decoder_head_mask=decoder_head_mask,
cross_attn_head_mask=cross_attn_head_mask,
encoder_outputs=encoder_outputs,
inputs_embeds=inputs_embeds,
decoder_inputs_embeds=decoder_inputs_embeds,
use_cache=use_cache,
output_attentions=output_attentions,
output_hidden_states=output_hidden_states,
return_dict=return_dict,
)
hidden_states = outputs[0] # last hidden state
eos_mask = input_ids.eq(self.config.eos_token_id)
if len(torch.unique(eos_mask.sum(1))) > 1:
raise ValueError("All examples must have the same number of <eos> tokens.")
sentence_representation = hidden_states[eos_mask, :].view(hidden_states.size(0), -1, hidden_states.size(-1))[
:, -1, :
]
logits = self.classification_head(sentence_representation)
loss = None
if labels is not None:
if self.config.num_labels == 1:
# regression
loss_fct = MSELoss()
loss = loss_fct(logits.view(-1), labels.view(-1))
else:
loss_fct = CrossEntropyLoss()
loss = loss_fct(logits.view(-1, self.config.num_labels), labels.view(-1))
if not return_dict:
output = (logits,) + outputs[1:]
return ((loss,) + output) if loss is not None else output
return Seq2SeqSequenceClassifierOutput(
loss=loss,
logits=logits,
past_key_values=outputs.past_key_values,
decoder_hidden_states=outputs.decoder_hidden_states,
decoder_attentions=outputs.decoder_attentions,
cross_attentions=outputs.cross_attentions,
encoder_last_hidden_state=outputs.encoder_last_hidden_state,
encoder_hidden_states=outputs.encoder_hidden_states,
encoder_attentions=outputs.encoder_attentions,
)
@add_start_docstrings(
"""
BigBirdPegasus Model with a span classification head on top for extractive question-answering tasks like SQuAD (a
linear layer on top of the hidden-states output to compute `span start logits` and `span end logits`).
""",
BIGBIRD_PEGASUS_START_DOCSTRING,
)
# Copied from transformers.models.bart.modeling_bart.BartForQuestionAnswering with Bart->BigBirdPegasus, BART->BIGBIRD_PEGASUS
class BigBirdPegasusForQuestionAnswering(BigBirdPegasusPreTrainedModel):
def __init__(self, config):
super().__init__(config)
config.num_labels = 2
self.num_labels = config.num_labels
self.model = BigBirdPegasusModel(config)
self.qa_outputs = nn.Linear(config.hidden_size, config.num_labels)
self.model._init_weights(self.qa_outputs)
@add_start_docstrings_to_model_forward(BIGBIRD_PEGASUS_INPUTS_DOCSTRING)
@add_code_sample_docstrings(
tokenizer_class=_TOKENIZER_FOR_DOC,
checkpoint=_CHECKPOINT_FOR_DOC,
output_type=Seq2SeqQuestionAnsweringModelOutput,
config_class=_CONFIG_FOR_DOC,
)
def forward(
self,
input_ids=None,
attention_mask=None,
decoder_input_ids=None,
decoder_attention_mask=None,
head_mask=None,
decoder_head_mask=None,
cross_attn_head_mask=None,
encoder_outputs=None,
start_positions=None,
end_positions=None,
inputs_embeds=None,
decoder_inputs_embeds=None,
use_cache=None,
output_attentions=None,
output_hidden_states=None,
return_dict=None,
):
r"""
start_positions (:obj:`torch.LongTensor` of shape :obj:`(batch_size,)`, `optional`):
Labels for position (index) of the start of the labelled span for computing the token classification loss.
            Positions are clamped to the length of the sequence (`sequence_length`). Positions outside of the sequence
are not taken into account for computing the loss.
end_positions (:obj:`torch.LongTensor` of shape :obj:`(batch_size,)`, `optional`):
Labels for position (index) of the end of the labelled span for computing the token classification loss.
            Positions are clamped to the length of the sequence (`sequence_length`). Positions outside of the sequence
are not taken into account for computing the loss.
"""
return_dict = return_dict if return_dict is not None else self.config.use_return_dict
if start_positions is not None and end_positions is not None:
use_cache = False
outputs = self.model(
input_ids,
attention_mask=attention_mask,
decoder_input_ids=decoder_input_ids,
decoder_attention_mask=decoder_attention_mask,
head_mask=head_mask,
decoder_head_mask=decoder_head_mask,
cross_attn_head_mask=cross_attn_head_mask,
encoder_outputs=encoder_outputs,
inputs_embeds=inputs_embeds,
decoder_inputs_embeds=decoder_inputs_embeds,
use_cache=use_cache,
output_attentions=output_attentions,
output_hidden_states=output_hidden_states,
return_dict=return_dict,
)
sequence_output = outputs[0]
logits = self.qa_outputs(sequence_output)
start_logits, end_logits = logits.split(1, dim=-1)
start_logits = start_logits.squeeze(-1)
end_logits = end_logits.squeeze(-1)
total_loss = None
if start_positions is not None and end_positions is not None:
            # If we are on multi-GPU, split adds an extra dimension
if len(start_positions.size()) > 1:
start_positions = start_positions.squeeze(-1)
if len(end_positions.size()) > 1:
end_positions = end_positions.squeeze(-1)
            # sometimes the start/end positions are outside our model inputs; we ignore these terms
ignored_index = start_logits.size(1)
start_positions.clamp_(0, ignored_index)
end_positions.clamp_(0, ignored_index)
loss_fct = CrossEntropyLoss(ignore_index=ignored_index)
start_loss = loss_fct(start_logits, start_positions)
end_loss = loss_fct(end_logits, end_positions)
total_loss = (start_loss + end_loss) / 2
if not return_dict:
output = (
start_logits,
end_logits,
) + outputs[1:]
return ((total_loss,) + output) if total_loss is not None else output
return Seq2SeqQuestionAnsweringModelOutput(
loss=total_loss,
start_logits=start_logits,
end_logits=end_logits,
past_key_values=outputs.past_key_values,
decoder_hidden_states=outputs.decoder_hidden_states,
decoder_attentions=outputs.decoder_attentions,
cross_attentions=outputs.cross_attentions,
encoder_last_hidden_state=outputs.encoder_last_hidden_state,
encoder_hidden_states=outputs.encoder_hidden_states,
encoder_attentions=outputs.encoder_attentions,
)
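# A minimal sketch, with made-up logits, of how the start/end logits produced by
# the question-answering head above are typically turned into a predicted span:
# take the argmax over the sequence dimension for each of the two heads.
def _demo_span_from_logits():
    import torch
    start_logits = torch.tensor([[0.1, 2.3, 0.2, -1.0]])  # (batch=1, seq_len=4)
    end_logits = torch.tensor([[0.0, 0.4, 1.9, -0.5]])
    start = start_logits.argmax(dim=-1)    # most likely span start per example
    end = end_logits.argmax(dim=-1)        # most likely span end per example
    return start.item(), end.item()        # here: (1, 2)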
# Copied from transformers.models.pegasus.modeling_pegasus.PegasusDecoderWrapper with Pegasus->BigBirdPegasus
class BigBirdPegasusDecoderWrapper(BigBirdPegasusPreTrainedModel):
"""
This wrapper class is a helper class to correctly load pretrained checkpoints when the causal language model is
used in combination with the :class:`~transformers.EncoderDecoderModel` framework.
"""
def __init__(self, config):
super().__init__(config)
self.decoder = BigBirdPegasusDecoder(config)
def forward(self, *args, **kwargs):
return self.decoder(*args, **kwargs)
# Copied from transformers.models.pegasus.modeling_pegasus.PegasusForCausalLM with Pegasus->BigBirdPegasus, 'facebook/bart-large'->"google/bigbird-pegasus-large-arxiv"
class BigBirdPegasusForCausalLM(BigBirdPegasusPreTrainedModel):
def __init__(self, config):
super().__init__(config)
config = copy.deepcopy(config)
config.is_decoder = True
config.is_encoder_decoder = False
self.model = BigBirdPegasusDecoderWrapper(config)
self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False)
self.init_weights()
def get_input_embeddings(self):
return self.model.decoder.embed_tokens
def set_input_embeddings(self, value):
self.model.decoder.embed_tokens = value
def get_output_embeddings(self):
return self.lm_head
def set_output_embeddings(self, new_embeddings):
self.lm_head = new_embeddings
def set_decoder(self, decoder):
self.model.decoder = decoder
def get_decoder(self):
return self.model.decoder
@replace_return_docstrings(output_type=CausalLMOutputWithCrossAttentions, config_class=_CONFIG_FOR_DOC)
def forward(
self,
input_ids=None,
attention_mask=None,
encoder_hidden_states=None,
encoder_attention_mask=None,
head_mask=None,
cross_attn_head_mask=None,
past_key_values=None,
inputs_embeds=None,
labels=None,
use_cache=None,
output_attentions=None,
output_hidden_states=None,
return_dict=None,
):
r"""
Args:
input_ids (:obj:`torch.LongTensor` of shape :obj:`(batch_size, sequence_length)`):
Indices of input sequence tokens in the vocabulary. Padding will be ignored by default should you
provide it.
Indices can be obtained using :class:`~transformers.BigBirdPegasusTokenizer`. See
:meth:`transformers.PreTrainedTokenizer.encode` and :meth:`transformers.PreTrainedTokenizer.__call__`
for details.
`What are input IDs? <../glossary.html#input-ids>`__
attention_mask (:obj:`torch.Tensor` of shape :obj:`(batch_size, sequence_length)`, `optional`):
Mask to avoid performing attention on padding token indices. Mask values selected in ``[0, 1]``:
- 1 for tokens that are **not masked**,
- 0 for tokens that are **masked**.
`What are attention masks? <../glossary.html#attention-mask>`__
encoder_hidden_states (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, sequence_length, hidden_size)`, `optional`):
Sequence of hidden-states at the output of the last layer of the encoder. Used in the cross-attention
if the model is configured as a decoder.
encoder_attention_mask (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, sequence_length)`, `optional`):
Mask to avoid performing attention on the padding token indices of the encoder input. This mask is used
                in the cross-attention if the model is configured as a decoder. Mask values selected in ``[0, 1]``:
                - 1 for tokens that are **not masked**,
                - 0 for tokens that are **masked**.
head_mask (:obj:`torch.Tensor` of shape :obj:`(decoder_layers, decoder_attention_heads)`, `optional`):
Mask to nullify selected heads of the attention modules. Mask values selected in ``[0, 1]``:
- 1 indicates the head is **not masked**,
- 0 indicates the head is **masked**.
cross_attn_head_mask (:obj:`torch.Tensor` of shape :obj:`(decoder_layers, decoder_attention_heads)`, `optional`):
Mask to nullify selected heads of the cross-attention modules. Mask values selected in ``[0, 1]``:
- 1 indicates the head is **not masked**,
- 0 indicates the head is **masked**.
past_key_values (:obj:`tuple(tuple(torch.FloatTensor))` of length :obj:`config.n_layers` with each tuple having 4 tensors of shape :obj:`(batch_size, num_heads, sequence_length - 1, embed_size_per_head)`):
Contains precomputed key and value hidden-states of the attention blocks. Can be used to speed up
decoding.
If :obj:`past_key_values` are used, the user can optionally input only the last ``decoder_input_ids``
(those that don't have their past key value states given to this model) of shape :obj:`(batch_size, 1)`
instead of all ``decoder_input_ids`` of shape :obj:`(batch_size, sequence_length)`.
labels (:obj:`torch.LongTensor` of shape :obj:`(batch_size, sequence_length)`, `optional`):
Labels for computing the masked language modeling loss. Indices should either be in ``[0, ...,
config.vocab_size]`` or -100 (see ``input_ids`` docstring). Tokens with indices set to ``-100`` are
ignored (masked), the loss is only computed for the tokens with labels in ``[0, ...,
config.vocab_size]``.
use_cache (:obj:`bool`, `optional`):
If set to :obj:`True`, :obj:`past_key_values` key value states are returned and can be used to speed up
decoding (see :obj:`past_key_values`).
output_attentions (:obj:`bool`, `optional`):
Whether or not to return the attentions tensors of all attention layers. See ``attentions`` under
returned tensors for more detail.
output_hidden_states (:obj:`bool`, `optional`):
Whether or not to return the hidden states of all layers. See ``hidden_states`` under returned tensors
for more detail.
return_dict (:obj:`bool`, `optional`):
Whether or not to return a :class:`~transformers.file_utils.ModelOutput` instead of a plain tuple.
Returns:
Example::
>>> from transformers import BigBirdPegasusTokenizer, BigBirdPegasusForCausalLM
>>> tokenizer = BigBirdPegasusTokenizer.from_pretrained("google/bigbird-pegasus-large-arxiv")
>>> model = BigBirdPegasusForCausalLM.from_pretrained("google/bigbird-pegasus-large-arxiv", add_cross_attention=False)
>>> assert model.config.is_decoder, f"{model.__class__} has to be configured as a decoder."
>>> inputs = tokenizer("Hello, my dog is cute", return_tensors="pt")
>>> outputs = model(**inputs)
>>> last_hidden_states = outputs.last_hidden_state
"""
output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
output_hidden_states = (
output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
)
return_dict = return_dict if return_dict is not None else self.config.use_return_dict
# decoder outputs consists of (dec_features, layer_state, dec_hidden, dec_attn)
outputs = self.model.decoder(
input_ids=input_ids,
attention_mask=attention_mask,
encoder_hidden_states=encoder_hidden_states,
encoder_attention_mask=encoder_attention_mask,
head_mask=head_mask,
cross_attn_head_mask=cross_attn_head_mask,
past_key_values=past_key_values,
inputs_embeds=inputs_embeds,
use_cache=use_cache,
output_attentions=output_attentions,
output_hidden_states=output_hidden_states,
return_dict=return_dict,
)
logits = self.lm_head(outputs[0])
loss = None
if labels is not None:
loss_fct = CrossEntropyLoss()
loss = loss_fct(logits.view(-1, self.config.vocab_size), labels.view(-1))
if not return_dict:
output = (logits,) + outputs[1:]
return (loss,) + output if loss is not None else output
return CausalLMOutputWithCrossAttentions(
loss=loss,
logits=logits,
past_key_values=outputs.past_key_values,
hidden_states=outputs.hidden_states,
attentions=outputs.attentions,
cross_attentions=outputs.cross_attentions,
)
def prepare_inputs_for_generation(self, input_ids, past=None, attention_mask=None, use_cache=None, **kwargs):
# if model is used as a decoder in encoder-decoder model, the decoder attention mask is created on the fly
if attention_mask is None:
attention_mask = input_ids.new_ones(input_ids.shape)
if past:
input_ids = input_ids[:, -1:]
# first step, decoder_cached_states are empty
return {
"input_ids": input_ids, # encoder_outputs is defined. input_ids not needed
"attention_mask": attention_mask,
"past_key_values": past,
"use_cache": use_cache,
}
@staticmethod
def _reorder_cache(past, beam_idx):
reordered_past = ()
for layer_past in past:
reordered_past += (tuple(past_state.index_select(0, beam_idx) for past_state in layer_past),)
return reordered_past
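# A minimal sketch, with toy tensors, of what _reorder_cache above does during
# beam search: every cached key/value tensor is re-indexed along the batch
# dimension so the cache follows the beams selected at this step. The cache
# layout below (one layer, two tensors) is a simplified assumption.
def _demo_reorder_cache():
    import torch
    past = (
        (torch.arange(3.0).view(3, 1, 1, 1),        # toy "key" cache, 3 beams
         torch.arange(3.0).view(3, 1, 1, 1) + 10),  # toy "value" cache
    )
    beam_idx = torch.tensor([2, 0, 0])              # beams chosen at this step
    reordered = tuple(
        tuple(state.index_select(0, beam_idx) for state in layer) for layer in past
    )
    return reordered  # the first cached entry now holds what was previously beam 2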
|
apache-2.0
|
jengowong/java_program_design_10
|
src/main/java/com/github/jengo/java/program/design10/AdditionQuiz.java
|
671
|
package com.github.jengo.java.program.design10;
import java.util.Scanner;
public class AdditionQuiz {
public static void main(String[] args) {
int number1 = (int) (System.currentTimeMillis() % 10);
int number2 = (int) (System.currentTimeMillis() * 7 % 10);
// Create a Scanner
Scanner input = new Scanner(System.in);
System.out.print(
"What is " + number1 + " + " + number2 + "? ");
int answer = input.nextInt();
System.out.println(
number1 + " + " + number2 + " = " + answer + " is " +
(number1 + number2 == answer));
}
}
|
apache-2.0
|
OpenWiseSolutions/openhub-framework
|
core-api/src/main/java/org/openhubframework/openhub/api/exception/NoDataFoundException.java
|
1400
|
/*
* Copyright 2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.openhubframework.openhub.api.exception;
import javax.annotation.Nullable;
/**
* Exception indicates that the expected data was not found.
*
* @author Petr Juza
*/
public class NoDataFoundException extends IntegrationException {
public static final ErrorExtEnum ERROR_CODE = InternalErrorEnum.E107;
/**
* Creates exception with the message.
*
* @param msg the message
*/
public NoDataFoundException(@Nullable String msg) {
super(ERROR_CODE, msg);
}
/**
* @param message exception description message
* @param cause the exception that caused this exception
*/
public NoDataFoundException(@Nullable String message, @Nullable Throwable cause) {
super(ERROR_CODE, message, cause);
}
}
|
apache-2.0
|
vazmer/mudrosti-laravel
|
config/app.php
|
7600
|
<?php
return [
/*
|--------------------------------------------------------------------------
| Application Debug Mode
|--------------------------------------------------------------------------
|
| When your application is in debug mode, detailed error messages with
| stack traces will be shown on every error that occurs within your
| application. If disabled, a simple generic error page is shown.
|
*/
'debug' => env('APP_DEBUG'),
/*
|--------------------------------------------------------------------------
| Application URL
|--------------------------------------------------------------------------
|
| This URL is used by the console to properly generate URLs when using
| the Artisan command line tool. You should set this to the root of
| your application so that it is used when running Artisan tasks.
|
*/
'url' => 'http://mudrosti.localhost',
/*
|--------------------------------------------------------------------------
| Application Timezone
|--------------------------------------------------------------------------
|
| Here you may specify the default timezone for your application, which
| will be used by the PHP date and date-time functions. We have gone
| ahead and set this to a sensible default for you out of the box.
|
*/
'timezone' => 'Europe/Belgrade',
/*
|--------------------------------------------------------------------------
| Application Locale Configuration
|--------------------------------------------------------------------------
|
| The application locale determines the default locale that will be used
| by the translation service provider. You are free to set this value
| to any of the locales which will be supported by the application.
|
*/
'locale' => 'en',
/*
|--------------------------------------------------------------------------
| Application Fallback Locale
|--------------------------------------------------------------------------
|
| The fallback locale determines the locale to use when the current one
| is not available. You may change the value to correspond to any of
| the language folders that are provided through your application.
|
*/
'fallback_locale' => 'en',
/*
|--------------------------------------------------------------------------
| Encryption Key
|--------------------------------------------------------------------------
|
| This key is used by the Illuminate encrypter service and should be set
| to a random, 32 character string, otherwise these encrypted strings
| will not be safe. Please do this before deploying an application!
|
*/
'key' => env('APP_KEY', 'SomeRandomString'),
'cipher' => MCRYPT_RIJNDAEL_128,
/*
|--------------------------------------------------------------------------
| Logging Configuration
|--------------------------------------------------------------------------
|
| Here you may configure the log settings for your application. Out of
| the box, Laravel uses the Monolog PHP logging library. This gives
| you a variety of powerful log handlers / formatters to utilize.
|
| Available Settings: "single", "daily", "syslog", "errorlog"
|
*/
'log' => 'daily',
/*
|--------------------------------------------------------------------------
| Autoloaded Service Providers
|--------------------------------------------------------------------------
|
| The service providers listed here will be automatically loaded on the
| request to your application. Feel free to add your own services to
| this array to grant expanded functionality to your applications.
|
*/
'providers' => array(
/*
* Laravel Framework Service Providers...
*/
'Illuminate\Foundation\Providers\ArtisanServiceProvider',
'Illuminate\Auth\AuthServiceProvider',
'Illuminate\Bus\BusServiceProvider',
'Illuminate\Cache\CacheServiceProvider',
'Illuminate\Foundation\Providers\ConsoleSupportServiceProvider',
'Illuminate\Routing\ControllerServiceProvider',
'Illuminate\Cookie\CookieServiceProvider',
'Illuminate\Database\DatabaseServiceProvider',
'Illuminate\Encryption\EncryptionServiceProvider',
'Illuminate\Filesystem\FilesystemServiceProvider',
'Illuminate\Foundation\Providers\FoundationServiceProvider',
'Illuminate\Hashing\HashServiceProvider',
'Illuminate\Mail\MailServiceProvider',
'Illuminate\Pagination\PaginationServiceProvider',
'Illuminate\Pipeline\PipelineServiceProvider',
'Illuminate\Queue\QueueServiceProvider',
'Illuminate\Redis\RedisServiceProvider',
'Illuminate\Auth\Passwords\PasswordResetServiceProvider',
'Illuminate\Session\SessionServiceProvider',
'Illuminate\Translation\TranslationServiceProvider',
'Illuminate\Validation\ValidationServiceProvider',
'Illuminate\View\ViewServiceProvider',
'Illuminate\Html\HtmlServiceProvider',
/*
* Application Service Providers...
*/
'App\Providers\AppServiceProvider',
'App\Providers\BusServiceProvider',
'App\Providers\ConfigServiceProvider',
'App\Providers\EventServiceProvider',
'App\Providers\RouteServiceProvider',
'Laracasts\Flash\FlashServiceProvider',
'Barryvdh\LaravelIdeHelper\IdeHelperServiceProvider',
'yajra\Datatables\DatatablesServiceProvider',
'Barryvdh\Cors\CorsServiceProvider',
'App\Providers\BladeServiceProvider',
),
/*
|--------------------------------------------------------------------------
| Class Aliases
|--------------------------------------------------------------------------
|
| This array of class aliases will be registered when this application
| is started. However, feel free to register as many as you wish as
| the aliases are "lazy" loaded so they don't hinder performance.
|
*/
'aliases' => [
'App' => 'Illuminate\Support\Facades\App',
'Artisan' => 'Illuminate\Support\Facades\Artisan',
'Auth' => 'Illuminate\Support\Facades\Auth',
'Blade' => 'Illuminate\Support\Facades\Blade',
'Bus' => 'Illuminate\Support\Facades\Bus',
'Cache' => 'Illuminate\Support\Facades\Cache',
'Config' => 'Illuminate\Support\Facades\Config',
'Cookie' => 'Illuminate\Support\Facades\Cookie',
'Crypt' => 'Illuminate\Support\Facades\Crypt',
'DB' => 'Illuminate\Support\Facades\DB',
'Eloquent' => 'Illuminate\Database\Eloquent\Model',
'Event' => 'Illuminate\Support\Facades\Event',
'File' => 'Illuminate\Support\Facades\File',
'Hash' => 'Illuminate\Support\Facades\Hash',
'Input' => 'Illuminate\Support\Facades\Input',
'Inspiring' => 'Illuminate\Foundation\Inspiring',
'Lang' => 'Illuminate\Support\Facades\Lang',
'Log' => 'Illuminate\Support\Facades\Log',
'Mail' => 'Illuminate\Support\Facades\Mail',
'Password' => 'Illuminate\Support\Facades\Password',
'Queue' => 'Illuminate\Support\Facades\Queue',
'Redirect' => 'Illuminate\Support\Facades\Redirect',
'Redis' => 'Illuminate\Support\Facades\Redis',
'Request' => 'Illuminate\Support\Facades\Request',
'Response' => 'Illuminate\Support\Facades\Response',
'Route' => 'Illuminate\Support\Facades\Route',
'Schema' => 'Illuminate\Support\Facades\Schema',
'Session' => 'Illuminate\Support\Facades\Session',
'Storage' => 'Illuminate\Support\Facades\Storage',
'URL' => 'Illuminate\Support\Facades\URL',
'Validator' => 'Illuminate\Support\Facades\Validator',
'View' => 'Illuminate\Support\Facades\View',
'Html' => 'Illuminate\Html\HtmlFacade',
'Form' => 'Illuminate\Html\FormFacade',
'Flash' => 'Laracasts\Flash\Flash',
'Datatables' => 'yajra\Datatables\Datatables',
],
];
|
apache-2.0
|
cloudfoundry-community/asp.net5-buildpack
|
fixtures/angular_msbuild_dotnet_2.1/ClientApp/node_modules/caniuse-lite/data/features/streams.js
|
917
|
module.exports={A:{A:{"2":"H D G E A FB","130":"B"},B:{"16":"C p","260":"x J","5124":"L N I"},C:{"2":"0 1 2 3 ZB CB F K H D G E A B C p x J L N I O P Q R S T U V W X Y Z b c d e f g h i j k l m n o M q r s t u v w XB RB","2626":"4 5 6 8 9 y AB"},D:{"2":"0 F K H D G E A B C p x J L N I O P Q R S T U V W X Y Z b c d e f g h i j k l m n o M q r s t u","260":"1 2 3 4 5 v w","1028":"6 8 9 y AB LB bB GB a HB IB JB"},E:{"2":"F K H D G E KB DB MB NB OB PB","3076":"A B C QB z SB"},F:{"2":"0 7 E B C J L N I O P Q R S T U V W X Y Z b c d e f g h TB UB VB WB z BB YB","260":"i j k l m n o","1028":"M q r s t u v w"},G:{"2":"G DB aB EB cB dB eB fB gB hB","16":"iB","1028":"C jB kB lB"},H:{"2":"mB"},I:{"2":"CB F nB oB pB qB EB rB sB","260":"a"},J:{"2":"D A"},K:{"2":"7 A B C z BB","1028":"M"},L:{"1028":"a"},M:{"2626":"y"},N:{"2":"A B"},O:{"2":"tB"},P:{"2":"F K uB","1028":"vB"},Q:{"2":"wB"},R:{"2":"xB"}},B:1,C:"Streams"};
|
apache-2.0
|
vincentpoon/hbase
|
hbase-common/src/main/java/org/apache/hadoop/hbase/io/encoding/NoneEncoder.java
|
2300
|
/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.io.encoding;
import java.io.DataOutputStream;
import java.io.IOException;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.PrivateCellUtil;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.io.WritableUtils;
@InterfaceAudience.Private
public class NoneEncoder {
private DataOutputStream out;
private HFileBlockDefaultEncodingContext encodingCtx;
public NoneEncoder(DataOutputStream out,
HFileBlockDefaultEncodingContext encodingCtx) {
this.out = out;
this.encodingCtx = encodingCtx;
}
public int write(Cell cell) throws IOException {
    // We write tags separately because, even when the KV carries no tags, the
    // tags length still needs to be written if the HFile context says tags are
    // included.
int size = KeyValueUtil.oswrite(cell, out, false);
// Write the additional tag into the stream
if (encodingCtx.getHFileContext().isIncludesTags()) {
int tagsLength = cell.getTagsLength();
out.writeShort(tagsLength);
if (tagsLength > 0) {
PrivateCellUtil.writeTags(out, cell, tagsLength);
}
size += tagsLength + KeyValue.TAGS_LENGTH_SIZE;
}
if (encodingCtx.getHFileContext().isIncludesMvcc()) {
WritableUtils.writeVLong(out, cell.getSequenceId());
size += WritableUtils.getVIntSize(cell.getSequenceId());
}
return size;
}
}
|
apache-2.0
|
rutvikd/ak-recipes
|
com_akrecipes-1.0.2/templates/html/com_k2/cached_image_util.php
|
5730
|
<?php
/*
Helper class to fetch a resized, cached image; if the cached copy does not exist, it resizes the source image and creates the cache entry.
*/
// no direct access
defined('_JEXEC') or die ;
class K2ItemTemplateImageHelper {
public static function isItemARecipe( $item ) {
$extraitems = array();
$isrecipe = false;
if ( !is_array($item->extra_fields) ) return false ;
foreach($item->extra_fields as $extrafield){
$extraitems[$extrafield->alias] = $extrafield;
}
if (!isset($extraitems['RecipeName'])) return false ;
$isrecipe = ( $extraitems['RecipeName']->value != null ? true : false );
return $isrecipe ;
}
public static function getCachedImageBySize($recipeImage, $imageSize = 'Small') {
$width = 100;
$height = 80;
switch ($imageSize) {
case 'XSmall':
$width = 100;
$height = 80;
break;
case 'Small':
$width = 200;
$height = 160;
break;
case 'Medium':
$width = 400;
$height = 300;
break;
case 'Large':
$width = 800;
$height = 600;
break;
case 'XLarge':
$recipeImage = preg_replace('/<img src="/',"",$recipeImage);
$recipeImage = preg_replace('/" alt="Recipe Image".*/'," ",$recipeImage);
return $recipeImage ;
//$width = 600;
//$height = 480;
break;
default:
$width = 100;
$height = 80;
break;
}
return self::getCachedImage($recipeImage, $width , $height);
}
public static function getCachedImage($recipeImage, $width = 100, $height = 80) {
$recipeImage = preg_replace('/<img src="/',"",$recipeImage);
$recipeImage = preg_replace('/" alt="Recipe Image".*/'," ",$recipeImage);
//echo "Recipe Item Image Val = " . $recipeImage ;
// echo "ImageVar = " . $recipeImage;
$imageFullPath = trim(JPATH_ROOT.DS.preg_replace('/.*images\//', 'images/', $recipeImage));
//$imageFullPath = trim(JPATH_ROOT.DS.preg_replace('/.*media\//', 'media/', $imageFullPath));
//echo "JPATH_ROOT" . JPATH_ROOT.DS ;
//$imageFullPath = str_replace("//", "/", $imageFullPath);
// echo "Image Path:" . $imageFullPath;
// echo "IMage Path Exists : " . JFile::exists($imageFullPath);
$cachedImage = preg_replace('/.*images\//', 'images/', $recipeImage);
$cachedImage = preg_replace('/\.jpg/i', '_thumbnail_' . $width . 'x' . $height . '.jpg', $cachedImage);
$cachedImageFullPath = JPATH_CACHE.DS.$cachedImage;
//echo "Cached Image Path:" . $cachedImageFullPath;
if (!JFile::exists($cachedImageFullPath)) {
//echo "!ImageVar = " . $cachedImage;
require_once (JPATH_ADMINISTRATOR.DS.'components'.DS.'com_k2'.DS.'lib'.DS.'class.upload.php');
//echo $cachedImageFullPath;
// $handle = new Upload("/var/www/html/dev2/images/archanaskitchen/Indian_Dal_Khadi_Curry/Fajeto_Gujarati_Mango_Yogurt_Kadhi_Recipe-1.jpg");
$handle = new Upload(trim($imageFullPath));
/* if ( $handle->uploaded ) {
echo $imageFullPath . " Uploaded" ;
} else {
echo $imageFullPath . " NOT Uploaded" ;
}
*/
$handle->image_resize = true;
$handle->image_x = $width;
$handle->image_y = $height;
$handle->image_convert = 'jpg';
$handle->jpeg_quality = 70;
//$handle->jpeg_quality = 100;
//$handle->jpeg_size = 12*1024;
$handle->image_ratio_crop = true;
//echo basename(JPATH_ROOT.$cachedImage);
$cachedImageBaseName = strstr(basename($cachedImage), '.',true) ;
$handle->file_new_name_body = $cachedImageBaseName ;
$pathToSaveCachedImage = dirname($cachedImageFullPath);
if ( !file_exists($pathToSaveCachedImage) ) {
				mkdir($pathToSaveCachedImage, 0755, true); // create nested cache directories recursively
}
//echo "Path to save: " . $pathToSaveCachedImage ;
$handle->process($pathToSaveCachedImage);
/*if ($handle->processed) {
//echo "Processed Image";
} else {
//echo "Process FAIL " . $handle->error;
}*/
}
return 'cache/' . $cachedImage ;
}
public static function getCachedImageFromPath($imagePath, $width = 100, $height = 80) {
if ( substr($imagePath, 0, 1) === '/' ) {
$imagePath = substr($imagePath, 1);
}
$imageFullPath = trim(JPATH_ROOT.DS.$imagePath);
$cachedImage = preg_replace('/\.jpg/i', '_thumbnail_' . $width . 'x' . $height . '.jpg', $imagePath);
$cachedImageFullPath = JPATH_CACHE.DS.$cachedImage;
//error_log("cachedImageFullPath --> " . $cachedImageFullPath );
if (!JFile::exists($cachedImageFullPath)) {
//echo "!ImageVar = " . $cachedImage;
require_once (JPATH_ADMINISTRATOR.DS.'components'.DS.'com_k2'.DS.'lib'.DS.'class.upload.php');
//echo $cachedImageFullPath;
// $handle = new Upload("/var/www/html/dev2/images/archanaskitchen/Indian_Dal_Khadi_Curry/Fajeto_Gujarati_Mango_Yogurt_Kadhi_Recipe-1.jpg");
$handle = new Upload(trim($imageFullPath));
/* if ( $handle->uploaded ) {
echo $imageFullPath . " Uploaded" ;
} else {
echo $imageFullPath . " NOT Uploaded" ;
}
*/
$handle->image_resize = true;
$handle->image_x = $width;
$handle->image_y = $height;
$handle->image_convert = 'jpg';
$handle->jpeg_quality = 70;
//$handle->jpeg_quality = 100;
//$handle->jpeg_size = 12*1024;
$handle->image_ratio_crop = true;
//echo basename(JPATH_ROOT.$cachedImage);
$cachedImageBaseName = strstr(basename($cachedImage), '.',true) ;
$handle->file_new_name_body = $cachedImageBaseName ;
$pathToSaveCachedImage = dirname($cachedImageFullPath);
//error_log("pathToSaveCachedImage --> " . $pathToSaveCachedImage );
if ( !file_exists($pathToSaveCachedImage) ) {
				mkdir($pathToSaveCachedImage, 0755, true); // create nested cache directories recursively
}
//echo "Path to save: " . $pathToSaveCachedImage ;
$handle->process($pathToSaveCachedImage);
/*if ($handle->processed) {
//echo "Processed Image";
} else {
//echo "Process FAIL " . $handle->error;
}*/
}
return 'cache/' . $cachedImage ;
}
}
?>
|
apache-2.0
|
nham/rust
|
src/libserialize/json.rs
|
124119
|
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Rust JSON serialization library
// Copyright (c) 2011 Google Inc.
#![forbid(non_camel_case_types)]
#![allow(missing_doc)]
/*!
JSON parsing and serialization
# What is JSON?
JSON (JavaScript Object Notation) is a text format for representing data, originating from JavaScript.
Like XML, it allows structured data to be encoded in a form that can be easily read by humans.
Its simple syntax and native compatibility with JavaScript have made it a widely used format.
Data types that can be encoded are JavaScript types (see the `Json` enum for more details):
* `Boolean`: equivalent to rust's `bool`
* `Number`: equivalent to rust's `f64`
* `String`: equivalent to rust's `String`
* `Array`: equivalent to rust's `Vec<T>`, but also allowing objects of different types in the same
array
* `Object`: equivalent to rust's `TreeMap<String, json::Json>`
* `Null`
An object is a series of string keys mapping to values, in `"key": value` format.
Arrays are enclosed in square brackets ([ ... ]) and objects in curly brackets ({ ... }).
A simple JSON document encoding a person, his/her age, address and phone numbers could look like:
```ignore
{
"FirstName": "John",
"LastName": "Doe",
"Age": 43,
"Address": {
"Street": "Downing Street 10",
"City": "London",
"Country": "Great Britain"
},
"PhoneNumbers": [
"+44 1234567",
"+44 2345678"
]
}
```
# Rust Type-based Encoding and Decoding
Rust provides a mechanism for low boilerplate encoding & decoding of values to and from JSON via
the serialization API.
To be able to encode a piece of data, it must implement the `serialize::Encodable` trait.
To be able to decode a piece of data, it must implement the `serialize::Decodable` trait.
The Rust compiler provides an annotation to automatically generate the code for these traits:
`#[deriving(Decodable, Encodable)]`
The JSON API provides an enum `json::Json` and a trait `ToJson` to encode objects.
The `ToJson` trait provides a `to_json` method to convert an object into a `json::Json` value.
A `json::Json` value can be encoded as a string or buffer using the functions described above.
You can also use the `json::Encoder` object, which implements the `Encoder` trait.
When using `ToJson` the `Encodable` trait implementation is not mandatory.
# Examples of use
## Using Autoserialization
Create a struct called `TestStruct` and serialize and deserialize it to and from JSON using the
serialization API, using the derived serialization code.
```rust
extern crate serialize;
use serialize::json;
// Automatically generate `Decodable` and `Encodable` trait implementations
#[deriving(Decodable, Encodable)]
pub struct TestStruct {
data_int: u8,
data_str: String,
data_vector: Vec<u8>,
}
fn main() {
let object = TestStruct {
data_int: 1,
data_str: "toto".to_string(),
data_vector: vec![2,3,4,5],
};
// Serialize using `json::encode`
let encoded = json::encode(&object);
// Deserialize using `json::decode`
let decoded: TestStruct = json::decode(encoded.as_slice()).unwrap();
}
```
## Using the `ToJson` trait
The examples below use the `ToJson` trait to generate the JSON string, which is required
for custom mappings.
### Simple example of `ToJson` usage
```rust
extern crate serialize;
use serialize::json::ToJson;
use serialize::json;
// A custom data structure
struct ComplexNum {
a: f64,
b: f64,
}
// JSON value representation
impl ToJson for ComplexNum {
fn to_json(&self) -> json::Json {
json::String(format!("{}+{}i", self.a, self.b))
}
}
// Only generate `Encodable` trait implementation
#[deriving(Encodable)]
pub struct ComplexNumRecord {
uid: u8,
dsc: String,
val: json::Json,
}
fn main() {
let num = ComplexNum { a: 0.0001, b: 12.539 };
let data: String = json::encode(&ComplexNumRecord{
uid: 1,
dsc: "test".to_string(),
val: num.to_json(),
});
println!("data: {}", data);
    // data: {"uid":1,"dsc":"test","val":"0.0001+12.539i"};
}
```
### Verbose example of `ToJson` usage
```rust
extern crate serialize;
use std::collections::TreeMap;
use serialize::json::ToJson;
use serialize::json;
// Only generate `Decodable` trait implementation
#[deriving(Decodable)]
pub struct TestStruct {
data_int: u8,
data_str: String,
data_vector: Vec<u8>,
}
// Specify encoding method manually
impl ToJson for TestStruct {
fn to_json(&self) -> json::Json {
let mut d = TreeMap::new();
// All standard types implement `to_json()`, so use it
d.insert("data_int".to_string(), self.data_int.to_json());
d.insert("data_str".to_string(), self.data_str.to_json());
d.insert("data_vector".to_string(), self.data_vector.to_json());
json::Object(d)
}
}
fn main() {
// Serialize using `ToJson`
let input_data = TestStruct {
data_int: 1,
data_str: "toto".to_string(),
data_vector: vec![2,3,4,5],
};
let json_obj: json::Json = input_data.to_json();
let json_str: String = json_obj.to_string();
// Deserialize like before
let decoded: TestStruct = json::decode(json_str.as_slice()).unwrap();
}
```
*/
use std;
use std::collections::{HashMap, TreeMap};
use std::{char, f64, fmt, io, num, str};
use std::io::MemWriter;
use std::mem::{swap, transmute};
use std::num::{FPNaN, FPInfinite};
use std::str::ScalarValue;
use std::string::String;
use std::vec::Vec;
use Encodable;
/// Represents a json value
#[deriving(Clone, PartialEq, PartialOrd)]
pub enum Json {
I64(i64),
U64(u64),
F64(f64),
String(String),
Boolean(bool),
List(List),
Object(Object),
Null,
}
pub type List = Vec<Json>;
pub type Object = TreeMap<String, Json>;
/// The errors that can arise while parsing a JSON stream.
#[deriving(Clone, PartialEq)]
pub enum ErrorCode {
InvalidSyntax,
InvalidNumber,
EOFWhileParsingObject,
EOFWhileParsingList,
EOFWhileParsingValue,
EOFWhileParsingString,
KeyMustBeAString,
ExpectedColon,
TrailingCharacters,
InvalidEscape,
InvalidUnicodeCodePoint,
LoneLeadingSurrogateInHexEscape,
UnexpectedEndOfHexEscape,
UnrecognizedHex,
NotFourDigit,
NotUtf8,
}
#[deriving(Clone, PartialEq, Show)]
pub enum ParserError {
/// msg, line, col
SyntaxError(ErrorCode, uint, uint),
IoError(io::IoErrorKind, &'static str),
}
// Builder and Parser have the same errors.
pub type BuilderError = ParserError;
#[deriving(Clone, PartialEq, Show)]
pub enum DecoderError {
ParseError(ParserError),
ExpectedError(String, String),
MissingFieldError(String),
UnknownVariantError(String),
ApplicationError(String)
}
/// Returns a readable error string for a given error code.
pub fn error_str(error: ErrorCode) -> &'static str {
return match error {
InvalidSyntax => "invalid syntax",
InvalidNumber => "invalid number",
EOFWhileParsingObject => "EOF While parsing object",
EOFWhileParsingList => "EOF While parsing list",
EOFWhileParsingValue => "EOF While parsing value",
EOFWhileParsingString => "EOF While parsing string",
KeyMustBeAString => "key must be a string",
ExpectedColon => "expected `:`",
TrailingCharacters => "trailing characters",
InvalidEscape => "invalid escape",
UnrecognizedHex => "invalid \\u escape (unrecognized hex)",
NotFourDigit => "invalid \\u escape (not four digits)",
NotUtf8 => "contents not utf-8",
InvalidUnicodeCodePoint => "invalid unicode code point",
LoneLeadingSurrogateInHexEscape => "lone leading surrogate in hex escape",
UnexpectedEndOfHexEscape => "unexpected end of hex escape",
}
}
/// Shortcut function to decode a JSON `&str` into an object
pub fn decode<T: ::Decodable<Decoder, DecoderError>>(s: &str) -> DecodeResult<T> {
let json = match from_str(s) {
Ok(x) => x,
Err(e) => return Err(ParseError(e))
};
let mut decoder = Decoder::new(json);
::Decodable::decode(&mut decoder)
}
/// Shortcut function to encode a `T` into a JSON `String`
pub fn encode<'a, T: Encodable<Encoder<'a>, io::IoError>>(object: &T) -> String {
let buff = Encoder::buffer_encode(object);
String::from_utf8(buff).unwrap()
}
impl fmt::Show for ErrorCode {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
error_str(*self).fmt(f)
}
}
fn io_error_to_error(io: io::IoError) -> ParserError {
IoError(io.kind, io.desc)
}
pub type EncodeResult = io::IoResult<()>;
pub type DecodeResult<T> = Result<T, DecoderError>;
pub fn escape_bytes(wr: &mut io::Writer, bytes: &[u8]) -> Result<(), io::IoError> {
try!(wr.write_str("\""));
let mut start = 0;
for (i, byte) in bytes.iter().enumerate() {
let escaped = match *byte {
b'"' => "\\\"",
b'\\' => "\\\\",
b'\x08' => "\\b",
b'\x0c' => "\\f",
b'\n' => "\\n",
b'\r' => "\\r",
b'\t' => "\\t",
_ => { continue; }
};
if start < i {
try!(wr.write(bytes.slice(start, i)));
}
try!(wr.write_str(escaped));
start = i + 1;
}
if start != bytes.len() {
try!(wr.write(bytes.slice_from(start)));
}
wr.write_str("\"")
}
fn escape_str(writer: &mut io::Writer, v: &str) -> Result<(), io::IoError> {
escape_bytes(writer, v.as_bytes())
}
fn escape_char(writer: &mut io::Writer, v: char) -> Result<(), io::IoError> {
let mut buf = [0, .. 4];
v.encode_utf8(buf);
escape_bytes(writer, buf)
}
fn spaces(wr: &mut io::Writer, mut n: uint) -> Result<(), io::IoError> {
static len: uint = 16;
static buf: [u8, ..len] = [b' ', ..len];
while n >= len {
try!(wr.write(buf));
n -= len;
}
if n > 0 {
wr.write(buf.slice_to(n))
} else {
Ok(())
}
}
fn fmt_number_or_null(v: f64) -> String {
match v.classify() {
FPNaN | FPInfinite => String::from_str("null"),
_ => f64::to_str_digits(v, 6u)
}
}
/// A structure for implementing serialization to JSON.
pub struct Encoder<'a> {
writer: &'a mut io::Writer+'a,
}
impl<'a> Encoder<'a> {
/// Creates a new JSON encoder whose output will be written to the writer
/// specified.
pub fn new(writer: &'a mut io::Writer) -> Encoder<'a> {
Encoder { writer: writer }
}
/// Encode the specified struct into a json [u8]
pub fn buffer_encode<T:Encodable<Encoder<'a>, io::IoError>>(object: &T) -> Vec<u8> {
//Serialize the object in a string using a writer
let mut m = MemWriter::new();
// FIXME(14302) remove the transmute and unsafe block.
unsafe {
let mut encoder = Encoder::new(&mut m as &mut io::Writer);
// MemWriter never Errs
let _ = object.encode(transmute(&mut encoder));
}
m.unwrap()
}
/// Encode the specified struct into a json str
///
/// Note: this function is deprecated. Consider using `json::encode` instead.
#[deprecated = "Replaced by `json::encode`"]
pub fn str_encode<T: Encodable<Encoder<'a>, io::IoError>>(object: &T) -> String {
encode(object)
}
}
impl<'a> ::Encoder<io::IoError> for Encoder<'a> {
fn emit_nil(&mut self) -> EncodeResult { write!(self.writer, "null") }
fn emit_uint(&mut self, v: uint) -> EncodeResult { self.emit_f64(v as f64) }
fn emit_u64(&mut self, v: u64) -> EncodeResult { self.emit_f64(v as f64) }
fn emit_u32(&mut self, v: u32) -> EncodeResult { self.emit_f64(v as f64) }
fn emit_u16(&mut self, v: u16) -> EncodeResult { self.emit_f64(v as f64) }
fn emit_u8(&mut self, v: u8) -> EncodeResult { self.emit_f64(v as f64) }
fn emit_int(&mut self, v: int) -> EncodeResult { self.emit_f64(v as f64) }
fn emit_i64(&mut self, v: i64) -> EncodeResult { self.emit_f64(v as f64) }
fn emit_i32(&mut self, v: i32) -> EncodeResult { self.emit_f64(v as f64) }
fn emit_i16(&mut self, v: i16) -> EncodeResult { self.emit_f64(v as f64) }
fn emit_i8(&mut self, v: i8) -> EncodeResult { self.emit_f64(v as f64) }
fn emit_bool(&mut self, v: bool) -> EncodeResult {
if v {
write!(self.writer, "true")
} else {
write!(self.writer, "false")
}
}
fn emit_f64(&mut self, v: f64) -> EncodeResult {
write!(self.writer, "{}", fmt_number_or_null(v))
}
fn emit_f32(&mut self, v: f32) -> EncodeResult { self.emit_f64(v as f64) }
fn emit_char(&mut self, v: char) -> EncodeResult {
escape_char(self.writer, v)
}
fn emit_str(&mut self, v: &str) -> EncodeResult {
escape_str(self.writer, v)
}
fn emit_enum(&mut self, _name: &str, f: |&mut Encoder<'a>| -> EncodeResult) -> EncodeResult {
f(self)
}
fn emit_enum_variant(&mut self,
name: &str,
_id: uint,
cnt: uint,
f: |&mut Encoder<'a>| -> EncodeResult) -> EncodeResult {
// enums are encoded as strings or objects
// Bunny => "Bunny"
// Kangaroo(34,"William") => {"variant": "Kangaroo", "fields": [34,"William"]}
if cnt == 0 {
escape_str(self.writer, name)
} else {
try!(write!(self.writer, "{{\"variant\":"));
try!(escape_str(self.writer, name));
try!(write!(self.writer, ",\"fields\":["));
try!(f(self));
write!(self.writer, "]}}")
}
}
fn emit_enum_variant_arg(&mut self,
idx: uint,
f: |&mut Encoder<'a>| -> EncodeResult) -> EncodeResult {
if idx != 0 {
try!(write!(self.writer, ","));
}
f(self)
}
fn emit_enum_struct_variant(&mut self,
name: &str,
id: uint,
cnt: uint,
f: |&mut Encoder<'a>| -> EncodeResult) -> EncodeResult {
self.emit_enum_variant(name, id, cnt, f)
}
fn emit_enum_struct_variant_field(&mut self,
_: &str,
idx: uint,
f: |&mut Encoder<'a>| -> EncodeResult) -> EncodeResult {
self.emit_enum_variant_arg(idx, f)
}
fn emit_struct(&mut self,
_: &str,
_: uint,
f: |&mut Encoder<'a>| -> EncodeResult) -> EncodeResult {
try!(write!(self.writer, "{{"));
try!(f(self));
write!(self.writer, "}}")
}
fn emit_struct_field(&mut self,
name: &str,
idx: uint,
f: |&mut Encoder<'a>| -> EncodeResult) -> EncodeResult {
if idx != 0 { try!(write!(self.writer, ",")); }
try!(escape_str(self.writer, name));
try!(write!(self.writer, ":"));
f(self)
}
fn emit_tuple(&mut self, len: uint, f: |&mut Encoder<'a>| -> EncodeResult) -> EncodeResult {
self.emit_seq(len, f)
}
fn emit_tuple_arg(&mut self,
idx: uint,
f: |&mut Encoder<'a>| -> EncodeResult) -> EncodeResult {
self.emit_seq_elt(idx, f)
}
fn emit_tuple_struct(&mut self,
_name: &str,
len: uint,
f: |&mut Encoder<'a>| -> EncodeResult) -> EncodeResult {
self.emit_seq(len, f)
}
fn emit_tuple_struct_arg(&mut self,
idx: uint,
f: |&mut Encoder<'a>| -> EncodeResult) -> EncodeResult {
self.emit_seq_elt(idx, f)
}
fn emit_option(&mut self, f: |&mut Encoder<'a>| -> EncodeResult) -> EncodeResult {
f(self)
}
fn emit_option_none(&mut self) -> EncodeResult { self.emit_nil() }
fn emit_option_some(&mut self, f: |&mut Encoder<'a>| -> EncodeResult) -> EncodeResult {
f(self)
}
fn emit_seq(&mut self, _len: uint, f: |&mut Encoder<'a>| -> EncodeResult) -> EncodeResult {
try!(write!(self.writer, "["));
try!(f(self));
write!(self.writer, "]")
}
fn emit_seq_elt(&mut self, idx: uint, f: |&mut Encoder<'a>| -> EncodeResult) -> EncodeResult {
if idx != 0 {
try!(write!(self.writer, ","));
}
f(self)
}
fn emit_map(&mut self, _len: uint, f: |&mut Encoder<'a>| -> EncodeResult) -> EncodeResult {
try!(write!(self.writer, "{{"));
try!(f(self));
write!(self.writer, "}}")
}
fn emit_map_elt_key(&mut self,
idx: uint,
f: |&mut Encoder<'a>| -> EncodeResult) -> EncodeResult {
if idx != 0 { try!(write!(self.writer, ",")) }
// ref #12967, make sure to wrap a key in double quotes,
// in the event that its of a type that omits them (eg numbers)
let mut buf = MemWriter::new();
// FIXME(14302) remove the transmute and unsafe block.
unsafe {
let mut check_encoder = Encoder::new(&mut buf);
try!(f(transmute(&mut check_encoder)));
}
let out = str::from_utf8(buf.get_ref()).unwrap();
let needs_wrapping = out.char_at(0) != '"' && out.char_at_reverse(out.len()) != '"';
if needs_wrapping { try!(write!(self.writer, "\"")); }
try!(f(self));
if needs_wrapping { try!(write!(self.writer, "\"")); }
Ok(())
}
fn emit_map_elt_val(&mut self,
_idx: uint,
f: |&mut Encoder<'a>| -> EncodeResult) -> EncodeResult {
try!(write!(self.writer, ":"));
f(self)
}
}
/// Another encoder for JSON, but prints out human-readable JSON instead of
/// compact data
pub struct PrettyEncoder<'a> {
writer: &'a mut io::Writer+'a,
curr_indent: uint,
indent: uint,
}
impl<'a> PrettyEncoder<'a> {
/// Creates a new encoder whose output will be written to the specified writer
pub fn new<'a>(writer: &'a mut io::Writer) -> PrettyEncoder<'a> {
PrettyEncoder { writer: writer, curr_indent: 0, indent: 2, }
}
/// Set the number of spaces to indent for each level.
/// This is safe to set during encoding.
pub fn set_indent<'a>(&mut self, indent: uint) {
// self.indent very well could be 0 so we need to use checked division.
let level = self.curr_indent.checked_div(&self.indent).unwrap_or(0);
self.indent = indent;
self.curr_indent = level * self.indent;
}
}
impl<'a> ::Encoder<io::IoError> for PrettyEncoder<'a> {
fn emit_nil(&mut self) -> EncodeResult { write!(self.writer, "null") }
fn emit_uint(&mut self, v: uint) -> EncodeResult { self.emit_f64(v as f64) }
fn emit_u64(&mut self, v: u64) -> EncodeResult { self.emit_f64(v as f64) }
fn emit_u32(&mut self, v: u32) -> EncodeResult { self.emit_f64(v as f64) }
fn emit_u16(&mut self, v: u16) -> EncodeResult { self.emit_f64(v as f64) }
fn emit_u8(&mut self, v: u8) -> EncodeResult { self.emit_f64(v as f64) }
fn emit_int(&mut self, v: int) -> EncodeResult { self.emit_f64(v as f64) }
fn emit_i64(&mut self, v: i64) -> EncodeResult { self.emit_f64(v as f64) }
fn emit_i32(&mut self, v: i32) -> EncodeResult { self.emit_f64(v as f64) }
fn emit_i16(&mut self, v: i16) -> EncodeResult { self.emit_f64(v as f64) }
fn emit_i8(&mut self, v: i8) -> EncodeResult { self.emit_f64(v as f64) }
fn emit_bool(&mut self, v: bool) -> EncodeResult {
if v {
write!(self.writer, "true")
} else {
write!(self.writer, "false")
}
}
fn emit_f64(&mut self, v: f64) -> EncodeResult {
write!(self.writer, "{}", fmt_number_or_null(v))
}
fn emit_f32(&mut self, v: f32) -> EncodeResult {
self.emit_f64(v as f64)
}
fn emit_char(&mut self, v: char) -> EncodeResult {
escape_char(self.writer, v)
}
fn emit_str(&mut self, v: &str) -> EncodeResult {
escape_str(self.writer, v)
}
fn emit_enum(&mut self,
_name: &str,
f: |&mut PrettyEncoder<'a>| -> EncodeResult) -> EncodeResult {
f(self)
}
fn emit_enum_variant(&mut self,
name: &str,
_: uint,
cnt: uint,
f: |&mut PrettyEncoder<'a>| -> EncodeResult) -> EncodeResult {
if cnt == 0 {
escape_str(self.writer, name)
} else {
self.curr_indent += self.indent;
try!(write!(self.writer, "[\n"));
try!(spaces(self.writer, self.curr_indent));
try!(escape_str(self.writer, name));
try!(write!(self.writer, ",\n"));
try!(f(self));
self.curr_indent -= self.indent;
try!(write!(self.writer, "\n"));
try!(spaces(self.writer, self.curr_indent));
write!(self.writer, "]")
}
}
fn emit_enum_variant_arg(&mut self,
idx: uint,
f: |&mut PrettyEncoder<'a>| -> EncodeResult) -> EncodeResult {
if idx != 0 {
try!(write!(self.writer, ",\n"));
}
try!(spaces(self.writer, self.curr_indent));
f(self)
}
fn emit_enum_struct_variant(&mut self,
name: &str,
id: uint,
cnt: uint,
f: |&mut PrettyEncoder<'a>| -> EncodeResult) -> EncodeResult {
self.emit_enum_variant(name, id, cnt, f)
}
fn emit_enum_struct_variant_field(&mut self,
_: &str,
idx: uint,
f: |&mut PrettyEncoder<'a>| -> EncodeResult) -> EncodeResult {
self.emit_enum_variant_arg(idx, f)
}
fn emit_struct(&mut self,
_: &str,
len: uint,
f: |&mut PrettyEncoder<'a>| -> EncodeResult) -> EncodeResult {
if len == 0 {
write!(self.writer, "{{}}")
} else {
try!(write!(self.writer, "{{"));
self.curr_indent += self.indent;
try!(f(self));
self.curr_indent -= self.indent;
try!(write!(self.writer, "\n"));
try!(spaces(self.writer, self.curr_indent));
write!(self.writer, "}}")
}
}
fn emit_struct_field(&mut self,
name: &str,
idx: uint,
f: |&mut PrettyEncoder<'a>| -> EncodeResult) -> EncodeResult {
if idx == 0 {
try!(write!(self.writer, "\n"));
} else {
try!(write!(self.writer, ",\n"));
}
try!(spaces(self.writer, self.curr_indent));
try!(escape_str(self.writer, name));
try!(write!(self.writer, ": "));
f(self)
}
fn emit_tuple(&mut self,
len: uint,
f: |&mut PrettyEncoder<'a>| -> EncodeResult) -> EncodeResult {
self.emit_seq(len, f)
}
fn emit_tuple_arg(&mut self,
idx: uint,
f: |&mut PrettyEncoder<'a>| -> EncodeResult) -> EncodeResult {
self.emit_seq_elt(idx, f)
}
fn emit_tuple_struct(&mut self,
_: &str,
len: uint,
f: |&mut PrettyEncoder<'a>| -> EncodeResult) -> EncodeResult {
self.emit_seq(len, f)
}
fn emit_tuple_struct_arg(&mut self,
idx: uint,
f: |&mut PrettyEncoder<'a>| -> EncodeResult) -> EncodeResult {
self.emit_seq_elt(idx, f)
}
fn emit_option(&mut self, f: |&mut PrettyEncoder<'a>| -> EncodeResult) -> EncodeResult {
f(self)
}
fn emit_option_none(&mut self) -> EncodeResult { self.emit_nil() }
fn emit_option_some(&mut self, f: |&mut PrettyEncoder<'a>| -> EncodeResult) -> EncodeResult {
f(self)
}
fn emit_seq(&mut self,
len: uint,
f: |&mut PrettyEncoder<'a>| -> EncodeResult) -> EncodeResult {
if len == 0 {
write!(self.writer, "[]")
} else {
try!(write!(self.writer, "["));
self.curr_indent += self.indent;
try!(f(self));
self.curr_indent -= self.indent;
try!(write!(self.writer, "\n"));
try!(spaces(self.writer, self.curr_indent));
write!(self.writer, "]")
}
}
fn emit_seq_elt(&mut self,
idx: uint,
f: |&mut PrettyEncoder<'a>| -> EncodeResult) -> EncodeResult {
if idx == 0 {
try!(write!(self.writer, "\n"));
} else {
try!(write!(self.writer, ",\n"));
}
try!(spaces(self.writer, self.curr_indent));
f(self)
}
fn emit_map(&mut self,
len: uint,
f: |&mut PrettyEncoder<'a>| -> EncodeResult) -> EncodeResult {
if len == 0 {
write!(self.writer, "{{}}")
} else {
try!(write!(self.writer, "{{"));
self.curr_indent += self.indent;
try!(f(self));
self.curr_indent -= self.indent;
try!(write!(self.writer, "\n"));
try!(spaces(self.writer, self.curr_indent));
write!(self.writer, "}}")
}
}
fn emit_map_elt_key(&mut self,
idx: uint,
f: |&mut PrettyEncoder<'a>| -> EncodeResult) -> EncodeResult {
if idx == 0 {
try!(write!(self.writer, "\n"));
} else {
try!(write!(self.writer, ",\n"));
}
try!(spaces(self.writer, self.curr_indent));
// ref #12967, make sure to wrap a key in double quotes,
// in the event that its of a type that omits them (eg numbers)
let mut buf = MemWriter::new();
// FIXME(14302) remove the transmute and unsafe block.
unsafe {
let mut check_encoder = PrettyEncoder::new(&mut buf);
try!(f(transmute(&mut check_encoder)));
}
let out = str::from_utf8(buf.get_ref()).unwrap();
let needs_wrapping = out.char_at(0) != '"' && out.char_at_reverse(out.len()) != '"';
if needs_wrapping { try!(write!(self.writer, "\"")); }
try!(f(self));
if needs_wrapping { try!(write!(self.writer, "\"")); }
Ok(())
}
fn emit_map_elt_val(&mut self,
_idx: uint,
f: |&mut PrettyEncoder<'a>| -> EncodeResult) -> EncodeResult {
try!(write!(self.writer, ": "));
f(self)
}
}
impl<E: ::Encoder<S>, S> Encodable<E, S> for Json {
fn encode(&self, e: &mut E) -> Result<(), S> {
match *self {
I64(v) => v.encode(e),
U64(v) => v.encode(e),
F64(v) => v.encode(e),
String(ref v) => v.encode(e),
Boolean(v) => v.encode(e),
List(ref v) => v.encode(e),
Object(ref v) => v.encode(e),
Null => e.emit_nil(),
}
}
}
impl Json {
/// Encodes a json value into an io::writer. Uses a single line.
pub fn to_writer(&self, writer: &mut io::Writer) -> EncodeResult {
let mut encoder = Encoder::new(writer);
self.encode(&mut encoder)
}
/// Encodes a json value into an io::writer.
/// Pretty-prints in a more readable format.
pub fn to_pretty_writer(&self, writer: &mut io::Writer) -> EncodeResult {
let mut encoder = PrettyEncoder::new(writer);
self.encode(&mut encoder)
}
/// Encodes a json value into a string
pub fn to_pretty_str(&self) -> String {
let mut s = MemWriter::new();
self.to_pretty_writer(&mut s as &mut io::Writer).unwrap();
String::from_utf8(s.unwrap()).unwrap()
}
/// If the Json value is an Object, returns the value associated with the provided key.
/// Otherwise, returns None.
pub fn find<'a>(&'a self, key: &String) -> Option<&'a Json>{
match self {
&Object(ref map) => map.find(key),
_ => None
}
}
/// Attempts to get a nested Json Object for each key in `keys`.
/// If any key is found not to exist, find_path will return None.
/// Otherwise, it will return the Json value associated with the final key.
pub fn find_path<'a>(&'a self, keys: &[&String]) -> Option<&'a Json>{
let mut target = self;
for key in keys.iter() {
match target.find(*key) {
Some(t) => { target = t; },
None => return None
}
}
Some(target)
}
/// If the Json value is an Object, performs a depth-first search until
/// a value associated with the provided key is found. If no value is found
/// or the Json value is not an Object, returns None.
pub fn search<'a>(&'a self, key: &String) -> Option<&'a Json> {
match self {
&Object(ref map) => {
match map.find(key) {
Some(json_value) => Some(json_value),
None => {
let mut value : Option<&'a Json> = None;
for (_, v) in map.iter() {
value = v.search(key);
if value.is_some() {
break;
}
}
value
}
}
},
_ => None
}
}
/// Returns true if the Json value is an Object. Returns false otherwise.
pub fn is_object<'a>(&'a self) -> bool {
self.as_object().is_some()
}
/// If the Json value is an Object, returns the associated TreeMap.
/// Returns None otherwise.
pub fn as_object<'a>(&'a self) -> Option<&'a Object> {
match self {
&Object(ref map) => Some(map),
_ => None
}
}
/// Returns true if the Json value is a List. Returns false otherwise.
pub fn is_list<'a>(&'a self) -> bool {
self.as_list().is_some()
}
/// If the Json value is a List, returns the associated vector.
/// Returns None otherwise.
pub fn as_list<'a>(&'a self) -> Option<&'a List> {
match self {
&List(ref list) => Some(&*list),
_ => None
}
}
/// Returns true if the Json value is a String. Returns false otherwise.
pub fn is_string<'a>(&'a self) -> bool {
self.as_string().is_some()
}
/// If the Json value is a String, returns the associated str.
/// Returns None otherwise.
pub fn as_string<'a>(&'a self) -> Option<&'a str> {
match *self {
String(ref s) => Some(s.as_slice()),
_ => None
}
}
/// Returns true if the Json value is a Number. Returns false otherwise.
pub fn is_number(&self) -> bool {
match *self {
I64(_) | U64(_) | F64(_) => true,
_ => false,
}
}
/// Returns true if the Json value is a i64. Returns false otherwise.
pub fn is_i64(&self) -> bool {
match *self {
I64(_) => true,
_ => false,
}
}
/// Returns true if the Json value is a u64. Returns false otherwise.
pub fn is_u64(&self) -> bool {
match *self {
U64(_) => true,
_ => false,
}
}
/// Returns true if the Json value is a f64. Returns false otherwise.
pub fn is_f64(&self) -> bool {
match *self {
F64(_) => true,
_ => false,
}
}
/// If the Json value is a number, return or cast it to a i64.
/// Returns None otherwise.
pub fn as_i64(&self) -> Option<i64> {
match *self {
I64(n) => Some(n),
U64(n) => num::cast(n),
_ => None
}
}
/// If the Json value is a number, return or cast it to a u64.
/// Returns None otherwise.
pub fn as_u64(&self) -> Option<u64> {
match *self {
I64(n) => num::cast(n),
U64(n) => Some(n),
_ => None
}
}
/// If the Json value is a number, return or cast it to a f64.
/// Returns None otherwise.
pub fn as_f64(&self) -> Option<f64> {
match *self {
I64(n) => num::cast(n),
U64(n) => num::cast(n),
F64(n) => Some(n),
_ => None
}
}
/// Returns true if the Json value is a Boolean. Returns false otherwise.
pub fn is_boolean(&self) -> bool {
self.as_boolean().is_some()
}
/// If the Json value is a Boolean, returns the associated bool.
/// Returns None otherwise.
pub fn as_boolean(&self) -> Option<bool> {
match self {
&Boolean(b) => Some(b),
_ => None
}
}
/// Returns true if the Json value is a Null. Returns false otherwise.
pub fn is_null(&self) -> bool {
self.as_null().is_some()
}
/// If the Json value is a Null, returns ().
/// Returns None otherwise.
pub fn as_null(&self) -> Option<()> {
match self {
&Null => Some(()),
_ => None
}
}
}
/// The output of the streaming parser.
#[deriving(PartialEq, Clone, Show)]
pub enum JsonEvent {
ObjectStart,
ObjectEnd,
ListStart,
ListEnd,
BooleanValue(bool),
I64Value(i64),
U64Value(u64),
F64Value(f64),
StringValue(String),
NullValue,
Error(ParserError),
}
#[deriving(PartialEq, Show)]
enum ParserState {
// Parse a value in a list, true means first element.
ParseList(bool),
// Parse ',' or ']' after an element in a list.
ParseListComma,
// Parse a key:value in an object, true means first element.
ParseObject(bool),
// Parse ',' or '}' after an element in an object.
ParseObjectComma,
// Initial state.
ParseStart,
// Expecting the stream to end.
ParseBeforeFinish,
// Parsing can't continue.
ParseFinished,
}
/// A Stack represents the current position of the parser in the logical
/// structure of the JSON stream.
/// For example foo.bar[3].x
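///
/// # Example
///
/// A hypothetical sketch of how the stack is usually inspected while driving
/// a `Parser` by hand (not run as a doc test):
///
/// ```ignore
/// let mut parser = Parser::new(r#"{"foo": [42]}"#.chars());
/// parser.next(); // ObjectStart, stack is empty
/// parser.next(); // ListStart, stack is foo
/// parser.next(); // U64Value(42), stack is foo[0]
/// assert_eq!(parser.stack().get(0), Key("foo"));
/// assert_eq!(parser.stack().top(), Some(Index(0)));
/// ```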
pub struct Stack {
stack: Vec<InternalStackElement>,
str_buffer: Vec<u8>,
}
/// StackElements compose a Stack.
/// For example, Key("foo"), Key("bar"), Index(3) and Key("x") are the
/// StackElements composing the stack that represents foo.bar[3].x
#[deriving(PartialEq, Clone, Show)]
pub enum StackElement<'l> {
Index(u32),
Key(&'l str),
}
// Internally, Key elements are stored as indices in a buffer to avoid
// allocating a string for every member of an object.
#[deriving(PartialEq, Clone, Show)]
enum InternalStackElement {
InternalIndex(u32),
InternalKey(u16, u16), // start, size
}
impl Stack {
pub fn new() -> Stack {
Stack { stack: Vec::new(), str_buffer: Vec::new() }
}
/// Returns the number of elements in the Stack.
pub fn len(&self) -> uint { self.stack.len() }
/// Returns true if the stack is empty.
pub fn is_empty(&self) -> bool { self.stack.is_empty() }
/// Provides access to the StackElement at a given index.
/// Lower indices are at the bottom of the stack while higher indices are
/// at the top.
pub fn get<'l>(&'l self, idx: uint) -> StackElement<'l> {
match self.stack[idx] {
InternalIndex(i) => { Index(i) }
InternalKey(start, size) => {
Key(str::from_utf8(
self.str_buffer.slice(start as uint, start as uint + size as uint)).unwrap())
}
}
}
/// Compares this stack with an array of StackElements.
pub fn is_equal_to(&self, rhs: &[StackElement]) -> bool {
if self.stack.len() != rhs.len() { return false; }
for i in range(0, rhs.len()) {
if self.get(i) != rhs[i] { return false; }
}
return true;
}
/// Returns true if the bottom-most elements of this stack are the same as
/// the ones passed as parameter.
pub fn starts_with(&self, rhs: &[StackElement]) -> bool {
if self.stack.len() < rhs.len() { return false; }
for i in range(0, rhs.len()) {
if self.get(i) != rhs[i] { return false; }
}
return true;
}
/// Returns true if the top-most elements of this stack are the same as
/// the ones passed as parameter.
pub fn ends_with(&self, rhs: &[StackElement]) -> bool {
if self.stack.len() < rhs.len() { return false; }
let offset = self.stack.len() - rhs.len();
for i in range(0, rhs.len()) {
if self.get(i + offset) != rhs[i] { return false; }
}
return true;
}
/// Returns the top-most element (if any).
pub fn top<'l>(&'l self) -> Option<StackElement<'l>> {
return match self.stack.last() {
None => None,
Some(&InternalIndex(i)) => Some(Index(i)),
Some(&InternalKey(start, size)) => {
Some(Key(str::from_utf8(
self.str_buffer.slice(start as uint, (start+size) as uint)
).unwrap()))
}
}
}
// Used by Parser to insert Key elements at the top of the stack.
fn push_key(&mut self, key: String) {
self.stack.push(InternalKey(self.str_buffer.len() as u16, key.len() as u16));
for c in key.as_bytes().iter() {
self.str_buffer.push(*c);
}
}
// Used by Parser to insert Index elements at the top of the stack.
fn push_index(&mut self, index: u32) {
self.stack.push(InternalIndex(index));
}
// Used by Parser to remove the top-most element of the stack.
fn pop(&mut self) {
assert!(!self.is_empty());
match *self.stack.last().unwrap() {
InternalKey(_, sz) => {
let new_size = self.str_buffer.len() - sz as uint;
self.str_buffer.truncate(new_size);
}
InternalIndex(_) => {}
}
self.stack.pop();
}
// Used by Parser to test whether the top-most element is an index.
fn last_is_index(&self) -> bool {
if self.is_empty() { return false; }
return match *self.stack.last().unwrap() {
InternalIndex(_) => true,
_ => false,
}
}
// Used by Parser to increment the index of the top-most element.
fn bump_index(&mut self) {
let len = self.stack.len();
let idx = match *self.stack.last().unwrap() {
InternalIndex(i) => { i + 1 }
_ => { fail!(); }
};
*self.stack.get_mut(len - 1) = InternalIndex(idx);
}
}
/// A streaming JSON parser implemented as an iterator of JsonEvent, consuming
/// an iterator of char.
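///
/// # Example
///
/// An illustrative sketch of consuming the event stream (not run as a doc
/// test; assumes this module is imported as `json`):
///
/// ```ignore
/// let parser = json::Parser::new(r#"[1, true, "x"]"#.chars());
/// for event in parser {
///     // Yields ListStart, U64Value(1), BooleanValue(true),
///     // StringValue("x") and ListEnd, one per iteration.
///     println!("{}", event);
/// }
/// ```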
pub struct Parser<T> {
rdr: T,
ch: Option<char>,
line: uint,
col: uint,
// We maintain a stack representing where we are in the logical structure
// of the JSON stream.
stack: Stack,
// A state machine is kept to make it possible to interrupt and resume parsing.
state: ParserState,
}
impl<T: Iterator<char>> Iterator<JsonEvent> for Parser<T> {
fn next(&mut self) -> Option<JsonEvent> {
if self.state == ParseFinished {
return None;
}
if self.state == ParseBeforeFinish {
self.parse_whitespace();
// Make sure there are no trailing characters.
if self.eof() {
self.state = ParseFinished;
return None;
} else {
return Some(self.error_event(TrailingCharacters));
}
}
return Some(self.parse());
}
}
impl<T: Iterator<char>> Parser<T> {
/// Creates the JSON parser.
pub fn new(rdr: T) -> Parser<T> {
let mut p = Parser {
rdr: rdr,
ch: Some('\x00'),
line: 1,
col: 0,
stack: Stack::new(),
state: ParseStart,
};
p.bump();
return p;
}
/// Provides access to the current position in the logical structure of the
/// JSON stream.
pub fn stack<'l>(&'l self) -> &'l Stack {
return &self.stack;
}
fn eof(&self) -> bool { self.ch.is_none() }
fn ch_or_null(&self) -> char { self.ch.unwrap_or('\x00') }
fn bump(&mut self) {
self.ch = self.rdr.next();
if self.ch_is('\n') {
self.line += 1u;
self.col = 1u;
} else {
self.col += 1u;
}
}
fn next_char(&mut self) -> Option<char> {
self.bump();
self.ch
}
fn ch_is(&self, c: char) -> bool {
self.ch == Some(c)
}
fn error<T>(&self, reason: ErrorCode) -> Result<T, ParserError> {
Err(SyntaxError(reason, self.line, self.col))
}
fn parse_whitespace(&mut self) {
while self.ch_is(' ') ||
self.ch_is('\n') ||
self.ch_is('\t') ||
self.ch_is('\r') { self.bump(); }
}
fn parse_number(&mut self) -> JsonEvent {
let mut neg = false;
if self.ch_is('-') {
self.bump();
neg = true;
}
let res = match self.parse_u64() {
Ok(res) => res,
Err(e) => { return Error(e); }
};
if self.ch_is('.') || self.ch_is('e') || self.ch_is('E') {
let mut res = res as f64;
if self.ch_is('.') {
res = match self.parse_decimal(res) {
Ok(res) => res,
Err(e) => { return Error(e); }
};
}
if self.ch_is('e') || self.ch_is('E') {
res = match self.parse_exponent(res) {
Ok(res) => res,
Err(e) => { return Error(e); }
};
}
if neg {
res *= -1.0;
}
F64Value(res)
} else {
if neg {
let res = -(res as i64);
// Make sure we didn't underflow.
if res > 0 {
Error(SyntaxError(InvalidNumber, self.line, self.col))
} else {
I64Value(res)
}
} else {
U64Value(res)
}
}
}
fn parse_u64(&mut self) -> Result<u64, ParserError> {
let mut accum = 0u64;
match self.ch_or_null() {
'0' => {
self.bump();
// A leading '0' must be the only digit before the decimal point.
match self.ch_or_null() {
'0' .. '9' => return self.error(InvalidNumber),
_ => ()
}
},
'1' .. '9' => {
while !self.eof() {
match self.ch_or_null() {
c @ '0' .. '9' => {
let digit = (c as u64) - ('0' as u64);
// Detect overflow before accumulating: `accum * 10 + digit`
// must still fit in a u64.
if accum > (std::u64::MAX - digit) / 10 {
return self.error(InvalidNumber);
}
accum = accum * 10 + digit;
self.bump();
}
_ => break,
}
}
}
_ => return self.error(InvalidNumber),
}
Ok(accum)
}
fn parse_decimal(&mut self, mut res: f64) -> Result<f64, ParserError> {
self.bump();
// Make sure a digit follows the decimal point.
match self.ch_or_null() {
'0' .. '9' => (),
_ => return self.error(InvalidNumber)
}
let mut dec = 1.0;
while !self.eof() {
match self.ch_or_null() {
c @ '0' .. '9' => {
dec /= 10.0;
res += (((c as int) - ('0' as int)) as f64) * dec;
self.bump();
}
_ => break,
}
}
Ok(res)
}
fn parse_exponent(&mut self, mut res: f64) -> Result<f64, ParserError> {
self.bump();
let mut exp = 0u;
let mut neg_exp = false;
if self.ch_is('+') {
self.bump();
} else if self.ch_is('-') {
self.bump();
neg_exp = true;
}
// Make sure a digit follows the exponent marker (and optional sign).
match self.ch_or_null() {
'0' .. '9' => (),
_ => return self.error(InvalidNumber)
}
while !self.eof() {
match self.ch_or_null() {
c @ '0' .. '9' => {
exp *= 10;
exp += (c as uint) - ('0' as uint);
self.bump();
}
_ => break
}
}
let exp = num::pow(10_f64, exp);
if neg_exp {
res /= exp;
} else {
res *= exp;
}
Ok(res)
}
fn decode_hex_escape(&mut self) -> Result<u16, ParserError> {
let mut i = 0u;
let mut n = 0u16;
while i < 4 && !self.eof() {
self.bump();
n = match self.ch_or_null() {
c @ '0' .. '9' => n * 16 + ((c as u16) - ('0' as u16)),
'a' | 'A' => n * 16 + 10,
'b' | 'B' => n * 16 + 11,
'c' | 'C' => n * 16 + 12,
'd' | 'D' => n * 16 + 13,
'e' | 'E' => n * 16 + 14,
'f' | 'F' => n * 16 + 15,
_ => return self.error(InvalidEscape)
};
i += 1u;
}
// Error out if we didn't parse 4 digits.
if i != 4 {
return self.error(InvalidEscape);
}
Ok(n)
}
fn parse_str(&mut self) -> Result<String, ParserError> {
let mut escape = false;
let mut res = String::new();
loop {
self.bump();
if self.eof() {
return self.error(EOFWhileParsingString);
}
if escape {
match self.ch_or_null() {
'"' => res.push_char('"'),
'\\' => res.push_char('\\'),
'/' => res.push_char('/'),
'b' => res.push_char('\x08'),
'f' => res.push_char('\x0c'),
'n' => res.push_char('\n'),
'r' => res.push_char('\r'),
't' => res.push_char('\t'),
'u' => match try!(self.decode_hex_escape()) {
0xDC00 .. 0xDFFF => return self.error(LoneLeadingSurrogateInHexEscape),
// Non-BMP characters are encoded as a sequence of
// two hex escapes, representing UTF-16 surrogates.
n1 @ 0xD800 .. 0xDBFF => {
match (self.next_char(), self.next_char()) {
(Some('\\'), Some('u')) => (),
_ => return self.error(UnexpectedEndOfHexEscape),
}
let buf = [n1, try!(self.decode_hex_escape())];
match str::utf16_items(buf.as_slice()).next() {
Some(ScalarValue(c)) => res.push_char(c),
_ => return self.error(LoneLeadingSurrogateInHexEscape),
}
}
n => match char::from_u32(n as u32) {
Some(c) => res.push_char(c),
None => return self.error(InvalidUnicodeCodePoint),
},
},
_ => return self.error(InvalidEscape),
}
escape = false;
} else if self.ch_is('\\') {
escape = true;
} else {
match self.ch {
Some('"') => {
self.bump();
return Ok(res);
},
Some(c) => res.push_char(c),
None => unreachable!()
}
}
}
}
// Invoked at each iteration, consumes the stream until it has enough
// information to return a JsonEvent.
// Manages an internal state so that parsing can be interrupted and resumed.
// Also keeps track of the position in the logical structure of the json
// stream in the form of a stack that can be queried by the user using the
// stack() method.
fn parse(&mut self) -> JsonEvent {
loop {
// The only paths where the loop can spin a new iteration
// are in the cases ParseListComma and ParseObjectComma if ','
// is parsed. In these cases the state is set to (respectively)
// ParseList(false) and ParseObject(false), which always return,
// so there is no risk of getting stuck in an infinite loop.
// All other paths return before the end of the loop's iteration.
self.parse_whitespace();
match self.state {
ParseStart => {
return self.parse_start();
}
ParseList(first) => {
return self.parse_list(first);
}
ParseListComma => {
match self.parse_list_comma_or_end() {
Some(evt) => { return evt; }
None => {}
}
}
ParseObject(first) => {
return self.parse_object(first);
}
ParseObjectComma => {
self.stack.pop();
if self.ch_is(',') {
self.state = ParseObject(false);
self.bump();
} else {
return self.parse_object_end();
}
}
_ => {
return self.error_event(InvalidSyntax);
}
}
}
}
fn parse_start(&mut self) -> JsonEvent {
let val = self.parse_value();
self.state = match val {
Error(_) => { ParseFinished }
ListStart => { ParseList(true) }
ObjectStart => { ParseObject(true) }
_ => { ParseBeforeFinish }
};
return val;
}
fn parse_list(&mut self, first: bool) -> JsonEvent {
if self.ch_is(']') {
if !first {
return self.error_event(InvalidSyntax);
}
if self.stack.is_empty() {
self.state = ParseBeforeFinish;
} else {
self.state = if self.stack.last_is_index() {
ParseListComma
} else {
ParseObjectComma
}
}
self.bump();
return ListEnd;
}
if first {
self.stack.push_index(0);
}
let val = self.parse_value();
self.state = match val {
Error(_) => { ParseFinished }
ListStart => { ParseList(true) }
ObjectStart => { ParseObject(true) }
_ => { ParseListComma }
};
return val;
}
fn parse_list_comma_or_end(&mut self) -> Option<JsonEvent> {
if self.ch_is(',') {
self.stack.bump_index();
self.state = ParseList(false);
self.bump();
return None;
} else if self.ch_is(']') {
self.stack.pop();
if self.stack.is_empty() {
self.state = ParseBeforeFinish;
} else {
self.state = if self.stack.last_is_index() {
ParseListComma
} else {
ParseObjectComma
}
}
self.bump();
return Some(ListEnd);
} else if self.eof() {
return Some(self.error_event(EOFWhileParsingList));
} else {
return Some(self.error_event(InvalidSyntax));
}
}
fn parse_object(&mut self, first: bool) -> JsonEvent {
if self.ch_is('}') {
if !first {
self.stack.pop();
}
if self.stack.is_empty() {
self.state = ParseBeforeFinish;
} else {
self.state = if self.stack.last_is_index() {
ParseListComma
} else {
ParseObjectComma
}
}
self.bump();
return ObjectEnd;
}
if self.eof() {
return self.error_event(EOFWhileParsingObject);
}
if !self.ch_is('"') {
return self.error_event(KeyMustBeAString);
}
let s = match self.parse_str() {
Ok(s) => { s }
Err(e) => {
self.state = ParseFinished;
return Error(e);
}
};
self.parse_whitespace();
if self.eof() {
return self.error_event(EOFWhileParsingObject);
} else if self.ch_or_null() != ':' {
return self.error_event(ExpectedColon);
}
self.stack.push_key(s);
self.bump();
self.parse_whitespace();
let val = self.parse_value();
self.state = match val {
Error(_) => { ParseFinished }
ListStart => { ParseList(true) }
ObjectStart => { ParseObject(true) }
_ => { ParseObjectComma }
};
return val;
}
fn parse_object_end(&mut self) -> JsonEvent {
if self.ch_is('}') {
if self.stack.is_empty() {
self.state = ParseBeforeFinish;
} else {
self.state = if self.stack.last_is_index() {
ParseListComma
} else {
ParseObjectComma
}
}
self.bump();
ObjectEnd
} else if self.eof() {
self.error_event(EOFWhileParsingObject)
} else {
self.error_event(InvalidSyntax)
}
}
fn parse_value(&mut self) -> JsonEvent {
if self.eof() { return self.error_event(EOFWhileParsingValue); }
match self.ch_or_null() {
'n' => { self.parse_ident("ull", NullValue) }
't' => { self.parse_ident("rue", BooleanValue(true)) }
'f' => { self.parse_ident("alse", BooleanValue(false)) }
'0' .. '9' | '-' => self.parse_number(),
'"' => match self.parse_str() {
Ok(s) => StringValue(s),
Err(e) => Error(e),
},
'[' => {
self.bump();
ListStart
}
'{' => {
self.bump();
ObjectStart
}
_ => { self.error_event(InvalidSyntax) }
}
}
fn parse_ident(&mut self, ident: &str, value: JsonEvent) -> JsonEvent {
if ident.chars().all(|c| Some(c) == self.next_char()) {
self.bump();
value
} else {
Error(SyntaxError(InvalidSyntax, self.line, self.col))
}
}
fn error_event(&mut self, reason: ErrorCode) -> JsonEvent {
self.state = ParseFinished;
Error(SyntaxError(reason, self.line, self.col))
}
}
/// A Builder consumes a json::Parser to create a generic Json structure.
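///
/// # Example
///
/// A small sketch of the typical usage (not run as a doc test):
///
/// ```ignore
/// let mut builder = Builder::new("[1, 2, 3]".chars());
/// let value = builder.build().unwrap();
/// assert!(value.is_list());
/// ```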
pub struct Builder<T> {
parser: Parser<T>,
token: Option<JsonEvent>,
}
impl<T: Iterator<char>> Builder<T> {
/// Create a JSON Builder.
pub fn new(src: T) -> Builder<T> {
Builder { parser: Parser::new(src), token: None, }
}
/// Decodes a Json value from the underlying Parser.
pub fn build(&mut self) -> Result<Json, BuilderError> {
self.bump();
let result = self.build_value();
self.bump();
match self.token {
None => {}
Some(Error(e)) => { return Err(e); }
ref tok => { fail!("unexpected token {}", tok.clone()); }
}
result
}
fn bump(&mut self) {
self.token = self.parser.next();
}
fn build_value(&mut self) -> Result<Json, BuilderError> {
return match self.token {
Some(NullValue) => { Ok(Null) }
Some(I64Value(n)) => { Ok(I64(n)) }
Some(U64Value(n)) => { Ok(U64(n)) }
Some(F64Value(n)) => { Ok(F64(n)) }
Some(BooleanValue(b)) => { Ok(Boolean(b)) }
Some(StringValue(ref mut s)) => {
let mut temp = String::new();
swap(s, &mut temp);
Ok(String(temp))
}
Some(Error(e)) => { Err(e) }
Some(ListStart) => { self.build_list() }
Some(ObjectStart) => { self.build_object() }
Some(ObjectEnd) => { self.parser.error(InvalidSyntax) }
Some(ListEnd) => { self.parser.error(InvalidSyntax) }
None => { self.parser.error(EOFWhileParsingValue) }
}
}
fn build_list(&mut self) -> Result<Json, BuilderError> {
self.bump();
let mut values = Vec::new();
loop {
if self.token == Some(ListEnd) {
return Ok(List(values.move_iter().collect()));
}
match self.build_value() {
Ok(v) => values.push(v),
Err(e) => { return Err(e) }
}
self.bump();
}
}
fn build_object(&mut self) -> Result<Json, BuilderError> {
self.bump();
let mut values = TreeMap::new();
loop {
match self.token {
Some(ObjectEnd) => { return Ok(Object(values)); }
Some(Error(e)) => { return Err(e); }
None => { break; }
_ => {}
}
let key = match self.parser.stack().top() {
Some(Key(k)) => { k.to_string() }
_ => { fail!("invalid state"); }
};
match self.build_value() {
Ok(value) => { values.insert(key, value); }
Err(e) => { return Err(e); }
}
self.bump();
}
return self.parser.error(EOFWhileParsingObject);
}
}
/// Decodes a json value from an `&mut io::Reader`
pub fn from_reader(rdr: &mut io::Reader) -> Result<Json, BuilderError> {
let contents = match rdr.read_to_end() {
Ok(c) => c,
Err(e) => return Err(io_error_to_error(e))
};
let s = match str::from_utf8(contents.as_slice()) {
Some(s) => s,
_ => return Err(SyntaxError(NotUtf8, 0, 0))
};
let mut builder = Builder::new(s.chars());
builder.build()
}
/// Decodes a json value from a string
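///
/// # Example
///
/// A minimal sketch (not run as a doc test; assumes the module is imported
/// as `json`):
///
/// ```ignore
/// let value = json::from_str(r#"{"answer": 42}"#).unwrap();
/// let answer = value.find(&"answer".to_string()).and_then(|j| j.as_u64());
/// assert_eq!(answer, Some(42));
/// ```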
pub fn from_str(s: &str) -> Result<Json, BuilderError> {
let mut builder = Builder::new(s.chars());
builder.build()
}
/// A structure to decode JSON to values in Rust.
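///
/// # Example
///
/// A hypothetical sketch of decoding a Json value into a Rust type (not run
/// as a doc test; assumes `serialize::Decodable` and this module are in scope):
///
/// ```ignore
/// let json = json::from_str("[1, 2, 3]").unwrap();
/// let mut decoder = json::Decoder::new(json);
/// let numbers: Vec<uint> = Decodable::decode(&mut decoder).unwrap();
/// assert_eq!(numbers, vec![1u, 2, 3]);
/// ```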
pub struct Decoder {
stack: Vec<Json>,
}
impl Decoder {
/// Creates a new decoder instance for decoding the specified JSON value.
pub fn new(json: Json) -> Decoder {
Decoder { stack: vec![json] }
}
}
impl Decoder {
fn pop(&mut self) -> Json {
self.stack.pop().unwrap()
}
}
macro_rules! expect(
($e:expr, Null) => ({
match $e {
Null => Ok(()),
other => Err(ExpectedError("Null".to_string(),
format!("{}", other)))
}
});
($e:expr, $t:ident) => ({
match $e {
$t(v) => Ok(v),
other => {
Err(ExpectedError(stringify!($t).to_string(),
format!("{}", other)))
}
}
})
)
macro_rules! read_primitive {
($name:ident, $ty:ty) => {
fn $name(&mut self) -> DecodeResult<$ty> {
match self.pop() {
I64(f) => {
match num::cast(f) {
Some(f) => Ok(f),
None => Err(ExpectedError("Number".to_string(), format!("{}", f))),
}
}
U64(f) => {
match num::cast(f) {
Some(f) => Ok(f),
None => Err(ExpectedError("Number".to_string(), format!("{}", f))),
}
}
F64(f) => {
match num::cast(f) {
Some(f) => Ok(f),
None => Err(ExpectedError("Number".to_string(), format!("{}", f))),
}
}
String(s) => {
// See issue #12967: a type with numeric keys (e.g. HashMap<uint, V>)
// has its keys encoded as JSON strings, so accept a string here too.
match std::from_str::from_str(s.as_slice()) {
Some(f) => Ok(f),
None => Err(ExpectedError("Number".to_string(), s)),
}
},
value => Err(ExpectedError("Number".to_string(), format!("{}", value)))
}
}
}
}
impl ::Decoder<DecoderError> for Decoder {
fn read_nil(&mut self) -> DecodeResult<()> {
debug!("read_nil");
expect!(self.pop(), Null)
}
read_primitive!(read_uint, uint)
read_primitive!(read_u8, u8)
read_primitive!(read_u16, u16)
read_primitive!(read_u32, u32)
read_primitive!(read_u64, u64)
read_primitive!(read_int, int)
read_primitive!(read_i8, i8)
read_primitive!(read_i16, i16)
read_primitive!(read_i32, i32)
read_primitive!(read_i64, i64)
fn read_f32(&mut self) -> DecodeResult<f32> { self.read_f64().map(|x| x as f32) }
fn read_f64(&mut self) -> DecodeResult<f64> {
debug!("read_f64");
match self.pop() {
I64(f) => Ok(f as f64),
U64(f) => Ok(f as f64),
F64(f) => Ok(f),
String(s) => {
// See issue #12967: a type with numeric keys (e.g. HashMap<uint, V>)
// has its keys encoded as JSON strings, so accept a string here too.
match std::from_str::from_str(s.as_slice()) {
Some(f) => Ok(f),
None => Err(ExpectedError("Number".to_string(), s)),
}
},
Null => Ok(f64::NAN),
value => Err(ExpectedError("Number".to_string(), format!("{}", value)))
}
}
fn read_bool(&mut self) -> DecodeResult<bool> {
debug!("read_bool");
expect!(self.pop(), Boolean)
}
fn read_char(&mut self) -> DecodeResult<char> {
let s = try!(self.read_str());
{
let mut it = s.as_slice().chars();
match (it.next(), it.next()) {
// exactly one character
(Some(c), None) => return Ok(c),
_ => ()
}
}
Err(ExpectedError("single character string".to_string(), format!("{}", s)))
}
fn read_str(&mut self) -> DecodeResult<String> {
debug!("read_str");
expect!(self.pop(), String)
}
fn read_enum<T>(&mut self,
name: &str,
f: |&mut Decoder| -> DecodeResult<T>) -> DecodeResult<T> {
debug!("read_enum({})", name);
f(self)
}
fn read_enum_variant<T>(&mut self,
names: &[&str],
f: |&mut Decoder, uint| -> DecodeResult<T>)
-> DecodeResult<T> {
debug!("read_enum_variant(names={})", names);
let name = match self.pop() {
String(s) => s,
Object(mut o) => {
let n = match o.pop(&"variant".to_string()) {
Some(String(s)) => s,
Some(val) => {
return Err(ExpectedError("String".to_string(), format!("{}", val)))
}
None => {
return Err(MissingFieldError("variant".to_string()))
}
};
match o.pop(&"fields".to_string()) {
Some(List(l)) => {
for field in l.move_iter().rev() {
self.stack.push(field);
}
},
Some(val) => {
return Err(ExpectedError("List".to_string(), format!("{}", val)))
}
None => {
return Err(MissingFieldError("fields".to_string()))
}
}
n
}
json => {
return Err(ExpectedError("String or Object".to_string(), format!("{}", json)))
}
};
let idx = match names.iter()
.position(|n| str::eq_slice(*n, name.as_slice())) {
Some(idx) => idx,
None => return Err(UnknownVariantError(name))
};
f(self, idx)
}
fn read_enum_variant_arg<T>(&mut self, idx: uint, f: |&mut Decoder| -> DecodeResult<T>)
-> DecodeResult<T> {
debug!("read_enum_variant_arg(idx={})", idx);
f(self)
}
fn read_enum_struct_variant<T>(&mut self,
names: &[&str],
f: |&mut Decoder, uint| -> DecodeResult<T>)
-> DecodeResult<T> {
debug!("read_enum_struct_variant(names={})", names);
self.read_enum_variant(names, f)
}
fn read_enum_struct_variant_field<T>(&mut self,
name: &str,
idx: uint,
f: |&mut Decoder| -> DecodeResult<T>)
-> DecodeResult<T> {
debug!("read_enum_struct_variant_field(name={}, idx={})", name, idx);
self.read_enum_variant_arg(idx, f)
}
fn read_struct<T>(&mut self,
name: &str,
len: uint,
f: |&mut Decoder| -> DecodeResult<T>)
-> DecodeResult<T> {
debug!("read_struct(name={}, len={})", name, len);
let value = try!(f(self));
self.pop();
Ok(value)
}
fn read_struct_field<T>(&mut self,
name: &str,
idx: uint,
f: |&mut Decoder| -> DecodeResult<T>)
-> DecodeResult<T> {
debug!("read_struct_field(name={}, idx={})", name, idx);
let mut obj = try!(expect!(self.pop(), Object));
let value = match obj.pop(&name.to_string()) {
None => return Err(MissingFieldError(name.to_string())),
Some(json) => {
self.stack.push(json);
try!(f(self))
}
};
self.stack.push(Object(obj));
Ok(value)
}
fn read_tuple<T>(&mut self, f: |&mut Decoder, uint| -> DecodeResult<T>) -> DecodeResult<T> {
debug!("read_tuple()");
self.read_seq(f)
}
fn read_tuple_arg<T>(&mut self,
idx: uint,
f: |&mut Decoder| -> DecodeResult<T>) -> DecodeResult<T> {
debug!("read_tuple_arg(idx={})", idx);
self.read_seq_elt(idx, f)
}
fn read_tuple_struct<T>(&mut self,
name: &str,
f: |&mut Decoder, uint| -> DecodeResult<T>)
-> DecodeResult<T> {
debug!("read_tuple_struct(name={})", name);
self.read_tuple(f)
}
fn read_tuple_struct_arg<T>(&mut self,
idx: uint,
f: |&mut Decoder| -> DecodeResult<T>)
-> DecodeResult<T> {
debug!("read_tuple_struct_arg(idx={})", idx);
self.read_tuple_arg(idx, f)
}
fn read_option<T>(&mut self, f: |&mut Decoder, bool| -> DecodeResult<T>) -> DecodeResult<T> {
match self.pop() {
Null => f(self, false),
value => { self.stack.push(value); f(self, true) }
}
}
fn read_seq<T>(&mut self, f: |&mut Decoder, uint| -> DecodeResult<T>) -> DecodeResult<T> {
debug!("read_seq()");
let list = try!(expect!(self.pop(), List));
let len = list.len();
for v in list.move_iter().rev() {
self.stack.push(v);
}
f(self, len)
}
fn read_seq_elt<T>(&mut self,
idx: uint,
f: |&mut Decoder| -> DecodeResult<T>) -> DecodeResult<T> {
debug!("read_seq_elt(idx={})", idx);
f(self)
}
fn read_map<T>(&mut self, f: |&mut Decoder, uint| -> DecodeResult<T>) -> DecodeResult<T> {
debug!("read_map()");
let obj = try!(expect!(self.pop(), Object));
let len = obj.len();
for (key, value) in obj.move_iter() {
self.stack.push(value);
self.stack.push(String(key));
}
f(self, len)
}
fn read_map_elt_key<T>(&mut self, idx: uint, f: |&mut Decoder| -> DecodeResult<T>)
-> DecodeResult<T> {
debug!("read_map_elt_key(idx={})", idx);
f(self)
}
fn read_map_elt_val<T>(&mut self, idx: uint, f: |&mut Decoder| -> DecodeResult<T>)
-> DecodeResult<T> {
debug!("read_map_elt_val(idx={})", idx);
f(self)
}
fn error(&mut self, err: &str) -> DecoderError {
ApplicationError(err.to_string())
}
}
/// A trait for converting values to JSON
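///
/// # Example
///
/// A brief sketch of the intended use (not run as a doc test):
///
/// ```ignore
/// let numbers = vec![1i, 2, 3];
/// let json = numbers.to_json();   // List(vec![I64(1), I64(2), I64(3)])
/// assert_eq!(json.to_string(), "[1,2,3]".to_string());
/// ```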
pub trait ToJson {
/// Converts the value of `self` to an instance of JSON
fn to_json(&self) -> Json;
}
macro_rules! to_json_impl_i64(
($($t:ty), +) => (
$(impl ToJson for $t {
fn to_json(&self) -> Json { I64(*self as i64) }
})+
)
)
to_json_impl_i64!(int, i8, i16, i32, i64)
macro_rules! to_json_impl_u64(
($($t:ty), +) => (
$(impl ToJson for $t {
fn to_json(&self) -> Json { U64(*self as u64) }
})+
)
)
to_json_impl_u64!(uint, u8, u16, u32, u64)
impl ToJson for Json {
fn to_json(&self) -> Json { self.clone() }
}
impl ToJson for f32 {
fn to_json(&self) -> Json { (*self as f64).to_json() }
}
impl ToJson for f64 {
fn to_json(&self) -> Json {
match self.classify() {
FPNaN | FPInfinite => Null,
_ => F64(*self)
}
}
}
impl ToJson for () {
fn to_json(&self) -> Json { Null }
}
impl ToJson for bool {
fn to_json(&self) -> Json { Boolean(*self) }
}
impl ToJson for String {
fn to_json(&self) -> Json { String((*self).clone()) }
}
macro_rules! tuple_impl {
// use variables to indicate the arity of the tuple
($($tyvar:ident),* ) => {
// the trailing commas are for the 1-tuple
impl<
$( $tyvar : ToJson ),*
> ToJson for ( $( $tyvar ),* , ) {
#[inline]
#[allow(non_snake_case)]
fn to_json(&self) -> Json {
match *self {
($(ref $tyvar),*,) => List(vec![$($tyvar.to_json()),*])
}
}
}
}
}
tuple_impl!{A}
tuple_impl!{A, B}
tuple_impl!{A, B, C}
tuple_impl!{A, B, C, D}
tuple_impl!{A, B, C, D, E}
tuple_impl!{A, B, C, D, E, F}
tuple_impl!{A, B, C, D, E, F, G}
tuple_impl!{A, B, C, D, E, F, G, H}
tuple_impl!{A, B, C, D, E, F, G, H, I}
tuple_impl!{A, B, C, D, E, F, G, H, I, J}
tuple_impl!{A, B, C, D, E, F, G, H, I, J, K}
tuple_impl!{A, B, C, D, E, F, G, H, I, J, K, L}
impl<'a, A: ToJson> ToJson for &'a [A] {
fn to_json(&self) -> Json { List(self.iter().map(|elt| elt.to_json()).collect()) }
}
impl<A: ToJson> ToJson for Vec<A> {
fn to_json(&self) -> Json { List(self.iter().map(|elt| elt.to_json()).collect()) }
}
impl<A: ToJson> ToJson for TreeMap<String, A> {
fn to_json(&self) -> Json {
let mut d = TreeMap::new();
for (key, value) in self.iter() {
d.insert((*key).clone(), value.to_json());
}
Object(d)
}
}
impl<A: ToJson> ToJson for HashMap<String, A> {
fn to_json(&self) -> Json {
let mut d = TreeMap::new();
for (key, value) in self.iter() {
d.insert((*key).clone(), value.to_json());
}
Object(d)
}
}
impl<A:ToJson> ToJson for Option<A> {
fn to_json(&self) -> Json {
match *self {
None => Null,
Some(ref value) => value.to_json()
}
}
}
impl fmt::Show for Json {
/// Encodes a json value into a string
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.to_writer(f).map_err(|_| fmt::WriteError)
}
}
impl std::from_str::FromStr for Json {
fn from_str(s: &str) -> Option<Json> {
from_str(s).ok()
}
}
#[cfg(test)]
mod tests {
extern crate test;
use self::test::Bencher;
use {Encodable, Decodable};
use super::{Encoder, Decoder, Error, Boolean, I64, U64, F64, List, String, Null,
PrettyEncoder, Object, Json, from_str, ParseError, ExpectedError,
MissingFieldError, UnknownVariantError, DecodeResult, DecoderError,
JsonEvent, Parser, StackElement,
ObjectStart, ObjectEnd, ListStart, ListEnd, BooleanValue, U64Value,
F64Value, StringValue, NullValue, SyntaxError, Key, Index, Stack,
InvalidSyntax, InvalidNumber, EOFWhileParsingObject, EOFWhileParsingList,
EOFWhileParsingValue, EOFWhileParsingString, KeyMustBeAString, ExpectedColon,
TrailingCharacters};
use std::{i64, u64, f32, f64, io};
use std::collections::TreeMap;
#[deriving(PartialEq, Encodable, Decodable, Show)]
enum Animal {
Dog,
Frog(String, int)
}
#[deriving(PartialEq, Encodable, Decodable, Show)]
struct Inner {
a: (),
b: uint,
c: Vec<String>,
}
#[deriving(PartialEq, Encodable, Decodable, Show)]
struct Outer {
inner: Vec<Inner>,
}
fn mk_object(items: &[(String, Json)]) -> Json {
let mut d = TreeMap::new();
for item in items.iter() {
match *item {
(ref key, ref value) => { d.insert((*key).clone(), (*value).clone()); },
}
};
Object(d)
}
#[test]
fn test_from_str_trait() {
let s = "null";
assert!(::std::from_str::from_str::<Json>(s).unwrap() == from_str(s).unwrap());
}
#[test]
fn test_write_null() {
assert_eq!(Null.to_string().into_string(), "null".to_string());
assert_eq!(Null.to_pretty_str().into_string(), "null".to_string());
}
#[test]
fn test_write_i64() {
assert_eq!(U64(0).to_string().into_string(), "0".to_string());
assert_eq!(U64(0).to_pretty_str().into_string(), "0".to_string());
assert_eq!(U64(1234).to_string().into_string(), "1234".to_string());
assert_eq!(U64(1234).to_pretty_str().into_string(), "1234".to_string());
assert_eq!(I64(-5678).to_string().into_string(), "-5678".to_string());
assert_eq!(I64(-5678).to_pretty_str().into_string(), "-5678".to_string());
}
#[test]
fn test_write_f64() {
assert_eq!(F64(3.0).to_string().into_string(), "3".to_string());
assert_eq!(F64(3.0).to_pretty_str().into_string(), "3".to_string());
assert_eq!(F64(3.1).to_string().into_string(), "3.1".to_string());
assert_eq!(F64(3.1).to_pretty_str().into_string(), "3.1".to_string());
assert_eq!(F64(-1.5).to_string().into_string(), "-1.5".to_string());
assert_eq!(F64(-1.5).to_pretty_str().into_string(), "-1.5".to_string());
assert_eq!(F64(0.5).to_string().into_string(), "0.5".to_string());
assert_eq!(F64(0.5).to_pretty_str().into_string(), "0.5".to_string());
assert_eq!(F64(f64::NAN).to_string().into_string(), "null".to_string());
assert_eq!(F64(f64::NAN).to_pretty_str().into_string(), "null".to_string());
assert_eq!(F64(f64::INFINITY).to_string().into_string(), "null".to_string());
assert_eq!(F64(f64::INFINITY).to_pretty_str().into_string(), "null".to_string());
assert_eq!(F64(f64::NEG_INFINITY).to_string().into_string(), "null".to_string());
assert_eq!(F64(f64::NEG_INFINITY).to_pretty_str().into_string(), "null".to_string());
}
#[test]
fn test_write_str() {
assert_eq!(String("".to_string()).to_string().into_string(), "\"\"".to_string());
assert_eq!(String("".to_string()).to_pretty_str().into_string(), "\"\"".to_string());
assert_eq!(String("foo".to_string()).to_string().into_string(), "\"foo\"".to_string());
assert_eq!(String("foo".to_string()).to_pretty_str().into_string(), "\"foo\"".to_string());
}
#[test]
fn test_write_bool() {
assert_eq!(Boolean(true).to_string().into_string(), "true".to_string());
assert_eq!(Boolean(true).to_pretty_str().into_string(), "true".to_string());
assert_eq!(Boolean(false).to_string().into_string(), "false".to_string());
assert_eq!(Boolean(false).to_pretty_str().into_string(), "false".to_string());
}
#[test]
fn test_write_list() {
assert_eq!(List(vec![]).to_string().into_string(), "[]".to_string());
assert_eq!(List(vec![]).to_pretty_str().into_string(), "[]".to_string());
assert_eq!(List(vec![Boolean(true)]).to_string().into_string(), "[true]".to_string());
assert_eq!(
List(vec![Boolean(true)]).to_pretty_str().into_string(),
"\
[\n \
true\n\
]".to_string()
);
let long_test_list = List(vec![
Boolean(false),
Null,
List(vec![String("foo\nbar".to_string()), F64(3.5)])]);
assert_eq!(long_test_list.to_string().into_string(),
"[false,null,[\"foo\\nbar\",3.5]]".to_string());
assert_eq!(
long_test_list.to_pretty_str().into_string(),
"\
[\n \
false,\n \
null,\n \
[\n \
\"foo\\nbar\",\n \
3.5\n \
]\n\
]".to_string()
);
}
#[test]
fn test_write_object() {
assert_eq!(mk_object([]).to_string().into_string(), "{}".to_string());
assert_eq!(mk_object([]).to_pretty_str().into_string(), "{}".to_string());
assert_eq!(
mk_object([
("a".to_string(), Boolean(true))
]).to_string().into_string(),
"{\"a\":true}".to_string()
);
assert_eq!(
mk_object([("a".to_string(), Boolean(true))]).to_pretty_str(),
"\
{\n \
\"a\": true\n\
}".to_string()
);
let complex_obj = mk_object([
("b".to_string(), List(vec![
mk_object([("c".to_string(), String("\x0c\r".to_string()))]),
mk_object([("d".to_string(), String("".to_string()))])
]))
]);
assert_eq!(
complex_obj.to_string().into_string(),
"{\
\"b\":[\
{\"c\":\"\\f\\r\"},\
{\"d\":\"\"}\
]\
}".to_string()
);
assert_eq!(
complex_obj.to_pretty_str().into_string(),
"\
{\n \
\"b\": [\n \
{\n \
\"c\": \"\\f\\r\"\n \
},\n \
{\n \
\"d\": \"\"\n \
}\n \
]\n\
}".to_string()
);
let a = mk_object([
("a".to_string(), Boolean(true)),
("b".to_string(), List(vec![
mk_object([("c".to_string(), String("\x0c\r".to_string()))]),
mk_object([("d".to_string(), String("".to_string()))])
]))
]);
// We can't compare the strings directly because the object fields may be
// printed in a different order.
assert_eq!(a.clone(), from_str(a.to_string().as_slice()).unwrap());
assert_eq!(a.clone(),
from_str(a.to_pretty_str().as_slice()).unwrap());
}
fn with_str_writer(f: |&mut io::Writer|) -> String {
use std::io::MemWriter;
use std::str;
let mut m = MemWriter::new();
f(&mut m as &mut io::Writer);
str::from_utf8(m.unwrap().as_slice()).unwrap().to_string()
}
#[test]
fn test_write_enum() {
let animal = Dog;
assert_eq!(
with_str_writer(|writer| {
let mut encoder = Encoder::new(writer);
animal.encode(&mut encoder).unwrap();
}),
"\"Dog\"".to_string()
);
assert_eq!(
with_str_writer(|writer| {
let mut encoder = PrettyEncoder::new(writer);
animal.encode(&mut encoder).unwrap();
}),
"\"Dog\"".to_string()
);
let animal = Frog("Henry".to_string(), 349);
assert_eq!(
with_str_writer(|writer| {
let mut encoder = Encoder::new(writer);
animal.encode(&mut encoder).unwrap();
}),
"{\"variant\":\"Frog\",\"fields\":[\"Henry\",349]}".to_string()
);
assert_eq!(
with_str_writer(|writer| {
let mut encoder = PrettyEncoder::new(writer);
animal.encode(&mut encoder).unwrap();
}),
"\
[\n \
\"Frog\",\n \
\"Henry\",\n \
349\n\
]".to_string()
);
}
#[test]
fn test_write_some() {
let value = Some("jodhpurs".to_string());
let s = with_str_writer(|writer| {
let mut encoder = Encoder::new(writer);
value.encode(&mut encoder).unwrap();
});
assert_eq!(s, "\"jodhpurs\"".to_string());
let value = Some("jodhpurs".to_string());
let s = with_str_writer(|writer| {
let mut encoder = PrettyEncoder::new(writer);
value.encode(&mut encoder).unwrap();
});
assert_eq!(s, "\"jodhpurs\"".to_string());
}
#[test]
fn test_write_none() {
let value: Option<String> = None;
let s = with_str_writer(|writer| {
let mut encoder = Encoder::new(writer);
value.encode(&mut encoder).unwrap();
});
assert_eq!(s, "null".to_string());
let s = with_str_writer(|writer| {
let mut encoder = Encoder::new(writer);
value.encode(&mut encoder).unwrap();
});
assert_eq!(s, "null".to_string());
}
#[test]
fn test_trailing_characters() {
assert_eq!(from_str("nulla"), Err(SyntaxError(TrailingCharacters, 1, 5)));
assert_eq!(from_str("truea"), Err(SyntaxError(TrailingCharacters, 1, 5)));
assert_eq!(from_str("falsea"), Err(SyntaxError(TrailingCharacters, 1, 6)));
assert_eq!(from_str("1a"), Err(SyntaxError(TrailingCharacters, 1, 2)));
assert_eq!(from_str("[]a"), Err(SyntaxError(TrailingCharacters, 1, 3)));
assert_eq!(from_str("{}a"), Err(SyntaxError(TrailingCharacters, 1, 3)));
}
#[test]
fn test_read_identifiers() {
assert_eq!(from_str("n"), Err(SyntaxError(InvalidSyntax, 1, 2)));
assert_eq!(from_str("nul"), Err(SyntaxError(InvalidSyntax, 1, 4)));
assert_eq!(from_str("t"), Err(SyntaxError(InvalidSyntax, 1, 2)));
assert_eq!(from_str("truz"), Err(SyntaxError(InvalidSyntax, 1, 4)));
assert_eq!(from_str("f"), Err(SyntaxError(InvalidSyntax, 1, 2)));
assert_eq!(from_str("faz"), Err(SyntaxError(InvalidSyntax, 1, 3)));
assert_eq!(from_str("null"), Ok(Null));
assert_eq!(from_str("true"), Ok(Boolean(true)));
assert_eq!(from_str("false"), Ok(Boolean(false)));
assert_eq!(from_str(" null "), Ok(Null));
assert_eq!(from_str(" true "), Ok(Boolean(true)));
assert_eq!(from_str(" false "), Ok(Boolean(false)));
}
#[test]
fn test_decode_identifiers() {
let v: () = super::decode("null").unwrap();
assert_eq!(v, ());
let v: bool = super::decode("true").unwrap();
assert_eq!(v, true);
let v: bool = super::decode("false").unwrap();
assert_eq!(v, false);
}
#[test]
fn test_read_number() {
assert_eq!(from_str("+"), Err(SyntaxError(InvalidSyntax, 1, 1)));
assert_eq!(from_str("."), Err(SyntaxError(InvalidSyntax, 1, 1)));
assert_eq!(from_str("NaN"), Err(SyntaxError(InvalidSyntax, 1, 1)));
assert_eq!(from_str("-"), Err(SyntaxError(InvalidNumber, 1, 2)));
assert_eq!(from_str("00"), Err(SyntaxError(InvalidNumber, 1, 2)));
assert_eq!(from_str("1."), Err(SyntaxError(InvalidNumber, 1, 3)));
assert_eq!(from_str("1e"), Err(SyntaxError(InvalidNumber, 1, 3)));
assert_eq!(from_str("1e+"), Err(SyntaxError(InvalidNumber, 1, 4)));
assert_eq!(from_str("18446744073709551616"), Err(SyntaxError(InvalidNumber, 1, 20)));
assert_eq!(from_str("-9223372036854775809"), Err(SyntaxError(InvalidNumber, 1, 21)));
assert_eq!(from_str("3"), Ok(U64(3)));
assert_eq!(from_str("3.1"), Ok(F64(3.1)));
assert_eq!(from_str("-1.2"), Ok(F64(-1.2)));
assert_eq!(from_str("0.4"), Ok(F64(0.4)));
assert_eq!(from_str("0.4e5"), Ok(F64(0.4e5)));
assert_eq!(from_str("0.4e+15"), Ok(F64(0.4e15)));
assert_eq!(from_str("0.4e-01"), Ok(F64(0.4e-01)));
assert_eq!(from_str(" 3 "), Ok(U64(3)));
assert_eq!(from_str("-9223372036854775808"), Ok(I64(i64::MIN)));
assert_eq!(from_str("9223372036854775807"), Ok(U64(i64::MAX as u64)));
assert_eq!(from_str("18446744073709551615"), Ok(U64(u64::MAX)));
}
#[test]
fn test_decode_numbers() {
let v: f64 = super::decode("3").unwrap();
assert_eq!(v, 3.0);
let v: f64 = super::decode("3.1").unwrap();
assert_eq!(v, 3.1);
let v: f64 = super::decode("-1.2").unwrap();
assert_eq!(v, -1.2);
let v: f64 = super::decode("0.4").unwrap();
assert_eq!(v, 0.4);
let v: f64 = super::decode("0.4e5").unwrap();
assert_eq!(v, 0.4e5);
let v: f64 = super::decode("0.4e15").unwrap();
assert_eq!(v, 0.4e15);
let v: f64 = super::decode("0.4e-01").unwrap();
assert_eq!(v, 0.4e-01);
let v: u64 = super::decode("0").unwrap();
assert_eq!(v, 0);
let v: u64 = super::decode("18446744073709551615").unwrap();
assert_eq!(v, u64::MAX);
let v: i64 = super::decode("-9223372036854775808").unwrap();
assert_eq!(v, i64::MIN);
let v: i64 = super::decode("9223372036854775807").unwrap();
assert_eq!(v, i64::MAX);
}
#[test]
fn test_read_str() {
assert_eq!(from_str("\""), Err(SyntaxError(EOFWhileParsingString, 1, 2)));
assert_eq!(from_str("\"lol"), Err(SyntaxError(EOFWhileParsingString, 1, 5)));
assert_eq!(from_str("\"\""), Ok(String("".to_string())));
assert_eq!(from_str("\"foo\""), Ok(String("foo".to_string())));
assert_eq!(from_str("\"\\\"\""), Ok(String("\"".to_string())));
assert_eq!(from_str("\"\\b\""), Ok(String("\x08".to_string())));
assert_eq!(from_str("\"\\n\""), Ok(String("\n".to_string())));
assert_eq!(from_str("\"\\r\""), Ok(String("\r".to_string())));
assert_eq!(from_str("\"\\t\""), Ok(String("\t".to_string())));
assert_eq!(from_str(" \"foo\" "), Ok(String("foo".to_string())));
assert_eq!(from_str("\"\\u12ab\""), Ok(String("\u12ab".to_string())));
assert_eq!(from_str("\"\\uAB12\""), Ok(String("\uAB12".to_string())));
}
#[test]
fn test_decode_str() {
let s = [("\"\"", ""),
("\"foo\"", "foo"),
("\"\\\"\"", "\""),
("\"\\b\"", "\x08"),
("\"\\n\"", "\n"),
("\"\\r\"", "\r"),
("\"\\t\"", "\t"),
("\"\\u12ab\"", "\u12ab"),
("\"\\uAB12\"", "\uAB12")];
for &(i, o) in s.iter() {
let v: String = super::decode(i).unwrap();
assert_eq!(v.as_slice(), o);
}
}
#[test]
fn test_read_list() {
assert_eq!(from_str("["), Err(SyntaxError(EOFWhileParsingValue, 1, 2)));
assert_eq!(from_str("[1"), Err(SyntaxError(EOFWhileParsingList, 1, 3)));
assert_eq!(from_str("[1,"), Err(SyntaxError(EOFWhileParsingValue, 1, 4)));
assert_eq!(from_str("[1,]"), Err(SyntaxError(InvalidSyntax, 1, 4)));
assert_eq!(from_str("[6 7]"), Err(SyntaxError(InvalidSyntax, 1, 4)));
assert_eq!(from_str("[]"), Ok(List(vec![])));
assert_eq!(from_str("[ ]"), Ok(List(vec![])));
assert_eq!(from_str("[true]"), Ok(List(vec![Boolean(true)])));
assert_eq!(from_str("[ false ]"), Ok(List(vec![Boolean(false)])));
assert_eq!(from_str("[null]"), Ok(List(vec![Null])));
assert_eq!(from_str("[3, 1]"),
Ok(List(vec![U64(3), U64(1)])));
assert_eq!(from_str("\n[3, 2]\n"),
Ok(List(vec![U64(3), U64(2)])));
assert_eq!(from_str("[2, [4, 1]]"),
Ok(List(vec![U64(2), List(vec![U64(4), U64(1)])])));
}
#[test]
fn test_decode_list() {
let v: Vec<()> = super::decode("[]").unwrap();
assert_eq!(v, vec![]);
let v: Vec<()> = super::decode("[null]").unwrap();
assert_eq!(v, vec![()]);
let v: Vec<bool> = super::decode("[true]").unwrap();
assert_eq!(v, vec![true]);
let v: Vec<int> = super::decode("[3, 1]").unwrap();
assert_eq!(v, vec![3, 1]);
let v: Vec<Vec<uint>> = super::decode("[[3], [1, 2]]").unwrap();
assert_eq!(v, vec![vec![3], vec![1, 2]]);
}
#[test]
fn test_read_object() {
assert_eq!(from_str("{"), Err(SyntaxError(EOFWhileParsingObject, 1, 2)));
assert_eq!(from_str("{ "), Err(SyntaxError(EOFWhileParsingObject, 1, 3)));
assert_eq!(from_str("{1"), Err(SyntaxError(KeyMustBeAString, 1, 2)));
assert_eq!(from_str("{ \"a\""), Err(SyntaxError(EOFWhileParsingObject, 1, 6)));
assert_eq!(from_str("{\"a\""), Err(SyntaxError(EOFWhileParsingObject, 1, 5)));
assert_eq!(from_str("{\"a\" "), Err(SyntaxError(EOFWhileParsingObject, 1, 6)));
assert_eq!(from_str("{\"a\" 1"), Err(SyntaxError(ExpectedColon, 1, 6)));
assert_eq!(from_str("{\"a\":"), Err(SyntaxError(EOFWhileParsingValue, 1, 6)));
assert_eq!(from_str("{\"a\":1"), Err(SyntaxError(EOFWhileParsingObject, 1, 7)));
assert_eq!(from_str("{\"a\":1 1"), Err(SyntaxError(InvalidSyntax, 1, 8)));
assert_eq!(from_str("{\"a\":1,"), Err(SyntaxError(EOFWhileParsingObject, 1, 8)));
assert_eq!(from_str("{}").unwrap(), mk_object([]));
assert_eq!(from_str("{\"a\": 3}").unwrap(),
mk_object([("a".to_string(), U64(3))]));
assert_eq!(from_str(
"{ \"a\": null, \"b\" : true }").unwrap(),
mk_object([
("a".to_string(), Null),
("b".to_string(), Boolean(true))]));
assert_eq!(from_str("\n{ \"a\": null, \"b\" : true }\n").unwrap(),
mk_object([
("a".to_string(), Null),
("b".to_string(), Boolean(true))]));
assert_eq!(from_str(
"{\"a\" : 1.0 ,\"b\": [ true ]}").unwrap(),
mk_object([
("a".to_string(), F64(1.0)),
("b".to_string(), List(vec![Boolean(true)]))
]));
assert_eq!(from_str(
"{\
\"a\": 1.0, \
\"b\": [\
true,\
\"foo\\nbar\", \
{ \"c\": {\"d\": null} } \
]\
}").unwrap(),
mk_object([
("a".to_string(), F64(1.0)),
("b".to_string(), List(vec![
Boolean(true),
String("foo\nbar".to_string()),
mk_object([
("c".to_string(), mk_object([("d".to_string(), Null)]))
])
]))
]));
}
#[test]
fn test_decode_struct() {
let s = "{
\"inner\": [
{ \"a\": null, \"b\": 2, \"c\": [\"abc\", \"xyz\"] }
]
}";
let v: Outer = super::decode(s).unwrap();
assert_eq!(
v,
Outer {
inner: vec![
Inner { a: (), b: 2, c: vec!["abc".to_string(), "xyz".to_string()] }
]
}
);
}
#[deriving(Decodable)]
struct FloatStruct {
f: f64,
a: Vec<f64>
}
#[test]
fn test_decode_struct_with_nan() {
let s = "{\"f\":null,\"a\":[null,123]}";
let obj: FloatStruct = super::decode(s).unwrap();
assert!(obj.f.is_nan());
assert!(obj.a.get(0).is_nan());
assert_eq!(obj.a.get(1), &123f64);
}
#[test]
fn test_decode_option() {
let value: Option<String> = super::decode("null").unwrap();
assert_eq!(value, None);
let value: Option<String> = super::decode("\"jodhpurs\"").unwrap();
assert_eq!(value, Some("jodhpurs".to_string()));
}
#[test]
fn test_decode_enum() {
let value: Animal = super::decode("\"Dog\"").unwrap();
assert_eq!(value, Dog);
let s = "{\"variant\":\"Frog\",\"fields\":[\"Henry\",349]}";
let value: Animal = super::decode(s).unwrap();
assert_eq!(value, Frog("Henry".to_string(), 349));
}
#[test]
fn test_decode_map() {
let s = "{\"a\": \"Dog\", \"b\": {\"variant\":\"Frog\",\
\"fields\":[\"Henry\", 349]}}";
let mut map: TreeMap<String, Animal> = super::decode(s).unwrap();
assert_eq!(map.pop(&"a".to_string()), Some(Dog));
assert_eq!(map.pop(&"b".to_string()), Some(Frog("Henry".to_string(), 349)));
}
#[test]
fn test_multiline_errors() {
assert_eq!(from_str("{\n \"foo\":\n \"bar\""),
Err(SyntaxError(EOFWhileParsingObject, 3u, 8u)));
}
#[deriving(Decodable)]
#[allow(dead_code)]
struct DecodeStruct {
x: f64,
y: bool,
z: String,
w: Vec<DecodeStruct>
}
#[deriving(Decodable)]
enum DecodeEnum {
A(f64),
B(String)
}
fn check_err<T: Decodable<Decoder, DecoderError>>(to_parse: &'static str,
expected: DecoderError) {
let res: DecodeResult<T> = match from_str(to_parse) {
Err(e) => Err(ParseError(e)),
Ok(json) => Decodable::decode(&mut Decoder::new(json))
};
match res {
Ok(_) => fail!("`{}` parsed & decoded ok, expecting error `{}`",
to_parse, expected),
Err(ParseError(e)) => fail!("`{}` is not valid json: {}",
to_parse, e),
Err(e) => {
assert_eq!(e, expected);
}
}
}
#[test]
fn test_decode_errors_struct() {
check_err::<DecodeStruct>("[]", ExpectedError("Object".to_string(), "[]".to_string()));
check_err::<DecodeStruct>("{\"x\": true, \"y\": true, \"z\": \"\", \"w\": []}",
ExpectedError("Number".to_string(), "true".to_string()));
check_err::<DecodeStruct>("{\"x\": 1, \"y\": [], \"z\": \"\", \"w\": []}",
ExpectedError("Boolean".to_string(), "[]".to_string()));
check_err::<DecodeStruct>("{\"x\": 1, \"y\": true, \"z\": {}, \"w\": []}",
ExpectedError("String".to_string(), "{}".to_string()));
check_err::<DecodeStruct>("{\"x\": 1, \"y\": true, \"z\": \"\", \"w\": null}",
ExpectedError("List".to_string(), "null".to_string()));
check_err::<DecodeStruct>("{\"x\": 1, \"y\": true, \"z\": \"\"}",
MissingFieldError("w".to_string()));
}
#[test]
fn test_decode_errors_enum() {
check_err::<DecodeEnum>("{}",
MissingFieldError("variant".to_string()));
check_err::<DecodeEnum>("{\"variant\": 1}",
ExpectedError("String".to_string(), "1".to_string()));
check_err::<DecodeEnum>("{\"variant\": \"A\"}",
MissingFieldError("fields".to_string()));
check_err::<DecodeEnum>("{\"variant\": \"A\", \"fields\": null}",
ExpectedError("List".to_string(), "null".to_string()));
check_err::<DecodeEnum>("{\"variant\": \"C\", \"fields\": []}",
UnknownVariantError("C".to_string()));
}
#[test]
fn test_find(){
let json_value = from_str("{\"dog\" : \"cat\"}").unwrap();
let found_str = json_value.find(&"dog".to_string());
assert!(found_str.is_some() && found_str.unwrap().as_string().unwrap() == "cat");
}
#[test]
fn test_find_path(){
let json_value = from_str("{\"dog\":{\"cat\": {\"mouse\" : \"cheese\"}}}").unwrap();
let found_str = json_value.find_path(&[&"dog".to_string(),
&"cat".to_string(), &"mouse".to_string()]);
assert!(found_str.is_some() && found_str.unwrap().as_string().unwrap() == "cheese");
}
#[test]
fn test_search(){
let json_value = from_str("{\"dog\":{\"cat\": {\"mouse\" : \"cheese\"}}}").unwrap();
let found_str = json_value.search(&"mouse".to_string()).and_then(|j| j.as_string());
assert!(found_str.is_some());
assert!(found_str.unwrap() == "cheese");
}
#[test]
fn test_is_object(){
let json_value = from_str("{}").unwrap();
assert!(json_value.is_object());
}
#[test]
fn test_as_object(){
let json_value = from_str("{}").unwrap();
let json_object = json_value.as_object();
assert!(json_object.is_some());
}
#[test]
fn test_is_list(){
let json_value = from_str("[1, 2, 3]").unwrap();
assert!(json_value.is_list());
}
#[test]
fn test_as_list(){
let json_value = from_str("[1, 2, 3]").unwrap();
let json_list = json_value.as_list();
let expected_length = 3;
assert!(json_list.is_some() && json_list.unwrap().len() == expected_length);
}
#[test]
fn test_is_string(){
let json_value = from_str("\"dog\"").unwrap();
assert!(json_value.is_string());
}
#[test]
fn test_as_string(){
let json_value = from_str("\"dog\"").unwrap();
let json_str = json_value.as_string();
let expected_str = "dog";
assert_eq!(json_str, Some(expected_str));
}
#[test]
fn test_is_number(){
let json_value = from_str("12").unwrap();
assert!(json_value.is_number());
}
#[test]
fn test_is_i64(){
let json_value = from_str("-12").unwrap();
assert!(json_value.is_i64());
let json_value = from_str("12").unwrap();
assert!(!json_value.is_i64());
let json_value = from_str("12.0").unwrap();
assert!(!json_value.is_i64());
}
#[test]
fn test_is_u64(){
let json_value = from_str("12").unwrap();
assert!(json_value.is_u64());
let json_value = from_str("-12").unwrap();
assert!(!json_value.is_u64());
let json_value = from_str("12.0").unwrap();
assert!(!json_value.is_u64());
}
#[test]
fn test_is_f64(){
let json_value = from_str("12").unwrap();
assert!(!json_value.is_f64());
let json_value = from_str("-12").unwrap();
assert!(!json_value.is_f64());
let json_value = from_str("12.0").unwrap();
assert!(json_value.is_f64());
let json_value = from_str("-12.0").unwrap();
assert!(json_value.is_f64());
}
#[test]
fn test_as_i64(){
let json_value = from_str("-12").unwrap();
let json_num = json_value.as_i64();
assert_eq!(json_num, Some(-12));
}
#[test]
fn test_as_u64(){
let json_value = from_str("12").unwrap();
let json_num = json_value.as_u64();
assert_eq!(json_num, Some(12));
}
#[test]
fn test_as_f64(){
let json_value = from_str("12.0").unwrap();
let json_num = json_value.as_f64();
assert_eq!(json_num, Some(12f64));
}
#[test]
fn test_is_boolean(){
let json_value = from_str("false").unwrap();
assert!(json_value.is_boolean());
}
#[test]
fn test_as_boolean(){
let json_value = from_str("false").unwrap();
let json_bool = json_value.as_boolean();
let expected_bool = false;
assert!(json_bool.is_some() && json_bool.unwrap() == expected_bool);
}
#[test]
fn test_is_null(){
let json_value = from_str("null").unwrap();
assert!(json_value.is_null());
}
#[test]
fn test_as_null(){
let json_value = from_str("null").unwrap();
let json_null = json_value.as_null();
let expected_null = ();
assert!(json_null.is_some() && json_null.unwrap() == expected_null);
}
#[test]
fn test_encode_hashmap_with_numeric_key() {
use std::str::from_utf8;
use std::io::Writer;
use std::io::MemWriter;
use std::collections::HashMap;
let mut hm: HashMap<uint, bool> = HashMap::new();
hm.insert(1, true);
let mut mem_buf = MemWriter::new();
{
let mut encoder = Encoder::new(&mut mem_buf as &mut io::Writer);
hm.encode(&mut encoder).unwrap();
}
let bytes = mem_buf.unwrap();
let json_str = from_utf8(bytes.as_slice()).unwrap();
match from_str(json_str) {
Err(_) => fail!("Unable to parse json_str: {}", json_str),
_ => {} // it parsed and we are good to go
}
}
#[test]
fn test_prettyencode_hashmap_with_numeric_key() {
use std::str::from_utf8;
use std::io::Writer;
use std::io::MemWriter;
use std::collections::HashMap;
let mut hm: HashMap<uint, bool> = HashMap::new();
hm.insert(1, true);
let mut mem_buf = MemWriter::new();
{
let mut encoder = PrettyEncoder::new(&mut mem_buf as &mut io::Writer);
hm.encode(&mut encoder).unwrap()
}
let bytes = mem_buf.unwrap();
let json_str = from_utf8(bytes.as_slice()).unwrap();
match from_str(json_str) {
Err(_) => fail!("Unable to parse json_str: {}", json_str),
_ => {} // it parsed and we are good to go
}
}
#[test]
fn test_prettyencoder_indent_level_param() {
use std::str::from_utf8;
use std::io::MemWriter;
use std::collections::TreeMap;
let mut tree = TreeMap::new();
tree.insert("hello".into_string(), String("guten tag".into_string()));
tree.insert("goodbye".into_string(), String("sayonara".into_string()));
let json = List(
// The layout below should look a lot like
// the pretty-printed JSON (indent * x)
vec!
( // 0x
String("greetings".into_string()), // 1x
Object(tree), // 1x + 2x + 2x + 1x
) // 0x
// End JSON list (7 lines)
);
// Helper function for counting indents
fn indents(source: &str) -> uint {
let trimmed = source.trim_left_chars(' ');
source.len() - trimmed.len()
}
// Test up to 4 spaces of indents (more?)
for i in range(0, 4u) {
let mut writer = MemWriter::new();
{
let ref mut encoder = PrettyEncoder::new(&mut writer);
encoder.set_indent(i);
json.encode(encoder).unwrap();
}
let bytes = writer.unwrap();
let printed = from_utf8(bytes.as_slice()).unwrap();
// Check for indents at each line
let lines: Vec<&str> = printed.lines().collect();
assert_eq!(lines.len(), 7); // JSON should be 7 lines
assert_eq!(indents(lines[0]), 0 * i); // [
assert_eq!(indents(lines[1]), 1 * i); // "greetings",
assert_eq!(indents(lines[2]), 1 * i); // {
assert_eq!(indents(lines[3]), 2 * i); // "hello": "guten tag",
assert_eq!(indents(lines[4]), 2 * i); // "goodbye": "sayonara"
assert_eq!(indents(lines[5]), 1 * i); // },
assert_eq!(indents(lines[6]), 0 * i); // ]
// Finally, test that the pretty-printed JSON is valid
from_str(printed).ok().expect("Pretty-printed JSON is invalid!");
}
}
#[test]
fn test_hashmap_with_numeric_key_can_handle_double_quote_delimited_key() {
use std::collections::HashMap;
use Decodable;
let json_str = "{\"1\":true}";
let json_obj = match from_str(json_str) {
Err(_) => fail!("Unable to parse json_str: {}", json_str),
Ok(o) => o
};
let mut decoder = Decoder::new(json_obj);
let _hm: HashMap<uint, bool> = Decodable::decode(&mut decoder).unwrap();
}
#[test]
fn test_hashmap_with_numeric_key_will_error_with_string_keys() {
use std::collections::HashMap;
use Decodable;
let json_str = "{\"a\":true}";
let json_obj = match from_str(json_str) {
Err(_) => fail!("Unable to parse json_str: {}", json_str),
Ok(o) => o
};
let mut decoder = Decoder::new(json_obj);
let result: Result<HashMap<uint, bool>, DecoderError> = Decodable::decode(&mut decoder);
assert_eq!(result, Err(ExpectedError("Number".to_string(), "a".to_string())));
}
fn assert_stream_equal(src: &str,
expected: Vec<(JsonEvent, Vec<StackElement>)>) {
let mut parser = Parser::new(src.chars());
let mut i = 0;
loop {
let evt = match parser.next() {
Some(e) => e,
None => { break; }
};
let (ref expected_evt, ref expected_stack) = expected[i];
if !parser.stack().is_equal_to(expected_stack.as_slice()) {
fail!("Parser stack is not equal to {}", expected_stack);
}
assert_eq!(&evt, expected_evt);
i+=1;
}
}
#[test]
#[ignore(cfg(target_word_size = "32"))] // FIXME(#14064)
fn test_streaming_parser() {
assert_stream_equal(
r#"{ "foo":"bar", "array" : [0, 1, 2, 3, 4, 5], "idents":[null,true,false]}"#,
vec![
(ObjectStart, vec![]),
(StringValue("bar".to_string()), vec![Key("foo")]),
(ListStart, vec![Key("array")]),
(U64Value(0), vec![Key("array"), Index(0)]),
(U64Value(1), vec![Key("array"), Index(1)]),
(U64Value(2), vec![Key("array"), Index(2)]),
(U64Value(3), vec![Key("array"), Index(3)]),
(U64Value(4), vec![Key("array"), Index(4)]),
(U64Value(5), vec![Key("array"), Index(5)]),
(ListEnd, vec![Key("array")]),
(ListStart, vec![Key("idents")]),
(NullValue, vec![Key("idents"), Index(0)]),
(BooleanValue(true), vec![Key("idents"), Index(1)]),
(BooleanValue(false), vec![Key("idents"), Index(2)]),
(ListEnd, vec![Key("idents")]),
(ObjectEnd, vec![]),
]
);
}
fn last_event(src: &str) -> JsonEvent {
let mut parser = Parser::new(src.chars());
let mut evt = NullValue;
loop {
evt = match parser.next() {
Some(e) => e,
None => return evt,
}
}
}
#[test]
#[ignore(cfg(target_word_size = "32"))] // FIXME(#14064)
fn test_read_object_streaming() {
assert_eq!(last_event("{ "), Error(SyntaxError(EOFWhileParsingObject, 1, 3)));
assert_eq!(last_event("{1"), Error(SyntaxError(KeyMustBeAString, 1, 2)));
assert_eq!(last_event("{ \"a\""), Error(SyntaxError(EOFWhileParsingObject, 1, 6)));
assert_eq!(last_event("{\"a\""), Error(SyntaxError(EOFWhileParsingObject, 1, 5)));
assert_eq!(last_event("{\"a\" "), Error(SyntaxError(EOFWhileParsingObject, 1, 6)));
assert_eq!(last_event("{\"a\" 1"), Error(SyntaxError(ExpectedColon, 1, 6)));
assert_eq!(last_event("{\"a\":"), Error(SyntaxError(EOFWhileParsingValue, 1, 6)));
assert_eq!(last_event("{\"a\":1"), Error(SyntaxError(EOFWhileParsingObject, 1, 7)));
assert_eq!(last_event("{\"a\":1 1"), Error(SyntaxError(InvalidSyntax, 1, 8)));
assert_eq!(last_event("{\"a\":1,"), Error(SyntaxError(EOFWhileParsingObject, 1, 8)));
assert_stream_equal(
"{}",
vec![(ObjectStart, vec![]), (ObjectEnd, vec![])]
);
assert_stream_equal(
"{\"a\": 3}",
vec![
(ObjectStart, vec![]),
(U64Value(3), vec![Key("a")]),
(ObjectEnd, vec![]),
]
);
assert_stream_equal(
"{ \"a\": null, \"b\" : true }",
vec![
(ObjectStart, vec![]),
(NullValue, vec![Key("a")]),
(BooleanValue(true), vec![Key("b")]),
(ObjectEnd, vec![]),
]
);
assert_stream_equal(
"{\"a\" : 1.0 ,\"b\": [ true ]}",
vec![
(ObjectStart, vec![]),
(F64Value(1.0), vec![Key("a")]),
(ListStart, vec![Key("b")]),
(BooleanValue(true),vec![Key("b"), Index(0)]),
(ListEnd, vec![Key("b")]),
(ObjectEnd, vec![]),
]
);
assert_stream_equal(
r#"{
"a": 1.0,
"b": [
true,
"foo\nbar",
{ "c": {"d": null} }
]
}"#,
vec![
(ObjectStart, vec![]),
(F64Value(1.0), vec![Key("a")]),
(ListStart, vec![Key("b")]),
(BooleanValue(true), vec![Key("b"), Index(0)]),
(StringValue("foo\nbar".to_string()), vec![Key("b"), Index(1)]),
(ObjectStart, vec![Key("b"), Index(2)]),
(ObjectStart, vec![Key("b"), Index(2), Key("c")]),
(NullValue, vec![Key("b"), Index(2), Key("c"), Key("d")]),
(ObjectEnd, vec![Key("b"), Index(2), Key("c")]),
(ObjectEnd, vec![Key("b"), Index(2)]),
(ListEnd, vec![Key("b")]),
(ObjectEnd, vec![]),
]
);
}
#[test]
#[ignore(cfg(target_word_size = "32"))] // FIXME(#14064)
fn test_read_list_streaming() {
assert_stream_equal(
"[]",
vec![
(ListStart, vec![]),
(ListEnd, vec![]),
]
);
assert_stream_equal(
"[ ]",
vec![
(ListStart, vec![]),
(ListEnd, vec![]),
]
);
assert_stream_equal(
"[true]",
vec![
(ListStart, vec![]),
(BooleanValue(true), vec![Index(0)]),
(ListEnd, vec![]),
]
);
assert_stream_equal(
"[ false ]",
vec![
(ListStart, vec![]),
(BooleanValue(false), vec![Index(0)]),
(ListEnd, vec![]),
]
);
assert_stream_equal(
"[null]",
vec![
(ListStart, vec![]),
(NullValue, vec![Index(0)]),
(ListEnd, vec![]),
]
);
assert_stream_equal(
"[3, 1]",
vec![
(ListStart, vec![]),
(U64Value(3), vec![Index(0)]),
(U64Value(1), vec![Index(1)]),
(ListEnd, vec![]),
]
);
assert_stream_equal(
"\n[3, 2]\n",
vec![
(ListStart, vec![]),
(U64Value(3), vec![Index(0)]),
(U64Value(2), vec![Index(1)]),
(ListEnd, vec![]),
]
);
assert_stream_equal(
"[2, [4, 1]]",
vec![
(ListStart, vec![]),
(U64Value(2), vec![Index(0)]),
(ListStart, vec![Index(1)]),
(U64Value(4), vec![Index(1), Index(0)]),
(U64Value(1), vec![Index(1), Index(1)]),
(ListEnd, vec![Index(1)]),
(ListEnd, vec![]),
]
);
assert_eq!(last_event("["), Error(SyntaxError(EOFWhileParsingValue, 1, 2)));
assert_eq!(from_str("["), Err(SyntaxError(EOFWhileParsingValue, 1, 2)));
assert_eq!(from_str("[1"), Err(SyntaxError(EOFWhileParsingList, 1, 3)));
assert_eq!(from_str("[1,"), Err(SyntaxError(EOFWhileParsingValue, 1, 4)));
assert_eq!(from_str("[1,]"), Err(SyntaxError(InvalidSyntax, 1, 4)));
assert_eq!(from_str("[6 7]"), Err(SyntaxError(InvalidSyntax, 1, 4)));
}
#[test]
fn test_trailing_characters_streaming() {
assert_eq!(last_event("nulla"), Error(SyntaxError(TrailingCharacters, 1, 5)));
assert_eq!(last_event("truea"), Error(SyntaxError(TrailingCharacters, 1, 5)));
assert_eq!(last_event("falsea"), Error(SyntaxError(TrailingCharacters, 1, 6)));
assert_eq!(last_event("1a"), Error(SyntaxError(TrailingCharacters, 1, 2)));
assert_eq!(last_event("[]a"), Error(SyntaxError(TrailingCharacters, 1, 3)));
assert_eq!(last_event("{}a"), Error(SyntaxError(TrailingCharacters, 1, 3)));
}
#[test]
fn test_read_identifiers_streaming() {
assert_eq!(Parser::new("null".chars()).next(), Some(NullValue));
assert_eq!(Parser::new("true".chars()).next(), Some(BooleanValue(true)));
assert_eq!(Parser::new("false".chars()).next(), Some(BooleanValue(false)));
assert_eq!(last_event("n"), Error(SyntaxError(InvalidSyntax, 1, 2)));
assert_eq!(last_event("nul"), Error(SyntaxError(InvalidSyntax, 1, 4)));
assert_eq!(last_event("t"), Error(SyntaxError(InvalidSyntax, 1, 2)));
assert_eq!(last_event("truz"), Error(SyntaxError(InvalidSyntax, 1, 4)));
assert_eq!(last_event("f"), Error(SyntaxError(InvalidSyntax, 1, 2)));
assert_eq!(last_event("faz"), Error(SyntaxError(InvalidSyntax, 1, 3)));
}
#[test]
fn test_stack() {
let mut stack = Stack::new();
assert!(stack.is_empty());
assert!(stack.len() == 0);
assert!(!stack.last_is_index());
stack.push_index(0);
stack.bump_index();
assert!(stack.len() == 1);
assert!(stack.is_equal_to([Index(1)]));
assert!(stack.starts_with([Index(1)]));
assert!(stack.ends_with([Index(1)]));
assert!(stack.last_is_index());
assert!(stack.get(0) == Index(1));
stack.push_key("foo".to_string());
assert!(stack.len() == 2);
assert!(stack.is_equal_to([Index(1), Key("foo")]));
assert!(stack.starts_with([Index(1), Key("foo")]));
assert!(stack.starts_with([Index(1)]));
assert!(stack.ends_with([Index(1), Key("foo")]));
assert!(stack.ends_with([Key("foo")]));
assert!(!stack.last_is_index());
assert!(stack.get(0) == Index(1));
assert!(stack.get(1) == Key("foo"));
stack.push_key("bar".to_string());
assert!(stack.len() == 3);
assert!(stack.is_equal_to([Index(1), Key("foo"), Key("bar")]));
assert!(stack.starts_with([Index(1)]));
assert!(stack.starts_with([Index(1), Key("foo")]));
assert!(stack.starts_with([Index(1), Key("foo"), Key("bar")]));
assert!(stack.ends_with([Key("bar")]));
assert!(stack.ends_with([Key("foo"), Key("bar")]));
assert!(stack.ends_with([Index(1), Key("foo"), Key("bar")]));
assert!(!stack.last_is_index());
assert!(stack.get(0) == Index(1));
assert!(stack.get(1) == Key("foo"));
assert!(stack.get(2) == Key("bar"));
stack.pop();
assert!(stack.len() == 2);
assert!(stack.is_equal_to([Index(1), Key("foo")]));
assert!(stack.starts_with([Index(1), Key("foo")]));
assert!(stack.starts_with([Index(1)]));
assert!(stack.ends_with([Index(1), Key("foo")]));
assert!(stack.ends_with([Key("foo")]));
assert!(!stack.last_is_index());
assert!(stack.get(0) == Index(1));
assert!(stack.get(1) == Key("foo"));
}
#[test]
fn test_to_json() {
use std::collections::{HashMap,TreeMap};
use super::ToJson;
let list2 = List(vec!(U64(1), U64(2)));
let list3 = List(vec!(U64(1), U64(2), U64(3)));
let object = {
let mut tree_map = TreeMap::new();
tree_map.insert("a".to_string(), U64(1));
tree_map.insert("b".to_string(), U64(2));
Object(tree_map)
};
assert_eq!(list2.to_json(), list2);
assert_eq!(object.to_json(), object);
assert_eq!(3_i.to_json(), I64(3));
assert_eq!(4_i8.to_json(), I64(4));
assert_eq!(5_i16.to_json(), I64(5));
assert_eq!(6_i32.to_json(), I64(6));
assert_eq!(7_i64.to_json(), I64(7));
assert_eq!(8_u.to_json(), U64(8));
assert_eq!(9_u8.to_json(), U64(9));
assert_eq!(10_u16.to_json(), U64(10));
assert_eq!(11_u32.to_json(), U64(11));
assert_eq!(12_u64.to_json(), U64(12));
assert_eq!(13.0_f32.to_json(), F64(13.0_f64));
assert_eq!(14.0_f64.to_json(), F64(14.0_f64));
assert_eq!(().to_json(), Null);
assert_eq!(f32::INFINITY.to_json(), Null);
assert_eq!(f64::NAN.to_json(), Null);
assert_eq!(true.to_json(), Boolean(true));
assert_eq!(false.to_json(), Boolean(false));
assert_eq!("abc".to_string().to_json(), String("abc".to_string()));
assert_eq!((1u, 2u).to_json(), list2);
assert_eq!((1u, 2u, 3u).to_json(), list3);
assert_eq!([1u, 2].to_json(), list2);
assert_eq!((&[1u, 2, 3]).to_json(), list3);
assert_eq!((vec![1u, 2]).to_json(), list2);
assert_eq!(vec!(1u, 2, 3).to_json(), list3);
let mut tree_map = TreeMap::new();
tree_map.insert("a".to_string(), 1u);
tree_map.insert("b".to_string(), 2);
assert_eq!(tree_map.to_json(), object);
let mut hash_map = HashMap::new();
hash_map.insert("a".to_string(), 1u);
hash_map.insert("b".to_string(), 2);
assert_eq!(hash_map.to_json(), object);
assert_eq!(Some(15i).to_json(), I64(15));
assert_eq!(Some(15u).to_json(), U64(15));
assert_eq!(None::<int>.to_json(), Null);
}
#[bench]
fn bench_streaming_small(b: &mut Bencher) {
b.iter( || {
let mut parser = Parser::new(
r#"{
"a": 1.0,
"b": [
true,
"foo\nbar",
{ "c": {"d": null} }
]
}"#.chars()
);
loop {
match parser.next() {
None => return,
_ => {}
}
}
});
}
#[bench]
fn bench_small(b: &mut Bencher) {
b.iter( || {
let _ = from_str(r#"{
"a": 1.0,
"b": [
true,
"foo\nbar",
{ "c": {"d": null} }
]
}"#);
});
}
fn big_json() -> String {
let mut src = "[\n".to_string();
for _ in range(0i, 500) {
src.push_str(r#"{ "a": true, "b": null, "c":3.1415, "d": "Hello world", "e": \
[1,2,3]},"#);
}
src.push_str("{}]");
return src;
}
#[bench]
fn bench_streaming_large(b: &mut Bencher) {
let src = big_json();
b.iter( || {
let mut parser = Parser::new(src.as_slice().chars());
loop {
match parser.next() {
None => return,
_ => {}
}
}
});
}
#[bench]
fn bench_large(b: &mut Bencher) {
let src = big_json();
b.iter( || { let _ = from_str(src.as_slice()); });
}
}
|
apache-2.0
|
mauricionr/TypeScript
|
tests/baselines/reference/Protected4.js
|
202
|
//// [Protected4.ts]
class C {
protected public m() { }
}
//// [Protected4.js]
var C = (function () {
function C() {
}
C.prototype.m = function () {
};
return C;
})();
|
apache-2.0
|
Wikidata/Wikidata-Toolkit
|
wdtk-datamodel/src/main/java/org/wikidata/wdtk/datamodel/interfaces/EntityDocument.java
|
1554
|
package org.wikidata.wdtk.datamodel.interfaces;
/*
* #%L
* Wikidata Toolkit Data Model
* %%
* Copyright (C) 2014 Wikidata Toolkit Developers
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
/**
* Interface for datasets that describe an entity.
*
* @author Markus Kroetzsch
*
*/
public interface EntityDocument {
/**
* Returns the ID of the entity that the data refers to
*
* @return entity id
*/
EntityIdValue getEntityId();
/**
* Returns the revision ID of this document, or 0 if no id is known. The
* revision ID is a number stored by MediaWiki to indicate the version of a
* document. It is based on a global counter that is incremented on each
* edit. Not all sources of entity document data may provide the revision
* ID, as it is not strictly part of the data, but part of the document
* metadata.
*
* @return revision id
*/
long getRevisionId();
/**
* Returns a copy of this document with an updated revision id.
*/
EntityDocument withRevisionId(long newRevisionId);
}
|
apache-2.0
|
smsOrg/Tetris3d
|
app/src/main/java/org/sms/tetris3d/dialogs/DialogItem.java
|
410
|
package org.sms.tetris3d.dialogs;
/**
* 디버그용 클래스
*
* @version 0.1
*
* @author 이민수
*/
/**
* Created by hsh on 2016. 11. 21..
*/
public class DialogItem {
private String title="";
public DialogItem setTitle(String t){
title=t;
return this;
}
public void onClickItem(){
}
@Override
public String toString(){
return title;
}
}
|
apache-2.0
|
zawn/pac4j
|
pac4j-core/src/main/java/org/pac4j/core/authorization/generator/SpringSecurityPropertiesAuthorizationGenerator.java
|
1944
|
package org.pac4j.core.authorization.generator;
import org.pac4j.core.context.WebContext;
import org.pac4j.core.profile.UserProfile;
import org.pac4j.core.util.CommonHelper;
import java.util.*;
/**
* Authorization generator based on a properties file in Spring security format:
* username=password,grantedAuthority[,grantedAuthority][,enabled|disabled]
*
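 * <p>Hypothetical example entries (illustrative values, not from the pac4j documentation):
 * <pre>
 * alice=alicePwd,ROLE_ADMIN,ROLE_USER,enabled
 * bob=bobPwd,ROLE_USER,disabled
 * </pre>
 * With these entries the constructor below maps "alice" to [ROLE_ADMIN, ROLE_USER] and skips
 * "bob" entirely, because its last token is "disabled".
 *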
* @author Jerome Leleu
* @since 1.8.1
*/
public class SpringSecurityPropertiesAuthorizationGenerator implements AuthorizationGenerator {
public final static String DISABLED = "disabled";
public final static String ENABLED = "enabled";
private Map<String, List<String>> rolesByUsers = new HashMap<>();
public SpringSecurityPropertiesAuthorizationGenerator(final Properties properties) {
final Set<String> keys = properties.stringPropertyNames();
for (final String key : keys) {
final String value = properties.getProperty(key);
if (CommonHelper.isNotBlank(value)) {
final String[] parts = value.split(",");
final int nb = parts.length;
if (nb > 1) {
final String latest = parts[nb - 1];
if (!DISABLED.equals(latest)) {
final List<String> roles = new ArrayList<>(Arrays.asList(parts));
if (ENABLED.equals(latest)) {
roles.remove(nb - 1);
}
roles.remove(0);
rolesByUsers.put(key, roles);
}
}
}
}
}
@Override
public UserProfile generate(final WebContext context, final UserProfile profile) {
final String id = profile.getId();
final List<String> roles = rolesByUsers.get(id);
if (roles != null && !roles.isEmpty()) {
profile.addRoles(roles);
}
return profile;
}
}
|
apache-2.0
|
PaddlePaddle/cloud
|
go/filemanager/pfsmodules/chunkmeta.go
|
3848
|
package pfsmodules
import (
"encoding/json"
"errors"
"fmt"
"io"
"net/http"
"net/url"
"os"
"strconv"
"strings"
"github.com/PaddlePaddle/cloud/go/utils/restclient"
)
const (
defaultMaxChunkSize = 32 * 1024 * 1024
defaultMinChunkSize = 4 * 1024
)
const (
// ChunkMetaCmdName is the name of GetChunkMeta command.
ChunkMetaCmdName = "GetChunkMeta"
)
// ChunkMeta holds the chunk meta's info.
type ChunkMeta struct {
Offset int64 `json:"offset"`
Checksum string `json:"checksum"`
Len int64 `json:"len"`
}
// String packs an info string of ChunkMeta.
func (m *ChunkMeta) String() string {
return fmt.Sprintf("Offset:%d Checksum:%s Len:%d", m.Offset, m.Checksum, m.Len)
}
// ChunkMetaCmd is a command.
type ChunkMetaCmd struct {
Method string `json:"method"`
FilePath string `json:"path"`
Offset int64 `json:"offset"`
ChunkSize int64 `json:"chunksize"`
}
// ToURLParam encodes ChunkMetaCmd to URL encoding string.
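// For illustration (hypothetical values): a command with Method "GetChunkMeta", FilePath
// "/pfs/data", Offset 0 and ChunkSize 4096 yields url.Values that encode to something like
// "chunksize=4096&method=GetChunkMeta&offset=0&path=%2Fpfs%2Fdata".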
func (p *ChunkMetaCmd) ToURLParam() url.Values {
parameters := url.Values{}
parameters.Add("method", p.Method)
parameters.Add("path", p.FilePath)
str := fmt.Sprint(p.ChunkSize)
parameters.Add("chunksize", str)
str = fmt.Sprint(p.Offset)
parameters.Add("offset", str)
return parameters
}
// ToJSON encodes ChunkMetaCmd to JSON string.
func (p *ChunkMetaCmd) ToJSON() ([]byte, error) {
return json.Marshal(p)
}
// Run is a function which runs ChunkMetaCmd.
func (p *ChunkMetaCmd) Run() (interface{}, error) {
f := FileHandle{}
if err := f.Open(p.FilePath, os.O_RDONLY, 0); err != nil {
return nil, err
}
defer f.Close()
return f.GetChunkMeta(p.Offset, p.ChunkSize)
}
func (p *ChunkMetaCmd) checkChunkSize() error {
if p.ChunkSize < defaultMinChunkSize ||
p.ChunkSize > defaultMaxChunkSize {
return errors.New(StatusBadChunkSize)
}
return nil
}
// ValidateCloudArgs checks the conditions when running on cloud.
func (p *ChunkMetaCmd) ValidateCloudArgs(userName string) error {
if err := ValidatePfsPath([]string{p.FilePath}, userName, ChunkMetaCmdName); err != nil {
return err
}
return p.checkChunkSize()
}
// ValidateLocalArgs checks the conditions when running locally.
func (p *ChunkMetaCmd) ValidateLocalArgs() error {
return p.checkChunkSize()
}
// NewChunkMetaCmdFromURLParam gets a new ChunkMetaCmd.
func NewChunkMetaCmdFromURLParam(r *http.Request) (*ChunkMetaCmd, error) {
method := r.URL.Query().Get("method")
path := r.URL.Query().Get("path")
chunkStr := r.URL.Query().Get("chunksize")
offsetStr := r.URL.Query().Get("offset")
if len(method) == 0 ||
method != ChunkMetaCmdName ||
len(path) == 0 ||
len(chunkStr) == 0 ||
len(offsetStr) == 0 {
return nil, errors.New(http.StatusText(http.StatusBadRequest))
}
chunkSize, err := strconv.ParseInt(chunkStr, 10, 64)
if err != nil {
return nil, errors.New(StatusBadChunkSize)
}
offset, err := strconv.ParseInt(offsetStr, 10, 64)
if err != nil {
return nil, errors.New(StatusBadChunkSize)
}
return &ChunkMetaCmd{
Method: method,
FilePath: path,
ChunkSize: chunkSize,
Offset: offset,
}, nil
}
func remoteChunkMeta(path string, offset int64,
chunkSize int64) (*ChunkMeta, error) {
cmd := ChunkMetaCmd{
Method: ChunkMetaCmdName,
FilePath: path,
ChunkSize: chunkSize,
Offset: offset,
}
t := fmt.Sprintf("%s/%s", Config.ActiveConfig.Endpoint, RESTChunksPath)
ret, err := restclient.GetCall(t, cmd.ToURLParam())
if err != nil {
return nil, err
}
type chunkMetaResponse struct {
Err string `json:"err"`
Results ChunkMeta `json:"results"`
}
resp := chunkMetaResponse{}
if err := json.Unmarshal(ret, &resp); err != nil {
return nil, err
}
if resp.Err == "" {
return &resp.Results, nil
}
if strings.Contains(resp.Err, StatusFileEOF) {
return &resp.Results, io.EOF
}
return &resp.Results, errors.New(resp.Err)
}
|
apache-2.0
|
weiwenqiang/GitHub
|
Subentry/vlayout-master/vlayout/src/main/java/com/alibaba/android/vlayout/VirtualLayoutManager.java
|
46080
|
/*
* MIT License
*
* Copyright (c) 2016 Alibaba Group
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.alibaba.android.vlayout;
import android.content.Context;
import android.graphics.Rect;
import android.os.Build;
import android.os.Trace;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.v7.widget.OrientationHelper;
import android.support.v7.widget.RecyclerView;
import android.util.AttributeSet;
import android.util.Log;
import android.util.Pair;
import android.view.View;
import android.view.ViewGroup;
import android.view.ViewParent;
import com.alibaba.android.vlayout.layout.BaseLayoutHelper;
import com.alibaba.android.vlayout.layout.DefaultLayoutHelper;
import com.alibaba.android.vlayout.layout.FixAreaAdjuster;
import com.alibaba.android.vlayout.layout.FixAreaLayoutHelper;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
/**
* A {@link android.support.v7.widget.RecyclerView.LayoutManager} implementation which provides
* a virtual layout for actual views.
*
* NOTE: it will change {@link android.support.v7.widget.RecyclerView.RecycledViewPool}
* for RecyclerView.
*
* @author villadora
* @since 1.0.0
*/
public class VirtualLayoutManager extends ExposeLinearLayoutManagerEx implements LayoutManagerHelper {
protected static final String TAG = "VirtualLayoutManager";
private static final String TRACE_LAYOUT = "VLM onLayoutChildren";
private static final String TRACE_SCROLL = "VLM scroll";
private static boolean sDebuggable = false;
public static void enableDebugging(boolean isDebug) {
sDebuggable = isDebug;
}
public static final int HORIZONTAL = OrientationHelper.HORIZONTAL;
public static final int VERTICAL = OrientationHelper.VERTICAL;
protected OrientationHelper mOrientationHelper;
protected OrientationHelper mSecondaryOrientationHelper;
private RecyclerView mRecyclerView;
private boolean mNoScrolling = false;
private boolean mNestedScrolling = false;
private int mMaxMeasureSize = -1;
public VirtualLayoutManager(@NonNull final Context context) {
this(context, VERTICAL);
}
/**
* @param context Context
* @param orientation Layout orientation. Should be {@link #HORIZONTAL} or {@link
* #VERTICAL}.
*/
public VirtualLayoutManager(@NonNull final Context context, int orientation) {
this(context, orientation, false);
}
/**
* @param context Current context, will be used to access resources.
* @param orientation Layout orientation. Should be {@link #HORIZONTAL} or {@link
* #VERTICAL}.
* @param reverseLayout whether should reverse data
*/
public VirtualLayoutManager(@NonNull final Context context, int orientation, boolean reverseLayout) {
super(context, orientation, reverseLayout);
this.mOrientationHelper = OrientationHelper.createOrientationHelper(this, orientation);
this.mSecondaryOrientationHelper = OrientationHelper.createOrientationHelper(this, orientation == VERTICAL ? HORIZONTAL : VERTICAL);
setHelperFinder(new RangeLayoutHelperFinder());
}
public void setNoScrolling(boolean noScrolling) {
this.mNoScrolling = noScrolling;
mSpaceMeasured = false;
mMeasuredFullSpace = 0;
mSpaceMeasuring = false;
}
public void setNestedScrolling(boolean nestedScrolling) {
setNestedScrolling(nestedScrolling, -1);
}
public void setNestedScrolling(boolean nestedScrolling, int maxMeasureSize) {
this.mNestedScrolling = nestedScrolling;
mSpaceMeasuring = mSpaceMeasured = false;
mMeasuredFullSpace = 0;
}
private LayoutHelperFinder mHelperFinder;
public void setHelperFinder(@NonNull final LayoutHelperFinder finder) {
//noinspection ConstantConditions
if (finder == null) {
throw new IllegalArgumentException("finder is null");
}
List<LayoutHelper> helpers = new LinkedList<>();
if (this.mHelperFinder != null) {
for (LayoutHelper helper : mHelperFinder) {
helpers.add(helper);
}
}
this.mHelperFinder = finder;
if (helpers.size() > 0)
this.mHelperFinder.setLayouts(helpers);
mSpaceMeasured = false;
requestLayout();
}
private FixAreaAdjuster mFixAreaAdjustor = FixAreaAdjuster.mDefaultAdjuster;
public void setFixOffset(int left, int top, int right, int bottom) {
mFixAreaAdjustor = new FixAreaAdjuster(left, top, right, bottom);
}
/*
* Temp hashMap
*/
private HashMap<Integer, LayoutHelper> newHelpersSet = new HashMap<>();
private HashMap<Integer, LayoutHelper> oldHelpersSet = new HashMap<>();
private BaseLayoutHelper.LayoutViewBindListener mLayoutViewBindListener;
/**
* Update layoutHelpers; data changes will cause the layoutHelpers to change
*
* @param helpers group of layoutHelpers
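* (for illustration, not from the vlayout docs: helpers with item counts 2 and 3
* are assigned the ranges [0, 1] and [2, 4] respectively before layout is requested again)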
*/
public void setLayoutHelpers(@Nullable List<LayoutHelper> helpers) {
for (LayoutHelper helper : mHelperFinder) {
oldHelpersSet.put(System.identityHashCode(helper), helper);
}
// set ranges
if (helpers != null) {
int start = 0;
for (int i = 0; i < helpers.size(); i++) {
LayoutHelper helper = helpers.get(i);
if (helper instanceof FixAreaLayoutHelper) {
((FixAreaLayoutHelper) helper).setAdjuster(mFixAreaAdjustor);
}
if (helper instanceof BaseLayoutHelper && mLayoutViewBindListener != null) {
((BaseLayoutHelper) helper).setLayoutViewBindListener(mLayoutViewBindListener);
}
if (helper.getItemCount() > 0) {
helper.setRange(start, start + helper.getItemCount() - 1);
} else {
helper.setRange(-1, -1);
}
start += helper.getItemCount();
}
}
this.mHelperFinder.setLayouts(helpers);
for (LayoutHelper helper : mHelperFinder) {
newHelpersSet.put(System.identityHashCode(helper), helper);
}
for (Iterator<Map.Entry<Integer, LayoutHelper>> it = oldHelpersSet.entrySet().iterator(); it.hasNext(); ) {
Map.Entry<Integer, LayoutHelper> entry = it.next();
Integer key = entry.getKey();
if (newHelpersSet.containsKey(key)) {
newHelpersSet.remove(key);
it.remove();
}
}
for (LayoutHelper helper : oldHelpersSet.values()) {
helper.clear(this);
}
if (!oldHelpersSet.isEmpty() || !newHelpersSet.isEmpty()) {
mSpaceMeasured = false;
}
oldHelpersSet.clear();
newHelpersSet.clear();
requestLayout();
}
@NonNull
public List<LayoutHelper> getLayoutHelpers() {
return this.mHelperFinder.getLayoutHelpers();
}
/**
* Either be {@link #HORIZONTAL} or {@link #VERTICAL}
*
* @return orientation of this layout manager
*/
@Override
public int getOrientation() {
return super.getOrientation();
}
@Override
public void setOrientation(int orientation) {
this.mOrientationHelper = OrientationHelper.createOrientationHelper(this, orientation);
super.setOrientation(orientation);
}
/**
* reverseLayout is not supported by VirtualLayoutManager. It is disabled until all the LayoutHelpers support it.
*/
@Override
public void setReverseLayout(boolean reverseLayout) {
if (reverseLayout) {
throw new UnsupportedOperationException(
"VirtualLayoutManager does not support reverse layout in current version.");
}
super.setReverseLayout(false);
}
/**
* stackFromEnd is not supported by VirtualLayoutManager. It is disabled until all the layoutHelpers support it.
* {@link #setReverseLayout(boolean)}.
*/
@Override
public void setStackFromEnd(boolean stackFromEnd) {
if (stackFromEnd) {
throw new UnsupportedOperationException(
"VirtualLayoutManager does not support stack from end.");
}
super.setStackFromEnd(false);
}
private AnchorInfoWrapper mTempAnchorInfoWrapper = new AnchorInfoWrapper();
@Override
public void onAnchorReady(RecyclerView.State state, ExposeLinearLayoutManagerEx.AnchorInfo anchorInfo) {
super.onAnchorReady(state, anchorInfo);
boolean changed = true;
while (changed) {
mTempAnchorInfoWrapper.position = anchorInfo.mPosition;
mTempAnchorInfoWrapper.coordinate = anchorInfo.mCoordinate;
mTempAnchorInfoWrapper.layoutFromEnd = anchorInfo.mLayoutFromEnd;
LayoutHelper layoutHelper = mHelperFinder.getLayoutHelper(anchorInfo.mPosition);
if (layoutHelper != null)
layoutHelper.checkAnchorInfo(state, mTempAnchorInfoWrapper, this);
if (mTempAnchorInfoWrapper.position == anchorInfo.mPosition) {
changed = false;
} else {
anchorInfo.mPosition = mTempAnchorInfoWrapper.position;
}
anchorInfo.mCoordinate = mTempAnchorInfoWrapper.coordinate;
mTempAnchorInfoWrapper.position = -1;
}
mTempAnchorInfoWrapper.position = anchorInfo.mPosition;
mTempAnchorInfoWrapper.coordinate = anchorInfo.mCoordinate;
for (LayoutHelper layoutHelper : mHelperFinder) {
layoutHelper.onRefreshLayout(state, mTempAnchorInfoWrapper, this);
}
}
@Override
protected int computeAlignOffset(View child, boolean isLayoutEnd, boolean useAnchor) {
int position = getPosition(child);
if (position != RecyclerView.NO_POSITION) {
LayoutHelper helper = mHelperFinder.getLayoutHelper(position);
if (helper != null) {
return helper.computeAlignOffset(position - helper.getRange().getLower(),
isLayoutEnd, useAnchor, this);
}
}
return 0;
}
public int obtainExtraMargin(View child, boolean isLayoutEnd) {
if (child != null) {
return computeAlignOffset(child, isLayoutEnd, true);
}
return 0;
}
private int mNested = 0;
private void runPreLayout(RecyclerView.Recycler recycler, RecyclerView.State state) {
if (mNested == 0) {
for (LayoutHelper layoutHelper : mHelperFinder.reverse()) {
layoutHelper.beforeLayout(recycler, state, this);
}
}
mNested++;
}
private void runPostLayout(RecyclerView.Recycler recycler, RecyclerView.State state, int scrolled) {
mNested--;
if (mNested <= 0) {
mNested = 0;
final int startPosition = findFirstVisibleItemPosition();
final int endPosition = findLastVisibleItemPosition();
for (LayoutHelper layoutHelper : mHelperFinder) {
try {
layoutHelper.afterLayout(recycler, state, startPosition, endPosition, scrolled, this);
} catch (Exception e) {
if (VirtualLayoutManager.sDebuggable) {
throw e;
}
}
}
}
}
@Override
public void onLayoutChildren(RecyclerView.Recycler recycler, RecyclerView.State state) {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2) {
Trace.beginSection(TRACE_LAYOUT);
}
if (mNoScrolling && state.didStructureChange()) {
mSpaceMeasured = false;
mSpaceMeasuring = true;
}
runPreLayout(recycler, state);
try {
super.onLayoutChildren(recycler, state);
} catch (Exception e) {
e.printStackTrace();
throw e;
} finally {
// Integer.MAX_VALUE means an invalid scrolling offset - no scroll
runPostLayout(recycler, state, Integer.MAX_VALUE); // hack to indicate it's an initial layout
}
if ((mNestedScrolling || mNoScrolling) && mSpaceMeasuring) {
// measure required, so do measure
mSpaceMeasured = true;
// get last child
int childCount = getChildCount();
View lastChild = getChildAt(childCount - 1);
if (lastChild != null) {
RecyclerView.LayoutParams params = (RecyclerView.LayoutParams) lastChild.getLayoutParams();
// found the end of last child view
mMeasuredFullSpace = getDecoratedBottom(lastChild) + params.bottomMargin + computeAlignOffset(lastChild, true, false);
if (mRecyclerView != null && mNestedScrolling) {
ViewParent parent = mRecyclerView.getParent();
if (parent instanceof View) {
// make sure the full space is the min of the measured space and the parent's height
mMeasuredFullSpace = Math.min(mMeasuredFullSpace, ((View) parent).getMeasuredHeight());
}
}
} else {
mSpaceMeasuring = false;
}
mSpaceMeasuring = false;
if (mRecyclerView != null && getItemCount() > 0) {
// relayout
mRecyclerView.post(new Runnable() {
@Override
public void run() {
// post relayout
if (mRecyclerView != null)
mRecyclerView.requestLayout();
}
});
}
}
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2) {
Trace.endSection();
}
}
/**
* Entry method for scrolling
* {@inheritDoc}
*/
@Override
protected int scrollInternalBy(int dy, RecyclerView.Recycler recycler, RecyclerView.State state) {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2) {
Trace.beginSection(TRACE_SCROLL);
}
runPreLayout(recycler, state);
int scrolled = 0;
try {
if (!mNoScrolling) {
scrolled = super.scrollInternalBy(dy, recycler, state);
} else {
if (getChildCount() == 0 || dy == 0) {
return 0;
}
mLayoutState.mRecycle = true;
ensureLayoutStateExpose();
final int layoutDirection = dy > 0 ? LayoutState.LAYOUT_END : LayoutState.LAYOUT_START;
final int absDy = Math.abs(dy);
updateLayoutStateExpose(layoutDirection, absDy, true, state);
final int freeScroll = mLayoutState.mScrollingOffset;
final int consumed = freeScroll + fill(recycler, mLayoutState, state, false);
if (consumed < 0) {
return 0;
}
scrolled = absDy > consumed ? layoutDirection * consumed : dy;
}
} catch (Exception e) {
Log.w(TAG, Log.getStackTraceString(e), e);
if (sDebuggable)
throw e;
} finally {
runPostLayout(recycler, state, scrolled);
}
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2) {
Trace.endSection();
}
return scrolled;
}
@Override
public void onScrollStateChanged(int state) {
super.onScrollStateChanged(state);
int startPosition = findFirstVisibleItemPosition();
int endPosition = findLastVisibleItemPosition();
for (LayoutHelper helper : mHelperFinder) {
helper.onScrollStateChanged(state, startPosition, endPosition, this);
}
}
@Override
public void offsetChildrenHorizontal(int dx) {
super.offsetChildrenHorizontal(dx);
for (LayoutHelper helper : mHelperFinder) {
helper.onOffsetChildrenHorizontal(dx, this);
}
}
@Override
public void offsetChildrenVertical(int dy) {
super.offsetChildrenVertical(dy);
for (LayoutHelper helper : mHelperFinder) {
helper.onOffsetChildrenVertical(dy, this);
}
}
private LayoutStateWrapper mTempLayoutStateWrapper = new LayoutStateWrapper();
private List<Pair<Range<Integer>, Integer>> mRangeLengths = new LinkedList<>();
@Nullable
private int findRangeLength(@NonNull final Range<Integer> range) {
final int count = mRangeLengths.size();
if (count == 0) {
return -1;
}
int s = 0, e = count - 1, m = -1;
Pair<Range<Integer>, Integer> rs = null;
// binary search range
while (s <= e) {
m = (s + e) / 2;
rs = mRangeLengths.get(m);
Range<Integer> r = rs.first;
if (r == null) {
rs = null;
break;
}
if (r.contains(range.getLower()) || r.contains(range.getUpper()) || range.contains(r)) {
break;
} else if (r.getLower() > range.getUpper()) {
e = m - 1;
} else if (r.getUpper() < range.getLower()) {
s = m + 1;
}
rs = null;
}
return rs == null ? -1 : m;
}
@Override
protected void layoutChunk(RecyclerView.Recycler recycler, RecyclerView.State state, LayoutState layoutState, com.alibaba.android.vlayout.layout.LayoutChunkResult result) {
final int position = layoutState.mCurrentPosition;
mTempLayoutStateWrapper.mLayoutState = layoutState;
LayoutHelper layoutHelper = mHelperFinder == null ? null : mHelperFinder.getLayoutHelper(position);
if (layoutHelper == null)
layoutHelper = mDefaultLayoutHelper;
layoutHelper.doLayout(recycler, state, mTempLayoutStateWrapper, result, this);
mTempLayoutStateWrapper.mLayoutState = null;
// no item consumed
if (layoutState.mCurrentPosition == position) {
Log.w(TAG, "layoutHelper[" + layoutHelper.getClass().getSimpleName() + "@" + layoutHelper.toString() + "] consumes no item!");
// break as no item consumed
result.mFinished = true;
} else {
// Update height consumed in each layoutChunk pass
final int positionAfterLayout = layoutState.mCurrentPosition - layoutState.mItemDirection;
final int consumed = result.mIgnoreConsumed ? 0 : result.mConsumed;
// TODO: change when supporting reverseLayout
Range<Integer> range = new Range<>(Math.min(position, positionAfterLayout), Math.max(position, positionAfterLayout));
final int idx = findRangeLength(range);
if (idx >= 0) {
Pair<Range<Integer>, Integer> pair = mRangeLengths.get(idx);
if (pair != null && pair.first.equals(range) && pair.second == consumed)
return;
mRangeLengths.remove(idx);
}
mRangeLengths.add(Pair.create(range, consumed));
Collections.sort(mRangeLengths, new Comparator<Pair<Range<Integer>, Integer>>() {
@Override
public int compare(Pair<Range<Integer>, Integer> a, Pair<Range<Integer>, Integer> b) {
if (a == null && b == null) return 0;
if (a == null) return -1;
if (b == null) return 1;
Range<Integer> lr = a.first;
Range<Integer> rr = b.first;
return lr.getLower() - rr.getLower();
}
});
}
}
/**
* Return the current offset relative to the top; only works when scrolling from the top
*
* @return offset from the current position to the original top of the RecyclerView
*/
public int getOffsetToStart() {
if (getChildCount() == 0) return -1;
final View view = getChildAt(0);
if (view == null) {
// in some conditions, for example, calling this method when the outer activity is destroyed, may cause an NPE
return -1;
}
int position = getPosition(view);
final int idx = findRangeLength(Range.create(position, position));
if (idx < 0 || idx >= mRangeLengths.size()) {
return -1;
}
int offset = -mOrientationHelper.getDecoratedStart(view);
for (int i = 0; i < idx; i++) {
Pair<Range<Integer>, Integer> pair = mRangeLengths.get(i);
if (pair != null) {
offset += pair.second;
}
}
return offset;
}
private static LayoutHelper DEFAULT_LAYOUT_HELPER = new DefaultLayoutHelper();
private LayoutHelper mDefaultLayoutHelper = DEFAULT_LAYOUT_HELPER;
/**
* Change default LayoutHelper
*
* @param layoutHelper default layoutHelper applied to items without a specified layoutHelper; it should not be null
*/
private void setDefaultLayoutHelper(@NonNull final LayoutHelper layoutHelper) {
//noinspection ConstantConditions
if (layoutHelper == null)
throw new IllegalArgumentException("layoutHelper should not be null");
this.mDefaultLayoutHelper = layoutHelper;
}
@Override
public void scrollToPosition(int position) {
super.scrollToPosition(position);
}
@Override
public void scrollToPositionWithOffset(int position, int offset) {
super.scrollToPositionWithOffset(position, offset);
}
@Override
public void smoothScrollToPosition(RecyclerView recyclerView, RecyclerView.State state, int position) {
super.smoothScrollToPosition(recyclerView, state, position);
}
@Override
public boolean supportsPredictiveItemAnimations() {
return mCurrentPendingSavedState == null;
}
/**
* Do updates when items change
*
* @param recyclerView recyclerView that belong to
* @param positionStart start position that items changed
* @param itemCount number of items that changed
*/
@Override
public void onItemsAdded(RecyclerView recyclerView, int positionStart, int itemCount) {
onItemsChanged(recyclerView);
}
@Override
public void onItemsRemoved(RecyclerView recyclerView, int positionStart, int itemCount) {
onItemsChanged(recyclerView);
}
@Override
public void onItemsUpdated(RecyclerView recyclerView, int positionStart, int itemCount) {
onItemsChanged(recyclerView);
}
@Override
public void onItemsMoved(RecyclerView recyclerView, int from, int to, int itemCount) {
onItemsChanged(recyclerView);
}
@Override
public void onItemsChanged(RecyclerView recyclerView) {
for (LayoutHelper helper : mHelperFinder) {
helper.onItemsChanged(this);
}
// setLayoutHelpers(mHelperFinder.getLayoutHelpers());
}
@Override
public boolean checkLayoutParams(RecyclerView.LayoutParams lp) {
return lp instanceof LayoutParams;
}
@Override
public RecyclerView.LayoutParams generateDefaultLayoutParams() {
return new LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT,
ViewGroup.LayoutParams.WRAP_CONTENT);
}
@Override
public RecyclerView.LayoutParams generateLayoutParams(ViewGroup.LayoutParams lp) {
if (lp instanceof LayoutParams) {
return new LayoutParams((LayoutParams) lp);
} else if (lp instanceof RecyclerView.LayoutParams) {
return new LayoutParams((RecyclerView.LayoutParams) lp);
} else if (lp instanceof ViewGroup.MarginLayoutParams) {
return new LayoutParams((ViewGroup.MarginLayoutParams) lp);
} else {
return new LayoutParams(lp);
}
}
@Override
public RecyclerView.LayoutParams generateLayoutParams(Context c, AttributeSet attrs) {
return new InflateLayoutParams(c, attrs);
}
@Override
public void onAdapterChanged(RecyclerView.Adapter oldAdapter, RecyclerView.Adapter newAdapter) {
super.onAdapterChanged(oldAdapter, newAdapter);
}
@Override
public void onAttachedToWindow(RecyclerView view) {
super.onAttachedToWindow(view);
mRecyclerView = view;
}
@Override
public void onDetachedFromWindow(RecyclerView view, RecyclerView.Recycler recycler) {
super.onDetachedFromWindow(view, recycler);
for (LayoutHelper helper : mHelperFinder) {
helper.clear(this);
}
mRecyclerView = null;
}
@SuppressWarnings("unused")
public static class LayoutParams extends RecyclerView.LayoutParams {
public static final int INVALIDE_SIZE = Integer.MIN_VALUE;
public int zIndex = 0;
public float mAspectRatio = Float.NaN;
private int mOriginWidth = INVALIDE_SIZE;
private int mOriginHeight = INVALIDE_SIZE;
public void storeOriginWidth() {
if (mOriginWidth == INVALIDE_SIZE) {
mOriginWidth = width;
}
}
public void storeOriginHeight() {
if (mOriginHeight == INVALIDE_SIZE) {
mOriginHeight = height;
}
}
public void restoreOriginWidth() {
if (mOriginWidth != INVALIDE_SIZE) {
width = mOriginWidth;
}
}
public void restoreOriginHeight() {
if (mOriginHeight != INVALIDE_SIZE) {
height = mOriginHeight;
}
}
public LayoutParams(Context c, AttributeSet attrs) {
super(c, attrs);
}
public LayoutParams(int width, int height) {
super(width, height);
}
public LayoutParams(ViewGroup.MarginLayoutParams source) {
super(source);
}
public LayoutParams(ViewGroup.LayoutParams source) {
super(source);
}
public LayoutParams(RecyclerView.LayoutParams source) {
super(source);
}
}
public static class InflateLayoutParams extends LayoutParams {
public InflateLayoutParams(Context c, AttributeSet attrs) {
super(c, attrs);
}
}
public static class AnchorInfoWrapper {
public int position;
public int coordinate;
public boolean layoutFromEnd;
AnchorInfoWrapper() {
}
}
@SuppressWarnings({"JavaDoc", "unused"})
public static class LayoutStateWrapper {
public final static int LAYOUT_START = -1;
public final static int LAYOUT_END = 1;
final static int INVALID_LAYOUT = Integer.MIN_VALUE;
public final static int ITEM_DIRECTION_HEAD = -1;
public final static int ITEM_DIRECTION_TAIL = 1;
final static int SCOLLING_OFFSET_NaN = Integer.MIN_VALUE;
private LayoutState mLayoutState;
LayoutStateWrapper() {
}
LayoutStateWrapper(LayoutState layoutState) {
this.mLayoutState = layoutState;
}
public int getOffset() {
return mLayoutState.mOffset;
}
public int getCurrentPosition() {
return mLayoutState.mCurrentPosition;
}
public boolean hasScrapList() {
return mLayoutState.mScrapList != null;
}
public void skipCurrentPosition() {
mLayoutState.mCurrentPosition += mLayoutState.mItemDirection;
}
/**
* We may not want to recycle children in some cases (e.g. layout)
*/
public boolean isRecycle() {
return mLayoutState.mRecycle;
}
/**
* Whether this {@link #layoutChunk(RecyclerView.Recycler, RecyclerView.State, LayoutState, com.alibaba.android.vlayout.layout.LayoutChunkResult)} pass happens during layout or during scrolling
*/
public boolean isRefreshLayout() {
return mLayoutState.mOnRefresLayout;
}
/**
* Number of pixels that we should fill, in the layout direction.
*/
public int getAvailable() {
return mLayoutState.mAvailable;
}
/**
* Defines the direction in which the data adapter is traversed.
* Should be {@link #ITEM_DIRECTION_HEAD} or {@link #ITEM_DIRECTION_TAIL}
*/
public int getItemDirection() {
return mLayoutState.mItemDirection;
}
/**
* Defines the direction in which the layout is filled.
* Should be {@link #LAYOUT_START} or {@link #LAYOUT_END}
*/
public int getLayoutDirection() {
return mLayoutState.mLayoutDirection;
}
/**
* Used when LayoutState is constructed in a scrolling state.
* It should be set to the amount of scrolling we can do without creating a new view.
* Setting this is required for efficient view recycling.
*/
public int getScrollingOffset() {
return mLayoutState.mScrollingOffset;
}
/**
* Used if you want to pre-layout items that are not yet visible.
* The difference with {@link #getAvailable()} is that, when recycling, distance laid out for
* {@link #getExtra()} is not considered to avoid recycling visible children.
*/
public int getExtra() {
return mLayoutState.mExtra;
}
/**
* Equal to {@link RecyclerView.State#isPreLayout()}. When consuming scrap, if this value
* is set to true, we skip removed views since they should not be laid out in post layout
* step.
*/
public boolean isPreLayout() {
return mLayoutState.mIsPreLayout;
}
public boolean hasMore(RecyclerView.State state) {
return mLayoutState.hasMore(state);
}
public View next(RecyclerView.Recycler recycler) {
View next = mLayoutState.next(recycler);
// set recycler
return next;
}
public View retrieve(RecyclerView.Recycler recycler, int position) {
int originPosition = mLayoutState.mCurrentPosition;
mLayoutState.mCurrentPosition = position;
View view = next(recycler);
mLayoutState.mCurrentPosition = originPosition;
return view;
}
}
private static class LayoutViewHolder extends RecyclerView.ViewHolder {
public LayoutViewHolder(View itemView) {
super(itemView);
}
}
public List<View> getFixedViews() {
if (mRecyclerView == null) return Collections.emptyList();
// TODO: support zIndex?
List<View> views = new LinkedList<>();
for (LayoutHelper helper : mHelperFinder) {
View fixedView = helper.getFixedView();
if (fixedView != null) {
views.add(fixedView);
}
}
return views;
}
private LayoutViewFactory mLayoutViewFatory = new LayoutViewFactory() {
@Override
public View generateLayoutView(@NonNull Context context) {
return new LayoutView(context);
}
};
/**
* Set the LayoutView factory, so you can replace the LayoutView used by LayoutHelpers
*
* @param factory factory used to create LayoutViews; should not be null
*/
public void setLayoutViewFactory(@NonNull final LayoutViewFactory factory) {
if (factory == null)
throw new IllegalArgumentException("factory should not be null");
mLayoutViewFatory = factory;
}
@Override
public final View generateLayoutView() {
if (mRecyclerView == null) return null;
View layoutView = mLayoutViewFatory.generateLayoutView(mRecyclerView.getContext());
LayoutParams params = new LayoutParams(ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT);
attachViewHolder(params, new LayoutViewHolder(layoutView));
layoutView.setLayoutParams(params);
return layoutView;
}
@Override
public void addChildView(View view, int index) {
super.addView(view, index);
}
@Override
public void moveView(int fromIndex, int toIndex) {
super.moveView(fromIndex, toIndex);
}
@Override
public void addChildView(LayoutStateWrapper layoutState, View view) {
addChildView(layoutState, view, layoutState.getItemDirection() == LayoutStateWrapper.ITEM_DIRECTION_TAIL ? -1 : 0);
}
@Override
public void addChildView(LayoutStateWrapper layoutState, View view, int index) {
showView(view);
if (!layoutState.hasScrapList()) {
// can not find in scrapList
addView(view, index);
} else {
addDisappearingView(view, index);
}
}
/**
* {@inheritDoc}
*/
@Override
public void addOffFlowView(View view, boolean head) {
showView(view);
addHiddenView(view, head);
}
@Override
public void addBackgroundView(View view, boolean head) {
showView(view);
int index = head ? 0 : -1;
addView(view, index);
}
@Override
public void addFixedView(View view) {
//removeChildView(view);
//mFixedContainer.addView(view);
addOffFlowView(view, false);
}
@Override
public void hideView(View view) {
super.hideView(view);
}
@Override
public void showView(View view) {
super.showView(view);
}
@Override
public RecyclerView getRecyclerView() {
return mRecyclerView;
}
@Override
public RecyclerView.ViewHolder getChildViewHolder(View view) {
if (mRecyclerView != null)
return mRecyclerView.getChildViewHolder(view);
return null;
}
/**
* {@inheritDoc}
*/
@Override
public boolean isViewHolderUpdated(View view) {
RecyclerView.ViewHolder holder = getChildViewHolder(view);
return holder == null || isViewHolderUpdated(holder);
}
@Override
public void removeChildView(View child) {
removeView(child);
}
@Override
public OrientationHelper getMainOrientationHelper() {
return mOrientationHelper;
}
@Override
public OrientationHelper getSecondaryOrientationHelper() {
return mSecondaryOrientationHelper;
}
@Override
public void measureChild(View child, int widthSpec, int heightSpec) {
measureChildWithDecorations(child, widthSpec, heightSpec);
}
@Override
public void measureChildWithMargins(View child, int widthUsed, int heightUsed) {
measureChildWithDecorationsAndMargin(child, widthUsed, heightUsed);
}
@Override
public int getChildMeasureSpec(int parentSize, int size, boolean canScroll) {
return getChildMeasureSpec(parentSize, 0, size, canScroll);
}
@Override
public boolean canScrollHorizontally() {
return super.canScrollHorizontally() && !mNoScrolling;
}
@Override
public boolean canScrollVertically() {
return super.canScrollVertically() && !mNoScrolling;
}
@Override
public void layoutChild(View child, int left, int top, int right, int bottom) {
final ViewGroup.MarginLayoutParams lp = (ViewGroup.MarginLayoutParams) child.getLayoutParams();
layoutDecorated(child, left + lp.leftMargin, top + lp.topMargin,
right - lp.rightMargin, bottom - lp.bottomMargin);
}
@Override
protected void recycleChildren(RecyclerView.Recycler recycler, int startIndex, int endIndex) {
if (startIndex == endIndex) {
return;
}
if (sDebuggable) {
Log.d(TAG, "Recycling " + Math.abs(startIndex - endIndex) + " items");
}
if (endIndex > startIndex) {
View endView = getChildAt(endIndex - 1);
View startView = getChildAt(startIndex);
int startPos = getPosition(startView);
int endPos = getPosition(endView);
int idx = startIndex;
for (int i = startIndex; i < endIndex; i++) {
View v = getChildAt(idx);
int pos = getPosition(v);
if (pos != RecyclerView.NO_POSITION) {
LayoutHelper layoutHelper = mHelperFinder.getLayoutHelper(pos);
if (layoutHelper == null || layoutHelper.isRecyclable(pos, startPos, endPos, this, true)) {
removeAndRecycleViewAt(idx, recycler);
} else {
idx++;
}
} else
removeAndRecycleViewAt(idx, recycler);
}
} else {
View endView = getChildAt(startIndex);
View startView = getChildAt(endIndex + 1);
int startPos = getPosition(startView);
int endPos = getPosition(endView);
for (int i = startIndex; i > endIndex; i--) {
View v = getChildAt(i);
int pos = getPosition(v);
if (pos != RecyclerView.NO_POSITION) {
LayoutHelper layoutHelper = mHelperFinder.getLayoutHelper(pos);
if (layoutHelper == null || layoutHelper.isRecyclable(pos, startPos, endPos, this, false)) {
removeAndRecycleViewAt(i, recycler);
}
} else
removeAndRecycleViewAt(i, recycler);
}
}
}
@Override
public void detachAndScrapAttachedViews(RecyclerView.Recycler recycler) {
int childCount = this.getChildCount();
for (int i = childCount - 1; i >= 0; --i) {
View v = this.getChildAt(i);
RecyclerView.ViewHolder holder = getChildViewHolder(v);
if (holder instanceof CacheViewHolder && ((CacheViewHolder) holder).needCached()) {
// mark as not invalid, ignore DataSetChange(), and let the ViewHolder itself maintain the data
ViewHolderWrapper.setFlags(holder, 0, FLAG_INVALID |FLAG_UPDATED);
}
}
super.detachAndScrapAttachedViews(recycler);
}
@Override
public void detachAndScrapViewAt(int index, RecyclerView.Recycler recycler) {
View child = getChildAt(index);
RecyclerView.ViewHolder holder = getChildViewHolder(child);
if (holder instanceof CacheViewHolder && ((CacheViewHolder) holder).needCached()) {
// mark not invalid
ViewHolderWrapper.setFlags(holder, 0, FLAG_INVALID);
}
super.detachAndScrapViewAt(index, recycler);
}
@Override
public void detachAndScrapView(View child, RecyclerView.Recycler recycler) {
super.detachAndScrapView(child, recycler);
}
public interface CacheViewHolder {
boolean needCached();
}
@Override
public int getContentWidth() {
return super.getWidth();
}
@Override
public int getContentHeight() {
return super.getHeight();
}
@Override
public boolean isDoLayoutRTL() {
return isLayoutRTL();
}
private Rect mDecorInsets = new Rect();
private void measureChildWithDecorations(View child, int widthSpec, int heightSpec) {
calculateItemDecorationsForChild(child, mDecorInsets);
widthSpec = updateSpecWithExtra(widthSpec, mDecorInsets.left, mDecorInsets.right);
heightSpec = updateSpecWithExtra(heightSpec, mDecorInsets.top, mDecorInsets.bottom);
child.measure(widthSpec, heightSpec);
}
private void measureChildWithDecorationsAndMargin(View child, int widthSpec, int heightSpec) {
calculateItemDecorationsForChild(child, mDecorInsets);
RecyclerView.LayoutParams lp = (RecyclerView.LayoutParams) child.getLayoutParams();
widthSpec = updateSpecWithExtra(widthSpec, lp.leftMargin + mDecorInsets.left,
lp.rightMargin + mDecorInsets.right);
heightSpec = updateSpecWithExtra(heightSpec, lp.topMargin + mDecorInsets.top,
lp.bottomMargin + mDecorInsets.bottom);
child.measure(widthSpec, heightSpec);
}
/**
* Update measure spec with insets
*
* @param spec       original measure spec
* @param startInset inset at the start edge (decoration and/or margin)
* @param endInset   inset at the end edge (decoration and/or margin)
* @return measure spec reduced by the given insets
*/
private int updateSpecWithExtra(int spec, int startInset, int endInset) {
if (startInset == 0 && endInset == 0) {
return spec;
}
final int mode = View.MeasureSpec.getMode(spec);
if (mode == View.MeasureSpec.AT_MOST || mode == View.MeasureSpec.EXACTLY) {
int size = View.MeasureSpec.getSize(spec);
if (size - startInset - endInset < 0) {
return View.MeasureSpec.makeMeasureSpec(0, mode);
} else {
return View.MeasureSpec.makeMeasureSpec(
View.MeasureSpec.getSize(spec) - startInset - endInset, mode);
}
}
return spec;
}
@Override
public View findViewByPosition(int position) {
View view = super.findViewByPosition(position);
if (view != null && getPosition(view) == position)
return view;
for (int i = 0; i < getChildCount(); i++) {
view = getChildAt(i);
if (view != null && getPosition(view) == position) {
return view;
}
}
return null;
}
@Override
public void recycleView(View view) {
if (mRecyclerView != null) {
RecyclerView.ViewHolder holder = mRecyclerView.getChildViewHolder(view);
mRecyclerView.getRecycledViewPool().putRecycledView(holder);
}
}
@Override
public LayoutHelper findLayoutHelperByPosition(int position) {
return mHelperFinder.getLayoutHelper(position);
}
/*
* extend to full show view
*/
// when no-scrolling is set, the max size should have a limit
private static final int MAX_NO_SCROLLING_SIZE = Integer.MAX_VALUE >> 4;
private boolean mSpaceMeasured = false;
private int mMeasuredFullSpace = 0;
private boolean mSpaceMeasuring = false;
@Override
public void onMeasure(RecyclerView.Recycler recycler, RecyclerView.State state, int widthSpec, int heightSpec) {
if (!mNoScrolling && !mNestedScrolling) {
super.onMeasure(recycler, state, widthSpec, heightSpec);
return;
}
int initialSize = MAX_NO_SCROLLING_SIZE;
if (mRecyclerView != null && mNestedScrolling) {
if (mMaxMeasureSize > 0) {
initialSize = mMaxMeasureSize;
} else {
ViewParent parent = mRecyclerView.getParent();
if (parent instanceof View) {
initialSize = ((View) parent).getMeasuredHeight();
}
}
}
int measuredSize = mSpaceMeasured ? mMeasuredFullSpace : initialSize;
if (mNoScrolling) {
mSpaceMeasuring = !mSpaceMeasured;
if (getChildCount() > 0 || getChildCount() != getItemCount()) {
View lastChild = getChildAt(getChildCount() - 1);
int bottom = mMeasuredFullSpace;
if (lastChild != null) {
RecyclerView.LayoutParams params = (RecyclerView.LayoutParams) lastChild.getLayoutParams();
bottom = getDecoratedBottom(lastChild) + params.bottomMargin + computeAlignOffset(lastChild, true, false);
}
if (getChildCount() != getItemCount() || (lastChild != null && bottom != mMeasuredFullSpace)) {
measuredSize = MAX_NO_SCROLLING_SIZE;
mSpaceMeasured = false;
mSpaceMeasuring = true;
}
} else if (getItemCount() == 0) {
measuredSize = 0;
mSpaceMeasured = true;
mSpaceMeasuring = false;
}
}
if (getOrientation() == VERTICAL) {
super.onMeasure(recycler, state, widthSpec, View.MeasureSpec.makeMeasureSpec(measuredSize, View.MeasureSpec.AT_MOST));
} else {
super.onMeasure(recycler, state, View.MeasureSpec.makeMeasureSpec(measuredSize, View.MeasureSpec.AT_MOST), heightSpec);
}
}
}
|
apache-2.0
|
nutzam/nutzwx
|
src/main/java/org/nutz/weixin/impl/BasicWxHandler.java
|
1422
|
package org.nutz.weixin.impl;
import org.nutz.ioc.impl.PropertiesProxy;
import org.nutz.lang.Strings;
import org.nutz.weixin.repo.com.qq.weixin.mp.aes.AesException;
import org.nutz.weixin.repo.com.qq.weixin.mp.aes.WXBizMsgCrypt;
import org.nutz.weixin.util.Wxs;
public class BasicWxHandler extends AbstractWxHandler {
protected String token;
protected String aeskey;
protected WXBizMsgCrypt msgCrypt;
protected String appid;
protected BasicWxHandler() {}
public BasicWxHandler(String token) {
this.token = token;
}
public BasicWxHandler(String token, String aeskey, String appid) {
super();
this.token = token;
this.aeskey = aeskey;
this.appid = appid;
}
public boolean check(String signature, String timestamp, String nonce, String key) {
return Wxs.check(token, signature, timestamp, nonce);
}
public WXBizMsgCrypt getMsgCrypt() {
if (msgCrypt == null)
try {
msgCrypt = new WXBizMsgCrypt(token, aeskey, appid);
}
catch (AesException e) {
throw new RuntimeException(e);
}
return msgCrypt;
}
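// Illustrative configuration (keys inferred from configure() below, values hypothetical):
// with prefix "wx.", the properties would contain wx.token=..., wx.aes=..., wx.appid=...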
public BasicWxHandler configure(PropertiesProxy conf, String prefix){
prefix = Strings.sBlank(prefix);
token = conf.check(prefix+"token");
aeskey = conf.get(prefix+"aes");
appid = conf.get(prefix+"appid");
return this;
}
}
|
apache-2.0
|
sdgdsffdsfff/tddl
|
tddl-repo-oceanbase/src/main/java/com/taobao/tddl/repo/oceanbase/handler/ObCommandHandlerFactory.java
|
271
|
package com.taobao.tddl.repo.oceanbase.handler;
import com.taobao.tddl.repo.mysql.handler.CommandHandlerFactoryMyImp;
/**
* @author dreamond Jan 9, 2014, 5:00:51 PM
* @since 5.1.0
*/
public class ObCommandHandlerFactory extends CommandHandlerFactoryMyImp {
}
|
apache-2.0
|
ecologylab/BigSemanticsJava
|
BigSemanticsCore/src/ecologylab/bigsemantics/tools/GenericIterable.java
|
1145
|
/**
*
*/
package ecologylab.bigsemantics.tools;
import java.util.ArrayList;
import java.util.Iterator;
import org.w3c.dom.NodeList;
/**
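* Usage sketch (hypothetical, not part of the original class): wrap either a
* {@code NodeList} or a plain {@code ArrayList}/{@code Iterable} and iterate uniformly,
* e.g. {@code for (Object node : new GenericIterable(nodeList)) { ... }}.
*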
* @author amathur
*
*/
public class GenericIterable implements Iterable
{
Object collectionObject;
public GenericIterable(Object collectionObject)
{
this.collectionObject=collectionObject;
}
@Override
public Iterator iterator()
{
// if this is an instance of NodeList
if(collectionObject instanceof NodeList )
{
// need to do some thing
return new NodeListIterator((NodeList)collectionObject);
}
else
{
// the simple case :-)
return ((Iterable)collectionObject).iterator();
}
}
public int size()
{
if(collectionObject instanceof NodeList)
{
return ((NodeList)collectionObject).getLength();
}
else
return ((ArrayList)collectionObject).size();
}
public Object get(int i)
{
if(collectionObject instanceof NodeList)
{
return ((NodeList)collectionObject).item(i);
}
else
return ((ArrayList)collectionObject).get(i);
}
}
|
apache-2.0
|
software-engineering-amsterdam/poly-ql
|
OmarPakker/Source/QL_GOLD_C-Sharp/Algebra.QL.Eval/Expr/DivideExpr.cs
|
466
|
using System;
using Algebra.QL.Eval.Value;
namespace Algebra.QL.Eval.Expr
{
public class DivideExpr : BinaryExpr, IEvalExpr
{
public DivideExpr(IEvalExpr l, IEvalExpr r)
: base(l, r)
{
}
protected override object Evaluate(ValueContainer expr1Value, ValueContainer expr2Value)
{
return Convert.ToDouble(expr1Value.Value) / Convert.ToDouble(expr2Value.Value);
}
}
}
|
apache-2.0
|
astopy/Panoptes
|
lib/role_control/controlled.rb
|
1491
|
module RoleControl
module Controlled
extend ActiveSupport::Concern
included do
@roles_for = Hash.new
scope :public_scope, -> { where(private: false) }
scope :private_scope, -> { where(private: true) }
end
module ClassMethods
def can_by_role(*actions,
roles: [],
public: false)
actions.each do |action|
@roles_for[action] = [roles,
public]
end
end
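# Usage sketch for can_by_role above (hypothetical model, not from the Panoptes code):
#
#   can_by_role :show, :index, roles: [:collaborator, :viewer], public: true
#
# stores [[:collaborator, :viewer], true] under both :show and :index in @roles_for,
# which scope_for later unpacks via roles(action).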
def memberships_query(action, target)
target.memberships_for(action, self)
end
def private_query(action, target, roles)
AccessControlList.joins(user_group: :memberships)
.select(:resource_id)
.merge(memberships_query(action, target))
.where(resource_type: name)
.where.overlap(roles: roles)
end
def public_query(private_query, public_flag)
query = where(id: private_query.pluck(:resource_id))
query = query.or(public_scope) if public_flag
query
end
def scope_for(action, user, opts={})
roles, public_flag = roles(action)
case
when user.is_admin?
all
when user.logged_in?
public_query(private_query(action, user, roles), public_flag)
when public_flag
public_scope
else
none
end
end
def roles(action)
@roles_for[action]
end
end
end
end
|
apache-2.0
|
kingston-csj/jforgame
|
jforgame-server/src/main/java/jforgame/server/cross/demo/HelloCallbackHandler.java
|
595
|
package jforgame.server.cross.demo;
import jforgame.server.cross.core.callback.G2FCallBack;
import jforgame.server.cross.core.callback.CallBackCommands;
import jforgame.server.cross.core.callback.CallbackHandler;
import jforgame.server.cross.core.server.SCSession;
public class HelloCallbackHandler extends CallbackHandler {
@Override
public void onRequest(SCSession session, G2FCallBack req) {
F2GHeartBeat response = new F2GHeartBeat();
sendBack(session, req, response);
}
@Override
public int cmdType() {
return CallBackCommands.HELLO;
}
}
|
apache-2.0
|
ilivoo/ilivoo
|
coretechnology/src/main/java/com/ilivoo/ch4/ch10/ClassDesignTest.java
|
1714
|
package com.ilivoo.ch4.ch10;
/**
 * 1. Always keep data private: this is the most important rule; never break encapsulation. Sometimes an
 *    accessor or mutator method is needed, but instance fields are still best kept private. Painful
 *    experience shows that the representation of data is likely to change, while the way it is used rarely
 *    does. When data is private, a change of representation does not affect users of the class, and bugs
 *    are easier to detect.
 * 2. Always initialize data: Java does not initialize local variables, but it does initialize instance
 *    fields. Do not rely on the system defaults; explicitly initialize all data, either by supplying
 *    default values or by setting them in every constructor.
 * 3. Do not use too many basic types in a class: replace several related basic types with another class,
 *    which makes the class easier to understand. For example, several related fields should be wrapped
 *    into an Address class.
 * 4. Not all fields need individual accessors and mutators: often a field must not change once the object
 *    has been created, and then no mutator should be provided for it.
 * 5. Break up classes with too many responsibilities: if a complex class can obviously be split into two
 *    simpler classes, split it.
 * 6. Make methods as small as possible: decompose methods so that each is an atomic operation, and build
 *    complex methods out of those atomic operations. Extracting methods should be routine while
 *    refactoring; it simplifies the class implementation and makes it easier to understand.
 * 7. Extracting and refactoring classes simplifies the type hierarchy, makes responsibilities clearer, and
 *    makes the classes more convenient for their users.
 * 8. Class and method names should reflect their responsibilities: just as a variable should have a name
 *    that reflects its meaning, so should a class. A good convention for class names is a noun modified by
 *    an adjective or gerund, e.g. RushOrder or BillingAddress. For methods, accessors conventionally start
 *    with lowercase get and mutators with lowercase set (for Boolean fields the convention differs).
 */
public class ClassDesignTest {
}
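// A minimal sketch illustrating hints 3 and 4 above; the Address class and its field names are
// illustrative assumptions, not part of any library: several related basic types are wrapped into
// one class, and since the fields never change after construction only accessors are provided.
class Address {
    private final String street;
    private final String city;
    private final String zipCode;

    Address(String street, String city, String zipCode) {
        this.street = street;
        this.city = city;
        this.zipCode = zipCode;
    }

    String getStreet() { return street; }
    String getCity() { return city; }
    String getZipCode() { return zipCode; }
}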
|
apache-2.0
|
antho2930/bowhead
|
app/Console/Commands/FxStreamCommand.php
|
2973
|
<?php
namespace Bowhead\Console\Commands;
use Bowhead\Traits\OHLC;
use Bowhead\Util\Console;
use Illuminate\Console\Command;
use SimpleXMLElement;
class FxStreamCommand extends Command
{
use OHLC;
/**
* The console command name.
*
* @var string
*/
protected $name = 'bowhead:fx_stream';
/**
* The console command description.
*
* @var string
*/
protected $description = 'The Fx stream processor.';
/**
* Execute the console command.
*
* @return void
*/
public function handle()
{
$console = new Console();
$oneforgekey = env('ONEFORGE_API');
$instruments = ['USDJPY','EURUSD','AUDUSD','EURGBP','USDCAD','USDCHF','USDMXN','USDTRY','USDCNH','NZDUSD'];
$trans = [
'USDJPY' => 'USD_JPY'
,'EURUSD' => 'EUR_USD'
,'AUDUSD' => 'AUD_USD'
,'EURGBP' => 'EUR_GBP'
,'USDCAD' => 'USD_CAD'
,'USDCHF' => 'USD_CHF'
,'USDMXN' => 'USD_MXN'
,'USDTRY' => 'USD_TRY'
,'USDCNH' => 'USD_CNH'
,'NZDUSD' => 'NZD_USD'
];
stream_set_blocking(STDIN, 0);
$output = $last = [];
while(1){
if (ord(fgetc(STDIN)) == 113) {
echo "QUIT detected...";
return null;
}
$data = file_get_contents('http://rates.fxcm.com/RatesXML');
$fixed = new SimpleXMLElement($data);
foreach ($fixed as $fx) {
$symbol = (string) $fx['Symbol'];
$symbolt = $trans[$symbol] ?? null;
if (!in_array($symbol, $instruments)) {
continue;
}
$ticker = [];
$ticker['tick']['bid'] = round(((float) $fx->Bid + (float) $fx->Ask) / 2, 5);
$ticker['tick']['instrument'] = $symbol;
$this->markOHLC(json_encode($ticker));
$ins = $ticker['tick']['instrument'];
$curr[$ins] = $ticker['tick']['bid'];
foreach ($curr as $instrument => $bid) {
if ($curr[$instrument] > ($last[$instrument] ?? 0)) {
$output[$instrument] = $console->colorize(str_pad($instrument . " " . round($curr[$instrument], 3), 14), 'green', 'bold');
} elseif ($curr[$instrument] < ($last[$instrument] ?? 0)) {
$output[$instrument] = $console->colorize(str_pad($instrument . " " . round($curr[$instrument], 3), 14), 'bg_red', 'bold');
} else {
$output[$instrument] = $console->colorize(str_pad($instrument . " " . round($curr[$instrument], 3), 14), 'none');
}
}
$last = $curr;
}
// for cool output uncomment
echo join(' | ', $output) ."\n";
sleep(15);
}
}
}
|
apache-2.0
|
REVLWorld/elasticsearch-dsl-py
|
elasticsearch_dsl/search.py
|
22718
|
from six import iteritems, string_types
from elasticsearch.helpers import scan
from elasticsearch.exceptions import TransportError
from .query import Q, EMPTY_QUERY, Bool
from .aggs import A, AggBase
from .utils import DslBase
from .result import Response, Result, SuggestResponse
from .connections import connections
class QueryProxy(object):
"""
Simple proxy around DSL objects (queries) that can be called
(to add query/post_filter) and also allows attribute access which is proxied to
the wrapped query.
"""
def __init__(self, search, attr_name):
self._search = search
self._proxied = EMPTY_QUERY
self._attr_name = attr_name
def __nonzero__(self):
return self._proxied != EMPTY_QUERY
__bool__ = __nonzero__
def __call__(self, *args, **kwargs):
s = self._search._clone()
getattr(s, self._attr_name)._proxied += Q(*args, **kwargs)
# always return search to be chainable
return s
def __getattr__(self, attr_name):
return getattr(self._proxied, attr_name)
def __setattr__(self, attr_name, value):
if not attr_name.startswith('_'):
self._proxied = Q(self._proxied.to_dict())
setattr(self._proxied, attr_name, value)
super(QueryProxy, self).__setattr__(attr_name, value)
class ProxyDescriptor(object):
"""
Simple descriptor to enable setting of queries and filters as:
s = Search()
s.query = Q(...)
"""
def __init__(self, name):
self._attr_name = '_%s_proxy' % name
def __get__(self, instance, owner):
return getattr(instance, self._attr_name)
def __set__(self, instance, value):
proxy = getattr(instance, self._attr_name)
proxy._proxied = Q(value)
class AggsProxy(AggBase, DslBase):
name = 'aggs'
def __init__(self, search):
self._base = self._search = search
self._params = {'aggs': {}}
def to_dict(self):
return super(AggsProxy, self).to_dict().get('aggs', {})
class Request(object):
def __init__(self, using='default', index=None, doc_type=None, extra=None):
self._using = using
self._index = None
if isinstance(index, (tuple, list)):
self._index = list(index)
elif index:
self._index = [index]
self._doc_type = []
self._doc_type_map = {}
if isinstance(doc_type, (tuple, list)):
for dt in doc_type:
self._add_doc_type(dt)
elif isinstance(doc_type, dict):
self._doc_type.extend(doc_type.keys())
self._doc_type_map.update(doc_type)
elif doc_type:
self._add_doc_type(doc_type)
self._params = {}
self._extra = extra or {}
def params(self, **kwargs):
"""
Specify query params to be used when executing the search. All the
keyword arguments will override the current values. See
https://elasticsearch-py.readthedocs.io/en/master/api.html#elasticsearch.Elasticsearch.search
for all available parameters.
Example::
s = Search()
s = s.params(routing='user-1', preference='local')
"""
s = self._clone()
s._params.update(kwargs)
return s
def index(self, *index):
"""
Set the index for the search. If called empty it will remove all information.
Example:
s = Search()
s = s.index('twitter-2015.01.01', 'twitter-2015.01.02')
"""
# .index() resets
s = self._clone()
if not index:
s._index = None
else:
s._index = (self._index or []) + list(index)
return s
def _add_doc_type(self, doc_type):
if hasattr(doc_type, '_doc_type'):
self._doc_type_map[doc_type._doc_type.name] = doc_type.from_es
doc_type = doc_type._doc_type.name
self._doc_type.append(doc_type)
def doc_type(self, *doc_type, **kwargs):
"""
Set the type to search through. You can supply a single value or
multiple. Values can be strings or subclasses of ``DocType``.
You can also pass in any keyword arguments, mapping a doc_type to a
callback that should be used instead of the Result class.
If no doc_type is supplied any information stored on the instance will
be erased.
Example:
s = Search().doc_type('product', 'store', User, custom=my_callback)
"""
# .doc_type() resets
s = self._clone()
if not doc_type and not kwargs:
s._doc_type = []
s._doc_type_map = {}
else:
for dt in doc_type:
s._add_doc_type(dt)
s._doc_type.extend(kwargs.keys())
s._doc_type_map.update(kwargs)
return s
def using(self, client):
"""
Associate the search request with an elasticsearch client. A fresh copy
will be returned with current instance remaining unchanged.
:arg client: an instance of ``elasticsearch.Elasticsearch`` to use or
an alias to look up in ``elasticsearch_dsl.connections``
"""
s = self._clone()
s._using = client
return s
def extra(self, **kwargs):
"""
Add extra keys to the request body. Mostly here for backwards
compatibility.
"""
s = self._clone()
if 'from_' in kwargs:
kwargs['from'] = kwargs.pop('from_')
s._extra.update(kwargs)
return s
def _clone(self):
s = self.__class__(using=self._using, index=self._index,
doc_type=self._doc_type)
s._doc_type_map = self._doc_type_map.copy()
s._extra = self._extra.copy()
s._params = self._params.copy()
return s
class Search(Request):
query = ProxyDescriptor('query')
post_filter = ProxyDescriptor('post_filter')
def __init__(self, **kwargs):
"""
Search request to elasticsearch.
:arg using: `Elasticsearch` instance to use
:arg index: limit the search to index
:arg doc_type: only query this type.
All the parameters supplied (or omitted) at creation time can be later
overridden by methods (`using`, `index` and `doc_type` respectively).
"""
super(Search, self).__init__(**kwargs)
self.aggs = AggsProxy(self)
self._sort = []
self._source = None
self._fields = None
self._partial_fields = {}
self._highlight = {}
self._highlight_opts = {}
self._suggest = {}
self._script_fields = {}
self._response_class = Response
self._query_proxy = QueryProxy(self, 'query')
self._post_filter_proxy = QueryProxy(self, 'post_filter')
def filter(self, *args, **kwargs):
return self.query(Bool(filter=[Q(*args, **kwargs)]))
def __iter__(self):
"""
Iterate over the hits.
"""
return iter(self.execute())
def __getitem__(self, n):
"""
Support slicing the `Search` instance for pagination.
Slicing equates to the from/size parameters. E.g.::
s = Search().query(...)[0:25]
is equivalent to::
s = Search().query(...).extra(from_=0, size=25)
"""
s = self._clone()
if isinstance(n, slice):
# If negative slicing, abort.
if n.start and n.start < 0 or n.stop and n.stop < 0:
raise ValueError("Search does not support negative slicing.")
# Elasticsearch won't get all results so we default to size: 10 if
# stop not given.
s._extra['from'] = n.start or 0
s._extra['size'] = n.stop - (n.start or 0) if n.stop is not None else 10
return s
else: # This is an index lookup, equivalent to slicing by [n:n+1].
# If negative index, abort.
if n < 0:
raise ValueError("Search does not support negative indexing.")
s._extra['from'] = n
s._extra['size'] = 1
return s
@classmethod
def from_dict(cls, d):
"""
Construct a new `Search` instance from a raw dict containing the search
body. Useful when migrating from raw dictionaries.
Example::
s = Search.from_dict({
"query": {
"bool": {
"must": [...]
}
},
"aggs": {...}
})
s = s.filter('term', published=True)
"""
s = cls()
s.update_from_dict(d)
return s
def _clone(self):
"""
Return a clone of the current search request. Performs a shallow copy
of all the underlying objects. Used internally by most state modifying
APIs.
"""
s = super(Search, self)._clone()
s._response_class = self._response_class
s._sort = self._sort[:]
s._source = self._source.copy() if self._source else None
s._fields = self._fields[:] if self._fields else None
s._partial_fields = self._partial_fields.copy()
s._highlight = self._highlight.copy()
s._highlight_opts = self._highlight_opts.copy()
s._suggest = self._suggest.copy()
s._script_fields = self._script_fields.copy()
for x in ('query', 'post_filter'):
getattr(s, x)._proxied = getattr(self, x)._proxied
# copy top-level bucket definitions
if self.aggs._params.get('aggs'):
s.aggs._params = {'aggs': self.aggs._params['aggs'].copy()}
return s
def response_class(self, cls):
"""
Override the default wrapper used for the response.
"""
s = self._clone()
s._response_class = cls
return s
def update_from_dict(self, d):
"""
Apply options from a serialized body to the current instance. Modifies
the object in-place. Used mostly by ``from_dict``.
"""
d = d.copy()
if 'query' in d:
self.query._proxied = Q(d.pop('query'))
if 'post_filter' in d:
self.post_filter._proxied = Q(d.pop('post_filter'))
aggs = d.pop('aggs', d.pop('aggregations', {}))
if aggs:
self.aggs._params = {
'aggs': dict(
(name, A(value)) for (name, value) in iteritems(aggs))
}
if 'sort' in d:
self._sort = d.pop('sort')
if '_source' in d:
self._source = d.pop('_source')
if 'fields' in d:
self._fields = d.pop('fields')
if 'partial_fields' in d:
self._partial_fields = d.pop('partial_fields')
if 'highlight' in d:
high = d.pop('highlight').copy()
self._highlight = high.pop('fields')
self._highlight_opts = high
if 'suggest' in d:
self._suggest = d.pop('suggest')
if 'text' in self._suggest:
text = self._suggest.pop('text')
for s in self._suggest.values():
s.setdefault('text', text)
if 'script_fields' in d:
self._script_fields = d.pop('script_fields')
self._extra = d
return self
def script_fields(self, **kwargs):
"""
Define script fields to be calculated on hits. See
https://www.elastic.co/guide/en/elasticsearch/reference/current/search-request-script-fields.html
for more details.
Example::
s = Search()
s = s.script_fields(times_two="doc['field'].value * 2")
s = s.script_fields(
times_three={
'script': "doc['field'].value * n",
'params': {'n': 3}
}
)
"""
s = self._clone()
for name in kwargs:
if isinstance(kwargs[name], string_types):
kwargs[name] = {'script': kwargs[name]}
s._script_fields.update(kwargs)
return s
def source(self, **kwargs):
"""
Selectively control how the _source field is returned.
:arg source: wildcard string, array of wildcards, or dictionary of includes and excludes
If ``source`` is None, the entire document will be returned for
each hit. If source is a dictionary with keys of 'include' and/or
'exclude' the fields will be either included or excluded appropriately.
Calling this multiple times with the same named parameter will override the
previous values with the new ones.
Example::
s = Search()
s = s.source(include=['obj1.*'], exclude=["*.description"])
s = Search()
s = s.source(include=['obj1.*']).source(exclude=["*.description"])
"""
s = self._clone()
if s._source is None:
s._source = {}
for key, value in kwargs.items():
if value is None:
try:
del s._source[key]
except KeyError:
pass
else:
s._source[key] = value
return s
def fields(self, fields=None):
"""
Selectively load specific stored fields for each document.
:arg fields: list of fields to return for each document
If ``fields`` is None, the entire document will be returned for
each hit. If fields is the empty list, no fields will be
returned for each hit, just the metadata.
"""
s = self._clone()
s._fields = fields
return s
def partial_fields(self, **partial):
"""
Control which part of the fields to extract from the `_source` document
:kwargs partial: dict specifying which fields to extract from the source
An example usage would be:
s = Search().partial_fields(authors_data={
'include': ['authors.*'],
'exclude': ['authors.name']
})
which will include all fields from the `authors` nested property except for
each author's `name`
If ``partial`` is not provided, the whole `_source` will be fetched. Calling this multiple
times will override the previous values with the new ones.
"""
s = self._clone()
s._partial_fields = partial
return s
def sort(self, *keys):
"""
Add sorting information to the search request. If called without
arguments it will remove all sort requirements. Otherwise it will
replace them. Acceptable arguments are::
'some.field'
'-some.other.field'
{'different.field': {'any': 'dict'}}
so for example::
s = Search().sort(
'category',
'-title',
{"price" : {"order" : "asc", "mode" : "avg"}}
)
will sort by ``category``, ``title`` (in descending order) and
``price`` in ascending order using the ``avg`` mode.
The API returns a copy of the Search object and can thus be chained.
"""
s = self._clone()
s._sort = []
for k in keys:
if isinstance(k, string_types) and k.startswith('-'):
k = {k[1:]: {"order": "desc"}}
s._sort.append(k)
return s
def highlight_options(self, **kwargs):
"""
Update the global highlighting options used for this request. For
example::
s = Search()
s = s.highlight_options(order='score')
"""
s = self._clone()
s._highlight_opts.update(kwargs)
return s
def highlight(self, *fields, **kwargs):
"""
Request highlighting of some fields. All keyword arguments passed in will be
used as parameters. Example::
Search().highlight('title', 'body', fragment_size=50)
will produce the equivalent of::
{
"highlight": {
"fields": {
"body": {"fragment_size": 50},
"title": {"fragment_size": 50}
}
}
}
"""
s = self._clone()
for f in fields:
s._highlight[f] = kwargs
return s
def suggest(self, name, text, **kwargs):
"""
Add a suggestions request to the search.
:arg name: name of the suggestion
:arg text: text to suggest on
All keyword arguments will be added to the suggestions body. For example::
s = Search()
s = s.suggest('suggestion-1', 'Elasticsearch', term={'field': 'body'})
"""
s = self._clone()
s._suggest[name] = {'text': text}
s._suggest[name].update(kwargs)
return s
def to_dict(self, count=False, **kwargs):
"""
Serialize the search into the dictionary that will be sent over as the
request's body.
:arg count: a flag to specify we are interested in a body for count -
no aggregations, no pagination bounds etc.
All additional keyword arguments will be included into the dictionary.
"""
d = {"query": self.query.to_dict()}
if self.post_filter:
d['post_filter'] = self.post_filter.to_dict()
# count request doesn't care for sorting and other things
if not count:
if self.aggs.aggs:
d.update(self.aggs.to_dict())
if self._sort:
d['sort'] = self._sort
d.update(self._extra)
if self._source:
d['_source'] = self._source
if self._fields is not None:
d['fields'] = self._fields
if self._partial_fields:
d['partial_fields'] = self._partial_fields
if self._highlight:
d['highlight'] = {'fields': self._highlight}
d['highlight'].update(self._highlight_opts)
if self._suggest:
d['suggest'] = self._suggest
if self._script_fields:
d['script_fields'] = self._script_fields
d.update(kwargs)
return d
def count(self):
"""
Return the number of hits matching the query and filters. Note that
only the actual number is returned.
"""
if hasattr(self, '_response'):
return self._response.hits.total
es = connections.get_connection(self._using)
d = self.to_dict(count=True)
# TODO: failed shards detection
return es.count(
index=self._index,
doc_type=self._doc_type,
body=d,
**self._params
)['count']
def execute(self, ignore_cache=False):
"""
Execute the search and return an instance of ``Response`` wrapping all
the data.
:arg response_class: optional subclass of ``Response`` to use instead.
"""
if ignore_cache or not hasattr(self, '_response'):
es = connections.get_connection(self._using)
self._response = self._response_class(
es.search(
index=self._index,
doc_type=self._doc_type,
body=self.to_dict(),
**self._params
),
callbacks=self._doc_type_map
)
return self._response
def execute_suggest(self):
"""
Execute just the suggesters. Ignores all parts of the request that are
not relevant, including ``query`` and ``doc_type``.
"""
es = connections.get_connection(self._using)
return SuggestResponse(
es.suggest(
index=self._index,
body=self._suggest,
**self._params
)
)
def scan(self):
"""
Turn the search into a scan search and return a generator that will
iterate over all the documents matching the query.
Use ``params`` method to specify any additional arguments you wish to
pass to the underlying ``scan`` helper from ``elasticsearch-py`` -
https://elasticsearch-py.readthedocs.io/en/master/helpers.html#elasticsearch.helpers.scan
"""
es = connections.get_connection(self._using)
for hit in scan(
es,
query=self.to_dict(),
index=self._index,
doc_type=self._doc_type,
**self._params
):
yield self._doc_type_map.get(hit['_type'], Result)(hit)
class MultiSearch(Request):
def __init__(self, **kwargs):
super(MultiSearch, self).__init__(**kwargs)
self._searches = []
def __getitem__(self, key):
return self._searches[key]
def __iter__(self):
return iter(self._searches)
def _clone(self):
ms = super(MultiSearch, self)._clone()
ms._searches = self._searches[:]
return ms
def add(self, search):
ms = self._clone()
ms._searches.append(search)
return ms
def to_dict(self):
out = []
for s in self._searches:
meta = {}
if s._index:
meta['index'] = s._index
if s._doc_type:
meta['type'] = s._doc_type
meta.update(s._params)
out.append(meta)
out.append(s.to_dict())
return out
def execute(self, ignore_cache=False, raise_on_error=True):
if ignore_cache or not hasattr(self, '_response'):
es = connections.get_connection(self._using)
responses = es.msearch(
index=self._index,
doc_type=self._doc_type,
body=self.to_dict(),
**self._params
)
out = []
for s, r in zip(self._searches, responses['responses']):
if r.get('error', False):
print(r)
if raise_on_error:
raise TransportError('N/A', r['error']['type'], r['error'])
r = None
else:
r = Response(r, callbacks=s._doc_type_map)
r.search = s
out.append(r)
self._response = out
return self._response
|
apache-2.0
|
inovex/rest-client
|
restclient-ui/src/main/java/org/wiztools/restclient/ui/ISessionView.java
|
257
|
package org.wiztools.restclient.ui;
import org.wiztools.restclient.Request;
import org.wiztools.restclient.Response;
/**
*
* @author subwiz
*/
interface ISessionView {
public void add(Request request, Response response);
public void clear();
}
|
apache-2.0
|
jnthnclt/nicity
|
nicity-view/src/main/java/com/jonathancolt/nicity/view/core/DragAndDrop.java
|
5666
|
/*
* DragAndDrop.java.java
*
* Created on 01-03-2010 01:31:35 PM
*
* Copyright 2010 Jonathan Colt
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.jonathancolt.nicity.view.core;
/*
* #%L
* nicity-view
* %%
* Copyright (C) 2013 Jonathan Colt
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import com.jonathancolt.nicity.view.event.AInputEvent;
import com.jonathancolt.nicity.view.event.MouseDragged;
import com.jonathancolt.nicity.view.event.MouseEntered;
import com.jonathancolt.nicity.view.event.MouseExited;
import com.jonathancolt.nicity.view.event.MousePressed;
import com.jonathancolt.nicity.view.event.MouseReleased;
import com.jonathancolt.nicity.view.interfaces.IDrag;
import com.jonathancolt.nicity.view.interfaces.IDrop;
import com.jonathancolt.nicity.view.interfaces.IDropMode;
import com.jonathancolt.nicity.view.interfaces.IMouseMotionEvents;
import com.jonathancolt.nicity.view.interfaces.IView;
// To effectively drag and drop, the two components must share a common DragAndDrop!
// Since the idea of multiple concurrent drag and drops with a single user is unusual,
// if not impossible, the most common thing to do is use the static final cDefault.
// However, I left DragAndDrop's constructor public so you can create multiple instances,
// which, when distributed correctly, allow a convenient way to partition drag and drops.
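// A minimal usage sketch under the assumptions implied by this file (the event wiring shown is
// illustrative only): both the drag source and the drop target forward their mouse events to the
// same DragAndDrop instance, typically the shared cDefault.
//
//   DragAndDrop dnd = DragAndDrop.cDefault;      // shared by both views
//   // DragAndDrop dnd = new DragAndDrop();      // or a private partition
//   // in the IDrag view:  dnd.mousePressed(e);  dnd.mouseDragged(e);
//   // in the IDrop view:  dnd.mouseEntered(e);  dnd.mouseExited(e);  dnd.mouseReleased(e);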
/**
*
* @author Administrator
*/
public class DragAndDrop {
/**
*
*/
public static final DragAndDrop cDefault = new DragAndDrop();
private IDrag dragFrom = null;
private IDrop dropOn = null;
private boolean dragged = false;
/**
*
*/
protected IView scrollView = NullView.cNull;
/**
*
*/
public DragAndDrop() {
}
/**
*
* @param _view
* @return
*/
public boolean isDragFrom(IView _view) {
if (!dragged) {
return false;
}
return _view == dragFrom;
}
/**
*
* @param _view
* @return
*/
public boolean isDropOn(IView _view) {
return _view == dropOn;
}
/**
*
* @param _scrollView
*/
public void setScrollView(IView _scrollView) {
scrollView = (_scrollView == null) ? NullView.cNull : _scrollView;
}
/**
*
* @param e
*/
public void mousePressed(MousePressed e) {
Object source = e.getSource();
if (!(source instanceof IDrag)) {
return;
}
dragFrom = (IDrag) source;
if (source instanceof IView) {
((IView) source).paint();
}
}
/**
*
* @param e
*/
public void mouseReleased(MouseReleased e) {
if (e.getClickCount() == 1) {
IDropMode mode = canDeliver(e, dropOn);
if (mode != null) {
mode.drop(dropOn, dragFrom.getParcel(), e);
}
Object _dropOn = dropOn;
Object _dragFrom = dragFrom;
dropOn = null;
dragFrom = null;
if (_dropOn instanceof IView) {
((IView) _dropOn).paint();
}
if (_dragFrom instanceof IView) {
((IView) _dragFrom).paint();
}
dragged = false;
scrollView = NullView.cNull;
AInput.isDragging = false;
}
}
/**
*
* @param e
*/
public void mouseEntered(MouseEntered e) {
Object source = e.getSource();
if (dragFrom != null && source instanceof IDrop && source != dragFrom && canDeliver(e, source) != null) {
dropOn = (IDrop) source;
if (source instanceof IView) {
((IView) source).paint();
}
}
}
/**
*
* @param e
*/
public void mouseExited(MouseExited e) {
Object _dropOn = dropOn;
dropOn = null;
if (_dropOn instanceof IView) {
((IView) _dropOn).paint();
}
}
/**
*
* @param e
*/
public void mouseDragged(MouseDragged e) {
if (scrollView != NullView.cNull && scrollView instanceof IMouseMotionEvents) {
((IMouseMotionEvents) scrollView).mouseDragged(e);
}
if (e.getSumDeltaX() < 5 && e.getSumDeltaY() < 5) {
return;
}
dragged = true;
AInput.isDragging = true;
}
private IDropMode canDeliver(AInputEvent e, Object _address) {
if (!(_address instanceof IDrop) || dragFrom == null) {
return null;
}
return ((IDrop) _address).accepts(dragFrom.getParcel(), e);
}
}
|
apache-2.0
|
getquill/quill
|
quill-core/src/main/scala/io/getquill/quat/package.scala
|
224
|
package io.getquill
import scala.language.experimental.macros
/**
* Convenience API that allows construction of a Quat using `Quat.from[T]`
*/
package object quat {
def quatOf[T]: Quat = macro QuatMacro.makeQuat[T]
}
|
apache-2.0
|
JetBrains/intellij-scala
|
scala/scala-impl/src/org/jetbrains/plugins/scala/lang/formatting/scalafmt/ScalaFmtDisabledIndentRangesProvider.scala
|
815
|
package org.jetbrains.plugins.scala.lang.formatting.scalafmt
import java.util
import com.intellij.openapi.util.TextRange
import com.intellij.psi.PsiElement
import com.intellij.psi.impl.source.DisabledIndentRangesProvider
import org.jetbrains.plugins.scala.lang.formatting.settings.ScalaCodeStyleSettings
class ScalaFmtDisabledIndentRangesProvider extends DisabledIndentRangesProvider {
override def getDisabledIndentRanges(element: PsiElement): util.Collection[TextRange] = {
val project = element.getProject
//Do not try to fix indents after formatting - ScalaFmtPreformatProcessor delegates all the work to scalafmt
import scala.jdk.CollectionConverters._
if (ScalaCodeStyleSettings.getInstance(project).USE_SCALAFMT_FORMATTER()) Seq(element.getTextRange).asJava
else Seq().asJava
}
}
|
apache-2.0
|
shiver-me-timbers/smt-http-mock-parent
|
smt-http-mock/src/test/java/shiver/me/timbers/http/mock/HttpMockTest.java
|
1876
|
/*
* Copyright 2016 Karl Bennett
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package shiver.me.timbers.http.mock;
import org.junit.Test;
import shiver.me.timbers.http.Header;
import shiver.me.timbers.http.Headers;
import java.util.HashSet;
import static java.util.Arrays.asList;
import static org.hamcrest.Matchers.equalTo;
import static org.junit.Assert.assertThat;
import static org.mockito.Mockito.mock;
import static shiver.me.timbers.data.random.RandomStrings.someString;
import static shiver.me.timbers.http.mock.HttpMock.h;
import static shiver.me.timbers.http.mock.HttpMock.headers;
public class HttpMockTest {
@Test
public void Creation_just_to_get_coverage() {
new HttpMock();
}
@Test
public void Can_create_a_header() {
// Given
final String name = someString();
final String value = someString();
// When
final Header actual = h(name, value);
// Then
assertThat(actual, equalTo(new Header(name, value)));
}
@Test
public void Can_create_some_headers() {
// Given
final Header[] headers = {mock(Header.class), mock(Header.class), mock(Header.class)};
// When
final Headers actual = headers(headers);
// Then
assertThat(actual, equalTo(new Headers(new HashSet<>(asList(headers)))));
}
}
|
apache-2.0
|
aspnet/AspNetCore
|
src/Mvc/test/WebSites/FormatterWebSite/StringInputFormatter.cs
|
1128
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System.IO;
using System.Text;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.Mvc.Formatters;
using Microsoft.Net.Http.Headers;
namespace FormatterWebSite
{
public class StringInputFormatter : TextInputFormatter
{
public StringInputFormatter()
{
SupportedMediaTypes.Add(MediaTypeHeaderValue.Parse("text/plain"));
SupportedEncodings.Add(Encoding.UTF8);
SupportedEncodings.Add(Encoding.Unicode);
}
public override async Task<InputFormatterResult> ReadRequestBodyAsync(InputFormatterContext context, Encoding effectiveEncoding)
{
var request = context.HttpContext.Request;
using (var reader = new StreamReader(request.Body, effectiveEncoding))
{
var stringContent = await reader.ReadToEndAsync();
return await InputFormatterResult.SuccessAsync(stringContent);
}
}
}
}
|
apache-2.0
|
googleapis/java-containeranalysis
|
proto-google-cloud-containeranalysis-v1/src/main/java/com/google/containeranalysis/v1/VulnerabilityOccurrencesSummaryOrBuilder.java
|
2969
|
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/devtools/containeranalysis/v1/containeranalysis.proto
package com.google.containeranalysis.v1;
public interface VulnerabilityOccurrencesSummaryOrBuilder
extends
// @@protoc_insertion_point(interface_extends:google.devtools.containeranalysis.v1.VulnerabilityOccurrencesSummary)
com.google.protobuf.MessageOrBuilder {
/**
*
*
* <pre>
* A listing by resource of the number of fixable and total vulnerabilities.
* </pre>
*
* <code>
* repeated .google.devtools.containeranalysis.v1.VulnerabilityOccurrencesSummary.FixableTotalByDigest counts = 1;
* </code>
*/
java.util.List<
com.google.containeranalysis.v1.VulnerabilityOccurrencesSummary.FixableTotalByDigest>
getCountsList();
/**
*
*
* <pre>
* A listing by resource of the number of fixable and total vulnerabilities.
* </pre>
*
* <code>
* repeated .google.devtools.containeranalysis.v1.VulnerabilityOccurrencesSummary.FixableTotalByDigest counts = 1;
* </code>
*/
com.google.containeranalysis.v1.VulnerabilityOccurrencesSummary.FixableTotalByDigest getCounts(
int index);
/**
*
*
* <pre>
* A listing by resource of the number of fixable and total vulnerabilities.
* </pre>
*
* <code>
* repeated .google.devtools.containeranalysis.v1.VulnerabilityOccurrencesSummary.FixableTotalByDigest counts = 1;
* </code>
*/
int getCountsCount();
/**
*
*
* <pre>
* A listing by resource of the number of fixable and total vulnerabilities.
* </pre>
*
* <code>
* repeated .google.devtools.containeranalysis.v1.VulnerabilityOccurrencesSummary.FixableTotalByDigest counts = 1;
* </code>
*/
java.util.List<
? extends
com.google.containeranalysis.v1.VulnerabilityOccurrencesSummary
.FixableTotalByDigestOrBuilder>
getCountsOrBuilderList();
/**
*
*
* <pre>
* A listing by resource of the number of fixable and total vulnerabilities.
* </pre>
*
* <code>
* repeated .google.devtools.containeranalysis.v1.VulnerabilityOccurrencesSummary.FixableTotalByDigest counts = 1;
* </code>
*/
com.google.containeranalysis.v1.VulnerabilityOccurrencesSummary.FixableTotalByDigestOrBuilder
getCountsOrBuilder(int index);
}
|
apache-2.0
|
Tyesb/lovetothinkserv
|
matcher/Models/VMatching.cs
|
622
|
namespace matcher.Models
{
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Data.Entity.Spatial;
[Table("VMatching")]
public partial class VMatching
{
[Key]
[Column(Order = 0)]
[DatabaseGenerated(DatabaseGeneratedOption.None)]
public long Vid1 { get; set; }
[Key]
[Column(Order = 1)]
[DatabaseGenerated(DatabaseGeneratedOption.None)]
public long Vid2 { get; set; }
public int? Weight { get; set; }
}
}
|
apache-2.0
|
FabricMC/fabric-base
|
src/main/java/net/fabricmc/loader/impl/launch/FabricMixinBootstrap.java
|
7016
|
/*
* Copyright 2016 FabricMC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.fabricmc.loader.impl.launch;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.spongepowered.asm.launch.MixinBootstrap;
import org.spongepowered.asm.mixin.FabricUtil;
import org.spongepowered.asm.mixin.MixinEnvironment;
import org.spongepowered.asm.mixin.Mixins;
import org.spongepowered.asm.mixin.extensibility.IMixinConfig;
import org.spongepowered.asm.mixin.transformer.Config;
import net.fabricmc.api.EnvType;
import net.fabricmc.loader.api.SemanticVersion;
import net.fabricmc.loader.api.Version;
import net.fabricmc.loader.api.VersionParsingException;
import net.fabricmc.loader.api.metadata.ModDependency;
import net.fabricmc.loader.api.metadata.ModDependency.Kind;
import net.fabricmc.loader.api.metadata.version.VersionInterval;
import net.fabricmc.loader.impl.FabricLoaderImpl;
import net.fabricmc.loader.impl.ModContainerImpl;
import net.fabricmc.loader.impl.util.log.Log;
import net.fabricmc.loader.impl.util.log.LogCategory;
import net.fabricmc.loader.impl.util.mappings.MixinIntermediaryDevRemapper;
import net.fabricmc.mapping.tree.TinyTree;
public final class FabricMixinBootstrap {
private FabricMixinBootstrap() { }
private static boolean initialized = false;
public static void init(EnvType side, FabricLoaderImpl loader) {
if (initialized) {
throw new RuntimeException("FabricMixinBootstrap has already been initialized!");
}
if (FabricLauncherBase.getLauncher().isDevelopment()) {
MappingConfiguration mappingConfiguration = FabricLauncherBase.getLauncher().getMappingConfiguration();
TinyTree mappings = mappingConfiguration.getMappings();
if (mappings != null) {
List<String> namespaces = mappings.getMetadata().getNamespaces();
if (namespaces.contains("intermediary") && namespaces.contains(mappingConfiguration.getTargetNamespace())) {
System.setProperty("mixin.env.remapRefMap", "true");
try {
MixinIntermediaryDevRemapper remapper = new MixinIntermediaryDevRemapper(mappings, "intermediary", mappingConfiguration.getTargetNamespace());
MixinEnvironment.getDefaultEnvironment().getRemappers().add(remapper);
Log.info(LogCategory.MIXIN, "Loaded Fabric development mappings for mixin remapper!");
} catch (Exception e) {
Log.error(LogCategory.MIXIN, "Fabric development environment setup error - the game will probably crash soon!");
e.printStackTrace();
}
}
}
}
MixinBootstrap.init();
Map<String, ModContainerImpl> configToModMap = new HashMap<>();
for (ModContainerImpl mod : loader.getModsInternal()) {
for (String config : mod.getMetadata().getMixinConfigs(side)) {
ModContainerImpl prev = configToModMap.putIfAbsent(config, mod);
if (prev != null) throw new RuntimeException(String.format("Non-unique Mixin config name %s used by the mods %s and %s", config, prev.getMetadata().getId(), mod.getMetadata().getId()));
try {
Mixins.addConfiguration(config);
} catch (Throwable t) {
throw new RuntimeException(String.format("Error creating Mixin config %s for mod %s", config, mod.getMetadata().getId()), t);
}
}
}
for (Config config : Mixins.getConfigs()) {
ModContainerImpl mod = configToModMap.get(config.getName());
if (mod == null) continue;
}
try {
IMixinConfig.class.getMethod("decorate", String.class, Object.class);
MixinConfigDecorator.apply(configToModMap);
} catch (NoSuchMethodException e) {
Log.info(LogCategory.MIXIN, "Detected old Mixin version without config decoration support");
}
initialized = true;
}
private static final class MixinConfigDecorator {
private static final List<LoaderMixinVersionEntry> versions = new ArrayList<>();
static {
// maximum loader version and bundled fabric mixin version, DESCENDING ORDER, LATEST FIRST
// loader versions with new mixin versions need to be added here
// addVersion("0.13", FabricUtil.COMPATIBILITY_0_11_0); // example for next entry (latest first!)
addVersion("0.12.0-", FabricUtil.COMPATIBILITY_0_10_0);
}
static void apply(Map<String, ModContainerImpl> configToModMap) {
for (Config rawConfig : Mixins.getConfigs()) {
ModContainerImpl mod = configToModMap.get(rawConfig.getName());
if (mod == null) continue;
IMixinConfig config = rawConfig.getConfig();
config.decorate(FabricUtil.KEY_MOD_ID, mod.getMetadata().getId());
config.decorate(FabricUtil.KEY_COMPATIBILITY, getMixinCompat(mod));
}
}
private static int getMixinCompat(ModContainerImpl mod) {
// infer from loader dependency by determining the least relevant loader version the mod accepts
// AND any loader deps
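// Illustrative example (values assumed, not taken from any real mod): a "depends" on
// fabricloader >=0.11.3 intersected with a "breaks" on fabricloader >=0.14.0 narrows the
// accepted intervals to [0.11.3, 0.14.0), so 0.11.3 becomes the lower bound checked below.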
List<VersionInterval> reqIntervals = Collections.singletonList(VersionInterval.INFINITE);
for (ModDependency dep : mod.getMetadata().getDependencies()) {
if (dep.getModId().equals("fabricloader") || dep.getModId().equals("fabric-loader")) {
if (dep.getKind() == Kind.DEPENDS) {
reqIntervals = VersionInterval.and(reqIntervals, dep.getVersionIntervals());
} else if (dep.getKind() == Kind.BREAKS) {
reqIntervals = VersionInterval.and(reqIntervals, VersionInterval.not(dep.getVersionIntervals()));
}
}
}
if (reqIntervals.isEmpty()) throw new IllegalStateException("mod "+mod+" is incompatible with every loader version?"); // shouldn't get here
Version minLoaderVersion = reqIntervals.get(0).getMin(); // it is sorted, so index 0 has the absolute lower bound
if (minLoaderVersion != null) { // has a lower bound
for (LoaderMixinVersionEntry version : versions) {
if (minLoaderVersion.compareTo(version.loaderVersion) >= 0) { // lower bound is >= current version
return version.mixinVersion;
} else {
break;
}
}
}
return FabricUtil.COMPATIBILITY_0_9_2;
}
private static void addVersion(String minLoaderVersion, int mixinCompat) {
try {
versions.add(new LoaderMixinVersionEntry(SemanticVersion.parse(minLoaderVersion), mixinCompat));
} catch (VersionParsingException e) {
throw new RuntimeException(e);
}
}
private static final class LoaderMixinVersionEntry {
final SemanticVersion loaderVersion;
final int mixinVersion;
LoaderMixinVersionEntry(SemanticVersion loaderVersion, int mixinVersion) {
this.loaderVersion = loaderVersion;
this.mixinVersion = mixinVersion;
}
}
}
}
|
apache-2.0
|
hajimehoshi/oto
|
internal/oboe/oboe_flowgraph_SourceI16_android.cpp
|
1724
|
/*
* Copyright 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <algorithm>
#include <unistd.h>
#include "oboe_flowgraph_FlowGraphNode_android.h"
#include "oboe_flowgraph_SourceI16_android.h"
#if FLOWGRAPH_ANDROID_INTERNAL
#include <audio_utils/primitives.h>
#endif
using namespace FLOWGRAPH_OUTER_NAMESPACE::flowgraph;
SourceI16::SourceI16(int32_t channelCount)
: FlowGraphSourceBuffered(channelCount) {
}
int32_t SourceI16::onProcess(int32_t numFrames) {
float *floatData = output.getBuffer();
int32_t channelCount = output.getSamplesPerFrame();
int32_t framesLeft = mSizeInFrames - mFrameIndex;
int32_t framesToProcess = std::min(numFrames, framesLeft);
int32_t numSamples = framesToProcess * channelCount;
const int16_t *shortBase = static_cast<const int16_t *>(mData);
const int16_t *shortData = &shortBase[mFrameIndex * channelCount];
#if FLOWGRAPH_ANDROID_INTERNAL
memcpy_to_float_from_i16(floatData, shortData, numSamples);
#else
for (int i = 0; i < numSamples; i++) {
*floatData++ = *shortData++ * (1.0f / 32768);
}
#endif
mFrameIndex += framesToProcess;
return framesToProcess;
}
|
apache-2.0
|