repo_name
stringlengths
4
116
path
stringlengths
4
379
size
stringlengths
1
7
content
stringlengths
3
1.05M
license
stringclasses
15 values
robkuijpers/shaileopenweb
public/src/scripts/main.ts
537
import { bootstrap } from '@angular/platform-browser-dynamic'; import { HTTP_PROVIDERS } from '@angular/http'; // import { enableProdMode } from '@angular/core'; import { disableDeprecatedForms, provideForms } from '@angular/forms'; import { AppComponent } from './app.component'; import { APP_ROUTER_PROVIDERS } from './app.routes'; import { AuthGuard } from './authguard.service'; // enableProdMode(); bootstrap(AppComponent, [ HTTP_PROVIDERS, APP_ROUTER_PROVIDERS, disableDeprecatedForms(), provideForms(), AuthGuard ]);
apache-2.0
adligo/fabricate_tests.adligo.org
src/org/adligo/fabricate_tests/common/en/GitEnMessagesTrial.java
3364
package org.adligo.fabricate_tests.common.en; import org.adligo.fabricate.common.en.GitEnMessages; import org.adligo.tests4j.system.shared.trials.SourceFileScope; import org.adligo.tests4j.system.shared.trials.Test; import org.adligo.tests4j_4mockito.MockitoSourceFileTrial; import org.adligo.tests4j_tests.shared.i18n.I18N_Asserter; @SourceFileScope (sourceClass=GitEnMessages.class) public class GitEnMessagesTrial extends MockitoSourceFileTrial { @Test public void testConstants() { I18N_Asserter asserter = new I18N_Asserter(this); GitEnMessages messages = GitEnMessages.INSTANCE; asserter.assertConstant("Cancel", messages.getCancel()); asserter.assertConstant("Default", messages.getDefault()); asserter.assertConstant("Discovered <X/> projects.", messages.getDiscoveredXProjects()); asserter.assertConstant("Finished git checkout for the following project;", messages.getFinishedGitCheckoutForTheFollowingProject()); asserter.assertConstant("Finished git clone for the following project;", messages.getFinishedGitCloneForTheFollowingProject()); asserter.assertConstant("Finished git commit for the following project;", messages.getFinishedGitCommitForTheFollowingProject()); asserter.assertConstant("Finished git pull for the following project;", messages.getFinishedGitPullForTheFollowingProject()); asserter.assertConstant("Finished git push for the following project;", messages.getFinishedGitPushForTheFollowingProject()); asserter.assertConstant("Finished git stage.", messages.getFinishedGitStage()); asserter.assertConstant("Ok", messages.getOk()); asserter.assertConstant("Please enter your commit message for project <X/>.", messages.getPleaseEnterYourCommitMessageForProjectX()); asserter.assertConstant("Please enter your default commit message.", messages.getPleaseEnterYourDefaultCommitMessage()); asserter.assertConstant("The following project has the version '<X/>' in fabricate.xml but " + "is not checked out to that version aborting fabrication;", 
messages.getTheFollowingProjectHasAVersionXInFabricatXmlButIsNotCheckedOutToThatVersionAborting()); asserter.assertConstant("The project directory is as follows;", messages.getTheProjectDirectoryIs()); asserter.assertConstant("Select All", messages.getSelectAll()); asserter.assertConstant("Select None", messages.getSelectNone()); asserter.assertConstant("Started git checkout for the following project;", messages.getStartedGitCheckoutForTheFollowingProject()); asserter.assertConstant("Started git clone for the following project;", messages.getStartedGitCloneForTheFollowingProject()); asserter.assertConstant("Started git commit for the following project;", messages.getStartedGitCommitForTheFollowingProject()); asserter.assertConstant("Started git pull for the following project;", messages.getStartedGitPullForTheFollowingProject()); asserter.assertConstant("Started git push for the following project;", messages.getStartedGitPushForTheFollowingProject()); asserter.assertConstantsMatchMethods(GitEnMessages.class); } }
apache-2.0
juweiping/ocms
src/org/openuap/cms/user/model/AbstractUserRole.java
2084
/* * Copyright 2005-2008 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.openuap.cms.user.model; import org.openuap.base.dao.hibernate.BaseObject; /** * * <p> * 抽象用户角色类. * </p> * * <p> * $Id: AbstractUserRole.java 3939 2010-10-27 08:41:36Z orangeforjava $ * </p> * * @author Joseph * @version 1.0 */ public abstract class AbstractUserRole extends BaseObject implements IUserRole, java.io.Serializable { /** * */ private static final long serialVersionUID = -3672151576865880987L; // private Long userId; private Long roleId; private UserRoleId id; // Constructors /** default constructor */ public AbstractUserRole() { } /** * constructor with id * * @param id * UserRoleId */ public AbstractUserRole(UserRoleId id) { this.id = id; } public UserRoleId getId() { return this.id; } public void setId(UserRoleId id) { this.id = id; } public boolean equals(Object o) { if (o == null) { return false; } if (!(o instanceof AbstractUserRole)) { return false; } AbstractUserRole that = (AbstractUserRole) o; if (this.getId() == null || that.getId() == null) { return false; } return (this.getId().equals(that.getId())); } public int hashCode() { return getId().hashCode(); } public Long getRoleId() { return roleId; } public Long getUserId() { return userId; } public void setRoleId(Long roleId) { this.roleId = roleId; } public void setUserId(Long userId) { this.userId = userId; } }
apache-2.0
tolbertam/java-driver
driver-tests/osgi/src/test/java/com/datastax/driver/osgi/MailboxServiceGuava21IT.java
2604
/* * Copyright (C) 2012-2017 DataStax Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.datastax.driver.osgi; import com.datastax.driver.osgi.api.MailboxException; import org.ops4j.pax.exam.Configuration; import org.ops4j.pax.exam.Option; import org.ops4j.pax.exam.options.MavenArtifactProvisionOption; import org.ops4j.pax.exam.testng.listener.PaxExam; import org.testng.SkipException; import org.testng.annotations.Listeners; import org.testng.annotations.Test; import static com.datastax.driver.osgi.BundleOptions.*; import static org.ops4j.pax.exam.CoreOptions.options; @Listeners({CCMBridgeListener.class, PaxExam.class}) public class MailboxServiceGuava21IT extends MailboxServiceTests { @Configuration public Option[] guava21Config() { MavenArtifactProvisionOption guavaBundle = guavaBundle(); String javaVersion = System.getProperty("java.version"); // Only bring in 21.0 if java version >= 1.8. If this is not done the framework // will fail to load for < 1.8 and we plan on skipping the test anyways. if (javaVersion.compareTo("1.8") >= 0) { guavaBundle = guavaBundle.version("21.0"); } return options( defaultOptions(), nettyBundles(), guavaBundle, driverBundle(), extrasBundle(), mappingBundle(), mailboxBundle() ); } /** * Exercises a 'mailbox' service provided by an OSGi bundle that depends on the driver with * Guava 21 explicitly enforced. * * @test_category packaging * @expected_result Can create, retrieve and delete data using the mailbox service. 
* @jira_ticket JAVA-620 * @since 2.0.10, 2.1.5 */ @Test(groups = "short") public void test_guava_21() throws MailboxException { String javaVersion = System.getProperty("java.version"); if (javaVersion.compareTo("1.8") < 0) { throw new SkipException("Guava 21 requires Java 1.8"); } checkService(); } }
apache-2.0
BrandonY/python-docs-samples
bigquery/api/streaming.py
3137
#!/usr/bin/env python # Copyright 2015, Google, Inc. # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Command-line application that streams data into BigQuery. This sample is used on this page: https://cloud.google.com/bigquery/streaming-data-into-bigquery For more information, see the README.rst. """ import argparse import ast import json import uuid import googleapiclient.discovery from six.moves import input # [START stream_row_to_bigquery] def stream_row_to_bigquery(bigquery, project_id, dataset_id, table_name, row, num_retries=5): insert_all_data = { 'rows': [{ 'json': row, # Generate a unique id for each row so retries don't accidentally # duplicate insert 'insertId': str(uuid.uuid4()), }] } return bigquery.tabledata().insertAll( projectId=project_id, datasetId=dataset_id, tableId=table_name, body=insert_all_data).execute(num_retries=num_retries) # [END stream_row_to_bigquery] # [START run] def main(project_id, dataset_id, table_name, num_retries): # [START build_service] # Construct the service object for interacting with the BigQuery API. 
bigquery = googleapiclient.discovery.build('bigquery', 'v2') # [END build_service] for row in get_rows(): response = stream_row_to_bigquery( bigquery, project_id, dataset_id, table_name, row, num_retries) print(json.dumps(response)) def get_rows(): line = input("Enter a row (python dict) into the table: ") while line: yield ast.literal_eval(line) line = input("Enter another row into the table \n" + "[hit enter to stop]: ") # [END run] # [START main] if __name__ == '__main__': parser = argparse.ArgumentParser( description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter) parser.add_argument('project_id', help='Your Google Cloud project ID.') parser.add_argument('dataset_id', help='A BigQuery dataset ID.') parser.add_argument( 'table_name', help='Name of the table to load data into.') parser.add_argument( '-p', '--poll_interval', help='How often to poll the query for completion (seconds).', type=int, default=1) parser.add_argument( '-r', '--num_retries', help='Number of times to retry in case of 500 error.', type=int, default=5) args = parser.parse_args() main( args.project_id, args.dataset_id, args.table_name, args.num_retries) # [END main]
apache-2.0
spencerahill/aospy-obj-lib
aospy_user/calcs/__init__.py
7250
"""My library of functions for use in aospy. Historically, these assumed input variables in the form of numpy arrays or masked numpy arrays. As of October 2015, I have switched to assuming xarray.DataArrays, to coincide with the same switch within aospy. However, not all of the functions in this module have been converted to support this new datatype. """ from .tendencies import ( first_to_last_vals_dur, time_tendency_first_to_last, time_tendency_each_timestep, ) from .numerics import ( latlon_deriv_prefactor, wraparound, d_dx_from_latlon, d_dy_from_lat, d_dx_at_const_p_from_eta, d_dy_at_const_p_from_eta, d_dp_from_p, d_dp_from_eta ) from .thermo import ( dse, mse, fmse, kinetic_energy, internal_energy, energy, total_energy, cpt_lvq, virt_temp, pot_temp, virt_pot_temp, equiv_pot_temp, z_from_hypso, mse_from_hypso, mixing_ratio_from_specific_mass, specific_mass_dry_air, specific_gas_constant_moist_air, heat_capacity_moist_air_constant_volume, specific_entropy_dry_air, specific_entropy_water_vapor, tdt_diab, tdt_lw_cld, tdt_sw_cld, tdt_moist_diabatic, mse_tendency, ) from .toa_sfc_fluxes import ( albedo, sfc_albedo, cre_sw, cre_lw, cre_net, toa_rad, toa_rad_clr, toa_sw, sfc_rad, sfc_rad_cld, sfc_lw, sfc_lw_cld, sfc_sw, sfc_sw_cld, sfc_energy, column_energy, column_lw, column_sw, bowen_ratio, evap_frac, ) from .advection import ( zonal_advec, merid_advec, vert_advec, horiz_advec, total_advec, zonal_advec_upwind, merid_advec_upwind, horiz_advec_upwind, total_advec_upwind, zonal_advec_const_p_from_eta, merid_advec_const_p_from_eta, horiz_advec_const_p_from_eta, vert_advec_from_eta, total_advec_from_eta, horiz_advec_spharm, ) from .mass import ( horiz_divg, horiz_divg_spharm, vert_divg, divg_3d, dp, uv_mass_adjustment, uv_mass_adjusted, u_mass_adjustment, u_mass_adjusted, v_mass_adjustment, v_mass_adjusted, column_flux_divg, column_flux_divg_adj, mass_column, mass_column_divg, mass_column_divg_spharm, mass_column_divg_adj, mass_column_integral, mass_column_source, 
mass_column_budget_lhs, mass_column_budget_with_adj_lhs, mass_column_budget_residual, mass_column_budget_adj_residual, horiz_divg_mass_adj, horiz_divg_mass_adj_spharm, horiz_divg_mass_adj_from_eta, ps_horiz_advec, uv_dry_mass_adjusted, dry_mass_column_tendency, dry_mass_column_divg, dry_mass_column_divg_adj, dry_mass_column_budget_residual, dry_mass_column_budget_adj_residual, uv_mass_adjustment, uv_mass_adjusted, horiz_divg_mass_adj, horiz_advec_mass_adj, ) from .transport import ( field_horiz_flux_divg, field_vert_flux_divg, field_times_horiz_divg, field_horiz_advec_divg_sum, field_total_advec, field_vert_int_bal, field_times_horiz_divg_mass_adj, field_horiz_flux_divg_mass_adj, omega_from_divg_eta, ) from .energy_budget import ( energy_column, energy_column_tendency, energy_column_tendency_each_timestep, energy_column_source, energy_column_divg, energy_column_budget_residual, uv_energy_adjustment, uv_energy_adjusted, uv_mass_energy_adjustment, uv_mass_energy_adjusted, u_energy_adjustment, u_energy_adjusted, u_mass_energy_adjustment, u_mass_energy_adjusted, v_energy_adjustment, v_energy_adjusted, v_mass_energy_adjustment, v_mass_energy_adjusted, energy_column_divg_adj, energy_column_divg_adj_time_mean, energy_column_divg_adj_eddy, energy_column_budget_adj_residual, energy_column_budget_energy_adj_residual, energy_column_budget_mass_adj_residual, energy_column_budget_dry_mass_adj_residual, energy_column_divg_mass_adj, energy_sfc_ps_advec, energy_sfc_ps_advec_as_resid, energy_horiz_advec_adj, energy_zonal_advec_upwind, energy_merid_advec_upwind, energy_horiz_advec_upwind, energy_horiz_advec_eta_adj, energy_horiz_advec_eta_adj_spharm, energy_horiz_advec_eta_adj_time_mean, energy_horiz_advec_eta_upwind, energy_zonal_advec_eta_upwind, energy_merid_advec_eta_upwind, energy_horiz_advec_eta_upwind_time_mean, energy_horiz_advec_eta_upwind_adj_time_mean, energy_horiz_divg_eta, energy_column_vert_advec_as_resid_eta_time_mean, energy_column_vert_advec_as_resid, 
energy_vert_advec, energy_vert_advec_eta, energy_vert_advec_eta_adj, energy_vert_advec_eta_time_mean, energy_vert_advec_eta_adj_time_mean, energy_vert_advec_eta_upwind, energy_vert_advec_eta_upwind_time_mean, energy_vert_advec_eta_upwind_adj_time_mean, ) from .mse_budget import ( mse_horiz_flux_divg, mse_horiz_advec, mse_times_horiz_divg, mse_horiz_advec_divg_sum, mse_vert_flux_divg, mse_vert_advec, mse_total_advec, mse_horiz_advec_upwind, mse_merid_advec_upwind, mse_zonal_advec_upwind, mse_vert_advec_upwind, mse_total_advec_upwind, mse_budget_advec_residual, ) from .mse_from_hypso_budget import( mse_from_hypso_zonal_advec_upwind, mse_from_hypso_merid_advec_upwind, mse_from_hypso_horiz_advec_upwind, mse_from_hypso_vert_advec_upwind, cpt_lvq_zonal_deriv, cpt_lvq_merid_deriv, cpt_lvq_zonal_advec_upwind, cpt_lvq_merid_advec_upwind, cpt_lvq_horiz_advec_upwind, ) from .fmse_budget import ( fmse_merid_deriv_eta, fmse_zonal_deriv_eta, fmse_horiz_advec_eta_upwind, fmse_budget_advec_residual, omega_change_from_fmse_budget, ) from .dse_budget import ( dse_horiz_flux_divg, dse_horiz_advec, dse_times_horiz_divg, dse_horiz_advec_divg_sum, dse_vert_advec, ) from .stats import ( pointwise_corr, pointwise_lin_regr, corr_cre_sw, corr_cre_lw, corr_cre_net, corr_toa_rad_clr, lin_regr_cre_net, lin_regr_toa_rad_clr, vert_centroid, vert_avg ) from .water import ( p_minus_e, prec_conv_frac, precip_large_scale, moisture_column_source, moisture_column_tendency, moisture_column_divg_with_adj2, moisture_column_budget_lhs, moisture_column_budget_with_adj_lhs, moisture_column_budget_with_adj2_lhs, moisture_column_budget_residual, ) from .gms import ( field_vert_int_max, horiz_divg_vert_int_max, vert_divg_vert_int_max, gms_like_ratio, gross_moist_strat, gross_dry_stab, gross_moist_stab, gms_up_low, gms_each_level, dry_static_stab, moist_static_stab, frozen_moist_static_stab, moist_static_stab_p, frozen_moist_static_stab_p, ) from .zonal_mean_circ import ( msf, msf_max, aht, aht_no_snow, oht, 
tht, gms_change_est, gms_change_est2, gms_h01, gms_h01est, gms_h01est2, gms_moc, gms_msf, total_gms, ang_mom, hadley_bounds, had_bounds, had_bounds500, thermal_equator, itcz_pos, itcz_loc, prec_centroid, precip_centroid, trop_height, )
apache-2.0
ebagdasa/tempest
tempest/scenario/test_network_basic_ops.py
21362
# Copyright 2012 OpenStack Foundation # Copyright 2013 Hewlett-Packard Development Company, L.P. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import collections import re import testtools from tempest.common.utils import data_utils from tempest import config from tempest import exceptions from tempest.openstack.common import log as logging from tempest.scenario import manager from tempest.services.network import resources as net_resources from tempest import test CONF = config.CONF LOG = logging.getLogger(__name__) Floating_IP_tuple = collections.namedtuple('Floating_IP_tuple', ['floating_ip', 'server']) class TestNetworkBasicOps(manager.NetworkScenarioTest): """ This smoke test suite assumes that Nova has been configured to boot VM's with Neutron-managed networking, and attempts to verify network connectivity as follows: There are presumed to be two types of networks: tenant and public. A tenant network may or may not be reachable from the Tempest host. A public network is assumed to be reachable from the Tempest host, and it should be possible to associate a public ('floating') IP address with a tenant ('fixed') IP address to facilitate external connectivity to a potentially unroutable tenant IP address. This test suite can be configured to test network connectivity to a VM via a tenant network, a public network, or both. 
If both networking types are to be evaluated, tests that need to be executed remotely on the VM (via ssh) will only be run against one of the networks (to minimize test execution time). Determine which types of networks to test as follows: * Configure tenant network checks (via the 'tenant_networks_reachable' key) if the Tempest host should have direct connectivity to tenant networks. This is likely to be the case if Tempest is running on the same host as a single-node devstack installation with IP namespaces disabled. * Configure checks for a public network if a public network has been configured prior to the test suite being run and if the Tempest host should have connectivity to that public network. Checking connectivity for a public network requires that a value be provided for 'public_network_id'. A value can optionally be provided for 'public_router_id' if tenants will use a shared router to access a public network (as is likely to be the case when IP namespaces are not enabled). If a value is not provided for 'public_router_id', a router will be created for each tenant and use the network identified by 'public_network_id' as its gateway. """ @classmethod def check_preconditions(cls): super(TestNetworkBasicOps, cls).check_preconditions() if not (CONF.network.tenant_networks_reachable or CONF.network.public_network_id): msg = ('Either tenant_networks_reachable must be "true", or ' 'public_network_id must be defined.') raise cls.skipException(msg) @classmethod def resource_setup(cls): for ext in ['router', 'security-group']: if not test.is_extension_enabled(ext, 'network'): msg = "%s extension not enabled." % ext raise cls.skipException(msg) # Create no network resources for these tests. 
cls.set_network_resources() super(TestNetworkBasicOps, cls).resource_setup() def setUp(self): super(TestNetworkBasicOps, self).setUp() self.keypairs = {} self.servers = [] def _setup_network_and_servers(self, **kwargs): self.security_group = \ self._create_security_group(tenant_id=self.tenant_id) self.network, self.subnet, self.router = self.create_networks(**kwargs) self.check_networks() name = data_utils.rand_name('server-smoke') server = self._create_server(name, self.network) self._check_tenant_network_connectivity() floating_ip = self.create_floating_ip(server) self.floating_ip_tuple = Floating_IP_tuple(floating_ip, server) def check_networks(self): """ Checks that we see the newly created network/subnet/router via checking the result of list_[networks,routers,subnets] """ seen_nets = self._list_networks() seen_names = [n['name'] for n in seen_nets] seen_ids = [n['id'] for n in seen_nets] self.assertIn(self.network.name, seen_names) self.assertIn(self.network.id, seen_ids) if self.subnet: seen_subnets = self._list_subnets() seen_net_ids = [n['network_id'] for n in seen_subnets] seen_subnet_ids = [n['id'] for n in seen_subnets] self.assertIn(self.network.id, seen_net_ids) self.assertIn(self.subnet.id, seen_subnet_ids) if self.router: seen_routers = self._list_routers() seen_router_ids = [n['id'] for n in seen_routers] seen_router_names = [n['name'] for n in seen_routers] self.assertIn(self.router.name, seen_router_names) self.assertIn(self.router.id, seen_router_ids) def _create_server(self, name, network): keypair = self.create_keypair() self.keypairs[keypair['name']] = keypair security_groups = [{'name': self.security_group['name']}] create_kwargs = { 'networks': [ {'uuid': network.id}, ], 'key_name': keypair['name'], 'security_groups': security_groups, } server = self.create_server(name=name, create_kwargs=create_kwargs) self.servers.append(server) return server def _get_server_key(self, server): return self.keypairs[server['key_name']]['private_key'] def 
_check_tenant_network_connectivity(self): ssh_login = CONF.compute.image_ssh_user for server in self.servers: # call the common method in the parent class super(TestNetworkBasicOps, self).\ _check_tenant_network_connectivity( server, ssh_login, self._get_server_key(server), servers_for_debug=self.servers) def check_public_network_connectivity( self, should_connect=True, msg=None, should_check_floating_ip_status=True): """Verifies connectivty to a VM via public network and floating IP, and verifies floating IP has resource status is correct. :param should_connect: bool. determines if connectivity check is negative or positive. :param msg: Failure message to add to Error message. Should describe the place in the test scenario where the method was called, to indicate the context of the failure :param should_check_floating_ip_status: bool. should status of floating_ip be checked or not """ ssh_login = CONF.compute.image_ssh_user floating_ip, server = self.floating_ip_tuple ip_address = floating_ip.floating_ip_address private_key = None floatingip_status = 'DOWN' if should_connect: private_key = self._get_server_key(server) floatingip_status = 'ACTIVE' # call the common method in the parent class super(TestNetworkBasicOps, self).check_public_network_connectivity( ip_address, ssh_login, private_key, should_connect, msg, self.servers) if should_check_floating_ip_status: self.check_floating_ip_status(floating_ip, floatingip_status) def _disassociate_floating_ips(self): floating_ip, server = self.floating_ip_tuple self._disassociate_floating_ip(floating_ip) self.floating_ip_tuple = Floating_IP_tuple( floating_ip, None) def _reassociate_floating_ips(self): floating_ip, server = self.floating_ip_tuple name = data_utils.rand_name('new_server-smoke-') # create a new server for the floating ip server = self._create_server(name, self.network) self._associate_floating_ip(floating_ip, server) self.floating_ip_tuple = Floating_IP_tuple( floating_ip, server) def 
_create_new_network(self): self.new_net = self._create_network(tenant_id=self.tenant_id) self.new_subnet = self._create_subnet( network=self.new_net, gateway_ip=None) def _hotplug_server(self): old_floating_ip, server = self.floating_ip_tuple ip_address = old_floating_ip.floating_ip_address private_key = self._get_server_key(server) ssh_client = self.get_remote_client(ip_address, private_key=private_key) old_nic_list = self._get_server_nics(ssh_client) # get a port from a list of one item port_list = self._list_ports(device_id=server['id']) self.assertEqual(1, len(port_list)) old_port = port_list[0] _, interface = self.interface_client.create_interface( server=server['id'], network_id=self.new_net.id) self.addCleanup(self.network_client.wait_for_resource_deletion, 'port', interface['port_id']) self.addCleanup(self.delete_wrapper, self.interface_client.delete_interface, server['id'], interface['port_id']) def check_ports(): self.new_port_list = [port for port in self._list_ports(device_id=server['id']) if port != old_port] return len(self.new_port_list) == 1 if not test.call_until_true(check_ports, CONF.network.build_timeout, CONF.network.build_interval): raise exceptions.TimeoutException("No new port attached to the " "server in time (%s sec) !" 
% CONF.network.build_timeout) new_port = net_resources.DeletablePort(client=self.network_client, **self.new_port_list[0]) def check_new_nic(): new_nic_list = self._get_server_nics(ssh_client) self.diff_list = [n for n in new_nic_list if n not in old_nic_list] return len(self.diff_list) == 1 if not test.call_until_true(check_new_nic, CONF.network.build_timeout, CONF.network.build_interval): raise exceptions.TimeoutException("Interface not visible on the " "guest after %s sec" % CONF.network.build_timeout) num, new_nic = self.diff_list[0] ssh_client.assign_static_ip(nic=new_nic, addr=new_port.fixed_ips[0]['ip_address']) ssh_client.turn_nic_on(nic=new_nic) def _get_server_nics(self, ssh_client): reg = re.compile(r'(?P<num>\d+): (?P<nic_name>\w+):') ipatxt = ssh_client.get_ip_list() return reg.findall(ipatxt) def _check_network_internal_connectivity(self, network): """ via ssh check VM internal connectivity: - ping internal gateway and DHCP port, implying in-tenant connectivity pinging both, because L3 and DHCP agents might be on different nodes """ floating_ip, server = self.floating_ip_tuple # get internal ports' ips: # get all network ports in the new network internal_ips = (p['fixed_ips'][0]['ip_address'] for p in self._list_ports(tenant_id=server['tenant_id'], network_id=network.id) if p['device_owner'].startswith('network')) self._check_server_connectivity(floating_ip, internal_ips) def _check_network_external_connectivity(self): """ ping public network default gateway to imply external connectivity """ if not CONF.network.public_network_id: msg = 'public network not defined.' 
LOG.info(msg) return subnet = self._list_subnets( network_id=CONF.network.public_network_id) self.assertEqual(1, len(subnet), "Found %d subnets" % len(subnet)) external_ips = [subnet[0]['gateway_ip']] self._check_server_connectivity(self.floating_ip_tuple.floating_ip, external_ips) def _check_server_connectivity(self, floating_ip, address_list): ip_address = floating_ip.floating_ip_address private_key = self._get_server_key(self.floating_ip_tuple.server) ssh_source = self._ssh_to_server(ip_address, private_key) for remote_ip in address_list: try: self.assertTrue(self._check_remote_connectivity(ssh_source, remote_ip), "Timed out waiting for %s to become " "reachable" % remote_ip) except Exception: LOG.exception("Unable to access {dest} via ssh to " "floating-ip {src}".format(dest=remote_ip, src=floating_ip)) raise @test.attr(type='smoke') @test.services('compute', 'network') def test_network_basic_ops(self): """ For a freshly-booted VM with an IP address ("port") on a given network: - the Tempest host can ping the IP address. This implies, but does not guarantee (see the ssh check that follows), that the VM has been assigned the correct IP address and has connectivity to the Tempest host. - the Tempest host can perform key-based authentication to an ssh server hosted at the IP address. This check guarantees that the IP address is associated with the target VM. - the Tempest host can ssh into the VM via the IP address and successfully execute the following: - ping an external IP address, implying external connectivity. - ping an external hostname, implying that dns is correctly configured. - ping an internal IP address, implying connectivity to another VM on the same network. - detach the floating-ip from the VM and verify that it becomes unreachable - associate detached floating ip to a new VM and verify connectivity. 
VMs are created with unique keypair so connectivity also asserts that floating IP is associated with the new VM instead of the old one Verifies that floating IP status is updated correctly after each change """ self._setup_network_and_servers() self.check_public_network_connectivity(should_connect=True) self._check_network_internal_connectivity(network=self.network) self._check_network_external_connectivity() self._disassociate_floating_ips() self.check_public_network_connectivity(should_connect=False, msg="after disassociate " "floating ip") self._reassociate_floating_ips() self.check_public_network_connectivity(should_connect=True, msg="after re-associate " "floating ip") @testtools.skipUnless(CONF.compute_feature_enabled.interface_attach, 'NIC hotplug not available') @test.attr(type='smoke') @test.services('compute', 'network') def test_hotplug_nic(self): """ 1. create a new network, with no gateway (to prevent overwriting VM's gateway) 2. connect VM to new network 3. set static ip and bring new nic up 4. check VM can ping new network dhcp port """ self._setup_network_and_servers() self.check_public_network_connectivity(should_connect=True) self._create_new_network() self._hotplug_server() self._check_network_internal_connectivity(network=self.new_net) @testtools.skipIf(CONF.baremetal.driver_enabled, 'Router state cannot be altered on a shared baremetal ' 'network') @test.attr(type='smoke') @test.services('compute', 'network') def test_update_router_admin_state(self): """ 1. Check public connectivity before updating admin_state_up attribute of router to False 2. Check public connectivity after updating admin_state_up attribute of router to False 3. 
Check public connectivity after updating admin_state_up attribute of router to True """ self._setup_network_and_servers() self.check_public_network_connectivity( should_connect=True, msg="before updating " "admin_state_up of router to False") self._update_router_admin_state(self.router, False) # TODO(alokmaurya): Remove should_check_floating_ip_status=False check # once bug 1396310 is fixed self.check_public_network_connectivity( should_connect=False, msg="after updating " "admin_state_up of router to False", should_check_floating_ip_status=False) self._update_router_admin_state(self.router, True) self.check_public_network_connectivity( should_connect=True, msg="after updating " "admin_state_up of router to True") def _check_dns_server(self, ssh_client, dns_servers): servers = ssh_client.get_dns_servers() self.assertEqual(set(dns_servers), set(servers), 'Looking for servers: {trgt_serv}. ' 'Retrieved DNS nameservers: {act_serv} ' 'From host: {host}.' .format(host=ssh_client.ssh_client.host, act_serv=servers, trgt_serv=dns_servers)) @test.skip_because(bug="1412325") @testtools.skipUnless(CONF.scenario.dhcp_client, "DHCP client is not available.") @test.attr(type='smoke') @test.services('compute', 'network') def test_subnet_details(self): """Tests that subnet's extra configuration details are affecting the VMs NOTE: Neutron subnets push data to servers via dhcp-agent, so any update in subnet requires server to actively renew its DHCP lease. 1. Configure subnet with dns nameserver 2. retrieve the VM's configured dns and verify it matches the one configured for the subnet. 3. update subnet's dns 4. retrieve the VM's configured dns and verify it matches the new one configured for the subnet. 
TODO(yfried): add host_routes any resolution check would be testing either: * l3 forwarding (tested in test_network_basic_ops) * Name resolution of an external DNS nameserver - out of scope for Tempest """ # this test check only updates (no actual resolution) so using # arbitrary ip addresses as nameservers, instead of parsing CONF initial_dns_server = '1.2.3.4' alt_dns_server = '9.8.7.6' self._setup_network_and_servers(dns_nameservers=[initial_dns_server]) self.check_public_network_connectivity(should_connect=True) floating_ip, server = self.floating_ip_tuple ip_address = floating_ip.floating_ip_address private_key = self._get_server_key(server) ssh_client = self._ssh_to_server(ip_address, private_key) self._check_dns_server(ssh_client, [initial_dns_server]) self.subnet.update(dns_nameservers=[alt_dns_server]) # asserts that Neutron DB has updated the nameservers self.assertEqual([alt_dns_server], self.subnet.dns_nameservers, "Failed to update subnet's nameservers") # server needs to renew its dhcp lease in order to get the new dns # definitions from subnet ssh_client.renew_lease(fixed_ip=floating_ip['fixed_ip_address']) self._check_dns_server(ssh_client, [alt_dns_server])
apache-2.0
rogro82/HorizonRemote
HorizonRemote/jni/src/vnc/des_local.cpp
15841
/* * This is D3DES (V5.09) by Richard Outerbridge with the double and * triple-length support removed for use in VNC. Also the bytebit[] array * has been reversed so that the most significant bit in each byte of the * key is ignored, not the least significant. * * These changes are: * Copyright (C) 1999 AT&T Laboratories Cambridge. All Rights Reserved. * * This software is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. */ /* D3DES (V5.09) - * * A portable, public domain, version of the Data Encryption Standard. * * Written with Symantec's THINK (Lightspeed) C by Richard Outerbridge. * Thanks to: Dan Hoey for his excellent Initial and Inverse permutation * code; Jim Gillogly & Phil Karn for the DES key schedule code; Dennis * Ferguson, Eric Young and Dana How for comparing notes; and Ray Lau, * for humouring me on. * * Copyright (c) 1988,1989,1990,1991,1992 by Richard Outerbridge. * (GEnie : OUTER; CIS : [71755,204]) Graven Imagery, 1992. */ #include "vnc/des_local.h" static void scrunch(unsigned char *, unsigned long *); static void unscrun(unsigned long *, unsigned char *); static void desfunc(unsigned long *, unsigned long *); static void cookey(unsigned long *); static unsigned long KnL[32] = { 0L }; static const unsigned short bytebit[8] = { 01, 02, 04, 010, 020, 040, 0100, 0200 }; static const unsigned long bigbyte[24] = { 0x800000L, 0x400000L, 0x200000L, 0x100000L, 0x80000L, 0x40000L, 0x20000L, 0x10000L, 0x8000L, 0x4000L, 0x2000L, 0x1000L, 0x800L, 0x400L, 0x200L, 0x100L, 0x80L, 0x40L, 0x20L, 0x10L, 0x8L, 0x4L, 0x2L, 0x1L }; /* Use the key schedule specified in the Standard (ANSI X3.92-1981). 
*/ static const unsigned char pc1[56] = { 56, 48, 40, 32, 24, 16, 8, 0, 57, 49, 41, 33, 25, 17, 9, 1, 58, 50, 42, 34, 26, 18, 10, 2, 59, 51, 43, 35, 62, 54, 46, 38, 30, 22, 14, 6, 61, 53, 45, 37, 29, 21, 13, 5, 60, 52, 44, 36, 28, 20, 12, 4, 27, 19, 11, 3 }; static const unsigned char totrot[16] = { 1,2,4,6,8,10,12,14,15,17,19,21,23,25,27,28 }; static const unsigned char pc2[48] = { 13, 16, 10, 23, 0, 4, 2, 27, 14, 5, 20, 9, 22, 18, 11, 3, 25, 7, 15, 6, 26, 19, 12, 1, 40, 51, 30, 36, 46, 54, 29, 39, 50, 44, 32, 47, 43, 48, 38, 55, 33, 52, 45, 41, 49, 35, 28, 31 }; /* Thanks to James Gillogly & Phil Karn! */ void deskey(unsigned char *key, int edf) { register int i, j, l, m, n; unsigned char pc1m[56], pcr[56]; unsigned long kn[32]; for ( j = 0; j < 56; j++ ) { l = pc1[j]; m = l & 07; pc1m[j] = (key[l >> 3] & bytebit[m]) ? 1 : 0; } for( i = 0; i < 16; i++ ) { if( edf == DE1 ) m = (15 - i) << 1; else m = i << 1; n = m + 1; kn[m] = kn[n] = 0L; for( j = 0; j < 28; j++ ) { l = j + totrot[i]; if( l < 28 ) pcr[j] = pc1m[l]; else pcr[j] = pc1m[l - 28]; } for( j = 28; j < 56; j++ ) { l = j + totrot[i]; if( l < 56 ) pcr[j] = pc1m[l]; else pcr[j] = pc1m[l - 28]; } for( j = 0; j < 24; j++ ) { if( pcr[pc2[j]] ) kn[m] |= bigbyte[j]; if( pcr[pc2[j+24]] ) kn[n] |= bigbyte[j]; } } cookey(kn); return; } static void cookey(register unsigned long *raw1) { register unsigned long *cook, *raw0; unsigned long dough[32]; register int i; cook = dough; for( i = 0; i < 16; i++, raw1++ ) { raw0 = raw1++; *cook = (*raw0 & 0x00fc0000L) << 6; *cook |= (*raw0 & 0x00000fc0L) << 10; *cook |= (*raw1 & 0x00fc0000L) >> 10; *cook++ |= (*raw1 & 0x00000fc0L) >> 6; *cook = (*raw0 & 0x0003f000L) << 12; *cook |= (*raw0 & 0x0000003fL) << 16; *cook |= (*raw1 & 0x0003f000L) >> 4; *cook++ |= (*raw1 & 0x0000003fL); } usekey(dough); return; } void cpkey(register unsigned long *into) { register unsigned long *from, *endp; from = KnL, endp = &KnL[32]; while( from < endp ) *into++ = *from++; return; } void 
usekey(register unsigned long *from) { register unsigned long *to, *endp; to = KnL, endp = &KnL[32]; while( to < endp ) *to++ = *from++; return; } void des(unsigned char *inblock, unsigned char *outblock) { unsigned long work[2]; scrunch(inblock, work); desfunc(work, KnL); unscrun(work, outblock); return; } static void scrunch(register unsigned char *outof, register unsigned long *into) { *into = (*outof++ & 0xffL) << 24; *into |= (*outof++ & 0xffL) << 16; *into |= (*outof++ & 0xffL) << 8; *into++ |= (*outof++ & 0xffL); *into = (*outof++ & 0xffL) << 24; *into |= (*outof++ & 0xffL) << 16; *into |= (*outof++ & 0xffL) << 8; *into |= (*outof & 0xffL); return; } static void unscrun(register unsigned long *outof, register unsigned char *into) { *into++ = (unsigned char)((*outof >> 24) & 0xffL); *into++ = (unsigned char)((*outof >> 16) & 0xffL); *into++ = (unsigned char)((*outof >> 8) & 0xffL); *into++ = (unsigned char)(*outof++ & 0xffL); *into++ = (unsigned char)((*outof >> 24) & 0xffL); *into++ = (unsigned char)((*outof >> 16) & 0xffL); *into++ = (unsigned char)((*outof >> 8) & 0xffL); *into = (unsigned char)(*outof & 0xffL); return; } static const unsigned long SP1[64] = { 0x01010400L, 0x00000000L, 0x00010000L, 0x01010404L, 0x01010004L, 0x00010404L, 0x00000004L, 0x00010000L, 0x00000400L, 0x01010400L, 0x01010404L, 0x00000400L, 0x01000404L, 0x01010004L, 0x01000000L, 0x00000004L, 0x00000404L, 0x01000400L, 0x01000400L, 0x00010400L, 0x00010400L, 0x01010000L, 0x01010000L, 0x01000404L, 0x00010004L, 0x01000004L, 0x01000004L, 0x00010004L, 0x00000000L, 0x00000404L, 0x00010404L, 0x01000000L, 0x00010000L, 0x01010404L, 0x00000004L, 0x01010000L, 0x01010400L, 0x01000000L, 0x01000000L, 0x00000400L, 0x01010004L, 0x00010000L, 0x00010400L, 0x01000004L, 0x00000400L, 0x00000004L, 0x01000404L, 0x00010404L, 0x01010404L, 0x00010004L, 0x01010000L, 0x01000404L, 0x01000004L, 0x00000404L, 0x00010404L, 0x01010400L, 0x00000404L, 0x01000400L, 0x01000400L, 0x00000000L, 0x00010004L, 0x00010400L, 
0x00000000L, 0x01010004L }; static const unsigned long SP2[64] = { 0x80108020L, 0x80008000L, 0x00008000L, 0x00108020L, 0x00100000L, 0x00000020L, 0x80100020L, 0x80008020L, 0x80000020L, 0x80108020L, 0x80108000L, 0x80000000L, 0x80008000L, 0x00100000L, 0x00000020L, 0x80100020L, 0x00108000L, 0x00100020L, 0x80008020L, 0x00000000L, 0x80000000L, 0x00008000L, 0x00108020L, 0x80100000L, 0x00100020L, 0x80000020L, 0x00000000L, 0x00108000L, 0x00008020L, 0x80108000L, 0x80100000L, 0x00008020L, 0x00000000L, 0x00108020L, 0x80100020L, 0x00100000L, 0x80008020L, 0x80100000L, 0x80108000L, 0x00008000L, 0x80100000L, 0x80008000L, 0x00000020L, 0x80108020L, 0x00108020L, 0x00000020L, 0x00008000L, 0x80000000L, 0x00008020L, 0x80108000L, 0x00100000L, 0x80000020L, 0x00100020L, 0x80008020L, 0x80000020L, 0x00100020L, 0x00108000L, 0x00000000L, 0x80008000L, 0x00008020L, 0x80000000L, 0x80100020L, 0x80108020L, 0x00108000L }; static const unsigned long SP3[64] = { 0x00000208L, 0x08020200L, 0x00000000L, 0x08020008L, 0x08000200L, 0x00000000L, 0x00020208L, 0x08000200L, 0x00020008L, 0x08000008L, 0x08000008L, 0x00020000L, 0x08020208L, 0x00020008L, 0x08020000L, 0x00000208L, 0x08000000L, 0x00000008L, 0x08020200L, 0x00000200L, 0x00020200L, 0x08020000L, 0x08020008L, 0x00020208L, 0x08000208L, 0x00020200L, 0x00020000L, 0x08000208L, 0x00000008L, 0x08020208L, 0x00000200L, 0x08000000L, 0x08020200L, 0x08000000L, 0x00020008L, 0x00000208L, 0x00020000L, 0x08020200L, 0x08000200L, 0x00000000L, 0x00000200L, 0x00020008L, 0x08020208L, 0x08000200L, 0x08000008L, 0x00000200L, 0x00000000L, 0x08020008L, 0x08000208L, 0x00020000L, 0x08000000L, 0x08020208L, 0x00000008L, 0x00020208L, 0x00020200L, 0x08000008L, 0x08020000L, 0x08000208L, 0x00000208L, 0x08020000L, 0x00020208L, 0x00000008L, 0x08020008L, 0x00020200L }; static const unsigned long SP4[64] = { 0x00802001L, 0x00002081L, 0x00002081L, 0x00000080L, 0x00802080L, 0x00800081L, 0x00800001L, 0x00002001L, 0x00000000L, 0x00802000L, 0x00802000L, 0x00802081L, 0x00000081L, 0x00000000L, 
0x00800080L, 0x00800001L, 0x00000001L, 0x00002000L, 0x00800000L, 0x00802001L, 0x00000080L, 0x00800000L, 0x00002001L, 0x00002080L, 0x00800081L, 0x00000001L, 0x00002080L, 0x00800080L, 0x00002000L, 0x00802080L, 0x00802081L, 0x00000081L, 0x00800080L, 0x00800001L, 0x00802000L, 0x00802081L, 0x00000081L, 0x00000000L, 0x00000000L, 0x00802000L, 0x00002080L, 0x00800080L, 0x00800081L, 0x00000001L, 0x00802001L, 0x00002081L, 0x00002081L, 0x00000080L, 0x00802081L, 0x00000081L, 0x00000001L, 0x00002000L, 0x00800001L, 0x00002001L, 0x00802080L, 0x00800081L, 0x00002001L, 0x00002080L, 0x00800000L, 0x00802001L, 0x00000080L, 0x00800000L, 0x00002000L, 0x00802080L }; static const unsigned long SP5[64] = { 0x00000100L, 0x02080100L, 0x02080000L, 0x42000100L, 0x00080000L, 0x00000100L, 0x40000000L, 0x02080000L, 0x40080100L, 0x00080000L, 0x02000100L, 0x40080100L, 0x42000100L, 0x42080000L, 0x00080100L, 0x40000000L, 0x02000000L, 0x40080000L, 0x40080000L, 0x00000000L, 0x40000100L, 0x42080100L, 0x42080100L, 0x02000100L, 0x42080000L, 0x40000100L, 0x00000000L, 0x42000000L, 0x02080100L, 0x02000000L, 0x42000000L, 0x00080100L, 0x00080000L, 0x42000100L, 0x00000100L, 0x02000000L, 0x40000000L, 0x02080000L, 0x42000100L, 0x40080100L, 0x02000100L, 0x40000000L, 0x42080000L, 0x02080100L, 0x40080100L, 0x00000100L, 0x02000000L, 0x42080000L, 0x42080100L, 0x00080100L, 0x42000000L, 0x42080100L, 0x02080000L, 0x00000000L, 0x40080000L, 0x42000000L, 0x00080100L, 0x02000100L, 0x40000100L, 0x00080000L, 0x00000000L, 0x40080000L, 0x02080100L, 0x40000100L }; static const unsigned long SP6[64] = { 0x20000010L, 0x20400000L, 0x00004000L, 0x20404010L, 0x20400000L, 0x00000010L, 0x20404010L, 0x00400000L, 0x20004000L, 0x00404010L, 0x00400000L, 0x20000010L, 0x00400010L, 0x20004000L, 0x20000000L, 0x00004010L, 0x00000000L, 0x00400010L, 0x20004010L, 0x00004000L, 0x00404000L, 0x20004010L, 0x00000010L, 0x20400010L, 0x20400010L, 0x00000000L, 0x00404010L, 0x20404000L, 0x00004010L, 0x00404000L, 0x20404000L, 0x20000000L, 0x20004000L, 
0x00000010L, 0x20400010L, 0x00404000L, 0x20404010L, 0x00400000L, 0x00004010L, 0x20000010L, 0x00400000L, 0x20004000L, 0x20000000L, 0x00004010L, 0x20000010L, 0x20404010L, 0x00404000L, 0x20400000L, 0x00404010L, 0x20404000L, 0x00000000L, 0x20400010L, 0x00000010L, 0x00004000L, 0x20400000L, 0x00404010L, 0x00004000L, 0x00400010L, 0x20004010L, 0x00000000L, 0x20404000L, 0x20000000L, 0x00400010L, 0x20004010L }; static const unsigned long SP7[64] = { 0x00200000L, 0x04200002L, 0x04000802L, 0x00000000L, 0x00000800L, 0x04000802L, 0x00200802L, 0x04200800L, 0x04200802L, 0x00200000L, 0x00000000L, 0x04000002L, 0x00000002L, 0x04000000L, 0x04200002L, 0x00000802L, 0x04000800L, 0x00200802L, 0x00200002L, 0x04000800L, 0x04000002L, 0x04200000L, 0x04200800L, 0x00200002L, 0x04200000L, 0x00000800L, 0x00000802L, 0x04200802L, 0x00200800L, 0x00000002L, 0x04000000L, 0x00200800L, 0x04000000L, 0x00200800L, 0x00200000L, 0x04000802L, 0x04000802L, 0x04200002L, 0x04200002L, 0x00000002L, 0x00200002L, 0x04000000L, 0x04000800L, 0x00200000L, 0x04200800L, 0x00000802L, 0x00200802L, 0x04200800L, 0x00000802L, 0x04000002L, 0x04200802L, 0x04200000L, 0x00200800L, 0x00000000L, 0x00000002L, 0x04200802L, 0x00000000L, 0x00200802L, 0x04200000L, 0x00000800L, 0x04000002L, 0x04000800L, 0x00000800L, 0x00200002L }; static const unsigned long SP8[64] = { 0x10001040L, 0x00001000L, 0x00040000L, 0x10041040L, 0x10000000L, 0x10001040L, 0x00000040L, 0x10000000L, 0x00040040L, 0x10040000L, 0x10041040L, 0x00041000L, 0x10041000L, 0x00041040L, 0x00001000L, 0x00000040L, 0x10040000L, 0x10000040L, 0x10001000L, 0x00001040L, 0x00041000L, 0x00040040L, 0x10040040L, 0x10041000L, 0x00001040L, 0x00000000L, 0x00000000L, 0x10040040L, 0x10000040L, 0x10001000L, 0x00041040L, 0x00040000L, 0x00041040L, 0x00040000L, 0x10041000L, 0x00001000L, 0x00000040L, 0x10040040L, 0x00001000L, 0x00041040L, 0x10001000L, 0x00000040L, 0x10000040L, 0x10040000L, 0x10040040L, 0x10000000L, 0x00040000L, 0x10001040L, 0x00000000L, 0x10041040L, 0x00040040L, 0x10000040L, 
0x10040000L, 0x10001000L, 0x10001040L, 0x00000000L, 0x10041040L, 0x00041000L, 0x00041000L, 0x00001040L, 0x00001040L, 0x00040040L, 0x10000000L, 0x10041000L }; static void desfunc(register unsigned long *block, register unsigned long *keys) { register unsigned long fval, work, right, leftt; register int round; leftt = block[0]; right = block[1]; work = ((leftt >> 4) ^ right) & 0x0f0f0f0fL; right ^= work; leftt ^= (work << 4); work = ((leftt >> 16) ^ right) & 0x0000ffffL; right ^= work; leftt ^= (work << 16); work = ((right >> 2) ^ leftt) & 0x33333333L; leftt ^= work; right ^= (work << 2); work = ((right >> 8) ^ leftt) & 0x00ff00ffL; leftt ^= work; right ^= (work << 8); right = ((right << 1) | ((right >> 31) & 1L)) & 0xffffffffL; work = (leftt ^ right) & 0xaaaaaaaaL; leftt ^= work; right ^= work; leftt = ((leftt << 1) | ((leftt >> 31) & 1L)) & 0xffffffffL; for( round = 0; round < 8; round++ ) { work = (right << 28) | (right >> 4); work ^= *keys++; fval = SP7[ work & 0x3fL]; fval |= SP5[(work >> 8) & 0x3fL]; fval |= SP3[(work >> 16) & 0x3fL]; fval |= SP1[(work >> 24) & 0x3fL]; work = right ^ *keys++; fval |= SP8[ work & 0x3fL]; fval |= SP6[(work >> 8) & 0x3fL]; fval |= SP4[(work >> 16) & 0x3fL]; fval |= SP2[(work >> 24) & 0x3fL]; leftt ^= fval; work = (leftt << 28) | (leftt >> 4); work ^= *keys++; fval = SP7[ work & 0x3fL]; fval |= SP5[(work >> 8) & 0x3fL]; fval |= SP3[(work >> 16) & 0x3fL]; fval |= SP1[(work >> 24) & 0x3fL]; work = leftt ^ *keys++; fval |= SP8[ work & 0x3fL]; fval |= SP6[(work >> 8) & 0x3fL]; fval |= SP4[(work >> 16) & 0x3fL]; fval |= SP2[(work >> 24) & 0x3fL]; right ^= fval; } right = (right << 31) | (right >> 1); work = (leftt ^ right) & 0xaaaaaaaaL; leftt ^= work; right ^= work; leftt = (leftt << 31) | (leftt >> 1); work = ((leftt >> 8) ^ right) & 0x00ff00ffL; right ^= work; leftt ^= (work << 8); work = ((leftt >> 2) ^ right) & 0x33333333L; right ^= work; leftt ^= (work << 2); work = ((right >> 16) ^ leftt) & 0x0000ffffL; leftt ^= work; right ^= 
(work << 16); work = ((right >> 4) ^ leftt) & 0x0f0f0f0fL; leftt ^= work; right ^= (work << 4); *block++ = right; *block = leftt; return; } /* Validation sets: * * Single-length key, single-length plaintext - * Key : 0123 4567 89ab cdef * Plain : 0123 4567 89ab cde7 * Cipher : c957 4425 6a5e d31d * * Double-length key, single-length plaintext - * Key : 0123 4567 89ab cdef fedc ba98 7654 3210 * Plain : 0123 4567 89ab cde7 * Cipher : 7f1d 0a77 826b 8aff * * Double-length key, double-length plaintext - * Key : 0123 4567 89ab cdef fedc ba98 7654 3210 * Plain : 0123 4567 89ab cdef 0123 4567 89ab cdff * Cipher : 27a0 8440 406a df60 278f 47cf 42d6 15d7 * * Triple-length key, single-length plaintext - * Key : 0123 4567 89ab cdef fedc ba98 7654 3210 89ab cdef 0123 4567 * Plain : 0123 4567 89ab cde7 * Cipher : de0b 7c06 ae5e 0ed5 * * Triple-length key, double-length plaintext - * Key : 0123 4567 89ab cdef fedc ba98 7654 3210 89ab cdef 0123 4567 * Plain : 0123 4567 89ab cdef 0123 4567 89ab cdff * Cipher : ad0d 1b30 ac17 cf07 0ed1 1c63 81e4 4de5 * * d3des V5.0a rwo 9208.07 18:44 Graven Imagery **********************************************************************/
apache-2.0
eug48/hapi-fhir
hapi-fhir-structures-hl7org-dstu2/src/main/java/org/hl7/fhir/instance/model/BodySite.java
18503
package org.hl7.fhir.instance.model; /* Copyright (c) 2011+, HL7, Inc. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of HL7 nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ // Generated on Wed, Jul 13, 2016 05:32+1000 for FHIR v1.0.2 import java.util.ArrayList; import java.util.List; import org.hl7.fhir.exceptions.FHIRException; import org.hl7.fhir.utilities.Utilities; import ca.uhn.fhir.model.api.annotation.*; /** * Record details about the anatomical location of a specimen or body part. This resource may be used when a coded concept does not provide the necessary detail needed for the use case. 
*/ @ResourceDef(name="BodySite", profile="http://hl7.org/fhir/Profile/BodySite") public class BodySite extends DomainResource { /** * The person to which the body site belongs. */ @Child(name = "patient", type = {Patient.class}, order=0, min=1, max=1, modifier=false, summary=true) @Description(shortDefinition="Patient", formalDefinition="The person to which the body site belongs." ) protected Reference patient; /** * The actual object that is the target of the reference (The person to which the body site belongs.) */ protected Patient patientTarget; /** * Identifier for this instance of the anatomical location. */ @Child(name = "identifier", type = {Identifier.class}, order=1, min=0, max=Child.MAX_UNLIMITED, modifier=false, summary=true) @Description(shortDefinition="Bodysite identifier", formalDefinition="Identifier for this instance of the anatomical location." ) protected List<Identifier> identifier; /** * Named anatomical location - ideally coded where possible. */ @Child(name = "code", type = {CodeableConcept.class}, order=2, min=0, max=1, modifier=false, summary=true) @Description(shortDefinition="Named anatomical location", formalDefinition="Named anatomical location - ideally coded where possible." ) protected CodeableConcept code; /** * Modifier to refine the anatomical location. These include modifiers for laterality, relative location, directionality, number, and plane. */ @Child(name = "modifier", type = {CodeableConcept.class}, order=3, min=0, max=Child.MAX_UNLIMITED, modifier=false, summary=false) @Description(shortDefinition="Modification to location code", formalDefinition="Modifier to refine the anatomical location. These include modifiers for laterality, relative location, directionality, number, and plane." ) protected List<CodeableConcept> modifier; /** * Description of anatomical location. 
*/ @Child(name = "description", type = {StringType.class}, order=4, min=0, max=1, modifier=false, summary=true) @Description(shortDefinition="The Description of anatomical location", formalDefinition="Description of anatomical location." ) protected StringType description; /** * Image or images used to identify a location. */ @Child(name = "image", type = {Attachment.class}, order=5, min=0, max=Child.MAX_UNLIMITED, modifier=false, summary=false) @Description(shortDefinition="Attached images", formalDefinition="Image or images used to identify a location." ) protected List<Attachment> image; private static final long serialVersionUID = 1568109920L; /* * Constructor */ public BodySite() { super(); } /* * Constructor */ public BodySite(Reference patient) { super(); this.patient = patient; } /** * @return {@link #patient} (The person to which the body site belongs.) */ public Reference getPatient() { if (this.patient == null) if (Configuration.errorOnAutoCreate()) throw new Error("Attempt to auto-create BodySite.patient"); else if (Configuration.doAutoCreate()) this.patient = new Reference(); // cc return this.patient; } public boolean hasPatient() { return this.patient != null && !this.patient.isEmpty(); } /** * @param value {@link #patient} (The person to which the body site belongs.) */ public BodySite setPatient(Reference value) { this.patient = value; return this; } /** * @return {@link #patient} The actual object that is the target of the reference. The reference library doesn't populate this, but you can use it to hold the resource if you resolve it. (The person to which the body site belongs.) */ public Patient getPatientTarget() { if (this.patientTarget == null) if (Configuration.errorOnAutoCreate()) throw new Error("Attempt to auto-create BodySite.patient"); else if (Configuration.doAutoCreate()) this.patientTarget = new Patient(); // aa return this.patientTarget; } /** * @param value {@link #patient} The actual object that is the target of the reference. 
The reference library doesn't use these, but you can use it to hold the resource if you resolve it. (The person to which the body site belongs.) */ public BodySite setPatientTarget(Patient value) { this.patientTarget = value; return this; } /** * @return {@link #identifier} (Identifier for this instance of the anatomical location.) */ public List<Identifier> getIdentifier() { if (this.identifier == null) this.identifier = new ArrayList<Identifier>(); return this.identifier; } public boolean hasIdentifier() { if (this.identifier == null) return false; for (Identifier item : this.identifier) if (!item.isEmpty()) return true; return false; } /** * @return {@link #identifier} (Identifier for this instance of the anatomical location.) */ // syntactic sugar public Identifier addIdentifier() { //3 Identifier t = new Identifier(); if (this.identifier == null) this.identifier = new ArrayList<Identifier>(); this.identifier.add(t); return t; } // syntactic sugar public BodySite addIdentifier(Identifier t) { //3 if (t == null) return this; if (this.identifier == null) this.identifier = new ArrayList<Identifier>(); this.identifier.add(t); return this; } /** * @return {@link #code} (Named anatomical location - ideally coded where possible.) */ public CodeableConcept getCode() { if (this.code == null) if (Configuration.errorOnAutoCreate()) throw new Error("Attempt to auto-create BodySite.code"); else if (Configuration.doAutoCreate()) this.code = new CodeableConcept(); // cc return this.code; } public boolean hasCode() { return this.code != null && !this.code.isEmpty(); } /** * @param value {@link #code} (Named anatomical location - ideally coded where possible.) */ public BodySite setCode(CodeableConcept value) { this.code = value; return this; } /** * @return {@link #modifier} (Modifier to refine the anatomical location. These include modifiers for laterality, relative location, directionality, number, and plane.) 
*/ public List<CodeableConcept> getModifier() { if (this.modifier == null) this.modifier = new ArrayList<CodeableConcept>(); return this.modifier; } public boolean hasModifier() { if (this.modifier == null) return false; for (CodeableConcept item : this.modifier) if (!item.isEmpty()) return true; return false; } /** * @return {@link #modifier} (Modifier to refine the anatomical location. These include modifiers for laterality, relative location, directionality, number, and plane.) */ // syntactic sugar public CodeableConcept addModifier() { //3 CodeableConcept t = new CodeableConcept(); if (this.modifier == null) this.modifier = new ArrayList<CodeableConcept>(); this.modifier.add(t); return t; } // syntactic sugar public BodySite addModifier(CodeableConcept t) { //3 if (t == null) return this; if (this.modifier == null) this.modifier = new ArrayList<CodeableConcept>(); this.modifier.add(t); return this; } /** * @return {@link #description} (Description of anatomical location.). This is the underlying object with id, value and extensions. The accessor "getDescription" gives direct access to the value */ public StringType getDescriptionElement() { if (this.description == null) if (Configuration.errorOnAutoCreate()) throw new Error("Attempt to auto-create BodySite.description"); else if (Configuration.doAutoCreate()) this.description = new StringType(); // bb return this.description; } public boolean hasDescriptionElement() { return this.description != null && !this.description.isEmpty(); } public boolean hasDescription() { return this.description != null && !this.description.isEmpty(); } /** * @param value {@link #description} (Description of anatomical location.). This is the underlying object with id, value and extensions. The accessor "getDescription" gives direct access to the value */ public BodySite setDescriptionElement(StringType value) { this.description = value; return this; } /** * @return Description of anatomical location. 
*/ public String getDescription() { return this.description == null ? null : this.description.getValue(); } /** * @param value Description of anatomical location. */ public BodySite setDescription(String value) { if (Utilities.noString(value)) this.description = null; else { if (this.description == null) this.description = new StringType(); this.description.setValue(value); } return this; } /** * @return {@link #image} (Image or images used to identify a location.) */ public List<Attachment> getImage() { if (this.image == null) this.image = new ArrayList<Attachment>(); return this.image; } public boolean hasImage() { if (this.image == null) return false; for (Attachment item : this.image) if (!item.isEmpty()) return true; return false; } /** * @return {@link #image} (Image or images used to identify a location.) */ // syntactic sugar public Attachment addImage() { //3 Attachment t = new Attachment(); if (this.image == null) this.image = new ArrayList<Attachment>(); this.image.add(t); return t; } // syntactic sugar public BodySite addImage(Attachment t) { //3 if (t == null) return this; if (this.image == null) this.image = new ArrayList<Attachment>(); this.image.add(t); return this; } protected void listChildren(List<Property> childrenList) { super.listChildren(childrenList); childrenList.add(new Property("patient", "Reference(Patient)", "The person to which the body site belongs.", 0, java.lang.Integer.MAX_VALUE, patient)); childrenList.add(new Property("identifier", "Identifier", "Identifier for this instance of the anatomical location.", 0, java.lang.Integer.MAX_VALUE, identifier)); childrenList.add(new Property("code", "CodeableConcept", "Named anatomical location - ideally coded where possible.", 0, java.lang.Integer.MAX_VALUE, code)); childrenList.add(new Property("modifier", "CodeableConcept", "Modifier to refine the anatomical location. 
These include modifiers for laterality, relative location, directionality, number, and plane.", 0, java.lang.Integer.MAX_VALUE, modifier)); childrenList.add(new Property("description", "string", "Description of anatomical location.", 0, java.lang.Integer.MAX_VALUE, description)); childrenList.add(new Property("image", "Attachment", "Image or images used to identify a location.", 0, java.lang.Integer.MAX_VALUE, image)); } @Override public void setProperty(String name, Base value) throws FHIRException { if (name.equals("patient")) this.patient = castToReference(value); // Reference else if (name.equals("identifier")) this.getIdentifier().add(castToIdentifier(value)); else if (name.equals("code")) this.code = castToCodeableConcept(value); // CodeableConcept else if (name.equals("modifier")) this.getModifier().add(castToCodeableConcept(value)); else if (name.equals("description")) this.description = castToString(value); // StringType else if (name.equals("image")) this.getImage().add(castToAttachment(value)); else super.setProperty(name, value); } @Override public Base addChild(String name) throws FHIRException { if (name.equals("patient")) { this.patient = new Reference(); return this.patient; } else if (name.equals("identifier")) { return addIdentifier(); } else if (name.equals("code")) { this.code = new CodeableConcept(); return this.code; } else if (name.equals("modifier")) { return addModifier(); } else if (name.equals("description")) { throw new FHIRException("Cannot call addChild on a primitive type BodySite.description"); } else if (name.equals("image")) { return addImage(); } else return super.addChild(name); } public String fhirType() { return "BodySite"; } public BodySite copy() { BodySite dst = new BodySite(); copyValues(dst); dst.patient = patient == null ? null : patient.copy(); if (identifier != null) { dst.identifier = new ArrayList<Identifier>(); for (Identifier i : identifier) dst.identifier.add(i.copy()); }; dst.code = code == null ? 
null : code.copy(); if (modifier != null) { dst.modifier = new ArrayList<CodeableConcept>(); for (CodeableConcept i : modifier) dst.modifier.add(i.copy()); }; dst.description = description == null ? null : description.copy(); if (image != null) { dst.image = new ArrayList<Attachment>(); for (Attachment i : image) dst.image.add(i.copy()); }; return dst; } protected BodySite typedCopy() { return copy(); } @Override public boolean equalsDeep(Base other) { if (!super.equalsDeep(other)) return false; if (!(other instanceof BodySite)) return false; BodySite o = (BodySite) other; return compareDeep(patient, o.patient, true) && compareDeep(identifier, o.identifier, true) && compareDeep(code, o.code, true) && compareDeep(modifier, o.modifier, true) && compareDeep(description, o.description, true) && compareDeep(image, o.image, true) ; } @Override public boolean equalsShallow(Base other) { if (!super.equalsShallow(other)) return false; if (!(other instanceof BodySite)) return false; BodySite o = (BodySite) other; return compareValues(description, o.description, true); } public boolean isEmpty() { return super.isEmpty() && (patient == null || patient.isEmpty()) && (identifier == null || identifier.isEmpty()) && (code == null || code.isEmpty()) && (modifier == null || modifier.isEmpty()) && (description == null || description.isEmpty()) && (image == null || image.isEmpty()); } @Override public ResourceType getResourceType() { return ResourceType.BodySite; } @SearchParamDefinition(name="identifier", path="BodySite.identifier", description="Identifier for this instance of the anatomical location", type="token" ) public static final String SP_IDENTIFIER = "identifier"; @SearchParamDefinition(name="code", path="BodySite.code", description="Named anatomical location", type="token" ) public static final String SP_CODE = "code"; @SearchParamDefinition(name="patient", path="BodySite.patient", description="Patient to whom bodysite belongs", type="reference" ) public static final String 
SP_PATIENT = "patient"; }
apache-2.0
mintsoft/csharp-driver
src/Cassandra/Policies/ExponentialReconnectionPolicy.cs
4462
// // Copyright (C) DataStax Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // using System; namespace Cassandra { /// <summary> /// A reconnection policy that waits exponentially longer between each /// reconnection attempt (but keeps a constant delay once a maximum delay is /// reached). /// </summary> public class ExponentialReconnectionPolicy : IReconnectionPolicy { private readonly long _baseDelayMs; private readonly long _maxAttempts; private readonly long _maxDelayMs; /// <summary> /// Gets the base delay in milliseconds for this policy (e.g. the delay before the /// first reconnection attempt). /// </summary> public long BaseDelayMs { get { return _baseDelayMs; } } /// <summary> /// Gets the maximum delay in milliseconds between reconnection attempts for this /// policy. /// </summary> public long MaxDelayMs { get { return _maxDelayMs; } } /// <summary> /// Creates a reconnection policy waiting exponentially longer for each new /// attempt. /// </summary> /// <param name="baseDelayMs"> the base delay in milliseconds to use for the /// schedules created by this policy. 
</param> /// <param name="maxDelayMs"> the maximum delay to wait between two /// attempts.</param> public ExponentialReconnectionPolicy(long baseDelayMs, long maxDelayMs) { if (baseDelayMs < 0 || maxDelayMs < 0) throw new ArgumentOutOfRangeException("Invalid negative delay"); if (baseDelayMs == 0) throw new ArgumentOutOfRangeException("baseDelayMs must be strictly positive"); if (maxDelayMs < baseDelayMs) throw new ArgumentOutOfRangeException(string.Format("maxDelayMs (got {0}) cannot be smaller than baseDelayMs (got {1})", maxDelayMs, baseDelayMs)); _baseDelayMs = baseDelayMs; _maxDelayMs = maxDelayMs; // Maximum number of attempts after which we overflow (which is kind of theoretical anyway, you'll' // die of old age before reaching that but hey ...) int ceil = (baseDelayMs & (baseDelayMs - 1)) == 0 ? 0 : 1; _maxAttempts = 64 - LeadingZeros(long.MaxValue/baseDelayMs) - ceil; } /// <summary> /// A new schedule that used an exponentially growing delay between reconnection /// attempts. <p> For this schedule, reconnection attempt <c>i</c> will be /// tried <c>Math.min(2^(i-1) * BaseDelayMs, MaxDelayMs)</c> /// milliseconds after the previous one.</p> /// </summary> /// /// <returns>the newly created schedule.</returns> public IReconnectionSchedule NewSchedule() { return new ExponentialSchedule(this); } private static int LeadingZeros(long value) { int leadingZeros = 0; while (value != 0) { value = value >> 1; leadingZeros++; } return (64 - leadingZeros); } private class ExponentialSchedule : IReconnectionSchedule { private readonly ExponentialReconnectionPolicy _policy; private int _attempts; public ExponentialSchedule(ExponentialReconnectionPolicy policy) { _policy = policy; } public long NextDelayMs() { if (_attempts >= _policy._maxAttempts) return _policy._maxDelayMs; return Math.Min(_policy._baseDelayMs*(1L << _attempts++), _policy._maxDelayMs); } } } }
apache-2.0
AdeptJ/adeptj-modules
jaxrs/resteasy/src/main/java/com/adeptj/modules/jaxrs/resteasy/contextresolver/ValidatorContextResolver.java
2815
/* ############################################################################### # # # Copyright 2016, AdeptJ (http://www.adeptj.com) # # # # Licensed under the Apache License, Version 2.0 (the "License"); # # you may not use this file except in compliance with the License. # # You may obtain a copy of the License at # # # # http://www.apache.org/licenses/LICENSE-2.0 # # # # Unless required by applicable law or agreed to in writing, software # # distributed under the License is distributed on an "AS IS" BASIS, # # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # # See the License for the specific language governing permissions and # # limitations under the License. # # # ############################################################################### */ package com.adeptj.modules.jaxrs.resteasy.contextresolver; import com.adeptj.modules.commons.validator.ValidatorService; import org.jboss.resteasy.plugins.validation.GeneralValidatorImpl; import org.jboss.resteasy.spi.validation.GeneralValidator; import org.jetbrains.annotations.NotNull; import javax.annotation.Priority; import javax.ws.rs.ext.ContextResolver; import javax.ws.rs.ext.Provider; import static com.adeptj.modules.jaxrs.resteasy.contextresolver.ValidatorContextResolver.PRIORITY; /** * Priority based ContextResolver for RESTEasy's {@link GeneralValidator}. * * @author Rakesh.Kumar, AdeptJ */ @Priority(PRIORITY) @Provider public class ValidatorContextResolver implements ContextResolver<GeneralValidator> { static final int PRIORITY = 4500; // Cache or no cache the GeneralValidator instance? 
private final GeneralValidator validator; public ValidatorContextResolver(@NotNull ValidatorService validatorService) { this.validator = new GeneralValidatorImpl(validatorService.getValidatorFactory(), validatorService.isExecutableValidationEnabled(), validatorService.getDefaultValidatedExecutableTypes()); } @Override public GeneralValidator getContext(Class<?> type) { // Not doing the type check of passed Class object as RESTEasy passes null while processing resource methods // at bootstrap time. return this.validator; } }
apache-2.0
ReactiveSocket/reactivesocket-java
rsocket-core/src/test/java/io/rsocket/resume/ResumeExpBackoffTest.java
2293
/* * Copyright 2015-2019 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.rsocket.resume; import static org.junit.jupiter.api.Assertions.assertThrows; import java.time.Duration; import java.util.List; import org.assertj.core.api.Assertions; import org.junit.jupiter.api.Test; import reactor.core.publisher.Flux; public class ResumeExpBackoffTest { @Test void backOffSeries() { Duration firstBackoff = Duration.ofSeconds(1); Duration maxBackoff = Duration.ofSeconds(32); int factor = 2; ExponentialBackoffResumeStrategy strategy = new ExponentialBackoffResumeStrategy(firstBackoff, maxBackoff, factor); List<Duration> expected = Flux.just(1, 2, 4, 8, 16, 32, 32).map(Duration::ofSeconds).collectList().block(); List<Duration> actual = Flux.range(1, 7).map(v -> strategy.next()).collectList().block(); Assertions.assertThat(actual).isEqualTo(expected); } @Test void nullFirstBackoff() { assertThrows( NullPointerException.class, () -> { ExponentialBackoffResumeStrategy strategy = new ExponentialBackoffResumeStrategy(Duration.ofSeconds(1), null, 42); }); } @Test void nullMaxBackoff() { assertThrows( NullPointerException.class, () -> { ExponentialBackoffResumeStrategy strategy = new ExponentialBackoffResumeStrategy(null, Duration.ofSeconds(1), 42); }); } @Test void negativeFactor() { assertThrows( IllegalArgumentException.class, () -> { ExponentialBackoffResumeStrategy strategy = new ExponentialBackoffResumeStrategy( Duration.ofSeconds(1), 
Duration.ofSeconds(32), -1); }); } }
apache-2.0
cuba-platform/cuba
modules/web/src/com/haulmont/cuba/web/gui/components/WebScrollBoxLayout.java
16243
/* * Copyright (c) 2008-2016 Haulmont. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.haulmont.cuba.web.gui.components; import com.haulmont.bali.events.Subscription; import com.haulmont.bali.util.Preconditions; import com.haulmont.cuba.gui.ComponentsHelper; import com.haulmont.cuba.gui.components.*; import com.haulmont.cuba.gui.components.HtmlAttributes.CSS; import com.haulmont.cuba.gui.components.sys.FrameImplementation; import com.haulmont.cuba.web.widgets.CubaHorizontalActionsLayout; import com.haulmont.cuba.web.widgets.CubaScrollBoxLayout; import com.haulmont.cuba.web.widgets.CubaVerticalActionsLayout; import com.haulmont.cuba.web.widgets.HtmlAttributesExtension; import com.vaadin.event.LayoutEvents; import com.vaadin.event.ShortcutListener; import com.vaadin.server.Sizeable; import com.vaadin.shared.ui.MarginInfo; import com.vaadin.ui.AbstractOrderedLayout; import org.apache.commons.lang3.StringUtils; import javax.annotation.Nullable; import java.util.*; import java.util.function.Consumer; import java.util.stream.Stream; public class WebScrollBoxLayout extends WebAbstractComponent<CubaScrollBoxLayout> implements ScrollBoxLayout { protected static final String SCROLLBOX_CONTENT_STYLENAME = "c-scrollbox-content"; protected static final String SCROLLBOX_STYLENAME = "c-scrollbox"; protected List<Component> ownComponents = new ArrayList<>(); protected LayoutEvents.LayoutClickListener layoutClickListener; protected Orientation orientation = Orientation.VERTICAL; 
protected ScrollBarPolicy scrollBarPolicy = ScrollBarPolicy.VERTICAL; protected Map<ShortcutAction, ShortcutListener> shortcuts; public WebScrollBoxLayout() { component = new CubaScrollBoxLayout(); component.setWidth(100, Sizeable.Unit.PERCENTAGE); component.setPrimaryStyleName(SCROLLBOX_STYLENAME); CubaVerticalActionsLayout content = new CubaVerticalActionsLayout(); content.setWidth(100, Sizeable.Unit.PERCENTAGE); content.setStyleName(SCROLLBOX_CONTENT_STYLENAME); component.addComponent(content); getContent().setMargin(false); } protected AbstractOrderedLayout getContent() { return (AbstractOrderedLayout) component.getComponent(0); } @Override public void add(Component childComponent) { add(childComponent, ownComponents.size()); } @Override public void add(Component childComponent, int index) { if (childComponent.getParent() != null && childComponent.getParent() != this) { throw new IllegalStateException("Component already has parent"); } AbstractOrderedLayout newContent = null; if (orientation == Orientation.VERTICAL && !(getContent() instanceof CubaVerticalActionsLayout)) { newContent = new CubaVerticalActionsLayout(); newContent.setWidth(100, Sizeable.Unit.PERCENTAGE); } else if (orientation == Orientation.HORIZONTAL && !(getContent() instanceof CubaHorizontalActionsLayout)) { newContent = new CubaHorizontalActionsLayout(); } if (newContent != null) { newContent.setMargin((getContent()).getMargin()); newContent.setSpacing((getContent()).isSpacing()); newContent.setStyleName(SCROLLBOX_CONTENT_STYLENAME); com.vaadin.ui.Component oldContent = component.getComponent(0); newContent.setWidth(oldContent.getWidth(), oldContent.getWidthUnits()); newContent.setHeight(oldContent.getHeight(), oldContent.getHeightUnits()); component.removeAllComponents(); component.addComponent(newContent); applyScrollBarsPolicy(scrollBarPolicy); } if (ownComponents.contains(childComponent)) { int existingIndex = 
getContent().getComponentIndex(WebComponentsHelper.getComposition(childComponent)); if (index > existingIndex) { index--; } remove(childComponent); } com.vaadin.ui.Component vComponent = WebComponentsHelper.getComposition(childComponent); getContent().addComponent(vComponent, index); getContent().setComponentAlignment(vComponent, WebWrapperUtils.toVaadinAlignment(childComponent.getAlignment())); if (frame != null) { if (childComponent instanceof BelongToFrame && ((BelongToFrame) childComponent).getFrame() == null) { ((BelongToFrame) childComponent).setFrame(frame); } else { ((FrameImplementation) frame).registerComponent(childComponent); } } if (index == ownComponents.size()) { ownComponents.add(childComponent); } else { ownComponents.add(index, childComponent); } childComponent.setParent(this); } @Override public int indexOf(Component component) { return ownComponents.indexOf(component); } @Nullable @Override public Component getComponent(int index) { return ownComponents.get(index); } @Override public void setStyleName(String styleName) { super.setStyleName(styleName); component.addStyleName(SCROLLBOX_STYLENAME); } @Override public String getStyleName() { return StringUtils.normalizeSpace(super.getStyleName().replace(SCROLLBOX_STYLENAME, "")); } @Override public void remove(Component childComponent) { getContent().removeComponent(childComponent.unwrapComposition(com.vaadin.ui.Component.class)); ownComponents.remove(childComponent); childComponent.setParent(null); } @Override public void removeAll() { getContent().removeAllComponents(); Component[] components = ownComponents.toArray(new Component[0]); ownComponents.clear(); for (Component childComponent : components) { childComponent.setParent(null); } } @Override public void setFrame(Frame frame) { super.setFrame(frame); if (frame != null) { for (Component childComponent : ownComponents) { if (childComponent instanceof BelongToFrame && ((BelongToFrame) childComponent).getFrame() == null) { ((BelongToFrame) 
childComponent).setFrame(frame); } } } } @Override public Component getOwnComponent(String id) { Preconditions.checkNotNullArgument(id); return ownComponents.stream() .filter(component -> Objects.equals(id, component.getId())) .findFirst() .orElse(null); } @Nullable @Override public Component getComponent(String id) { return ComponentsHelper.getComponent(this, id); } @Override public Collection<Component> getOwnComponents() { return Collections.unmodifiableCollection(ownComponents); } @Override public Stream<Component> getOwnComponentsStream() { return ownComponents.stream(); } @Override public Collection<Component> getComponents() { return ComponentsHelper.getComponents(this); } @Override public Orientation getOrientation() { return orientation; } @Override public void setOrientation(Orientation orientation) { if (!Objects.equals(orientation, this.orientation)) { if (!ownComponents.isEmpty()) { throw new IllegalStateException("Unable to change scrollBox orientation after adding components to it"); } this.orientation = orientation; } } @Override public ScrollBarPolicy getScrollBarPolicy() { return scrollBarPolicy; } @Override public void setScrollBarPolicy(ScrollBarPolicy scrollBarPolicy) { if (this.scrollBarPolicy != scrollBarPolicy) { applyScrollBarsPolicy(scrollBarPolicy); } this.scrollBarPolicy = scrollBarPolicy; } @Override public void setContentWidth(String width) { getContent().setWidth(width); } @Override public float getContentWidth() { return getContent().getWidth(); } @Override public SizeUnit getContentWidthSizeUnit() { return WebWrapperUtils.toSizeUnit(getContent().getWidthUnits()); } @Override public void setContentHeight(String height) { getContent().setHeight(height); } @Override public float getContentHeight() { return getContent().getHeight(); } @Override public SizeUnit getContentHeightSizeUnit() { return WebWrapperUtils.toSizeUnit(getContent().getHeightUnits()); } @Override public void setContentMinWidth(String minWidth) { 
HtmlAttributesExtension.get(getContent()) .setCssProperty(CSS.MIN_WIDTH, minWidth); } @Override public String getContentMinWidth() { return HtmlAttributesExtension.get(getContent()) .getCssProperty(CSS.MIN_WIDTH); } @Override public void setContentMaxWidth(String maxWidth) { HtmlAttributesExtension.get(getContent()) .setCssProperty(CSS.MAX_WIDTH, maxWidth); } @Override public String getContentMaxWidth() { return HtmlAttributesExtension.get(getContent()) .getCssProperty(CSS.MAX_WIDTH); } @Override public void setContentMinHeight(String minHeight) { HtmlAttributesExtension.get(getContent()) .setCssProperty(CSS.MIN_HEIGHT, minHeight); } @Override public String getContentMinHeight() { return HtmlAttributesExtension.get(getContent()) .getCssProperty(CSS.MIN_HEIGHT); } @Override public void setContentMaxHeight(String maxHeight) { HtmlAttributesExtension.get(getContent()) .setCssProperty(CSS.MAX_HEIGHT, maxHeight); } @Override public String getContentMaxHeight() { return HtmlAttributesExtension.get(getContent()) .getCssProperty(CSS.MAX_HEIGHT); } @Override public Subscription addLayoutClickListener(Consumer<LayoutClickNotifier.LayoutClickEvent> listener) { if (layoutClickListener == null) { layoutClickListener = event -> { // scrollBoxLayout always has vertical or horizontal layout as first child element // choose vertical or horizontal layout as a parent to find the correct child com.vaadin.ui.Component child = findChildComponent(event.getClickedComponent()); Component childComponent = findExistingComponent(child); Component clickedComponent = findExistingComponent(event.getClickedComponent()); MouseEventDetails mouseEventDetails = WebWrapperUtils.toMouseEventDetails(event); LayoutClickNotifier.LayoutClickEvent layoutClickEvent = new LayoutClickNotifier.LayoutClickEvent(this, childComponent, clickedComponent, mouseEventDetails); publish(LayoutClickNotifier.LayoutClickEvent.class, layoutClickEvent); }; component.addLayoutClickListener(layoutClickListener); } 
getEventHub().subscribe(LayoutClickNotifier.LayoutClickEvent.class, listener); return () -> removeLayoutClickListener(listener); } protected com.vaadin.ui.Component findChildComponent(com.vaadin.ui.Component vComponent) { while (vComponent != null && vComponent.getParent() != component.getComponent(0)) { vComponent = vComponent.getParent(); } return vComponent; } protected Component findExistingComponent(com.vaadin.ui.Component vComponent) { for (Component component : getComponents()) { if (component.unwrapComposition(com.vaadin.ui.Component.class) == vComponent) { return component; } } return null; } @Override public void removeLayoutClickListener(Consumer<LayoutClickEvent> listener) { unsubscribe(LayoutClickEvent.class, listener); if (!hasSubscriptions(LayoutClickEvent.class)) { component.removeLayoutClickListener(layoutClickListener); layoutClickListener = null; } } protected void applyScrollBarsPolicy(ScrollBarPolicy scrollBarPolicy) { switch (scrollBarPolicy) { case VERTICAL: getContent().setHeightUndefined(); getContent().setWidth(100, Sizeable.Unit.PERCENTAGE); break; case HORIZONTAL: getContent().setHeight(100, Sizeable.Unit.PERCENTAGE); getContent().setWidthUndefined(); break; case BOTH: getContent().setSizeUndefined(); break; case NONE: getContent().setSizeFull(); break; } } @Override public void setMargin(com.haulmont.cuba.gui.components.MarginInfo marginInfo) { MarginInfo vMargin = new MarginInfo(marginInfo.hasTop(), marginInfo.hasRight(), marginInfo.hasBottom(), marginInfo.hasLeft()); component.setMargin(vMargin); } @Override public com.haulmont.cuba.gui.components.MarginInfo getMargin() { MarginInfo vMargin = getContent().getMargin(); return new com.haulmont.cuba.gui.components.MarginInfo(vMargin.hasTop(), vMargin.hasRight(), vMargin.hasBottom(), vMargin.hasLeft()); } @Override public void setSpacing(boolean enabled) { getContent().setSpacing(enabled); } @Override public boolean getSpacing() { return getContent().isSpacing(); } @Override public 
boolean isRequiredIndicatorVisible() { return component.isRequiredIndicatorVisible(); } @Override public void setRequiredIndicatorVisible(boolean visible) { component.setRequiredIndicatorVisible(visible); } @Override public void addShortcutAction(ShortcutAction action) { KeyCombination keyCombination = action.getShortcutCombination(); com.vaadin.event.ShortcutListener shortcut = new ContainerShortcutActionWrapper(action, this, keyCombination); component.addShortcutListener(shortcut); if (shortcuts == null) { shortcuts = new HashMap<>(4); } shortcuts.put(action, shortcut); } @Override public void removeShortcutAction(ShortcutAction action) { if (shortcuts != null) { component.removeShortcutListener(shortcuts.remove(action)); if (shortcuts.isEmpty()) { shortcuts = null; } } } @Override public int getScrollLeft() { return component.getScrollLeft(); } @Override public void setScrollLeft(int scrollLeft) { component.setScrollLeft(scrollLeft); } @Override public int getScrollTop() { return component.getScrollTop(); } @Override public void setScrollTop(int scrollTop) { component.setScrollTop(scrollTop); } @Override public void attached() { super.attached(); for (Component component : ownComponents) { ((AttachNotifier) component).attached(); } } @Override public void detached() { super.detached(); for (Component component : ownComponents) { ((AttachNotifier) component).detached(); } } }
apache-2.0
LinuxTek/kona-app-model
src/main/java/com/linuxtek/kona/app/core/service/KApiVersionService.java
573
/* * Copyright (C) 2013 LINUXTEK, Inc. All Rights Reserved. */ package com.linuxtek.kona.app.core.service; import com.linuxtek.kona.app.core.entity.KApiVersion; import com.linuxtek.kona.data.service.KDataService; import com.linuxtek.kona.remote.service.KService; /** * The client side stub for the RPC service. */ public interface KApiVersionService<A extends KApiVersion> extends KService, KDataService<A> { public static final String SERVICE_PATH = "rpc/kona/ApiVersionService"; public A fetchByName(String name); public A fetchLatest(); }
apache-2.0
tsegismont/vertx-monitor
src/main/java/io/vertx/ext/hawkular/impl/NetClientMetricsSupplier.java
2840
/* * Copyright 2015 Red Hat, Inc. * * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * and Apache License v2.0 which accompanies this distribution. * * The Eclipse Public License is available at * http://www.eclipse.org/legal/epl-v10.html * * The Apache License v2.0 is available at * http://www.opensource.org/licenses/apache2.0.php * * You may elect to redistribute this code under either of these licenses. */ package io.vertx.ext.hawkular.impl; import io.vertx.core.net.SocketAddress; import io.vertx.ext.hawkular.impl.NetClientConnectionsMeasurements.Snapshot; import org.hawkular.metrics.client.common.MetricType; import org.hawkular.metrics.client.common.SingleMetric; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.CopyOnWriteArraySet; import static org.hawkular.metrics.client.common.MetricType.*; /** * Aggregates values from {@link NetClientMetricsImpl} instances and exposes metrics for collection. * * @author Thomas Segismont */ public class NetClientMetricsSupplier implements MetricSupplier { private final String baseName; private final Set<NetClientMetricsImpl> metricsSet = new CopyOnWriteArraySet<>(); public NetClientMetricsSupplier(String prefix) { baseName = prefix + (prefix.isEmpty() ? 
"" : ".") + "vertx.net.client."; } @Override public List<SingleMetric> collect() { long timestamp = System.currentTimeMillis(); Map<SocketAddress, Snapshot> values = new HashMap<>(); for (NetClientMetricsImpl netClientMetrics : metricsSet) { netClientMetrics.getMeasurementsSnapshot().forEach((address, snapshot) -> { values.merge(address, snapshot, Snapshot::merge); }); } List<SingleMetric> res = new ArrayList<>(); values.forEach((address, snapshot) -> { String addressId = address.host() + ":" + address.port(); res.add(metric(addressId + ".connections", timestamp, snapshot.getConnections(), GAUGE)); res.add(metric(addressId + ".bytesReceived", timestamp, snapshot.getBytesReceived(), COUNTER)); res.add(metric(addressId + ".bytesSent", timestamp, snapshot.getBytesSent(), COUNTER)); res.add(metric(addressId + ".errorCount", timestamp, snapshot.getErrorCount(), COUNTER)); }); return res; } private SingleMetric metric(String name, long timestamp, Number value, MetricType type) { return new SingleMetric(baseName + name, timestamp, value.doubleValue(), type); } public void register(NetClientMetricsImpl netClientMetrics) { metricsSet.add(netClientMetrics); } public void unregister(NetClientMetricsImpl netClientMetrics) { metricsSet.remove(netClientMetrics); } }
apache-2.0
Adgillmore/CaveMan
src/uk/co/atgsoft/caveman/wine/record/tasting/TastingEntryImpl.java
2079
/* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ package uk.co.atgsoft.caveman.wine.record.tasting; import java.time.LocalDate; import uk.co.atgsoft.caveman.wine.Wine; import uk.co.atgsoft.caveman.wine.record.DateEntry; import uk.co.atgsoft.caveman.wine.record.DateEntryImpl; import uk.co.atgsoft.caveman.wine.record.WineEntry; import uk.co.atgsoft.caveman.wine.record.WineEntryImpl; /** * * @author adam */ public class TastingEntryImpl implements WineEntry, TastingEntry, DateEntry { private final WineEntry mWine; private final DateEntry mDate; private final String mReviewer; private final String mLocation; private final float mRating; private final String mNotes; public TastingEntryImpl(final String id, final Wine wine, final LocalDate date, final String reviewer, final String location, final float rating, final String notes) { mWine = new WineEntryImpl(id, wine); mDate = new DateEntryImpl(date); mReviewer = reviewer; mLocation = location; mRating = rating; mNotes = notes; } @Override public String getReviewer() { return mReviewer; } @Override public String getLocation() { return mLocation; } @Override public float getRating() { return mRating; } @Override public String getNotes() { return mNotes; } @Override public void setDate(LocalDate date) { mDate.setDate(date); } @Override public LocalDate getDate() { return mDate.getDate(); } @Override public void setId(String id) { mWine.setId(id); } @Override public String getId() { return mWine.getId(); } @Override public void setWine(Wine wine) { mWine.setWine(wine); } @Override public Wine getWine() { return mWine.getWine(); } }
apache-2.0
HuangLS/neo4j
community/kernel/src/test/java/org/neo4j/kernel/impl/core/JumpingIdGeneratorFactory.java
4323
/* * Copyright (c) 2002-2018 "Neo Technology," * Network Engine for Objects in Lund AB [http://neotechnology.com] * * This file is part of Neo4j. * * Neo4j is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package org.neo4j.kernel.impl.core; import java.io.File; import java.util.EnumMap; import java.util.Map; import java.util.concurrent.atomic.AtomicLong; import org.neo4j.kernel.IdGeneratorFactory; import org.neo4j.kernel.IdType; import org.neo4j.kernel.impl.store.id.IdGenerator; import org.neo4j.kernel.impl.store.id.IdGeneratorImpl; import org.neo4j.kernel.impl.store.id.IdRange; import org.neo4j.test.impl.EphemeralIdGenerator; public class JumpingIdGeneratorFactory implements IdGeneratorFactory { private final Map<IdType,IdGenerator> generators = new EnumMap<>( IdType.class ); private final IdGenerator forTheRest = new EphemeralIdGenerator( null, null ); private final int sizePerJump; public JumpingIdGeneratorFactory( int sizePerJump ) { this.sizePerJump = sizePerJump; } @Override public IdGenerator open( File filename, IdType idType, long highId ) { return get( idType ); } @Override public IdGenerator open( File fileName, int grabSize, IdType idType, long highId ) { return get( idType ); } @Override public IdGenerator get( IdType idType ) { if ( idType == IdType.NODE || idType == IdType.RELATIONSHIP || idType == IdType.PROPERTY || idType == IdType.STRING_BLOCK || idType == IdType.ARRAY_BLOCK ) { 
IdGenerator generator = generators.get( idType ); if ( generator == null ) { generator = new JumpingIdGenerator(); generators.put( idType, generator ); } return generator; } return forTheRest; } @Override public void create( File fileName, long highId, boolean throwIfFileExists ) { } private class JumpingIdGenerator implements IdGenerator { private final AtomicLong nextId = new AtomicLong(); private int leftToNextJump = sizePerJump/2; private long highBits = 0; @Override public long nextId() { long result = tryNextId(); if ( --leftToNextJump == 0 ) { leftToNextJump = sizePerJump; nextId.set( (0xFFFFFFFFL | (highBits++ << 32)) - sizePerJump/2 + 1 ); } return result; } private long tryNextId() { long result = nextId.getAndIncrement(); if ( result == IdGeneratorImpl.INTEGER_MINUS_ONE ) { result = nextId.getAndIncrement(); leftToNextJump--; } return result; } @Override public IdRange nextIdBatch( int size ) { throw new UnsupportedOperationException(); } @Override public void setHighId( long id ) { nextId.set( id ); } @Override public long getHighId() { return nextId.get(); } @Override public void freeId( long id ) { } @Override public void close() { } @Override public long getNumberOfIdsInUse() { return nextId.get(); } @Override public long getDefragCount() { return 0; } @Override public void delete() { } @Override public long getHighestPossibleIdInUse() { return getHighId()-1; } } }
apache-2.0
wdmir/521266750_qq_com
CSharpServer/DdzServer/DdzServer/net/silverfoxserver/exthandler/DdzRCClientHandler.cs
2394
/* * SilverFoxServer: massive multiplayer game server for Flash, ... * VERSION:3.0 * PUBLISH DATE:2015-9-2 * GITHUB:github.com/wdmir/521266750_qq_com * UPDATES AND DOCUMENTATION AT: http://www.silverfoxserver.net * COPYRIGHT 2009-2015 SilverFoxServer.NET. All rights reserved. * MAIL:521266750@qq.com */ using System; using System.Collections.Generic; using System.Text; // using System.Net.Sockets; using System.Net; // using System.Xml; // using net.silverfoxserver.core; using net.silverfoxserver.core.service; using net.silverfoxserver.core.socket; // using net.silverfoxserver.core.log; using net.silverfoxserver.core.protocol; using net.silverfoxserver.core.array; using DdzServer.net.silverfoxserver.extlogic; using net.silverfoxserver.core.logic; using net.silverfoxserver.core.util; // using SuperSocket.SocketBase; namespace DdzServer.net.silverfoxserver.handler { public class DdzRCClientHandler : IoHandlerAdapter { /// <summary> /// /// </summary> private RCServerAction _rcServerAction = new RCServerAction(); public RCServerAction RCServerAction() { return _rcServerAction; } public DdzRCClientHandler() { } override public void messageReceived(object session, Object message) { string packeBufStr = string.Empty; try { Socket s = (Socket)session; Byte[] packeBuf = (Byte[])message; XmlDocument doc = new XmlDocument(); // packeBufStr = Encoding.UTF8.GetString(packeBuf); doc.LoadXml(packeBufStr); // string rcCAction = doc.DocumentElement.ChildNodes[0].Attributes["action"].Value; string strIpPort = s.RemoteEndPoint.ToString(); //create item SessionMessage item = new SessionMessage(null, doc, true, strIpPort); //save DdzLPU.msgList.Opp(QueueMethod.Add, item); // Log.WriteStrByServerRecv(rcCAction,SR.getRecordServer_displayName()); } catch (Exception exd) { Log.WriteStrByException("DdzRCClientHandler", "messageReceived", exd.Message); } } } }
apache-2.0
schisamo/omnibus-chef-utensils
config/software/chef.rb
789
# # Copyright:: Copyright (c) Seth Chisamore # License:: Apache License, Version 2.0 # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # name "chef" version "11.2.0" dependencies ["ruby","rubygems"] build do gem "install #{name} -n #{install_dir}/bin --no-rdoc --no-ri -v #{version}" end
apache-2.0
lanshiqin/cloud-project
micro-services/producer-service/src/main/java/com/lanshiqin/producerservice/ProducerServiceApplication.java
499
package com.lanshiqin.producerservice; import org.springframework.boot.SpringApplication; import org.springframework.boot.autoconfigure.SpringBootApplication; import org.springframework.cloud.client.discovery.EnableDiscoveryClient; /** * 服务提供者 * 配置服务发现和服务熔断 */ @SpringBootApplication @EnableDiscoveryClient public class ProducerServiceApplication { public static void main(String[] args) { SpringApplication.run(ProducerServiceApplication.class, args); } }
apache-2.0
darth-willy/mobibench
MobiBenchWebService/src/main/java/wvw/mobibench/service/servlet/msg/ResponseMessage.java
857
/** * Copyright 2016 William Van Woensel Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. * * * @author wvw * */ package wvw.mobibench.service.servlet.msg; public class ResponseMessage { private ResponseTypes type; public ResponseMessage(ResponseTypes type) { this.type = type; } public ResponseTypes getType() { return type; } }
apache-2.0
azkaban/azkaban
azkaban-web-server/src/test/java/azkaban/webapp/servlet/ExecutionFlowViewTest.java
2277
package azkaban.webapp.servlet; import static org.junit.Assert.assertTrue; import azkaban.fixture.VelocityContextTestUtil; import azkaban.fixture.VelocityTemplateTestUtil; import azkaban.utils.ExternalLink; import java.util.Arrays; import org.apache.velocity.VelocityContext; import org.junit.Test; /** * Test flow execution page. */ public class ExecutionFlowViewTest { private static final String EXTERNAL_ANALYZER_ELEMENT1 = "<li><a id=\"analyzerButtontopic1\" href=\"http://topic1.linkedin.com/\" " + "class=\"btn btn-info btn-sm btn-external\" type=\"button\" target=\"_blank\" " + "title=\"Analyze execution in Label1\">Label1</a></li>"; private static final String EXTERNAL_ANALYZER_ELEMENT2 = "<li><a id=\"analyzerButtontopic2\" href=\"http://topic2.linkedin.com/\" " + "class=\"btn btn-info btn-sm btn-external disabled\" type=\"button\"" + "target=\"_blank\" " + "title=\"Execution is not analyzable in Label2 at the moment.\">Label2</a></li>"; /** * Test aims to check that the external analyzer button is displayed in the page. * * @throws Exception the exception */ @Test public void testExternalAnalyzerButton() throws Exception { final VelocityContext context = VelocityContextTestUtil.getInstance(); ExternalLink externalLink1 = new ExternalLink( "topic1", "Label1", "http://topic1.linkedin.com/", true); context.put("externalAnalyzers", Arrays.asList(externalLink1)); String result = VelocityTemplateTestUtil.renderTemplate("executingflowpage", context); assertTrue(VelocityTemplateTestUtil. ignoreCaseContains(result, EXTERNAL_ANALYZER_ELEMENT1)); ExternalLink externalLink2 = new ExternalLink( "topic2", "Label2", "http://topic2.linkedin.com/", false); context.put("externalAnalyzers", Arrays.asList(externalLink1, externalLink2)); result = VelocityTemplateTestUtil.renderTemplate("executingflowpage", context); assertTrue(VelocityTemplateTestUtil. ignoreCaseContains(result, EXTERNAL_ANALYZER_ELEMENT1)); assertTrue(VelocityTemplateTestUtil. 
ignoreCaseContains(result, EXTERNAL_ANALYZER_ELEMENT2)); } }
apache-2.0
tectronics/maduraconfiguration
test/nz/co/senanque/madura/configuration/ConfigNamespaceTest.java
3040
/******************************************************************************* * Copyright 2010 Prometheus Consulting * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. ******************************************************************************/ package nz.co.senanque.madura.configuration; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import java.net.URL; import java.util.List; import org.apache.commons.configuration.reloading.ManagedReloadingStrategy; import org.junit.Test; import org.springframework.context.support.ClassPathXmlApplicationContext; public class ConfigNamespaceTest{ protected String[] getConfigLocations() { return new String[] { "/nz/co/senanque/madura/configuration/ConfigNamespace-spring.xml" }; } @Test public void testSpringConfig() throws Exception{ ClassPathXmlApplicationContext applicationContext = new ClassPathXmlApplicationContext(getConfigLocations()); Object hs1 = applicationContext.getBean("component.sourcedir"); assertTrue("mysourcedir".equals(hs1.toString())); Object hs2 = applicationContext.getBean("component1.sourcedir"); System.out.println(hs2.toString()); assertTrue("mysourcedir".equals(hs2.toString())); URL url = (URL)applicationContext.getBean("myurl"); assertTrue("http://localhost:8080/jjj".equals(url.toString())); System.out.println(url.toString()); MyTestBean myTestBean2 = (MyTestBean)applicationContext.getBean("m"); assertTrue("http://localhost:8080/jjj".equals(myTestBean2.getUrl().toString())); // 
Configuration configuration = (Configuration)applicationContext.getBean("configuration"); Object doc = applicationContext.getBean("test1"); MyTestBean myTestBean = (MyTestBean)applicationContext.getBean("test2"); assertTrue(myTestBean.getA().equals("XYZ")); myTestBean.setA("12345"); assertTrue(myTestBean.getA().equals("12345")); List sampleList = (List)applicationContext.getBean("sampleList"); assertEquals(2,sampleList.size()); // applicationContext.refresh(); ManagedReloadingStrategy reloadingStrategy = (ManagedReloadingStrategy)applicationContext.getBean("reloadingStrategy"); reloadingStrategy.refresh(); MyTestBean myTestBean1 = (MyTestBean)applicationContext.getBean("test2"); assertTrue(myTestBean1.getA().equals("XYZ")); } }
apache-2.0
FasterXML/jackson-modules-java8
datetime/src/test/java/com/fasterxml/jackson/datatype/jsr310/TestDecimalUtils.java
4742
package com.fasterxml.jackson.datatype.jsr310; import org.junit.Test; import java.math.BigDecimal; import static org.junit.Assert.*; public class TestDecimalUtils extends ModuleTestBase { @Test public void testToDecimal01() { String decimal = DecimalUtils.toDecimal(0, 0); assertEquals("The returned decimal is not correct.", NO_NANOSECS_SER, decimal); decimal = DecimalUtils.toDecimal(15, 72); assertEquals("The returned decimal is not correct.", "15.000000072", decimal); decimal = DecimalUtils.toDecimal(19827342231L, 192837465); assertEquals("The returned decimal is not correct.", "19827342231.192837465", decimal); decimal = DecimalUtils.toDecimal(19827342231L, 0); assertEquals("The returned decimal is not correct.", "19827342231"+NO_NANOSECS_SUFFIX, decimal); decimal = DecimalUtils.toDecimal(19827342231L, 999888000); assertEquals("The returned decimal is not correct.", "19827342231.999888000", decimal); decimal = DecimalUtils.toDecimal(-22704862, 599000000); assertEquals("The returned decimal is not correct.", "-22704862.599000000", decimal); } @SuppressWarnings("deprecation") private void checkExtractNanos(long expectedSeconds, int expectedNanos, BigDecimal decimal) { long seconds = decimal.longValue(); assertEquals("The second part is not correct.", expectedSeconds, seconds); int nanoseconds = DecimalUtils.extractNanosecondDecimal(decimal, seconds); assertEquals("The nanosecond part is not correct.", expectedNanos, nanoseconds); } @Test public void testExtractNanosecondDecimal01() { BigDecimal value = new BigDecimal("0"); checkExtractNanos(0L, 0, value); } @Test public void testExtractNanosecondDecimal02() { BigDecimal value = new BigDecimal("15.000000072"); checkExtractNanos(15L, 72, value); } @Test public void testExtractNanosecondDecimal03() { BigDecimal value = new BigDecimal("15.72"); checkExtractNanos(15L, 720000000, value); } @Test public void testExtractNanosecondDecimal04() { BigDecimal value = new BigDecimal("19827342231.192837465"); 
checkExtractNanos(19827342231L, 192837465, value); } @Test public void testExtractNanosecondDecimal05() { BigDecimal value = new BigDecimal("19827342231"); checkExtractNanos(19827342231L, 0, value); } @Test public void testExtractNanosecondDecimal06() { BigDecimal value = new BigDecimal("19827342231.999999999"); checkExtractNanos(19827342231L, 999999999, value); } private void checkExtractSecondsAndNanos(long expectedSeconds, int expectedNanos, BigDecimal decimal) { DecimalUtils.extractSecondsAndNanos(decimal, (Long s, Integer ns) -> { assertEquals("The second part is not correct.", expectedSeconds, s.longValue()); assertEquals("The nanosecond part is not correct.", expectedNanos, ns.intValue()); return null; }); } @Test public void testExtractSecondsAndNanos01() { BigDecimal value = new BigDecimal("0"); checkExtractSecondsAndNanos(0L, 0, value); } @Test public void testExtractSecondsAndNanos02() { BigDecimal value = new BigDecimal("15.000000072"); checkExtractSecondsAndNanos(15L, 72, value); } @Test public void testExtractSecondsAndNanos03() { BigDecimal value = new BigDecimal("15.72"); checkExtractSecondsAndNanos(15L, 720000000, value); } @Test public void testExtractSecondsAndNanos04() { BigDecimal value = new BigDecimal("19827342231.192837465"); checkExtractSecondsAndNanos(19827342231L, 192837465, value); } @Test public void testExtractSecondsAndNanos05() { BigDecimal value = new BigDecimal("19827342231"); checkExtractSecondsAndNanos(19827342231L, 0, value); } @Test public void testExtractSecondsAndNanos06() { BigDecimal value = new BigDecimal("19827342231.999999999"); checkExtractSecondsAndNanos(19827342231L, 999999999, value); } @Test public void testExtractSecondsAndNanosFromNegativeBigDecimal() { BigDecimal value = new BigDecimal("-22704862.599000000"); checkExtractSecondsAndNanos(-22704862L, 599000000, value); } @Test(timeout = 100) public void testExtractSecondsAndNanos07() { BigDecimal value = new BigDecimal("1e10000000"); checkExtractSecondsAndNanos(0L, 
0, value); } }
apache-2.0
shun634501730/java_source_cn
src_en/org/omg/PortableInterceptor/AdapterNameHelper.java
2017
package org.omg.PortableInterceptor; /** * org/omg/PortableInterceptor/AdapterNameHelper.java . * Generated by the IDL-to-Java compiler (portable), version "3.2" * from c:/re/workspace/8-2-build-windows-amd64-cygwin/jdk8u91/7017/corba/src/share/classes/org/omg/PortableInterceptor/Interceptors.idl * Friday, May 20, 2016 5:44:10 PM PDT */ /** The name of an object adapter. This is unique only within * the scope of the ORB that created the object adapter. */ abstract public class AdapterNameHelper { private static String _id = "IDL:omg.org/PortableInterceptor/AdapterName:1.0"; public static void insert (org.omg.CORBA.Any a, String[] that) { org.omg.CORBA.portable.OutputStream out = a.create_output_stream (); a.type (type ()); write (out, that); a.read_value (out.create_input_stream (), type ()); } public static String[] extract (org.omg.CORBA.Any a) { return read (a.create_input_stream ()); } private static org.omg.CORBA.TypeCode __typeCode = null; synchronized public static org.omg.CORBA.TypeCode type () { if (__typeCode == null) { __typeCode = org.omg.CORBA.ORB.init ().create_string_tc (0); __typeCode = org.omg.CORBA.ORB.init ().create_sequence_tc (0, __typeCode); __typeCode = org.omg.CORBA.ORB.init ().create_alias_tc (org.omg.CORBA.StringSeqHelper.id (), "StringSeq", __typeCode); __typeCode = org.omg.CORBA.ORB.init ().create_alias_tc (org.omg.PortableInterceptor.AdapterNameHelper.id (), "AdapterName", __typeCode); } return __typeCode; } public static String id () { return _id; } public static String[] read (org.omg.CORBA.portable.InputStream istream) { String value[] = null; value = org.omg.CORBA.StringSeqHelper.read (istream); return value; } public static void write (org.omg.CORBA.portable.OutputStream ostream, String[] value) { org.omg.CORBA.StringSeqHelper.write (ostream, value); } }
apache-2.0
fredji97/samza
samza-sql/src/main/java/org/apache/samza/sql/testutil/SamzaSqlQueryParser.java
6240
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.samza.sql.testutil; import java.sql.Connection; import java.sql.DriverManager; import java.sql.SQLException; import java.util.ArrayList; import java.util.List; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.apache.calcite.config.Lex; import org.apache.calcite.jdbc.CalciteConnection; import org.apache.calcite.plan.Contexts; import org.apache.calcite.plan.ConventionTraitDef; import org.apache.calcite.plan.RelTraitDef; import org.apache.calcite.rel.RelCollationTraitDef; import org.apache.calcite.schema.SchemaPlus; import org.apache.calcite.sql.SqlAsOperator; import org.apache.calcite.sql.SqlBasicCall; import org.apache.calcite.sql.SqlIdentifier; import org.apache.calcite.sql.SqlInsert; import org.apache.calcite.sql.SqlJoin; import org.apache.calcite.sql.SqlNode; import org.apache.calcite.sql.SqlSelect; import org.apache.calcite.sql.SqlUnnestOperator; import org.apache.calcite.sql.fun.SqlStdOperatorTable; import org.apache.calcite.sql.parser.SqlParseException; import org.apache.calcite.sql.parser.SqlParser; import org.apache.calcite.tools.FrameworkConfig; import org.apache.calcite.tools.Frameworks; import org.apache.calcite.tools.Planner; import 
org.apache.samza.SamzaException; /** * Utility class that is used to parse the Samza sql query to figure out the sources, sink etc.. */ public class SamzaSqlQueryParser { private SamzaSqlQueryParser() { } public static class QueryInfo { private final List<String> sources; private String selectQuery; private String sink; public QueryInfo(String selectQuery, List<String> sources, String sink) { this.selectQuery = selectQuery; this.sink = sink; this.sources = sources; } public List<String> getSources() { return sources; } public String getSelectQuery() { return selectQuery; } public String getSink() { return sink; } } public static QueryInfo parseQuery(String sql) { Pattern insertIntoSqlPattern = Pattern.compile("insert into (.*) (select .* from (.*))", Pattern.CASE_INSENSITIVE); Matcher m = insertIntoSqlPattern.matcher(sql); if (!m.matches()) { throw new SamzaException("Invalid query format"); } Planner planner = createPlanner(); SqlNode sqlNode; try { sqlNode = planner.parse(sql); } catch (SqlParseException e) { throw new SamzaException(e); } String sink; String selectQuery; ArrayList<String> sources; if (sqlNode instanceof SqlInsert) { SqlInsert sqlInsert = ((SqlInsert) sqlNode); sink = sqlInsert.getTargetTable().toString(); if (sqlInsert.getSource() instanceof SqlSelect) { SqlSelect sqlSelect = (SqlSelect) sqlInsert.getSource(); selectQuery = m.group(2); sources = getSourcesFromSelectQuery(sqlSelect); } else { throw new SamzaException("Sql query is not of the expected format"); } } else { throw new SamzaException("Sql query is not of the expected format"); } return new QueryInfo(selectQuery, sources, sink); } private static Planner createPlanner() { Connection connection; SchemaPlus rootSchema; try { connection = DriverManager.getConnection("jdbc:calcite:"); CalciteConnection calciteConnection = connection.unwrap(CalciteConnection.class); rootSchema = calciteConnection.getRootSchema(); } catch (SQLException e) { throw new SamzaException(e); } final 
List<RelTraitDef> traitDefs = new ArrayList<>(); traitDefs.add(ConventionTraitDef.INSTANCE); traitDefs.add(RelCollationTraitDef.INSTANCE); FrameworkConfig frameworkConfig = Frameworks.newConfigBuilder() .parserConfig(SqlParser.configBuilder().setLex(Lex.JAVA).build()) .defaultSchema(rootSchema) .operatorTable(SqlStdOperatorTable.instance()) .traitDefs(traitDefs) .context(Contexts.EMPTY_CONTEXT) .costFactory(null) .build(); return Frameworks.getPlanner(frameworkConfig); } private static ArrayList<String> getSourcesFromSelectQuery(SqlSelect sqlSelect) { ArrayList<String> sources = new ArrayList<>(); getSource(sqlSelect.getFrom(), sources); if (sources.size() < 1) { throw new SamzaException("Unsupported query " + sqlSelect); } return sources; } private static void getSource(SqlNode node, ArrayList<String> sourceList) { if (node instanceof SqlJoin) { SqlJoin joinNode = (SqlJoin) node; ArrayList<String> sourcesLeft = new ArrayList<>(); ArrayList<String> sourcesRight = new ArrayList<>(); getSource(joinNode.getLeft(), sourcesLeft); getSource(joinNode.getRight(), sourcesRight); sourceList.addAll(sourcesLeft); sourceList.addAll(sourcesRight); } else if (node instanceof SqlIdentifier) { sourceList.add(node.toString()); } else if (node instanceof SqlBasicCall) { SqlBasicCall basicCall = ((SqlBasicCall) node); if (basicCall.getOperator() instanceof SqlAsOperator) { getSource(basicCall.operand(0), sourceList); } else if (basicCall.getOperator() instanceof SqlUnnestOperator && basicCall.operand(0) instanceof SqlSelect) { sourceList.addAll(getSourcesFromSelectQuery(basicCall.operand(0))); return; } } else if (node instanceof SqlSelect) { getSource(((SqlSelect) node).getFrom(), sourceList); } } }
apache-2.0
yandex/graphouse
src/main/java/ru/yandex/market/graphouse/config/GraphouseConfig.java
1899
package ru.yandex.market.graphouse.config; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Import; import org.springframework.context.annotation.Primary; import org.springframework.context.annotation.PropertySource; import org.springframework.context.support.PropertySourcesPlaceholderConfigurer; import org.springframework.jdbc.core.JdbcTemplate; import ru.yandex.market.graphouse.AutoHideService; import ru.yandex.market.graphouse.monitoring.Monitoring; import ru.yandex.market.graphouse.search.MetricSearch; /** * @author Vlad Vinogradov <a href="mailto:vladvin@yandex-team.ru"></a> on 12.01.17 */ @Configuration @PropertySource(value = "classpath:graphouse-default.properties") @PropertySource( value = { "file:${app.home}/conf/graphouse.properties", "file:${app.home}/conf/local-application.properties", "classpath:local-application.properties" }, ignoreResourceNotFound = true ) @Import({DbConfig.class, MetricsConfig.class, ServerConfig.class, StatisticsConfig.class, StatisticsCountersConfig.class}) public class GraphouseConfig { @Autowired private MetricSearch metricSearch; @Autowired private JdbcTemplate clickHouseJdbcTemplateAutohide; @Bean @Primary public Monitoring monitoring() { return new Monitoring(); } @Bean public Monitoring ping() { return new Monitoring(); } @Bean(initMethod = "startService") public AutoHideService autoHideService() { return new AutoHideService(clickHouseJdbcTemplateAutohide, metricSearch); } @Bean public static PropertySourcesPlaceholderConfigurer propertyConfigInDev() { return new PropertySourcesPlaceholderConfigurer(); } }
apache-2.0
google/syzkaller
prog/parse_test.go
3455
// Copyright 2015 syzkaller project authors. All rights reserved. // Use of this source code is governed by Apache 2 LICENSE that can be found in the LICENSE file. package prog import ( "testing" ) func TestParseSingle(t *testing.T) { t.Parallel() target, err := GetTarget("linux", "amd64") if err != nil { t.Fatal(err) } const execLog = `getpid() gettid() ` entries := target.ParseLog([]byte(execLog)) if len(entries) != 1 { t.Fatalf("got %v programs, want 1", len(entries)) } ent := entries[0] if ent.Start != 0 { t.Fatalf("start offset %v, want 0", ent.Start) } if ent.End != len(execLog) { t.Fatalf("end offset %v, want %v", ent.End, len(execLog)) } if ent.Proc != 0 { t.Fatalf("proc %v, want 0", ent.Proc) } if ent.P.RequiredFeatures().FaultInjection { t.Fatalf("fault injection enabled") } want := "getpid-gettid" got := ent.P.String() if got != want { t.Fatalf("bad program: %s, want %s", got, want) } } func TestParseMulti(t *testing.T) { t.Parallel() target, err := GetTarget("linux", "amd64") if err != nil { t.Fatal(err) } entries := target.ParseLog([]byte(execLog)) if len(entries) != 5 { for i, ent := range entries { t.Logf("program #%v: %v\n", i, ent.P) } t.Fatalf("got %v programs, want 5", len(entries)) } off := 0 for _, ent := range entries { if off > ent.Start || ent.Start > ent.End || ent.End > len(execLog) { t.Fatalf("bad offsets") } } if entries[0].Proc != 0 || entries[1].Proc != 1 || entries[2].Proc != 2 || entries[3].Proc != 33 || entries[4].Proc != 9 { t.Fatalf("bad procs") } for i, ent := range entries { if ent.P.RequiredFeatures().FaultInjection { t.Fatalf("prog %v has fault injection enabled", i) } } if s := entries[0].P.String(); s != "getpid-gettid" { t.Fatalf("bad program 0: %s", s) } if s := entries[1].P.String(); s != "getpid-gettid-munlockall" { t.Fatalf("bad program 0: %s", s) } if s := entries[2].P.String(); s != "getpid-gettid" { t.Fatalf("bad program 1: %s", s) } if s := entries[3].P.String(); s != "gettid-getpid" { t.Fatalf("bad program 2: %s", 
s) } if s := entries[4].P.String(); s != "munlockall" { t.Fatalf("bad program 3: %s", s) } } const execLog = ` getpid() gettid() 2015/12/21 12:18:05 executing program 1: getpid() [ 2351.935478] Modules linked in: gettid() munlockall() 2015/12/21 12:18:05 executing program 2: [ 2351.935478] Modules linked in: getpid() gettid() 2015/12/21 12:18:05 executing program 33: gettid() getpid() [ 2351.935478] Modules linked in: 2015/12/21 12:18:05 executing program 9: munlockall() ` func TestParseFault(t *testing.T) { t.Parallel() target, err := GetTarget("linux", "amd64") if err != nil { t.Fatal(err) } const execLog = `2015/12/21 12:18:05 executing program 1 (fault-call:1 fault-nth:55): gettid() getpid() ` entries := target.ParseLog([]byte(execLog)) if len(entries) != 1 { t.Fatalf("got %v programs, want 1", len(entries)) } ent := entries[0] faultCall := ent.P.Calls[1] normalCall := ent.P.Calls[0] if faultCall.Props.FailNth != 56 { // We want 56 (not 55!) because the number is now not 0-based. t.Fatalf("fault nth on the 2nd call: got %v, want 56", faultCall.Props.FailNth) } if normalCall.Props.FailNth != 0 { t.Fatalf("fault nth on the 1st call: got %v, want 0", normalCall.Props.FailNth) } want := "gettid-getpid" got := ent.P.String() if got != want { t.Fatalf("bad program: %s, want %s", got, want) } }
apache-2.0
xdegenne/alien4cloud
alien4cloud-ui/src/main/webapp/scripts/topology/services/workflow_services.js
1697
// define the rest api elements to work with workflow edition. define(function (require) { 'use strict'; var modules = require('modules'); modules.get('a4c-topology-editor', ['ngResource']).factory('workflowServices', ['$resource', function($resource) { var workflowsResource = $resource('rest/topologies/:topologyId/workflows'); var workflowResource = $resource('rest/topologies/:topologyId/workflows/:workflowName'); var activitiesResource = $resource('rest/topologies/:topologyId/workflows/:workflowName/activities'); var edgeResource = $resource('rest/topologies/:topologyId/workflows/:workflowName/edges/:from/:to'); var stepResource = $resource('rest/topologies/:topologyId/workflows/:workflowName/steps/:stepId'); var fromResource = $resource('rest/topologies/:topologyId/workflows/:workflowName/steps/:stepId/connectFrom'); var toResource = $resource('rest/topologies/:topologyId/workflows/:workflowName/steps/:stepId/connectTo'); var swapResource = $resource('rest/topologies/:topologyId/workflows/:workflowName/steps/:stepId/swap'); return { 'workflows': { 'create': workflowsResource.save, 'remove': workflowResource.remove, 'rename': workflowResource.save }, 'edge': { 'remove': edgeResource.remove }, 'step': { 'remove': stepResource.remove, 'rename': stepResource.save, 'connectFrom': fromResource.save, 'connectTo': toResource.save, 'swap': swapResource.save }, 'activity': { 'add': activitiesResource.save } }; } ]); }); // define
apache-2.0
jonvestal/open-kilda
src-gui/src/main/java/org/usermanagement/dao/entity/PermissionEntity.java
3991
/* Copyright 2018 Telstra Open Source * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.usermanagement.dao.entity; import org.openkilda.entity.BaseEntity; import java.io.Serializable; import java.util.HashSet; import java.util.Set; import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.FetchType; import javax.persistence.GeneratedValue; import javax.persistence.GenerationType; import javax.persistence.Id; import javax.persistence.JoinColumn; import javax.persistence.JoinTable; import javax.persistence.ManyToMany; import javax.persistence.ManyToOne; import javax.persistence.Table; @Entity @Table(name = "kilda_permission") public class PermissionEntity extends BaseEntity implements Serializable { private static final long serialVersionUID = 1L; @Id @Column(name = "permission_id", nullable = false) @GeneratedValue(strategy = GenerationType.IDENTITY) private Long permissionId; @Column(name = "permission", nullable = false) private String name; @Column(name = "description", nullable = true) private String description; @Column(name = "is_editable", nullable = false) private Boolean isEditable; @Column(name = "is_admin_permission", nullable = false) private Boolean isAdminPermission; @ManyToOne @JoinColumn(name = "status_id", nullable = false) private StatusEntity statusEntity; /** The roles. 
*/ @ManyToMany(fetch = FetchType.LAZY) @JoinTable(name = "role_permission", joinColumns = { @JoinColumn(name = "permission_id") }, inverseJoinColumns = { @JoinColumn(name = "role_id") }) private Set<RoleEntity> roles = new HashSet<RoleEntity>(); /* (non-Javadoc) * @see org.openkilda.entity.BaseEntity#id() */ @Override public Long id() { return permissionId; } public Long getPermissionId() { return permissionId; } public void setPermissionId(final Long permissionId) { this.permissionId = permissionId; } public String getName() { return name; } public void setName(final String name) { this.name = name; } public StatusEntity getStatusEntity() { return statusEntity; } public void setStatusEntity(final StatusEntity statusEntity) { this.statusEntity = statusEntity; } public String getDescription() { return description; } public void setDescription(final String description) { this.description = description; } public Boolean getIsEditable() { return isEditable; } public void setIsEditable(final Boolean isEditable) { this.isEditable = isEditable; } public Boolean getIsAdminPermission() { return isAdminPermission; } public void setIsAdminPermission(final Boolean isAdminPermission) { this.isAdminPermission = isAdminPermission; } public Set<RoleEntity> getRoles() { return roles; } public void setRoles(Set<RoleEntity> roles) { this.roles = roles; } @Override public String toString() { return "PermissionEntity [permissionId=" + permissionId + ", name=" + name + ", description=" + description + ", statusEntity=" + statusEntity + "]"; } }
apache-2.0
Modwatch/API
transform.js
1212
const { initializeModlistCollection, getUsersList } = require("./dist/server/database"); (async () => { const modlist = await initializeModlistCollection(); const results = (await modlist .find({ timestamp: { $type: "string" } }, { sort: [ ["timestamp", -1] ] }) .project({ username: 1, timestamp: 1 }) .toArray()) .map(modl => ({ ...modl, newTimestamp: new Date(modl.timestamp) })) // .slice(0, 1); for(const modl of results) { console.log(`updating ${modl.username} from ${modl.timestamp} to ${modl.newTimestamp}...`); try { await modlist.updateOne({ username: modl.username }, { $set: { timestamp: modl.newTimestamp // oldTimestamp: modl.timestamp } }) console.log("It worked!"); } catch (e) { console.log("it failed!"); console.log(e); } } console.log(`updated ${results.length} modlists`); process.exit(0); })();
apache-2.0
eneiasbrumjr/cogib
resources/views/home.blade.php
10392
@extends('layouts.dashboard') @section('page_heading','Painel de Controle') @section('section') <!-- /.row --> <div class="col-sm-12"> <div class="row"> <div class="col-lg-3 col-md-6"> <div class="panel panel-primary"> <div class="panel-heading"> <div class="row"> <div class="col-xs-3"> <i class="fa fa-users fa-5x"></i> </div> <div class="col-xs-9 text-right"> <div class="huge">{{ $clientes->count() }}</div> <div>Clientes ativos!</div> </div> </div> </div> <a href="{{ url ('Cliente') }}"> <div class="panel-footer"> <span class="pull-left">Ver em detalhes</span> <span class="pull-right"><i class="fa fa-arrow-circle-right"></i></span> <div class="clearfix"></div> </div> </a> </div> </div> <div class="col-lg-3 col-md-6"> <div class="panel panel-green"> <div class="panel-heading"> <div class="row"> <div class="col-xs-3"> <i class="fa fa-plus fa-5x"></i> </div> <div class="col-xs-9 text-right"> <div class="huge">{{ $adesoes }}</div> <div>Adesões!</div> </div> </div> </div> <a href="{{ url ('Adesao') }}"> <div class="panel-footer"> <span class="pull-left">Ver em detalhes</span> <span class="pull-right"><i class="fa fa-arrow-circle-right"></i></span> <div class="clearfix"></div> </div> </a> </div> </div> <div class="col-lg-3 col-md-6"> <div class="panel panel-red"> <div class="panel-heading"> <div class="row"> <div class="col-xs-3"> <i class="fa fa-heartbeat fa-5x"></i> </div> <div class="col-xs-9 text-right"> <div class="huge">{{ $vidas }}</div> <div>Vidas!</div> </div> </div> </div> <a href="{{ url ('Vidas') }}"> <div class="panel-footer"> <span class="pull-left">Ver em detalhes</span> <span class="pull-right"><i class="fa fa-arrow-circle-right"></i></span> <div class="clearfix"></div> </div> </a> </div> </div> <div class="col-lg-3 col-md-6"> <div class="panel panel-yellow"> <div class="panel-heading"> <div class="row"> <div class="col-xs-3"> <i class="fa fa-exclamation fa-5x"></i> </div> <div class="col-xs-9 text-right"> <div class="huge">{{ $avisocount }}</div> 
<div>Avisos!</div> </div> </div> </div> <a href="#avisos"> <div class="panel-footer"> <span class="pull-left">Ver em detalhes</span> <span class="pull-right"><i class="fa fa-arrow-circle-right"></i></span> <div class="clearfix"></div> </div> </a> </div> </div> </div> <!-- /.row --> <div class="row"> <div class="col-lg-3 col-md-6"> <div class="panel panel-info"> <div class="panel-heading"> <div class="row"> <div class="col-xs-3"> <i class="fa fa-exclamation fa-5x"></i> </div> <div class="col-xs-9 text-right"> <div class="huge">{{ $clientesoff->count() }}</div> <div>Clientes sem adesão</div> </div> </div> </div> <a href="{{ url ('Cliente') }}"> <div class="panel-footer"> <span class="pull-left">Ver em detalhes</span> <span class="pull-right"><i class="fa fa-arrow-circle-right"></i></span> <div class="clearfix"></div> </div> </a> </div> </div> <div class="col-lg-3 col-md-6"> <div class="panel panel-info"> <div class="panel-heading"> <div class="row"> <div class="col-xs-3"> <i class="fa fa-times fa-5x"></i> </div> <div class="col-xs-9 text-right"> <div class="huge">{{ $adesoesoff->count() }}</div> <div>Adesões canceladas</div> </div> </div> </div> <a href="{{ url ('Adesao') }}"> <div class="panel-footer"> <span class="pull-left">Ver em detalhes</span> <span class="pull-right"><i class="fa fa-arrow-circle-right"></i></span> <div class="clearfix"></div> </div> </a> </div> </div> <div class="col-lg-6"> <!--@section ('chart1_panel_title','Line Chart') @section ('chart1_panel_body') @include('widgets.charts.clinechart') @endsection @include('widgets.panel', array('class'=>'primary', 'header'=>true, 'as'=>'chart1'))--> @section ('panel1_panel_title', 'Avisos') @section ('panel1_panel_body') <div class="list-group" id="avisos"> @foreach ($avisos as $aviso) <div class="list-group-item"> <i class="fa fa-warning fa-fw"></i> {{ $aviso->user->name }}: {{ $aviso->msg }} </div> @endforeach </div> <!-- /.list-group --> <!-- /.panel-body --> @endsection @include('widgets.panel', 
array('class'=>'warning', 'header'=>true, 'as'=>'panel1')) <!-- /.panel --> </div> </div> <div class="row"> <div class="col-lg-6"> @section ('panel2_panel_title', 'Atividade Recente') @section ('panel2_panel_body') <!-- /.panel --> <ul class="timeline"> @forelse ($activities as $key => $activity) @if ($key % 2 == 0) <li> @else <li class="timeline-inverted"> @endif <div class="timeline-badge info"><i class="fa fa-book"></i> </div> <div class="timeline-panel"> <div class="timeline-heading"> <h4 class="timeline-title">{{ $activity->user->name }}</h4> <p><small class="text-muted"><i class="fa fa-clock-o"></i> {{ $activity->created_at->diffForHumans() }}</small> </p> </div> <div class="timeline-body"> <p>{{ $activity->text }}</p> </div> </div> </li> @empty <li></li> @endforelse </ul> <!-- /.panel-body --> <!-- /.panel --> @endsection @include('widgets.panel', array('class'=>'info', 'header'=>true, 'as'=>'panel2')) </div> <!-- /.col-lg-8 --> <!-- /.col-lg-4 --> @stop
apache-2.0
gdbelvin/trillian
testonly/internal/hammer/hammer.go
21485
// Copyright 2017 Google LLC. All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package hammer import ( "bytes" "context" "fmt" "math/rand" "strconv" "strings" "sync" "time" "github.com/golang/glog" "github.com/google/trillian" "github.com/google/trillian/client" "github.com/google/trillian/monitoring" "github.com/google/trillian/testonly" ) const ( defaultEmitSeconds = 10 // How far beyond current revision to request for invalid requests invalidStretch = int64(10000) // rev=-1 is used when requesting the latest revision latestRevision = int64(-1) // Format specifier for generating leaf values valueFormat = "value-%09d" minValueLen = len("value-") + 9 // prefix + 9 digits ) var ( // Metrics are all per-map (label "mapid"), and per-entrypoint (label "ep"). once sync.Once reqs monitoring.Counter // mapid, ep => value errs monitoring.Counter // mapid, ep => value rsps monitoring.Counter // mapid, ep => value rspLatency monitoring.Histogram // mapid, ep => distribution-of-values invalidReqs monitoring.Counter // mapid, ep => value ) // setupMetrics initializes all the exported metrics. 
func setupMetrics(mf monitoring.MetricFactory) { reqs = mf.NewCounter("reqs", "Number of valid requests sent", "mapid", "ep") errs = mf.NewCounter("errs", "Number of error responses received for valid requests", "mapid", "ep") rsps = mf.NewCounter("rsps", "Number of responses received for valid requests", "mapid", "ep") rspLatency = mf.NewHistogram("rsp_latency", "Latency of responses received for valid requests in seconds", "mapid", "ep") invalidReqs = mf.NewCounter("invalid_reqs", "Number of deliberately-invalid requests sent", "mapid", "ep") } // errSkip indicates that a test operation should be skipped. type errSkip struct{} func (e errSkip) Error() string { return "test operation skipped" } // MapEntrypointName identifies a Map RPC entrypoint type MapEntrypointName string // Constants for entrypoint names, as exposed in statistics/logging. const ( GetLeavesName = MapEntrypointName("GetLeaves") GetLeavesRevName = MapEntrypointName("GetLeavesRev") GetSMRName = MapEntrypointName("GetSMR") GetSMRRevName = MapEntrypointName("GetSMRRev") SetLeavesName = MapEntrypointName("SetLeaves") // TODO(mhutchinson): rename to WriteLeaves. ) // Read-only map entry points. var roMapEntrypoints = []MapEntrypointName{GetLeavesName, GetLeavesRevName, GetSMRName, GetSMRRevName} // All map entry points. var mapEntrypoints = append(roMapEntrypoints, SetLeavesName) // Choice is a readable representation of a choice about how to perform a hammering operation. type Choice string // Constants for both valid and invalid operation choices. const ( ExistingKey = Choice("ExistingKey") NonexistentKey = Choice("NonexistentKey") MalformedKey = Choice("MalformedKey") DuplicateKey = Choice("DuplicateKey") RevTooBig = Choice("RevTooBig") RevIsNegative = Choice("RevIsNegative") CreateLeaf = Choice("CreateLeaf") UpdateLeaf = Choice("UpdateLeaf") DeleteLeaf = Choice("DeleteLeaf") ) // MapBias indicates the bias for selecting different map operations. 
type MapBias struct { Bias map[MapEntrypointName]int total int // InvalidChance gives the odds of performing an invalid operation, as the N in 1-in-N. InvalidChance map[MapEntrypointName]int } // choose randomly picks an operation to perform according to the biases. func (hb *MapBias) choose(r *rand.Rand) MapEntrypointName { if hb.total == 0 { for _, ep := range mapEntrypoints { hb.total += hb.Bias[ep] } } which := r.Intn(hb.total) for _, ep := range mapEntrypoints { which -= hb.Bias[ep] if which < 0 { return ep } } panic("random choice out of range") } // invalid randomly chooses whether an operation should be invalid. func (hb *MapBias) invalid(ep MapEntrypointName, r *rand.Rand) bool { chance := hb.InvalidChance[ep] if chance <= 0 { return false } return r.Intn(chance) == 0 } // MapConfig provides configuration for a stress/load test. type MapConfig struct { MapID int64 // 0 to use an ephemeral tree MetricFactory monitoring.MetricFactory Client trillian.TrillianMapClient Write trillian.TrillianMapWriteClient Admin trillian.TrillianAdminClient RandSource rand.Source EPBias MapBias LeafSize, ExtraSize uint MinLeavesR, MaxLeavesR int MinLeavesW, MaxLeavesW int Operations uint64 EmitInterval time.Duration RetryErrors bool OperationDeadline time.Duration // NumCheckers indicates how many separate inclusion checker goroutines // to run. Note that the behaviour of these checkers is not governed by // RandSource. NumCheckers int // KeepFailedTree indicates whether ephemeral trees should be left intact // after a failed hammer run. KeepFailedTree bool } // String conforms with Stringer for MapConfig. func (c MapConfig) String() string { return fmt.Sprintf("mapID:%d biases:{%v} #operations:%d emit every:%v retryErrors? %t", c.MapID, c.EPBias, c.Operations, c.EmitInterval, c.RetryErrors) } // HitMap performs load/stress operations according to given config. 
func HitMap(ctx context.Context, cfg MapConfig) error { var firstErr error if cfg.MapID == 0 { // No mapID provided, so create an ephemeral tree to test against. var err error cfg.MapID, err = makeNewMap(ctx, cfg.Admin, cfg.Client) if err != nil { return fmt.Errorf("failed to create ephemeral tree: %v", err) } glog.Infof("testing against ephemeral tree %d", cfg.MapID) defer func() { if firstErr != nil && cfg.KeepFailedTree { glog.Errorf("note: leaving ephemeral tree %d intact after error %v", cfg.MapID, firstErr) return } if err := destroyMap(ctx, cfg.Admin, cfg.MapID); err != nil { glog.Errorf("failed to destroy map with treeID %d: %v", cfg.MapID, err) } }() } s, err := newHammerState(ctx, &cfg) if err != nil { return err } ticker := time.NewTicker(cfg.EmitInterval) go func(c <-chan time.Time) { for range c { glog.Info(s.String()) } }(ticker.C) var wg sync.WaitGroup // Anything that arrives on errs terminates all processing (but there // may be more errors queued up behind it). errs := make(chan error, cfg.NumCheckers+1) // The done channel is used to signal all of the goroutines to // terminate. done := make(chan struct{}) for i := 0; i < cfg.NumCheckers; i++ { wg.Add(1) go func(i int) { defer wg.Done() w := newReadWorker(s, i) glog.Infof("%d: start checker %d", s.cfg.MapID, i) err := w.run(ctx, done) if err != nil { errs <- err } glog.Infof("%d: checker %d done with %v", s.cfg.MapID, i, err) }(i) } wg.Add(1) go func() { defer wg.Done() w := newWriteWorker(s) glog.Infof("%d: start main goroutine", cfg.MapID) count, err := w.run(ctx, done) errs <- err // may be nil for the main goroutine completion glog.Infof("%d: performed %d operations on map", cfg.MapID, count) }() // Wait for first error, completion (which shows up as a nil error) or // external cancellation. 
select { case <-ctx.Done(): glog.Infof("%d: context canceled", cfg.MapID) case e := <-errs: firstErr = e if firstErr != nil { glog.Infof("%d: first error encountered: %v", cfg.MapID, e) } } close(done) ticker.Stop() wg.Wait() close(errs) for e := range errs { if e != nil { glog.Infof("%d: error encountered: %v", cfg.MapID, e) } } // Emit final statistics glog.Info(s.String()) return firstErr } // mapWorker represents a single entity in the Verifiable Map ecosystem. // The worker may be a read-only client, or a writer which adds new entries to // the map. Each worker should be as independent as possible (i.e. share little // to no state), though through well defined interfaces this guideline may be // ignored, which will allow effectively an in-memory gossip network to develop // between workers, which makes the validation more significant. // // Each worker has its own PRNG, which makes the sequence of operations that it // performs deterministic. type mapWorker struct { prng *rand.Rand mapID int64 label string bias MapBias // Each worker can have its own customized map bias. retryErrors bool operationDeadline time.Duration } func newWorker(cfg *MapConfig, bias MapBias, prng *rand.Rand) *mapWorker { return &mapWorker{ prng: prng, mapID: cfg.MapID, label: strconv.FormatInt(cfg.MapID, 10), bias: bias, retryErrors: cfg.RetryErrors, operationDeadline: cfg.OperationDeadline, } } func (w *mapWorker) retryOp(ctx context.Context, fn mapOperationFn, opName string) error { defer func(start time.Time) { rspLatency.Observe(time.Since(start).Seconds(), w.label, opName) }(time.Now()) deadline := time.Now().Add(w.operationDeadline) seed := w.prng.Int63() done := false var firstErr error for !done { // Always re-create the same per-operation rand.Rand so any retries are exactly the same. 
prng := rand.New(rand.NewSource(seed)) reqs.Inc(w.label, opName) err := fn(ctx, prng) switch err.(type) { case nil: rsps.Inc(w.label, opName) if firstErr != nil { glog.Warningf("%d: retry of op %v succeeded, previous error: %v", w.mapID, opName, firstErr) } firstErr = nil done = true case errSkip: firstErr = nil done = true case testonly.ErrInvariant: // Ensure invariant failures are not ignorable. They indicate a design assumption // being broken or incorrect, so must be seen. firstErr = err done = true default: errs.Inc(w.label, opName) if firstErr == nil { firstErr = err } if w.retryErrors { glog.Warningf("%d: op %v failed (will retry): %v", w.mapID, opName, err) } else { done = true } } if time.Now().After(deadline) { if firstErr == nil { // If there was no other error, we've probably hit the deadline - make sure we bubble that up. firstErr = ctx.Err() } glog.Warningf("%d: gave up on operation %v after %v, returning first err %v", w.mapID, opName, w.operationDeadline, firstErr) done = true } } return firstErr } // readWorker performs read-only operations on a fixed map. type readWorker struct { *mapWorker validReadOps *validReadOps invalidReadOps *invalidReadOps } func newReadWorker(s *hammerState, idx int) *readWorker { readBias := MapBias{ Bias: make(map[MapEntrypointName]int), InvalidChance: make(map[MapEntrypointName]int), } for _, ep := range roMapEntrypoints { readBias.Bias[ep] = s.cfg.EPBias.Bias[ep] readBias.InvalidChance[ep] = s.cfg.EPBias.InvalidChance[ep] } return &readWorker{ mapWorker: newWorker(s.cfg, readBias, rand.New(rand.NewSource(int64(idx)))), validReadOps: s.validReadOps, invalidReadOps: s.invalidReadOps, } } // run continuously performs read-only operations against the map until the // done channel is closed, or an error is encountered. 
func (w *readWorker) run(ctx context.Context, done <-chan struct{}) error { for { select { case <-done: return nil default: } if err := w.readOnce(ctx); err != nil { if _, ok := err.(errSkip); ok { continue } return err } } } // TODO(mhutchinson): resolve duplication between this and retryOneOp. func (w *readWorker) readOnce(ctx context.Context) error { ep := w.bias.choose(w.prng) if w.bias.invalid(ep, w.prng) { glog.V(3).Infof("%d: perform invalid %s operation", w.mapID, ep) invalidReqs.Inc(w.label, string(ep)) op, err := getReadOp(ep, w.invalidReadOps) if err != nil { return err } return op(ctx, w.prng) } op, err := getReadOp(ep, w.validReadOps) if err != nil { return err } glog.V(3).Infof("%d: perform %s operation", w.mapID, ep) return w.retryOp(ctx, op, string(ep)) } // writeWorker performs mutation operations on a fixed map. type writeWorker struct { *mapWorker head *testonly.MapContents operations uint64 // TODO(mhutchinson): Remove hammerState from here - it allows access to global info // which makes reasoning about the behaviour difficult. s *hammerState // Counters for generating unique keys/values. keyIdx int valueIdx int } func newWriteWorker(s *hammerState) *writeWorker { return &writeWorker{ mapWorker: newWorker(s.cfg, s.cfg.EPBias, rand.New(s.cfg.RandSource)), operations: s.cfg.Operations, s: s, } } // run continuously performs mutation operations on the map until the done channel is // closed, an error is encountered, or the maximum number of operations have been performed. 
func (w *writeWorker) run(ctx context.Context, done <-chan struct{}) (uint64, error) { count := uint64(0) for ; count < w.operations; count++ { select { case <-done: return count, nil default: } if err := w.writeOnce(ctx); err != nil { return count, err } } return count, nil } func (w *writeWorker) writeOnce(ctx context.Context) error { ep := SetLeavesName if w.bias.invalid(ep, w.prng) { glog.V(3).Infof("%d: perform invalid %s operation", w.mapID, ep) invalidReqs.Inc(w.label, string(ep)) return w.setLeavesInvalid(ctx, w.prng) } glog.V(3).Infof("%d: perform %s operation", w.mapID, ep) return w.retryOp(ctx, w.setLeaves, string(ep)) } func (w *writeWorker) setLeaves(ctx context.Context, prng *rand.Rand) error { choices := []Choice{CreateLeaf, UpdateLeaf, DeleteLeaf} cfg := w.s.cfg n := pickIntInRange(cfg.MinLeavesW, cfg.MaxLeavesW, prng) if n == 0 { n = 1 } leaves := make([]*trillian.MapLeaf, 0, n) rev := int64(0) if w.head != nil { rev = w.head.Rev } leafloop: for i := 0; i < n; i++ { choice := choices[prng.Intn(len(choices))] if w.head.Empty() { choice = CreateLeaf } switch choice { case CreateLeaf: key, value := w.nextKey(), w.nextValue() leaves = append(leaves, &trillian.MapLeaf{ Index: testonly.TransparentHash(key), LeafValue: value, ExtraData: testonly.ExtraDataForValue(value, cfg.ExtraSize), }) glog.V(3).Infof("%d: %v: data[%q]=%q", w.mapID, choice, key, string(value)) case UpdateLeaf, DeleteLeaf: key := w.head.PickKey(prng) // Not allowed to have the same key more than once in the same request for _, leaf := range leaves { if bytes.Equal(leaf.Index, key) { // Go back to the beginning of the loop and choose again. 
i-- continue leafloop } } var value, extra []byte if choice == UpdateLeaf { value = w.nextValue() extra = testonly.ExtraDataForValue(value, cfg.ExtraSize) } leaves = append(leaves, &trillian.MapLeaf{Index: key, LeafValue: value, ExtraData: extra}) glog.V(3).Infof("%d: %v: data[%q]=%q (extra=%q)", w.mapID, choice, dehash(key), string(value), string(extra)) } } writeRev := uint64(rev + 1) req := trillian.WriteMapLeavesRequest{ MapId: w.mapID, Leaves: leaves, Metadata: metadataForRev(writeRev), ExpectRevision: int64(writeRev), } if err := w.s.sharedState.proposeLeaves(writeRev, leaves); err != nil { return err } if _, err := cfg.Write.WriteLeaves(ctx, &req); err != nil { return fmt.Errorf("failed to WriteLeaves(count=%d): %v", len(leaves), err) } glog.V(2).Infof("%d: set %d leaves, rev=%d", w.mapID, len(leaves), writeRev) w.head = w.head.UpdatedWith(writeRev, leaves) return nil } func (w *writeWorker) setLeavesInvalid(ctx context.Context, prng *rand.Rand) error { choices := []Choice{MalformedKey, DuplicateKey} var leaves []*trillian.MapLeaf value := []byte("value-for-invalid-req") choice := choices[prng.Intn(len(choices))] if w.head.Empty() { choice = MalformedKey } switch choice { case MalformedKey: key := testonly.TransparentHash("..invalid-size") leaves = append(leaves, &trillian.MapLeaf{Index: key[2:], LeafValue: value}) case DuplicateKey: key := w.head.PickKey(prng) leaves = append(leaves, &trillian.MapLeaf{Index: key, LeafValue: value}) leaves = append(leaves, &trillian.MapLeaf{Index: key, LeafValue: value}) } req := &trillian.WriteMapLeavesRequest{MapId: w.mapID, Leaves: leaves} rsp, err := w.s.cfg.Write.WriteLeaves(ctx, req) if err == nil { return fmt.Errorf("unexpected success: set-leaves(%v: %+v): %+v", choice, req, rsp.Revision) } glog.V(2).Infof("%d: expected failure: set-leaves(%v: %+v): %+v", w.mapID, choice, req, rsp) return nil } func (w *writeWorker) nextKey() string { w.keyIdx++ return fmt.Sprintf("key-%08d", w.keyIdx) } func (w *writeWorker) 
nextValue() []byte { w.valueIdx++ result := make([]byte, w.s.cfg.LeafSize) copy(result, fmt.Sprintf(valueFormat, w.valueIdx)) return result } // hammerState tracks the operations that have been performed during a test run. type hammerState struct { cfg *MapConfig validReadOps *validReadOps invalidReadOps *invalidReadOps sharedState *sharedState start time.Time } func newHammerState(ctx context.Context, cfg *MapConfig) (*hammerState, error) { tree, err := cfg.Admin.GetTree(ctx, &trillian.GetTreeRequest{TreeId: cfg.MapID}) if err != nil { return nil, fmt.Errorf("failed to get tree information: %v", err) } glog.Infof("%d: hammering tree with configuration %+v", cfg.MapID, tree) mc, err := client.NewMapClientFromTree(cfg.Client, tree) if err != nil { return nil, fmt.Errorf("failed to get tree verifier: %v", err) } mf := cfg.MetricFactory if mf == nil { mf = monitoring.InertMetricFactory{} } once.Do(func() { setupMetrics(mf) }) if cfg.EmitInterval == 0 { cfg.EmitInterval = defaultEmitSeconds * time.Second } if cfg.MinLeavesR < 0 { return nil, fmt.Errorf("invalid MinLeavesR %d", cfg.MinLeavesR) } if cfg.MaxLeavesR < cfg.MinLeavesR { return nil, fmt.Errorf("invalid MaxLeavesR %d is less than MinLeavesR %d", cfg.MaxLeavesR, cfg.MinLeavesR) } if cfg.MinLeavesW < 0 { return nil, fmt.Errorf("invalid MinLeavesW %d", cfg.MinLeavesW) } if cfg.MaxLeavesW < cfg.MinLeavesW { return nil, fmt.Errorf("invalid MaxLeavesW %d is less than MinLeavesW %d", cfg.MaxLeavesW, cfg.MinLeavesW) } if int(cfg.LeafSize) < minValueLen { return nil, fmt.Errorf("invalid LeafSize %d is smaller than min %d", cfg.LeafSize, minValueLen) } if cfg.OperationDeadline == 0 { cfg.OperationDeadline = 60 * time.Second } sharedState := newSharedState() validReadOps := validReadOps{ mc: mc, extraSize: cfg.ExtraSize, minLeaves: cfg.MinLeavesR, maxLeaves: cfg.MaxLeavesR, prevContents: sharedState.contents, sharedState: sharedState, } invalidReadOps := invalidReadOps{ mapID: cfg.MapID, client: cfg.Client, prevContents: 
sharedState.contents, sharedState: sharedState, } return &hammerState{ cfg: cfg, start: time.Now(), sharedState: sharedState, validReadOps: &validReadOps, invalidReadOps: &invalidReadOps, }, nil } func (s *hammerState) label() string { return strconv.FormatInt(s.cfg.MapID, 10) } func (s *hammerState) String() string { interval := time.Since(s.start) details := "" totalReqs := 0 totalInvalidReqs := 0 totalErrs := 0 for _, ep := range mapEntrypoints { reqCount := int(reqs.Value(s.label(), string(ep))) totalReqs += reqCount if s.cfg.EPBias.Bias[ep] > 0 { details += fmt.Sprintf(" %s=%d/%d", ep, int(rsps.Value(s.label(), string(ep))), reqCount) } totalInvalidReqs += int(invalidReqs.Value(s.label(), string(ep))) totalErrs += int(errs.Value(s.label(), string(ep))) } revStr := "N/A" if latestRev, found := s.sharedState.getLastReadRev(); found { revStr = strconv.FormatUint(latestRev, 10) } return fmt.Sprintf("%d: lastSMR.rev=%v ops: total=%d (%f ops/sec) invalid=%d errs=%v%s", s.cfg.MapID, revStr, totalReqs, float64(totalReqs)/interval.Seconds(), totalInvalidReqs, totalErrs, details) } func pickIntInRange(min, max int, prng *rand.Rand) int { delta := 1 + max - min return min + prng.Intn(delta) } type readOps interface { getLeaves(context.Context, *rand.Rand) error getLeavesRev(context.Context, *rand.Rand) error getSMR(context.Context, *rand.Rand) error getSMRRev(context.Context, *rand.Rand) error } type mapOperationFn func(context.Context, *rand.Rand) error func getReadOp(ep MapEntrypointName, read readOps) (mapOperationFn, error) { switch ep { case GetLeavesName: return read.getLeaves, nil case GetLeavesRevName: return read.getLeavesRev, nil case GetSMRName: return read.getSMR, nil case GetSMRRevName: return read.getSMRRev, nil default: return nil, fmt.Errorf("internal error: unknown read operation %s", ep) } } func dehash(index []byte) string { return strings.TrimRight(string(index), "\x00") } // metadataForRev returns the metadata value that the maphammer always uses for 
// a specific revision. func metadataForRev(rev uint64) []byte { if rev == 0 { return []byte{} } return []byte(fmt.Sprintf("Metadata-%d", rev)) }
apache-2.0
DanielNesbitt/maven-activator-plugin
src/main/java/com/dnesbitt/util/OS.java
234
package com.dnesbitt.util; /** * @author Daniel Nesbitt */ public final class OS { private OS() { } public static boolean isWindows() { return System.getProperty("os.name").toLowerCase().contains("win"); } }
apache-2.0
dasomel/egovframework
common-component/v2.3.2/src/main/java/egovframework/com/cop/ncm/service/NameCard.java
13334
package egovframework.com.cop.ncm.service;

import java.io.Serializable;

import org.apache.commons.lang.builder.ToStringBuilder;

/**
 * Value object holding a single business-card (name card) record.
 *
 * @author Common Service Development Team, Lee Sam-seop
 * @since 2009.06.01
 * @version 1.0
 */
@SuppressWarnings("serial")
public class NameCard implements Serializable {

    /** Address. */
    private String adres = "";
    /** Area (dialing) code. */
    private String areaNo = "";
    /** Class/grade name. */
    private String clsfNm = "";
    /** Company name. */
    private String cmpnyNm = "";
    /** Department name. */
    private String deptNm = "";
    /** E-mail address. */
    private String emailAdres = "";
    /** Last segment of the mobile number. */
    private String endMbtlNum = "";
    /** Last segment of the telephone number. */
    private String endTelNo = "";
    /** External-user flag. */
    private String extrlUserAt = "";
    /** ID of the first registrant. */
    private String frstRegisterId = "";
    /** Time of first registration. */
    private String frstRegisterPnttm = "";
    /** Identification number. */
    private String idntfcNo = "";
    /** ID of the last updater. */
    private String lastUpdusrId = "";
    /** Time of last update. */
    private String lastUpdusrPnttm = "";
    /** Mobile number. */
    private String mbtlNum = "";
    /** Middle segment of the mobile number. */
    private String middleMbtlNum = "";
    /** Middle segment of the telephone number. */
    private String middleTelNo = "";
    /** Country code. */
    private String nationNo = "";
    /** Name-card ID. */
    private String ncrdId = "";
    /** ID of the person the card belongs to. */
    private String ncrdTrgterId = "";
    /** Person's name. */
    private String ncrdNm = "";
    /** Office position name. */
    private String ofcpsNm = "";
    /** Public-visibility flag. */
    private String othbcAt = "";
    /** Remark. */
    private String remark = "";
    /** Telephone number. */
    private String telNo = "";
    /** Detailed address. */
    private String detailAdres = "";
    /** Postal code. */
    private String zipCode = "";

    /** Returns the address. */
    public String getAdres() { return adres; }

    /** Sets the address. */
    public void setAdres(String adres) { this.adres = adres; }

    /** Returns the area code. */
    public String getAreaNo() { return areaNo; }

    /** Sets the area code. */
    public void setAreaNo(String areaNo) { this.areaNo = areaNo; }

    /** Returns the class/grade name. */
    public String getClsfNm() { return clsfNm; }

    /** Sets the class/grade name. */
    public void setClsfNm(String clsfNm) { this.clsfNm = clsfNm; }

    /** Returns the company name. */
    public String getCmpnyNm() { return cmpnyNm; }

    /** Sets the company name. */
    public void setCmpnyNm(String cmpnyNm) { this.cmpnyNm = cmpnyNm; }

    /** Returns the department name. */
    public String getDeptNm() { return deptNm; }

    /** Sets the department name. */
    public void setDeptNm(String deptNm) { this.deptNm = deptNm; }

    /** Returns the e-mail address. */
    public String getEmailAdres() { return emailAdres; }

    /** Sets the e-mail address. */
    public void setEmailAdres(String emailAdres) { this.emailAdres = emailAdres; }

    /** Returns the last segment of the mobile number. */
    public String getEndMbtlNum() { return endMbtlNum; }

    /** Sets the last segment of the mobile number. */
    public void setEndMbtlNum(String endMbtlNum) { this.endMbtlNum = endMbtlNum; }

    /** Returns the last segment of the telephone number. */
    public String getEndTelNo() { return endTelNo; }

    /** Sets the last segment of the telephone number. */
    public void setEndTelNo(String endTelNo) { this.endTelNo = endTelNo; }

    /** Returns the external-user flag. */
    public String getExtrlUserAt() { return extrlUserAt; }

    /** Sets the external-user flag. */
    public void setExtrlUserAt(String extrlUserAt) { this.extrlUserAt = extrlUserAt; }

    /** Returns the ID of the first registrant. */
    public String getFrstRegisterId() { return frstRegisterId; }

    /** Sets the ID of the first registrant. */
    public void setFrstRegisterId(String frstRegisterId) { this.frstRegisterId = frstRegisterId; }

    /** Returns the time of first registration. */
    public String getFrstRegisterPnttm() { return frstRegisterPnttm; }

    /** Sets the time of first registration. */
    public void setFrstRegisterPnttm(String frstRegisterPnttm) { this.frstRegisterPnttm = frstRegisterPnttm; }

    /** Returns the identification number. */
    public String getIdntfcNo() { return idntfcNo; }

    /** Sets the identification number. */
    public void setIdntfcNo(String idntfcNo) { this.idntfcNo = idntfcNo; }

    /** Returns the ID of the last updater. */
    public String getLastUpdusrId() { return lastUpdusrId; }

    /** Sets the ID of the last updater. */
    public void setLastUpdusrId(String lastUpdusrId) { this.lastUpdusrId = lastUpdusrId; }

    /** Returns the time of last update. */
    public String getLastUpdusrPnttm() { return lastUpdusrPnttm; }

    /** Sets the time of last update. */
    public void setLastUpdusrPnttm(String lastUpdusrPnttm) { this.lastUpdusrPnttm = lastUpdusrPnttm; }

    /** Returns the mobile number. */
    public String getMbtlNum() { return mbtlNum; }

    /** Sets the mobile number. */
    public void setMbtlNum(String mbtlNum) { this.mbtlNum = mbtlNum; }

    /** Returns the middle segment of the mobile number. */
    public String getMiddleMbtlNum() { return middleMbtlNum; }

    /** Sets the middle segment of the mobile number. */
    public void setMiddleMbtlNum(String middleMbtlNum) { this.middleMbtlNum = middleMbtlNum; }

    /** Returns the middle segment of the telephone number. */
    public String getMiddleTelNo() { return middleTelNo; }

    /** Sets the middle segment of the telephone number. */
    public void setMiddleTelNo(String middleTelNo) { this.middleTelNo = middleTelNo; }

    /** Returns the country code. */
    public String getNationNo() { return nationNo; }

    /** Sets the country code. */
    public void setNationNo(String nationNo) { this.nationNo = nationNo; }

    /** Returns the name-card ID. */
    public String getNcrdId() { return ncrdId; }

    /** Sets the name-card ID. */
    public void setNcrdId(String ncrdId) { this.ncrdId = ncrdId; }

    /** Returns the ID of the person the card belongs to. */
    public String getNcrdTrgterId() { return ncrdTrgterId; }

    /** Sets the ID of the person the card belongs to. */
    public void setNcrdTrgterId(String ncrdTrgterId) { this.ncrdTrgterId = ncrdTrgterId; }

    /** Returns the person's name. */
    public String getNcrdNm() { return ncrdNm; }

    /** Sets the person's name. */
    public void setNcrdNm(String ncrdNm) { this.ncrdNm = ncrdNm; }

    /** Returns the office position name. */
    public String getOfcpsNm() { return ofcpsNm; }

    /** Sets the office position name. */
    public void setOfcpsNm(String ofcpsNm) { this.ofcpsNm = ofcpsNm; }

    /** Returns the public-visibility flag. */
    public String getOthbcAt() { return othbcAt; }

    /** Sets the public-visibility flag. */
    public void setOthbcAt(String othbcAt) { this.othbcAt = othbcAt; }

    /** Returns the remark. */
    public String getRemark() { return remark; }

    /** Sets the remark. */
    public void setRemark(String remark) { this.remark = remark; }

    /** Returns the telephone number. */
    public String getTelNo() { return telNo; }

    /** Sets the telephone number. */
    public void setTelNo(String telNo) { this.telNo = telNo; }

    /** Returns the detailed address. */
    public String getDetailAdres() { return detailAdres; }

    /** Sets the detailed address. */
    public void setDetailAdres(String detailAdres) { this.detailAdres = detailAdres; }

    /** Returns the postal code. */
    public String getZipCode() { return zipCode; }

    /** Sets the postal code. */
    public void setZipCode(String zipCode) { this.zipCode = zipCode; }

    /** Reflective string form of every field (via commons-lang). */
    @Override
    public String toString() {
        return ToStringBuilder.reflectionToString(this);
    }
}
apache-2.0
freeVM/freeVM
enhanced/archive/classlib/java6/modules/sound/src/test/java/org/apache/harmony/sound/tests/javax/sound/sampled/AudioSystemTest.java
22966
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.harmony.sound.tests.javax.sound.sampled;

import java.io.File;
import java.net.URL;

import javax.sound.sampled.AudioFileFormat;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.Line;
import javax.sound.sampled.Mixer;

import junit.framework.TestCase;

/**
 *
 * Dummy sound provider located in soundProvider.jar is used for testing.
 * Provider sources are provided at the comments at the end of this file.
 *
 * NOTE(review): every expected value below (mixer name "NAME", class names
 * "org.apache.harmony.sound.testProvider.*", the "txt" file type, the
 * 10f/2/30/40/50f target format) is defined by that dummy SPI provider,
 * not by any real audio hardware.
 */
public class AudioSystemTest extends TestCase {

    /**
     * The dummy AudioFileReader/Writer must recognize the "txt" file type
     * and answer for any URL.
     */
    public void testAudioFile() throws Exception {
        boolean ok;

        assertTrue(AudioSystem.getAudioFileFormat(new URL("file:./myFile.txt")) != null);

        AudioFileFormat.Type[] types = AudioSystem.getAudioFileTypes();
        ok = false;
        for (int i = 0; i < types.length; i++) {
            if (types[i].getExtension().equals("txt")) {
                ok = true;
                break;
            }
        }
        assertTrue(ok);
    }

    /**
     * The dummy MixerProvider registers exactly one mixer ("NAME"/"VERSION")
     * exposing four source/target line infos (Clip, SourceDataLine,
     * TargetDataLine, Port).
     */
    public void testMixer() throws Exception {
        boolean ok;

        Mixer.Info[] minfos = AudioSystem.getMixerInfo();
        assertTrue(minfos.length > 0);
        assertEquals(minfos[0].getName(), "NAME");
        assertEquals(minfos[0].getVersion(), "VERSION");

        assertTrue(AudioSystem.getMixer(null) != null);

        Mixer mix = AudioSystem.getMixer(minfos[0]);
        assertEquals(mix.getClass().getName(),
                "org.apache.harmony.sound.testProvider.MyMixer");

        Line.Info[] mli = mix.getSourceLineInfo();
        assertEquals(mli.length, 4);

        Line.Info[] infos = AudioSystem.getSourceLineInfo(mli[0]);
        ok = false;
        for (int i = 0; i < infos.length; i++) {
            if (infos[i].getLineClass().getName().equals(
                    "org.apache.harmony.sound.testProvider.myClip")) {
                ok = true;
                break;
            }
        }
        assertTrue(ok);

        infos = AudioSystem.getTargetLineInfo(mli[0]);
        ok = false;
        for (int i = 0; i < infos.length; i++) {
            if (infos[i].getLineClass().getName().equals(
                    "org.apache.harmony.sound.testProvider.myClip")) {
                ok = true;
                break;
            }
        }
        assertTrue(ok);
    }

    /**
     * Reads myFile.txt through the dummy reader, writes it through the dummy
     * writer, then re-wraps the (already consumed) stream.
     * NOTE(review): the write-then-rewrap ordering is intentional here;
     * the dummy provider ignores stream position.
     */
    public void testAudioInputStream() throws Exception {

        AudioInputStream stream = AudioSystem.getAudioInputStream(new File(
                "myFile.txt"));
        assertTrue(stream != null);

        // no exception expected
        AudioSystem.write(stream, new AudioFileFormat.Type("TXT", "txt"),
                System.out);

        assertEquals(AudioSystem.getAudioInputStream(
                AudioFormat.Encoding.PCM_UNSIGNED, stream), stream);
    }

    // see TestFormatConversionProvider
    /**
     * The dummy conversion provider converts PCM_UNSIGNED (1f/2/3/4/5f) to
     * PCM_SIGNED (10f/2/30/40/50f); verify both encoding and format queries.
     */
    public void testFormatConversion() throws Exception {
        boolean ok;

        AudioFormat af_source = new AudioFormat(
                AudioFormat.Encoding.PCM_UNSIGNED, 1f, 2, 3, 4, 5f, true);

        AudioFormat.Encoding[] aafe = AudioSystem
                .getTargetEncodings(AudioFormat.Encoding.PCM_UNSIGNED);
        ok = false;
        for (int i = 0; i < aafe.length; i++) {
            // contains PCM_SIGNED (see TestFormatConversionProvider)
            if (aafe[i].equals(AudioFormat.Encoding.PCM_SIGNED)) {
                ok = true;
                break;
            }
        }
        assertTrue(ok);

        assertTrue(AudioSystem.isConversionSupported(
                AudioFormat.Encoding.PCM_SIGNED, af_source));

        AudioFormat[] aaf = AudioSystem.getTargetFormats(
                AudioFormat.Encoding.PCM_UNSIGNED, af_source);
        ok = false;
        for (int i = 0; i < aaf.length; i++) {
            if (aaf[i].getSampleRate() == 10f
                    && aaf[i].getSampleSizeInBits() == 2
                    && aaf[i].getChannels() == 30
                    && aaf[i].getFrameSize() == 40
                    && aaf[i].getFrameRate() == 50f) {
                ok = true;
                break;
            }
        }
        assertTrue(ok);
    }

    /**
     * getLine must return the dummy provider's implementation class for each
     * of the four standard line types, and getClip the dummy clip.
     */
    public void testGetLine() throws Exception {

        assertEquals(AudioSystem.getLine(
                new Line.Info(javax.sound.sampled.Clip.class)).getClass()
                .getName(), "org.apache.harmony.sound.testProvider.myClip");

        assertEquals(AudioSystem.getLine(
                new Line.Info(javax.sound.sampled.SourceDataLine.class))
                .getClass().getName(),
                "org.apache.harmony.sound.testProvider.mySourceDataLine");

        assertEquals(AudioSystem.getLine(
                new Line.Info(javax.sound.sampled.TargetDataLine.class))
                .getClass().getName(),
                "org.apache.harmony.sound.testProvider.myTargetDataLine");

        assertEquals(AudioSystem.getLine(
                new Line.Info(javax.sound.sampled.Port.class)).getClass()
                .getName(), "org.apache.harmony.sound.testProvider.myPort");

        assertEquals(AudioSystem.getClip().getClass().getName(),
                "org.apache.harmony.sound.testProvider.myClip");
    }
}

// SOUND PROVIDER SOURCES:
//
// META-INF/services/ files:
// file META-INF/services/javax.sound.sampled.spi.AudioFileWriter:
//org.apache.harmony.sound.testProvider.TestAudioFileWriter
//
// file META-INF/services/javax.sound.sampled.spi.FormatConversionProvider:
//org.apache.harmony.sound.testProvider.TestFormatConversionProvider
//
// file META-INF/services/javax.sound.sampled.spi.MixerProvider:
//org.apache.harmony.sound.testProvider.TestMixerProvider
//
// file META-INF/services/javax.sound.sampled.spi.AudioFileReader:
//org.apache.harmony.sound.testProvider.TestAudioFileReader // //Source files: // //TestAudioFileReader.java // //package org.apache.harmony.sound.testProvider; // //import javax.sound.sampled.*; //import javax.sound.sampled.spi.*; //import java.util.*; //import java.io.*; //import java.net.*; // //public class TestAudioFileReader extends AudioFileReader { // static AudioFileFormat aff; // static AudioFormat af; // // static { // AudioFormat.Encoding enc = AudioFormat.Encoding.PCM_UNSIGNED; // AudioFileFormat.Type type = new AudioFileFormat.Type("TXT", "txt"); // af = new AudioFormat(enc , 1f, 2, 3, 4, 5f, true); // aff = new AudioFileFormat(type , af, 10); // } // // public TestAudioFileReader() { // super(); // }; // // public AudioFileFormat getAudioFileFormat(InputStream stream) throws UnsupportedAudioFileException, IOException { // return aff; // } // // public AudioFileFormat getAudioFileFormat(URL url) throws UnsupportedAudioFileException, IOException { // return aff; // } // public AudioFileFormat getAudioFileFormat(File file) throws UnsupportedAudioFileException, IOException { // return aff; // } // public AudioInputStream getAudioInputStream(InputStream stream) throws UnsupportedAudioFileException, IOException { // InputStream is = new ByteArrayInputStream(new byte[1001]); // return new AudioInputStream(is, af, 10); // } // public AudioInputStream getAudioInputStream(URL url) throws UnsupportedAudioFileException, IOException { // InputStream is = new ByteArrayInputStream(new byte[1001]); // return new AudioInputStream(is, af, 10); // } // public AudioInputStream getAudioInputStream(File file) // throws UnsupportedAudioFileException,IOException { // InputStream is = new ByteArrayInputStream(new byte[1001]); // return new AudioInputStream(is, af, 10); // } //} // //TestAudioFileWriter.java // //package org.apache.harmony.sound.testProvider; // //import javax.sound.sampled.*; //import javax.sound.sampled.spi.*; //import java.util.*; //import java.io.*; 
//import java.net.*; // //public class TestAudioFileWriter extends AudioFileWriter { // // static AudioFileFormat aff; // static AudioFormat af; // static AudioFileFormat.Type type; // // static { // AudioFormat.Encoding enc = AudioFormat.Encoding.PCM_UNSIGNED; // type = new AudioFileFormat.Type("TXT", "txt"); // vaf = new AudioFormat(enc , 1f, 2, 3, 4, 5f, true); // aff = new AudioFileFormat(type , af, 10); // } // // public TestAudioFileWriter () { // super(); // }; // // public AudioFileFormat.Type[] getAudioFileTypes() { // return new AudioFileFormat.Type[] {type}; // } // public AudioFileFormat.Type[] getAudioFileTypes(AudioInputStream stream) { // return new AudioFileFormat.Type[] {type}; // } // public boolean isFileTypeSupported(AudioFileFormat.Type fileType) { // return type.equals(fileType); // } // public boolean isFileTypeSupported(AudioFileFormat.Type fileType, AudioInputStream stream) { // return type.equals(fileType); // } // public int write(AudioInputStream stream, AudioFileFormat.Type fileType, OutputStream out) throws IOException { // return 10; // } // public int write(AudioInputStream stream, AudioFileFormat.Type fileType, File out) throws IOException { // return 10; // } //} // //TestFormatConversionProvider.java // //package org.apache.harmony.sound.testProvider; // //import javax.sound.sampled.*; //import javax.sound.sampled.spi.*; //import java.util.*; //import java.io.*; //import java.net.*; // //public class TestFormatConversionProvider extends FormatConversionProvider{ // // static AudioFormat.Encoding[] enc_source; // static AudioFormat.Encoding[] enc_target; // static AudioFormat af_source; // static AudioFormat af_target; // // static { // enc_source = new AudioFormat.Encoding[] {AudioFormat.Encoding.PCM_UNSIGNED}; // af_source = new AudioFormat(enc_source [0] , 1f, 2, 3, 4, 5f, true); // enc_target= new AudioFormat.Encoding[] {AudioFormat.Encoding.PCM_SIGNED}; // af_target= new AudioFormat(enc_target[0] , 10f, 2, 30, 40, 50f, false); 
// } // public TestFormatConversionProvider() { // super(); // }; // public AudioInputStream getAudioInputStream( // AudioFormat.Encoding targetEncoding, AudioInputStream sourceStream) { // if (!enc_target[0].equals(targetEncoding) || // !af_source.equals(sourceStream.getFormat())) { // throw new IllegalArgumentException("conversion not supported"); // } // return sourceStream; // } // public AudioInputStream getAudioInputStream( // AudioFormat targetFormat, AudioInputStream sourceStream) { // if (!af_target.equals(targetFormat) || // !af_source.equals(sourceStream.getFormat())) { // throw new IllegalArgumentException("conversion not supported"); // } // return sourceStream; // } // public AudioFormat.Encoding[] getTargetEncodings( // AudioFormat sourceFormat) { // if (af_source.matches(sourceFormat)) { // return enc_target; // } else { // return new AudioFormat.Encoding[0]; // } // } // public AudioFormat[] getTargetFormats( // AudioFormat.Encoding targetFormat, AudioFormat sourceFormat) { // if (af_source.matches(sourceFormat)) { // return new AudioFormat[] {af_target}; // } else { // return new AudioFormat[0]; // } // } // public AudioFormat.Encoding[] getSourceEncodings() { // return enc_source; // } // public AudioFormat.Encoding[] getTargetEncodings() { // return enc_target; // } //} // //TestMixerProvider.java // //package org.apache.harmony.sound.testProvider; // //import javax.sound.sampled.*; //import javax.sound.sampled.spi.*; //import java.util.*; //import java.io.*; //import java.net.*; // //public class TestMixerProvider extends MixerProvider { // static Mixer.Info info; // static Mixer mixer; // static { // info = new MyMixerInfo("NAME", "VENDOR", "DESCRIPTION", "VERSION"); // mixer = new MyMixer(info); // } // public TestMixerProvider () {super();} // public boolean isMixerSupported(Mixer.Info info) { // return this.info.equals(info); // } // public Mixer.Info[] getMixerInfo() { // return new Mixer.Info[] {info}; // } // public Mixer 
getMixer(Mixer.Info info) { // if (this.info.equals(info)) { // return mixer; // } // throw new IllegalArgumentException("TestMixerProvider "); // } //} // //class MyMixerInfo extends Mixer.Info { // public MyMixerInfo(String name, String vendor, String description, // String version) { // super(name, vendor, description, version); // } //} // //class MyMixer implements Mixer { // private Mixer.Info minfo; // private Line[] sourceLines = new Line[] {new myClip(), // new mySourceDataLine(), // new myTargetDataLine(), // new myPort()}; // private Line[] targetLines = new Line[] {new myClip(), // new mySourceDataLine(), // new myTargetDataLine(), // new myPort()}; // private Line.Info[] lineInfos = new Line.Info[] { // new Line.Info(javax.sound.sampled.Clip.class), // new Line.Info(javax.sound.sampled.SourceDataLine.class), // new Line.Info(javax.sound.sampled.TargetDataLine.class), // new Line.Info(javax.sound.sampled.Port.class) // }; // public MyMixer(Mixer.Info info) { // minfo = info; // } // public Line getLine(Line.Info info) throws LineUnavailableException { // for (int i = 0; i < lineInfos.length; i++) { // if (lineInfos[i].matches(info)) { // return sourceLines[i]; // } // } // throw new IllegalArgumentException("not supported " + info); // } // public int getMaxLines(Line.Info info) { // return AudioSystem.NOT_SPECIFIED; // } // public Mixer.Info getMixerInfo() { // return minfo; // } // public Line.Info[] getSourceLineInfo() { // return lineInfos; // } // public Line.Info[] getSourceLineInfo(Line.Info info) { // for (int i = 0; i < lineInfos.length; i++) { // if (lineInfos[i].matches(info)) { // return new Line.Info[] {sourceLines[i].getLineInfo()}; // } // } // throw new IllegalArgumentException("not supported " + info); // } // public Line[] getSourceLines() { // return sourceLines; // } // public Line.Info[] getTargetLineInfo() { // return lineInfos; // } // public Line.Info[] getTargetLineInfo(Line.Info info) { // for (int i = 0; i < lineInfos.length; 
i++) { // if (lineInfos[i].matches(info)) { // return new Line.Info[] {targetLines[i].getLineInfo()}; // } // } // throw new IllegalArgumentException("not supported " + info); // } // public Line[] getTargetLines() { // return targetLines; // } // public boolean isLineSupported(Line.Info info) { // for (int i = 0; i < lineInfos.length; i++) { // if (lineInfos[i].matches(info)) { // return true; // } // } // return false; // } // public boolean isSynchronizationSupported(Line[] lines, boolean maintainSync) { // return false; // } // public void synchronize(Line[] lines, boolean maintainSync) {} // // public void unsynchronize(Line[] lines) {} // // // methods of Line interface // public void close() {} // public Control getControl(Control.Type control) { // throw new IllegalArgumentException("not supported "+ control); // } // public Control[] getControls() { // return new Control[0]; // } // public Line.Info getLineInfo() { // return new Line.Info(this.getClass()); // } // public boolean isControlSupported(Control.Type control) { // return false; // } // public boolean isOpen() { // return false; // } // public void open() throws LineUnavailableException {} // public void removeLineListener(LineListener listener) {} // public void addLineListener(LineListener listener) {} //} // //class myClip implements Clip { // public int getFrameLength() { return 10;} // public long getMicrosecondLength() {return 100;} // public void loop(int count) {} // public void open(AudioFormat format, byte[] data, int offset, int bufferSize) // throws LineUnavailableException {} // public void open(AudioInputStream stream) throws LineUnavailableException, // IOException {} // public void setFramePosition(int frames) {} // public void setLoopPoints(int start, int end) {} // public void setMicrosecondPosition(long microseconds) {} // public int available() {return 1;} // public void drain() {} // public void flush() {} // public int getBufferSize() {return 1;} // public AudioFormat 
getFormat() {return null;} // public int getFramePosition() {return 1;} // public float getLevel() {return 1f;} // public long getLongFramePosition() {return 1;} // public long getMicrosecondPosition() {return 10;} // public boolean isActive() {return false;} // public boolean isRunning(){return false;} // public void start() {} // public void stop(){} // public void close() {} // public Control getControl(Control.Type control) { // throw new IllegalArgumentException("not supported "+ control); // } // public Control[] getControls() { // return new Control[0]; // } // public Line.Info getLineInfo() { // return new Line.Info(this.getClass()); // } // public boolean isControlSupported(Control.Type control) { // return false; // } // public boolean isOpen() { // return false; // } // public void open() throws LineUnavailableException {} // public void removeLineListener(LineListener listener) {} // public void addLineListener(LineListener listener) {} //} // //class mySourceDataLine implements SourceDataLine{ // public void open(AudioFormat format) throws LineUnavailableException {} // public void open(AudioFormat format, int bufferSize) // throws LineUnavailableException {} // public int write(byte[] b, int off, int len) {return 1;} // public int available() {return 1;} // public void drain() {} // public void flush() {} // public int getBufferSize() {return 1;} // public AudioFormat getFormat() {return null;} // public int getFramePosition() {return 1;} // public float getLevel() {return 1f;} // public long getLongFramePosition() {return 1;} // public long getMicrosecondPosition() {return 10;} // public boolean isActive() {return false;} // public boolean isRunning(){return false;} // public void start() {} // public void stop(){} // public void close() {} // public Control getControl(Control.Type control) { // throw new IllegalArgumentException("not supported "+ control); // } // public Control[] getControls() { // return new Control[0]; // } // public Line.Info 
getLineInfo() { // return new Line.Info(this.getClass()); // } // public boolean isControlSupported(Control.Type control) { // return false; // } // public boolean isOpen() { // return false; // } // public void open() throws LineUnavailableException {} // public void removeLineListener(LineListener listener) {} // public void addLineListener(LineListener listener) {} //} // //class myTargetDataLine implements TargetDataLine { // public void open(AudioFormat format) throws LineUnavailableException{} // public void open(AudioFormat format, int bufferSize) // throws LineUnavailableException{} // public int read(byte[] b, int off, int len) {return 1;} // public int available() {return 1;} // public void drain() {} // public void flush() {} // public int getBufferSize() {return 1;} // public AudioFormat getFormat() {return null;} // public int getFramePosition() {return 1;} // public float getLevel() {return 1f;} // public long getLongFramePosition() {return 1;} // public long getMicrosecondPosition() {return 10;} // public boolean isActive() {return false;} // public boolean isRunning(){return false;} // public void start() {} // public void stop(){} // public void close() {} // public Control getControl(Control.Type control) { // throw new IllegalArgumentException("not supported "+ control); // } // public Control[] getControls() { // return new Control[0]; // } // public Line.Info getLineInfo() { // return new Line.Info(this.getClass()); // } // public boolean isControlSupported(Control.Type control) { // return false; // } // public boolean isOpen() { // return false; // } // public void open() throws LineUnavailableException {} // public void removeLineListener(LineListener listener) {} // public void addLineListener(LineListener listener) {} //} // //class myPort implements Port { // public void close() {} // public Control getControl(Control.Type control) { // throw new IllegalArgumentException("not supported "+ control); // } // public Control[] getControls() { 
// return new Control[0]; // } // public Line.Info getLineInfo() { // return new Line.Info(this.getClass()); // } // public boolean isControlSupported(Control.Type control) { // return false; // } // public boolean isOpen() { // return false; // } // public void open() throws LineUnavailableException {} // public void removeLineListener(LineListener listener) {} // public void addLineListener(LineListener listener) {} //}
apache-2.0
ShAdOwNeT88/Company-Information
app/src/main/java/alterego/solutions/company_information/search_company/SearchActivity.java
9541
package alterego.solutions.company_information.search_company; import android.app.Activity; import android.app.SearchManager; import android.content.ActivityNotFoundException; import android.content.Context; import android.content.Intent; import android.graphics.Color; import android.net.Uri; import android.os.Bundle; import android.support.annotation.BinderThread; import android.support.customtabs.CustomTabsIntent; import android.support.design.widget.FloatingActionButton; import android.support.design.widget.NavigationView; import android.support.design.widget.Snackbar; import android.support.v4.view.GravityCompat; import android.support.v4.widget.DrawerLayout; import android.support.v4.widget.SwipeRefreshLayout; import android.support.v7.app.ActionBarDrawerToggle; import android.support.v7.app.AppCompatActivity; import android.support.v7.widget.LinearLayoutManager; import android.support.v7.widget.RecyclerView; import android.support.v7.widget.Toolbar; import android.util.Log; import android.view.Menu; import android.view.MenuItem; import android.view.View; import android.view.WindowManager; import android.widget.SearchView; import android.widget.TextView; import android.widget.Toast; import com.afollestad.materialdialogs.MaterialDialog; import com.crashlytics.android.Crashlytics; import com.nbsp.materialfilepicker.MaterialFilePicker; import com.nbsp.materialfilepicker.ui.FilePickerActivity; import alterego.solutions.company_information.runtime_permission.PermissionManager; import io.fabric.sdk.android.Fabric; import org.chromium.customtabsclient.CustomTabsActivityHelper; import java.util.ArrayList; import java.util.List; import java.util.regex.Pattern; import alterego.solutions.company_information.Company; import alterego.solutions.company_information.R; import alterego.solutions.company_information.add_company.AddActivity; import alterego.solutions.company_information.dbHelper.DBHelper; import 
alterego.solutions.company_information.dbHelper.DbManagmentPresenter; import alterego.solutions.company_information.models.CompanyAdapter; import butterknife.Bind; import butterknife.BindColor; import me.zhanghai.android.customtabshelper.CustomTabsHelperFragment; public class SearchActivity extends AppCompatActivity implements NavigationView.OnNavigationItemSelectedListener{ @Bind(R.id.searchView_company) SearchView mCompanySearchView; @BindColor(R.color.colorPrimary) int mColorPrimary; DbManagmentPresenter mManagerPresenter; SearchPresenter mSearchPresenter; private RecyclerView mRecyclerView; private RecyclerView.Adapter mAdapter; private SwipeRefreshLayout mSwipeRefreshLayout; private static String LOG_TAG = "CardViewActivity"; private CustomTabsHelperFragment mCustomTabsHelperFragment; private CustomTabsIntent mCustomTabsIntent; private Activity activity; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); Fabric.with(this, new Crashlytics()); setContentView(R.layout.activity_search); Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar); setSupportActionBar(toolbar); DrawerLayout drawer = (DrawerLayout) findViewById(R.id.drawer_layout_search_company); ActionBarDrawerToggle toggle = new ActionBarDrawerToggle(this, drawer, toolbar, R.string.navigation_drawer_open, R.string.navigation_drawer_close); drawer.setDrawerListener(toggle); toggle.syncState(); Context context = this; mSwipeRefreshLayout = (SwipeRefreshLayout) findViewById(R.id.search_company_swipe_container); mSwipeRefreshLayout.setEnabled(false); mSwipeRefreshLayout.setRefreshing(false); PermissionManager pm = new PermissionManager(this); pm.managingPermission(); pm.managingPermission(); final Context ctx = this; NavigationView navigationView = (NavigationView) findViewById(R.id.nav_view); navigationView.setNavigationItemSelectedListener(this); mRecyclerView = (RecyclerView) findViewById(R.id.search_company_recycle_view); RecyclerView.LayoutManager 
mLayoutManager = new LinearLayoutManager(this); mRecyclerView.setLayoutManager(mLayoutManager); mManagerPresenter = new DbManagmentPresenter(this); mCustomTabsHelperFragment = CustomTabsHelperFragment.attachTo(this); mCustomTabsIntent = new CustomTabsIntent.Builder() .enableUrlBarHiding() .setToolbarColor(mColorPrimary) .setShowTitle(true) .build(); SearchManager searchManager = (SearchManager) getSystemService(Context.SEARCH_SERVICE); mCompanySearchView = (SearchView) findViewById(R.id.searchView_company); mCompanySearchView.setQueryHint("Nome Azienda"); mCompanySearchView.setSearchableInfo(searchManager.getSearchableInfo(getComponentName())); mCompanySearchView.setIconifiedByDefault(false); //Applies white color on searchview text int id = mCompanySearchView.getContext().getResources().getIdentifier("android:id/search_src_text", null, null); TextView textView = (TextView) mCompanySearchView.findViewById(id); textView.setTextColor(Color.WHITE); textView.setHintTextColor(Color.WHITE); //Inizialize presenter to show all the company in database mSearchPresenter = new SearchPresenter("tutte", getApplicationContext()); mAdapter = new CompanyAdapter(mSearchPresenter.manageQuery(),ctx); mRecyclerView.setAdapter(mAdapter); mCompanySearchView.setOnQueryTextListener(new SearchView.OnQueryTextListener() { @Override public boolean onQueryTextSubmit(String query) { mSearchPresenter = new SearchPresenter(query, context); ArrayList<Company> companies = mSearchPresenter.manageQuery(); mAdapter = new CompanyAdapter(companies,ctx); mAdapter.notifyDataSetChanged(); mRecyclerView.setAdapter(mAdapter); return true; } @Override public boolean onQueryTextChange(String newText) { return false; } }); } @Override public boolean onCreateOptionsMenu(Menu menu) { // Inflate the menu; this adds items to the action bar if it is present. 
getMenuInflater().inflate(R.menu.main, menu); return true; } @Override public boolean onOptionsItemSelected(MenuItem item) { int id = item.getItemId(); switch (id){ case R.id.action_dump_db: mManagerPresenter.backupDB(); return super.onOptionsItemSelected(item); case R.id.action_restore_db: getPathForDb(); return super.onOptionsItemSelected(item); default: return super.onOptionsItemSelected(item); } } @SuppressWarnings("StatementWithEmptyBody") @Override public boolean onNavigationItemSelected(MenuItem item) { int id = item.getItemId(); switch (id){ case R.id.search_company: Intent search = new Intent(this, SearchActivity.class); startActivity(search); break; case R.id.add_company: Intent add = new Intent(this, AddActivity.class); startActivity(add); break; case R.id.about_us: CustomTabsHelperFragment.open(this, mCustomTabsIntent, Uri.parse("http://alterego.solutions"), mCustomTabsFallback); break; default: break; } DrawerLayout drawer = (DrawerLayout) findViewById(R.id.drawer_layout_search_company); drawer.closeDrawer(GravityCompat.START); return true; } protected final CustomTabsActivityHelper.CustomTabsFallback mCustomTabsFallback = (activity, uri) -> { Toast.makeText(activity, R.string.custom_tab_error, Toast.LENGTH_SHORT).show(); try { activity.startActivity(new Intent(Intent.ACTION_VIEW, uri)); } catch (ActivityNotFoundException e) { e.printStackTrace(); Toast.makeText(activity, R.string.custom_tab_error_activity, Toast.LENGTH_SHORT) .show(); } }; //Launch File Picker and get the path of the file for db. 
public void getPathForDb(){ new MaterialFilePicker() .withActivity(this) .withRequestCode(1) .withFilter(Pattern.compile(".*\\.sqlite$")) // Filtering files and directories by file name using regexp .withFilterDirectories(false) // Set directories filterable (false by default) .withHiddenFiles(true) // Show hidden files and folders .start(); } @Override protected void onActivityResult(int requestCode, int resultCode, Intent data) { super.onActivityResult(requestCode, resultCode, data); if (requestCode == 1 && resultCode == RESULT_OK) { String filePath = data.getStringExtra(FilePickerActivity.RESULT_FILE_PATH); // Do anything with file Log.e("PATH OF THE DB",filePath); mManagerPresenter.restoreDB(filePath); } } }
apache-2.0
yuyakaido/CardStackView
cardstackview/src/main/java/com/yuyakaido/android/cardstackview/internal/CardStackSmoothScroller.java
5708
package com.yuyakaido.android.cardstackview.internal;

import android.view.View;

import com.yuyakaido.android.cardstackview.CardStackLayoutManager;
import com.yuyakaido.android.cardstackview.CardStackListener;
import com.yuyakaido.android.cardstackview.RewindAnimationSetting;

import androidx.annotation.NonNull;
import androidx.recyclerview.widget.RecyclerView;

/**
 * SmoothScroller that drives the four card animations (automatic/manual swipe,
 * automatic rewind, manual cancel) by translating the scroll distances and
 * durations from the manager's animation settings, and by forwarding state
 * transitions and listener callbacks at animation start/stop.
 */
public class CardStackSmoothScroller extends RecyclerView.SmoothScroller {

    // Which animation this scroller instance performs.
    public enum ScrollType {
        AutomaticSwipe,
        AutomaticRewind,
        ManualSwipe,
        ManualCancel
    }

    private ScrollType type;
    private CardStackLayoutManager manager;

    public CardStackSmoothScroller(
            ScrollType type,
            CardStackLayoutManager manager
    ) {
        this.type = type;
        this.manager = manager;
    }

    @Override
    protected void onSeekTargetStep(
            int dx,
            int dy,
            @NonNull RecyclerView.State state,
            @NonNull Action action
    ) {
        // Only the automatic rewind needs to move while the target view is not
        // yet laid out: scroll back by the rewind setting's full offset.
        if (type == ScrollType.AutomaticRewind) {
            RewindAnimationSetting setting = manager.getCardStackSetting().rewindAnimationSetting;
            action.update(
                    -getDx(setting),
                    -getDy(setting),
                    setting.getDuration(),
                    setting.getInterpolator()
            );
        }
    }

    @Override
    protected void onTargetFound(
            @NonNull View targetView,
            @NonNull RecyclerView.State state,
            @NonNull Action action
    ) {
        // Current translation of the top card; used as the remaining distance
        // for rewind/cancel and as the basis of the manual-swipe fling.
        int x = (int) targetView.getTranslationX();
        int y = (int) targetView.getTranslationY();
        AnimationSetting setting;
        switch (type) {
            case AutomaticSwipe:
                // Throw the card off-screen in the configured direction.
                setting = manager.getCardStackSetting().swipeAnimationSetting;
                action.update(
                        -getDx(setting),
                        -getDy(setting),
                        setting.getDuration(),
                        setting.getInterpolator()
                );
                break;
            case AutomaticRewind:
                // Animate the card back to its resting position (translation 0).
                setting = manager.getCardStackSetting().rewindAnimationSetting;
                action.update(
                        x,
                        y,
                        setting.getDuration(),
                        setting.getInterpolator()
                );
                break;
            case ManualSwipe:
                // Amplify the drag translation (x10, inverted) so the card
                // continues off-screen from wherever the user released it.
                int dx = -x * 10;
                int dy = -y * 10;
                setting = manager.getCardStackSetting().swipeAnimationSetting;
                action.update(
                        dx,
                        dy,
                        setting.getDuration(),
                        setting.getInterpolator()
                );
                break;
            case ManualCancel:
                // Return the partially dragged card to its resting position.
                setting = manager.getCardStackSetting().rewindAnimationSetting;
                action.update(
                        x,
                        y,
                        setting.getDuration(),
                        setting.getInterpolator()
                );
                break;
        }
    }

    @Override
    protected void onStart() {
        // Advance the state machine and notify disappearance for swipes.
        CardStackListener listener = manager.getCardStackListener();
        CardStackState state = manager.getCardStackState();
        switch (type) {
            case AutomaticSwipe:
                state.next(CardStackState.Status.AutomaticSwipeAnimating);
                listener.onCardDisappeared(manager.getTopView(), manager.getTopPosition());
                break;
            case AutomaticRewind:
                state.next(CardStackState.Status.RewindAnimating);
                break;
            case ManualSwipe:
                state.next(CardStackState.Status.ManualSwipeAnimating);
                listener.onCardDisappeared(manager.getTopView(), manager.getTopPosition());
                break;
            case ManualCancel:
                state.next(CardStackState.Status.RewindAnimating);
                break;
        }
    }

    @Override
    protected void onStop() {
        CardStackListener listener = manager.getCardStackListener();
        switch (type) {
            case AutomaticSwipe:
                // Notify callback from CardStackLayoutManager
                break;
            case AutomaticRewind:
                listener.onCardRewound();
                listener.onCardAppeared(manager.getTopView(), manager.getTopPosition());
                break;
            case ManualSwipe:
                // Notify callback from CardStackLayoutManager
                break;
            case ManualCancel:
                listener.onCardCanceled();
                break;
        }
    }

    // Horizontal off-screen distance for the given direction: twice the stack
    // width sideways, zero for vertical swipes.
    private int getDx(AnimationSetting setting) {
        CardStackState state = manager.getCardStackState();
        int dx = 0;
        switch (setting.getDirection()) {
            case Left:
                dx = -state.width * 2;
                break;
            case Right:
                dx = state.width * 2;
                break;
            case Top:
            case Bottom:
                dx = 0;
                break;
        }
        return dx;
    }

    // Vertical offset for the given direction: twice the stack height for
    // vertical swipes, a quarter-height drop for horizontal ones.
    private int getDy(AnimationSetting setting) {
        CardStackState state = manager.getCardStackState();
        int dy = 0;
        switch (setting.getDirection()) {
            case Left:
            case Right:
                dy = state.height / 4;
                break;
            case Top:
                dy = -state.height * 2;
                break;
            case Bottom:
                dy = state.height * 2;
                break;
        }
        return dy;
    }
}
apache-2.0
ANZ-bank/Sysl
pkg/transforms/transforms.go
1271
package transforms import ( "fmt" "github.com/anz-bank/sysl/pkg/eval" sysl "github.com/anz-bank/sysl/pkg/sysl" "github.com/anz-bank/sysl/pkg/syslutil" ) type Worker interface { Apply(mod *sysl.Module, appNames ...string) map[string]*sysl.Value } func NewWorker(transformMod *sysl.Module, appName, viewName string) (Worker, error) { app, has := transformMod.Apps[appName] if !has { return nil, fmt.Errorf("app '%s' not found in transform module", appName) } view, has := app.Views[viewName] if !has { return nil, fmt.Errorf("view '%s' not found in transform app", viewName) } b := base{ mod: transformMod, app: app, view: view, } if len(view.Param) == 1 { _, detail := syslutil.GetTypeDetail(view.Param[0].Type) if detail == templateInputType { return &templated{base: b}, nil } } filenames, has := app.Views["filename"] if !has { return nil, fmt.Errorf("view '%s' not found in transform app", "filename") } return &semantic{base: b, filenames: filenames}, nil } type base struct { mod *sysl.Module app *sysl.Application view *sysl.View } func (b *base) eval(view *sysl.View, scope eval.Scope) *sysl.Value { if view.Expr.Type == nil { view.Expr.Type = view.RetType } return eval.EvaluateApp(b.app, view, scope) }
apache-2.0
alex-charos/footie-predictions
src/main/java/com/oranje/web/rest/AccountResource.java
10432
package com.oranje.web.rest;

import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.util.List;
import java.util.Optional;

import javax.inject.Inject;
import javax.servlet.http.HttpServletRequest;
import javax.validation.Valid;

import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;

import com.codahale.metrics.annotation.Timed;
import com.oranje.domain.PersistentToken;
import com.oranje.domain.User;
import com.oranje.repository.PersistentTokenRepository;
import com.oranje.repository.UserRepository;
import com.oranje.security.SecurityUtils;
import com.oranje.service.impl.MailService;
import com.oranje.service.impl.UserService;
import com.oranje.web.rest.dto.KeyAndPasswordDTO;
import com.oranje.web.rest.dto.UserDTO;
import com.oranje.web.rest.util.HeaderUtil;

/**
 * REST controller for managing the current user's account.
 */
@RestController
@RequestMapping("/api")
public class AccountResource {

    private final Logger log = LoggerFactory.getLogger(AccountResource.class);

    @Inject
    private UserRepository userRepository;

    @Inject
    private UserService userService;

    @Inject
    private PersistentTokenRepository persistentTokenRepository;

    @Inject
    private MailService mailService;

    /**
     * POST /register -> register the user.
     *
     * Rejects the request with 400 when the login or e-mail is already taken;
     * otherwise creates the user and sends an activation e-mail built from the
     * request's scheme/host/port/context path.
     */
    @RequestMapping(value = "/register",
        method = RequestMethod.POST,
        produces = MediaType.TEXT_PLAIN_VALUE)
    @Timed
    public ResponseEntity<?> registerAccount(@Valid @RequestBody UserDTO userDTO, HttpServletRequest request) {
        return userRepository.findOneByLogin(userDTO.getLogin())
            .map(user -> new ResponseEntity<>("login already in use", HttpStatus.BAD_REQUEST))
            .orElseGet(() -> userRepository.findOneByEmail(userDTO.getEmail())
                .map(user -> new ResponseEntity<>("e-mail address already in use", HttpStatus.BAD_REQUEST))
                .orElseGet(() -> {
                    User user = userService.createUserInformation(userDTO.getLogin(), userDTO.getPassword(),
                        userDTO.getFirstName(), userDTO.getLastName(), userDTO.getEmail().toLowerCase(),
                        userDTO.getLangKey());
                    String baseUrl = request.getScheme() + // "http"
                        "://" +                            // "://"
                        request.getServerName() +          // "myhost"
                        ":" +                              // ":"
                        request.getServerPort() +          // "80"
                        request.getContextPath();          // "/myContextPath" or "" if deployed in root context

                    mailService.sendActivationEmail(user, baseUrl);
                    return new ResponseEntity<>(HttpStatus.CREATED);
                })
            );
    }

    /**
     * GET /activate -> activate the registered user.
     *
     * Looks the user up by activation key; 500 when the key is unknown.
     */
    @RequestMapping(value = "/activate",
        method = RequestMethod.GET,
        produces = MediaType.APPLICATION_JSON_VALUE)
    @Timed
    public ResponseEntity<String> activateAccount(@RequestParam(value = "key") String key) {
        return Optional.ofNullable(userService.activateRegistration(key))
            .map(user -> new ResponseEntity<String>(HttpStatus.OK))
            .orElse(new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR));
    }

    /**
     * GET /authenticate -> check if the user is authenticated, and return its login.
     */
    @RequestMapping(value = "/authenticate",
        method = RequestMethod.GET,
        produces = MediaType.APPLICATION_JSON_VALUE)
    @Timed
    public String isAuthenticated(HttpServletRequest request) {
        log.debug("REST request to check if the current user is authenticated");
        return request.getRemoteUser();
    }

    /**
     * GET /account -> get the current user.
     */
    @RequestMapping(value = "/account",
        method = RequestMethod.GET,
        produces = MediaType.APPLICATION_JSON_VALUE)
    @Timed
    public ResponseEntity<UserDTO> getAccount() {
        return Optional.ofNullable(userService.getUserWithAuthorities())
            .map(user -> new ResponseEntity<>(new UserDTO(user), HttpStatus.OK))
            .orElse(new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR));
    }

    /**
     * POST /account -> update the current user information.
     *
     * Fails with 400 when the new e-mail already belongs to a different login.
     */
    @RequestMapping(value = "/account",
        method = RequestMethod.POST,
        produces = MediaType.APPLICATION_JSON_VALUE)
    @Timed
    public ResponseEntity<String> saveAccount(@RequestBody UserDTO userDTO) {
        Optional<User> existingUser = userRepository.findOneByEmail(userDTO.getEmail());
        if (existingUser.isPresent() && (!existingUser.get().getLogin().equalsIgnoreCase(userDTO.getLogin()))) {
            return ResponseEntity.badRequest().headers(HeaderUtil.createFailureAlert("user-management", "emailexists", "Email already in use")).body(null);
        }
        return userRepository
            .findOneByLogin(SecurityUtils.getCurrentUser().getUsername())
            .map(u -> {
                userService.updateUserInformation(userDTO.getFirstName(), userDTO.getLastName(), userDTO.getEmail(),
                    userDTO.getLangKey());
                return new ResponseEntity<String>(HttpStatus.OK);
            })
            .orElseGet(() -> new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR));
    }

    /**
     * POST /change_password -> changes the current user's password
     *
     * Rejects passwords outside the length bounds defined on UserDTO.
     */
    @RequestMapping(value = "/account/change_password",
        method = RequestMethod.POST,
        produces = MediaType.APPLICATION_JSON_VALUE)
    @Timed
    public ResponseEntity<?> changePassword(@RequestBody String password) {
        if (!checkPasswordLength(password)) {
            return new ResponseEntity<>("Incorrect password", HttpStatus.BAD_REQUEST);
        }
        userService.changePassword(password);
        return new ResponseEntity<>(HttpStatus.OK);
    }

    /**
     * GET /account/sessions -> get the current open sessions.
     */
    @RequestMapping(value = "/account/sessions",
        method = RequestMethod.GET,
        produces = MediaType.APPLICATION_JSON_VALUE)
    @Timed
    public ResponseEntity<List<PersistentToken>> getCurrentSessions() {
        return userRepository.findOneByLogin(SecurityUtils.getCurrentUser().getUsername())
            .map(user -> new ResponseEntity<>(
                persistentTokenRepository.findByUser(user),
                HttpStatus.OK))
            .orElse(new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR));
    }

    /**
     * DELETE /account/sessions?series={series} -> invalidate an existing session.
     *
     * - You can only delete your own sessions, not any other user's session
     * - If you delete one of your existing sessions, and that you are currently logged in on that session, you will
     *   still be able to use that session, until you quit your browser: it does not work in real time (there is
     *   no API for that), it only removes the "remember me" cookie
     * - This is also true if you invalidate your current session: you will still be able to use it until you close
     *   your browser or that the session times out. But automatic login (the "remember me" cookie) will not work
     *   anymore.
     *   There is an API to invalidate the current session, but there is no API to check which session uses which
     *   cookie.
     */
    @RequestMapping(value = "/account/sessions/{series}",
        method = RequestMethod.DELETE)
    @Timed
    public void invalidateSession(@PathVariable String series) throws UnsupportedEncodingException {
        // The series token arrives URL-encoded in the path.
        String decodedSeries = URLDecoder.decode(series, "UTF-8");
        userRepository.findOneByLogin(SecurityUtils.getCurrentUser().getUsername()).ifPresent(u -> {
            // Only tokens belonging to the current user are candidates for deletion.
            persistentTokenRepository.findByUser(u).stream()
                .filter(persistentToken -> StringUtils.equals(persistentToken.getSeries(), decodedSeries))
                .findAny().ifPresent(t -> persistentTokenRepository.delete(decodedSeries));
        });
    }

    /**
     * POST /account/reset_password/init -> e-mail a password-reset link to the
     * given address; 400 when the address is not registered.
     */
    @RequestMapping(value = "/account/reset_password/init",
        method = RequestMethod.POST,
        produces = MediaType.TEXT_PLAIN_VALUE)
    @Timed
    public ResponseEntity<?> requestPasswordReset(@RequestBody String mail, HttpServletRequest request) {
        return userService.requestPasswordReset(mail)
            .map(user -> {
                String baseUrl = request.getScheme() +
                    "://" +
                    request.getServerName() +
                    ":" +
                    request.getServerPort() +
                    request.getContextPath();
                mailService.sendPasswordResetMail(user, baseUrl);
                return new ResponseEntity<>("e-mail was sent", HttpStatus.OK);
            }).orElse(new ResponseEntity<>("e-mail address not registered", HttpStatus.BAD_REQUEST));
    }

    /**
     * POST /account/reset_password/finish -> set the new password using the
     * reset key sent by mail; 500 when the key is invalid or expired.
     */
    @RequestMapping(value = "/account/reset_password/finish",
        method = RequestMethod.POST,
        produces = MediaType.APPLICATION_JSON_VALUE)
    @Timed
    public ResponseEntity<String> finishPasswordReset(@RequestBody KeyAndPasswordDTO keyAndPassword) {
        if (!checkPasswordLength(keyAndPassword.getNewPassword())) {
            return new ResponseEntity<>("Incorrect password", HttpStatus.BAD_REQUEST);
        }
        return userService.completePasswordReset(keyAndPassword.getNewPassword(), keyAndPassword.getKey())
            .map(user -> new ResponseEntity<String>(HttpStatus.OK)).orElse(new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR));
    }

    // Length check against the DTO's configured min/max password bounds.
    private boolean checkPasswordLength(String password) {
        return (!StringUtils.isEmpty(password) && password.length() >= UserDTO.PASSWORD_MIN_LENGTH && password.length() <= UserDTO.PASSWORD_MAX_LENGTH);
    }
}
apache-2.0
cluster-profiler/clpr_d
inc/reader.hpp
1103
/** * @file reader.hpp * @author Bill Brouwer <whiskeyjulietb@gmail.com> * @version 1.0 * * @section LICENSE * Copyright 2014 William J. Brouwer, Pierre-Yves Taunay * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * @section DESCRIPTION * * input reader, fill in db */ #ifndef _READER #define _READER #include <boost/thread/thread.hpp> #include "clpr_proc_db.hpp" #include "utilities.hpp" namespace clpr_d{ /// a class for reading pidstat data from a named pipe class reader{ private: public : reader(clpr_proc_db &input); ~reader(){}; }; }; #endif
apache-2.0
org-wave/Scanner
src/org/wave/scanner/factories/XHTMLFactory.java
778
package org.wave.scanner.factories; import java.lang.reflect.Method; import java.util.List; import org.wave.scanner.elements.BODY; import org.wave.scanner.elements.UL; import org.wave.scanner.elements.XHTML; import org.wave.scanner.enums.ErrorEnum; import org.wave.utils.string.StringUtil; public class XHTMLFactory { private XHTMLFactory() { } public static XHTML createXHTML(List<Method> methods) throws IllegalArgumentException { if (methods == null) { throw new IllegalArgumentException(ErrorEnum.NULL_LIST.getMessage()); } UL ul = new UL(); for (Method method : methods) { ul.add(StringUtil.toHumanCase(method.getName())); } BODY body = new BODY(); body.setUl(ul); XHTML xhtml = new XHTML(); xhtml.setBody(body); return xhtml; } }
apache-2.0
danielsdeleo/critical
spec/unit/output_handler/dispatcher_spec.rb
2904
require File.expand_path(File.dirname(__FILE__) + '/../../spec_helper')

describe OutputHandler::Dispatcher do

  # Class-level configuration DSL: handler()/via()/as() register the concrete
  # output handlers (and their options) a dispatcher subclass fans out to.
  describe "defining output handlers to dispatch to" do
    before do
      # Fresh anonymous subclass per example so handler registrations don't leak.
      @handler_class = Class.new(OutputHandler::Dispatcher)
    end

    it "is configured with a handler name and options" do
      # Options may be supplied either via a block...
      @handler_class.handler(:text) do |opts|
        opts[:output] = STDOUT
      end
      # ...or as a trailing options hash.
      @handler_class.handler :deferred, :some_option => :a_setting
      @handler_class.handlers.should == {OutputHandler::Text => {:output => STDOUT}, OutputHandler::Deferred => {:some_option => :a_setting}}
    end

    it "aliases handler() to via() and as()" do
      @handler_class.via :deferred, :some_option => :a_setting
      @handler_class.as :text, :output => STDOUT
      @handler_class.handlers.should == {OutputHandler::Text => {:output => STDOUT}, OutputHandler::Deferred => {:some_option => :a_setting}}
    end

    it "raises an error when you try to add a handler that doesn't exist" do
      @handler_class = Class.new(OutputHandler::Dispatcher)
      lambda {@handler_class.handler(:doesnotexist, :opts=>:dontmatterhere)}.should raise_error
    end

    it "passes itself into a block for handy configuration" do
      @handler_class.configure do |klass|
        klass.should equal(@handler_class)
      end
    end
  end

  # An instance of a configured dispatcher should forward every output-handler
  # message to each of its proxied handler instances.
  describe "when proxying messages to other output handlers" do
    before do
      @handler_class = Class.new(OutputHandler::Dispatcher)
      @handler_class.handler :deferred, :some_option => :a_setting
      @handler_class.handler :text, :output => STDOUT
      @handler = @handler_class.new
      @proxied_handlers = @handler.proxied_handlers
    end

    # Spec-generating macro: defines one example per dispatched message name.
    def self.it_dispatches_the_message(message)
      self.it "dispatches :#{message} to individual output handlers" do
        # Make sure we're actually testing something before we get to the real tests
        @proxied_handlers.should_not be_nil
        @proxied_handlers.should_not be_empty
        @proxied_handlers.each { |h| h.should_receive(message) }
        @handler.send(message, :foo)
      end
    end

    it_should_behave_like "a metric output handler"

    it_dispatches_the_message(:collection_started)
    it_dispatches_the_message(:collection_succeeded)
    it_dispatches_the_message(:annotate)
    it_dispatches_the_message(:collection_failed)
    it_dispatches_the_message(:processing_failed)
    it_dispatches_the_message(:expectation_failed)
    it_dispatches_the_message(:expectation_succeeded)
    it_dispatches_the_message(:collection_completed)

    it "sets the metric on all proxied handlers when the metric is set" do
      @handler.metric = :a_metric_collector
      @handler.proxied_handlers.each { |h| h.metric.should == :a_metric_collector }
    end
  end
end
apache-2.0
AubinMahe/AubinMahe.github.io
doxygen-cpp/html/d2/de2/namespacedcrud.js
1345
var namespacedcrud = [ [ "Arguments", "db/df5/classdcrud_1_1_arguments.html", "db/df5/classdcrud_1_1_arguments" ], [ "ClassID", "d2/d51/classdcrud_1_1_class_i_d.html", "d2/d51/classdcrud_1_1_class_i_d" ], [ "GUID", "db/df6/classdcrud_1_1_g_u_i_d.html", "db/df6/classdcrud_1_1_g_u_i_d" ], [ "ICache", "d8/d86/classdcrud_1_1_i_cache.html", "d8/d86/classdcrud_1_1_i_cache" ], [ "ICallback", "d2/dfa/classdcrud_1_1_i_callback.html", "d2/dfa/classdcrud_1_1_i_callback" ], [ "ICRUD", "d9/d9a/classdcrud_1_1_i_c_r_u_d.html", "d9/d9a/classdcrud_1_1_i_c_r_u_d" ], [ "IDispatcher", "d4/de7/classdcrud_1_1_i_dispatcher.html", "d4/de7/classdcrud_1_1_i_dispatcher" ], [ "IOperation", "db/d8b/classdcrud_1_1_i_operation.html", "db/d8b/classdcrud_1_1_i_operation" ], [ "IParticipant", "d4/d0f/classdcrud_1_1_i_participant.html", "d4/d0f/classdcrud_1_1_i_participant" ], [ "IProvided", "d9/d4f/classdcrud_1_1_i_provided.html", "d9/d4f/classdcrud_1_1_i_provided" ], [ "IRegistry", "d7/dad/classdcrud_1_1_i_registry.html", "d7/dad/classdcrud_1_1_i_registry" ], [ "IRequired", "d6/d7b/classdcrud_1_1_i_required.html", "d6/d7b/classdcrud_1_1_i_required" ], [ "Network", "d7/df3/classdcrud_1_1_network.html", null ], [ "Shareable", "d6/d8a/classdcrud_1_1_shareable.html", "d6/d8a/classdcrud_1_1_shareable" ] ];
apache-2.0
jbload/Muscularity
Muscularity.Web/src/app/shared/swagger-codegen/modelsWorkoutsSaveWorkoutWorkoutViewModel.ts
483
/** * Muscularity API * The web API used by Muscularity web and mobile clients. * * OpenAPI spec version: v1 * Contact: support@muscularityapp.com * * NOTE: This class is auto generated by the swagger code generator program. * https://github.com/swagger-api/swagger-codegen.git * Do not edit the class manually. */ export interface ModelsWorkoutsSaveWorkoutWorkoutViewModel { workoutId?: number; bodyWeight?: number; notes?: string; completeDate?: Date; }
apache-2.0
koenighotze/Hotel-Reservation-Tool
portal/src/main/java/org/koenighotze/jee7hotel/portal/IndexView.java
398
package org.koenighotze.jee7hotel.portal; import org.jug.view.View; import org.thymeleaf.TemplateEngine; import javax.inject.Inject; import javax.ws.rs.GET; import javax.ws.rs.Path; /** * @author dschmitz */ @Path("/") public class IndexView { @Inject private TemplateEngine templateEngine; @GET public View index() { return View.of("/index", templateEngine); } }
apache-2.0
OpenGamma/Strata
modules/pricer/src/main/java/com/opengamma/strata/pricer/sensitivity/package-info.java
229
/* * Copyright (C) 2015 - present by OpenGamma Inc. and the OpenGamma group of companies * * Please see distribution for license. */ /** * Calculators for sensitivities. */ package com.opengamma.strata.pricer.sensitivity;
apache-2.0
arenadata/ambari
ambari-server/src/test/java/org/apache/ambari/server/state/ConfigHelperTest.java
50437
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ambari.server.state; import static org.easymock.EasyMock.createNiceMock; import static org.easymock.EasyMock.createStrictMock; import static org.easymock.EasyMock.expect; import static org.easymock.EasyMock.replay; import static org.easymock.EasyMock.verify; import java.sql.SQLException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import javax.persistence.EntityManager; import org.apache.ambari.server.AmbariException; import org.apache.ambari.server.H2DatabaseCleaner; import org.apache.ambari.server.actionmanager.HostRoleCommandFactory; import org.apache.ambari.server.actionmanager.RequestFactory; import org.apache.ambari.server.api.services.AmbariMetaInfo; import org.apache.ambari.server.controller.AmbariCustomCommandExecutionHelper; import org.apache.ambari.server.controller.AmbariManagementController; import org.apache.ambari.server.controller.ClusterRequest; import org.apache.ambari.server.controller.ConfigurationRequest; import org.apache.ambari.server.controller.spi.ClusterController; 
import org.apache.ambari.server.orm.DBAccessor;
import org.apache.ambari.server.orm.GuiceJpaInitializer;
import org.apache.ambari.server.orm.InMemoryDefaultTestModule;
import org.apache.ambari.server.orm.OrmTestHelper;
import org.apache.ambari.server.orm.dao.HostRoleCommandDAO;
import org.apache.ambari.server.orm.entities.RepositoryVersionEntity;
import org.apache.ambari.server.security.SecurityHelper;
import org.apache.ambari.server.security.TestAuthenticationFactory;
import org.apache.ambari.server.stack.StackManagerFactory;
import org.apache.ambari.server.state.cluster.ClusterFactory;
import org.apache.ambari.server.state.configgroup.ConfigGroup;
import org.apache.ambari.server.state.configgroup.ConfigGroupFactory;
import org.apache.ambari.server.state.host.HostFactory;
import org.apache.ambari.server.state.stack.OsFamily;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.runners.Enclosed;
import org.junit.runner.RunWith;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.security.core.context.SecurityContextHolder;

import com.google.inject.AbstractModule;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.persist.Transactional;

import junit.framework.Assert;

// Tests for ConfigHelper. The Enclosed runner executes the @Test methods of
// each nested static class with that class's own @Before/@After fixture.
@RunWith(Enclosed.class)
public class ConfigHelperTest {

  public static class RunWithInMemoryDefaultTestModule {
    private final static Logger LOG = LoggerFactory.getLogger(ConfigHelperTest.class);

    // Shared fixture state, (re)built by setup() before every test.
    private static Clusters clusters;
    private static Injector injector;
    private static String clusterName;
    private static Cluster cluster;
    private static ConfigGroupFactory configGroupFactory;
    private static ConfigHelper configHelper;
    private static AmbariManagementController managementController;
    private static AmbariMetaInfo metaInfo;
    private static ConfigFactory configFactory;

    /**
     * Builds the shared fixture: an in-memory Ambari setup with cluster "c1"
     * on stack HDP-2.0.6, hosts h1..h3, services FLUME and OOZIE, and initial
     * desired configs for the types core-site, flume-conf, global, oozie-site
     * and ams-site (all tagged "version1").
     */
    @Before
    public void setup() throws Exception {
      // Set the authenticated user
      // TODO: remove this or replace the authenticated user to test authorization rules
      SecurityContextHolder.getContext().setAuthentication(
          TestAuthenticationFactory.createAdministrator("admin"));

      injector = Guice.createInjector(new InMemoryDefaultTestModule());
      injector.getInstance(GuiceJpaInitializer.class);
      clusters = injector.getInstance(Clusters.class);
      configGroupFactory = injector.getInstance(ConfigGroupFactory.class);
      configHelper = injector.getInstance(ConfigHelper.class);
      managementController = injector.getInstance(AmbariManagementController.class);
      metaInfo = injector.getInstance(AmbariMetaInfo.class);
      configFactory = injector.getInstance(ConfigFactory.class);

      // Register the stack and a repository version for it before creating
      // the cluster, since addCluster/addService reference them.
      StackId stackId = new StackId("HDP-2.0.6");
      OrmTestHelper helper = injector.getInstance(OrmTestHelper.class);
      helper.createStack(stackId);

      RepositoryVersionEntity repositoryVersion =
          helper.getOrCreateRepositoryVersion(stackId, "2.0.6");

      clusterName = "c1";
      clusters.addCluster(clusterName, stackId);
      cluster = clusters.getCluster(clusterName);
      Assert.assertNotNull(cluster);

      clusters.addHost("h1");
      clusters.addHost("h2");
      clusters.addHost("h3");
      Assert.assertNotNull(clusters.getHost("h1"));
      Assert.assertNotNull(clusters.getHost("h2"));
      Assert.assertNotNull(clusters.getHost("h3"));

      // core-site
      ConfigurationRequest cr = new ConfigurationRequest();
      cr.setClusterName(clusterName);
      cr.setType("core-site");
      cr.setVersionTag("version1");
      cr.setProperties(new HashMap<String, String>() {{
        put("ipc.client.connect.max.retries", "30");
        put("fs.trash.interval", "30");
      }});
      cr.setPropertiesAttributes(new HashMap<String, Map<String, String>>() {{
        Map<String, String> attrs = new HashMap<>();
        attrs.put("ipc.client.connect.max.retries", "1");
        attrs.put("fs.trash.interval", "2");
        put("attribute1", attrs);
      }});

      final ClusterRequest clusterRequest1 =
          new ClusterRequest(cluster.getClusterId(), clusterName,
              cluster.getDesiredStackVersion().getStackVersion(), null);

      clusterRequest1.setDesiredConfig(Collections.singletonList(cr));
      managementController.updateClusters(new HashSet<ClusterRequest>() {{
        add(clusterRequest1);
      }}, null);

      // flume-conf (no properties — just the type/tag plus the two services)
      ConfigurationRequest cr2 = new ConfigurationRequest();
      cr2.setClusterName(clusterName);
      cr2.setType("flume-conf");
      cr2.setVersionTag("version1");
      cluster.addService("FLUME", repositoryVersion);
      cluster.addService("OOZIE", repositoryVersion);

      final ClusterRequest clusterRequest2 =
          new ClusterRequest(cluster.getClusterId(), clusterName,
              cluster.getDesiredStackVersion().getStackVersion(), null);

      clusterRequest2.setDesiredConfig(Collections.singletonList(cr2));
      managementController.updateClusters(new HashSet<ClusterRequest>() {{
        add(clusterRequest2);
      }}, null);

      // global — note: deliberately reuses the same ConfigurationRequest 'cr'
      cr.setType("global");
      cr.setVersionTag("version1");
      cr.setProperties(new HashMap<String, String>() {{
        put("dfs_namenode_name_dir", "/hadoop/hdfs/namenode");
        put("namenode_heapsize", "1024");
      }});
      cr.setPropertiesAttributes(new HashMap<String, Map<String, String>>() {{
        Map<String, String> attrs = new HashMap<>();
        attrs.put("dfs_namenode_name_dir", "3");
        attrs.put("namenode_heapsize", "4");
        put("attribute2", attrs);
      }});

      final ClusterRequest clusterRequest3 =
          new ClusterRequest(cluster.getClusterId(), clusterName,
              cluster.getDesiredStackVersion().getStackVersion(), null);

      clusterRequest3.setDesiredConfig(Collections.singletonList(cr));
      managementController.updateClusters(new HashSet<ClusterRequest>() {{
        add(clusterRequest3);
      }}, null);

      // oozie-site
      ConfigurationRequest cr4 = new ConfigurationRequest();
      cr4.setClusterName(clusterName);
      cr4.setType("oozie-site");
      cr4.setVersionTag("version1");
      cr4.setProperties(new HashMap<String, String>() {{
        put("oozie.authentication.type", "simple");
        put("oozie.service.HadoopAccessorService.kerberos.enabled", "false");
      }});
      cr4.setPropertiesAttributes(null);

      final ClusterRequest clusterRequest4 =
          new ClusterRequest(cluster.getClusterId(), clusterName,
              cluster.getDesiredStackVersion().getStackVersion(), null);

      clusterRequest4.setDesiredConfig(Collections.singletonList(cr4));
      managementController.updateClusters(new HashSet<ClusterRequest>() {{
        add(clusterRequest4);
      }}, null);

      // ams-site
      ConfigurationRequest cr5 = new ConfigurationRequest();
      cr5.setClusterName(clusterName);
      cr5.setType("ams-site");
      cr5.setVersionTag("version1");
      cr5.setProperties(new HashMap<String, String>() {{
        put("timeline.service.operating.mode", "embedded");
        put("timeline.service.fifo.enabled", "false");
      }});
      cr5.setPropertiesAttributes(null);

      final ClusterRequest clusterRequest5 =
          new ClusterRequest(cluster.getClusterId(), clusterName,
              cluster.getDesiredStackVersion().getStackVersion(), null);

      clusterRequest5.setDesiredConfig(Collections.singletonList(cr5));
      managementController.updateClusters(new HashSet<ClusterRequest>() {{
        add(clusterRequest5);
      }}, null);
    }

    /**
     * Tears the fixture down: drops the in-memory database, stops the
     * persistence service and clears the authenticated user.
     */
    @After
    public void tearDown() throws AmbariException, SQLException {
      H2DatabaseCleaner.clearDatabaseAndStopPersistenceService(injector);

      // Clear the authenticated user
      SecurityContextHolder.getContext().setAuthentication(null);
    }

    /**
     * Creates a config group named {@code name} in the test cluster that pins
     * the given {@code configs} (keyed by config type) to the given
     * {@code hosts}, and returns the new group's id.
     */
    @Transactional
    Long addConfigGroup(String name, String tag, List<String> hosts,
                        List<Config> configs) throws AmbariException {

      Map<Long, Host> hostMap = new HashMap<>();
      Map<String, Config> configMap = new HashMap<>();

      // NOTE(review): host-map keys are synthetic 1-based ids, not the hosts'
      // persisted ids — appears intentional for this fixture; confirm.
      Long hostId = 1L;
      for (String hostname : hosts) {
        Host host = clusters.getHost(hostname);
        hostMap.put(hostId, host);
        hostId++;
      }

      for (Config config : configs) {
        configMap.put(config.getType(), config);
      }

      ConfigGroup configGroup = configGroupFactory.createNew(cluster, name,
          tag, tag, "", configMap, hostMap);
      LOG.info("Config group created with tag " + tag);
      configGroup.setTag(tag);

      cluster.addConfigGroup(configGroup);

      return configGroup.getId();
    }

    /**
     * Applies {@code properties} as a new desired config of type
     * {@code configType} with version tag {@code configTag}, submitted
     * through a ClusterRequest.
     */
    void applyConfig(Map<String, String> properties, String configType,
                     String configTag) throws Exception {
      ConfigurationRequest cr = new ConfigurationRequest();
      cr.setClusterName(clusterName);
      cr.setType(configType);
      cr.setVersionTag(configTag);
cr.setProperties(properties);

      final ClusterRequest clusterRequest =
          new ClusterRequest(cluster.getClusterId(), clusterName,
              cluster.getDesiredStackVersion().getStackVersion(), null);

      clusterRequest.setDesiredConfig(Collections.singletonList(cr));
      managementController.updateClusters(new HashSet<ClusterRequest>() {{
        add(clusterRequest);
      }}, null);
    }

    /**
     * processHiddenAttribute: per the expected maps below, the
     * javax.jdo.option.ConnectionPassword entry is removed for HIVE_CLIENT
     * but kept for HIVE_SERVER, and removed for any component when the
     * configDownload flag is true. Relies on the HDP-2.0.5 stack's default
     * attributes for hive-site (fetched just below) marking that property —
     * TODO(review): confirm against the stack definition.
     */
    @Test
    public void testProcessHiddenAttribute() throws Exception {
      StackInfo stackInfo = metaInfo.getStack("HDP", "2.0.5");
      Map<String, Map<String, Map<String, String>>> configAttributes = new HashMap<>();
      configAttributes.put("hive-site",
          stackInfo.getDefaultConfigAttributesForConfigType("hive-site"));

      Map<String, Map<String, String>> originalConfig_hiveClient = createHiveConfig();
      // Expected for HIVE_CLIENT: ConnectionPassword is stripped.
      Map<String, Map<String, String>> expectedConfig_hiveClient =
          new HashMap<String, Map<String, String>>() {{
            put("hive-site", new HashMap<String, String>() {{
              put("javax.jdo.option.ConnectionDriverName", "oracle");
              put("hive.metastore.warehouse.dir", "/tmp");
            }});
          }};

      ConfigHelper.processHiddenAttribute(originalConfig_hiveClient,
          configAttributes, "HIVE_CLIENT", false);
      Assert.assertEquals(expectedConfig_hiveClient, originalConfig_hiveClient);

      Map<String, Map<String, String>> originalConfig_hiveServer = createHiveConfig();
      // Expected for HIVE_SERVER: unchanged.
      Map<String, Map<String, String>> expectedConfig_hiveServer = createHiveConfig();

      ConfigHelper.processHiddenAttribute(originalConfig_hiveServer,
          configAttributes, "HIVE_SERVER", false);
      Assert.assertEquals(expectedConfig_hiveServer, originalConfig_hiveServer);

      Map<String, Map<String, String>> originalConfig_hiveServer1 = createHiveConfig();
      Map<String, Map<String, String>> expectedConfig_hiveServer1 = expectedConfig_hiveClient;

      // config download removes hidden properties without respecting of component
      ConfigHelper.processHiddenAttribute(originalConfig_hiveServer1,
          configAttributes, "HIVE_SERVER", true);
      Assert.assertEquals(expectedConfig_hiveServer1, originalConfig_hiveServer1);
    }

    // Sample hive-site config used by testProcessHiddenAttribute; the
    // ConnectionPassword entry is the one exercised by the hidden-attribute
    // handling above.
    private Map<String, Map<String, String>> createHiveConfig() {
      return new HashMap<String, Map<String, String>>() {{
        put("hive-site", new HashMap<String, String>() {{
          put("javax.jdo.option.ConnectionDriverName", "oracle");
          put("javax.jdo.option.ConnectionPassword", "1");
          put("hive.metastore.warehouse.dir", "/tmp");
        }});
      }};
    }

    /**
     * Effective desired tags for a host must contain both the cluster-wide
     * tag ("version1") and, under the config-group id, the override tag
     * ("version122") of a group that includes the host.
     */
    @Test
    public void testEffectiveTagsForHost() throws Exception {
      // Setup: cluster-wide ams-env config, tag "version1".
      ConfigurationRequest cr5 = new ConfigurationRequest();
      cr5.setClusterName(clusterName);
      cr5.setType("ams-env");
      cr5.setVersionTag("version1");
      cr5.setProperties(new HashMap<String, String>() {{
        put("metrics_collector_log_dir", "/var/log/ambari-metrics-collector");
        put("metrics_collector_pid_dir", "/var/run/ambari-metrics-collector");
      }});
      cr5.setPropertiesAttributes(null);

      final ClusterRequest clusterRequest6 =
          new ClusterRequest(cluster.getClusterId(), clusterName,
              cluster.getDesiredStackVersion().getStackVersion(), null);

      clusterRequest6.setDesiredConfig(Collections.singletonList(cr5));
      managementController.updateClusters(new HashSet<ClusterRequest>() {{
        add(clusterRequest6);
      }}, null);

      Map<String, String> properties = new HashMap<>();
      properties.put("a", "b");
      properties.put("c", "d");
      final Config config = configFactory.createNew(cluster, "ams-env",
          "version122", properties, null);

      // Config group overriding ams-env on h1 only.
      Long groupId = addConfigGroup("g1", "t1", new ArrayList<String>() {{
        add("h1");
      }}, new ArrayList<Config>() {{
        add(config);
      }});

      Assert.assertNotNull(groupId);

      Map<String, Map<String, String>> configTags = configHelper
          .getEffectiveDesiredTags(cluster, "h1");

      Assert.assertNotNull(configTags);
      Map<String, String> tagsWithOverrides = configTags.get("ams-env");
      Assert.assertNotNull(tagsWithOverrides);
      Assert.assertTrue(tagsWithOverrides.containsKey(ConfigHelper.CLUSTER_DEFAULT_TAG));
      Assert.assertEquals("version1", tagsWithOverrides.get(ConfigHelper.CLUSTER_DEFAULT_TAG));
      Assert.assertTrue(tagsWithOverrides.containsKey(groupId.toString()));
      Assert.assertEquals("version122", tagsWithOverrides.get(groupId.toString()));
    }

    @Test
    public void
testEffectivePropertiesWithOverrides() throws Exception { //Setup // core-site ConfigurationRequest cr = new ConfigurationRequest(); cr.setClusterName(clusterName); cr.setType("core-site2"); cr.setVersionTag("version1"); cr.setProperties(new HashMap<String, String>() {{ put("ipc.client.connect.max.retries", "30"); put("fs.trash.interval", "30"); }}); cr.setPropertiesAttributes(new HashMap<String, Map<String, String>>() {{ Map<String, String> attrs = new HashMap<>(); attrs.put("ipc.client.connect.max.retries", "1"); attrs.put("fs.trash.interval", "2"); put("attribute1", attrs); }}); final ClusterRequest clusterRequest1 = new ClusterRequest(cluster.getClusterId(), clusterName, cluster.getDesiredStackVersion().getStackVersion(), null); clusterRequest1.setDesiredConfig(Collections.singletonList(cr)); managementController.updateClusters(new HashSet<ClusterRequest>() {{ add(clusterRequest1); }}, null); // global cr.setType("global2"); cr.setVersionTag("version1"); cr.setProperties(new HashMap<String, String>() {{ put("dfs_namenode_name_dir", "/hadoop/hdfs/namenode"); put("namenode_heapsize", "1024"); }}); cr.setPropertiesAttributes(new HashMap<String, Map<String, String>>() {{ Map<String, String> attrs = new HashMap<>(); attrs.put("dfs_namenode_name_dir", "3"); attrs.put("namenode_heapsize", "4"); put("attribute2", attrs); }}); final ClusterRequest clusterRequest3 = new ClusterRequest(cluster.getClusterId(), clusterName, cluster.getDesiredStackVersion().getStackVersion(), null); clusterRequest3.setDesiredConfig(Collections.singletonList(cr)); managementController.updateClusters(new HashSet<ClusterRequest>() {{ add(clusterRequest3); }}, null); Map<String, String> properties = new HashMap<>(); properties.put("a", "b"); properties.put("c", "d"); final Config config1 = configFactory.createNew(cluster, "core-site2", "version122", properties, null); Map<String, String> properties2 = new HashMap<>(); properties2.put("namenode_heapsize", "1111"); final Config config2 = 
configFactory.createNew(cluster, "global2", "version122", properties2, null); Long groupId = addConfigGroup("g2", "t1", new ArrayList<String>() {{ add("h1"); }}, new ArrayList<Config>() {{ add(config1); add(config2); }}); Assert.assertNotNull(groupId); Map<String, Map<String, String>> propertyMap = configHelper .getEffectiveConfigProperties(cluster, configHelper.getEffectiveDesiredTags(cluster, "h1")); Assert.assertNotNull(propertyMap); Assert.assertTrue(propertyMap.containsKey("global2")); Map<String, String> globalProps = propertyMap.get("global2"); Assert.assertEquals("1111", globalProps.get("namenode_heapsize")); Assert.assertEquals("/hadoop/hdfs/namenode", globalProps.get("dfs_namenode_name_dir")); Assert.assertTrue(propertyMap.containsKey("core-site")); Map<String, String> coreProps = propertyMap.get("core-site2"); Assert.assertTrue(coreProps.containsKey("a")); Assert.assertTrue(coreProps.containsKey("c")); Assert.assertEquals("30", coreProps.get("ipc.client.connect.max.retries")); } @Test public void testEffectivePropertiesAttributesWithOverrides() throws Exception { //Another version of core-site & global. 
// core-site3 ConfigurationRequest crr = new ConfigurationRequest(); crr.setClusterName(clusterName); crr.setType("core-site3"); crr.setVersionTag("version1"); crr.setProperties(new HashMap<String, String>() {{ put("ipc.client.connect.max.retries", "30"); put("fs.trash.interval", "30"); }}); crr.setPropertiesAttributes(new HashMap<String, Map<String, String>>() {{ Map<String, String> attrs = new HashMap<>(); attrs.put("ipc.client.connect.max.retries", "1"); attrs.put("fs.trash.interval", "2"); put("attribute1", attrs); }}); final ClusterRequest clusterRequestDup = new ClusterRequest(cluster.getClusterId(), clusterName, cluster.getDesiredStackVersion().getStackVersion(), null); clusterRequestDup.setDesiredConfig(Collections.singletonList(crr)); managementController.updateClusters(new HashSet<ClusterRequest>() {{ add(clusterRequestDup); }}, null); // global3 crr.setType("global3"); crr.setVersionTag("version1"); crr.setProperties(new HashMap<String, String>() {{ put("dfs_namenode_name_dir", "/hadoop/hdfs/namenode"); put("namenode_heapsize", "1024"); }}); crr.setPropertiesAttributes(new HashMap<String, Map<String, String>>() {{ Map<String, String> attrs = new HashMap<>(); attrs.put("dfs_namenode_name_dir", "3"); attrs.put("namenode_heapsize", "4"); put("attribute2", attrs); }}); final ClusterRequest clusterRequestGlobalDup = new ClusterRequest(cluster.getClusterId(), clusterName, cluster.getDesiredStackVersion().getStackVersion(), null); clusterRequestGlobalDup.setDesiredConfig(Collections.singletonList(crr)); managementController.updateClusters(new HashSet<ClusterRequest>() {{ add(clusterRequestGlobalDup); }}, null); Map<String, String> attributes = new HashMap<>(); attributes.put("fs.trash.interval", "11"); attributes.put("b", "y"); Map<String, Map<String, String>> config1Attributes = new HashMap<>(); config1Attributes.put("attribute1", attributes); final Config config1 = configFactory.createNew(cluster, "core-site3", "version122", new HashMap<String, String>(), 
config1Attributes); attributes = new HashMap<>(); attributes.put("namenode_heapsize", "z"); attributes.put("c", "q"); Map<String, Map<String, String>> config2Attributes = new HashMap<>(); config2Attributes.put("attribute2", attributes); final Config config2 = configFactory.createNew(cluster, "global3", "version122", new HashMap<String, String>(), config2Attributes); Long groupId = addConfigGroup("g3", "t1", new ArrayList<String>() {{ add("h3"); }}, new ArrayList<Config>() {{ add(config1); add(config2); }}); Assert.assertNotNull(groupId); Map<String, Map<String, Map<String, String>>> effectiveAttributes = configHelper .getEffectiveConfigAttributes(cluster, configHelper.getEffectiveDesiredTags(cluster, "h3")); Assert.assertNotNull(effectiveAttributes); Assert.assertEquals(7, effectiveAttributes.size()); Assert.assertTrue(effectiveAttributes.containsKey("global3")); Map<String, Map<String, String>> globalAttrs = effectiveAttributes.get("global3"); Assert.assertEquals(1, globalAttrs.size()); Assert.assertTrue(globalAttrs.containsKey("attribute2")); Map<String, String> attribute2Occurances = globalAttrs.get("attribute2"); Assert.assertEquals(3, attribute2Occurances.size()); Assert.assertTrue(attribute2Occurances.containsKey("namenode_heapsize")); Assert.assertEquals("z", attribute2Occurances.get("namenode_heapsize")); Assert.assertTrue(attribute2Occurances.containsKey("dfs_namenode_name_dir")); Assert.assertEquals("3", attribute2Occurances.get("dfs_namenode_name_dir")); Assert.assertTrue(attribute2Occurances.containsKey("c")); Assert.assertEquals("q", attribute2Occurances.get("c")); Assert.assertTrue(effectiveAttributes.containsKey("core-site3")); Map<String, Map<String, String>> coreAttrs = effectiveAttributes.get("core-site3"); Assert.assertEquals(1, coreAttrs.size()); Assert.assertTrue(coreAttrs.containsKey("attribute1")); Map<String, String> attribute1Occurances = coreAttrs.get("attribute1"); Assert.assertEquals(3, attribute1Occurances.size()); 
Assert.assertTrue(attribute1Occurances.containsKey("ipc.client.connect.max.retries")); Assert.assertEquals("1", attribute1Occurances.get("ipc.client.connect.max.retries")); Assert.assertTrue(attribute1Occurances.containsKey("fs.trash.interval")); Assert.assertEquals("11", attribute1Occurances.get("fs.trash.interval")); Assert.assertTrue(attribute1Occurances.containsKey("b")); Assert.assertEquals("y", attribute1Occurances.get("b")); } @Test public void testCloneAttributesMap() throws Exception { // init Map<String, Map<String, String>> targetAttributesMap = new HashMap<>(); Map<String, String> attributesValues = new HashMap<>(); attributesValues.put("a", "1"); attributesValues.put("b", "2"); attributesValues.put("f", "3"); attributesValues.put("q", "4"); targetAttributesMap.put("attr", attributesValues); Map<String, Map<String, String>> sourceAttributesMap = new HashMap<>(); attributesValues = new HashMap<>(); attributesValues.put("a", "5"); attributesValues.put("f", "6"); sourceAttributesMap.put("attr", attributesValues); attributesValues = new HashMap<>(); attributesValues.put("f", "7"); attributesValues.put("q", "8"); sourceAttributesMap.put("attr1", attributesValues); // eval configHelper.cloneAttributesMap(sourceAttributesMap, targetAttributesMap); // verification Assert.assertEquals(2, targetAttributesMap.size()); Assert.assertTrue(targetAttributesMap.containsKey("attr")); Assert.assertTrue(targetAttributesMap.containsKey("attr1")); Map<String, String> attributes = targetAttributesMap.get("attr"); Assert.assertEquals(4, attributes.size()); Assert.assertEquals("5", attributes.get("a")); Assert.assertEquals("2", attributes.get("b")); Assert.assertEquals("6", attributes.get("f")); Assert.assertEquals("4", attributes.get("q")); attributes = targetAttributesMap.get("attr1"); Assert.assertEquals(2, attributes.size()); Assert.assertEquals("7", attributes.get("f")); Assert.assertEquals("8", attributes.get("q")); } @Test public void testCloneAttributesMapSourceIsNull() 
throws Exception { // init Map<String, Map<String, String>> targetAttributesMap = new HashMap<>(); Map<String, String> attributesValues = new HashMap<>(); attributesValues.put("a", "1"); attributesValues.put("b", "2"); attributesValues.put("f", "3"); attributesValues.put("q", "4"); targetAttributesMap.put("attr", attributesValues); Map<String, Map<String, String>> sourceAttributesMap = null; // eval configHelper.cloneAttributesMap(sourceAttributesMap, targetAttributesMap); // verification // No exception should be thrown // targetMap should not be changed Assert.assertEquals(1, targetAttributesMap.size()); Assert.assertTrue(targetAttributesMap.containsKey("attr")); Map<String, String> attributes = targetAttributesMap.get("attr"); Assert.assertEquals(4, attributes.size()); Assert.assertEquals("1", attributes.get("a")); Assert.assertEquals("2", attributes.get("b")); Assert.assertEquals("3", attributes.get("f")); Assert.assertEquals("4", attributes.get("q")); } @Test public void testCloneAttributesMapTargetIsNull() throws Exception { // init Map<String, Map<String, String>> targetAttributesMap = null; Map<String, Map<String, String>> sourceAttributesMap = new HashMap<>(); Map<String, String> attributesValues = new HashMap<>(); attributesValues.put("a", "5"); attributesValues.put("f", "6"); sourceAttributesMap.put("attr", attributesValues); attributesValues = new HashMap<>(); attributesValues.put("f", "7"); attributesValues.put("q", "8"); sourceAttributesMap.put("attr1", attributesValues); // eval configHelper.cloneAttributesMap(sourceAttributesMap, targetAttributesMap); // verification // No exception should be thrown // sourceMap should not be changed Assert.assertEquals(2, sourceAttributesMap.size()); Assert.assertTrue(sourceAttributesMap.containsKey("attr")); Assert.assertTrue(sourceAttributesMap.containsKey("attr1")); Map<String, String> attributes = sourceAttributesMap.get("attr"); Assert.assertEquals(2, attributes.size()); Assert.assertEquals("5", 
attributes.get("a")); Assert.assertEquals("6", attributes.get("f")); attributes = sourceAttributesMap.get("attr1"); Assert.assertEquals(2, attributes.size()); Assert.assertEquals("7", attributes.get("f")); Assert.assertEquals("8", attributes.get("q")); } @Test public void testMergeAttributes() throws Exception { Map<String, Map<String, String>> persistedAttributes = new HashMap<>(); Map<String, String> persistedFinalAttrs = new HashMap<>(); persistedFinalAttrs.put("a", "true"); persistedFinalAttrs.put("c", "true"); persistedFinalAttrs.put("d", "true"); persistedAttributes.put("final", persistedFinalAttrs); Map<String, Map<String, String>> confGroupAttributes = new HashMap<>(); Map<String, String> confGroupFinalAttrs = new HashMap<>(); confGroupFinalAttrs.put("b", "true"); confGroupAttributes.put("final", confGroupFinalAttrs); Map<String, String> confGroupProperties = new HashMap<>(); confGroupProperties.put("a", "any"); confGroupProperties.put("b", "any"); confGroupProperties.put("c", "any"); Config overrideConfig = configFactory.createNew(cluster, "type", null, confGroupProperties, confGroupAttributes); Map<String, Map<String, String>> result = configHelper.overrideAttributes(overrideConfig, persistedAttributes); Assert.assertNotNull(result); Assert.assertEquals(1, result.size()); Map<String, String> finalResultAttributes = result.get("final"); Assert.assertNotNull(finalResultAttributes); Assert.assertEquals(2, finalResultAttributes.size()); Assert.assertEquals("true", finalResultAttributes.get("b")); Assert.assertEquals("true", finalResultAttributes.get("d")); } @Test public void testMergeAttributesWithNoAttributeOverrides() throws Exception { Map<String, Map<String, String>> persistedAttributes = new HashMap<>(); Map<String, String> persistedFinalAttrs = new HashMap<>(); persistedFinalAttrs.put("a", "true"); persistedFinalAttrs.put("c", "true"); persistedFinalAttrs.put("d", "true"); persistedAttributes.put("final", persistedFinalAttrs); Map<String, Map<String, 
String>> confGroupAttributes = new HashMap<>(); Map<String, String> confGroupProperties = new HashMap<>(); confGroupProperties.put("a", "any"); confGroupProperties.put("b", "any"); confGroupProperties.put("c", "any"); Config overrideConfig = configFactory.createNew(cluster, "type", null, confGroupProperties, confGroupAttributes); Map<String, Map<String, String>> result = configHelper.overrideAttributes(overrideConfig, persistedAttributes); Assert.assertNotNull(result); Assert.assertEquals(1, result.size()); Map<String, String> finalResultAttributes = result.get("final"); Assert.assertNotNull(finalResultAttributes); Assert.assertEquals(1, finalResultAttributes.size()); Assert.assertEquals("true", finalResultAttributes.get("d")); } @Test public void testMergeAttributesWithNullAttributes() throws Exception { Map<String, Map<String, String>> persistedAttributes = new HashMap<>(); Map<String, String> persistedFinalAttrs = new HashMap<>(); persistedFinalAttrs.put("a", "true"); persistedFinalAttrs.put("c", "true"); persistedFinalAttrs.put("d", "true"); persistedAttributes.put("final", persistedFinalAttrs); Map<String, String> confGroupProperties = new HashMap<>(); confGroupProperties.put("a", "any"); confGroupProperties.put("b", "any"); confGroupProperties.put("c", "any"); Config overrideConfig = configFactory.createNew(cluster, "type", null, confGroupProperties, null); Map<String, Map<String, String>> result = configHelper.overrideAttributes(overrideConfig, persistedAttributes); Assert.assertNotNull(result); Assert.assertEquals(1, result.size()); Map<String, String> finalResultAttributes = result.get("final"); Assert.assertNotNull(finalResultAttributes); Assert.assertEquals(3, finalResultAttributes.size()); Assert.assertEquals("true", finalResultAttributes.get("a")); Assert.assertEquals("true", finalResultAttributes.get("c")); Assert.assertEquals("true", finalResultAttributes.get("d")); } @Test public void testFilterInvalidPropertyValues() { Map<PropertyInfo, String> 
properties = new HashMap<>(); PropertyInfo prop1 = new PropertyInfo(); prop1.setName("1"); PropertyInfo prop2 = new PropertyInfo(); prop1.setName("2"); PropertyInfo prop3 = new PropertyInfo(); prop1.setName("3"); PropertyInfo prop4 = new PropertyInfo(); prop1.setName("4"); properties.put(prop1, "/tmp"); properties.put(prop2, "null"); properties.put(prop3, ""); properties.put(prop4, null); Set<String> resultSet = configHelper.filterInvalidPropertyValues(properties, "testlist"); Assert.assertEquals(1, resultSet.size()); Assert.assertEquals(resultSet.iterator().next(), "/tmp"); } @Test public void testMergeAttributesWithNullProperties() throws Exception { Map<String, Map<String, String>> persistedAttributes = new HashMap<>(); Map<String, String> persistedFinalAttrs = new HashMap<>(); persistedFinalAttrs.put("a", "true"); persistedFinalAttrs.put("c", "true"); persistedFinalAttrs.put("d", "true"); persistedAttributes.put("final", persistedFinalAttrs); Map<String, Map<String, String>> confGroupAttributes = new HashMap<>(); Map<String, String> confGroupFinalAttrs = new HashMap<>(); confGroupFinalAttrs.put("b", "true"); confGroupAttributes.put("final", confGroupFinalAttrs); Config overrideConfig = configFactory.createNew(cluster, "type", "version122", new HashMap<String,String>(), confGroupAttributes); Map<String, Map<String, String>> result = configHelper.overrideAttributes(overrideConfig, persistedAttributes); Assert.assertNotNull(result); Assert.assertEquals(1, result.size()); Map<String, String> finalResultAttributes = result.get("final"); Assert.assertNotNull(finalResultAttributes); Assert.assertEquals(4, finalResultAttributes.size()); Assert.assertEquals("true", finalResultAttributes.get("a")); Assert.assertEquals("true", finalResultAttributes.get("b")); Assert.assertEquals("true", finalResultAttributes.get("c")); Assert.assertEquals("true", finalResultAttributes.get("d")); } @Test public void testUpdateConfigType() throws Exception { Config currentConfig = 
cluster.getDesiredConfigByType("core-site"); Map<String, String> properties = currentConfig.getProperties(); // Attributes exist Map<String, Map<String, String>> propertiesAttributes = currentConfig.getPropertiesAttributes(); Assert.assertNotNull(propertiesAttributes); Assert.assertEquals(1, propertiesAttributes.size()); Assert.assertTrue(propertiesAttributes.containsKey("attribute1")); // Config tag before update Assert.assertEquals("version1", currentConfig.getTag()); // Properties before update Assert.assertEquals("30", properties.get("fs.trash.interval")); // Property and attribute exist Assert.assertTrue(properties.containsKey("ipc.client.connect.max.retries")); Assert.assertTrue(propertiesAttributes.get("attribute1").containsKey("ipc.client.connect.max.retries")); Map<String, String> updates = new HashMap<>(); updates.put("new-property", "new-value"); updates.put("fs.trash.interval", "updated-value"); Collection<String> removals = Collections.singletonList("ipc.client.connect.max.retries"); configHelper.updateConfigType(cluster, cluster.getCurrentStackVersion(), managementController, "core-site", updates, removals, "admin", "Test note"); Config updatedConfig = cluster.getDesiredConfigByType("core-site"); // Attributes aren't lost propertiesAttributes = updatedConfig.getPropertiesAttributes(); Assert.assertNotNull(propertiesAttributes); Assert.assertEquals(1, propertiesAttributes.size()); Assert.assertTrue(propertiesAttributes.containsKey("attribute1")); // Config tag updated Assert.assertFalse("version1".equals(updatedConfig.getTag())); // Property added properties = updatedConfig.getProperties(); Assert.assertTrue(properties.containsKey("new-property")); Assert.assertEquals("new-value", properties.get("new-property")); // Property updated Assert.assertTrue(properties.containsKey("fs.trash.interval")); Assert.assertEquals("updated-value", properties.get("fs.trash.interval")); Assert.assertEquals("2", 
propertiesAttributes.get("attribute1").get("fs.trash.interval")); // Property and attribute removed Assert.assertFalse(properties.containsKey("ipc.client.connect.max.retries")); Assert.assertFalse(propertiesAttributes.get("attribute1").containsKey("ipc.client.connect.max.retries")); } @Test public void testUpdateConfigTypeNoPropertyAttributes() throws Exception { Config currentConfig = cluster.getDesiredConfigByType("oozie-site"); Map<String, String> properties = currentConfig.getProperties(); // Config tag before update Assert.assertEquals("version1", currentConfig.getTag()); // Properties before update Assert.assertEquals("simple", properties.get("oozie.authentication.type")); Assert.assertEquals("false", properties.get("oozie.service.HadoopAccessorService.kerberos.enabled")); Map<String, String> updates = new HashMap<>(); updates.put("oozie.authentication.type", "kerberos"); updates.put("oozie.service.HadoopAccessorService.kerberos.enabled", "true"); configHelper.updateConfigType(cluster, cluster.getCurrentStackVersion(), managementController, "oozie-site", updates, null, "admin", "Test " + "note"); Config updatedConfig = cluster.getDesiredConfigByType("oozie-site"); // Config tag updated Assert.assertFalse("version1".equals(updatedConfig.getTag())); // Property added properties = updatedConfig.getProperties(); Assert.assertTrue(properties.containsKey("oozie.authentication.type")); Assert.assertEquals("kerberos", properties.get("oozie.authentication.type")); // Property updated Assert.assertTrue(properties.containsKey("oozie.service.HadoopAccessorService.kerberos.enabled")); Assert.assertEquals("true", properties.get("oozie.service.HadoopAccessorService.kerberos.enabled")); } @Test public void testUpdateConfigTypeRemovals() throws Exception { Config currentConfig = cluster.getDesiredConfigByType("ams-site"); Map<String, String> properties = currentConfig.getProperties(); // Config tag before update Assert.assertEquals("version1", currentConfig.getTag()); // 
Properties before update Assert.assertEquals("embedded", properties.get("timeline.service.operating.mode")); Assert.assertEquals("false", properties.get("timeline.service.fifo.enabled")); List<String> removals = new ArrayList<>(); removals.add("timeline.service.operating.mode"); configHelper.updateConfigType(cluster, cluster.getCurrentStackVersion(), managementController, "ams-site", null, removals, "admin", "Test note"); Config updatedConfig = cluster.getDesiredConfigByType("ams-site"); // Config tag updated Assert.assertFalse("version1".equals(updatedConfig.getTag())); // Property removed properties = updatedConfig.getProperties(); Assert.assertFalse(properties.containsKey("timeline.service.operating.mode")); // Property unchanged Assert.assertTrue(properties.containsKey("timeline.service.fifo.enabled")); Assert.assertEquals("false", properties.get("timeline.service.fifo.enabled")); } @Test public void testCalculateIsStaleConfigs() throws Exception { Map<String, HostConfig> schReturn = new HashMap<>(); HostConfig hc = new HostConfig(); // Put a different version to check for change hc.setDefaultVersionTag("version2"); schReturn.put("flume-conf", hc); ServiceComponent sc = createNiceMock(ServiceComponent.class); // set up mocks ServiceComponentHost sch = createNiceMock(ServiceComponentHost.class); expect(sc.getDesiredStackId()).andReturn(cluster.getDesiredStackVersion()).anyTimes(); // set up expectations expect(sch.getActualConfigs()).andReturn(schReturn).times(6); expect(sch.getHostName()).andReturn("h1").anyTimes(); expect(sch.getClusterId()).andReturn(cluster.getClusterId()).anyTimes(); expect(sch.getServiceName()).andReturn("FLUME").anyTimes(); expect(sch.getServiceComponentName()).andReturn("FLUME_HANDLER").anyTimes(); expect(sch.getServiceComponent()).andReturn(sc).anyTimes(); replay(sc, sch); // Cluster level config changes Assert.assertTrue(configHelper.isStaleConfigs(sch, null)); HostConfig hc2 = new HostConfig(); hc2.setDefaultVersionTag("version1"); 
schReturn.put("flume-conf", hc2); // invalidate cache to test new sch // Cluster level same configs Assert.assertFalse(configHelper.isStaleConfigs(sch, null)); // Cluster level same configs but group specific configs for host have been updated List<String> hosts = new ArrayList<>(); hosts.add("h1"); List<Config> configs = new ArrayList<>(); Config configImpl = configFactory.createNew(cluster, "flume-conf", "FLUME1", new HashMap<String,String>(), null); configs.add(configImpl); addConfigGroup("configGroup1", "FLUME", hosts, configs); // config group added for host - expect staleness Assert.assertTrue(configHelper.isStaleConfigs(sch, null)); HostConfig hc3 = new HostConfig(); hc3.setDefaultVersionTag("version1"); hc3.getConfigGroupOverrides().put(1l, "FLUME1"); schReturn.put("flume-conf", hc3); // version1 and FLUME1 - stale=false Assert.assertFalse(configHelper.isStaleConfigs(sch, null)); HostConfig hc4 = new HostConfig(); hc4.setDefaultVersionTag("version1"); hc4.getConfigGroupOverrides().put(1l, "FLUME2"); schReturn.put("flume-conf", hc4); // version1 and FLUME2 - stale=true Assert.assertTrue(configHelper.isStaleConfigs(sch, null)); HostConfig hc5 = new HostConfig(); hc5.setDefaultVersionTag("version3"); hc5.getConfigGroupOverrides().put(1l, "FLUME1"); schReturn.put("flume-conf", hc5); // version3 and FLUME1 - stale=true Assert.assertTrue(configHelper.isStaleConfigs(sch, null)); verify(sch); } } public static class RunWithCustomModule { private Injector injector; @Before public void setup() throws Exception { injector = Guice.createInjector(new AbstractModule() { @Override protected void configure() { final AmbariMetaInfo mockMetaInfo = createNiceMock(AmbariMetaInfo.class); final ClusterController clusterController = createStrictMock(ClusterController.class); bind(UpgradeContextFactory.class).toInstance(createNiceMock(UpgradeContextFactory.class)); bind(EntityManager.class).toInstance(createNiceMock(EntityManager.class)); 
bind(DBAccessor.class).toInstance(createNiceMock(DBAccessor.class)); bind(ClusterFactory.class).toInstance(createNiceMock(ClusterFactory.class)); bind(HostFactory.class).toInstance(createNiceMock(HostFactory.class)); bind(SecurityHelper.class).toInstance(createNiceMock(SecurityHelper.class)); bind(OsFamily.class).toInstance(createNiceMock(OsFamily.class)); bind(AmbariCustomCommandExecutionHelper.class).toInstance(createNiceMock(AmbariCustomCommandExecutionHelper.class)); bind(AmbariManagementController.class).toInstance(createNiceMock(AmbariManagementController.class)); bind(AmbariMetaInfo.class).toInstance(mockMetaInfo); bind(RequestFactory.class).toInstance(createNiceMock(RequestFactory.class)); bind(Clusters.class).toInstance(createNiceMock(Clusters.class)); bind(ClusterController.class).toInstance(clusterController); bind(StackManagerFactory.class).toInstance(createNiceMock(StackManagerFactory.class)); bind(HostRoleCommandFactory.class).toInstance(createNiceMock(HostRoleCommandFactory.class)); bind(HostRoleCommandDAO.class).toInstance(createNiceMock(HostRoleCommandDAO.class)); } }); // Set the authenticated user // TODO: remove this or replace the authenticated user to test authorization rules SecurityContextHolder.getContext().setAuthentication(TestAuthenticationFactory.createAdministrator("admin")); } @After public void teardown() { // Clear the authenticated user SecurityContextHolder.getContext().setAuthentication(null); } @Test public void testGetServicePropertiesSimpleInvocation() throws Exception { Cluster mockCluster = createStrictMock(Cluster.class); StackId mockStackVersion = createStrictMock(StackId.class); AmbariMetaInfo mockAmbariMetaInfo = injector.getInstance(AmbariMetaInfo.class); Service mockService = createStrictMock(Service.class); ServiceInfo mockServiceInfo = createStrictMock(ServiceInfo.class); PropertyInfo mockPropertyInfo1 = createStrictMock(PropertyInfo.class); PropertyInfo mockPropertyInfo2 = createStrictMock(PropertyInfo.class); 
List<PropertyInfo> serviceProperties = Arrays.asList(mockPropertyInfo1, mockPropertyInfo2); expect(mockCluster.getService("SERVICE")).andReturn(mockService).once(); expect(mockService.getDesiredStackId()).andReturn(mockStackVersion).once(); expect(mockStackVersion.getStackName()).andReturn("HDP").once(); expect(mockStackVersion.getStackVersion()).andReturn("2.2").once(); expect(mockAmbariMetaInfo.getService("HDP", "2.2", "SERVICE")).andReturn(mockServiceInfo).once(); expect(mockServiceInfo.getProperties()).andReturn(serviceProperties).once(); replay(mockAmbariMetaInfo, mockCluster, mockService, mockStackVersion, mockServiceInfo, mockPropertyInfo1, mockPropertyInfo2); mockAmbariMetaInfo.init(); Set<PropertyInfo> result = injector.getInstance(ConfigHelper.class) .getServiceProperties(mockCluster, "SERVICE"); Assert.assertNotNull(result); Assert.assertEquals(2, result.size()); verify(mockAmbariMetaInfo, mockCluster, mockStackVersion, mockServiceInfo, mockPropertyInfo1, mockPropertyInfo2); } @Test public void testGetServicePropertiesDoNoRemoveExcluded() throws Exception { StackId mockStackVersion = createStrictMock(StackId.class); AmbariMetaInfo mockAmbariMetaInfo = injector.getInstance(AmbariMetaInfo.class); ServiceInfo mockServiceInfo = createStrictMock(ServiceInfo.class); PropertyInfo mockPropertyInfo1 = createStrictMock(PropertyInfo.class); PropertyInfo mockPropertyInfo2 = createStrictMock(PropertyInfo.class); List<PropertyInfo> serviceProperties = Arrays.asList(mockPropertyInfo1, mockPropertyInfo2); expect(mockStackVersion.getStackName()).andReturn("HDP").once(); expect(mockStackVersion.getStackVersion()).andReturn("2.2").once(); expect(mockAmbariMetaInfo.getService("HDP", "2.2", "SERVICE")).andReturn(mockServiceInfo).once(); expect(mockServiceInfo.getProperties()).andReturn(serviceProperties).once(); replay(mockAmbariMetaInfo, mockStackVersion, mockServiceInfo, mockPropertyInfo1, mockPropertyInfo2); mockAmbariMetaInfo.init(); Set<PropertyInfo> result = 
injector.getInstance(ConfigHelper.class) .getServiceProperties(mockStackVersion, "SERVICE", false); Assert.assertNotNull(result); Assert.assertEquals(2, result.size()); verify(mockAmbariMetaInfo, mockStackVersion, mockServiceInfo, mockPropertyInfo1, mockPropertyInfo2); } @Test public void testGetServicePropertiesRemoveExcluded() throws Exception { StackId mockStackVersion = createStrictMock(StackId.class); AmbariMetaInfo mockAmbariMetaInfo = injector.getInstance(AmbariMetaInfo.class); ServiceInfo mockServiceInfo = createStrictMock(ServiceInfo.class); PropertyInfo mockPropertyInfo1 = createStrictMock(PropertyInfo.class); PropertyInfo mockPropertyInfo2 = createStrictMock(PropertyInfo.class); List<PropertyInfo> serviceProperties = Arrays.asList(mockPropertyInfo1, mockPropertyInfo2); expect(mockStackVersion.getStackName()).andReturn("HDP").once(); expect(mockStackVersion.getStackVersion()).andReturn("2.2").once(); expect(mockAmbariMetaInfo.getService("HDP", "2.2", "SERVICE")).andReturn(mockServiceInfo).once(); expect(mockServiceInfo.getProperties()).andReturn(serviceProperties).once(); expect(mockServiceInfo.getExcludedConfigTypes()).andReturn(Collections.singleton("excluded-type")).once(); expect(mockPropertyInfo1.getFilename()).andReturn("included-type.xml").times(2); expect(mockPropertyInfo2.getFilename()).andReturn("excluded-type.xml").once(); replay(mockAmbariMetaInfo, mockStackVersion, mockServiceInfo, mockPropertyInfo1, mockPropertyInfo2); mockAmbariMetaInfo.init(); Set<PropertyInfo> result = injector.getInstance(ConfigHelper.class) .getServiceProperties(mockStackVersion, "SERVICE", true); Assert.assertNotNull(result); Assert.assertEquals(1, result.size()); Assert.assertEquals("included-type.xml", result.iterator().next().getFilename()); verify(mockAmbariMetaInfo, mockStackVersion, mockServiceInfo, mockPropertyInfo1, mockPropertyInfo2); } } }
apache-2.0
CSchool/SchoolSite
CSchoolSite/main/migrations/0002_notification_queued.py
457
# -*- coding: utf-8 -*- # Generated by Django 1.11 on 2017-05-04 12:05 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('main', '0001_initial'), ] operations = [ migrations.AddField( model_name='notification', name='queued', field=models.BooleanField(db_index=True, default=False), ), ]
apache-2.0
htzh/lean
src/frontends/lean/parse_simp_tactic.cpp
3283
/*
Copyright (c) 2015 Microsoft Corporation. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.

Author: Leonardo de Moura
*/
#include "util/sstream.h"
#include "library/scoped_ext.h"
#include "library/tactic/exact_tactic.h"
#include "library/tactic/expr_to_tactic.h"
#include "library/simplifier/rewrite_rule_set.h"
#include "library/simplifier/simp_tactic.h"
#include "frontends/lean/parser.h"
#include "frontends/lean/parse_tactic_location.h"
#include "frontends/lean/tokens.h"

namespace lean {
// Parse the arguments of a 'simp' tactic invocation and build the tactic
// expression. The accepted (repeatable, order-independent) clauses are:
//   [e_1, ..., e_n]        -- extra lemmas and/or namespaces to simplify with
//   hiding [c_1, ..., c_n] -- active rewrite rules to exclude
//   using tac              -- auxiliary tactic (at most once)
// followed by an optional location (see parse_tactic_location).
expr parse_simp_tactic(parser & p) {
    buffer<expr> lemmas;      // additional simplification lemmas
    buffer<name> ns;          // namespaces whose rules should be used
    buffer<name> hiding;      // rewrite rules to hide
    optional<expr> tac;       // auxiliary tactic from the 'using' clause
    while (true) {
        if (p.curr_is_token(get_lbracket_tk())) {
            // '[' e, ..., e ']' : each entry is either a namespace name or
            // an arbitrary lemma expression.
            p.next();
            while (true) {
                if (p.curr_is_identifier()) {
                    auto id_pos = p.pos();
                    name id = p.get_name_val();
                    p.next();
                    // An identifier that names a valid namespace is recorded
                    // as a namespace; otherwise it is parsed as the head of a
                    // lemma expression (continuing with led parsing so that
                    // applications like 'foo bar' are consumed).
                    optional<name> real_ns = to_valid_namespace_name(p.env(), id);
                    if (real_ns) {
                        ns.push_back(*real_ns);
                    } else {
                        expr left = p.id_to_expr(id, id_pos);
                        unsigned rbp = 0;
                        while (rbp < p.curr_expr_lbp()) {
                            left = p.parse_led(left);
                        }
                        lemmas.push_back(left);
                    }
                } else {
                    // Not an identifier: a full expression lemma.
                    lemmas.push_back(p.parse_expr());
                }
                if (!p.curr_is_token(get_comma_tk()))
                    break;
                p.next();
            }
            p.check_token_next(get_rbracket_tk(), "invalid 'simp' tactic, ']' expected");
        } else if (p.curr_is_token_or_id(get_hiding_tk())) {
            // 'hiding' '[' c, ..., c ']' : constants must be active rewrite
            // rules, otherwise we report an error at the offending name.
            p.next();
            p.check_token_next(get_lbracket_tk(), "invalid 'simp' tactic, '[' expected");
            while (true) {
                auto id_pos = p.pos();
                name id = p.check_constant_next("invalid 'simp' tactic, constant expected");
                if (!is_rewrite_rule(p.env(), id))
                    throw parser_error(sstream() << "invalid 'simp' tactic, '" << id
                                       << "' is not an active rewriting rule", id_pos);
                hiding.push_back(id);
                if (!p.curr_is_token(get_comma_tk()))
                    break;
                p.next();
            }
            p.check_token_next(get_rbracket_tk(), "invalid 'simp' tactic, ']' expected");
        } else if (p.curr_is_token_or_id(get_using_tk())) {
            // 'using' tac : only one auxiliary tactic may be given.
            if (tac)
                throw parser_error("invalid 'simp' tactic, auxiliary tactic was already specified", p.pos());
            p.next();
            tac = p.parse_tactic(get_max_prec());
        } else {
            break;  // no more simp clauses
        }
    }
    location loc = parse_tactic_location(p);
    // Remark: simp_tac is the actual result
    expr simp_tac = mk_simp_tactic_expr(lemmas, ns, hiding, tac, loc);
    // Using (or_else simp_tac (exact sorry)) to simplify testing
    auto pos = p.pos();
    expr sorry = p.mk_sorry(pos);
    expr exact_sorry = p.mk_app(get_exact_tac_fn(), sorry, pos);
    return mk_app(get_or_else_tac_fn(), simp_tac, exact_sorry);
}
}
apache-2.0
kemixkoo/java-tools
bundles/xml-signature-tool/src/test/java/xyz/kemix/xml/sign/jdk/XmlEnvelopedKeyPairJdkDomSignTest.java
2717
package xyz.kemix.xml.sign.jdk; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import org.junit.Test; import org.w3c.dom.Document; import xyz.kemix.xml.sign.IXmlSign; /** * @author Kemix Koo <kemix_koo@163.com> * * Created at 2017-11-29 * */ public class XmlEnvelopedKeyPairJdkDomSignTest extends AbsTestXmlKeyPairJdkDomSign { @Override protected AbsXmlKeyPairJdkDomSign createJdkXmlSign() { return new XmlEnvelopedKeyPairJdkDomSign(); } @Override protected String getTestName() { return super.getTestName() + '-' + "enveloped"; } @Test public void test_validSelf_keyPair_format() throws Exception { Document doc = loadXmlDoc(PATH_JDK + getFilePart() + "_rsa-sha1-sha512_format" + IXmlSign.EXT_XML); assertNotNull(doc); AbsXmlJdkDomSign sign = createJdkXmlSign(); boolean valid = sign.validSelf(doc); assertFalse("After format the Data, won't be valid yet", valid); } @Test public void test_validSelf_keyPair_modified() throws Exception { Document doc = loadXmlDoc(PATH_JDK + getFilePart() + "_rsa-sha1-sha512_modified" + IXmlSign.EXT_XML); assertNotNull(doc); AbsXmlKeyPairJdkDomSign sign = createJdkXmlSign(); boolean valid = sign.validSelf(doc); assertFalse("After change the Data, won't be valid yet", valid); } @Test public void test_validSelf_keyPair_space_text() throws Exception { Document doc = loadXmlDoc(PATH_JDK + getFilePart() + "_rsa-sha1-sha512_space-text" + IXmlSign.EXT_XML); assertNotNull(doc); AbsXmlKeyPairJdkDomSign sign = createJdkXmlSign(); boolean valid = sign.validSelf(doc); assertFalse("After add one space after <Items> node, won't be valid yet", valid); } @Test public void test_validSelf_keyPair_space_node() throws Exception { Document doc = loadXmlDoc(PATH_JDK + getFilePart() + "_rsa-sha1-sha512" + IXmlSign.EXT_XML); assertNotNull(doc); AbsXmlKeyPairJdkDomSign sign = createJdkXmlSign(); boolean valid = sign.validSelf(doc); assertTrue("After add one space like <Items >, won't 
be valid yet", valid); } @Test public void test_validSelf_keyPair_space_attr() throws Exception { Document doc = loadXmlDoc(PATH_JDK + getFilePart() + "_rsa-sha1-sha512_space-attr" + IXmlSign.EXT_XML); assertNotNull(doc); AbsXmlKeyPairJdkDomSign sign = createJdkXmlSign(); boolean valid = sign.validSelf(doc); assertTrue("After add one space in Attribut for cake, won't be valid yet", valid); } }
apache-2.0
bioinfo-geek/improved-journey-jekyll-test
js/scripts.js
4850
(function( $ ){ // fullscreen gallery var $fullscreen_gallery = $( "#fullscreen-gallery" ); if ( $fullscreen_gallery.length > 0 ) { if ( $fullscreen_gallery.find('.gallery-item').length > 1 ) { // if there are more than 1 image if ( !$fullscreen_gallery.hasClass('kenburns-gallery') ) { // do not initialize if kenburns jQuery('#footer').prepend('<nav id="gallerynav"><a href="#" class="thumbs"> <svg xmlns="http://www.w3.org/2000/svg" width="18" height="18" viewBox="0 0 18 18"> <rect width="5" height="5" x="0" y="0" /><rect width="5" height="5" x="8" y="0" /><rect width="5" height="5" x="0" y="8" /><rect width="5" height="5" x="8" y="8" /></svg></a><a href="#" class="prev">&lt;</a> <a href="#" class="pause">&#9614;&#9614;</a> <a href="#" class="next">&gt;</a></nav>'); $fullscreen_gallery.before('<ul id="gallerythumbs">').cycle({ slideExpr: '.gallery-item', fx: 'fade', speed: 1000, timeout: 5000, pager: '#gallerythumbs', slideResize: true, containerResize: true, width: '100%', height: '100%', fit: 1, cleartypeNoBg : true, pagerAnchorBuilder: function(idx, slide) { return '<li><a href="#"><img src="' + jQuery(slide).find('img').attr('src') + '" alt="" /></a></li>'; }, prev: '#gallerynav .prev', next: '#gallerynav .next' }); var paused = false; jQuery('#gallerynav .pause').on('click', function() { if ( !paused ) { $fullscreen_gallery.cycle('pause'); paused = true; } else { $fullscreen_gallery.cycle('resume'); paused = false; } jQuery(this).toggleClass('active'); }); // show/hide thumbs var revealed = false; jQuery('#gallerynav a.thumbs').on('click', function() { // if clicked on svg button // show thumbs wrapper jQuery('#gallerythumbs').toggleClass('reveal'); // show thumbs if (!revealed) { jQuery('body').addClass('gallerythumbsrevealed'); revealed = true; } // hide thumbs else { jQuery('body').removeClass('gallerythumbsrevealed'); revealed = false; } // pause cycling $fullscreen_gallery.cycle('pause'); paused = true; }); jQuery('#gallerythumbs').on('click', function() { 
// if clicked on a thumb (large image will be automatically shown) or somewhere else // hide thumbs wrapper jQuery('#gallerythumbs').toggleClass('reveal'); jQuery('body').removeClass('gallerythumbsrevealed'); // resume cycling $fullscreen_gallery.cycle('resume'); paused = false; revealed = false; }); // scroll gallery thumbs with mousewheel jQuery('#gallerythumbs').on('mousewheel', function(event) { if (event.deltaY < 0) { // scroll right jQuery('#gallerythumbs').stop().animate({scrollLeft: '+=180px' }, 300); } else { jQuery('#gallerythumbs').stop().animate({scrollLeft: '-=180px' }, 300); } }); } } } /* ********* WINDOW LOAD ********** */ jQuery(window).load(function() { // load screen jQuery('.loadreveal').addClass('reveal'); jQuery('#loadscreen').stop().animate( { opacity: 0 }, 200, function() { jQuery('body.home').removeClass('loading'); jQuery(this).hide(); }); // masonry gallery var $masonry_gallery = jQuery('.masonry-gallery.gallery'); if ( $masonry_gallery.length > 0 ) { $masonry_gallery.each( function(index, element) { var $masonry_items = $(element).find('.gallery-item'); // set masonry layout $(element).isotope({ masonry: { columnWidth: $(element).find('.gallery-item')[0] }, itemSelector: '.gallery-item' }); $(element).isotope('layout'); // filtering jQuery('#gallery-filter li a').on('click', function(){ jQuery('#gallery-filter li a').removeClass('active'); jQuery(this).addClass('active'); var selector = jQuery(this).attr('data-filter'); $masonry_gallery.isotope({ filter: selector }); return false; }); // changing layout jQuery('#grid-changer li a').on('click', function(){ jQuery('#grid-changer li a').removeClass('active'); jQuery(this).toggleClass('active'); $masonry_items.removeClass('col-3'); $masonry_items.removeClass('col-4'); $masonry_items.removeClass('col-5'); $masonry_items.toggleClass(jQuery(this).closest('li').attr('class')); $masonry_gallery.isotope('layout'); }); }); } // before-after var $before_after = jQuery('.before-after.gallery'); if ( 
$before_after.length > 0 ) { $before_after.imageReveal({ barWidth: 4, touchBarWidth: 50, startPosition: 0.5, width: jQuery('.before-after img').width(), height: jQuery('.before-after img').height() }); } }); } )( jQuery );
apache-2.0
dhgarrette/2015-ccg-parsing
src/main/scala/dhg/ccg/parse/scg/DualScgParser.scala
12090
package dhg.ccg.parse.scg import scala.collection.mutable.{ Set => MSet } import scala.collection.mutable.{ Map => MMap } import dhg.ccg.cat._ import dhg.ccg.rule._ import dhg.ccg.parse._ import dhg.ccg.tagdict.TagDictionary import dhg.util._ import scalaz._ import Scalaz._ import dhg.ccg.parse.pcfg._ import dhg.ccg.prob._ import java.util.concurrent.atomic.AtomicInteger import dhg.ccg.tagdict.StartEndTags import dhg.ccg.util.DrawMatrix import scala.annotation.tailrec class DualScgParser( val rootDist: LogProbabilityDistribution[Cat], val prodDist: ConditionalLogProbabilityDistribution[Cat, Prod], val lctxDist: ConditionalLogProbabilityDistribution[Cat, Cat], val rctxDist: ConditionalLogProbabilityDistribution[Cat, Cat], //weightedParser: WeightedGuideChartParser, maxIterations: Int, //resultWeighter: ScgWeighter, verbose: Boolean = false)(se: StartEndTags[Cat]) extends GuideChartParser { val ddConverge = new AtomicInteger val ddAttempts = new AtomicInteger def parseAndProbFromGuideChart(guideChart: CfgGuideChart): Option[(CcgTree, LogDouble)] = { val n = guideChart.length if (verbose) guideChart.draw() //if (verbose) println(f" begin dual decomp iterations") ddAttempts.incrementAndGet() val umat = Vector.fill(n, n + 1)(Map.empty[Cat, LogDouble]) val tree = iterate2(guideChart, 1, umat, LogDouble.one, 1) // tree.foreach { case (t,p) => if (verbose) println(t.tagged) } tree.map(t => (t, new SimpleScgWeighter().weight(t, rootDist, prodDist, lctxDist, rctxDist)(se))) } def iterate(guideChart: CfgGuideChart, k: Int, umat: Vector[Vector[Map[Cat, LogDouble]]], prevL: LogDouble, lagrangianIncreases: Int): Option[CcgTree] = { val n = guideChart.length if (verbose) println(f"SCG Parser DD iteration=$k/$maxIterations") printUTable(umat) val delta = new LogDouble(1.0 / lagrangianIncreases) // NOTE: Based on Rush&Collins advice new PcfgParser(rootDist, prodDist).parseAndProbWithWeightsFromGuideChart(guideChart, umat).flatMap { case (maxTree, pMaxTree) => if (verbose) 
println(f" maxTree:\n${maxTree.pretty.indent(4)}") def getAllTreeCats(t: CcgTree, i: Int, j: Int): Vector[(Cat, Int, Int)] = t match { case CcgBinode(cat, ik, kj) => ((cat, i, j)) +: (getAllTreeCats(ik, i, i + ik.length) ++ getAllTreeCats(kj, i + ik.length, j)) case CcgUnode(cat, sub) => ((cat, i, j)) +: getAllTreeCats(sub, i, j) case CcgLeaf(cat, word, _) => Vector((cat, i, i + 1)) } val maxTreeCats = getAllTreeCats(maxTree, 0, n) val treeNodes: Map[Int, Map[Int, Cat]] = maxTreeCats.groupBy(_._2).mapVals { is => is.groupBy(_._3).mapVals { js => js.only._1 } } var matches = true var pCtxCatProduct = LogDouble.one val newUMat = umat.zipWithIndex.mapt { (row, i) => row.zipWithIndex.mapt { (u, j) => val gc = guideChart(i)(j) if (gc.isEmpty) u else { if (verbose) println(f" Handle ($i,$j)") val (maxCntxCat, pMaxCntxCat) = gc.keys match { case Coll(cat) => if (verbose) println(f" only choice: ${gc.keys.head}") (cat, LogDouble.one) case cats => val contextProbs = { for { cat <- cats lctx <- if (i == 0) Set(se.startTag) else guideChart(i - 1)(i).keys rctx <- if (j == n) Set(se.endTag) else guideChart(j)(j + 1).keys } yield { val pLctx = lctxDist(lctx, cat) val pRctx = rctxDist(rctx, cat) val p = pLctx * pRctx / umat(i)(j).getOrElse(cat, LogDouble.one) if (verbose) println(f" $lctx <-- ${cat} --> $rctx :: ${p.logValue}") cat -> p } } // { // val maxTreeSupertags = maxTree.supertags // for { // cat <- cats // } yield { // val lctx = if (i == 0) se.startTag else maxTreeSupertags(i - 1) // val rctx = if (j == n) se.endTag else maxTreeSupertags(j) // val pLctx = lctxDist(lctx, cat) // val pRctx = rctxDist(rctx, cat) // val p = pLctx * pRctx / umat(i)(j).getOrElse(cat, LogDouble.one) // if (verbose) println(f" $lctx <-- ${cat} --> $rctx :: ${p.logValue}") // cat -> p // } // } contextProbs.maxBy(_._2) } if (verbose) println(f" maxCntxCat: ${maxCntxCat}") val maxTreeCat = treeNodes.get(i).flatMap(_.get(j)) if (verbose) println(f" maxTreeCat: 
${maxTreeCat.fold("None")(_.toString)}") pCtxCatProduct *= pMaxCntxCat if (maxTreeCat.exists(_ == maxCntxCat)) { u } else { val u1 = u.updated(maxCntxCat, u.getOrElse(maxCntxCat, LogDouble.one) * delta) maxTreeCat.fold(u1) { mtc => matches = false u1.updated(mtc, u.getOrElse(mtc, LogDouble.one) / delta) } } } } } //if (verbose) println(f" k=${(k + ":").padRight(4)} ${(System.currentTimeMillis() - startTime) / 1000.0}%.3f sec") if (matches || k >= maxIterations) { // matches. stop. ddConverge.incrementAndGet() Some(maxTree) } else { val l = pMaxTree * pCtxCatProduct // Lagrangian object L(u) iterate(guideChart, k + 1, newUMat, l, lagrangianIncreases + (if (l > prevL) 1 else 0)) // next iteration } } } final def iterate2(guideChart: CfgGuideChart, k: Int, umat: Vector[Vector[Map[Cat, LogDouble]]], prevL: LogDouble, lagrangianIncreases: Int): Option[CcgTree] = { if (verbose) println(f"SCG Parser DD iteration=$k/$maxIterations") printUTable(umat) val delta = new LogDouble(1.0 / lagrangianIncreases) // NOTE: Based on Rush&Collins advice new PcfgParser(rootDist, prodDist).parseAndProbWithWeightsFromGuideChart(guideChart, umat).flatMap { case (maxFullTree, pMaxFullTree) => val (newUMat, matches, lagrangian) = computeNewU2(guideChart, umat, delta, maxFullTree.cat, pMaxFullTree) //if (verbose) println(f" k=${(k + ":").padRight(4)} ${(System.currentTimeMillis() - startTime) / 1000.0}%.3f sec") if (matches || k >= maxIterations) { // matches. stop. 
ddConverge.incrementAndGet() Some(maxFullTree) } else { iterate2(guideChart, k + 1, newUMat, lagrangian, lagrangianIncreases + (if (lagrangian > prevL) 1 else 0)) // next iteration } } } /** * @return (newUMat: Vector[Vector[Map[Cat, LogDouble]]], matches: Boolean, lagrangian: LogDouble) */ def computeNewU2(guideChart: CfgGuideChart, umat: Vector[Vector[Map[Cat, LogDouble]]], delta: LogDouble, maxTreeCat: Cat, pMaxTreeCat: LogDouble): (Vector[Vector[Map[Cat, LogDouble]]], Boolean, LogDouble) = { val n = guideChart.length var matches = true var lagrangian = LogDouble.one // Lagrangian object L(u) // Traverse all cells, updating u in each cell as appropriate val newUMat = umat.zipWithIndex.mapt { (row, i) => row.zipWithIndex.mapt { (u, j) => val gcCell = guideChart(i)(j) gcCell.keys match { case Coll() => // cell is empty. u doesn't change. if (verbose) println(f" empty cell"); u case Coll(cat) => // cell only has one category option. u doesn't change. if (verbose) println(f" only choice: ${gcCell.keys.only}"); u case Coll(cats @ _*) => // cell has multiple category options. find the best under each model and update u if there is a conflict. 
if (verbose) println(f" Handle ($i,$j)") val (maxSubtreeCat, pMaxSubtreeCat) = getMaxSubtreeCat(i, j, guideChart, umat, maxTreeCat, pMaxTreeCat) val (maxContextCat, pMaxContextCat) = getMaxContextCat(cats, i, j, guideChart, umat, n) if (verbose) println(f" maxSubtreeCat: ${maxSubtreeCat}") if (verbose) println(f" maxCntxCat: ${maxContextCat}") lagrangian *= (pMaxSubtreeCat * pMaxContextCat) if (maxSubtreeCat == maxContextCat) { u } else { matches = false u .updated(maxSubtreeCat, u.getOrElse(maxSubtreeCat, LogDouble.one) / delta) .updated(maxContextCat, u.getOrElse(maxContextCat, LogDouble.one) * delta) } } } } (newUMat, matches, lagrangian) } def sliceMatrix[T](m: Vector[Vector[T]], i: Int, j: Int) = m.slice(i, j).map(row => row.slice(i, j + 1)) def getMaxSubtreeCat(i: Int, j: Int, guideChart: CfgGuideChart, umat: Vector[Vector[Map[Cat, LogDouble]]], maxTreeCat: Cat, pMaxTreeCat: LogDouble): (Cat, LogDouble) = { if (i == 0 && j == guideChart.length) (maxTreeCat, pMaxTreeCat) else { val parser = new PcfgParser(new UniformDefaultLogProbabilityDistribution(LogDouble.one), prodDist) val guideChartSubset = CfgGuideChart(guideChart.words, sliceMatrix(guideChart.matrix, i, j), UniversalSet()) val umatSubset = sliceMatrix(umat, i, j) val (t, p) = parser.parseAndProbWithWeightsFromGuideChart(guideChartSubset, umatSubset).get if (verbose) println(f" ${p.logValue}\n${t.pretty.indent(4)}") (t.cat, p) } } def getMaxContextCat(cats: Seq[Cat], i: Int, j: Int, guideChart: CfgGuideChart, umat: Vector[Vector[Map[Cat, LogDouble]]], n: Int): (Cat, LogDouble) = { val contextProbs = for { cat <- cats lctx <- if (i == 0) Set(se.startTag) else guideChart(i - 1)(i).keys rctx <- if (j == n) Set(se.endTag) else guideChart(j)(j + 1).keys } yield { val pLctx = lctxDist(lctx, cat) val pRctx = rctxDist(rctx, cat) val p = pLctx * pRctx / umat(i)(j).getOrElse(cat, LogDouble.one) if (verbose) println(f" $lctx <-- ${cat} --> $rctx :: ${p.logValue}") cat -> p } contextProbs.maxBy(_._2) } def 
printUTable(table: Vector[Vector[Map[Cat, LogDouble]]]) { DrawMatrix.drawMatrix(table.map(_.tail))(_.map { case (cat, u) => f"$cat -> ${u.toDouble}" }.mkString("\n"))(println) } def printTreeTable(tree: CcgTree) { val a = Array.fill(tree.length)(Array.fill[Option[Cat]](tree.length + 1)(None)) def r(t: CcgTree, i: Int, j: Int): Unit = t match { case CcgBinode(cat, ik, kj) => a(i)(j) = Some(cat) r(ik, i, i + ik.length) r(kj, i + ik.length, j) case CcgUnode(cat, sub) => assert(i + 1 < j) a(i)(j) = Some(cat) r(sub, i, j) case CcgLeaf(cat, word, _) => } r(tree, 0, tree.length) DrawMatrix.drawMatrix(a.map(_.toVector.tail).toVector)(_.fold("")(_.toString))(println) } def printCtxTreeTable(table: Vector[Vector[Map[Cat, LogDouble]]]) { DrawMatrix.drawMatrix(table.map(_.tail))(_.map { case (cat, u) => f"$cat -> ${u.toDouble}" }.mkString("\n"))(println) } }
apache-2.0
yan74/afplib
org.afplib/src/main/java/org/afplib/afplib/TextFidelityStpTxtEx.java
5103
/** */ package org.afplib.afplib; import java.util.Arrays; import java.util.Collections; import java.util.List; import org.eclipse.emf.common.util.Enumerator; /** * <!-- begin-user-doc --> * A representation of the literals of the enumeration '<em><b>Text Fidelity Stp Txt Ex</b></em>', * and utility methods for working with them. * <!-- end-user-doc --> * @see org.afplib.afplib.AfplibPackage#getTextFidelityStpTxtEx() * @model * @generated */ public enum TextFidelityStpTxtEx implements Enumerator { /** * The '<em><b>Const Stop</b></em>' literal object. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #CONST_STOP_VALUE * @generated * @ordered */ CONST_STOP(1, "ConstStop", "ConstStop"), /** * The '<em><b>Const Do Not Stop</b></em>' literal object. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #CONST_DO_NOT_STOP_VALUE * @generated * @ordered */ CONST_DO_NOT_STOP(2, "ConstDoNotStop", "ConstDoNotStop"); /** * The '<em><b>Const Stop</b></em>' literal value. * <!-- begin-user-doc --> * <p> * If the meaning of '<em><b>Const Stop</b></em>' literal object isn't clear, * there really should be more of a description here... * </p> * <!-- end-user-doc --> * @see #CONST_STOP * @model name="ConstStop" * @generated * @ordered */ public static final int CONST_STOP_VALUE = 1; /** * The '<em><b>Const Do Not Stop</b></em>' literal value. * <!-- begin-user-doc --> * <p> * If the meaning of '<em><b>Const Do Not Stop</b></em>' literal object isn't clear, * there really should be more of a description here... * </p> * <!-- end-user-doc --> * @see #CONST_DO_NOT_STOP * @model name="ConstDoNotStop" * @generated * @ordered */ public static final int CONST_DO_NOT_STOP_VALUE = 2; /** * An array of all the '<em><b>Text Fidelity Stp Txt Ex</b></em>' enumerators. 
* <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ private static final TextFidelityStpTxtEx[] VALUES_ARRAY = new TextFidelityStpTxtEx[] { CONST_STOP, CONST_DO_NOT_STOP, }; /** * A public read-only list of all the '<em><b>Text Fidelity Stp Txt Ex</b></em>' enumerators. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public static final List<TextFidelityStpTxtEx> VALUES = Collections.unmodifiableList(Arrays.asList(VALUES_ARRAY)); /** * Returns the '<em><b>Text Fidelity Stp Txt Ex</b></em>' literal with the specified literal value. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @param literal the literal. * @return the matching enumerator or <code>null</code>. * @generated */ public static TextFidelityStpTxtEx get(String literal) { for (int i = 0; i < VALUES_ARRAY.length; ++i) { TextFidelityStpTxtEx result = VALUES_ARRAY[i]; if (result.toString().equals(literal)) { return result; } } return null; } /** * Returns the '<em><b>Text Fidelity Stp Txt Ex</b></em>' literal with the specified name. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @param name the name. * @return the matching enumerator or <code>null</code>. * @generated */ public static TextFidelityStpTxtEx getByName(String name) { for (int i = 0; i < VALUES_ARRAY.length; ++i) { TextFidelityStpTxtEx result = VALUES_ARRAY[i]; if (result.getName().equals(name)) { return result; } } return null; } /** * Returns the '<em><b>Text Fidelity Stp Txt Ex</b></em>' literal with the specified integer value. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @param value the integer value. * @return the matching enumerator or <code>null</code>. 
* @generated */ public static TextFidelityStpTxtEx get(int value) { switch (value) { case CONST_STOP_VALUE: return CONST_STOP; case CONST_DO_NOT_STOP_VALUE: return CONST_DO_NOT_STOP; } return null; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ private final int value; /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ private final String name; /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ private final String literal; /** * Only this class can construct instances. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ private TextFidelityStpTxtEx(int value, String name, String literal) { this.value = value; this.name = name; this.literal = literal; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public int getValue() { return value; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public String getName() { return name; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public String getLiteral() { return literal; } /** * Returns the literal value of the enumerator, which is its string representation. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public String toString() { return literal; } } //TextFidelityStpTxtEx
apache-2.0
jronrun/benayn
benayn-ustyle/src/main/java/com/benayn/ustyle/JSONer.java
60782
package com.benayn.ustyle; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; import java.io.BufferedReader; import java.io.ByteArrayInputStream; import java.io.FilterReader; import java.io.IOException; import java.io.InputStreamReader; import java.io.OutputStream; import java.io.Reader; import java.io.Writer; import java.lang.reflect.Field; import java.math.BigDecimal; import java.math.BigInteger; import java.nio.charset.Charset; import java.util.Collection; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import com.benayn.ustyle.behavior.StructBehavior; import com.benayn.ustyle.behavior.ValueBehavior; import com.benayn.ustyle.inner.Options; import com.benayn.ustyle.logger.Log; import com.benayn.ustyle.logger.Loggers; import com.benayn.ustyle.string.Strs; import com.google.common.base.Charsets; import com.google.common.base.Converter; import com.google.common.base.Enums; import com.google.common.base.Function; import com.google.common.base.Optional; import com.google.common.base.Strings; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.primitives.Primitives; /** * JSONer custom Converter usage: * * <pre> * JSONer jsoner = JSONer.build(); * * FacadeObject<Item> fo = FacadeObject.wrap(Item.class); * fo.populate4Test(); * * jsoner.register(new JSONer.GenericConverter<String, short[]>() { * * @Override * protected short[] forward(String input) { * Short[] s = JSONer.read(input).asObject(Short[].class); * return Arrays2.unwrap(s); * } * * @Override * protected String backward(short[] input) { * return JSONer.toJson(input); * }}, short[].class); * * String json = jsoner.update(fo.get()).asJson(); * Item item = jsoner.update(json).asObject(Item.class); * * assertDeepEqual(fo.get(), item); * </pre> * * https://github.com/jronrun/benayn */ public final class JSONer { /** * 
Returns a new {@link JSONer} instance * * @return */ public static JSONer build() { return of(null); } /** * Returns a new {@link JSONer} instance with given target * * @return */ public static JSONer of(Object target) { return new JSONer(target); } /** * Returns a new {@link List} instance with given JSON string or <code>null</code> * * @see ReadJSON#list() * @param target * @return */ public static List<Object> readList(String target) { return read(target).list(); } /** * Returns a new {@link Map} instance with given JSON string or <code>null</code> * * @see ReadJSON#map() * @param target * @return */ public static Map<String, Object> readMap(String target) { return read(target).map(); } /** * Returns a new {@link Map} instance with given JSON string * * @see ReadJSON#noneNullMap() * @param target * @return */ public static Map<String, Object> readNoneNullMap(String target) { return read(target).noneNullMap(); } /** * Returns a new deeply look and tier key supports {@link Map} instance with given JSON string * * @see ReadJSON#deepTierMap() * @param target * @return */ public static Map<String, Object> readDeepTierMap(String target) { return read(target).deepTierMap(); } /** * Returns a new {@link ReadJSON} instance with given JSON string * * @param target * @return */ public static ReadJSON read(String target) { return new ReadJSON(checkNotNull(target), null); } /** * Add the JSON exchange function to the given {@link Reflecter} instance * * @param target * @return */ public static <T> Reflecter<T> addJsonExchangeFunc(Reflecter<T> target) { return checkNotNull(target).exchWithField(JSON_READ_FUNC, JSON_READ_DECISION); } /** * Returns a new {@link WriteJSON} instance * * @param target * @return */ public static WriteJSON write(Object target) { return new WriteJSON(target, null); } /** * Converts given target as a JSON string * * @param target * @return */ public static String toJson(Object target) { return write(target).asJson(); } /** * Format the given JSON as 
an easy-to-read string * * @param json * @return */ public static String fmtJson(Object json) { return write(json).readable().asJson(); } /** * */ public static final class WriteJSON { /** * */ protected static final Log log = Loggers.from("JSONer.WriteJSON"); /** * Writes the delegate target as JSON string to the given output stream * * @param output * @throws IOException */ public void write(final OutputStream output) throws IOException { write(output, Charsets.UTF_8); } /** * Writes the delegate target as JSON string to the given output stream and {@link Charset} * * @param output * @throws IOException */ public void write(final OutputStream output, Charset charset) throws IOException { output.write(asJson().getBytes(charset)); } /** * Writes the delegate target as JSON string to the given writer * * @param writer * @throws IOException */ public void write(final Writer writer) throws IOException { write(writer, Charsets.UTF_8); } /** * Writes the delegate target as JSON string to the given writer and {@link Charset} * * @param writer * @param charset * @throws IOException */ public void write(final Writer writer, Charset charset) throws IOException { writer.write(new String(asJson().getBytes(charset))); } /** * Converts delegate target as a JSON string * * @return */ public String asJson() { if (this.readabilityO.isPresent()) { return doFmt((this.delegate instanceof String) ? 
((String) this.delegate) : intlWriting().get().toString()); } return intlWriting().get().toString(); } /** * Converts delegate target as an easy-to-read JSON string * * @return */ public ReadableOptions readable() { if (this.readabilityO.isPresent()) { return this.readabilityO.get(); } return (this.readabilityO = Optional.of(new ReadableOptions(this))).get(); } /** * */ public class ReadableOptions extends Options<WriteJSON, ReadableOptions> { boolean justifyingL = false; boolean showClassName = false; String fillStringUnit = Strings.repeat(Strs.WHITE_SPACE, 3); Optional<DateStyle> dateStyle = Optional.absent(); private ReadableOptions(WriteJSON jsonW) { this.reference(jsonW, this); } /** * Sets if shows the class name, default is false * * @return */ public ReadableOptions showClassName() { this.showClassName = true; return THIS; } /** * Sets the JSON format fill unit * * @param fillStringUnit * @return */ public ReadableOptions fill(String fillStringUnit) { this.fillStringUnit = fillStringUnit; return THIS; } /** * Sets the JSON format justifying left * * @return */ public ReadableOptions align() { this.justifyingL = true; return THIS; } /** * Sets the date time format style with given date time style * * @param datetimeStyle * @return */ public ReadableOptions dateFmt(String datetimeStyle) { dateStyle = Optional.of(DateStyle.from(checkNotNull(datetimeStyle))); return THIS; } /** * Sets the date time format style with given {@link DateStyle} * * @param datetimeStyle * @return */ public ReadableOptions dateFmt(DateStyle datetimeStyle) { dateStyle = Optional.of(datetimeStyle); return THIS; } /** * @see WriteJSON#asJson() * * @return */ public String asJson() { return outerRef.asJson(); } } /** * */ private WriteJSON(Object target, JSONer jsoner) { this.delegate = target; this.jsoner = Optional.fromNullable(jsoner); } //Decides convert or not convert the value with the register Converter //e.g: avoid convert nested type private boolean convertSwitch = true; private 
void switchConvert() { this.convertSwitch = !convertSwitch; } private <R, W> R convert(Class<?> type, W typeValue) { if (!convertSwitch || !jsoner.isPresent()) { return null; } Converter<R, W> converter = jsoner.get().getTypeConverter(type); return null != converter ? converter.reverse().convert(typeValue) : null; } @SuppressWarnings("unchecked") private <R, W> R convert(String property, W propertyValue) { if (!convertSwitch || !jsoner.isPresent()) { return null; } Converter<R, W> converter = jsoner.get().getConverter(property); propertyValue = (W) (propertyValue.getClass().isArray() ? Arrays2.wraps(propertyValue) : propertyValue); return null != converter ? converter.reverse().convert(propertyValue) : null; } /** * */ private class JsonWriteStructBehavior extends StructBehavior<StringBuilder> { private StringBuilder strB; private Class<?> fieldClass = null; public JsonWriteStructBehavior(Object delegate, Class<?> fieldClass) { super(delegate); strB = new StringBuilder(); this.fieldClass = fieldClass; } private <T, R> StringBuilder doConvert(Class<?> type, T defaultValue) { R result = convert(null != fieldClass ? fieldClass : type, this.delegate); return null != result ? strB.append(result) : (null != defaultValue ? 
strB.append(defaultValue) : null); } @Override protected StringBuilder booleanIf() { return doConvert(Boolean.class, ((Boolean) this.delegate).booleanValue()); } @Override protected StringBuilder byteIf() { return doConvert(Byte.class, ((Byte) this.delegate).byteValue()); } @Override protected StringBuilder characterIf() { if (null != doConvert(Character.class, null)) { return strB; } return strB.append(quotes).append(((Character) this.delegate).charValue()).append(quotes); } @Override protected StringBuilder doubleIf() { return doConvert(Double.class, ((Double) this.delegate).doubleValue()); } @Override protected StringBuilder floatIf() { return doConvert(Float.class, ((Float) this.delegate).floatValue()); } @Override protected StringBuilder integerIf() { return doConvert(Integer.class, ((Integer) this.delegate).intValue()); } @Override protected StringBuilder longIf() { return doConvert(Long.class, ((Long) this.delegate).longValue()); } @Override protected StringBuilder shortIf() { return doConvert(Short.class, ((Short) this.delegate).shortValue()); } @Override protected StringBuilder nullIf() { return strB.append("null"); } @Override protected StringBuilder noneMatched() { return null; } } /** * */ private class JsonWriteValueBehaivor extends ValueBehavior<StringBuilder> { private StringBuilder strB = null; private Class<?> fieldClass = null; public JsonWriteValueBehaivor(Object delegate, Class<?> fieldClass) { super(delegate); strB = new StringBuilder(); this.fieldClass = fieldClass; } private <T, R> StringBuilder doConvert(Class<?> type, T defaultValue) { R result = convert(null != fieldClass ? fieldClass : type, this.delegate); return null != result ? strB.append(result) : (null != defaultValue ? 
strB.append(defaultValue) : null); } @Override protected <T> StringBuilder classIf(Class<T> resolvedP) { if (null != doConvert(Class.class, null)) { return strB; } return strB.append(resolvedP.getName()); } @Override protected StringBuilder primitiveIf() { return strB.append(this.delegate); } @Override protected StringBuilder eightWrapIf() { return new JsonWriteStructBehavior(this.delegate, fieldClass).doDetect(); } @Override protected StringBuilder dateIf(Date resolvedP) { if (null != doConvert(Date.class, null)) { return strB; } if (readabilityO.isPresent() && readabilityO.get().dateStyle.isPresent()) { strB.append(quotes).append( Dater.of(resolvedP).asText(readabilityO.get().dateStyle.get())).append(quotes); } else { strB.append(resolvedP.getTime()); } return strB; } @Override protected StringBuilder stringIf(String resolvedP) { if (null != doConvert(String.class, null)) { return strB; } return asJsonUtf8String((String) this.delegate, strB); } @Override protected StringBuilder enumif(Enum<?> resolvedP) { if (null != doConvert(Enum.class, null)) { return strB; } return strB.append(quotes).append(resolvedP).append(quotes); } @Override protected <T> StringBuilder arrayIf(T[] resolvedP, boolean isPrimitive) { if (null != resolvedP) { if (null != doConvert(isPrimitive ? 
Arrays2.unwrapArrayType(resolvedP.getClass()) : resolvedP.getClass(), null)) { return strB; } } if (null != doConvert(Object[].class, null)) { return strB; } int no = 0; strB.append(arrayL); for (T t : resolvedP) { switchConvert(); strB.append(asStrBuild(t)); switchConvert(); if (++no < resolvedP.length) { strB.append(comma); } } strB.append(arrayR); return strB; } @Override protected StringBuilder bigDecimalIf(BigDecimal resolvedP) { if (null != doConvert(BigDecimal.class, null)) { return strB; } return strB.append(resolvedP.toString()); } @Override protected StringBuilder bigIntegerIf(BigInteger resolvedP) { if (null != doConvert(BigInteger.class, null)) { return strB; } return strB.append(resolvedP.toString()); } @Override protected <K, V> StringBuilder mapIf(Map<K, V> resolvedP) { if (null != doConvert(Map.class, null)) { return strB; } int no = 0; strB.append(objL); for (K k : resolvedP.keySet()) { V v = resolvedP.get(k); switchConvert(); strB.append(asStrBuild(k, true)).append(colon).append(asStrBuild(v)); switchConvert(); if (++no < resolvedP.size()) { strB.append(comma); } } strB.append(objR); return strB; } @Override protected <T> StringBuilder setIf(Set<T> resolvedP) { if (null != doConvert(Set.class, null)) { return strB; } int no = 0; strB.append(arrayL); for (T t : resolvedP) { switchConvert(); strB.append(asStrBuild(t)); switchConvert(); if (++no < resolvedP.size()) { strB.append(comma); } } strB.append(arrayR); return strB; } @Override protected <T> StringBuilder listIf(List<T> resolvedP) { if (null != doConvert(List.class, null)) { return strB; } int no = 0; strB.append(arrayL); for (T t : resolvedP) { switchConvert(); strB.append(asStrBuild(t)); switchConvert(); if (++no < resolvedP.size()) { strB.append(comma); } } strB.append(arrayR); return strB; } @Override protected StringBuilder beanIf() { if (null != doConvert(Object.class, null)) { return strB; } if (this.clazz == Field.class) { 
strB.append(quotes).append(this.delegate.toString()).append(quotes); return strB; } int no = 0; Reflecter<Object> ref = Reflecter.from(this.delegate); Map<String, Object> props = ref.asMap(); strB.append(objL); if (readabilityO.isPresent() && readabilityO.get().showClassName) { props.put("class", this.clazz.getName()); } Object convertR = null; for (String k : props.keySet()) { Object v = props.get(k); strB.append(asStrBuild(k, true)).append(colon) .append(null != (convertR = convert(k, v)) ? convertR : asStrBuild(v, ref.field(k).getType())); if (++no < props.size()) { strB.append(comma); } } strB.append(objR); return strB; } @Override protected StringBuilder nullIf() { return strB.append("null"); } } private StringBuilder asStrBuild(Object obj, boolean wrapWithQuotes) { StringBuilder strB = asStrBuild(obj); if (strB.charAt(0) != quotes) { strB.insert(0, quotes).append(quotes); } return strB; } private StringBuilder asStrBuild(Object obj) { return asStrBuild(obj, null); } private StringBuilder asStrBuild(Object obj, Class<?> fieldClass) { return new JsonWriteValueBehaivor(obj, fieldClass).doDetect(); } private String doFmt(String fmtTgt) { int fixedLenth = 0; List<String> tokens = toTokens(fmtTgt); boolean justifyingL = this.readabilityO.get().justifyingL; String fillStringUnit = this.readabilityO.get().fillStringUnit; if (justifyingL) { for (int i = 0; i < tokens.size(); i++) { int length = tokens.get(i).getBytes().length; if (length > fixedLenth && i < tokens.size() - 1 && tokens.get(i + 1).equals(symbolS(colon))) { fixedLenth = length; } } } int count = 0; StringBuilder strB = new StringBuilder(); for (int i = 0; i < tokens.size(); i++) { String token = tokens.get(i); if (token.equals(",")) { strB.append(token); doJsonFmtFill(strB, count, fillStringUnit); continue; } if (token.equals(":")) { strB.append(" ").append(token).append(" "); continue; } if (token.equals("{")) { String nextToken = tokens.get(i + 1); if (nextToken.equals("}")) { i++; strB.append("{ }"); 
} else { count++; strB.append(token); doJsonFmtFill(strB, count, fillStringUnit); } continue; } if (token.equals("}")) { count--; doJsonFmtFill(strB, count, fillStringUnit); strB.append(token); continue; } if (token.equals("[")) { String nextToken = tokens.get(i + 1); if (nextToken.equals("]")) { i++; strB.append("[ ]"); } else { count++; strB.append(token); doJsonFmtFill(strB, count, fillStringUnit); } continue; } if (token.equals("]")) { count--; doJsonFmtFill(strB, count, fillStringUnit); strB.append(token); continue; } strB.append(token); if (justifyingL) { if (i < tokens.size() - 1 && tokens.get(i + 1).equals(":")) { int fillLength = fixedLenth - token.getBytes().length; if (fillLength > 0) { strB.append(Strings.repeat(Strs.WHITE_SPACE, fillLength)); } } } } return strB.toString(); } private Optional<StringBuilder> intlWriting() { if (this.json.isPresent()) { return json; } return (this.json = Optional.of(asStrBuild(this.delegate))); } private static List<String> toTokens(String json) { String jsonTemp = json; List<String> tokens = Lists.newArrayList(); while (jsonTemp.length() > 0) { String token = nextToken(jsonTemp); jsonTemp = jsonTemp.substring(token.length()); tokens.add(token.trim()); } return tokens; } private static String nextToken(String json) { boolean insideQuotes = false; StringBuilder strB = new StringBuilder(); while (json.length() > 0) { String token = json.substring(0, 1); json = json.substring(1); if (!insideQuotes && (token.equals(symbolS(colon)) || token.equals(symbolS(objL)) || token.equals(symbolS(objR)) || token.equals(symbolS(arrayL)) || token.equals(symbolS(arrayR)) || token.equals(symbolS(comma)))) { if (strB.toString().trim().length() == 0) { strB.append(token); } break; } if (token.equals("\\")) { strB.append(token).append(json.substring(0, 1)); json = json.substring(1); continue; } if (token.equals("\"")) { strB.append(token); if (insideQuotes) { break; } else { insideQuotes = true; continue; } } strB.append(token); } return 
strB.toString(); } private static final char objL = '{'; private static final char objR = '}'; private static final char arrayL = '['; private static final char arrayR = ']'; private static final char quotes = '"'; private static final char colon = ':'; private static final char slash = '\\'; private static final char comma = ','; private static String symbolS(Character ch) { return ch.toString(); } private static void doJsonFmtFill(StringBuilder buf, int count, String fillStringUnit) { buf.append("\n"); for (int i = 0; i < count; i++) { buf.append(fillStringUnit); } } private static StringBuilder asJsonUtf8String(String s, StringBuilder strB) { strB.append('\"'); int len = s.length(); for (int i = 0; i < len; i++) { char c = s.charAt(i); if (c < ' ') { // Anything less than ASCII space, write either in \\u00xx form, or the special \t, \n, etc. form if (c == '\b') { strB.append("\\b"); } else if (c == '\t') { strB.append("\\t"); } else if (c == '\n') { strB.append("\\n"); } else if (c == '\f') { strB.append("\\f"); } else if (c == '\r') { strB.append("\\r"); } else { String hex = Integer.toHexString(c); strB.append("\\u"); int pad = 4 - hex.length(); for (int k = 0; k < pad; k++) { strB.append('0'); } strB.append(hex); } } else if (c == slash || c == quotes) { strB.append(slash); strB.append(c); } else { // Anything else - write in UTF-8 form (multi-byte encoded) (OutputStreamWriter is UTF-8) strB.append(c); } } return strB.append('\"'); } private Optional<ReadableOptions> readabilityO = Optional.absent(); private Optional<StringBuilder> json = Optional.absent(); private Optional<JSONer> jsoner = Optional.absent(); private Object delegate; private WriteJSON() {} } /** * @see Converter */ public static abstract class GenericConverter<R, W> extends Converter<R, W> { /** * Quotes the given {@link String} target * * @param target * @return */ protected String quotes(String target) { return new StringBuilder() .append(WriteJSON.quotes) .append(target) 
.append(WriteJSON.quotes).toString(); } @Override protected W doForward(R a) { return forward(a); } @SuppressWarnings("unchecked") @Override protected R doBackward(W b) { R r = backward(b); if (r instanceof String) { return (R) quotes((String) r); } return r; } /** * Converts JSON to object. * {@link Byte}, {@link Short}, {@link Integer}, {@link Long}, * {@link Float}, {@link Double}, {@link BigInteger}, {@link BigDecimal} read as {@link Number} * * @see Converter#doForward(R) */ protected abstract W forward(R input); /** * Converts object to JSON. * {@link Byte}, {@link Short}, {@link Integer}, {@link Long}, * {@link Float}, {@link Double}, {@link BigInteger}, {@link BigDecimal} converts as {@link Number} * * @see Converter#doBackward(W) */ protected abstract R backward(W input); } /** * Register a property based JSON {@link GenericConverter} with given properties * * @param converter * @param property * @return */ public <R, W> JSONer register(Converter<R, W> converter, String... properties) { for (String property : checkNotNull(properties)) { converts.put(property, checkNotNull(converter)); } return this; } /** * Register a class based JSON {@link GenericConverter} with given types * * @param property * @param converter * @return */ public <R, W> JSONer register(Converter<R, W> converter, Class<?>... 
types) { for (Class<?> type : checkNotNull(types)) { typeConverts.put(type, checkNotNull(converter)); } return this; } /** * Unregister a JSON {@link GenericConverter} with given class type * * @param converter * @return */ public <R, W> JSONer unregister(Class<?> type) { typeConverts.remove(type); return this; } /** * Unregister a JSON {@link GenericConverter} with given property name * * @param converter * @return */ public <R, W> JSONer unregister(String property) { converts.remove(property); return this; } /** * Returns a new {@link WriteJSON} with delegate object * * @return */ public WriteJSON writer() { return new WriteJSON(this.delegate.orNull(), this); } /** * Returns a new {@link WriteJSON} with given object * * @see JSONer#update(Object) * @see JSONer#writer() * @return */ public WriteJSON writer(Object target) { return update(target).writer(); } /** * @see JSONer#writer() * @see WriteJSON#asJson() */ public String asJson() { return writer().asJson(); } /** * @see JSONer#update(Object) * @see JSONer#asJson() */ public String asJson(Object target) { return update(target).asJson(); } /** * @see JSONer#writer() * @see WriteJSON#readable() * @see WriteJSON#asJson() */ public String asFmtJson() { return writer().readable().asJson(); } /** * @see JSONer#update(Object) * @see JSONer#asFmtJson() */ public String asFmtJson(Object target) { return update(target).asFmtJson(); } /** * Returns a new {@link ReadJSON} with delegate * * @return */ public ReadJSON reader() { Object obj = checkNotNull(this.delegate.orNull(), "The delegate object cannot be null"); checkArgument(obj instanceof String, "The delegate object must be JSON string"); return new ReadJSON((String) obj, this); } /** * @see JSONer#reader() * @see ReadJSON#map() */ public Map<String, Object> asMap() { return reader().map(); } /** * @see JSONer#reader() * @see ReadJSON#noneNullMap() */ public Map<String, Object> asNoneNullMap() { return reader().noneNullMap(); } /** * @see JSONer#reader() * @see 
ReadJSON#deepTierMap() */ public Map<String, Object> asDeepTierMap() { return reader().deepTierMap(); } /** * @see JSONer#reader() * @see ReadJSON#asObject(Object) */ public <T> T asObject(Object target) { return reader().asObject(target); } /** * @see JSONer#reader() * @see ReadJSON#list() */ public List<Object> asList() { return reader().list(); } /** * Resets the delegate with given target * * @param target * @return */ public JSONer update(Object target) { delegate = Optional.fromNullable(target); return this; } /** * */ public static final class ReadJSON { /** * */ protected static final Log log = Loggers.from("JSONer.ReadJSON"); /** * Returns the delegate JSON string as a given target instance * * @param target * @return */ public <T> T asObject(Object target) { return buildReflecter(Reflecter.from(target)).populate(noneNullMap()).get(); } /** * Mapping property with given value mapping function * * @param property * @param mappingFunc * @return */ public <I, O> ReadJSON mapping(String property, Function<I, O> mappingFunc) { this.mappingFuncs.put(property, mappingFunc); return this; } /** * Returns the delegate JSON string as a {@link Gather} * * @return */ public Gather<Object> gather() { return Gather.from(list()); } /** * Returns the delegate JSON string as a {@link List} * * @return */ public List<Object> list() { Object[] vals = (Object[]) noneNullMap().get(itemsF); if (null != vals) { return Lists.newArrayList(vals); } return null; } /** * Returns the delegate JSON string as a {@link Mapper} * * @return */ public Mapper<String, Object> mapper() { return Mapper.from(noneNullMap()); } /** * Returns the delegate JSON string as a deep look and tier key {@link HashMap} * * @see Mapper#deepLook() * @see Mapper#tierKey() * @return */ public Map<String, Object> deepTierMap() { return mapper().deepLook().tierKey().map(); } /** * Returns the delegate JSON string as a {@link HashMap} or null * * @return */ public Map<String, Object> map() { return 
intlMapping(false).get(); } /** * Returns the delegate JSON string as a {@link HashMap} or empty instance * * @return */ public Map<String, Object> noneNullMap() { return intlMapping(true).get(); } /** * Checks if the delegate JSON string is valid * * @return */ public boolean isValid() { return null != intlMapping(false); } /** * */ private ReadJSON(String target, JSONer jsoner) { this.delegate = new AsyncPushbackReader( new BufferedReader( new InputStreamReader( new ByteArrayInputStream(target.getBytes(Charsets.UTF_8)), Charsets.UTF_8))); this.jsoner = Optional.fromNullable(jsoner); } static final char objL = '{'; static final char objR = '}'; static final char arrayL = '['; static final char arrayR = ']'; static final char quotes = '"'; static final char colon = ':'; static final char slash = '\\'; static final String itemsF = "$items"; private <T> Reflecter<T> buildReflecter(final Reflecter<T> ref) { addJsonExchangeFunc(ref).autoExchange(); if (jsoner.isPresent() && !jsoner.get().typeConverts.isEmpty()) { ref.fieldLoop(new Decisional<Field>() { @Override protected void decision(Field input) { Class<?> type = input.getType(); Converter<?, ?> converter = jsoner.get().getTypeConverter(type); if (null != converter) { ref.exchange(converter, input.getName()); } } }); } if (!this.mappingFuncs.isEmpty()) { ref.setExchangeFuncs(this.mappingFuncs); } return ref; } /** * */ private Object readValue() throws IOException { int ch = this.delegate.read(); if (ch == quotes) { return readString(); } if (isDigit(ch) || ch == '-') { return readNumber(ch); } if (ch == objL) { this.delegate.unread(objL); return mappingBuild(); } if (ch == 't' || ch == 'T') { this.delegate.unread(ch); readToken("true"); return Boolean.TRUE; } if (ch == 'f' || ch == 'F') { this.delegate.unread(ch); readToken("false"); return Boolean.FALSE; } if (ch == 'n' || ch == 'N') { this.delegate.unread(ch); readToken("null"); return null; } if (ch == arrayL) { return readArray(); } if (ch == arrayR) { 
this.delegate.unread(arrayR); return null; } if (ch == -1) { throw new IOException("EOF reached prematurely"); } throw new IOException("Unknown value type at position " + this.delegate.getPos()); } private Optional<Map<String, Object>> intlMapping(boolean noneNull) { if (this.mapping.isPresent()) { return this.mapping; } try { return (this.mapping = Optional.of(mappingBuild())); } catch (IOException e) { log.error(e.getMessage()); } finally { close(); } if (noneNull) { Map<String, Object> m = Maps.newHashMap(); return (this.mapping = Optional.of(m)); } return null; } private Object readArray() throws IOException { List<Object> l = Lists.newArrayList(); while (true) { skipWhitespace(); Object o = readValue(); if (null != o) { l.add(o); } int c = skipWhitespaceRead(); if (c == arrayR) { break; } if (c != ',') { throw new IOException("Expected ',' or ']' inside array at position " + this.delegate.getPos()); } } return l.toArray(); } /** * Return the specified token from the reader. If it is not found, throw an * IOException indicating that. Converting to c to (char) c is acceptable * because the 'tokens' allowed in a JSON input stream (true, false, null) * are all ASCII. */ private String readToken(String token) throws IOException { int len = token.length(); for (int i = 0; i < len; i++) { int c = this.delegate.read(); if (c == -1) { throw new IOException("EOF reached while reading token: " + token); } c = Character.toLowerCase((char) c); int loTokenChar = token.charAt(i); if (loTokenChar != c) { throw new IOException("Expected token: " + token + " at position " + this.delegate.getPos()); } } return token; } private Number readNumber(int ch) throws IOException { final AsyncPushbackReader in = this.delegate; final char[] numBuf = _numBuf; numBuf[0] = (char) ch; int len = 1; boolean isFloat = false; try { while (true) { ch = in.read(); if ((ch >= '0' && ch <= '9') || ch == '-' || ch == '+') { numBuf[len++] = (char) ch; } else if (ch == '.' 
|| ch == 'e' || ch == 'E') { numBuf[len++] = (char) ch; isFloat = true; } else if (ch == -1) { throw new IOException("Reached EOF while reading number at position " + in.getPos()); } else { in.unread(ch); break; } } } catch (ArrayIndexOutOfBoundsException e) { throw new IOException("Too many digits in number at position " + in.getPos()); } if (isFloat) { // Floating point number needed String num = new String(numBuf, 0, len); try { return Double.parseDouble(num); } catch (NumberFormatException e) { throw new IOException("Invalid floating point number at position " + in.getPos() + ", number: " + num); } } boolean isNeg = numBuf[0] == '-'; long n = 0; for (int i = (isNeg ? 1 : 0); i < len; i++) { n = (numBuf[i] - '0') + n * 10; } return isNeg ? -n : n; } private void close() { try { if (null != this.delegate) { this.delegate.close(); } } catch (IOException ignored) { } } private String readString() throws IOException { final StringBuilder strBuf = _strBuf; strBuf.setLength(0); StringBuilder hex = new StringBuilder(); boolean done = false; final int STATE_STRING_START = 0; final int STATE_STRING_SLASH = 1; final int STATE_HEX_DIGITS = 2; final int STATE_FILED_ARRAY = 3; final int STATE_FILED_OBJ = 4; int state = STATE_STRING_START; int arraySymbol = 0; int objSymbol = 0; while (!done) { int ch = this.delegate.read(); if (ch == -1) { throw new IOException("EOF reached while reading JSON string"); } switch (state) { case STATE_STRING_START: if (ch == slash) { state = STATE_STRING_SLASH; } else if (ch == quotes) { done = true; } //field JSON array else if (ch == arrayL) { state = STATE_FILED_ARRAY; strBuf.append(arrayL); ++arraySymbol; } //field JSON obj else if (ch == objL) { state = STATE_FILED_OBJ; strBuf.append(objL); ++objSymbol; } else { strBuf.append(toChars(ch)); } break; case STATE_FILED_OBJ: if (ch == objL) { ++objSymbol; } else if (ch == objR) { --objSymbol; if (objSymbol == 0) { state = STATE_STRING_START; } } strBuf.append(toChars(ch)); break; case 
STATE_FILED_ARRAY: if (ch == arrayL) { ++arraySymbol; } else if (ch == arrayR) { --arraySymbol; if (arraySymbol == 0) { state = STATE_STRING_START; } } strBuf.append(toChars(ch)); break; case STATE_STRING_SLASH: if (ch == 'n') { strBuf.append(Strs.LF); } else if (ch == 'r') { strBuf.append(Strs.CR); } else if (ch == 't') { strBuf.append(Strs.TAB); } else if (ch == 'f') { strBuf.append('\f'); } else if (ch == 'b') { strBuf.append('\b'); } else if (ch == slash) { strBuf.append(slash); } else if (ch == '/') { strBuf.append('/'); } else if (ch == quotes) { strBuf.append(quotes); } else if (ch == '\'') { strBuf.append('\''); } else if (ch == 'u') { state = STATE_HEX_DIGITS; hex.setLength(0); break; } else { throw new IOException("Invalid character escape sequence specified at position " + this.delegate.getPos()); } state = STATE_STRING_START; break; case STATE_HEX_DIGITS: if (isHexDigit(ch)) { hex.append((char) ch); if (hex.length() == 4) { int value = Integer.parseInt(hex.toString(), 16); strBuf.append(valueOf((char) value)); state = STATE_STRING_START; } } else { throw new IOException("Expected hexadecimal digits at position " + this.delegate.getPos()); } break; } } return strBuf.toString(); } private static boolean isHexDigit(int ch) { return isDigit(ch) || ch == 'a' || ch == 'A' || ch == 'b' || ch == 'B' || ch == 'c' || ch == 'C' || ch == 'd' || ch == 'D' || ch == 'e' || ch == 'E' || ch == 'f' || ch == 'F'; } private static boolean isDigit(int ch) { return ch >= '0' && ch <= '9'; } /** * This is a performance optimization. The lowest 128 characters are re-used. * * @param c char to match to a Character. * @return a Character that matches the passed in char. If the valuye is * less than 127, then the same Character instances are re-used. */ private static Character valueOf(char c) { return c <= 127 ? 
_charCache[(int) c] : c; } protected static final int MAX_CODE_POINT = 0x10ffff; protected static final int MIN_SUPPLEMENTARY_CODE_POINT = 0x010000; protected static final char MIN_LOW_SURROGATE = '\uDC00'; protected static final char MIN_HIGH_SURROGATE = '\uD800'; private static char[] toChars(int codePoint) { if (codePoint < 0 || codePoint > MAX_CODE_POINT) { // int UTF-8 char must be in range throw new IllegalArgumentException("value ' + codePoint + ' outside UTF-8 range"); } if (codePoint < MIN_SUPPLEMENTARY_CODE_POINT) { // if the int character fits in two bytes... return new char[]{(char) codePoint}; } char[] result = new char[2]; int offset = codePoint - MIN_SUPPLEMENTARY_CODE_POINT; result[1] = (char) ((offset & 0x3ff) + MIN_LOW_SURROGATE); result[0] = (char) ((offset >>> 10) + MIN_HIGH_SURROGATE); return result; } private int skipWhitespaceRead() throws IOException { final AsyncPushbackReader in = this.delegate; int c = in.read(); while (Strs.isWhitespace(c)) { c = in.read(); } return c; } private void skipWhitespace() throws IOException { int c = skipWhitespaceRead(); this.delegate.unread(c); } /** * */ private static class AsyncPushbackReader extends FilterReader { private final int[] _buf; private int _idx; private long _pos; private AsyncPushbackReader(Reader reader, int size) { super(reader); checkArgument(size > 0, "size <= 0"); _buf = new int[size]; _idx = size; } private AsyncPushbackReader(Reader r) { this(r, 1); } public long getPos() { return _pos; } @Override public int read() throws IOException { _pos++; if (_idx < _buf.length) { return _buf[_idx++]; } return super.read(); } public void unread(int c) throws IOException { if (_idx == 0) { throw new IOException("AsyncPushback buffer overflow: buffer size (" + _buf.length + "), position = " + _pos); } _pos--; _buf[--_idx] = c; } @Override public void close() throws IOException { super.close(); _pos = 0; } } private <R, W> Object doConvert(boolean isPresent, String property, R readValue) { if 
(!isPresent) { return readValue; } Converter<R, W> converter = jsoner.get().getConverter(property); return null != converter ? converter.convert((R) readValue) : readValue; } private Map<String, Object> mappingBuild() throws IOException { boolean done = false, objectR = false; //'S' read start object, 'F' field, 'V' value, 'P' post char state = 'S'; String field = null; Map<String, Object> map = Maps.newHashMap(); final AsyncPushbackReader in = this.delegate; boolean isConvertPresent = jsoner.isPresent() && !jsoner.get().converts.isEmpty(); while (!done) { int ch; switch (state) { case 'S': ch = skipWhitespaceRead(); if (objL == ch) { objectR = true; ch = skipWhitespaceRead(); if (objR == ch) { return null; } in.unread(ch); state = 'F'; } else if (arrayL == ch) { in.unread(arrayL); state = 'V'; } else { throw new IOException("Input is invalid JSON; does not start with '{' or '[', ch=" + ch); } break; case 'F': ch = skipWhitespaceRead(); if (quotes == ch) { field = readString(); ch = skipWhitespaceRead(); if (colon != ch) { throw new IOException("Expected ':' between string field and value at position " + in.getPos()); } skipWhitespace(); state = 'V'; } else { throw new IOException("Expected quote at position " + in.getPos()); } break; case 'V': if (null == field) { field = itemsF; } map.put(field, doConvert(isConvertPresent, field, readValue())); state = 'P'; break; case 'P': ch = skipWhitespaceRead(); if (ch == -1 && objectR) { throw new IOException("EOF reached before closing '}'"); } if (ch == objR || ch == -1) { done = true; } else if (ch == ',') { state = 'F'; } else { throw new IOException("Object not ended with '}' or ']' at position " + in.getPos()); } break; } } return map; } private final char[] _numBuf = new char[256]; private final StringBuilder _strBuf = new StringBuilder(); private static final Byte[] _byteCache = new Byte[256]; private static final Character[] _charCache = new Character[128]; static { for (int i = 0; i < _charCache.length; i++) { 
_charCache[i] = (char) i; } for (int i = 0; i < _byteCache.length; i++) { _byteCache[i] = (byte) (i - 128); } } private AsyncPushbackReader delegate; private Optional<Map<String, Object>> mapping = Optional.absent(); private Map<String, Function<?, ?>> mappingFuncs = Maps.newHashMap(); private Optional<JSONer> jsoner = Optional.absent(); private ReadJSON() {} } private Optional<?> delegate = null; private JSONer() {} private JSONer(Object target) { update(target); } private Map<String, Converter<?, ?>> converts = Maps.newHashMap(); private Map<Class<?>, Converter<?, ?>> typeConverts = Maps.newHashMap(); /** * Returns the {@link Converter} instance with given calss type key * * @param key * @return */ @SuppressWarnings("unchecked") public <R, W> Converter<R, W> getTypeConverter(Class<?> key) { return (Converter<R, W>) typeConverts.get(key); } /** * Returns the {@link Converter} instance with given property name key * * @param key * @return */ @SuppressWarnings("unchecked") public <R, W> Converter<R, W> getConverter(String key) { return (Converter<R, W>) converts.get(key); } /** * */ private static final Decision<Field> JSON_READ_DECISION = new Decision<Field>() { @Override public boolean apply(Field input) { return (null != input) && (input.getType().isArray() || input.getType().isEnum() || Collection.class.isAssignableFrom(input.getType()) || Map.class.isAssignableFrom(input.getType())); } }; /** * */ private static final Function<Pair<Field,Object>, Object> JSON_READ_FUNC = new Function<Pair<Field,Object>, Object>() { @SuppressWarnings({ "unchecked", "rawtypes" }) @Override public Object apply(Pair<Field, Object> input) { if (null == input) { return null; } Field field = input.getL(); Object val = input.getR(); if (null == val) { return val; } //field byte array, val string if (field.getType().isArray() && (Byte.class == Primitives.wrap(field.getType().getComponentType())) && (val instanceof String)) { if (Objects2.isPrimitive(field.getType().getComponentType())) 
{ return val.toString().getBytes(Charsets.UTF_8); } return Arrays2.wrap(val.toString().getBytes(Charsets.UTF_8)); } //filed array, val array else if (field.getType().isArray() && Objects2.is8Type(field.getType().getComponentType()) && val.getClass().isArray()) { //field primitive array if (Objects2.isPrimitive(field.getType().getComponentType())) { return Arrays2.unwraps(Arrays2.convert((Object[]) val, Primitives.wrap(field.getType().getComponentType()))); } return Arrays2.convert((Object[]) val, Primitives.wrap(field.getType().getComponentType())); } //field list, val array else if (List.class.isAssignableFrom(field.getType())) { return Resolves.get(field, val); } //field set, val array else if (Set.class.isAssignableFrom(field.getType())) { return Resolves.get(field, val); } //field map else if (Map.class.isAssignableFrom(field.getType())) { return Resolves.get(field, val); } //field enum, val string else if (field.getType().isEnum() && (val instanceof String)) { // return Enums.valueOfFunction((Class<Enum>) field.getType()).apply(val.toString().toUpperCase()); return Enums.stringConverter((Class<Enum>) field.getType()).convert(val.toString().toUpperCase()); } return val; } }; }
apache-2.0
clockworksoul/smudge
membership.go
21001
/*
Copyright 2016 The Smudge Authors.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package smudge

import (
	"errors"
	"math"
	"net"
	"strconv"
	"sync"
	"time"
)

// A scalar value used to calculate a variety of limits
const lambda = 2.5

// How many standard deviations beyond the mean PING/ACK response time we
// allow before timing out an ACK.
const timeoutToleranceSigmas = 3.0

const defaultIPv4MulticastAddress = "224.0.0.0"

const defaultIPv6MulticastAddress = "[ff02::1]"

// The local node's logical clock; incremented once per ping cycle and
// synchronized upward from incoming messages.
// NOTE(review): read and written from multiple goroutines without atomics —
// looks racy; confirm whether this is tolerated by design.
var currentHeartbeat uint32

// ACKs we are still waiting for, keyed by "address:heartbeat".
var pendingAcks = struct {
	sync.RWMutex
	m map[string]*pendingAck
}{m: make(map[string]*pendingAck)}

var thisHostAddress string

var thisHost *Node

// Address byte length; switched to net.IPv6len in Begin() when the listen
// address is not IPv4.
var ipLen = net.IPv4len

// This flag is set whenever a known node is added or removed.
// NOTE(review): set from AddNode/RemoveNode and cleared in Begin()'s loop
// without synchronization — TODO confirm this race is acceptable.
var knownNodesModifiedFlag = false

// Rolling PING/ACK latency statistics used to derive timeouts.
var pingdata = newPingData(GetPingHistoryFrontload(), 50)

/******************************************************************************
 * Exported functions (for public consumption)
 *****************************************************************************/

// Begin starts the server by opening a UDP port and beginning the heartbeat.
// Note that this is a blocking function, so act appropriately.
func Begin() {
	// Add this host.
	logfInfo("Using listen IP: %s", listenIP)

	// Use IPv6 address length if the listen IP is not an IPv4 address
	if GetListenIP().To4() == nil {
		ipLen = net.IPv6len
	}

	// Establishes thisHost / thisHostAddress from the configured listen IP/port.
	initGlobalHostEnvironment()

	logInfo("My host address:", thisHostAddress)

	// Add this node's status. Don't update any other node's statuses: they'll
	// report those back to us.
	updateNodeStatus(thisHost, StatusAlive, 0, thisHost)
	AddNode(thisHost)

	go listenUDP(GetListenPort())

	// Add initial hosts as specified by the SMUDGE_INITIAL_HOSTS property
	for _, address := range GetInitialHosts() {
		n, err := CreateNodeByAddress(address)
		if err != nil {
			logfError("Could not create node %s: %v", address, err)
		} else {
			AddNode(n)
		}
	}

	if GetMulticastEnabled() {
		go listenUDPMulticast(GetMulticastPort())
		go multicastAnnounce(GetMulticastAddress())
	}

	go startTimeoutCheckLoop()

	// Loop over a randomized list of all known nodes (except for this host
	// node), pinging one at a time. If the knownNodesModifiedFlag is set to
	// true by AddNode() or RemoveNode(), the we get a fresh list and start
	// again.
	for {
		var randomAllNodes = knownNodes.getRandomNodes(0, thisHost)
		var pingCounter int

		for _, node := range randomAllNodes {
			// Exponential backoff of dead nodes, until such time as they are removed.
			if node.status == StatusDead {
				var dnc *deadNodeCounter
				var ok bool

				deadNodeRetries.Lock()
				if dnc, ok = deadNodeRetries.m[node.Address()]; !ok {
					dnc = &deadNodeCounter{retry: 1, retryCountdown: 2}
					deadNodeRetries.m[node.Address()] = dnc
				}
				deadNodeRetries.Unlock()

				dnc.retryCountdown--

				if dnc.retryCountdown <= 0 {
					dnc.retry++
					// Retry interval doubles each miss: 2^retry cycles.
					dnc.retryCountdown = int(math.Pow(2.0, float64(dnc.retry)))

					if dnc.retry > maxDeadNodeRetries {
						logDebug("Forgetting dead node", node.Address())

						deadNodeRetries.Lock()
						delete(deadNodeRetries.m, node.Address())
						deadNodeRetries.Unlock()

						RemoveNode(node)
						continue
					}
				} else {
					continue
				}
			}

			currentHeartbeat++

			logfTrace("%d - hosts=%d (announce=%d forward=%d)",
				currentHeartbeat,
				len(randomAllNodes),
				emitCount(),
				pingRequestCount())

			PingNode(node)
			pingCounter++

			time.Sleep(time.Millisecond * time.Duration(GetHeartbeatMillis()))

			// Membership changed mid-sweep: restart with a fresh random list.
			if knownNodesModifiedFlag {
				knownNodesModifiedFlag = false
				break
			}
		}

		if pingCounter == 0 {
			logDebug("No nodes to ping. So lonely. :(")
			time.Sleep(time.Millisecond * time.Duration(GetHeartbeatMillis()))
		}
	}
}

// PingNode can be used to explicitly ping a node. Calls the low-level
// doPingNode(), and outputs a message (and returns an error) if it fails.
func PingNode(node *Node) error {
	err := transmitVerbPingUDP(node, currentHeartbeat)
	if err != nil {
		logInfo("Failure to ping", node, "->", err)
	}

	return err
}

/******************************************************************************
 * Private functions (for internal use only)
 *****************************************************************************/

// Multicast announcements are constructed as:
// Byte  0      - 1 byte character byte length N
// Bytes 1 to N - Cluster name bytes
// Bytes N+1... - A message (without members)
func decodeMulticastAnnounceBytes(bytes []byte) (string, []byte, error) {
	nameBytesLen := int(bytes[0])

	if nameBytesLen+1 > len(bytes) {
		return "", nil, errors.New("Invalid multicast message received")
	}

	nameBytes := bytes[1 : nameBytesLen+1]
	name := string(nameBytes)
	msgBytes := bytes[nameBytesLen+1 : len(bytes)]

	return name, msgBytes, nil
}

// doForwardOnTimeout handles a timed-out direct PING by asking up to
// pingRequestCount() other nodes to ping the target indirectly; if no such
// nodes exist the target is marked dead immediately.
func doForwardOnTimeout(pack *pendingAck) {
	filteredNodes := getTargetNodes(pingRequestCount(), thisHost, pack.node)

	if len(filteredNodes) == 0 {
		logDebug(thisHost.Address(), "Cannot forward ping request: no more nodes")

		updateNodeStatus(pack.node, StatusDead, currentHeartbeat, thisHost)
	} else {
		for i, n := range filteredNodes {
			logfDebug("(%d/%d) Requesting indirect ping of %s via %s",
				i+1,
				len(filteredNodes),
				pack.node.Address(),
				n.Address())

			transmitVerbForwardUDP(n, pack.node, currentHeartbeat)
		}
	}
}

// The number of times any node's new status should be emitted after changes.
// Currently set to (lambda * log(node count)).
func emitCount() int { logn := math.Log(float64(knownNodes.length())) mult := (lambda * logn) + 0.5 return int(mult) } // Multicast announcements are constructed as: // Byte 0 - 1 byte character byte length N // Bytes 1 to N - Cluster name bytes // Bytes N+1... - A message (without members) func encodeMulticastAnnounceBytes() []byte { nameBytes := []byte(GetClusterName()) nameBytesLen := len(nameBytes) if nameBytesLen > 0xFF { panic("Cluster name too long: " + strconv.FormatInt(int64(nameBytesLen), 10) + " bytes (max 254)") } msg := newMessage(verbPing, thisHost, currentHeartbeat) msgBytes := msg.encode() msgBytesLen := len(msgBytes) totalByteCount := 1 + nameBytesLen + msgBytesLen bytes := make([]byte, totalByteCount, totalByteCount) // Add name length byte bytes[0] = byte(nameBytesLen) // Copy the name bytes copy(bytes[1:nameBytesLen+1], nameBytes) // Copy the message proper copy(bytes[nameBytesLen+1:totalByteCount], msgBytes) return bytes } func guessMulticastAddress() string { if multicastAddress == "" { if ipLen == net.IPv6len { multicastAddress = defaultIPv6MulticastAddress } else if ipLen == net.IPv4len { multicastAddress = defaultIPv4MulticastAddress } else { logFatal("Failed to determine IPv4/IPv6") } } return multicastAddress } // getListenInterface gets the network interface for the listen IP func getListenInterface() (*net.Interface, error) { ifaces, err := net.Interfaces() if err == nil { for _, iface := range ifaces { addrs, err := iface.Addrs() if err != nil { logfWarn("Can not get addresses of interface %s", iface.Name) continue } for _, addr := range addrs { ip, _, err := net.ParseCIDR(addr.String()) if err != nil { continue } if ip.String() == GetListenIP().String() { logfInfo("Found interface with listen IP: %s", iface.Name) return &iface, nil } } } } return nil, errors.New("Could not determine the interface of the listen IP address") } // Returns a random slice of valid ping/forward request targets; i.e., not // this node, and not dead. 
func getTargetNodes(count int, exclude ...*Node) []*Node { randomNodes := knownNodes.getRandomNodes(0, exclude...) filteredNodes := make([]*Node, 0, count) for _, n := range randomNodes { if len(filteredNodes) >= count { break } if n.status == StatusDead { continue } filteredNodes = append(filteredNodes, n) } return filteredNodes } func initGlobalHostEnvironment() { thisHost = &Node{ ip: GetListenIP(), port: uint16(GetListenPort()), timestamp: GetNowInMillis(), pingMillis: PingNoData, } thisHostAddress = thisHost.Address() } func listenUDP(port int) error { listenAddress, err := net.ResolveUDPAddr("udp", ":"+strconv.FormatInt(int64(port), 10)) if err != nil { return err } /* Now listen at selected port */ c, err := net.ListenUDP("udp", listenAddress) if err != nil { return err } defer c.Close() for { buf := make([]byte, 2048) // big enough to fit 1280 IPv6 UDP message n, addr, err := c.ReadFromUDP(buf) if err != nil { logError("UDP read error: ", err) } go func(addr *net.UDPAddr, msg []byte) { err = receiveMessageUDP(addr, buf[0:n]) if err != nil { logError(err) } }(addr, buf[0:n]) } } func listenUDPMulticast(port int) error { addr := GetMulticastAddress() if addr == "" { addr = guessMulticastAddress() } listenAddress, err := net.ResolveUDPAddr("udp", addr+":"+strconv.FormatInt(int64(port), 10)) if err != nil { return err } /* Now listen at selected port */ iface, err := getListenInterface() if err != nil { return err } c, err := net.ListenMulticastUDP("udp", iface, listenAddress) if err != nil { return err } defer c.Close() for { buf := make([]byte, 2048) // big enough to fit 1280 IPv6 UDP message n, addr, err := c.ReadFromUDP(buf) if err != nil { logError("UDP read error:", err) } go func(addr *net.UDPAddr, bytes []byte) { name, msgBytes, err := decodeMulticastAnnounceBytes(bytes) if err != nil { logDebug("Ignoring unexpected multicast message.") } else { if GetClusterName() == name { msg, err := decodeMessage(addr.IP, msgBytes) if err == nil { logfTrace("Got 
multicast %v from %v code=%d", msg.verb, msg.sender.Address(), msg.senderHeartbeat) // Update statuses of the sender. updateStatusesFromMessage(msg) } else { logError(err) } } } }(addr, buf[0:n]) } } // multicastAnnounce is called when the server first starts to broadcast its // presence to all listening servers within the specified subnet and continues // to broadcast its presence every multicastAnnounceIntervalSeconds in case // this value is larger than zero. func multicastAnnounce(addr string) error { if addr == "" { addr = guessMulticastAddress() } fullAddr := addr + ":" + strconv.FormatInt(int64(GetMulticastPort()), 10) logInfo("Announcing presence on", fullAddr) address, err := net.ResolveUDPAddr("udp", fullAddr) if err != nil { logError(err) return err } laddr := &net.UDPAddr{ IP: GetListenIP(), Port: 0, } for { c, err := net.DialUDP("udp", laddr, address) if err != nil { logError(err) return err } // Compose and send the multicast announcement msgBytes := encodeMulticastAnnounceBytes() _, err = c.Write(msgBytes) if err != nil { logError(err) return err } logfTrace("Sent announcement multicast from %v to %v", laddr, fullAddr) if GetMulticastAnnounceIntervalSeconds() > 0 { time.Sleep(time.Second * time.Duration(GetMulticastAnnounceIntervalSeconds())) } else { return nil } } } // The number of nodes to send a PINGREQ to when a PING times out. // Currently set to (lambda * log(node count)). 
func pingRequestCount() int { logn := math.Log(float64(knownNodes.length())) mult := (lambda * logn) + 0.5 return int(mult) } func receiveMessageUDP(addr *net.UDPAddr, msgBytes []byte) error { msg, err := decodeMessage(addr.IP, msgBytes) if err != nil { return err } logfTrace("Got %v from %v code=%d", msg.verb, msg.sender.Address(), msg.senderHeartbeat) // Synchronize heartbeats if msg.senderHeartbeat > 0 && msg.senderHeartbeat-1 > currentHeartbeat { logfTrace("Heartbeat advanced from %d to %d", currentHeartbeat, msg.senderHeartbeat-1) currentHeartbeat = msg.senderHeartbeat - 1 } // Update statuses of the sender and any members the message includes. updateStatusesFromMessage(msg) // If there are broadcast bytes in the message, handle them here. receiveBroadcast(msg.broadcast) // Handle the verb. switch msg.verb { case verbPing: err = receiveVerbPingUDP(msg) case verbAck: err = receiveVerbAckUDP(msg) case verbPingRequest: err = receiveVerbForwardUDP(msg) case verbNonForwardingPing: err = receiveVerbNonForwardPingUDP(msg) } if err != nil { return err } return nil } func receiveVerbAckUDP(msg message) error { key := msg.sender.Address() + ":" + strconv.FormatInt(int64(msg.senderHeartbeat), 10) pendingAcks.RLock() _, ok := pendingAcks.m[key] pendingAcks.RUnlock() if ok { msg.sender.Touch() pendingAcks.Lock() if pack, ok := pendingAcks.m[key]; ok { // If this is a response to a requested ping, respond to the // callback node if pack.callback != nil { go transmitVerbAckUDP(pack.callback, pack.callbackCode) } else { // Note the ping response time. notePingResponseTime(pack) } } delete(pendingAcks.m, key) pendingAcks.Unlock() } return nil } func notePingResponseTime(pack *pendingAck) { // Note the elapsed time elapsedMillis := pack.elapsed() pack.node.pingMillis = int(elapsedMillis) // For the purposes of timeout tolerance, we treat all pings less than // the ping lower bound as that lower bound. 
minMillis := uint32(GetMinPingTime()) if elapsedMillis < minMillis { elapsedMillis = minMillis } pingdata.add(elapsedMillis) mean, stddev := pingdata.data() sigmas := pingdata.nSigma(timeoutToleranceSigmas) logfTrace("Got ACK in %dms (mean=%.02f stddev=%.02f sigmas=%.02f)", elapsedMillis, mean, stddev, sigmas) } func receiveVerbForwardUDP(msg message) error { // We don't forward to a node that we don't know. if len(msg.members) >= 0 && msg.members[0].status == StatusForwardTo { member := msg.members[0] node := member.node code := member.heartbeat key := node.Address() + ":" + strconv.FormatInt(int64(code), 10) pack := pendingAck{ node: node, startTime: GetNowInMillis(), callback: msg.sender, callbackCode: code, packType: packNFP} pendingAcks.Lock() pendingAcks.m[key] = &pack pendingAcks.Unlock() return transmitVerbGenericUDP(node, nil, verbNonForwardingPing, code) } return nil } func receiveVerbPingUDP(msg message) error { return transmitVerbAckUDP(msg.sender, msg.senderHeartbeat) } func receiveVerbNonForwardPingUDP(msg message) error { return transmitVerbAckUDP(msg.sender, msg.senderHeartbeat) } func startTimeoutCheckLoop() { for { pendingAcks.Lock() for k, pack := range pendingAcks.m { elapsed := pack.elapsed() timeoutMillis := uint32(pingdata.nSigma(timeoutToleranceSigmas)) // Ping requests are expected to take quite a bit longer. // Just call it 2x for now. if pack.packType == packPingReq { timeoutMillis *= 2 } // This pending ACK has taken longer than expected. Mark it as // timed out. 
if elapsed > timeoutMillis { switch pack.packType { case packPing: go doForwardOnTimeout(pack) case packPingReq: logDebug(k, "timed out after", timeoutMillis, "milliseconds (dropped PINGREQ)") if knownNodes.contains(pack.callback) { switch pack.callback.Status() { case StatusDead: break case StatusSuspected: updateNodeStatus(pack.callback, StatusDead, currentHeartbeat, thisHost) pack.callback.pingMillis = PingTimedOut default: updateNodeStatus(pack.callback, StatusSuspected, currentHeartbeat, thisHost) pack.callback.pingMillis = PingTimedOut } } case packNFP: logDebug(k, "timed out after", timeoutMillis, "milliseconds (dropped NFP)") if knownNodes.contains(pack.node) { switch pack.node.Status() { case StatusDead: break case StatusSuspected: updateNodeStatus(pack.node, StatusDead, currentHeartbeat, thisHost) pack.callback.pingMillis = PingTimedOut default: updateNodeStatus(pack.node, StatusSuspected, currentHeartbeat, thisHost) pack.callback.pingMillis = PingTimedOut } } } delete(pendingAcks.m, k) } } pendingAcks.Unlock() time.Sleep(time.Millisecond * 100) } } func transmitVerbGenericUDP(node *Node, forwardTo *Node, verb messageVerb, code uint32) error { // Transmit the ACK remoteAddr, err := net.ResolveUDPAddr("udp", node.Address()) c, err := net.DialUDP("udp", nil, remoteAddr) if err != nil { return err } defer c.Close() msg := newMessage(verb, thisHost, code) if forwardTo != nil { msg.addMember(forwardTo, StatusForwardTo, code, forwardTo.statusSource) } // Add members for update. nodes := getRandomUpdatedNodes(pingRequestCount(), node, thisHost) // No updates to distribute? Send out a few updates on other known nodes. if len(nodes) == 0 { nodes = knownNodes.getRandomNodes(pingRequestCount(), node, thisHost) } for _, n := range nodes { err = msg.addMember(n, n.status, n.heartbeat, n.statusSource) if err != nil { return err } n.emitCounter-- } // Emit counters for broadcasts can be less than 0. We transmit positive // numbers, and decrement all the others. 
At some value < 0, the broadcast // is removed from the map all together. broadcast := getBroadcastToEmit() if broadcast != nil { if broadcast.emitCounter > 0 { msg.addBroadcast(broadcast) } broadcast.emitCounter-- } _, err = c.Write(msg.encode()) if err != nil { return err } // Decrement the update counters on those nodes for _, m := range msg.members { m.node.emitCounter-- } logfTrace("Sent %v to %v", verb, node.Address()) return nil } func transmitVerbForwardUDP(node *Node, downstream *Node, code uint32) error { key := node.Address() + ":" + strconv.FormatInt(int64(code), 10) pack := pendingAck{ node: node, startTime: GetNowInMillis(), callback: downstream, packType: packPingReq} pendingAcks.Lock() pendingAcks.m[key] = &pack pendingAcks.Unlock() return transmitVerbGenericUDP(node, downstream, verbPingRequest, code) } func transmitVerbAckUDP(node *Node, code uint32) error { return transmitVerbGenericUDP(node, nil, verbAck, code) } func transmitVerbPingUDP(node *Node, code uint32) error { key := node.Address() + ":" + strconv.FormatInt(int64(code), 10) pack := pendingAck{ node: node, startTime: GetNowInMillis(), packType: packPing} pendingAcks.Lock() pendingAcks.m[key] = &pack pendingAcks.Unlock() return transmitVerbGenericUDP(node, nil, verbPing, code) } func updateStatusesFromMessage(msg message) { for _, m := range msg.members { // If the heartbeat in the message is less then the heartbeat // associated with the last known status, then we conclude that the // message is old and we drop it. if m.heartbeat < m.node.heartbeat { logfDebug("Message is old (%d vs %d): dropping", m.node.heartbeat, m.heartbeat) continue } switch m.status { case StatusForwardTo: // The FORWARD_TO status isn't useful here, so we ignore those. continue case StatusDead: // Don't tell ME I'm dead. 
if m.node.Address() != thisHost.Address() { updateNodeStatus(m.node, m.status, m.heartbeat, m.source) AddNode(m.node) } default: updateNodeStatus(m.node, m.status, m.heartbeat, m.source) AddNode(m.node) } } // Obviously, we know the sender is alive. Report it as such. if msg.senderHeartbeat > msg.sender.heartbeat { updateNodeStatus(msg.sender, StatusAlive, msg.senderHeartbeat, thisHost) } // Finally, if we don't know the sender we add it to the known hosts map. if !knownNodes.contains(msg.sender) { AddNode(msg.sender) } } // pendingAckType represents an expectation of a response to a previously // emitted PING, PINGREQ, or NFP. type pendingAck struct { startTime uint32 node *Node callback *Node callbackCode uint32 packType pendingAckType } func (a *pendingAck) elapsed() uint32 { return GetNowInMillis() - a.startTime } // pendingAckType represents the type of PING that a pendingAckType is waiting // for a response for: PING, PINGREQ, or NFP. type pendingAckType byte const ( packPing pendingAckType = iota packPingReq packNFP ) func (p pendingAckType) String() string { switch p { case packPing: return "PING" case packPingReq: return "PINGREQ" case packNFP: return "NFP" default: return "UNDEFINED" } }
apache-2.0
acthp/ucsc-xena-client
js/plotMutationVector.js
12128
// Mutation / structural-variant column plugin for the Xena visual spreadsheet:
// registers canvas renderers, legends, and tooltip builders with columnWidgets.
var _ = require('./underscore_ext');
var Rx = require('./rx');
import PureComponent from './PureComponent';
var React = require('react');
var Legend = require('./views/Legend');
var {rxEvents} = require('./react-utils');
var widgets = require('./columnWidgets');
var util = require('./util');
var CanvasDrawing = require('./CanvasDrawing');
var mv = require('./models/mutationVector');
var {drawSV, drawMutations, radius, toYPx, toYPxSubRow, minVariantHeight,
	splitRows} = require('./drawMutations');
var {chromPositionFromScreen} = require('./exonLayout');

// Since we don't set module.exports, but instead register ourselves
// with columWidgets, react-hot-loader can't handle the updates automatically.
// Accept hot loading here.
if (module.hot) {
	module.hot.accept();
	module.hot.accept('./models/mutationVector', () => {
		mv = require('./models/mutationVector');
	});
}

// Since there are multiple components in the file we have to use makeHot
// explicitly.
function hotOrNot(component) {
	return module.makeHot ? module.makeHot(component) : component;
}

// Legend for SNV columns; null until the column's legend data is computed.
function drawMutationLegend({column}) {
	if (!column.legend) {
		return null;
	}
	var {colors, labels, titles} = column.legend,
		labelheader = "Variant Impact";
	return (
		<Legend
			colors={colors}
			labels={labels}
			titles={titles}
			addNullNotation={1}
			labelheader={labelheader}/>
	);
}

// Legend for SV columns; like the SNV legend but with breakend notation.
function drawSVLegend({column}) {
	if (!column.legend) {
		return null;
	}
	var {colors, labels, titles} = column.legend,
		labelheader = "Variant Impact";
	return (
		<Legend
			colors={colors}
			labels={labels}
			titles={titles}
			addBreakend={1}
			addNullNotation={1}
			labelheader={labelheader}/>
	);
}

// Hit-test for SNV columns: find nodes near pixel (x, y), then return every
// record sharing the position and sample of the highest-impact hit.
function closestNodeSNV(nodes, zoom, x, y) {
	var cutoffX = radius,
		{index, height, count} = zoom,
		cutoffY = minVariantHeight(height / count) / 2,
		end = index + count,
		nearBy = _.filter(nodes, n => n.y >= index && n.y < end &&
			Math.abs(y - toYPx(zoom, n).y) < cutoffY &&
			(x > n.xStart - cutoffX) && (x < n.xEnd + cutoffX));

	var closest = _.max(nearBy, n => mv.impact[mv.getSNVEffect(mv.impact, n.data.effect)] || 0),
		//multiple records of the same location same sample
		allClosest = _.filter(nearBy, n => (n.data.start === closest.data.start) &&
			(n.data.end === closest.data.end) && (n.data.sample === closest.data.sample));

	return allClosest;
}

// Hit-test for SV columns: every node whose x-span and (sub-)row contains the
// pointer. Uses sub-row y mapping when rows are split.
function closestNodeSV(nodes, zoom, x, y) {
	var {index, height, count} = zoom,
		end = index + count,
		toY = splitRows(count, height) ? toYPxSubRow : toYPx,
		underRow = v => {
			var {svHeight, y: suby} = toY(zoom, v);
			return Math.abs(y - suby) < svHeight / 2;
		},
		underMouse = _.filter(nodes, n => n.y >= index && n.y < end &&
			x >= n.xStart && x <= n.xEnd && underRow(n));
	return underMouse;
}

// Dispatch table: hit-test implementation by column fieldType.
var closestNode = {
	SV: closestNodeSV,
	mutation: closestNodeSNV
};

// Render an allele frequency as a percent string; null for missing values.
function formatAf(af) {
	return (af === 'NA' || af === '' || af == null) ? null :
		Math.round(af * 100) + '%';
}

// Apply fmt to x when x is a meaningful string; otherwise return default d.
var fmtIf = (x, fmt, d = '') => (_.isString(x) && x !== 'NaN' && x !== '') ? fmt(x) : d;

var dropNulls = rows => rows.map(row => row.filter(col => col != null)) // drop empty cols
	.filter(row => row.length > 0); // drop empty rows

//gb position string of the segment with 15bp extra on each side, centered at segment
var posRegionString = p => `${p.chr}:${util.addCommas(p.start - 15)}-${util.addCommas(p.end + 15)}`;

//gb position string like chr3:178,936,070-178,936,070
var posDoubleString = p => `${p.chr}:${util.addCommas(p.start)}-${util.addCommas(p.end)}`;

//gb position string like chr3:178,936,070
var posStartString = p => `${p.chr}:${util.addCommas(p.start)}`;

var gbURL = (assembly, pos, highlightPos, GBoptions) => {
	// assembly : e.g. hg18
	// pos: e.g. chr3:178,936,070-178,936,070
	// highlight: e.g. chr3:178,936,070-178,936,070
	// GBoptions.hubUrl: To build a URL that will load the hub directly
	// GBoptions.fullTracks: full display mode track list
	var assemblyString = encodeURIComponent(assembly),
		positionString = encodeURIComponent(pos),
		highlightString = encodeURIComponent(highlightPos),
		hubString = GBoptions && GBoptions.assembly === assembly && GBoptions.hubUrl ?
			"&hubUrl=" + encodeURIComponent(GBoptions.hubUrl) : '',
		trackString = GBoptions && GBoptions.fullTracks ?
			'&hideTracks=1' + GBoptions.fullTracks.map(track => `&${track}=full`).join('') : '',
		GBurl = `http://genome.ucsc.edu/cgi-bin/hgTracks?db=${assemblyString}${hubString}${trackString}&highlight=${assemblyString}.${highlightString}&position=${positionString}`;
	return GBurl;
};

var gbMultiColorURL = (assembly, pos, posColorList, GBoptions) => {
	// assembly : e.g. hg18
	// pos: e.g. chr3:178,936,070-178,936,070
	// posColorList: [[chr3:178,936,070-178,936,070, AA0000], ...]
	// GBoptions.hubUrl: To build a URL that will load the hub directly
	// GBoptions.fullTracks: full display mode track list
	var assemblyString = encodeURIComponent(assembly),
		positionString = encodeURIComponent(pos),
		highlightString = posColorList.map(p => `${assemblyString}.${p[0]}${p[1]}`).join('|'),
		hubString = GBoptions && GBoptions.hubUrl ?
			"&hubUrl=" + encodeURIComponent(GBoptions.hubUrl) : '',
		trackString = GBoptions && GBoptions.assembly === assembly && GBoptions.fullTracks ?
			'&hideTracks=1' + GBoptions.fullTracks.map(track => `&${track}=full`).join('') : '',
		GBurl = `http://genome.ucsc.edu/cgi-bin/hgTracks?db=${assemblyString}${hubString}${trackString}&highlight=${encodeURIComponent(highlightString)}&position=${positionString}`;
	return GBurl;
};

// Per-assembly genome-browser options: hub and track list for hg19/hg38
// families, empty object otherwise.
var defaultSNVSVGBsetting = (assembly) => {
	if (assembly === 'hg19' || assembly === 'GRCh37') {
		return {
			assembly: assembly,
			hubUrl: 'http://hgwdev.soe.ucsc.edu/~max/immuno/track/hub/hub.txt', // Max Haussler's cancer genomics hub
			fullTracks: ['knownGene', 'refGene', 'wgEncodeGencodeV24lift37', // gene annotation
				'ucscGenePfam', 'spUniprot', // protein annotation
				//'pubs', // publication
				'wgEncodeRegDnaseClustered', 'wgEncodeAwgSegmentation', // encode regulation
				'hub_29889_dienstmann', 'hub_29889_civic', 'hub_29889_oncokb' // cancer genomics knowledgebase
			]
		};
	} else if (assembly === 'hg38' || assembly === 'GRCh38') {
		return {
			assembly: assembly,
			fullTracks: ['knownGene', 'refGene', // gene annotation
				'ucscGenePfam', 'spUniprot', // protein annotation
				//'pubs', // publication
				'wgEncodeRegDnaseClustered', // encode regulation
			]
		};
	} else {
		return {};
	}
};

// Build tooltip rows for one or more variant records on a single sample:
// effect summary, position/alt genome-browser links, and allele frequencies.
function sampleTooltip(sampleFormat, dataList, assembly, fields) {
	var perRowTip = data => {
		var dnaVaf = data.dnaVaf == null ? null :
				['labelValue', 'DNA variant allele freq', formatAf(data.dnaVaf)],
			rnaVaf = data.rnaVaf == null ? null :
				['labelValue', 'RNA variant allele freq', formatAf(data.rnaVaf)],
			ref = data.reference && ['label', ` ${data.reference} to `],
			//alt
			altDirection = data.alt && mv.joinedVariantDirection(data.alt),
			altStart = altDirection && parseInt(mv.posFromAlt(data.alt)),
			altPos = altDirection && `chr${mv.chromFromAlt(data.alt)}:${altStart}-${altStart}`,
			altRegion = altDirection && altDirection === 'left' ?
				`chr${mv.chromFromAlt(data.alt)}:${altStart - 100}-${altStart - 1}` :
				`chr${mv.chromFromAlt(data.alt)}:${altStart + 1}-${altStart + 100}`,
			altDisplayRegion = altDirection &&
				`chr${mv.chromFromAlt(data.alt)}:${altStart - 150}-${altStart + 150}`,
			//variant
			variantDirection = data.alt && mv.structuralVariantClass(data.alt),
			start = data.start,
			dataRegion = variantDirection && variantDirection === 'left' ?
				`${data.chr}:${start - 100}-${start - 1}` :
				`${data.chr}:${start + 1}-${start + 100}`,
			dataDisplayRegion = altDirection && `${data.chr}:${start - 150}-${start + 150}`,
			//alt link
			alt = data.alt && (mv.structuralVariantClass(data.alt) ?
				['url', `${data.alt}`,
					gbMultiColorURL(assembly, altDisplayRegion,
						[[altPos, '#AA0000' ], [altRegion, '#aec7e8']],
						defaultSNVSVGBsetting(assembly))] :
				['label', `${data.alt}`]),
			//variant link
			posDisplay = data && (data.start === data.end) ? posStartString(data) : posDoubleString (data),
			posURL = ['url', `${assembly} ${posDisplay}`,
				altDirection ?
					gbMultiColorURL(assembly, dataDisplayRegion,
						[[posDoubleString(data), '#AA0000' ], [dataRegion, '#aec7e8']],
						defaultSNVSVGBsetting(assembly)) :
					gbURL(assembly, posRegionString(data), posDoubleString (data),
						defaultSNVSVGBsetting(assembly))],
			effect = ['value', fmtIf(data.effect, x => `${x}, `, `unannotated`) + //eslint-disable-line comma-spacing
				fmtIf(data.gene, x => `${x}`) +
				fmtIf(data.aminoAcid, x => ` (${x})`) +
				fmtIf(data.altGene, x => `--${x} `)
			];

		return dropNulls([
			[effect],
			[posURL, ref, alt],
			[dnaVaf],
			[rnaVaf]
		]);
	};

	//sort dataList by fields[0], put variants annoated with fields[0] in dataset in front
	dataList = _.sortBy(dataList, obj => obj.gene === fields[0]).reverse();

	var rows = _.reduce(_.map(dataList/*.slice(0, 3)*/, perRowTip),
		function(a, b) {return a.concat(b);}, []);
	// if (dataList.length > 3) {
	//	var allRows = rows.concat(_.reduce(_.map(dataList.slice(3), perRowTip),
	//		function(a, b) { return b.concat(a); }, []));
	//	rows.push([["popOver", dataList.length - 3 + " more ...", allRows]]);
	// }

	return {
		rows: rows,
		sampleID: sampleFormat(dataList[0].sample)
	};
}

// Tooltip when no variant is under the pointer: the sample at the row, and a
// genome-browser link for the chromosome position under the cursor.
function posTooltip(layout, samples, sampleFormat, pixPerRow, index, assembly, x, y) {
	var yIndex = Math.round((y - pixPerRow / 2) / pixPerRow + index),
		pos = Math.floor(chromPositionFromScreen(layout, x)),
		coordinate = {
			chr: layout.chromName,
			start: pos,
			end: pos
		};
	return {
		sampleID: sampleFormat(samples[yIndex]),
		rows: [[['url',
			`${assembly} ${posStartString(coordinate)}`,
			gbURL(assembly, posRegionString(coordinate), posDoubleString(coordinate))]]]};
}

// Top-level tooltip dispatcher: variant tooltip if any node is under the
// mouse, else a bare position tooltip.
function tooltip(id, fieldType, fields, layout, nodes, samples, sampleFormat, zoom, assembly, ev) {
	var {x, y} = util.eventOffset(ev),
		{height, count, index} = zoom,
		pixPerRow = height / count,
		// XXX workaround for old bookmarks w/o chromName
		lo = _.updateIn(layout, ['chromName'],
			c => c || _.getIn(nodes, [0, 'data', 'chr'])),
		closestNodes = closestNode[fieldType](nodes, zoom, x, y);

	return {x, id, ...(closestNodes.length > 0 ?
		sampleTooltip(sampleFormat, _.pluck(closestNodes, 'data'), assembly, fields) :
		posTooltip(lo, samples, sampleFormat, pixPerRow, index, assembly, x, y))};
}

// Canvas-backed column component; wires mouse events to the tooltip stream.
var MutationColumn = hotOrNot(class extends PureComponent {
	componentWillMount() {
		var events = rxEvents(this, 'mouseout', 'mousemove', 'mouseover');

		// Compute tooltip events from mouse events.
		this.ttevents = events.mouseover
			.filter(ev => util.hasClass(ev.currentTarget, 'Tooltip-target'))
			.flatMap(() => {
				return events.mousemove
					.takeUntil(events.mouseout)
					.map(ev => ({data: this.tooltip(ev), open: true})) // look up current data
					.concat(Rx.Observable.of({open: false}));
			}).subscribe(this.props.tooltip);
	}
	componentWillUnmount() {
		this.ttevents.unsubscribe();
	}
	tooltip = (ev) => {
		var {column: {fieldType, fields, layout, nodes, assembly}, samples, sampleFormat, zoom, id} = this.props;
		return tooltip(id, fieldType, fields, layout, nodes, samples, sampleFormat, zoom, assembly, ev);
	};
	render() {
		var {column, samples, zoom, index, draw} = this.props;
		return (
			<CanvasDrawing
					ref='plot'
					draw={draw}
					wrapperProps={{
						className: 'Tooltip-target',
						onMouseMove: this.on.mousemove,
						onMouseOut: this.on.mouseout,
						onMouseOver: this.on.mouseover,
						onClick: this.props.onClick
					}}
					nodes={column.nodes}
					strand={column.strand}
					width={column.width}
					index={index}
					samples={samples}
					xzoom={column.zoom}
					zoom={zoom}/>);
	}
});

// Register renderers and legends with the column-widget registry.
widgets.column.add('mutation',
		props => <MutationColumn draw={drawMutations} {...props} />);
widgets.column.add('SV',
		props => <MutationColumn draw={drawSV} {...props} />);

widgets.legend.add('mutation', drawMutationLegend);
widgets.legend.add('SV', drawSVLegend);
apache-2.0
yangb8/pravega-inputformat
src/main/java/io/pravega/examples/hadoop/WordCount.java
2781
/* * Copyright 2017 Dell/EMC * * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.pravega.examples.hadoop; import io.pravega.hadoop.mapreduce.PravegaInputFormat; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat; import org.apache.hadoop.util.GenericOptionsParser; import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; public class WordCount extends Configured implements Tool { public static void main(String[] args) throws Exception { int res = ToolRunner.run(new Configuration(), new WordCount(), args); System.exit(res); } @Override public int run(String[] args) throws Exception { Configuration conf = this.getConf(); GenericOptionsParser optionParser = new GenericOptionsParser(conf, args); String[] remainingArgs = optionParser.getRemainingArgs(); if (remainingArgs.length != 4) { System.err.println("Usage: WordCount <url> <scope> <stream> <out>"); System.exit(2); } conf.setStrings(PravegaInputFormat.URI_STRING, remainingArgs[0]); conf.setStrings(PravegaInputFormat.SCOPE_NAME, 
remainingArgs[1]); conf.setStrings(PravegaInputFormat.STREAM_NAME, remainingArgs[2]); conf.setBoolean(PravegaInputFormat.DEBUG, true); Job job = Job.getInstance(conf, "WordCount"); job.setJarByClass(WordCount.class); job.setInputFormatClass(PravegaInputFormat.class); job.setOutputKeyClass(Text.class); job.setOutputValueClass(IntWritable.class); job.setMapperClass(TokenizerMapper.class); job.setReducerClass(SumReducer.class); FileOutputFormat.setOutputPath(job, new Path(remainingArgs[3])); return job.waitForCompletion(true) ? 0 : 1; } }
apache-2.0
PrisonBreak2017/nodejs-ex
web/src/reducer/globalStatusReducer.js
5773
/**
 * Redux reducer for global UI / proxy status flags: recording state, menu
 * panels, record-list paging, HTTPS interception flags, etc.
 * Every case returns a shallow copy of the state with one slice updated.
 */
const defaultStatus = {
    recording: true,
    panelRefreshing: true, // indicate whether the record panel should be refreshing
    showFilter: false, // if the filter panel is showing
    showMapLocal: false,
    activeMenuKey: '',
    canLoadMore: false,
    interceptHttpsFlag: false,
    globalProxyFlag: false, // is global proxy now
    filterStr: '',
    directory: [],
    lastActiveRecordId: -1,
    currentActiveRecordId: -1,
    shouldClearAllRecord: false,
    appVersion: '',
    // BUG FIX: INCREASE_DISPLAY_RECORD_LIST does `displayRecordLimit += n`;
    // without a numeric default the first increment produced NaN.
    // NOTE(review): confirm 0 is the intended initial limit for consumers.
    displayRecordLimit: 0,
    panelLoadingNext: false,
    panelLoadingPrev: false,
    showNewRecordTip: false,
    isRootCAFileExists: false,
    fetchingRecord: false,
    wsPort: null,
    mappedConfig: [] // configured map config
};

import { MenuKeyMap } from 'common/Constant';
import {
    STOP_RECORDING,
    RESUME_RECORDING,
    SHOW_FILTER,
    HIDE_FILTER,
    UPDATE_FILTER,
    UPDATE_LOCAL_DIRECTORY,
    SHOW_MAP_LOCAL,
    HIDE_MAP_LOCAL,
    UPDATE_LOCAL_MAPPED_CONFIG,
    UPDATE_ACTIVE_RECORD_ITEM,
    UPDATE_LOCAL_INTERCEPT_HTTPS_FLAG,
    UPDATE_LOCAL_GLOBAL_PROXY_FLAG,
    HIDE_ROOT_CA,
    SHOW_ROOT_CA,
    UPDATE_CAN_LOAD_MORE,
    INCREASE_DISPLAY_RECORD_LIST,
    UPDATE_SHOULD_CLEAR_RECORD,
    UPDATE_APP_VERSION,
    UPDATE_IS_ROOTCA_EXISTS,
    UPDATE_SHOW_NEW_RECORD_TIP,
    UPDATE_GLOBAL_WSPORT,
    UPDATE_FETCHING_RECORD_STATUS
} from 'action/globalStatusAction';

// The map to save the mapping relationships of the path and its location in the tree node
const directoryNodeMap = {};
// The list that stores all the directories in a tree way (typo "direcotryList" fixed)
let directoryList = [];

/**
 * Merges one directory listing (`sub`) into the cached tree.
 * Hidden entries (names starting with '.') are skipped. When `path` is
 * falsy the children become the tree roots, otherwise they are attached
 * under the already-registered node for `path`.
 * Returns the root list of the tree.
 */
function getTreeMap(path, sub) {
    const children = [];
    sub.directory.forEach((item) => {
        if (!(item.name.indexOf('.') === 0)) {
            item.isLeaf = false;
            directoryNodeMap[item.fullPath] = item;
            children.push(item);
        }
    });
    sub.file.forEach((item) => {
        if (!(item.name.indexOf('.') === 0)) {
            item.isLeaf = true;
            directoryNodeMap[item.fullPath] = item;
            children.push(item);
        }
    });
    if (!path) {
        directoryList = children;
    } else {
        directoryNodeMap[path].children = children;
    }
    return directoryList;
}

function requestListReducer(state = defaultStatus, action) {
    switch (action.type) {
        case STOP_RECORDING: {
            const newState = Object.assign({}, state);
            newState.recording = false;
            return newState;
        }
        case RESUME_RECORDING: {
            const newState = Object.assign({}, state);
            newState.recording = true;
            return newState;
        }
        case SHOW_FILTER: {
            const newState = Object.assign({}, state);
            newState.activeMenuKey = MenuKeyMap.RECORD_FILTER;
            return newState;
        }
        case HIDE_FILTER: {
            const newState = Object.assign({}, state);
            newState.activeMenuKey = '';
            return newState;
        }
        case UPDATE_FILTER: {
            const newState = Object.assign({}, state);
            newState.filterStr = action.data;
            return newState;
        }
        case SHOW_MAP_LOCAL: {
            const newState = Object.assign({}, state);
            newState.activeMenuKey = MenuKeyMap.MAP_LOCAL;
            return newState;
        }
        case HIDE_MAP_LOCAL: {
            const newState = Object.assign({}, state);
            newState.activeMenuKey = '';
            return newState;
        }
        case UPDATE_LOCAL_DIRECTORY: {
            const newState = Object.assign({}, state);
            const { path, sub } = action.data;
            newState.directory = getTreeMap(path, sub);
            return newState;
        }
        case UPDATE_LOCAL_MAPPED_CONFIG: {
            const newState = Object.assign({}, state);
            newState.mappedConfig = action.data;
            return newState;
        }
        case UPDATE_ACTIVE_RECORD_ITEM: {
            // remember the previously active record before switching
            const newState = Object.assign({}, state);
            newState.lastActiveRecordId = state.currentActiveRecordId;
            newState.currentActiveRecordId = action.data;
            return newState;
        }
        case UPDATE_LOCAL_INTERCEPT_HTTPS_FLAG: {
            const newState = Object.assign({}, state);
            newState.interceptHttpsFlag = action.data;
            return newState;
        }
        case UPDATE_LOCAL_GLOBAL_PROXY_FLAG: {
            const newState = Object.assign({}, state);
            newState.globalProxyFlag = action.data;
            return newState;
        }
        case SHOW_ROOT_CA: {
            const newState = Object.assign({}, state);
            newState.activeMenuKey = MenuKeyMap.ROOT_CA;
            return newState;
        }
        case HIDE_ROOT_CA: {
            const newState = Object.assign({}, state);
            newState.activeMenuKey = '';
            return newState;
        }
        case UPDATE_CAN_LOAD_MORE: {
            const newState = Object.assign({}, state);
            newState.canLoadMore = action.data;
            return newState;
        }
        case UPDATE_SHOULD_CLEAR_RECORD: {
            const newState = Object.assign({}, state);
            newState.shouldClearAllRecord = action.data;
            return newState;
        }
        case INCREASE_DISPLAY_RECORD_LIST: {
            const newState = Object.assign({}, state);
            newState.displayRecordLimit += action.data;
            return newState;
        }
        case UPDATE_APP_VERSION: {
            const newState = Object.assign({}, state);
            newState.appVersion = action.data;
            return newState;
        }
        case UPDATE_SHOW_NEW_RECORD_TIP: {
            const newState = Object.assign({}, state);
            newState.showNewRecordTip = action.data;
            return newState;
        }
        case UPDATE_IS_ROOTCA_EXISTS: {
            const newState = Object.assign({}, state);
            newState.isRootCAFileExists = action.data;
            return newState;
        }
        case UPDATE_GLOBAL_WSPORT: {
            const newState = Object.assign({}, state);
            newState.wsPort = action.data;
            return newState;
        }
        case UPDATE_FETCHING_RECORD_STATUS: {
            const newState = Object.assign({}, state);
            newState.fetchingRecord = action.data;
            return newState;
        }
        default: {
            return state;
        }
    }
}

export default requestListReducer;
apache-2.0
thpeng/spring-time
spring-time-web/src/main/webapp/app/js/app/controllers/logout.js
429
'use strict'; angular.module('time') .controller('LogoutCtrl', ['AuthService', '$scope', '$state', '$rootScope', function (auth, $scope, $state, $rootScope) { if ($rootScope.currentUser === null) { alert("no currentUser set!"); } $scope.logout = function () { auth.logout(); console.debug('logout'); $state.transitionTo('login'); }; }]);
apache-2.0
emmartins/wildfly-server-migration
migrations/eap7.4/eap6.4/src/main/java/org/jboss/migration/eap6/to/eap7/tasks/EAP6_4ToEAP7_4UpdateEJB3Subsystem.java
1516
/* * Copyright 2020 Red Hat, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jboss.migration.eap6.to.eap7.tasks; import org.jboss.migration.core.jboss.JBossSubsystemNames; import org.jboss.migration.wfly10.config.task.management.subsystem.UpdateSubsystemResources; import org.jboss.migration.wfly10.config.task.subsystem.ejb3.AddInfinispanPassivationStoreAndDistributableCache; import org.jboss.migration.wfly10.config.task.subsystem.ejb3.DefinePassivationDisabledCacheRef; import org.jboss.migration.wfly10.config.task.subsystem.ejb3.RefHttpRemotingConnectorInEJB3Remote; /** * @author emmartins */ public class EAP6_4ToEAP7_4UpdateEJB3Subsystem<S> extends UpdateSubsystemResources<S> { public EAP6_4ToEAP7_4UpdateEJB3Subsystem() { super(JBossSubsystemNames.EJB3, new RefHttpRemotingConnectorInEJB3Remote<>(), new DefinePassivationDisabledCacheRef<>(), new AddInfinispanPassivationStoreAndDistributableCache<>()); } }
apache-2.0
HackerspaceKRK/zamek
converter.rb
1076
#!/usr/bin/ruby # -*- coding: utf-8 -*- # # Copyright 2013 Łukasz Dubiel <bambucha14@gmail.com> # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # See the License for the specific language governing permissions and # limitations under the License. # # def encode(karta) result = karta.to_i(16).to_s(2) while result.size < 40 result = "0" + result end result = result.reverse.to_i(2).to_s(16) while result.size < 10 result = "0" + result end return result end if ARGV.size == 1 puts(encode(ARGV.first)) else puts(encode(gets)) end
apache-2.0
baotiao/Asenal
acm/fM.cc
4101
// Competitive-programming scratch file: attempted solution for the
// "Median of Two Sorted Arrays" problem (find the median of sorted A[m]
// and B[n]).  NOTE(review): several branches below have no return
// statement (undefined behavior if hit) and the reduction logic looks
// unfinished — treat this as work-in-progress, not a verified solution.
#include <iostream>
#include <cstdio>
#include <algorithm>
#include <cstring>
#include <string>
#include <cmath>
#include <vector>
#include <queue>
#include <map>
#include <ctime>
#include <set>
typedef long long lld;
using namespace std;

// Debug macros: only print when compiled with -DDEBUG.
#ifdef DEBUG
#define debug(x) cout<<__LINE__<<" "<<#x<<"="<<x<<endl;
#define debug2(x, y) cout<<__LINE__<<" "<<#x<<"="<<x<<" "<<#y<<"="<<y<<endl;
#else
#define debug(x)
#define debug2(x, y)
#endif

class Solution {
// NOTE(review): these macros are redefined identically inside the class
// (harmless duplication of the block above).
#ifdef DEBUG
#define debug(x) cout<<__LINE__<<" "<<#x<<"="<<x<<endl;
#define debug2(x, y) cout<<__LINE__<<" "<<#x<<"="<<x<<" "<<#y<<"="<<y<<endl;
#else
#define debug(x)
#define debug2(x, y)
#endif
#define here cout<<__LINE__<< " " << "_______________here" <<endl;
#define clr(NAME,VALUE) memset(NAME,VALUE,sizeof(NAME))
#define MAX 0x7fffffff
#define MIN -9999999
#define N 16000010
#define PRIME 999983
public:
    /*
     * 1, 2, 6, 99, 100
     * 3, 5, 7, 8, 11, 12, 14, 22, 33, 34, 44, 55, 66, 77, 88, 89, 90, 91, 92
     */
    // Median of {num} merged with sorted B[0..n): case analysis against the
    // middle element(s) of B.  NOTE(review): for odd n the case
    // num == B[n/2 ± 1] falls through without a return.
    double find1(int num, int B[], int n) {
        if (n % 2 == 1) {
            debug(num);
            debug(n);
            if (num == B[n / 2]) {
                return num;
            } else if (num < B[n / 2] && num > B[n / 2 - 1]) {
                return (double)(B[n / 2] + num) / 2;
            } else if (num < B[n / 2] && num < B[n / 2 - 1]) {
                return (double)(B[n / 2] + B[n / 2 - 1]) / 2;
            } else if (num > B[n / 2] && num < B[n / 2 + 1]) {
                debug(B[n / 2]);
                debug(num);
                return (double)(B[n / 2] + num) / 2;
            } else if (num > B[n / 2] && num > B[n / 2 + 1]) {
                return (double)(B[n / 2] + B[n / 2 + 1]) / 2;
            }
        } else {
            // even n: the median is clamped into [B[n/2-1], B[n/2]]
            if (num >= B[n / 2 - 1] && num <= B[n / 2]) {
                return num;
            } else if (num < B[n / 2 - 1]) {
                return B[n / 2 - 1];
            } else {
                return B[n / 2];
            }
        }
    }

    // Median of {n1 <= n2} merged with sorted B[0..n).
    // NOTE(review): the odd-n branch has no return when
    // B[n/2-1] <= n1 and n2 == B[n/2]-boundary gaps are not matched.
    double find2(int n1, int n2, int B[], int n) {
        int l, r;  // NOTE(review): unused
        if (n % 2 == 1) {
            if (n1 <= B[n / 2] && n2 >= B[n / 2]) {
                return B[n / 2];
            } else if (n2 < B[n / 2]) {
                if (n2 < B[n / 2 - 1]) {
                    return B[n / 2 - 1];
                } else {
                    return n2;
                }
            } else if (n1 >= B[n / 2]) {
                if (n1 >= B[n / 2 + 1]) {
                    return B[n / 2 + 1];
                } else {
                    return n1;
                }
            }
        } else {
            if (n2 <= B[n / 2 - 1]) {
                return (double)(B[n / 2 - 1] + max(B[n / 2 - 2], n2)) / 2;
            } else if (n1 >= B[n / 2]) {
                debug(n1);
                debug2(B[n / 2 + 1], n1);
                debug(B[n / 2]);
                return (double)(B[n / 2] + min(B[n / 2 + 1], n1)) / 2;
            } else if (n2 <= B[n / 2]) {
                return (double)(max(B[n / 2 - 1], n1) + n2) / 2;
            } else if (n1 >= B[n / 2 - 1]) {
                return (double)(n1 + min(B[n / 2], n2)) / 2;
            } else if (n2 >= B[n / 2]) {
                return (double)(B[n / 2] + max(B[n / 2 - 1], n1)) / 2;
            } else if (n1 <= B[n / 2 - 1]) {
                return (double)(B[n / 2 - 1] + min(B[n / 2], n2)) / 2;
            }
        }
    }

    // Recursive reduction: handle the small base cases (m or n in {0,1,2})
    // explicitly, otherwise discard an equal number of elements from the
    // low side of A and the high side of B and recurse.
    double findRes(int A[], int m, int B[], int n) {
        debug2(m, n);
        if (m == 0 && n % 2 == 0) {
            return (double)(B[n / 2] + B[n / 2 - 1]) / 2;
        } else if (m == 0 && n % 2 == 1) {
            return B[n / 2];
        } else if (n == 0) {
            // swap roles so the m == 0 cases above apply
            return findRes(B, n, A, m);
        } else if (m == 1 && n == 1) {
            return (double)(A[0] + B[0]) / 2;
        } else if (m == 1) {
            return find1(A[0], B, n);
        } else if (n == 1) {
            return find1(B[0], A, m);
        } else if (m == 2 && n == 2) {
            debug2(A[0], A[1]);
            debug2(B[0], B[1]);
            return (double)((double)max(A[0], B[0]) + (double)min(A[1], B[1])) / 2;
        } else if (m == 2) {
            return find2(A[0], A[1], B, n);
        } else if (n == 2) {
            return findRes(B, n, A, m);
        }
        int l, r;
        debug2(m, n);
        if (A[m / 2] <= B[n / 2]) {
            // drop min(l, r) elements from the front of A and the back of B
            l = (m - 1)/ 2;
            r = n - (n / 2 + 1);
            debug(l);
            debug(r);
            if (l < r) {
                return findRes(A + l, m - l, B, n - l);
            } else {
                return findRes(A + r, m - r, B, n - r);
            }
        } else {
            return findRes(B, n, A, m);
        }
    }

    // Public entry point (classic LeetCode signature).
    double findMedianSortedArrays(int A[], int m, int B[], int n) {
        return findRes(A, m, B, n);
    }
};

// Ad-hoc driver: runs one hard-coded example and prints it under -DDEBUG.
int main() {
#ifdef DEBUG
    freopen("a", "r", stdin);
#endif
    Solution s;
    int A[] = {1, 5, 6, 7};
    int B[] = {2, 3, 4, 8, 9, 10};
    double ans = s.findMedianSortedArrays(A, 4, B, 6);
    debug(ans);
    return 0;
}
apache-2.0
mattmelloy/owags
ms_dotnet/metadata.rb
574
name 'ms_dotnet' maintainer 'Criteo' maintainer_email 'b.courtois@criteo.com' license 'Apache 2.0' description 'Installs/Configures ms_dotnet' long_description IO.read(File.join(File.dirname(__FILE__), 'README.md')) version '3.2.1' supports 'windows' depends 'windows', '>= 2.1.0' chef_version '>= 12.1' if respond_to?(:chef_version) source_url 'https://github.com/criteo-cookbooks/ms_dotnet' if respond_to?(:source_url) issues_url 'https://github.com/criteo-cookbooks/ms_dotnet/issues' if respond_to?(:issues_url)
apache-2.0
Epi-Info/Epi-Info-Community-Edition
Epi.Core.EnterInterpreter/Rules/Rule_Assign_DLL_Statement.cs
4205
using System;
using System.Collections.Generic;
using System.Text;
using com.calitha.goldparser;

namespace Epi.Core.EnterInterpreter.Rules
{
    /// <summary>
    /// Interpreter rule for an ASSIGN statement whose right-hand side is an
    /// external DLL method call, e.g. ASSIGN var = class!method(args).
    /// Parses the parse-tree tokens in the constructor and invokes the DLL
    /// (and stores the result in the target variable) in Execute().
    /// </summary>
    public class Rule_Assign_DLL_Statement : EnterRule
    {
        string QualifiedId = null;  // target variable name (left-hand side)
        string ClassName = null;    // registered DLL class name
        string MethodName = null;   // method to invoke on the DLL class
        private List<EnterRule> ParameterList = new List<EnterRule>();

        public Rule_Assign_DLL_Statement(Rule_Context pContext, NonterminalToken pToken)
            : base(pContext)
        {
            //<Assign_DLL_Statement> ::= ASSIGN <Qualified ID> '=' identifier'!'Identifier '(' <FunctionParameterList> ')'
            this.QualifiedId = this.GetCommandElement(pToken.Tokens, 1);
            this.ClassName = this.GetCommandElement(pToken.Tokens, 3);
            this.MethodName = this.GetCommandElement(pToken.Tokens, 5);
            //this.ParameterList = new Rule_FunctionParameterList(pContext, (NonterminalToken)pToken.Tokens[7]);
            this.ParameterList = EnterRule.GetFunctionParameters(pContext, (NonterminalToken)pToken.Tokens[7]);

            // Register the assignment target (lower-cased) for variable validation.
            if (pContext.IsVariableValidationEnable)
            {
                if (!string.IsNullOrEmpty(this.QualifiedId) && !this.Context.CommandVariableCheck.ContainsKey(this.QualifiedId.ToLowerInvariant()))
                {
                    this.Context.CommandVariableCheck.Add(this.QualifiedId.ToLowerInvariant(), this.QualifiedId.ToLowerInvariant());
                }
            }
        }

        /// <summary>
        /// Performs the assign rule: evaluates the DLL call (when the class is
        /// registered) and assigns the result to the target variable; returns
        /// the value that was assigned (may be null when the DLL call did not run).
        /// </summary>
        /// <returns>object</returns>
        public override object Execute()
        {
            object result = null;
            object[] args = null;
            if (!string.IsNullOrEmpty(this.ClassName))
            {
                // this is a dll call
                // and is NOT implemented as of 11/05/2010
                if (this.Context.DLLClassList.ContainsKey(this.ClassName.ToLowerInvariant()))
                {
                    // Evaluate each parameter expression before the call.
                    if (this.ParameterList.Count > 0)
                    {
                        args = new object[this.ParameterList.Count];
                        for (int i = 0; i < this.ParameterList.Count; i++)
                        {
                            args[i] = this.ParameterList[i].Execute();
                        }
                    }
                    else
                    {
                        args = new object[0];
                    }
                    IDLLClass DLLObject = this.Context.DLLClassList[this.ClassName.ToLowerInvariant()];
                    result = DLLObject.Execute (this.MethodName, args);
                }
            }

            IVariable var;
            //DataType dataType = DataType.Unknown;
            string dataValue = string.Empty;
            var = (IVariable) this.Context.CurrentScope.Resolve(this.QualifiedId);
            if (var != null)
            {
                if (var.VarType == VariableType.DataSource)
                {
                    // Data-source fields cannot be mutated in place: redefine the
                    // variable with the new expression.
                    // NOTE(review): result.ToString() throws if the DLL call was
                    // skipped (result == null) — confirm this path is unreachable
                    // for DataSource targets.
                    IVariable fieldVar = new DataSourceVariableRedefined(var.Name, var.DataType);
                    fieldVar.PromptText = var.PromptText;
                    fieldVar.Expression = result.ToString();
                    this.Context.CurrentScope.Undefine(var.Name);
                    this.Context.CurrentScope.Define((EpiInfo.Plugin.IVariable)fieldVar);
                }
                else
                {
                    if (result != null)
                    {
                        var.Expression = result.ToString();
                    }
                    else
                    {
                        var.Expression = "Null";
                    }
                }
            }
            else
            {
                // Unknown variable: delegate the assignment to the check-code host.
                if (result != null)
                {
                    this.Context.EnterCheckCodeInterface.Assign(this.QualifiedId, result);
                }
            }
            return result;
        }
    }
}
apache-2.0
axzae/homeassist-builder
app/src/main/java/com/axzae/homeassistant/MainActivity.java
3252
package com.axzae.homeassistant;

import android.os.Bundle;
import android.support.design.widget.FloatingActionButton;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.view.View;
import android.view.Menu;
import android.view.MenuItem;
import android.widget.TextView;
import android.widget.Toast;

import com.axzae.homeassistant.fragment.control.InputSliderFragment;
import com.axzae.homeassistant.fragment.control.LightFragment;
import com.axzae.homeassistant.model.Entity;
import com.axzae.homeassistant.model.rest.CallServiceRequest;
import com.axzae.homeassistant.shared.EntityProcessInterface;

import java.util.Locale;

/**
 * Developer playground activity for the HomeAssist control builder: the FAB
 * loads a sample entity from a bundled JSON file and shows the matching
 * control dialog fragment; service calls are rendered as text instead of
 * being sent to a server.
 */
public class MainActivity extends AppCompatActivity implements EntityProcessInterface {

    // Displays the would-be HTTP request produced by callService().
    private TextView mDataTextView;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar);
        setSupportActionBar(toolbar);
        setTitle("HomeAssist Control Builder");
        mDataTextView = findViewById(R.id.text_data);
        FloatingActionButton fab = (FloatingActionButton) findViewById(R.id.fab);
        fab.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                showEntityAction();
            }
        });
    }

    // Loads the sample entity JSON and shows the control fragment for its domain.
    private void showEntityAction() {
        try {
            //TODO: LOAD YOUR ENTITY HERE:
            //Entity entity = Entity.getInstance(this, "light.hue_sphere.json");
            Entity entity = Entity.getInstance(this, "input_slider.bedroom_brightness.json");
            if (entity != null) {
                switch (entity.getDomain()) {
                    case "input_slider": {
                        InputSliderFragment fragment = InputSliderFragment.newInstance(entity);
                        fragment.show(getFragmentManager(), null);
                        break;
                    }
                    case "light": {
                        LightFragment fragment = LightFragment.newInstance(entity);
                        fragment.show(getFragmentManager(), null);
                        break;
                    }
                    default:
                        Toast.makeText(this, "Unknown entity: " + entity.getDomain(), Toast.LENGTH_SHORT).show();
                }
            }
        } catch (Exception e) {
            // NOTE(review): failures while loading the sample JSON are only
            // logged to stderr — acceptable for a demo/builder tool.
            e.printStackTrace();
        }
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // No menu items; returning true keeps the (empty) menu visible.
        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        int id = item.getItemId();
        return super.onOptionsItemSelected(item);
    }

    /**
     * EntityProcessInterface callback: instead of performing the REST call,
     * renders the equivalent HTTP request into the on-screen text view.
     */
    @Override
    public void callService(String domain, String service, CallServiceRequest serviceRequest) {
        String requestPayload = String.format(Locale.ENGLISH, "POST /api/services/%s/%s HTTP/1.1\n%s", domain, service, serviceRequest.toString());
        mDataTextView.setText(requestPayload);
        //Snackbar.make(findViewById(android.R.id.content), , Snackbar.LENGTH_LONG).setAction("Action", null).show();
    }
}
apache-2.0
scray/scray
scray-querying/modules/scray-querying/src/test/scala/scray/querying/planning/MaterializedViewSpecs.scala
3322
package scray.querying.planning

import org.junit.runner.RunWith
import org.scalatest.WordSpec
import org.scalatest.junit.JUnitRunner
import java.util.ArrayList
import scray.querying.queries.SimpleQuery
import scray.querying.description.And
import scray.querying.description.Column
import scray.querying.description.Equal
import scray.querying.description.TableIdentifier
import scray.querying.description.internal.SingleValueDomain
import scray.querying.description.internal.MaterializedViewQueryException
import scray.querying.description.internal.QueryDomainParserExceptionReasons
import scray.querying.description.Or
import scray.querying.description.internal.Domain
import org.scalatest.Assertions._
import scala.collection.mutable.HashMap
import scray.querying.description.SimpleRow
import scray.querying.description.internal.MaterializedView
import scray.querying.queries.DomainQuery
import scray.querying.description.TableIdentifier
import scray.querying.queries.SimpleQuery
import scray.common.key.OrderedStringKeyGenerator

/**
 * Specs for the Planner's materialized-view query rewriting: queries are
 * flattened to conjunctive form and turned into a single key lookup whose
 * value is generated by OrderedStringKeyGenerator.
 */
@RunWith(classOf[JUnitRunner])
class MaterializedViewSpecs extends WordSpec {
  "Planer " should {
    " transform AND-Query to mv query" in {
      val ti = TableIdentifier("cassandra", "mytestspace", "mycf")
      val query = SimpleQuery("", ti,
        where = Some(
          And(
            Equal(Column("col4", ti), 4),
            Equal(Column("col8", ti), 2)
          )
        )
      )
      // An AND query stays one conjunctive query and maps to one combined key.
      val flatQueries = Planner.distributiveOrReductionToConjunctiveQuery(query)
      assert(flatQueries.size == 1)
      val domains = Planner.qualifyPredicates(flatQueries.head)
      val mvDomaine = Planner.getMvQuery(domains, query, ti, "key", OrderedStringKeyGenerator)
      assert(mvDomaine.column.columnName == "key")
      assert(mvDomaine.value == "col4=4_col8=2")
    }
    " transform OR-Query to mv query" in {
      val ti = TableIdentifier("cassandra", "mytestspace", "mycf")
      val query = SimpleQuery("", ti,
        where = Some(
          Or(
            Equal(Column("col4", ti), 4),
            Equal(Column("col8", ti), 2)
          )
        )
      )
      // An OR query is split into two conjunctive queries, one key each.
      val flatQueries = Planner.distributiveOrReductionToConjunctiveQuery(query)
      assert(flatQueries.size == 2)
      val domains1 = Planner.qualifyPredicates(flatQueries.head)
      val mvDomaine1 = Planner.getMvQuery(domains1, query, ti, "key", OrderedStringKeyGenerator)
      assert(mvDomaine1.column.columnName == "key")
      assert(mvDomaine1.value == "col4=4")
      val domains2 = Planner.qualifyPredicates(flatQueries.tail.head)
      val mvDomaine2 = Planner.getMvQuery(domains2, query, ti, "key", OrderedStringKeyGenerator)
      assert(mvDomaine2.column.columnName == "key")
      assert(mvDomaine2.value == "col8=2")
    }
    " create default key mv query " in {
      val ti = TableIdentifier("cassandra", "mytestspace", "mycf")
      val query = SimpleQuery("", ti, where = None)
      // No predicates at all yields the default key value "_".
      val flatQueries = Planner.distributiveOrReductionToConjunctiveQuery(query)
      assert(flatQueries.size == 1)
      val domains = Planner.qualifyPredicates(flatQueries.head)
      val mvDomaine = Planner.getMvQuery(domains, query, ti, "key", OrderedStringKeyGenerator)
      assert(mvDomaine.column.columnName == "key")
      assert(mvDomaine.value == "_")
    }
  }
}
apache-2.0
iocanel/sundrio
annotations/dsl/src/main/java/io/sundr/dsl/internal/visitors/TypeArgumentReplace.java
1500
/* * Copyright 2016 The original authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.sundr.dsl.internal.visitors; import io.sundr.builder.TypedVisitor; import io.sundr.codegen.model.ClassRefBuilder; import io.sundr.codegen.model.TypeRef; import java.util.ArrayList; import java.util.List; public class TypeArgumentReplace extends TypedVisitor<ClassRefBuilder> { private final TypeRef target; private final TypeRef replacement; public TypeArgumentReplace(TypeRef target, TypeRef replacement) { this.target = target; this.replacement = replacement; } public void visit(ClassRefBuilder builder) { List<TypeRef> updated = new ArrayList<TypeRef>(); for (TypeRef typeArgument : builder.getArguments()) { if (typeArgument.equals(target)) { updated.add(replacement); } else { updated.add(typeArgument); } } } }
apache-2.0
lanimall/messaging-samples
libs/src/main/java/com/softwareaggov/messaging/libs/utils/impl/CounterImpl.java
5033
/* * Copyright © 2016 - 2018 Software AG, Darmstadt, Germany and/or its licensors * * SPDX-License-Identifier: Apache-2.0 * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.softwareaggov.messaging.libs.utils.impl; import com.softwareaggov.messaging.libs.utils.Counter; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.*; import java.util.concurrent.*; /** * Created by fabien.sanglier on 6/28/16. */ public class CounterImpl implements Counter { private static Logger log = LoggerFactory.getLogger(CounterImpl.class); private final ConcurrentHashMap<String, Long> counters; private volatile Long lastCounterCheckpointTime; private volatile HashMap<String, Long> counterPreviousCheckpoint; private volatile HashMap<String, Long> countersRates; private final ScheduledExecutorService scheduler = Executors.newScheduledThreadPool(1); private ScheduledFuture<?> rateCalculationHander = null; public CounterImpl() { this.counters = new ConcurrentHashMap<String, Long>(); this.counterPreviousCheckpoint = new HashMap<String, Long>(); this.countersRates = new HashMap<String, Long>(); } public void startRateCalculator() { final Runnable calculateRates = new Runnable() { public void run() { calculateRates(); } }; rateCalculationHander = scheduler.scheduleAtFixedRate(calculateRates, 10, 5, TimeUnit.SECONDS); } public void stopRateCalculator() { if (null != rateCalculationHander) rateCalculationHander.cancel(true); } @Override public String[] getAllCounterNames() { 
List<String> counterNames = new ArrayList<String>(); //get all the keys and add to list Enumeration<String> keys = counters.keys(); while (keys.hasMoreElements()) { counterNames.add(keys.nextElement()); } //return new array return counterNames.toArray(new String[counterNames.size()]); } @Override public long getCount(String key) { long count = (counters.containsKey(key)) ? counters.get(key) : 0L; log.debug(String.format("getting counter for key %s = %d", key, count)); return count; } @Override public long getCountRate(String key) { long rate = (countersRates.containsKey(key)) ? countersRates.get(key) : 0L; log.debug(String.format("getting counter rate for key %s = %d", key, rate)); return rate; } @Override public long incrementAndGet(String key) { log.debug("incrementing counter key " + key); if (counters.putIfAbsent(key, new Long(1)) == null) { log.debug("initialized new key and put count to 1"); return 1L; } //cas loop Long oldVal, newVal; do { oldVal = counters.get(key); newVal = oldVal + 1; } while (!counters.replace(key, oldVal, newVal)); // Assumes no removal. 
log.debug("new count post increment:" + newVal.toString()); return newVal; } @Override public long reset(String key) { log.debug("resetting counter key " + key); return counters.replace(key, new Long(0)); } private void calculateRates() { long now = new Date().getTime(); long timeSinceLastCheckpoint = 0L; if (null != lastCounterCheckpointTime) { timeSinceLastCheckpoint = now - lastCounterCheckpointTime; } for (Map.Entry<String, Long> entry : counters.entrySet()) { //make copies to make sure nothing changes during the processing here String currentCounterKey = entry.getKey(); long currentCount = entry.getValue(); if (counterPreviousCheckpoint.containsKey(currentCounterKey) && timeSinceLastCheckpoint > 0) { long diffCount = currentCount - counterPreviousCheckpoint.get(currentCounterKey); if (diffCount > 0) countersRates.put(currentCounterKey, diffCount * 1000 / timeSinceLastCheckpoint); else countersRates.put(currentCounterKey, 0L); } else { countersRates.put(currentCounterKey, 0L); } //save the current values in the previous checkpoint hashmap counterPreviousCheckpoint.put(currentCounterKey, currentCount); } lastCounterCheckpointTime = now; } }
apache-2.0
brainix/pottery
tests/test_timer.py
3042
# --------------------------------------------------------------------------- #
#   test_timer.py                                                             #
#                                                                             #
#   Copyright © 2015-2022, Rajiv Bakulesh Shah, original author.              #
#                                                                             #
#   Licensed under the Apache License, Version 2.0 (the "License");           #
#   you may not use this file except in compliance with the License.          #
#   You may obtain a copy of the License at:                                  #
#       http://www.apache.org/licenses/LICENSE-2.0                            #
#                                                                             #
#   Unless required by applicable law or agreed to in writing, software       #
#   distributed under the License is distributed on an "AS IS" BASIS,         #
#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  #
#   See the License for the specific language governing permissions and       #
#   limitations under the License.                                            #
# --------------------------------------------------------------------------- #


import time

from pottery import ContextTimer
from tests.base import TestCase


class ContextTimerTests(TestCase):
    'Tests for ContextTimer: start/stop state machine and elapsed() readings.'

    # Tolerance for timing assertions, in milliseconds.
    _ACCURACY = 50  # in milliseconds

    def setUp(self):
        super().setUp()
        self.timer = ContextTimer()

    def _confirm_elapsed(self, expected):
        # Assert elapsed() is within [expected, expected + _ACCURACY) ms.
        elapsed = self.timer.elapsed()
        assert elapsed >= expected, f'elapsed ({elapsed}) is not >= expected ({expected})'
        assert elapsed < expected + self._ACCURACY, f'elapsed ({elapsed}) is not < expected ({expected + self._ACCURACY})'

    def test_start_stop_and_elapsed(self):
        # timer hasn't been started: elapsed()/stop() must raise
        with self.assertRaises(RuntimeError):
            self.timer.elapsed()
        with self.assertRaises(RuntimeError):
            self.timer.stop()

        # timer has been started but not stopped: double start must raise,
        # elapsed() tracks wall time
        self.timer.start()
        with self.assertRaises(RuntimeError):
            self.timer.start()
        time.sleep(0.1)
        self._confirm_elapsed(1*100)
        self.timer.stop()

        # timer has been stopped: elapsed() is frozen; restart/restop raise
        with self.assertRaises(RuntimeError):
            self.timer.start()
        time.sleep(0.1)
        self._confirm_elapsed(1*100)
        with self.assertRaises(RuntimeError):
            self.timer.stop()

    def test_context_manager(self):
        # Inside the `with`, elapsed() advances with wall time.
        with self.timer:
            self._confirm_elapsed(0)
            for iteration in range(1, 3):
                with self.subTest(iteration=iteration):
                    time.sleep(0.1)
                    self._confirm_elapsed(iteration*100)
            self._confirm_elapsed(iteration*100)

        # After exiting, elapsed() stays frozen even as wall time passes.
        time.sleep(0.1)
        self._confirm_elapsed(iteration*100)

        # A stopped timer cannot be re-entered.
        with self.assertRaises(RuntimeError), self.timer:  # pragma: no cover
            ...
apache-2.0
googleapis/api-compiler
src/main/java/com/google/api/tools/framework/importers/swagger/merger/ServiceMerger.java
1527
/* * Copyright (C) 2016 Google, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.api.tools.framework.importers.swagger.merger; import com.google.api.Service; import com.google.api.tools.framework.importers.swagger.MultiOpenApiParser.OpenApiFile; import java.util.List; /** Merger for converted Swagger to {@link Service} objects. */ public class ServiceMerger { /** Merges multiple built {@link OpenApiFile}s into a single {@link Service} */ public Service merge(List<Service.Builder> serviceBuliders) { Service.Builder serviceBuilder = serviceBuliders.get(0); serviceBuilder.addAllTypes(TypesBuilderFromDescriptor.createAdditionalServiceTypes()); serviceBuilder.addAllEnums(TypesBuilderFromDescriptor.createAdditionalServiceEnums()); // Currently all SwaggerFiles use the same Service.proto to build, so we just pull the first one // TODO (adwright): Separate this to build multiple service.proto and have a merging phase. return serviceBuilder.build(); } }
apache-2.0
cedral/aws-sdk-cpp
aws-cpp-sdk-elasticache/source/model/ProcessedUpdateAction.cpp
4591
/*
* Copyright 2010-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
*  http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/

// Generated-style AWS SDK model: XML deserialization and query-string
// serialization for the ElastiCache ProcessedUpdateAction shape.
// Each member has a companion m_*HasBeenSet flag so only populated fields
// are serialized.

#include <aws/elasticache/model/ProcessedUpdateAction.h>
#include <aws/core/utils/xml/XmlSerializer.h>
#include <aws/core/utils/StringUtils.h>
#include <aws/core/utils/memory/stl/AWSStringStream.h>

#include <utility>

using namespace Aws::Utils::Xml;
using namespace Aws::Utils;

namespace Aws
{
namespace ElastiCache
{
namespace Model
{

// Default constructor: all fields unset; enum starts at NOT_SET.
ProcessedUpdateAction::ProcessedUpdateAction() :
    m_replicationGroupIdHasBeenSet(false),
    m_cacheClusterIdHasBeenSet(false),
    m_serviceUpdateNameHasBeenSet(false),
    m_updateActionStatus(UpdateActionStatus::NOT_SET),
    m_updateActionStatusHasBeenSet(false)
{
}

// XML constructor: delegates to operator= for the actual parsing.
ProcessedUpdateAction::ProcessedUpdateAction(const XmlNode& xmlNode) :
    m_replicationGroupIdHasBeenSet(false),
    m_cacheClusterIdHasBeenSet(false),
    m_serviceUpdateNameHasBeenSet(false),
    m_updateActionStatus(UpdateActionStatus::NOT_SET),
    m_updateActionStatusHasBeenSet(false)
{
  *this = xmlNode;
}

// Populates the members from an XML response node; absent child nodes leave
// the corresponding HasBeenSet flag false.
ProcessedUpdateAction& ProcessedUpdateAction::operator =(const XmlNode& xmlNode)
{
  XmlNode resultNode = xmlNode;

  if(!resultNode.IsNull())
  {
    XmlNode replicationGroupIdNode = resultNode.FirstChild("ReplicationGroupId");
    if(!replicationGroupIdNode.IsNull())
    {
      m_replicationGroupId = Aws::Utils::Xml::DecodeEscapedXmlText(replicationGroupIdNode.GetText());
      m_replicationGroupIdHasBeenSet = true;
    }
    XmlNode cacheClusterIdNode = resultNode.FirstChild("CacheClusterId");
    if(!cacheClusterIdNode.IsNull())
    {
      m_cacheClusterId = Aws::Utils::Xml::DecodeEscapedXmlText(cacheClusterIdNode.GetText());
      m_cacheClusterIdHasBeenSet = true;
    }
    XmlNode serviceUpdateNameNode = resultNode.FirstChild("ServiceUpdateName");
    if(!serviceUpdateNameNode.IsNull())
    {
      m_serviceUpdateName = Aws::Utils::Xml::DecodeEscapedXmlText(serviceUpdateNameNode.GetText());
      m_serviceUpdateNameHasBeenSet = true;
    }
    XmlNode updateActionStatusNode = resultNode.FirstChild("UpdateActionStatus");
    if(!updateActionStatusNode.IsNull())
    {
      // Enum value arrives as text; trim whitespace before mapping.
      m_updateActionStatus = UpdateActionStatusMapper::GetUpdateActionStatusForName(StringUtils::Trim(Aws::Utils::Xml::DecodeEscapedXmlText(updateActionStatusNode.GetText()).c_str()).c_str());
      m_updateActionStatusHasBeenSet = true;
    }
  }

  return *this;
}

// Serializes the set members as indexed query parameters (used inside lists).
void ProcessedUpdateAction::OutputToStream(Aws::OStream& oStream, const char* location, unsigned index, const char* locationValue) const
{
  if(m_replicationGroupIdHasBeenSet)
  {
      oStream << location << index << locationValue << ".ReplicationGroupId=" << StringUtils::URLEncode(m_replicationGroupId.c_str()) << "&";
  }
  if(m_cacheClusterIdHasBeenSet)
  {
      oStream << location << index << locationValue << ".CacheClusterId=" << StringUtils::URLEncode(m_cacheClusterId.c_str()) << "&";
  }
  if(m_serviceUpdateNameHasBeenSet)
  {
      oStream << location << index << locationValue << ".ServiceUpdateName=" << StringUtils::URLEncode(m_serviceUpdateName.c_str()) << "&";
  }
  if(m_updateActionStatusHasBeenSet)
  {
      oStream << location << index << locationValue << ".UpdateActionStatus=" << UpdateActionStatusMapper::GetNameForUpdateActionStatus(m_updateActionStatus) << "&";
  }
}

// Serializes the set members as un-indexed query parameters.
void ProcessedUpdateAction::OutputToStream(Aws::OStream& oStream, const char* location) const
{
  if(m_replicationGroupIdHasBeenSet)
  {
      oStream << location << ".ReplicationGroupId=" << StringUtils::URLEncode(m_replicationGroupId.c_str()) << "&";
  }
  if(m_cacheClusterIdHasBeenSet)
  {
      oStream << location << ".CacheClusterId=" << StringUtils::URLEncode(m_cacheClusterId.c_str()) << "&";
  }
  if(m_serviceUpdateNameHasBeenSet)
  {
      oStream << location << ".ServiceUpdateName=" << StringUtils::URLEncode(m_serviceUpdateName.c_str()) << "&";
  }
  if(m_updateActionStatusHasBeenSet)
  {
      oStream << location << ".UpdateActionStatus=" << UpdateActionStatusMapper::GetNameForUpdateActionStatus(m_updateActionStatus) << "&";
  }
}

} // namespace Model
} // namespace ElastiCache
} // namespace Aws
apache-2.0
tzou24/BPS
BPS/src/com/frameworkset/platform/cms/util/FtpUpfile.java
16503
package com.frameworkset.platform.cms.util;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.StringTokenizer;

import org.apache.commons.net.ftp.FTP;
import org.apache.commons.net.ftp.FTPClient;
import org.apache.commons.net.ftp.FTPFile;
import org.apache.commons.net.ftp.FTPFileFilter;

import com.frameworkset.platform.cms.driver.context.Context;
import com.frameworkset.platform.cms.driver.context.FTPConfig;
import com.frameworkset.platform.cms.driver.distribute.DistributeDestination;

/**
 * FTP upload/download helper built on Apache Commons Net {@link FTPClient}.
 *
 * <p>Used by the CMS publisher to mirror a locally generated site directory to
 * a remote FTP server ({@link #do_ftp_upload(Context)} /
 * {@link #upload(DistributeDestination)}).
 *
 * <p>Remote path convention throughout this class: absolute paths start with
 * {@code "/"}; before being handed to {@code FTPClient} the leading slash is
 * stripped via {@code substring(1)}, i.e. paths are sent relative to the FTP
 * login directory. Local paths use {@code '\\'} separators (Windows style).
 *
 * <p>NOTE(review): this class is not thread-safe — it wraps a single stateful
 * {@link FTPClient}; create one instance per transfer session.
 */
public class FtpUpfile {

    private FTPClient ftpClient;
    private String ipAddress;
    private int ipPort;
    private String userName;
    private String PassWord;
    // Local/remote roots used only by updir(String) and upfilename(String);
    // the hard-coded values below come from the original test setup.
    private String localrootpath;
    private String remoterootpath;

    public static final int BINARY_FILE_TYPE = FTP.BINARY_FILE_TYPE;
    public static final int ASCII_FILE_TYPE = FTP.ASCII_FILE_TYPE;

    /**
     * Creates a client for the given host using the default FTP port 21.
     *
     * @param ip       server IP or host name
     * @param username FTP user name
     * @param password FTP password
     * @throws Exception never thrown here; kept for signature compatibility
     */
    public FtpUpfile(String ip, String username, String password) throws Exception {
        this(ip, 21, username, password);
    }

    /**
     * Creates a client for the given host and port.
     *
     * @param ip       server IP or host name
     * @param port     FTP control port
     * @param username FTP user name
     * @param password FTP password
     */
    public FtpUpfile(String ip, int port, String username, String password) {
        ftpClient = new FTPClient();
        this.ipAddress = ip;
        this.ipPort = port;
        this.userName = username;
        this.PassWord = password;
        // Test defaults carried over from the original code.
        localrootpath = "F:";
        remoterootpath = "/abc";
    }

    public FtpUpfile() {
        super();
    }

    /**
     * Connects and logs in to the FTP server, then switches to binary mode.
     *
     * @return {@code true} if the login succeeded
     * @throws Exception on connection failure
     */
    public boolean login() throws Exception {
        ftpClient.connect(ipAddress, ipPort);
        boolean flag = ftpClient.login(userName, PassWord);
        ftpClient.setFileType(FtpUpfile.BINARY_FILE_TYPE);
        return flag;
    }

    /**
     * Logs out and disconnects from the FTP server, if connected.
     *
     * @throws Exception on network failure during logout/disconnect
     */
    public void logout() throws Exception {
        if (ftpClient.isConnected()) {
            // FIX: the original only disconnected the socket; send a proper
            // FTP QUIT first so the server can close the session cleanly.
            ftpClient.logout();
            ftpClient.disconnect();
        }
    }

    /**
     * Sets the transfer type ({@link #BINARY_FILE_TYPE} or {@link #ASCII_FILE_TYPE}).
     */
    public void setFileType(int fileType) throws IOException {
        ftpClient.setFileType(fileType);
    }

    /**
     * Creates a remote directory.
     *
     * @param pathName absolute remote path
     * @return {@code true} on success
     */
    public boolean createDirectory(String pathName) throws IOException {
        return ftpClient.makeDirectory(pathName);
    }

    /** Removes an (empty) remote directory. */
    public boolean removeDirectory(String path) throws IOException {
        return ftpClient.removeDirectory(path);
    }

    /** Deletes a single remote file. */
    public boolean deleteFile(String pathName) throws IOException {
        return ftpClient.deleteFile(pathName);
    }

    /** Returns whether the given path is the remote root ("/"). */
    private boolean isRoot(String path) {
        return "/".equals(path);
    }

    /**
     * Removes a remote directory, optionally recursively.
     *
     * @param path  remote directory
     * @param isAll {@code true}: delete all contained files and directories;
     *              {@code false}: only delete if already empty
     * @return {@code true} on success
     */
    private boolean removeDirectory(String path, boolean isAll) throws IOException {
        if (!isAll) {
            return removeDirectory(path);
        }
        FTPFile[] ftpFileArr = isRoot(path) ? ftpClient.listFiles() : ftpClient.listFiles(path);
        System.out.println(ftpFileArr.length);
        if (ftpFileArr == null || ftpFileArr.length == 0) {
            return removeDirectory(path);
        }
        for (int i = 0; i < ftpFileArr.length; i++) {
            String name = ftpFileArr[i].getName();
            if (ftpFileArr[i].isDirectory()) {
                // Skip the "." / ".." pseudo entries to avoid infinite recursion.
                if (!name.equals(".") && !name.equals("..")) {
                    if (isRoot(path)) {
                        removeDirectory(name, true);
                    } else {
                        removeDirectory(path + "/" + name, true);
                    }
                }
            } else if (ftpFileArr[i].isFile()) {
                if (isRoot(path)) {
                    deleteFile(name);
                } else {
                    deleteFile(path + "/" + name);
                }
            } else if (ftpFileArr[i].isSymbolicLink()) {
                // Intentionally ignored, as in the original implementation.
            } else if (ftpFileArr[i].isUnknown()) {
                // Intentionally ignored, as in the original implementation.
            }
        }
        return ftpClient.removeDirectory(path);
    }

    /**
     * Recursively deletes everything under the given remote path.
     *
     * @param path absolute remote path (leading "/")
     * @return {@code true} on success
     */
    public boolean deleteAll(String path) throws IOException {
        if (isRoot(path)) {
            return this.removeDirectory(path, true);
        }
        return this.removeDirectory(path.substring(1), true);
    }

    /**
     * Uploads the directory {@code localrootpath + path} to
     * {@code remoterootpath + path}, recursing into subdirectories.
     * Errors are logged and swallowed (best-effort upload).
     *
     * @param path path relative to the configured roots, using '\\' separators
     */
    public void updir(String path) {
        String dirname = localrootpath + path;
        String dirnameftp = remoterootpath + path.replace('\\', '/');
        try {
            File source = new File(dirname);
            String[] filename = source.list();
            for (int i = 0; i < filename.length; i++) {
                File filemen = new File(source.getPath(), filename[i]);
                if (filemen.isDirectory()) {
                    updir(path + "\\" + filemen.getName());
                } else {
                    String sourcefile = dirname + "\\" + filemen.getName();
                    String desicfile = dirnameftp + "/" + filemen.getName();
                    upFile(sourcefile, desicfile);
                }
            }
        } catch (Exception ei) {
            ei.printStackTrace();
        }
    }

    /**
     * Uploads a local directory tree to a remote directory.
     *
     * @param localrootpath  local source directory
     * @param remoterootpath remote target directory (absolute, "/" for root)
     * @param p              unused; kept for signature compatibility
     */
    public void updir(String localrootpath, String remoterootpath, String p) {
        String dirname = localrootpath;
        String dirnameftp = remoterootpath;
        try {
            try {
                // Best effort: ensure the remote target directory chain exists.
                buildList(dirnameftp);
            } catch (Exception ignored) {
                // Deliberately ignored — the directories may already exist.
            }
            File source = new File(dirname);
            String[] filename = source.list();
            for (int i = 0; i < filename.length; i++) {
                File filemen = new File(source.getPath(), filename[i]);
                if (filemen.isDirectory()) {
                    // Create the matching remote directory, then recurse.
                    try {
                        if (dirnameftp.equals("/")) {
                            createDirectory(filemen.getName());
                        } else {
                            createDirectory(dirnameftp + "/" + filemen.getName());
                        }
                    } catch (Exception e12) {
                        // FIX: the original set e12 = null and then called
                        // e12.printStackTrace(), throwing an NPE that aborted
                        // the whole upload. Log and continue instead.
                        e12.printStackTrace();
                    }
                    if (remoterootpath.equals("/")) {
                        updir(localrootpath + "\\" + filemen.getName(),
                              "/" + filemen.getName(), filemen.getName());
                    } else {
                        updir(localrootpath + "\\" + filemen.getName(),
                              remoterootpath + "/" + filemen.getName(), filemen.getName());
                    }
                } else {
                    String sourcefile = dirname + "\\" + filemen.getName();
                    String desicfile;
                    if (dirnameftp.equals("/")) {
                        desicfile = "/" + filemen.getName();
                    } else {
                        desicfile = dirnameftp + "/" + filemen.getName();
                    }
                    upFile(sourcefile, desicfile);
                }
            }
        } catch (Exception ei) {
            ei.printStackTrace();
        }
    }

    /**
     * Uploads one file; failures are logged and swallowed.
     *
     * @param sourcefile local path + file name
     * @param desicfile  remote path + file name
     * @param pakg       unused; kept for signature compatibility
     */
    public void uploadFtp(String sourcefile, String desicfile, String pakg) {
        try {
            upFile(sourcefile, desicfile);
        } catch (FileNotFoundException e) {
            String message = "上传" + sourcefile + "文件失败!";
            System.out.println(message + e);
        } catch (Exception ee) {
            ee.printStackTrace();
        }
    }

    /**
     * Uploads a local file, creating the remote parent directories first.
     *
     * @param source      local file path
     * @param destination absolute remote path + file name (must contain "/")
     * @throws Exception if the remote directory chain cannot be built
     */
    public void upFile(String source, String destination) throws Exception {
        buildList(destination.substring(0, destination.lastIndexOf("/")));
        InputStream iStream = null;
        try {
            iStream = new FileInputStream(source);
            ftpClient.storeFile(destination.substring(1), iStream);
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            if (iStream != null) {
                iStream.close();
            }
        }
    }

    /**
     * Creates the remote directory chain for a "/"-separated path,
     * one segment at a time (already-existing segments simply fail silently
     * at the server side).
     */
    public void buildList(String pathList) throws Exception {
        StringTokenizer s = new StringTokenizer(pathList, "/");
        String pathName = "";
        while (s.hasMoreElements()) {
            if (pathName.equals("")) {
                pathName = (String) s.nextElement();
            } else {
                pathName = pathName + "/" + (String) s.nextElement();
            }
            createDirectory(pathName);
        }
    }

    /**
     * Uploads a single file addressed relative to the configured roots,
     * e.g. {@code "\\sitea\\ml\\abc.htm"}.
     */
    public void upfilename(String localfilename) throws Exception {
        String sourcefile = localrootpath + localfilename;
        File file1 = new File(sourcefile);
        if (file1.isFile()) {
            String desicfile = remoterootpath + localfilename.replace('\\', '/');
            upFile(sourcefile, desicfile);
        }
    }

    /**
     * Lists the plain file names (no directories) under a remote path.
     *
     * @param fullPath absolute remote path (leading "/")
     * @return names of the files found; empty list when none
     */
    public ArrayList fileNames(String fullPath) throws Exception {
        FTPFile[] ftpFiles;
        if (isRoot(fullPath)) {
            ftpFiles = ftpClient.listFiles();
        } else {
            ftpFiles = ftpClient.listFiles(fullPath.substring(1));
        }
        ArrayList retList = new ArrayList();
        if (ftpFiles == null || ftpFiles.length == 0) {
            return retList;
        }
        for (int i = 0; i < ftpFiles.length; i++) {
            if (ftpFiles[i].isFile()) {
                retList.add(ftpFiles[i].getName());
            }
        }
        return retList;
    }

    /**
     * Lists the plain file names under a remote path, filtered server-side.
     *
     * @param fullPath absolute remote path (leading "/")
     * @param filter   file filter applied to the listing
     * @return names of the matching files; empty list when none
     */
    public ArrayList fileNames(String fullPath, FTPFileFilter filter) throws Exception {
        FTPFile[] ftpFiles;
        if (isRoot(fullPath)) {
            ftpFiles = ftpClient.listFiles("/", filter);
        } else {
            ftpFiles = ftpClient.listFiles(fullPath.substring(1), filter);
        }
        ArrayList retList = new ArrayList();
        if (ftpFiles == null || ftpFiles.length == 0) {
            return retList;
        }
        for (int i = 0; i < ftpFiles.length; i++) {
            if (ftpFiles[i].isFile()) {
                retList.add(ftpFiles[i].getName());
            }
        }
        return retList;
    }

    /**
     * Uploads an in-memory byte array as a remote file (binary mode required).
     * Suited to request-stream uploads from JSP; overwrites existing files.
     *
     * @param sourceData  file content
     * @param destination absolute remote path + file name
     */
    public void upFile(byte[] sourceData, String destination) throws Exception {
        buildList(destination.substring(0, destination.lastIndexOf("/")));
        ByteArrayInputStream bis = null;
        try {
            bis = new ByteArrayInputStream(sourceData);
            ftpClient.storeFile(destination.substring(1), bis);
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            if (bis != null) {
                bis.close();
            }
        }
    }

    /**
     * Downloads a remote file into a byte array.
     *
     * <p>NOTE(review): {@code retrieveFileStream} normally requires a
     * {@code completePendingCommand()} call after the stream is consumed;
     * the original code never did this — confirm against the server used.
     *
     * @param sourceFileName absolute remote path (leading "/")
     * @return file content, or {@code null} if the download failed
     */
    public byte[] downFile(String sourceFileName) throws Exception {
        InputStream is = null;
        ByteArrayOutputStream byteOut = null;
        byte[] return_arraybyte = null;
        try {
            is = ftpClient.retrieveFileStream(sourceFileName.substring(1));
            byteOut = new ByteArrayOutputStream();
            byte[] buf = new byte[204800];
            int bufsize;
            while ((bufsize = is.read(buf, 0, buf.length)) != -1) {
                byteOut.write(buf, 0, bufsize);
            }
            return_arraybyte = byteOut.toByteArray();
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            // FIX: guard against NPE when retrieveFileStream failed before the
            // streams were assigned (the original finally block could itself NPE,
            // masking the real error).
            if (byteOut != null) {
                byteOut.close();
            }
            if (is != null) {
                is.close();
            }
        }
        return return_arraybyte;
    }

    /**
     * Downloads a remote file to a local file; both names include full paths.
     */
    public void downFile(String sourceFileName, String destinationFileName) throws Exception {
        byte[] temp = downFile(sourceFileName);
        FileOutputStream ftpOut = new FileOutputStream(destinationFileName);
        try {
            ftpOut.write(temp, 0, temp.length);
        } finally {
            ftpOut.close();
        }
    }

    /**
     * Deletes a single remote file; IO errors are logged and swallowed.
     *
     * @param desc_dir absolute remote path (leading "/")
     */
    public void deleteRemoteFile(String desc_dir) {
        try {
            this.deleteFile(desc_dir.substring(1));
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Publishes the site's temp directory to the FTP target configured in the
     * publishing context. Errors are silently swallowed (as in the original).
     */
    public void do_ftp_upload(Context context) {
        FTPConfig ftpconfig = context.getFTPConfig();
        String src_dir = context.getPublishTemppath() + "/" + context.getSiteDir();
        String desc_dir = ftpconfig.getFtpFolder();
        String ftp_id = ftpconfig.getFtpip();
        String username = ftpconfig.getUsername();
        String password = ftpconfig.getPassword();
        FtpUpfile ftpUtil;
        try {
            ftpUtil = new FtpUpfile(ftp_id, username, password);
            ftpUtil.login();
            ftpUtil.updir(src_dir, desc_dir, "");
            ftpUtil.logout();
        } catch (Exception ei) {
            // Intentionally swallowed, as in the original implementation.
        }
    }

    /**
     * Publishes a site's temp directory to a distribution destination's FTP
     * target. Errors are silently swallowed (as in the original).
     */
    public void upload(DistributeDestination distributeDestination) {
        FTPConfig ftpconfig = distributeDestination.getFtpconfig();
        String src_dir = distributeDestination.getPublishTemppath() + "/"
                + distributeDestination.getSite().getSiteDir();
        String desc_dir = ftpconfig.getFtpFolder();
        String ftp_id = ftpconfig.getFtpip();
        String username = ftpconfig.getUsername();
        String password = ftpconfig.getPassword();
        FtpUpfile ftpUtil;
        try {
            ftpUtil = new FtpUpfile(ftp_id, username, password);
            ftpUtil.login();
            ftpUtil.updir(src_dir, desc_dir, "");
            ftpUtil.logout();
        } catch (Exception ei) {
            // Intentionally swallowed, as in the original implementation.
        }
    }

    /**
     * Connectivity check: logs in and out once.
     *
     * @return {@code true} if login succeeded
     * @throws Exception wrapping any connection failure
     */
    public boolean testftp() throws Exception {
        boolean ret = false;
        try {
            ret = login();
            logout();
        } catch (Exception ex) {
            ret = false;
            throw new Exception(ex);
        }
        return ret;
    }

    /** Manual smoke test against a hard-coded development server. */
    public static void main(String[] args) throws Exception {
        FtpUpfile ftp = new FtpUpfile("172.16.168.192", "taodd", "tao");
        System.out.println(ftp.testftp());
    }
}
apache-2.0
DevComPack/setupmaker
src/main/java/dcp/logic/model/Group.java
653
package dcp.logic.model; import java.io.Serializable; import org.apache.pivot.collections.ArrayList; import org.apache.pivot.collections.List; public class Group implements Serializable { /** * Class written into save file */ private static final long serialVersionUID = 7019635543387148548L; //Groups public String installGroups = "";//Install Groups * //Attributes public String name;//Group name public Group parent = null;//Parent group, if any public String description = "";//Group's description //Childs public List<Group> children = new ArrayList<Group>();//Childs, if is parent }
apache-2.0
oakkitten/weechat-android
app/src/main/java/com/ubergeek42/WeechatAndroid/media/Exceptions.java
4663
package com.ubergeek42.WeechatAndroid.media;

import android.text.TextUtils;

import androidx.annotation.Nullable;

import com.ubergeek42.WeechatAndroid.BuildConfig;
import com.ubergeek42.WeechatAndroid.utils.Utils;

import java.io.IOException;

import okhttp3.MediaType;

/**
 * Namespace for the media-fetching exception hierarchy. Every exception
 * carries an integer code (either an HTTP status or one of the
 * {@code Cache.ERROR_*} constants) so failures can be cached/classified.
 */
class Exceptions {
    /** Base exception: an {@link IOException} tagged with an error code. */
    static class CodeException extends IOException {
        final private int code;

        CodeException(int code) {
            this.code = code;
        }

        int getCode() {
            return code;
        }
    }

    /** The fetched HTML page did not yield an image URL (e.g. via an og:image tag). */
    static class HtmlBodyLacksRequiredDataException extends CodeException {
        final CharSequence body;

        HtmlBodyLacksRequiredDataException(CharSequence body) {
            super(Cache.ERROR_HTML_BODY_LACKS_REQUIRED_DATA);
            this.body = body;
        }

        @Override @Nullable public String getMessage() {
            String message = "Couldn't get request url from body";
            // In debug builds, include a body excerpt to show whether "og:image"
            // was present at all or present but not matched by the extractor.
            if (BuildConfig.DEBUG) {
                int idx = body.toString().indexOf("og:image");
                message += idx == -1 ?
                        " (no 'og:image' found): " + Utils.getLongStringSummary(body) :
                        " ('og:image' found but regex failed): " + Utils.getLongStringExcerpt(body, idx + 5);
            }
            return message;
        }
    }

    /** The response's declared Content-Length exceeds the configured maximum. */
    static class ContentLengthExceedsLimitException extends CodeException {
        final long contentLength;
        final long maxBodySize;

        ContentLengthExceedsLimitException(long contentLength, long maxBodySize) {
            super(Cache.ERROR_UNACCEPTABLE_FILE_SIZE);
            this.contentLength = contentLength;
            this.maxBodySize = maxBodySize;
        }

        @Override @Nullable public String getMessage() {
            return "Content length of " + contentLength +
                    " exceeds the maximum limit of " + maxBodySize;
        }
    }

    /** A response without Content-Length streamed more bytes than the maximum. */
    static class UnknownLengthStreamExceedsLimitException extends CodeException {
        final long maxBodySize;

        UnknownLengthStreamExceedsLimitException(long maxBodySize) {
            super(Cache.ERROR_UNACCEPTABLE_FILE_SIZE);
            this.maxBodySize = maxBodySize;
        }

        @Override @Nullable public String getMessage() {
            return "Stream of unspecified length exceeded the maximum limit of " + maxBodySize;
        }
    }

    /** The received body is smaller than the configured minimum size. */
    static class BodySizeSmallerThanContentLengthException extends CodeException {
        final long bodySize;
        final long minBodySize;

        BodySizeSmallerThanContentLengthException(long bodySize, long minBodySize) {
            super(Cache.ERROR_UNACCEPTABLE_FILE_SIZE);
            this.bodySize = bodySize;
            this.minBodySize = minBodySize;
        }

        @Override @Nullable public String getMessage() {
            return "Body size of " + bodySize + " smaller than the minimum limit of " + minBodySize;
        }
    }

    /** The response's media type does not satisfy what the request type accepts. */
    static class UnacceptableMediaTypeException extends CodeException {
        final RequestType requestType;
        final MediaType mediaType;

        UnacceptableMediaTypeException(RequestType requestType, MediaType mediaType) {
            super(Cache.ERROR_UNACCEPTABLE_MEDIA_TYPE);
            this.requestType = requestType;
            this.mediaType = mediaType;
        }

        @Override @Nullable public String getMessage() {
            return "Wanted: " + requestType.getShortDescription() + "; got: " + mediaType;
        }
    }

    /** Non-2xx HTTP response; the code is the HTTP status itself. */
    static class HttpException extends CodeException {
        final String reasonPhrase;

        HttpException(int statusCode, String reasonPhrase) {
            super(statusCode);
            this.reasonPhrase = reasonPhrase;
        }

        @Override @Nullable public String getMessage() {
            return "HTTP error " + getCode() + ": " + ensureText(reasonPhrase);
        }
    }

    // Substitutes a placeholder for empty/null reason phrases in messages.
    private static String ensureText(String string) {
        return TextUtils.isEmpty(string) ? "<empty>" : string;
    }

    /** Plain-HTTP fetch refused because the configuration requires SSL. */
    static class SslRequiredException extends CodeException {
        SslRequiredException() {
            super(Cache.ERROR_SSL_REQUIRED);
        }

        @Override @Nullable public String getMessage() {
            return "SSL required";
        }
    }

    /** A redirect landed on a URL for which no fetch strategy is registered. */
    static class RedirectToNullStrategyException extends CodeException {
        final String url;

        RedirectToNullStrategyException(String url) {
            super(Cache.ERROR_REDIRECT_TO_NULL_STRATEGY);
            this.url = url;
        }

        @Override @Nullable public String getMessage() {
            return "Redirected to an address that has a null strategy: " + url;
        }
    }
}
apache-2.0
mailrest/mailrest
app/controllers/box/MessageRefController.scala
2357
/*
 * Copyright (C) 2015 Noorq, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package controllers.box

import java.util.Date
import java.util.HashMap

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
import scala.reflect.runtime.universe

import com.mailrest.maildal.model.Message
import com.mailrest.maildal.model.MessageType

import play.api.data.Form
import play.api.data.Forms._
import play.api.libs.json.JsValue
import play.api.libs.json.Json
import play.api.libs.json.Writes
import play.api.mvc.Controller

import scaldi.Injector
import services.MessageService
import services.NewMessageBean
import utils.ScalaHelper
import java.util.Collections
import scala.collection.JavaConverters
import scala.collection.JavaConversions
import controllers.domain.AbstractDomainController

/**
 * CRUD endpoints for messages inside a mailbox folder.
 *
 * NOTE(review): every action below is an unimplemented stub — each one just
 * completes with 200 OK after the `domainAction(domIdn)` wrapper runs; the
 * injected `messageService` is never used yet. Many of the imports above are
 * presumably for the eventual implementation.
 */
class MessageRefController(implicit inj: Injector) extends AbstractDomainController {

  val messageService = inject [MessageService]

  // GET: list all messages in a folder (stub).
  def findAll(domIdn: String, boxId: String, folderId: String) = domainAction(domIdn).async {
    implicit request => {
      Future.successful(Ok)
    }
  }

  // POST: create a message in a folder (stub).
  def create(domIdn: String, boxId: String, folderId: String) = domainAction(domIdn).async {
    implicit request => {
      Future.successful(Ok)
    }
  }

  // GET: fetch a single message by id (stub).
  def find(domIdn: String, boxId: String, folderId: String, msgId: String) = domainAction(domIdn).async {
    implicit request => {
      Future.successful(Ok)
    }
  }

  // PUT: update a message by id (stub).
  def update(domIdn: String, boxId: String, folderId: String, msgId: String) = domainAction(domIdn).async {
    implicit request => {
      Future.successful(Ok)
    }
  }

  // DELETE: remove a message by id (stub).
  def delete(domIdn: String, boxId: String, folderId: String, msgId: String) = domainAction(domIdn).async {
    implicit request => {
      Future.successful(Ok)
    }
  }
}
apache-2.0
openstack/oslo.concurrency
doc/source/conf.py
1942
# -*- coding: utf-8 -*- # Copyright (C) 2020 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. # -- General configuration ---------------------------------------------------- # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones. extensions = [ 'sphinx.ext.autodoc', 'sphinxcontrib.apidoc', 'openstackdocstheme', 'oslo_config.sphinxext', ] # openstackdocstheme options openstackdocs_repo_name = 'openstack/oslo.concurrency' openstackdocs_bug_project = 'oslo.concurrency' openstackdocs_bug_tag = '' # The master toctree document. master_doc = 'index' # General information about the project. copyright = u'2014, OpenStack Foundation' # If true, '()' will be appended to :func: etc. cross-reference text. add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). add_module_names = True # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'native' # -- Options for HTML output ------------------------------------------------- html_theme = 'openstackdocs' # -- sphinxcontrib.apidoc configuration -------------------------------------- apidoc_module_dir = '../../' apidoc_output_dir = 'reference/api' apidoc_excluded_paths = [ 'oslo_concurrency/tests', 'oslo_concurrency/_*', 'setup.py', ]
apache-2.0
schaloner/deadbolt-2-core
test/be/objectify/deadbolt/core/utils/TemplateUtilsTest.java
3663
/*
 * Copyright 2013 Steve Chaloner
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package be.objectify.deadbolt.core.utils;

import org.junit.Assert;
import org.junit.Test;

/**
 * Unit tests for {@link TemplateUtils#allOf} and {@link TemplateUtils#as}:
 * both must always return a non-null array echoing their varargs input,
 * with a null array treated as empty and a single null element preserved.
 *
 * @author Steve Chaloner (steve@objectify.be)
 */
public class TemplateUtilsTest
{
    /**
     * Asserts that the actual array is non-null and contains exactly the
     * expected elements, in order.
     */
    private static void assertContents(final String[] actual,
                                       final String... expected)
    {
        Assert.assertNotNull(actual);
        Assert.assertEquals(expected.length,
                            actual.length);
        for (int i = 0; i < expected.length; i++)
        {
            Assert.assertEquals(expected[i],
                                actual[i]);
        }
    }

    @Test
    public void testAllOf_noInput()
    {
        assertContents(TemplateUtils.allOf());
    }

    @Test
    public void testAllOf_singleValueInput()
    {
        assertContents(TemplateUtils.allOf("foo"),
                       "foo");
    }

    @Test
    public void testAllOf_multipleValueInput()
    {
        assertContents(TemplateUtils.allOf("foo", "bar"),
                       "foo",
                       "bar");
    }

    @Test
    public void testAllOf_nullArrayInput()
    {
        // A null varargs array must come back as an empty array.
        assertContents(TemplateUtils.allOf((String[]) null));
    }

    @Test
    public void testAllOf_nullStringInput()
    {
        // A single null element must be preserved as-is.
        assertContents(TemplateUtils.allOf((String) null),
                       (String) null);
    }

    @Test
    public void testAs_noInput()
    {
        assertContents(TemplateUtils.as());
    }

    @Test
    public void testAs_singleValueInput()
    {
        assertContents(TemplateUtils.as("foo"),
                       "foo");
    }

    @Test
    public void testAs_multipleValueInput()
    {
        assertContents(TemplateUtils.as("foo", "bar"),
                       "foo",
                       "bar");
    }

    @Test
    public void testAs_nullArrayInput()
    {
        // A null varargs array must come back as an empty array.
        assertContents(TemplateUtils.as((String[]) null));
    }

    @Test
    public void testAs_nullStringInput()
    {
        // A single null element must be preserved as-is.
        assertContents(TemplateUtils.as((String) null),
                       (String) null);
    }
}
apache-2.0
SuperMap/iClient9
src/common/iServer/ColorDictionary.js
2632
/* Copyright© 2000 - 2022 SuperMap Software Co.Ltd. All rights reserved. * This program are made available under the terms of the Apache License, Version 2.0 * which accompanies this distribution and is available at http://www.apache.org/licenses/LICENSE-2.0.html.*/ import {SuperMap} from '../SuperMap'; import {Util} from '../commontypes/Util'; import {ServerColor} from './ServerColor'; /** * @class SuperMap.ColorDictionary * @category iServer Map Theme * @classdesc 颜色对照表类。 * @description 颜色对照表中的键名为具体的高程值,键值表示该高程值要显示的颜色。对于栅格图层中高程值小于颜色对照表中高程最小值的点使用颜色对照表中高程最小值对应的颜色,对于栅格图层中高程值大于颜色对照表中高程最大值的点使用颜色对照表中高程最大值对应的颜色,对于栅格图层中高程值在颜色对照表中没有对应颜色的点,则查找颜色对照表中与当前高程值相邻的两个高程对应的颜色,然后通过渐变运算要显示的颜色。如果设置了颜色对照表的话,则颜色表设置无效。 * @param {Object} options - 参数。 * @param {number} options.elevation - 高程值。 * @param {SuperMap.ServerColor} options.color - 服务端颜色类。 */ export class ColorDictionary { constructor(options) { options = options || {}; /** * @member {number} SuperMap.ColorDictionary.prototype.elevation * @description 高程值。 */ this.elevation = null; /** * @member {SuperMap.ServerColor} SuperMap.ColorDictionary.prototype.color * @description 服务端颜色类。 */ this.color = null; Util.extend(this, options); var me = this, c = me.color; if (c) { me.color = new ServerColor(c.red, c.green, c.blue); } this.CLASS_NAME = "SuperMap.ColorDictionary"; } /** * @function SuperMap.ColorDictionary.prototype.destroy * @description 释放资源,将引用资源的属性置空。 */ destroy() { Util.reset(this); } /** * @function SuperMap.ColorDictionary.prototype.toServerJSONObject * @description 转换成对应的 JSON 格式对象。 * @returns {Object} JSON 对象。 */ toServerJSONObject() { var dataObj = {}; dataObj = Util.copyAttributes(dataObj, this); return dataObj; } } SuperMap.ColorDictionary = ColorDictionary;
apache-2.0
walkernel/Skadoosh
routes/search.js
1093
var express = require('express'); var passport = require('passport'); var router = express.Router(); var mongoose = require('mongoose'); var accountModel = require('../models/account') //Search page on website router.get('/',function(req,res){ accountModel.findOne({"username":req._passport.session.user},function(err, data){ res.render("search", {schem: data.schemJson.map( function(e){ return encodeURIComponent(e.name).replace(/'/g, "%27"); }), values:data.schemJson.map(function(e){ return e.values.map(function(z){ return encodeURIComponent(z).replace(/'/g, "%27"); })+"]" }) }); }); }); router.post('/', function(req, res){ accountModel.aggregate([ {$match:{'username':req._passport.session.user}}, {$project:{"objects":1,_id:0}}, {$unwind:"$objects"}, {$match:{"objects.properties":{ $elemMatch:{ value:req.body.propertyValue, propertyName:req.body.propertyName } }} }],function(err, result){ console.log(result) res.json(result); }) }) module.exports = router;
apache-2.0
DeepLit/WHG
root/static/js/jsmol/j2s/J/modelset/TickInfo.js
405
Clazz.declarePackage ("J.modelset"); c$ = Clazz.decorateAsClass (function () { this.id = ""; this.type = " "; this.ticks = null; this.tickLabelFormats = null; this.scale = null; this.first = 0; this.signFactor = 1; this.reference = null; Clazz.instantialize (this, arguments); }, J.modelset, "TickInfo"); Clazz.makeConstructor (c$, function (ticks) { this.ticks = ticks; }, "J.util.P3");
apache-2.0
bella0101/websitePortal
src/main/java/org/nmrg/common/utils/log/StoreyLogUtil.java
970
package org.nmrg.common.utils.log;

import javax.servlet.http.HttpServletRequest;

import org.aspectj.lang.JoinPoint;
import org.nmrg.common.utils.verify.VerifyUtils;
import org.nmrg.entity.common.mongodb.OperateLogEntity;

/**
 * Builds {@link OperateLogEntity} footprint records for controller invocations,
 * capturing the request URL and query string.
 *
 * @author CC
 * @date 2017年11月17日 - 下午4:47:42
 */
public class StoreyLogUtil {

    /**
     * Builds an operation-log entity from the current request.
     *
     * @param req the incoming HTTP request whose URL/query are recorded
     * @param joinPoint the intercepted join point (currently unused; kept for the AOP signature)
     * @return the populated {@link OperateLogEntity}
     */
    public OperateLogEntity saveControllerFootprint(HttpServletRequest req, JoinPoint joinPoint) {
        OperateLogEntity operateLog = new OperateLogEntity();
        operateLog.setName("Mongodb");

        StringBuffer strBuffer = req.getRequestURL();
        // BUGFIX: the original copied the URL only when the buffer WAS empty and
        // recorded it only when the string WAS empty — both checks were inverted —
        // and then discarded the populated entity by returning null.
        // Assumes VerifyUtils.isEmpty returns true for null/empty — TODO confirm.
        String strUrl = "";
        if (!VerifyUtils.isEmpty(strBuffer)) {
            strUrl = strBuffer.toString();
        }
        if (!VerifyUtils.isEmpty(strUrl)) {
            operateLog.setReqURL(strUrl);
            operateLog.setReqURLQuery(req.getQueryString());
        }
        return operateLog;
    }
}
apache-2.0
kawamon/hue
apps/beeswax/src/beeswax/conf.py
12033
#!/usr/bin/env python # Licensed to Cloudera, Inc. under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. Cloudera, Inc. licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from __future__ import division from builtins import str import logging import math import os.path import sys from desktop.conf import default_ssl_cacerts, default_ssl_validate, AUTH_PASSWORD as DEFAULT_AUTH_PASSWORD,\ AUTH_USERNAME as DEFAULT_AUTH_USERNAME from desktop.lib.conf import ConfigSection, Config, coerce_bool, coerce_csv, coerce_password_from_script if sys.version_info[0] > 2: from django.utils.translation import gettext_lazy as _t, gettext as _ else: from django.utils.translation import ugettext_lazy as _t, ugettext as _ LOG = logging.getLogger(__name__) HIVE_DISCOVERY_LLAP = Config( key="hive_discovery_llap", help=_t("Have Hue determine Hive Server Interactive endpoint from zookeeper"), default="false", type=coerce_bool ) HIVE_DISCOVERY_HS2 = Config( key="hive_discovery_hs2", help=_t("Determines whether we pull a random HiveServer2 from the list in zookeeper. This HS2 instance is cached until hue is restarted."), default="false", type=coerce_bool ) HIVE_DISCOVERY_LLAP_HA = Config( key="hive_discovery_llap_ha", help=_t("If you have more than one HSI server, it has a different znode setup. 
This will trigger the code to check for the Active HSI Server"), default="false", type=coerce_bool ) HIVE_DISCOVERY_LLAP_ZNODE = Config( key="hive_discovery_llap_znode", help=_t("If LLAP is enabled, you should be using zookeeper service discovery mode, this is the znode of the LLAP Master(s)"), default="/hiveserver2-hive2" ) HIVE_DISCOVERY_HIVESERVER2_ZNODE = Config( key="hive_discovery_hiveserver2_znode", help=_t("If Hive is using zookeeper service discovery mode, this is the znode of the hiveserver2(s)"), default="/hiveserver2" ) CACHE_TIMEOUT = Config( key="cache_timeout", help=_t("How long to pause before reaching back out to zookeeper to get the current Active HSI endpoint"), default=60, type=int ) LLAP_SERVER_PORT = Config( key="llap_server_port", help=_t("LLAP binary Thrift port (10500 default)."), default=10500, type=int ) LLAP_SERVER_THRIFT_PORT = Config( key="llap_server_thrift_port", help=_t("LLAP http Thrift port (10501 default)"), default=10501, type=int ) LLAP_SERVER_HOST = Config( key="llap_server_host", help=_t("Host where Hive Server Interactive is running. If Kerberos security is enabled, " "the fully-qualified domain name (FQDN) is required"), default="localhost" ) HIVE_SERVER_HOST = Config( key="hive_server_host", help=_t("Host where HiveServer2 server is running. 
If Kerberos security is enabled, " "the fully-qualified domain name (FQDN) is required"), default="localhost") def get_hive_thrift_binary_port(): """Devise port from core-site Thrift / execution mode & Http port""" from beeswax.hive_site import hiveserver2_thrift_binary_port, get_hive_execution_mode # Cyclic dependency return hiveserver2_thrift_binary_port() or (10500 if (get_hive_execution_mode() or '').lower() == 'llap' else 10000) HIVE_SERVER_PORT = Config( key="hive_server_port", help=_t("Configure the binary Thrift port for HiveServer2."), dynamic_default=get_hive_thrift_binary_port, type=int) def get_hive_thrift_http_port(): """Devise port from core-site Thrift / execution mode & Http port""" from beeswax.hive_site import hiveserver2_thrift_http_port, get_hive_execution_mode # Cyclic dependency return hiveserver2_thrift_http_port() or (10501 if (get_hive_execution_mode() or '').lower() == 'llap' else 10001) HIVE_HTTP_THRIFT_PORT = Config( key="hive_server_http_port", help=_t("Configure the Http Thrift port for HiveServer2."), dynamic_default=get_hive_thrift_http_port, type=int) HIVE_METASTORE_HOST = Config( key="hive_metastore_host", help=_t("Host where Hive Metastore Server (HMS) is running. 
If Kerberos security is enabled, " "the fully-qualified domain name (FQDN) is required"), default="localhost") HIVE_METASTORE_PORT = Config( key="hive_metastore_port", help=_t("Configure the port the Hive Metastore Server runs on."), default=9083, type=int) HIVE_CONF_DIR = Config( key='hive_conf_dir', help=_t('Hive configuration directory, where hive-site.xml is located.'), default=os.environ.get("HIVE_CONF_DIR", '/etc/hive/conf')) HIVE_SERVER_BIN = Config( key="hive_server_bin", help=_t("Path to HiveServer2 start script"), default='/usr/lib/hive/bin/hiveserver2', private=True) LOCAL_EXAMPLES_DATA_DIR = Config( key='local_examples_data_dir', default=os.path.join(os.path.dirname(__file__), "..", "..", "data"), help=_t('The local filesystem path containing the Hive examples.')) SERVER_CONN_TIMEOUT = Config( key='server_conn_timeout', default=120, type=int, help=_t('Timeout in seconds for Thrift calls.')) USE_GET_LOG_API = Config( # To remove in Hue 4 key='use_get_log_api', default=False, type=coerce_bool, help=_t('Choose whether to use the old GetLog() Thrift call from before Hive 0.14 to retrieve the logs.' 'If false, use the FetchResults() Thrift call from Hive 1.0 or more instead.') ) BROWSE_PARTITIONED_TABLE_LIMIT = Config( # Deprecated, to remove in Hue 4 key='browse_partitioned_table_limit', default=1000, type=int, help=_t('Limit the number of partitions to list on the partitions page. A positive value will be set as the LIMIT. 
If 0 or negative, do not set any limit.')) QUERY_PARTITIONS_LIMIT = Config( key='query_partitions_limit', default=10, type=int, help=_t('The maximum number of partitions that will be included in the SELECT * LIMIT sample query for partitioned tables.')) def get_browse_partitioned_table_limit(): """Get the old default""" return BROWSE_PARTITIONED_TABLE_LIMIT.get() LIST_PARTITIONS_LIMIT = Config( key='list_partitions_limit', dynamic_default=get_browse_partitioned_table_limit, type=int, help=_t('Limit the number of partitions that can be listed. A positive value will be set as the LIMIT.')) # Deprecated DOWNLOAD_CELL_LIMIT = Config( key='download_cell_limit', default=10000000, type=int, help=_t('A limit to the number of cells (rows * columns) that can be downloaded from a query ' '(e.g. - 10K rows * 1K columns = 10M cells.) ' 'A value of -1 means there will be no limit.')) def get_deprecated_download_cell_limit(): """Get the old default""" return math.floor(DOWNLOAD_CELL_LIMIT.get() / 100) if DOWNLOAD_CELL_LIMIT.get() > 0 else DOWNLOAD_CELL_LIMIT.get() DOWNLOAD_ROW_LIMIT = Config( key='download_row_limit', dynamic_default=get_deprecated_download_cell_limit, type=int, help=_t('A limit to the number of rows that can be downloaded from a query before it is truncated. ' 'A value of -1 means there will be no limit.')) DOWNLOAD_BYTES_LIMIT = Config( key='download_bytes_limit', default=-1, type=int, help=_t('A limit to the number of bytes that can be downloaded from a query before it is truncated. ' 'A value of -1 means there will be no limit.')) APPLY_NATURAL_SORT_MAX = Config( key="apply_natural_sort_max", help=_t("The max number of records in the result set permitted to apply a natural sort to the database or tables list."), type=int, default=2000 ) CLOSE_QUERIES = Config( key="close_queries", help=_t("Hue will try to close the Hive query when the user leaves the editor page. 
" "This will free all the query resources in HiveServer2, but also make its results inaccessible."), type=coerce_bool, default=False ) MAX_NUMBER_OF_SESSIONS = Config( key="max_number_of_sessions", help=_t("Hue will use at most this many HiveServer2 sessions per user at a time" # The motivation for -1 is that Hue does currently keep track of session state perfectly and the user does not have ability to manage them effectively. The cost of a session is low "-1 is unlimited number of sessions."), type=int, default=1 ) THRIFT_VERSION = Config( key="thrift_version", help=_t("Thrift version to use when communicating with HiveServer2."), type=int, default=11 ) CONFIG_WHITELIST = Config( key='config_whitelist', default='hive.map.aggr,hive.exec.compress.output,hive.exec.parallel,hive.execution.engine,mapreduce.job.queuename', type=coerce_csv, help=_t('A comma-separated list of white-listed Hive configuration properties that users are authorized to set.') ) SSL = ConfigSection( key='ssl', help=_t('SSL configuration for the server.'), members=dict( CACERTS = Config( key="cacerts", help=_t("Path to Certificate Authority certificates."), type=str, dynamic_default=default_ssl_cacerts, ), KEY = Config( key="key", help=_t("Path to the private key file, e.g. /etc/hue/key.pem"), type=str, default=None ), CERT = Config( key="cert", help=_t("Path to the public certificate file, e.g. 
/etc/hue/cert.pem"), type=str, default=None ), VALIDATE = Config( key="validate", help=_t("Choose whether Hue should validate certificates received from the server."), type=coerce_bool, dynamic_default=default_ssl_validate, ) ) ) def get_auth_username(): """Get from top level default from desktop""" return DEFAULT_AUTH_USERNAME.get() AUTH_USERNAME = Config( key="auth_username", help=_t("Auth username of the hue user used for authentications."), dynamic_default=get_auth_username) def get_auth_password(): """Get from script or backward compatibility""" password = AUTH_PASSWORD_SCRIPT.get() if password: return password return DEFAULT_AUTH_PASSWORD.get() AUTH_PASSWORD = Config( key="auth_password", help=_t("LDAP/PAM/.. password of the hue user used for authentications."), private=True, dynamic_default=get_auth_password) AUTH_PASSWORD_SCRIPT = Config( key="auth_password_script", help=_t("Execute this script to produce the auth password. This will be used when `auth_password` is not set."), private=True, type=coerce_password_from_script, default=None) def get_use_sasl_default(): """Get from hive_site or backward compatibility""" from beeswax.hive_site import get_hiveserver2_authentication, get_use_sasl # Cyclic dependency use_sasl = get_use_sasl() if use_sasl is not None: return use_sasl.upper() == 'TRUE' return get_hiveserver2_authentication() in ('KERBEROS', 'NONE', 'LDAP', 'PAM') # list for backward compatibility USE_SASL = Config( key="use_sasl", help=_t("Use SASL framework to establish connection to host"), private=False, type=coerce_bool, dynamic_default=get_use_sasl_default) def has_multiple_sessions(): """When true will create multiple sessions for user queries""" return MAX_NUMBER_OF_SESSIONS.get() != 1 CLOSE_SESSIONS = Config( key="close_sessions", help=_t( 'When set to True, Hue will close sessions created for background queries and open new ones as needed.' 
'When set to False, Hue will keep sessions created for background queries opened and reuse them as needed.' 'This flag is useful when max_number_of_sessions != 1'), type=coerce_bool, dynamic_default=has_multiple_sessions ) def has_session_pool(): return has_multiple_sessions() and not CLOSE_SESSIONS.get()
apache-2.0
kwakeroni/BusinessParameters
parameters-backend/parameters-backend-elasticsearch/src/main/java/be/kwakeroni/parameters/backend/es/api/ElasticSearchCriteria.java
565
package be.kwakeroni.parameters.backend.es.api;

import org.json.JSONObject;

/**
 * (C) 2017 Maarten Van Puymbroeck
 *
 * Accumulates filter criteria for an ElasticSearch query and renders them as a
 * JSON query object.
 */
public interface ElasticSearchCriteria {

    /** Restricts the criteria to entries of the given parameter group. */
    void inGroup(String groupName);

    /** Adds a criterion requiring {@code parameter} to match {@code value}. */
    void addParameterMatch(String parameter, String value);

    /** Adds a criterion requiring {@code parameter} not to match {@code value}. */
    void addParameterNotMatch(String parameter, String value);

    /**
     * Adds a typed comparison criterion on {@code parameter}.
     *
     * @param parameter the parameter name
     * @param dataType  how the parameter's values are typed for comparison
     * @param operator  the comparison operator to apply
     * @param value     the value compared against
     * @param <T>       the comparison value type
     */
    <T> void addParameterComparison(String parameter, ElasticSearchDataType<T> dataType, String operator, T value);

    /** Adds a raw ElasticSearch filter clause verbatim. */
    void addComplexFilter(JSONObject filter);

    /** Renders the accumulated criteria as an ElasticSearch JSON query object. */
    JSONObject toJSONObject();
}
apache-2.0
detnavillus/modular-informatic-designs
pipeline/src/main/java/com/modinfodesigns/app/search/model/ResultDisplayFieldProperties.java
2588
package com.modinfodesigns.app.search.model; import com.modinfodesigns.property.DataObject; import com.modinfodesigns.property.DataObjectBean; import com.modinfodesigns.property.IProperty; import com.modinfodesigns.property.schema.DataObjectSchema; import com.modinfodesigns.property.string.StringProperty; public class ResultDisplayFieldProperties extends DataObjectBean { public static final String URL_FIELD = "URLField"; public static final String IMG_URL_FIELD = "ImgURLField"; public static final String RESULT_TRANSFORM = "ResultTransform"; public static final String RESULT_FIELD_RENDERER = "ResultFieldRenderer"; private String fieldName; private String resultFieldRenderer; // name of special IResultFieldRenderer for this field public void setName( String fieldName ) { this.fieldName = fieldName; } public String getName( ) { return this.fieldName; } public void setURLField( String urlField ) { doSetProperty( new StringProperty( URL_FIELD, urlField )); } public void addURLField( String urlField ) { setURLField( urlField ); } public String getURLField( ) { IProperty urlProp = getProperty( URL_FIELD ); return (urlProp != null) ? urlProp.getValue() : null; } public void setImgURLField( String imgURLField ) { doSetProperty( new StringProperty( IMG_URL_FIELD, imgURLField )); } public String getImgURLField( ) { IProperty imgURLProp = getProperty( IMG_URL_FIELD ); return (imgURLProp != null) ? imgURLProp.getValue() : null; } /** * sets name of Result IPropertyHolderTransform */ public void setResultTransform( String resultTransform ) { doSetProperty( new StringProperty( RESULT_TRANSFORM, resultTransform )); } public String getResultTransform( ) { IProperty resTransProp = getProperty( RESULT_TRANSFORM ); return (resTransProp != null) ? 
resTransProp.getValue() : null; } public void setResultFieldRenderer( String resultFieldRenderer ) { doSetProperty( new StringProperty( RESULT_FIELD_RENDERER, resultFieldRenderer )); } public String getResultFieldRenderer( ) { IProperty resRendProp = getProperty( RESULT_FIELD_RENDERER ); return (resRendProp != null) ? resRendProp.getValue() : null; } @Override public DataObjectSchema createDataObjectSchema( DataObject context ) { return null; } }
apache-2.0
xbyg/Silicon-App
src/main/java/com/xbyg_plus/silicon/dialog/LoadingDialog.java
1618
package com.xbyg_plus.silicon.dialog; import android.app.Activity; import android.app.ProgressDialog; import android.content.Context; import android.support.design.widget.Snackbar; import android.view.View; import io.reactivex.Observable; import io.reactivex.android.schedulers.AndroidSchedulers; public class LoadingDialog extends ProgressDialog { private View activityRootViewGroup; public LoadingDialog(Context context) { super(context); this.setCancelable(false); this.setIndeterminate(true); this.activityRootViewGroup = (((Activity) context).findViewById(android.R.id.content)); } public LoadingDialog setMessage(int stringId) { return this.setMessage(getContext().getString(stringId)); } public LoadingDialog setMessage(String msg) { //this dialog is usually used in non main thread, we need to set the message in ui thread Observable.just(msg).observeOn(AndroidSchedulers.mainThread()).subscribe(super::setMessage); return this; } public void dismiss(int dismissMessageId) { this.dismiss(getContext().getString(dismissMessageId)); } public void dismiss(String dismissMessage) { this.dismiss(); Snackbar.make(activityRootViewGroup, dismissMessage, Snackbar.LENGTH_LONG).show(); } public void dismiss(int dismissMessageId, View v) { this.dismiss(getContext().getString(dismissMessageId), v); } public void dismiss(String dismissMessage, View v) { this.dismiss(); Snackbar.make(v, dismissMessage, Snackbar.LENGTH_LONG).show(); } }
apache-2.0
Wanagow/Proyecto-Wanagow
Wanagow/Resources/iphone/alloy/controllers/Registro.js
12233
function __processArg(obj, key) { var arg = null; if (obj) { arg = obj[key] || null; delete obj[key]; } return arg; } function Controller() { function checkemail(emailAddress) { var testresults; var str = emailAddress; var filter = /^([A-Za-z0-9_\-\.])+\@([A-Za-z0-9_\-\.])+\.([A-Za-z]{2,4})$/; testresults = filter.test(str) ? true : false; return testresults; } function NuevaCuenta() { if ("" != $.txtEmailw.value && "" != $.txtPasswordw.value && "" != $.txtconfirmew.value && "" != $.txtnombrew.value && "" != $.txtapellidow.value && null != picker.value && "" != picker.value && 1 == $.mujer.opacity || 1 == $.hombre.opacity) if ($.txtPasswordw.value != $.txtconfirmew.value) alert("Las contraseñas no coinciden"); else if (checkemail($.txtEmailw.value)) { var genero; genero = 1 == $.mujer.opacity ? 0 : 1; createReq.open("POST", servidor + "servidor/new.php"); var params = { nombre: $.txtnombrew.value, apellido: $.txtapellidow.value, email: $.txtEmailw.value, fecha: picker.value, genero: genero, password: Ti.Utils.md5HexDigest($.txtPasswordw.value) }; createReq.send(params); alert("Informacion enviada"); } else alert("Por favor ingresa un correo valido"); else alert("Complete la informacion necesaria"); } require("alloy/controllers/BaseController").apply(this, Array.prototype.slice.call(arguments)); this.__controllerPath = "Registro"; if (arguments[0]) { __processArg(arguments[0], "__parentSymbol"); __processArg(arguments[0], "$model"); __processArg(arguments[0], "__itemTemplate"); } var $ = this; var exports = {}; var __defers = {}; $.__views.container = Ti.UI.createWindow({ backgroundColor: "white", id: "container" }); $.__views.container && $.addTopLevelView($.__views.container); $.__views.imagenw = Ti.UI.createImageView({ image: "imagen/reeee.jpg", height: 100, width: 800, righ: 900, top: 20, id: "imagenw" }); $.__views.container.add($.__views.imagenw); $.__views.imagew = Ti.UI.createImageView({ image: "imagen/face.jpg", height: Ti.UI.SIZE, width: Ti.UI.SIZE, top: 
130, id: "imagew" }); $.__views.container.add($.__views.imagew); $.__views.label1w = Ti.UI.createLabel({ height: Ti.UI.SIZE, width: Ti.UI.SIZE, color: "black", layout: "center", top: 190, text: "DATOS DE LA CUENTA", id: "label1w" }); $.__views.container.add($.__views.label1w); $.__views.txtEmailw = Ti.UI.createTextField({ width: 300, right: 50, left: 250, height: 45, top: 250, hintText: "Email", borderColor: "#F4CE00", borderWidth: 1.2, borderRadius: 10, borderStyle: "Ti.UI.INPUT_BORDERSTYLE_ROUNDED", color: "black", id: "txtEmailw" }); $.__views.container.add($.__views.txtEmailw); $.__views.txtPasswordw = Ti.UI.createTextField({ width: 300, right: 50, left: 250, height: 45, top: 300, borderWidth: 1.2, borderRadius: 10, hintText: "Password", borderColor: "#F4CE00", borderStyle: "Ti.UI.INPUT_BORDERSTYLE_ROUNDED", color: "black", id: "txtPasswordw", passwordMask: "true" }); $.__views.container.add($.__views.txtPasswordw); $.__views.txtconfirmew = Ti.UI.createTextField({ width: 300, right: 50, left: 250, height: 45, top: 350, hintText: "Confirme Password", borderColor: "#F4CE00", borderStyle: "Ti.UI.INPUT_BORDERSTYLE_ROUNDED", color: "black", id: "txtconfirmew", passwordMask: "true" }); $.__views.container.add($.__views.txtconfirmew); $.__views.label2w = Ti.UI.createLabel({ height: Ti.UI.SIZE, width: Ti.UI.SIZE, top: 410, text: "DATOS DEL USUARIO", id: "label2w" }); $.__views.container.add($.__views.label2w); $.__views.txtnombrew = Ti.UI.createTextField({ width: 300, right: 50, left: 250, height: 45, top: 450, hintText: "Nombre", borderColor: "#F4CE00", borderStyle: "Ti.UI.INPUT_BORDERSTYLE_ROUNDED", color: "black", id: "txtnombrew" }); $.__views.container.add($.__views.txtnombrew); $.__views.txtapellidow = Ti.UI.createTextField({ width: 300, right: 50, left: 250, height: 45, top: 500, hintText: "Apellido", borderColor: "#F4CE00", borderStyle: "Ti.UI.INPUT_BORDERSTYLE_ROUNDED", color: "black", id: "txtapellidow" }); $.__views.container.add($.__views.txtapellidow); 
$.__views.btn1w = Ti.UI.createButton({ width: 300, right: 300, left: 250, backgroundColor: "#E3C109", height: 50, top: 570, font: { fontFamily: "Helvetica Neue" }, color: "white", title: "Fecha de Nacimiento", id: "btn1w" }); $.__views.container.add($.__views.btn1w); $.__views.label3w = Ti.UI.createLabel({ width: 100, right: 100, left: 260, height: 50, top: 650, text: "Sexo:", id: "label3w" }); $.__views.container.add($.__views.label3w); $.__views.mujer = Ti.UI.createButton({ width: 100, right: 100, left: 340, backgroundColor: "#DCBC0D", height: 50, top: 650, font: { fontFamily: "Helvetica Neue" }, color: "white", title: "Mujer", opacity: .4, id: "mujer" }); $.__views.container.add($.__views.mujer); $.__views.hombre = Ti.UI.createButton({ width: 100, right: 100, left: 436, backgroundColor: "#DCBC0D", height: 50, top: 650, font: { fontFamily: "Helvetica Neue" }, color: "white", title: "Hombre", opacity: 1, id: "hombre" }); $.__views.container.add($.__views.hombre); $.__views.btn4w = Ti.UI.createButton({ width: 100, right: 500, left: 600, backgroundColor: "#515050", height: 50, top: 750, font: { fontFamily: "Helvetica Neue" }, color: "white", title: "Siguiente", id: "btn4w" }); $.__views.container.add($.__views.btn4w); NuevaCuenta ? $.__views.btn4w.addEventListener("click", NuevaCuenta) : __defers["$.__views.btn4w!click!NuevaCuenta"] = true; exports.destroy = function() {}; _.extend($, $.__views); var servidor; servidor = "iphone" == Ti.Platform.osname || "ipad" == Ti.Platform.osname ? 
"http://localhost/" : "http://10.0.2.2/"; $.imagew.addEventListener("click", function() { alert("Lamentamos los inconvenientes esta funcion no esta disponible aun"); }); var cancel = Titanium.UI.createButton({ title: "Cerrar", top: 2, left: 30, height: 30, width: 44 }); var done = Titanium.UI.createButton({ title: "Aceptar", right: 30, top: 2, height: 40, width: "20%" }); var picker_view = Titanium.UI.createView({ backgroundColor: "#E3C109", top: "50%", height: 400, width: 420 }); var picker = Ti.UI.createPicker({ top: 43, value: new Date(), type: Ti.UI.PICKER_TYPE_DATE, minDate: new Date(1980, 11, 31), maxDate: new Date(2016, 11, 31), selectionIndicator: true, useSpinner: true }); if ("iphone" == Ti.Platform.osname || "android" == Ti.Platform.osname) { var alineacion = "13%"; var ancho = "75%"; var alto = "7%"; var letranormal = { fontFamily: "Arial", fontSize: "12%" }; $.imagenw.height = "8%"; $.imagew.top = "13%"; $.imagew.width = "40%"; $.label1w.font = letranormal; $.label2w.font = letranormal; $.label3w.font = letranormal; picker_view.width = ancho; picker_view.top = "55%"; picker_view.left = alineacion; done.top = "-1.4%"; cancel.top = "0%"; done.left = "70%"; cancel.left = "10%"; $.label1w.top = "19%"; $.txtEmailw.top = "22%"; $.txtPasswordw.top = "30%"; $.txtconfirmew.top = "38%"; $.label2w.top = "44%"; $.txtnombrew.top = "50%"; $.txtapellidow.top = "58%"; $.btn1w.top = "66%"; $.label3w.top = "74%"; $.mujer.top = "74%"; $.hombre.top = "74%"; $.btn4w.top = "82%"; $.label1w.left = alineacion; $.txtconfirmew.left = alineacion; $.txtEmailw.left = alineacion; $.txtPasswordw.left = alineacion; $.label2w.left = alineacion; $.txtnombrew.left = alineacion; $.txtapellidow.left = alineacion; $.btn1w.left = alineacion; $.label3w.left = alineacion; $.mujer.left = "25%"; $.hombre.left = "58%"; $.btn4w.left = alineacion; $.txtPasswordw.width = ancho; $.txtconfirmew.width = ancho; $.txtEmailw.width = ancho; $.label2w.width = ancho; $.txtnombrew.width = ancho; 
$.txtapellidow.width = ancho; $.btn1w.width = ancho; $.label3w.width = ancho; $.mujer.width = "30%"; $.hombre.width = "30%"; $.btn4w.width = ancho; $.txtPasswordw.height = alto; $.txtconfirmew.height = alto; $.txtEmailw.height = alto; $.label2w.height = alto; $.txtnombrew.height = alto; $.txtapellidow.height = alto; $.btn1w.height = alto; $.label3w.height = alto; $.mujer.height = alto; $.hombre.height = alto; $.btn4w.height = alto; } $.mujer.addEventListener("click", function() { $.mujer.opacity = 1; $.hombre.opacity = .4; }); $.hombre.addEventListener("click", function() { $.hombre.opacity = 1; $.mujer.opacity = .4; }); $.btn1w.addEventListener("click", function() { picker_view.animate({ duration: 1e3, top: "50%" }); done.addEventListener("click", function() { picker_view.animate({ duration: 1e3, top: "-50%" }); }); cancel.addEventListener("click", function() { picker_view.animate({ duration: 1e3, top: "120%" }); }); picker_view.add(cancel); picker_view.add(done); picker_view.add(picker); $.container.add(picker_view); }); var createReq = Titanium.Network.createHTTPClient({ onload: function() { if ("Insert failed" == this.responseText || "That username or email already exists" == this.responseText) alert(this.responseText); else { var genero; genero = 1 == $.mujer.opacity ? 0 : 1; var parametos = { email: $.txtEmailw.value, nombre: $.txtnombrew.value, apellido: $.txtapellidow.value, fecha: picker.value, genero: genero, password: Ti.Utils.md5HexDigest($.txtPasswordw.value) }; Alloy.createController("Next", parametos).getView().open(); } } }); __defers["$.__views.btn4w!click!NuevaCuenta"] && $.__views.btn4w.addEventListener("click", NuevaCuenta); _.extend($, exports); } var Alloy = require("alloy"), Backbone = Alloy.Backbone, _ = Alloy._; module.exports = Controller;
apache-2.0
skandragon/thing
app/helpers/application_helper.rb
385
# encoding: utf-8
module ApplicationHelper
  # Name of the module enclosing the Rails application class.
  def application_name
    Rails.application.class.parent_name
  end

  # Human-friendly relative time for +date+ ("3 days ago", "in 2 hours").
  # Returns +never+ (default 'Never') when +date+ is nil; a date equal to
  # "now" is rendered without a prefix or suffix.
  def pretty_date_from_now(date, never = 'Never')
    return never if date.nil?

    distance = distance_of_time_in_words_to_now(date)
    now = Time.now
    if date < now
      "#{distance} ago"
    elsif date > now
      "in #{distance}"
    else
      distance
    end
  end
end
apache-2.0
aehlig/bazel
src/main/java/com/google/devtools/build/lib/includescanning/CppIncludeScanningContextImpl.java
2339
// Copyright 2018 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.includescanning; import com.google.common.base.Supplier; import com.google.common.util.concurrent.ListenableFuture; import com.google.devtools.build.lib.actions.ActionExecutionContext; import com.google.devtools.build.lib.actions.Artifact; import com.google.devtools.build.lib.actions.ExecException; import com.google.devtools.build.lib.actions.ExecutionStrategy; import com.google.devtools.build.lib.rules.cpp.CppCompileAction; import com.google.devtools.build.lib.rules.cpp.CppIncludeScanningContext; import com.google.devtools.build.lib.rules.cpp.IncludeProcessing; import com.google.devtools.build.lib.rules.cpp.IncludeScanner.IncludeScannerSupplier; import com.google.devtools.build.lib.rules.cpp.IncludeScanner.IncludeScanningHeaderData; /** * Include scanning context implementation. */ @ExecutionStrategy(contextType = CppIncludeScanningContext.class) public class CppIncludeScanningContextImpl implements CppIncludeScanningContext { private final Supplier<? extends IncludeScannerSupplier> includeScannerSupplier; public CppIncludeScanningContextImpl( Supplier<? 
extends IncludeScannerSupplier> includeScannerSupplier) { this.includeScannerSupplier = includeScannerSupplier; } @Override public ListenableFuture<Iterable<Artifact>> findAdditionalInputs( CppCompileAction action, ActionExecutionContext actionExecutionContext, IncludeProcessing includeProcessing, IncludeScanningHeaderData includeScanningHeaderData) throws ExecException, InterruptedException { return includeProcessing.determineAdditionalInputs( includeScannerSupplier.get(), action, actionExecutionContext, includeScanningHeaderData); } }
apache-2.0
keepsl/keepsmis
crm/src/main/java/com/keeps/crm/utils/Test1.java
399
package com.keeps.crm.utils;

/**
 * Empty scratch entry point for ad-hoc experiments in the CRM utils package.
 * <p>Copyright: Copyright (c) KEEPS</p>
 *
 * @author keeps
 * @version v 1.00
 * @date 创建日期:2017年7月17日
 */
public class Test1 {

    /** No-op main; add manual test code here when needed. */
    public static void main(String[] args) {
        // intentionally left empty
    }
}
apache-2.0
joningis/coredata-java-api
src/main/java/com/bangsapabbi/api/file/File.java
3867
package com.bangsapabbi.api.file;

import java.util.Set;

import javax.validation.ConstraintViolation;
import javax.validation.constraints.NotNull;

import com.bangsapabbi.api.common.ApiDTO;
import com.bangsapabbi.api.common.Insertable;
import com.bangsapabbi.api.project.Project;
import com.bangsapabbi.api.space.Space;
import com.google.gson.annotations.SerializedName;

/**
 * DTO for a file in the Coredata v2 API, deserialized from JSON via Gson
 * (see the sample payload in the comment at the bottom of this class).
 */
public class File implements Insertable<File> {

    // Required by validation annotations: remote file name (e.g. "proxylab.pdf").
    @NotNull
    private String filename;

    // Required by validation annotations: human-readable title.
    @NotNull
    private String title;

    // Server-side identifier; appears as "id" in the JSON payload.
    @SerializedName("id")
    private String uuid;

    private Project project;

    // UUID string of the parent object.
    private String parent;

    /**
     * This does not seem to be used in the v2 api.
     */
    private String folder;

    private Space space;

    // MIME type, serialized as "mime_type" in the JSON payload.
    @SerializedName("mime_type")
    private String mimeType;

    // Path on the local machine; not part of the API payload (no @SerializedName).
    private String localPath;

    public String getFilename() {
        return filename;
    }

    public void setFilename(final String filename) {
        this.filename = filename;
    }

    public String getTitle() {
        return title;
    }

    public void setTitle(final String title) {
        this.title = title;
    }

    public String getUUID() {
        return uuid;
    }

    public void setUUID(final String uuid) {
        this.uuid = uuid;
    }

    public Project getProject() {
        return project;
    }

    public void setProject(final Project project) {
        this.project = project;
    }

    // Accessor name differs from the field: returns the raw parent UUID string.
    public String getParentUUID() {
        return parent;
    }

    public void setParent(final String parent) {
        this.parent = parent;
    }

    public Space getSpace() {
        return space;
    }

    public void setSpace(final Space space) {
        this.space = space;
    }

    public String getMimeType() {
        return mimeType;
    }

    public void setMimeType(final String mimeType) {
        this.mimeType = mimeType;
    }

    // Note: localPath and title are deliberately omitted here.
    @Override
    public String toString() {
        return "File{" +
                "filename='" + filename + '\'' +
                ", uuid='" + uuid + '\'' +
                ", project=" + project +
                ", parent='" + parent + '\'' +
                ", folder='" + folder + '\'' +
                ", space=" + space +
                ", mimeType='" + mimeType + '\'' +
                '}';
    }

    public String getLocalPath() {
        return localPath;
    }

    public void setLocalPath(final String localPath) {
        this.localPath = localPath;
    }

    /**
     * NOTE(review): always reports the DTO as invalid, even though the
     * {@code @NotNull} constraints above suggest bean validation was intended
     * here — presumably an unfinished stub; confirm against the
     * {@code Insertable} contract before relying on it.
     */
    @Override
    public boolean isValidForPost() {
        return false;
    }

    // Stub: not implemented, returns null rather than an empty set.
    @Override
    public Set<ConstraintViolation<File>> getConstraintViolations() {
        return null;
    }

    // Stub: not implemented, returns null.
    @Override
    public String getViolationsAsString() {
        return null;
    }

    /*
      "aspects": {},
      "category": "main",
      "created": "2014-04-06T16:55:09",
      "created_by": "Administrator Administrator",
      "description": "",
      "digest": "",
      "dynatype": {
          "caption_plural": "dynatypes_labels:File_plural:",
          "caption_singular": "dynatypes_labels:File_singular:",
          "id": "4f59e726-bdab-11e3-a0fa-6003088b5c52"
      },
      "filename": "proxylab.pdf",
      "folder": null,
      "id": "362327bc-bdac-11e3-a17a-6003088b5c52",
      "mime_type": "application/pdf",
      "modified": "2014-04-06T16:55:09",
      "modified_by": null,
      "owner": null,
      "parent": "21772f2a-bdac-11e3-a9b9-6003088b5c52",
      "project": {
          "id": "215de344-bdac-11e3-af12-6003088b5c52",
          "identifier": "2014-1",
          "title": "Project JIS"
      },
      "resource_uri": "/api/v2/files/362327bc-bdac-11e3-a17a-6003088b5c52/",
      "size": 0,
      "space": {
          "id": "0818f41e-bdac-11e3-9029-6003088b5c52",
          "title": "Space JIS"
      },
      "tags": [],
      "title": "proxylab",
      "type": "File",
      "version": "None.0"
     */
}
apache-2.0
VladimirErshov/ignite
modules/core/src/test/java/org/apache/ignite/platform/PlatformComputeBinarizableArgTask.java
4083
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.platform;

import java.io.Externalizable;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteException;
import org.apache.ignite.binary.BinaryType;
import org.apache.ignite.cluster.ClusterNode;
import org.apache.ignite.compute.ComputeJob;
import org.apache.ignite.compute.ComputeJobAdapter;
import org.apache.ignite.compute.ComputeJobResult;
import org.apache.ignite.compute.ComputeTaskAdapter;
import org.apache.ignite.internal.binary.BinaryObjectEx;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.resources.IgniteInstanceResource;
import org.jetbrains.annotations.Nullable;

/**
 * Task working with binarizable argument.
 */
public class PlatformComputeBinarizableArgTask extends ComputeTaskAdapter<Object, Integer> {
    /** {@inheritDoc} */
    @Nullable @Override public Map<? extends ComputeJob, ClusterNode> map(List<ClusterNode> subgrid,
        @Nullable Object arg) {
        // Single job on the first node of the subgrid, carrying the task argument as-is.
        return Collections.singletonMap(new BinarizableArgJob(arg), F.first(subgrid));
    }

    /** {@inheritDoc} */
    @SuppressWarnings("ThrowableResultOfMethodCallIgnored")
    @Nullable @Override public Integer reduce(List<ComputeJobResult> results) {
        // Exactly one job was mapped, so only the first result matters;
        // rethrow its exception if the job failed.
        ComputeJobResult res = results.get(0);

        if (res.getException() != null)
            throw res.getException();
        else
            return results.get(0).getData();
    }

    /**
     * Job.
     */
    private static class BinarizableArgJob extends ComputeJobAdapter implements Externalizable {
        /** */
        @IgniteInstanceResource
        private Ignite ignite;

        /** Argument. */
        private Object arg;

        /**
         * Constructor.
         */
        public BinarizableArgJob() {
            // No-op.
        }

        /**
         * Constructor.
         *
         * @param arg Argument.
         */
        private BinarizableArgJob(Object arg) {
            this.arg = arg;
        }

        /** {@inheritDoc} */
        @Nullable @Override public Object execute() {
            // Validates that binary metadata for the argument's type was propagated
            // to this node, then returns the argument's "field" value.
            BinaryObjectEx arg0 = ((BinaryObjectEx)arg);

            BinaryType meta = ignite.binary().type(arg0.typeId());

            if (meta == null)
                throw new IgniteException("Metadata doesn't exist.");

            if (meta.fieldNames() == null || !meta.fieldNames().contains("Field"))
                throw new IgniteException("Field metadata doesn't exist.");

            if (!F.eq("int", meta.fieldTypeName("Field")))
                throw new IgniteException("Invalid field type: " + meta.fieldTypeName("Field"));

            if (meta.affinityKeyFieldName() != null)
                throw new IgniteException("Unexpected affinity key: " + meta.affinityKeyFieldName());

            // NOTE(review): metadata is checked for "Field" but the value is read via
            // "field" — presumably this relies on field-name mapping for platform
            // (.NET) binary objects; confirm before changing either casing.
            return arg0.field("field");
        }

        /** {@inheritDoc} */
        @Override public void writeExternal(ObjectOutput out) throws IOException {
            out.writeObject(arg);
        }

        /** {@inheritDoc} */
        @Override public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
            arg = in.readObject();
        }
    }
}
apache-2.0
square/kochiku
db/migrate/20170804214538_add_enabled_bool_to_repositories.rb
166
# Adds a NOT NULL `enabled` flag to the repositories table. The default of
# `true` means existing rows (and new rows that don't set the column) stay
# enabled, so current behavior is unchanged. Reversible via the add_column
# inverse that `change` provides.
class AddEnabledBoolToRepositories < ActiveRecord::Migration[5.0]
  def change
    add_column :repositories, :enabled, :boolean, default: true, null: false
  end
end
apache-2.0
CloudBoost/cloudboost
data-service/helpers/util.js
4570
/* # CloudBoost - Core Engine that powers Bakend as a Service # (c) 2014 HackerBay, Inc. # CloudBoost may be freely distributed under the Apache 2 License */ /* eslint no-use-before-define: 0, no-param-reassign: 0 */ const URL = require('url'); const q = require('q'); const fs = require('fs'); const _ = require('underscore'); const winston = require('winston'); module.exports = { addDefaultACL() { return { read: { allow: { user: ['all'], role: [], }, deny: { user: [], role: [], }, }, write: { allow: { user: ['all'], role: [], }, deny: { user: [], role: [], }, }, }; }, importCSV(obj, tableName) { const util = this; // Sets the properties on each JSON obj.expires = obj.expires ? obj.expires : null; obj._id = util.getId(); obj._version = obj._version || '1'; obj._type = obj._type || 'custom'; if (obj.createdAt) { if (new Date(obj.createdAt) === 'Invalid Date') { obj.created = obj.createdAt; } } else { obj.createdAt = ''; } if (obj.updatedAt) { if (new Date(obj.updatedAt) === 'Invalid Date') { obj.updated = obj.updatedAt; } } else { obj.updatedAt = ''; } obj.ACL = util.isJsonString(obj.ACL) ? 
JSON.parse(obj.ACL) : util.addDefaultACL(); obj._modifiedColumns = Object.keys(obj); obj._isModified = true; obj._tableName = tableName; return obj; }, isUrlValid(data) { try { const obj = URL.parse(data); if (!obj.protocol || !obj.hostname) return false; return true; } catch (err) { winston.log('error', { error: String(err), stack: new Error().stack, }); return false; } }, isEmailValid(data) { try { const re = /^(([^<>()[\]\\.,;:\s@']+(\.[^<>()[\]\\.,;:\s@']+)*)|('.+'))@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/; return re.test(data); } catch (err) { winston.log('error', { error: String(err), stack: new Error().stack, }); return false; } }, getId() { try { let id = ''; const possible = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789'; for (let i = 0; i < 8; i++) { id += possible.charAt(Math.floor(Math.random() * possible.length)); } return id; } catch (err) { winston.log('error', { error: String(err), stack: new Error().stack, }); return err; } }, isJsonString(str) { try { JSON.parse(str); return true; } catch (err) { winston.log('error', { error: String(err), stack: new Error().stack, }); return false; } }, isJsonObject(obj) { try { JSON.stringify(obj); return true; } catch (err) { winston.log('error', { error: String(err), stack: new Error().stack, }); return false; } }, getLatLongDistance(lat1, lon1, lat2, lon2) { const radlat1 = Math.PI * lat1 / 180; const radlat2 = Math.PI * lat2 / 180; const theta = lon1 - lon2; const radtheta = Math.PI * theta / 180; let dist = Math.sin(radlat1) * Math.sin(radlat2) + Math.cos(radlat1) * Math.cos(radlat2) * Math.cos(radtheta); dist = Math.acos(dist); dist = dist * 180 / Math.PI; dist = dist * 60 * 1.1515; dist *= 1609.344; return dist; }, _checkFileExists(filePath) { const deferred = q.defer(); try { fs.readFile(filePath, (err, data) => { if (err) { return deferred.reject(err); } return deferred.resolve(data); }); } catch (err) { winston.log('error', { error: 
String(err), stack: new Error().stack, }); } return deferred.promise; }, _isJSON(json) { // String if (json && typeof (json) === 'string') { try { JSON.parse(json); return true; } catch (e) { return false; } } else { return _.isObject(json); } }, getNestedValue: (path, object) => { const _path = Array.isArray(path) ? path : path.split('.'); // eslint-disable-next-line no-confusing-arrow return _path.reduce((acc, curr) => acc && acc[curr] ? acc[curr] : undefined, object); }, handleException: fn => (req, res, next) => fn(req, res).catch(error => next(error)), };
apache-2.0
awsdocs/aws-doc-sdk-examples
.dotnet/example_code/DynamoDB/LowLevelParallelScan.cs
10287
// snippet-sourcedescription:[ ]
// snippet-service:[dynamodb]
// snippet-keyword:[dotNET]
// snippet-keyword:[Amazon DynamoDB]
// snippet-keyword:[Code Sample]
// snippet-keyword:[ ]
// snippet-sourcetype:[full-example]
// snippet-sourcedate:[ ]
// snippet-sourceauthor:[AWS]
// snippet-start:[dynamodb.dotNET.CodeExample.LowLevelParallelScan]

/**
 * Copyright 2010-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * This file is licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License. A copy of
 * the License is located at
 *
 * http://aws.amazon.com/apache2.0/
 *
 * This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
*/
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using Amazon.DynamoDBv2;
using Amazon.DynamoDBv2.Model;
using Amazon.Runtime;

namespace com.amazonaws.codesamples
{
    // Demonstrates a DynamoDB parallel scan: the table is split into
    // `totalSegments` segments and each segment is scanned by its own Task.
    class LowLevelParallelScan
    {
        private static AmazonDynamoDBClient client = new AmazonDynamoDBClient();
        private static string tableName = "ProductCatalog";
        // Number of example items uploaded before scanning.
        private static int exampleItemCount = 100;
        // Page size (Limit) used for each Scan request.
        private static int scanItemLimit = 10;
        // Number of parallel scan segments; one Task per segment.
        private static int totalSegments = 5;

        // Recreates the example table, seeds it, then runs the parallel scan.
        static void Main(string[] args)
        {
            try
            {
                DeleteExampleTable();
                CreateExampleTable();
                UploadExampleData();
                ParallelScanExampleTable();
            }
            catch (AmazonDynamoDBException e) { Console.WriteLine(e.Message); }
            catch (AmazonServiceException e) { Console.WriteLine(e.Message); }
            catch (Exception e) { Console.WriteLine(e.Message); }

            Console.WriteLine("To continue, press Enter");
            Console.ReadLine();
        }

        // Launches one scan Task per segment and blocks until all complete.
        private static void ParallelScanExampleTable()
        {
            Console.WriteLine("\n*** Creating {0} Parallel Scan Tasks to scan {1}", totalSegments, tableName);
            Task[] tasks = new Task[totalSegments];
            for (int segment = 0; segment < totalSegments; segment++)
            {
                // Copy the loop variable so the lambda captures a stable value.
                int tmpSegment = segment;
                Task task = Task.Factory.StartNew(() =>
                {
                    ScanSegment(totalSegments, tmpSegment);
                });

                tasks[segment] = task;
            }

            Console.WriteLine("All scan tasks are created, waiting for them to complete.");
            Task.WaitAll(tasks);

            Console.WriteLine("All scan tasks are completed.");
        }

        // Scans one segment to exhaustion, paging via ExclusiveStartKey.
        private static void ScanSegment(int totalSegments, int segment)
        {
            Console.WriteLine("*** Starting to Scan Segment {0} of {1} out of {2} total segments ***", segment, tableName, totalSegments);
            Dictionary<string, AttributeValue> lastEvaluatedKey = null;
            int totalScannedItemCount = 0;
            int totalScanRequestCount = 0;
            do
            {
                var request = new ScanRequest
                {
                    TableName = tableName,
                    Limit = scanItemLimit,
                    ExclusiveStartKey = lastEvaluatedKey,
                    Segment = segment,
                    TotalSegments = totalSegments
                };

                var response = client.Scan(request);
                lastEvaluatedKey = response.LastEvaluatedKey;
                totalScanRequestCount++;
                totalScannedItemCount += response.ScannedCount;
                foreach (var item in response.Items)
                    Console.WriteLine("Segment: {0}, Scanned Item with Title: {1}", segment, item["Title"].S);
                // NOTE(review): assumes the SDK returns an empty (non-null)
                // LastEvaluatedKey dictionary once the segment is exhausted —
                // confirm for the SDK version in use.
            } while (lastEvaluatedKey.Count != 0);

            Console.WriteLine("*** Completed Scan Segment {0} of {1}. TotalScanRequestCount: {2}, TotalScannedItemCount: {3} ***", segment, tableName, totalScanRequestCount, totalScannedItemCount);
        }

        // Seeds the table with exampleItemCount items (Ids 0..N-1).
        private static void UploadExampleData()
        {
            Console.WriteLine("\n*** Uploading {0} Example Items to {1} Table***", exampleItemCount, tableName);
            Console.Write("Uploading Items: ");
            for (int itemIndex = 0; itemIndex < exampleItemCount; itemIndex++)
            {
                Console.Write("{0}, ", itemIndex);
                CreateItem(itemIndex.ToString());
            }
            Console.WriteLine();
        }

        // Puts one synthetic catalog item keyed by the numeric itemIndex.
        private static void CreateItem(string itemIndex)
        {
            var request = new PutItemRequest
            {
                TableName = tableName,
                Item = new Dictionary<string, AttributeValue>()
            {
                { "Id", new AttributeValue {
                      N = itemIndex
                  }},
                { "Title", new AttributeValue {
                      S = "Book " + itemIndex + " Title"
                  }},
                { "ISBN", new AttributeValue {
                      S = "11-11-11-11"
                  }},
                { "Authors", new AttributeValue {
                      SS = new List<string>{"Author1", "Author2" }
                  }},
                { "Price", new AttributeValue {
                      N = "20.00"
                  }},
                { "Dimensions", new AttributeValue {
                      S = "8.5x11.0x.75"
                  }},
                { "InPublication", new AttributeValue {
                      BOOL = false
                  } }
            }
            };
            client.PutItem(request);
        }

        // Creates the example table with a numeric Id partition key and
        // blocks until it becomes ACTIVE.
        private static void CreateExampleTable()
        {
            Console.WriteLine("\n*** Creating {0} Table ***", tableName);
            var request = new CreateTableRequest
            {
                AttributeDefinitions = new List<AttributeDefinition>()
            {
                new AttributeDefinition
                {
                    AttributeName = "Id",
                    AttributeType = "N"
                }
            },
                KeySchema = new List<KeySchemaElement>
            {
                new KeySchemaElement
                {
                    AttributeName = "Id",
                    KeyType = "HASH" //Partition key
                }
            },
                ProvisionedThroughput = new ProvisionedThroughput
                {
                    ReadCapacityUnits = 5,
                    WriteCapacityUnits = 6
                },
                TableName = tableName
            };

            var response = client.CreateTable(request);
            var result = response;
            var tableDescription = result.TableDescription;
            Console.WriteLine("{1}: {0} \t ReadsPerSec: {2} \t WritesPerSec: {3}",
                      tableDescription.TableStatus,
                      tableDescription.TableName,
                      tableDescription.ProvisionedThroughput.ReadCapacityUnits,
                      tableDescription.ProvisionedThroughput.WriteCapacityUnits);

            string status = tableDescription.TableStatus;
            Console.WriteLine(tableName + " - " + status);

            WaitUntilTableReady(tableName);
        }

        // Deletes the example table if it exists and waits for the deletion.
        private static void DeleteExampleTable()
        {
            try
            {
                Console.WriteLine("\n*** Deleting {0} Table ***", tableName);
                var request = new DeleteTableRequest
                {
                    TableName = tableName
                };

                var response = client.DeleteTable(request);
                var result = response;
                Console.WriteLine("{0} is being deleted...", tableName);
                WaitUntilTableDeleted(tableName);
            }
            catch (ResourceNotFoundException)
            {
                Console.WriteLine("{0} Table delete failed: Table does not exist", tableName);
            }
        }

        // Polls DescribeTable every 5s until the table reports ACTIVE.
        private static void WaitUntilTableReady(string tableName)
        {
            string status = null;
            // Let us wait until table is created. Call DescribeTable.
            do
            {
                System.Threading.Thread.Sleep(5000); // Wait 5 seconds.
                try
                {
                    var res = client.DescribeTable(new DescribeTableRequest
                    {
                        TableName = tableName
                    });

                    Console.WriteLine("Table name: {0}, status: {1}",
                              res.Table.TableName,
                              res.Table.TableStatus);
                    status = res.Table.TableStatus;
                }
                catch (ResourceNotFoundException)
                {
                    // DescribeTable is eventually consistent. So you might
                    // get resource not found. So we handle the potential exception.
                }
            } while (status != "ACTIVE");
        }

        // Polls DescribeTable every 5s until the table is gone.
        private static void WaitUntilTableDeleted(string tableName)
        {
            string status = null;
            // Let us wait until table is deleted. Call DescribeTable.
            do
            {
                System.Threading.Thread.Sleep(5000); // Wait 5 seconds.
                try
                {
                    var res = client.DescribeTable(new DescribeTableRequest
                    {
                        TableName = tableName
                    });

                    Console.WriteLine("Table name: {0}, status: {1}",
                              res.Table.TableName,
                              res.Table.TableStatus);
                    status = res.Table.TableStatus;
                }
                catch (ResourceNotFoundException)
                {
                    Console.WriteLine("Table name: {0} is not found. It is deleted", tableName);
                    return;
                }
            } while (status == "DELETING");
        }
    }
}
// snippet-end:[dynamodb.dotNET.CodeExample.LowLevelParallelScan]
apache-2.0
shaolinwu/uimaster
modules/datamodel/src/main/java/org/shaolin/bmdp/datamodel/page/OpSetVariableType.java
2683
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.6
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2014.09.20 at 04:07:50 PM CST
//
// NOTE(review): generated JAXB binding — regenerate from the Page schema
// rather than hand-editing.

package org.shaolin.bmdp.datamodel.page;

import java.io.Serializable;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlType;
import org.shaolin.bmdp.datamodel.common.ExpressionType;

/**
 * Single expression. Most commonly used.
 *
 * <p>Java class for OpSetVariableType complex type.
 *
 * <p>The following schema fragment specifies the expected content contained within this class.
 *
 * <pre>
 * &lt;complexType name="OpSetVariableType">
 *   &lt;complexContent>
 *     &lt;extension base="{http://bmdp.shaolin.org/datamodel/Page}OpType">
 *       &lt;sequence>
 *         &lt;element name="exp" type="{http://bmdp.shaolin.org/datamodel/Common}ExpressionType"/>
 *       &lt;/sequence>
 *       &lt;attribute name="varName" type="{http://www.w3.org/2001/XMLSchema}string" />
 *     &lt;/extension>
 *   &lt;/complexContent>
 * &lt;/complexType>
 * </pre>
 *
 *
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "OpSetVariableType", propOrder = {
    "exp"
})
public class OpSetVariableType
    extends OpType
    implements Serializable
{

    private final static long serialVersionUID = 1L;
    /** Expression whose evaluated result is stored into the variable. */
    @XmlElement(required = true)
    protected ExpressionType exp;
    /** Name of the page variable to assign; maps to the "varName" XML attribute. */
    @XmlAttribute(name = "varName")
    protected String varName;

    /**
     * Gets the value of the exp property.
     *
     * @return
     *     possible object is
     *     {@link ExpressionType }
     *
     */
    public ExpressionType getExp() {
        return exp;
    }

    /**
     * Sets the value of the exp property.
     *
     * @param value
     *     allowed object is
     *     {@link ExpressionType }
     *
     */
    public void setExp(ExpressionType value) {
        this.exp = value;
    }

    /**
     * Gets the value of the varName property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getVarName() {
        return varName;
    }

    /**
     * Sets the value of the varName property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setVarName(String value) {
        this.varName = value;
    }

}
apache-2.0
quantumarun/Xamarin.iOS
D2.Core/D2.iOS/ViewController.designer.cs
443
// WARNING
//
// This file has been generated automatically by Xamarin Studio from the outlets and
// actions declared in your storyboard file.
// Manual changes to this file will not be maintained.
//
using Foundation;
using System;
using System.CodeDom.Compiler;

namespace D2.iOS
{
	[Register ("ViewController")]
	partial class ViewController
	{
		// Invoked by the generated designer code to release outlet references;
		// empty because the storyboard declares no outlets or actions for this
		// controller.
		void ReleaseDesignerOutlets ()
		{
		}
	}
}
apache-2.0
mykmelez/qbrt
test/devtools/window.js
2874
/* Copyright 2017 Mozilla * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ 'use strict'; const { classes: Cc, interfaces: Ci, results: Cr, utils: Cu } = Components; const { Runtime } = Cu.import('resource://qbrt/modules/Runtime.jsm', {}); const { Services } = Cu.import('resource://gre/modules/Services.jsm', {}); function loadDevToolsWindow(target) { const devToolsWindow = Runtime.openDevTools(target); return new Promise(resolve => { devToolsWindow.addEventListener('DOMContentLoaded', resolve); }) .then(() => { // Wait for the DevTools window's title to change, which indicates // that the window has successfully connected to its target. // We dump the window title so the test can check stdout to confirm // that the correct set of windows was loaded. return new Promise(resolve => { const observer = new MutationObserver(mutations => { for (const mutation of mutations) { if (mutation.attributeName === 'title') { dump(`${devToolsWindow.document.title}\n`); observer.disconnect(); resolve(); } } }); observer.observe(devToolsWindow.document.querySelector('window'), { attributes: true }); }); }) .then(() => { // Sleep for a second to work around a deadlock on Mac in CGLClearDrawable // <https://bugzilla.mozilla.org/show_bug.cgi?id=1369207>. 
return new Promise(resolve => window.setTimeout(resolve, 1000)); }) .then(() => { return new Promise(resolve => { devToolsWindow.addEventListener('unload', resolve); devToolsWindow.close(); }); }); } const targets = [ document.getElementById('browser-chrome'), document.getElementById('browser-content'), window, ]; // Wait for all targets to load but then test them serially to avoid failures // that seem to occur when two DevTools windows are opened at the same time // (TypeError: eventLoop is undefined at devtools/server/actors/script.js:545). Promise.all(targets.map(target => new Promise(resolve => target.addEventListener('load', resolve, true, true)))) .then(async () => { for (const target of targets) { await loadDevToolsWindow(target); } }) .then(() => { Services.startup.quit(Ci.nsIAppStartup.eForceQuit); }) .catch(error => { dump(`${error}\n`); Services.startup.quit(Ci.nsIAppStartup.eForceQuit); });
apache-2.0
googleapis/python-iam
setup.py
2813
# -*- coding: utf-8 -*-
#
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import io
import os

import setuptools

# Distribution metadata.
name = "google-cloud-iam"
description = "IAM Service Account Credentials API client library"
version = "2.5.1"

# Should be one of:
# 'Development Status :: 3 - Alpha'
# 'Development Status :: 4 - Beta'
# 'Development Status :: 5 - Production/Stable'
release_status = "Development Status :: 5 - Production/Stable"

dependencies = [
    # NOTE: Maintainers, please do not require google-api-core>=2.x.x
    # Until this issue is closed
    # https://github.com/googleapis/google-cloud-python/issues/10566
    "google-api-core[grpc] >= 1.28.0, <3.0.0dev",
    "proto-plus >= 0.4.0",
]

# Use the README that sits next to this file as the long description.
package_root = os.path.abspath(os.path.dirname(__file__))
with io.open(os.path.join(package_root, "README.rst"), encoding="utf-8") as fh:
    readme = fh.read()

# Only ship the google.* namespace packages.
packages = [
    pkg for pkg in setuptools.PEP420PackageFinder.find() if pkg.startswith("google")
]

namespaces = ["google"]
if "google.cloud" in packages:
    namespaces += ["google.cloud"]

setuptools.setup(
    name=name,
    version=version,
    description=description,
    long_description=readme,
    author="Google LLC",
    author_email="googleapis-packages@google.com",
    license="Apache 2.0",
    url="https://github.com/googleapis/python-iam",
    classifiers=[
        release_status,
        "Intended Audience :: Developers",
        "License :: OSI Approved :: Apache Software License",
        "Programming Language :: Python",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.6",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: 3.9",
        "Programming Language :: Python :: 3.10",
        "Operating System :: OS Independent",
        "Topic :: Internet",
    ],
    platforms="Posix; MacOS X; Windows",
    packages=packages,
    namespace_packages=namespaces,
    install_requires=dependencies,
    extras_require={"fixup": ["libcst >= 0.2.5"]},
    python_requires=">=3.6",
    scripts=["scripts/fixup_iam_credentials_v1_keywords.py"],
    include_package_data=True,
    zip_safe=False,
)
apache-2.0
spring-projects/spring-framework
spring-beans/src/main/java/org/springframework/beans/factory/support/GenericTypeAwareAutowireCandidateResolver.java
7117
/*
 * Copyright 2002-2021 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.beans.factory.support;

import java.lang.reflect.Method;
import java.util.Properties;

import org.springframework.beans.factory.BeanFactory;
import org.springframework.beans.factory.BeanFactoryAware;
import org.springframework.beans.factory.FactoryBean;
import org.springframework.beans.factory.config.BeanDefinition;
import org.springframework.beans.factory.config.BeanDefinitionHolder;
import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
import org.springframework.beans.factory.config.DependencyDescriptor;
import org.springframework.core.ResolvableType;
import org.springframework.lang.Nullable;
import org.springframework.util.ClassUtils;

/**
 * Basic {@link AutowireCandidateResolver} that performs a full generic type
 * match with the candidate's type if the dependency is declared as a generic type
 * (e.g. Repository&lt;Customer&gt;).
 *
 * <p>This is the base class for
 * {@link org.springframework.beans.factory.annotation.QualifierAnnotationAutowireCandidateResolver},
 * providing an implementation all non-annotation-based resolution steps at this level.
 *
 * @author Juergen Hoeller
 * @since 4.0
 */
public class GenericTypeAwareAutowireCandidateResolver extends SimpleAutowireCandidateResolver
		implements BeanFactoryAware, Cloneable {

	@Nullable
	private BeanFactory beanFactory;


	/** Stores the owning factory for type lookups during candidate checks. */
	@Override
	public void setBeanFactory(BeanFactory beanFactory) {
		this.beanFactory = beanFactory;
	}

	/** Returns the owning {@link BeanFactory}, or {@code null} if none was set. */
	@Nullable
	protected final BeanFactory getBeanFactory() {
		return this.beanFactory;
	}


	/**
	 * First applies the superclass's (non-generic) candidate check, then adds a
	 * full generic type match via {@link #checkGenericTypeMatch}.
	 */
	@Override
	public boolean isAutowireCandidate(BeanDefinitionHolder bdHolder, DependencyDescriptor descriptor) {
		if (!super.isAutowireCandidate(bdHolder, descriptor)) {
			// If explicitly false, do not proceed with any other checks...
			return false;
		}
		return checkGenericTypeMatch(bdHolder, descriptor);
	}

	/**
	 * Match the given dependency type with its generic type information against the given
	 * candidate bean definition.
	 */
	protected boolean checkGenericTypeMatch(BeanDefinitionHolder bdHolder, DependencyDescriptor descriptor) {
		ResolvableType dependencyType = descriptor.getResolvableType();
		if (dependencyType.getType() instanceof Class) {
			// No generic type -> we know it's a Class type-match, so no need to check again.
			return true;
		}

		ResolvableType targetType = null;
		boolean cacheType = false;
		RootBeanDefinition rbd = null;
		if (bdHolder.getBeanDefinition() instanceof RootBeanDefinition) {
			rbd = (RootBeanDefinition) bdHolder.getBeanDefinition();
		}
		if (rbd != null) {
			targetType = rbd.targetType;
			if (targetType == null) {
				cacheType = true;
				// First, check factory method return type, if applicable
				targetType = getReturnTypeForFactoryMethod(rbd, descriptor);
				if (targetType == null) {
					// Fall back to the decorated definition's type, if any.
					RootBeanDefinition dbd = getResolvedDecoratedDefinition(rbd);
					if (dbd != null) {
						targetType = dbd.targetType;
						if (targetType == null) {
							targetType = getReturnTypeForFactoryMethod(dbd, descriptor);
						}
					}
				}
			}
		}

		if (targetType == null) {
			// Regular case: straight bean instance, with BeanFactory available.
			if (this.beanFactory != null) {
				Class<?> beanType = this.beanFactory.getType(bdHolder.getBeanName());
				if (beanType != null) {
					targetType = ResolvableType.forClass(ClassUtils.getUserClass(beanType));
				}
			}
			// Fallback: no BeanFactory set, or no type resolvable through it
			// -> best-effort match against the target class if applicable.
			if (targetType == null && rbd != null && rbd.hasBeanClass() && rbd.getFactoryMethodName() == null) {
				Class<?> beanClass = rbd.getBeanClass();
				if (!FactoryBean.class.isAssignableFrom(beanClass)) {
					targetType = ResolvableType.forClass(ClassUtils.getUserClass(beanClass));
				}
			}
		}

		if (targetType == null) {
			// Type could not be determined at all -> be lenient and accept.
			return true;
		}
		if (cacheType) {
			rbd.targetType = targetType;
		}
		if (descriptor.fallbackMatchAllowed() &&
				(targetType.hasUnresolvableGenerics() || targetType.resolve() == Properties.class)) {
			// Fallback matches allow unresolvable generics, e.g. plain HashMap to Map<String,String>;
			// and pragmatically also java.util.Properties to any Map (since despite formally being a
			// Map<Object,Object>, java.util.Properties is usually perceived as a Map<String,String>).
			return true;
		}
		// Full check for complex generic type match...
		return dependencyType.isAssignableFrom(targetType);
	}

	/**
	 * Returns the merged {@link RootBeanDefinition} that the given definition
	 * decorates, if registered in a {@link ConfigurableListableBeanFactory};
	 * {@code null} otherwise.
	 */
	@Nullable
	protected RootBeanDefinition getResolvedDecoratedDefinition(RootBeanDefinition rbd) {
		BeanDefinitionHolder decDef = rbd.getDecoratedDefinition();
		if (decDef != null && this.beanFactory instanceof ConfigurableListableBeanFactory clbf) {
			if (clbf.containsBeanDefinition(decDef.getBeanName())) {
				BeanDefinition dbd = clbf.getMergedBeanDefinition(decDef.getBeanName());
				if (dbd instanceof RootBeanDefinition) {
					return (RootBeanDefinition) dbd;
				}
			}
		}
		return null;
	}

	/**
	 * Determines the factory method return type for the given definition, if that
	 * return type is expressive enough for the dependency; {@code null} otherwise.
	 */
	@Nullable
	protected ResolvableType getReturnTypeForFactoryMethod(RootBeanDefinition rbd, DependencyDescriptor descriptor) {
		// Should typically be set for any kind of factory method, since the BeanFactory
		// pre-resolves them before reaching out to the AutowireCandidateResolver...
		ResolvableType returnType = rbd.factoryMethodReturnType;
		if (returnType == null) {
			Method factoryMethod = rbd.getResolvedFactoryMethod();
			if (factoryMethod != null) {
				returnType = ResolvableType.forMethodReturnType(factoryMethod);
			}
		}
		if (returnType != null) {
			Class<?> resolvedClass = returnType.resolve();
			if (resolvedClass != null && descriptor.getDependencyType().isAssignableFrom(resolvedClass)) {
				// Only use factory method metadata if the return type is actually expressive enough
				// for our dependency. Otherwise, the returned instance type may have matched instead
				// in case of a singleton instance having been registered with the container already.
				return returnType;
			}
		}
		return null;
	}


	/**
	 * This implementation clones all instance fields through standard
	 * {@link Cloneable} support, allowing for subsequent reconfiguration
	 * of the cloned instance through a fresh {@link #setBeanFactory} call.
	 * @see #clone()
	 */
	@Override
	public AutowireCandidateResolver cloneIfNecessary() {
		try {
			return (AutowireCandidateResolver) clone();
		}
		catch (CloneNotSupportedException ex) {
			throw new IllegalStateException(ex);
		}
	}

}