Dataset columns: repo_name (string, 4-116 chars), path (string, 4-379 chars), size (string, 1-7 chars), content (string, 3-1.05M chars), license (15 classes)
testify-project/testify
modules/junit4/jersey2-system-test/src/test/java/org/testifyproject/junit4/fixture/web/GreetingApplication.java
1148
/*
 * Copyright 2016-2017 Testify Project.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.testifyproject.junit4.fixture.web;

import javax.ws.rs.ApplicationPath;

import org.glassfish.jersey.server.ResourceConfig;
import org.testifyproject.junit4.fixture.web.feature.ServiceLocatorConfigFeature;

/**
 *
 * @author saden
 */
@ApplicationPath("/")
public class GreetingApplication extends ResourceConfig {

    public GreetingApplication() {
        packages(true, "org.testifyproject.junit4.fixture.web");
        setApplicationName("Greeting Application");
        register(ServiceLocatorConfigFeature.class);
    }
}
apache-2.0
GoogleCloudPlatform/dotnet-docs-samples
cloud-sql/sql-server/Startup.cs
6978
/* * Copyright (c) 2019 Google LLC. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ using Google.Cloud.Diagnostics.AspNetCore; using Microsoft.AspNetCore.Builder; using Microsoft.AspNetCore.Hosting; using Microsoft.Extensions.Configuration; using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.Logging; using Polly; using System; using System.Data; using System.Data.Common; using System.Data.SqlClient; namespace CloudSql { public class Startup { public IConfiguration Configuration { get; } public Startup(IConfiguration configuration) { Configuration = configuration; } // This method gets called by the runtime. Use this method to add // services to the container. // For more information on how to configure your application, visit // http://go.microsoft.com/fwlink/?LinkID=398940 public void ConfigureServices(IServiceCollection services) { services.AddSingleton(sp => StartupExtensions.GetSqlServerConnectionString()); services.AddMvc(options => { options.Filters.Add(typeof(DbExceptionFilterAttribute)); }); } // This method gets called by the runtime. Use this method to configure the HTTP request pipeline. public void Configure(IApplicationBuilder app, IHostingEnvironment env, ILoggerFactory loggerFactory) { loggerFactory.AddConsole(Configuration.GetSection("Logging")); if (env.IsDevelopment()) { app.UseDeveloperExceptionPage(); } else { // Configure error reporting service. app.UseExceptionHandler("/Home/Error"); } app.UseMvc(routes => { routes.MapRoute( name: "default", template: "{controller=Home}/{action=Index}/{id?}"); }); } } static class StartupExtensions { public static void OpenWithRetry(this DbConnection connection) => // [START cloud_sql_sqlserver_dotnet_ado_backoff] Policy .Handle<SqlException>() .WaitAndRetry(new[] { TimeSpan.FromSeconds(1), TimeSpan.FromSeconds(2), TimeSpan.FromSeconds(5) }) .Execute(() => connection.Open()); // [END cloud_sql_sqlserver_dotnet_ado_backoff] public static void InitializeDatabase() { var connectionString = GetSqlServerConnectionString(); using(DbConnection connection = new SqlConnection(connectionString.ConnectionString)) { connection.OpenWithRetry(); using (var createTableCommand = connection.CreateCommand()) { // Create the 'votes' table if it does not already exist. createTableCommand.CommandText = @" IF OBJECT_ID(N'dbo.votes', N'U') IS NULL BEGIN CREATE TABLE dbo.votes( vote_id INT NOT NULL IDENTITY(1, 1) PRIMARY KEY, time_cast datetime NOT NULL, candidate CHAR(6) NOT NULL) END"; createTableCommand.ExecuteNonQuery(); } } } public static SqlConnectionStringBuilder GetSqlServerConnectionString() { // [START cloud_sql_sqlserver_dotnet_ado_connection_tcp] // Equivalent connection string: // "User Id=<DB_USER>;Password=<DB_PASS>;Server=<DB_HOST>;Database=<DB_NAME>;" var connectionString = new SqlConnectionStringBuilder() { // Remember - storing secrets in plain text is potentially unsafe. Consider using // something like https://cloud.google.com/secret-manager/docs/overview to help keep // secrets secret. 
DataSource = Environment.GetEnvironmentVariable("DB_HOST"), // e.g. '127.0.0.1' // Set Host to 'cloudsql' when deploying to App Engine Flexible environment UserID = Environment.GetEnvironmentVariable("DB_USER"), // e.g. 'my-db-user' Password = Environment.GetEnvironmentVariable("DB_PASS"), // e.g. 'my-db-password' InitialCatalog = Environment.GetEnvironmentVariable("DB_NAME"), // e.g. 'my-database' // The Cloud SQL proxy provides encryption between the proxy and instance Encrypt = false, }; connectionString.Pooling = true; // [START_EXCLUDE] // The values set here are for demonstration purposes only. You // should set these values to what works best for your application. // [START cloud_sql_sqlserver_dotnet_ado_limit] // MaximumPoolSize sets maximum number of connections allowed in the pool. connectionString.MaxPoolSize = 5; // MinimumPoolSize sets the minimum number of connections in the pool. connectionString.MinPoolSize = 0; // [END cloud_sql_sqlserver_dotnet_ado_limit] // [START cloud_sql_sqlserver_dotnet_ado_timeout] // ConnectionTimeout sets the time to wait (in seconds) while // trying to establish a connection before terminating the attempt. connectionString.ConnectTimeout = 15; // [END cloud_sql_sqlserver_dotnet_ado_timeout] // [START cloud_sql_sqlserver_dotnet_ado_lifetime] // ADO.NET connection pooler removes a connection // from the pool after it's been idle for approximately // 4-8 minutes, or if the pooler detects that the // connection with the server no longer exists. // [END cloud_sql_sqlserver_dotnet_ado_lifetime] connectionString.TrustServerCertificate = true; // [END_EXCLUDE] return connectionString; // [END cloud_sql_sqlserver_dotnet_ado_connection_tcp] } } }
apache-2.0
leuschel/logen
old_logen/pylogen/MakeLogen.py
5719
#sicstus --goal "set_prolog_flag(single_var_warnings,off), ensure_loaded('sicstus.pl'),ensure_loaded('socket.pl'),ensure_loaded('logen_main.pl'),save_program('logen_main.sav'),halt." 2>&1 | grep -v "^%" | grep -v "from clpq is private" import os import development class PrologSav: def __init__(self,name, depends, buildcmd): self.name = name self.depends = depends self.buildcmd = buildcmd def uptodate(self): if not os.path.exists(self.name): return False save_state = os.stat(self.name).st_mtime for file in self.depends: time_file = os.stat(file).st_mtime if time_file > save_state: return False return True def write_to_splash(self,splash=None,text=""): if splash != None: splash.text.insert("end",text) splash.update() else: print text def refresh(self,splash=None): self.write_to_splash(splash,"\n\nChecking %s..." % self.name) if not self.uptodate(): self.write_to_splash(splash,"Out of date!\nRebuilding save state:" ) self.build() self.write_to_splash(splash,"\n%s Rebuilt" % self.name) else: self.write_to_splash(splash,"OK") print self.name, "is is uptodate" def build(self): os.system("%s %s" %(self.buildcmd,"2>&1 | grep -v \"^%\" | grep -v \"from clpq is private\" | grep -v \"^SICStus 3\" | grep -v \"$Licencensed to \"") ) #print self.buildcmd #os.system(self.buildcmd) print "Rebuilt %s" % (self.name) class Logen_Saves: def __init__(self): logen_depends = ['ann_db.pl', 'annfile.pl', 'bta.pl', 'builtin_db.pl', 'cogen-tools.pl', 'cogen.pl', 'flags.pl', 'gensym.pl', 'logen_annotation.pl', 'logen_attributes.pl', 'logen_benchmark.pl', 'logen_benchmark_loop.pl', 'logen_codesize.pl', 'logen_filter.pl', 'logen_main.pl', 'logen_messages.pl', 'logen_post.pl', 'pp/inline.pl', 'pp/saver.pl', 'pp/deadcode.pl', 'pp/loader.pl', 'memoizer.pl', 'moduledriver.pl', 'pylogen_main.pl', 'pp.pl', 'run_gx.pl', 'sicstus.pl', 'sicstus_term.pl', 'socket.pl', 'annotation/match_ann.pl', 'annotation/match_unknowns.pl', 'annotation/save_ann.pl', 'annotation/tokens.pl', 'annotation/parser.pl', 'annotation/read.pl', 'prob/logen_preferences.pl'] self.logen=PrologSav('pylogen_main.sav', logen_depends, "sicstus --goal \"set_prolog_flag(single_var_warnings,off), ensure_loaded('pylogen_main.pl'),save_program('pylogen_main.sav'),halt.\" ") # spld --static --output pylogen_main --resources=logen_main.sav=/pylogen_main.sav bta_depends = [ 'auto_bta.pl', 'bta_driver.pl', 'convex_analyser.pl', 'convex_hull.pl', 'convex_norm.pl', 'builtin_norms.pl', 'filter_prop/auto_bta.pl', 'filter_prop/filters.pl', 'filter_prop/logen/filter.pl', 'filter_prop/abstractBuiltins.pl', 'filter_prop/abstractCallSucc.pl', 'filter_prop/logen/logen_create_types.pl', 'filter_prop/readprog.pl', 'run_binsolve.pl', 'run_bta.pl', 'bta_files/bin_loader.pl', 'bta_files/bta_pp.pl', 'bta_files/pp_annfile.pl', 'bta_files/pp_iftransform.pl', ] bta_depends.extend(logen_depends) self.localbta=PrologSav('run_bta.sav', bta_depends, #"sicstus --goal \"set_prolog_flag(single_var_warnings,off),prolog_flag(compiling,_,profiledcode),ensure_loaded('run_bta.pl'),save,halt.\" 2>&1| grep -v \"^%\" | grep -v \"from clpq is private\"" "sicstus --goal \"set_prolog_flag(single_var_warnings,off),ensure_loaded('run_bta.pl'),save,halt.\" 2>&1| grep -v \"^%\" | grep -v \"from clpq is private\"" ) #self.localbta_profile =PrologSav('bta.sav', # bta_depends, # "sicstus --goal \"set_prolog_flag(single_var_warnings,off),prolog_flag(compiling,_,profiledcode),ensure_loaded('auto_bta.pl'),compile_bta,halt.\" 2>&1| grep -v \"^%\" | grep -v \"from clpq is private\"" # ) def refresh(self,splash=None): 
if development.get_development(): self.logen.refresh(splash=splash) self.localbta.refresh(splash=splash) else: print "Using precompiled binaries" if __name__=="__main__": print "Manual Make and Check to build sav states" development.set_development_mode(True) l = Logen_Saves() l.refresh()
apache-2.0
tgraf/cilium
pkg/hubble/server/serveroption/option.go
4292
// Copyright 2020 Authors of Hubble // Copyright 2020 Authors of Cilium // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package serveroption import ( "crypto/tls" "fmt" "net" "os" "strings" observerpb "github.com/cilium/cilium/api/v1/observer" peerpb "github.com/cilium/cilium/api/v1/peer" recorderpb "github.com/cilium/cilium/api/v1/recorder" "github.com/cilium/cilium/pkg/api" "github.com/cilium/cilium/pkg/crypto/certloader" v1 "github.com/cilium/cilium/pkg/hubble/api/v1" "golang.org/x/sys/unix" "google.golang.org/grpc/health" healthpb "google.golang.org/grpc/health/grpc_health_v1" ) // MinTLSVersion defines the minimum TLS version clients are expected to // support in order to establish a connection to the hubble server. const MinTLSVersion = tls.VersionTLS13 // Options stores all the configuration values for the hubble server. type Options struct { Listener net.Listener HealthService healthpb.HealthServer ObserverService observerpb.ObserverServer PeerService peerpb.PeerServer RecorderService recorderpb.RecorderServer ServerTLSConfig certloader.ServerConfigBuilder Insecure bool } // Option customizes then configuration of the hubble server. type Option func(o *Options) error // WithTCPListener configures a TCP listener with the address. func WithTCPListener(address string) Option { return func(o *Options) error { socket, err := net.Listen("tcp", address) if err != nil { return err } if o.Listener != nil { socket.Close() return fmt.Errorf("listener already configured: %s", address) } o.Listener = socket return nil } } // WithUnixSocketListener configures a unix domain socket listener with the // given file path. When the process runs in privileged mode, the file group // owner is set to socketGroup. func WithUnixSocketListener(path string) Option { return func(o *Options) error { if o.Listener != nil { return fmt.Errorf("listener already configured") } socketPath := strings.TrimPrefix(path, "unix://") unix.Unlink(socketPath) socket, err := net.Listen("unix", socketPath) if err != nil { return err } if os.Getuid() == 0 { if err := api.SetDefaultPermissions(socketPath); err != nil { socket.Close() return err } } o.Listener = socket return nil } } // WithHealthService configures the server to expose the gRPC health service. func WithHealthService() Option { return func(o *Options) error { healthSvc := health.NewServer() healthSvc.SetServingStatus(v1.ObserverServiceName, healthpb.HealthCheckResponse_SERVING) o.HealthService = healthSvc return nil } } // WithObserverService configures the server to expose the given observer server service. func WithObserverService(svc observerpb.ObserverServer) Option { return func(o *Options) error { o.ObserverService = svc return nil } } // WithPeerService configures the server to expose the given peer server service. func WithPeerService(svc peerpb.PeerServer) Option { return func(o *Options) error { o.PeerService = svc return nil } } // WithInsecure disables transport security. Transport security is required // unless WithInsecure is set. 
Use WithTLS to set transport credentials for // transport security. func WithInsecure() Option { return func(o *Options) error { o.Insecure = true return nil } } // WithServerTLS sets the transport credentials for the server based on TLS. func WithServerTLS(cfg certloader.ServerConfigBuilder) Option { return func(o *Options) error { o.ServerTLSConfig = cfg return nil } } // WithPeerService configures the server to expose the given peer server service. func WithRecorderService(svc recorderpb.RecorderServer) Option { return func(o *Options) error { o.RecorderService = svc return nil } }
apache-2.0
istio/cni
test/install_k8s_test.go
6711
// Copyright 2018 Istio Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // This tests the k8s installation. It validates the CNI plugin configuration // and the existence of the CNI plugin binary locations. package install_test import ( "fmt" "os" "testing" //"github.com/nsf/jsondiff" "istio.io/cni/deployments/kubernetes/install/test" ) var ( TestWorkDir, _ = os.Getwd() Hub = "gcr.io/istio-release" Tag = "master-latest-daily" ) type testCase struct { name string preConfFile string resultFileName string expectedOutputFile string expectedPostCleanFile string cniConfDirOrderedFiles []string } func doTest(testNum int, tc testCase, t *testing.T) { _ = os.Setenv("HUB", Hub) _ = os.Setenv("TAG", Tag) t.Logf("Running install CNI test with HUB=%s, TAG=%s", Hub, Tag) test.RunInstallCNITest(testNum, tc.preConfFile, tc.resultFileName, tc.expectedOutputFile, tc.expectedPostCleanFile, tc.cniConfDirOrderedFiles, t) } func TestInstall(t *testing.T) { envHub := os.Getenv("HUB") if envHub != "" { Hub = envHub } envTag := os.Getenv("TAG") if envTag != "" { Tag = envTag } t.Logf("HUB=%s, TAG=%s", Hub, Tag) testDataDir := TestWorkDir + "/../deployments/kubernetes/install/test/data" cases := []testCase{ { name: "File with pre-plugins--.conflist", preConfFile: "00-calico.conflist", resultFileName: "00-calico.conflist", expectedOutputFile: testDataDir + "/expected/10-calico.conflist-istioconfig", expectedPostCleanFile: testDataDir + "/pre/calico.conflist", cniConfDirOrderedFiles: []string{"calico.conflist"}, }, { name: "File without pre-plugins--.conf", preConfFile: "00-minikube_cni.conf", resultFileName: "00-minikube_cni.conflist", expectedOutputFile: testDataDir + "/expected/minikube_cni.conflist.expected", expectedPostCleanFile: testDataDir + "/expected/minikube_cni.conflist.clean", cniConfDirOrderedFiles: []string{"minikube_cni.conf"}, }, { name: "First file with pre-plugins--.conflist", preConfFile: "NONE", resultFileName: "00-calico.conflist", expectedOutputFile: testDataDir + "/expected/10-calico.conflist-istioconfig", expectedPostCleanFile: testDataDir + "/pre/calico.conflist", cniConfDirOrderedFiles: []string{"calico.conflist", "minikube_cni.conf"}, }, { name: "First file without pre-plugins--.conf", preConfFile: "NONE", resultFileName: "00-minikube_cni.conflist", expectedOutputFile: testDataDir + "/expected/minikube_cni.conflist.expected", expectedPostCleanFile: testDataDir + "/expected/minikube_cni.conflist.clean", cniConfDirOrderedFiles: []string{"minikube_cni.conf", "calico.conflist"}, }, { name: "Skip non-json file for first valid .conf file", preConfFile: "NONE", resultFileName: "01-minikube_cni.conflist", expectedOutputFile: testDataDir + "/expected/minikube_cni.conflist.expected", expectedPostCleanFile: testDataDir + "/expected/minikube_cni.conflist.clean", cniConfDirOrderedFiles: []string{"non_json.conf", "minikube_cni.conf", "calico.conflist"}, }, { name: "Skip non-json file for first valid .conflist file", preConfFile: "NONE", resultFileName: "01-calico.conflist", expectedOutputFile: 
testDataDir + "/expected/10-calico.conflist-istioconfig", expectedPostCleanFile: testDataDir + "/pre/calico.conflist", cniConfDirOrderedFiles: []string{"non_json.conf", "calico.conflist", "minikube_cni.conf"}, }, { name: "Skip invalid .conf file for first valid .conf file", preConfFile: "NONE", resultFileName: "01-minikube_cni.conflist", expectedOutputFile: testDataDir + "/expected/minikube_cni.conflist.expected", expectedPostCleanFile: testDataDir + "/expected/minikube_cni.conflist.clean", cniConfDirOrderedFiles: []string{"bad_minikube_cni.conf", "minikube_cni.conf", "calico.conflist"}, }, { name: "Skip invalid .conf file for first valid .conflist file", preConfFile: "NONE", resultFileName: "01-calico.conflist", expectedOutputFile: testDataDir + "/expected/10-calico.conflist-istioconfig", expectedPostCleanFile: testDataDir + "/pre/calico.conflist", cniConfDirOrderedFiles: []string{"bad_minikube_cni.conf", "calico.conflist", "minikube_cni.conf"}, }, { name: "Skip invalid .conflist files for first valid .conf file", preConfFile: "NONE", resultFileName: "02-minikube_cni.conflist", expectedOutputFile: testDataDir + "/expected/minikube_cni.conflist.expected", expectedPostCleanFile: testDataDir + "/expected/minikube_cni.conflist.clean", cniConfDirOrderedFiles: []string{"noname_calico.conflist", "noplugins_calico.conflist", "minikube_cni.conf", "calico.conflist"}, }, { name: "Skip invalid .conflist files for first valid .conflist file", preConfFile: "NONE", resultFileName: "02-calico.conflist", expectedOutputFile: testDataDir + "/expected/10-calico.conflist-istioconfig", expectedPostCleanFile: testDataDir + "/pre/calico.conflist", cniConfDirOrderedFiles: []string{"noname_calico.conflist", "noplugins_calico.conflist", "calico.conflist", "minikube_cni.conf"}, }, { name: "confFile env var point to missing .conf with valid .conflist file", preConfFile: "00-calico.conf", resultFileName: "00-calico.conflist", expectedOutputFile: testDataDir + "/expected/10-calico.conflist-istioconfig", expectedPostCleanFile: testDataDir + "/pre/calico.conflist", cniConfDirOrderedFiles: []string{"calico.conflist"}, }, } for i, c := range cases { t.Run(fmt.Sprintf("case %d %s", i, c.name), func(t *testing.T) { t.Logf("%s: Test preconf %s, expected %s", c.name, c.preConfFile, c.expectedOutputFile) doTest(i, c, t) }) } }
apache-2.0
yuzhiping/jeeplus
jeeplus-weixin/src/main/java/com/jeeplus/weixin/fastweixin/company/message/resp/QYVoiceRespMsg.java
1231
package com.jeeplus.weixin.fastweixin.company.message.resp;

import com.jeeplus.weixin.fastweixin.message.RespType;
import com.jeeplus.weixin.fastweixin.message.util.MessageBuilder;

/**
 * Passive voice response message for a WeChat Enterprise Account.
 * ====================================================================
 * 上海聚攒软件开发有限公司
 * --------------------------------------------------------------------
 * @author Nottyjay
 * @version 1.0.beta
 * @since 1.3.6
 * ====================================================================
 */
public class QYVoiceRespMsg extends QYBaseRespMsg {

    private String mediaId;

    public QYVoiceRespMsg() {
    }

    public QYVoiceRespMsg(String mediaId) {
        this.mediaId = mediaId;
    }

    public String getMediaId() {
        return mediaId;
    }

    public void setMediaId(String mediaId) {
        this.mediaId = mediaId;
    }

    @Override
    public String toXml() {
        MessageBuilder mb = new MessageBuilder(super.toXml());
        mb.addData("MsgType", RespType.VOICE);
        mb.append("<Voice>\n");
        mb.addData("MediaId", mediaId);
        mb.append("</Voice>\n");
        mb.surroundWith("xml");
        return mb.toString();
    }
}
apache-2.0
vam-google/google-cloud-java
google-api-grpc/proto-google-cloud-bigtable-admin-v2/src/main/java/com/google/bigtable/admin/v2/GetTableRequestOrBuilder.java
1432
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/bigtable/admin/v2/bigtable_table_admin.proto package com.google.bigtable.admin.v2; public interface GetTableRequestOrBuilder extends // @@protoc_insertion_point(interface_extends:google.bigtable.admin.v2.GetTableRequest) com.google.protobuf.MessageOrBuilder { /** * * * <pre> * The unique name of the requested table. * Values are of the form * `projects/&lt;project&gt;/instances/&lt;instance&gt;/tables/&lt;table&gt;`. * </pre> * * <code>string name = 1;</code> */ java.lang.String getName(); /** * * * <pre> * The unique name of the requested table. * Values are of the form * `projects/&lt;project&gt;/instances/&lt;instance&gt;/tables/&lt;table&gt;`. * </pre> * * <code>string name = 1;</code> */ com.google.protobuf.ByteString getNameBytes(); /** * * * <pre> * The view to be applied to the returned table's fields. * Defaults to `SCHEMA_VIEW` if unspecified. * </pre> * * <code>.google.bigtable.admin.v2.Table.View view = 2;</code> */ int getViewValue(); /** * * * <pre> * The view to be applied to the returned table's fields. * Defaults to `SCHEMA_VIEW` if unspecified. * </pre> * * <code>.google.bigtable.admin.v2.Table.View view = 2;</code> */ com.google.bigtable.admin.v2.Table.View getView(); }
apache-2.0
tormozz48/2do2go.ru
test/task1/transformers/output/csv.js
1464
'use strict';

const CSVOutputTransformer = require('../../../../src/task1/transformers/output/csv');

describe('src/task1/transformers/output/csv', () => {
    it('should properly create output in csv format', () => {
        const csvOutputTransformer = new CSVOutputTransformer({});
        const inputData = [
            {foo: 'some-foo1', bar: 'some-bar1'},
            {foo: 'some-foo2', bar: 'some-bar2'}
        ];
        assert.equal(
            csvOutputTransformer.run(inputData),
            '"some-foo1","some-bar1"\r\n"some-foo2","some-bar2"'
        );
    });

    it('should use custom configured delimiter', () => {
        const csvOutputTransformer = new CSVOutputTransformer({delimiter: '||'});
        const inputData = [
            {foo: 'some-foo1', bar: 'some-bar1'},
            {foo: 'some-foo2', bar: 'some-bar2'}
        ];
        assert.equal(
            csvOutputTransformer.run(inputData),
            '"some-foo1"||"some-bar1"\r\n"some-foo2"||"some-bar2"'
        );
    });

    it('should optionally add fields as header', () => {
        const csvOutputTransformer = new CSVOutputTransformer({addHeader: true});
        const inputData = [
            {foo: 'some-foo1', bar: 'some-bar1'},
            {foo: 'some-foo2', bar: 'some-bar2'}
        ];
        assert.equal(
            csvOutputTransformer.run(inputData),
            'foo,bar\r\n"some-foo1","some-bar1"\r\n"some-foo2","some-bar2"'
        );
    });
});
apache-2.0
harryouyang/fristApp
app/src/androidTest/java/com/pangu/fristapp/ApplicationTest.java
349
package com.pangu.fristapp;

import android.app.Application;
import android.test.ApplicationTestCase;

/**
 * <a href="http://d.android.com/tools/testing/testing_android.html">Testing Fundamentals</a>
 */
public class ApplicationTest extends ApplicationTestCase<Application> {
    public ApplicationTest() {
        super(Application.class);
    }
}
apache-2.0
aNNiMON/HotaruFX
app/src/main/java/com/annimon/hotarufx/visual/PropertyTimeline.java
1059
package com.annimon.hotarufx.visual;

import java.util.Map;
import java.util.TreeMap;
import javafx.animation.Interpolator;
import javafx.beans.value.WritableValue;

public class PropertyTimeline<T> {

    private final WritableValue<T> property;
    private final Map<KeyFrame, KeyFrameValue<T>> keyFrames;

    public PropertyTimeline(WritableValue<T> property) {
        this.property = property;
        keyFrames = new TreeMap<>();
    }

    public WritableValue<T> getProperty() {
        return property;
    }

    public Map<KeyFrame, KeyFrameValue<T>> getKeyFrames() {
        return keyFrames;
    }

    public PropertyTimeline<T> add(KeyFrame keyFrame, T value) {
        keyFrames.put(keyFrame, new KeyFrameValue<>(value));
        return this;
    }

    public PropertyTimeline<T> add(KeyFrame keyFrame, T value, Interpolator interpolator) {
        keyFrames.put(keyFrame, new KeyFrameValue<>(value, interpolator));
        return this;
    }

    public PropertyTimeline<T> clear() {
        keyFrames.clear();
        return this;
    }
}
apache-2.0
nttlabs/cli
cf/api/authentication/authentication.go
4328
package authentication import ( "encoding/base64" "fmt" "net/url" "strings" . "github.com/nttlabs/cli/cf/i18n" "github.com/nttlabs/cli/cf/configuration/core_config" "github.com/nttlabs/cli/cf/errors" "github.com/nttlabs/cli/cf/net" ) type TokenRefresher interface { RefreshAuthToken() (updatedToken string, apiErr error) } type AuthenticationRepository interface { RefreshAuthToken() (updatedToken string, apiErr error) Authenticate(credentials map[string]string) (apiErr error) GetLoginPromptsAndSaveUAAServerURL() (map[string]core_config.AuthPrompt, error) } type UAAAuthenticationRepository struct { config core_config.ReadWriter gateway net.Gateway } func NewUAAAuthenticationRepository(gateway net.Gateway, config core_config.ReadWriter) (uaa UAAAuthenticationRepository) { uaa.gateway = gateway uaa.config = config return } func (uaa UAAAuthenticationRepository) Authenticate(credentials map[string]string) (apiErr error) { data := url.Values{ "grant_type": {"password"}, "scope": {""}, } for key, val := range credentials { data[key] = []string{val} } apiErr = uaa.getAuthToken(data) switch response := apiErr.(type) { case errors.HttpError: if response.StatusCode() == 401 { apiErr = errors.New(T("Credentials were rejected, please try again.")) } } return } type LoginResource struct { Prompts map[string][]string Links map[string]string } var knownAuthPromptTypes = map[string]core_config.AuthPromptType{ "text": core_config.AuthPromptTypeText, "password": core_config.AuthPromptTypePassword, } func (r *LoginResource) parsePrompts() (prompts map[string]core_config.AuthPrompt) { prompts = make(map[string]core_config.AuthPrompt) for key, val := range r.Prompts { prompts[key] = core_config.AuthPrompt{ Type: knownAuthPromptTypes[val[0]], DisplayName: val[1], } } return } func (uaa UAAAuthenticationRepository) GetLoginPromptsAndSaveUAAServerURL() (prompts map[string]core_config.AuthPrompt, apiErr error) { url := fmt.Sprintf("%s/login", uaa.config.AuthenticationEndpoint()) resource := &LoginResource{} apiErr = uaa.gateway.GetResource(url, resource) prompts = resource.parsePrompts() if resource.Links["uaa"] == "" { uaa.config.SetUaaEndpoint(uaa.config.AuthenticationEndpoint()) } else { uaa.config.SetUaaEndpoint(resource.Links["uaa"]) } return } func (uaa UAAAuthenticationRepository) RefreshAuthToken() (string, error) { data := url.Values{ "refresh_token": {uaa.config.RefreshToken()}, "grant_type": {"refresh_token"}, "scope": {""}, } apiErr := uaa.getAuthToken(data) updatedToken := uaa.config.AccessToken() return updatedToken, apiErr } func (uaa UAAAuthenticationRepository) getAuthToken(data url.Values) error { type uaaErrorResponse struct { Code string `json:"error"` Description string `json:"error_description"` } type AuthenticationResponse struct { AccessToken string `json:"access_token"` TokenType string `json:"token_type"` RefreshToken string `json:"refresh_token"` Error uaaErrorResponse `json:"error"` } path := fmt.Sprintf("%s/oauth/token", uaa.config.AuthenticationEndpoint()) request, err := uaa.gateway.NewRequest("POST", path, "Basic "+base64.StdEncoding.EncodeToString([]byte("cf:")), strings.NewReader(data.Encode())) if err != nil { return errors.NewWithError(T("Failed to start oauth request"), err) } request.HttpReq.Header.Set("Content-Type", "application/x-www-form-urlencoded") response := new(AuthenticationResponse) _, err = uaa.gateway.PerformRequestForJSONResponse(request, &response) switch err.(type) { case nil: case errors.HttpError: return err case *errors.InvalidTokenError: return 
errors.New(T("Authentication has expired. Please log back in to re-authenticate.\n\nTIP: Use `cf login -a <endpoint> -u <user> -o <org> -s <space>` to log back in and re-authenticate.")) default: return errors.NewWithError(T("auth request failed"), err) } // TODO: get the actual status code if response.Error.Code != "" { return errors.NewHttpError(0, response.Error.Code, response.Error.Description) } uaa.config.SetAccessToken(fmt.Sprintf("%s %s", response.TokenType, response.AccessToken)) uaa.config.SetRefreshToken(response.RefreshToken) return nil }
apache-2.0
Steampunkrue/go
command_interface/command_manager.go
642
package main

import "fmt"

type Result struct {
	result string
	err    error
}

var commands = map[string]Command{}

func AppendCommand(c Command) {
	commands[c.Name()] = c
}

func CommandInit() {
	AppendCommand(NewReplacer())
	AppendCommand(NewStringOverwriter())
}

func main() {
	// do something here
	// request all commands to be loaded
	// await user input
	// compare user input to command list
	// run based on input
	// apparently I dont have a better solution
	CommandInit()
	fmt.Println(commands["replacer"].Help())
	fmt.Println(commands["overwrite"].Help())
}
apache-2.0
emeryotopalik/appinventor-sources
appinventor/appengine/src/com/google/appinventor/server/UserInfoServiceImpl.java
4168
// -*- mode: java; c-basic-offset: 2; -*- // Copyright 2009-2011 Google, All Rights reserved // Copyright 2011-2012 MIT, All rights reserved // Released under the Apache License, Version 2.0 // http://www.apache.org/licenses/LICENSE-2.0 package com.google.appinventor.server; import com.google.appinventor.server.flags.Flag; import com.google.appinventor.server.storage.StorageIo; import com.google.appinventor.server.storage.StorageIoInstanceHolder; import com.google.appinventor.shared.rpc.user.Config; import com.google.appinventor.shared.rpc.user.User; import com.google.appinventor.shared.rpc.user.UserInfoService; /** * Implementation of the user information service. * * <p>Note that this service must be state-less so that it can be run on * multiple servers. * */ public class UserInfoServiceImpl extends OdeRemoteServiceServlet implements UserInfoService { // Storage of user settings private final transient StorageIo storageIo = StorageIoInstanceHolder.INSTANCE; private static final long serialVersionUID = -7316312435338169166L; /** * Returns System Config, including user information record * */ @Override public Config getSystemConfig(String sessionId) { Config config = new Config(); User user = userInfoProvider.getUser(); user.setSessionId(sessionId); storageIo.setUserSessionId(userInfoProvider.getUserId(), sessionId); Flag<String> rendezvousFlag = Flag.createFlag("use.rendezvousserver", ""); if (!rendezvousFlag.get().equals("")) { config.setRendezvousServer(rendezvousFlag.get()); } config.setUser(user); // Check to see if we need to upgrade this user's project to GCS storageIo.checkUpgrade(userInfoProvider.getUserId()); return config; } /** * Returns user information. * * (obsoleted by getSystemConfig()) * * @return user information record */ @Override public User getUserInformation(String sessionId) { // This is a little evil here. We are fetching the User object // *and* side effecting it by storing the sessionId // A more pedagotically correct way would be to do the store // in a separate RPC. But that would add another round trip. User user = userInfoProvider.getUser(); user.setSessionId(sessionId); // Store local copy // Store it in the data store storageIo.setUserSessionId(userInfoProvider.getUserId(), sessionId); return user; } /** * Returns user information based on userId. * * @return user information record */ @Override public User getUserInformationByUserId(String userId) { return storageIo.getUser(userId); } /** * Retrieves the user's settings. * * @return user's settings */ @Override public String loadUserSettings() { return storageIo.loadSettings(userInfoProvider.getUserId()); } /** * Stores the user's settings. * @param settings user's settings */ @Override public void storeUserSettings(String settings) { storageIo.storeSettings(userInfoProvider.getUserId(), settings); } /** * Stores the user's name. * @param name user's name */ @Override public void storeUserName(String name) { storageIo.setUserName(userInfoProvider.getUserId(), name); } /** * Stores the user's link. * @param name user's link */ @Override public void storeUserLink(String link) { storageIo.setUserLink(userInfoProvider.getUserId(), link); } /** * Stores the user's email notification frequency. 
* @param emailFrequency user's email frequency */ @Override public void storeUserEmailFrequency(int emailFrequency) { storageIo.setUserEmailFrequency(userInfoProvider.getUserId(), emailFrequency); } /** * Returns true if the current user has a user file with the given file name */ @Override public boolean hasUserFile(String fileName) { return storageIo.getUserFiles(userInfoProvider.getUserId()).contains(fileName); } /** * Deletes the user file with the given file name */ @Override public void deleteUserFile(String fileName) { storageIo.deleteUserFile(userInfoProvider.getUserId(), fileName); } }
apache-2.0
lcmanager/gdb
gdb-base/src/main/java/org/lcmanager/gdb/base/NoWeave.java
1022
/*
 * #%L
 * Game Database
 * %%
 * Copyright (C) 2016 - 2016 LCManager Group
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */
package org.lcmanager.gdb.base;

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

/**
 * Declares classes not to be weaved by AspectJ.
 *
 */
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
public @interface NoWeave {
    // Nothing to do.
}
apache-2.0
scalyr/scalyr-agent-2
tests/utils/log_reader.py
8890
# Copyright 2014-2020 Scalyr Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from __future__ import unicode_literals from __future__ import print_function from __future__ import absolute_import if False: # NOSONAR from typing import Optional from typing import List from typing import Dict from typing import Tuple from typing import Generator from typing import Any import re import threading import time from io import open import collections import six ErrorPatternInfo = collections.namedtuple("ErrorPatternInfo", ["pattern", "message"]) class LogReaderError(Exception): pass class LogReaderTimeoutError(LogReaderError): pass class LogReader(threading.Thread): """ Reader that allows to read file and to wait for new lines, wait for new lines with some conditions or patterns and etc. """ def __init__(self, file_path): super(LogReader, self).__init__() self._file_path = file_path self._file = open(six.text_type(file_path), "r") self._error_line_patterns = dict() self._lines = list() def add_error_check(self, pattern, message=None): # type: (six.text_type, six.text_type) -> None """ Add new regex pattern to match every incoming line. In case of match, those lines are considered as errors and exception is raised. :param pattern: Regular expression pattern. :param message: Additional message to show when error log line is found. """ compiled_pattern = re.compile(pattern) info = ErrorPatternInfo(compiled_pattern, message) self._error_line_patterns[pattern] = info def _check_line_for_error(self, line): # type: (six.text_type) -> None """ Match new line with patters that must be caught as error. """ for pattern, (compiled_pattern, message) in self._error_line_patterns.items(): if compiled_pattern.match(line): if not message: message = ( "Log file {0} contains error line: '{1}'. Pattern: {2}.".format( self._file_path, line, pattern ) ) raise LogReaderError(message) def _new_line_callback(self, line): # type: (six.text_type) -> None """ Callback which is invoked when new line is read. """ self._lines.append(line) self._check_line_for_error(line) def _line_generator(self): # type: () -> Generator """Generator that reads all available lines.""" for line in self._file: line = line.strip("\n") self._new_line_callback(line) yield line def _line_generator_blocking(self, timeout=10): # type: (float) -> Generator """ Wraps '_line_generator' and yields new log lines until timeout is reached. This can be used to wait for new log lines without reimplementing timeout logic. """ timeout_time = time.time() + timeout while True: line_generator = self._line_generator() try: line = next(line_generator) yield line except StopIteration: if time.time() >= timeout_time: raise LogReaderTimeoutError( "Timeout of %s seconds reached while waiting for new line." % timeout ) time.sleep(0.01) def wait_for_next_line(self, timeout=10): # type: (float) -> six.text_type """ Waits for new line from the log file. Also returns this line for more convenience. 
""" return next(self._line_generator_blocking(timeout=timeout)) def go_to_end(self): """ Just goes to the end of the file. it is useful when there is no need to wait for some particular line, but there is need to check all new lines (for errors specified by 'add_error_check' for example.). It does not block or wait, it just reads and processes new lines until EOF. """ # TODO: make LogReader do this autimatically by using ContextManager. for _ in self._line_generator(): pass def wait_for_matching_line(self, pattern, timeout=10): # type: ignore # type: (six.text_type, int) -> Optional[six.text_type] """ Wait for line which matches to provided pattern. """ compiled_pattern = re.compile(pattern) for line in self._line_generator_blocking(timeout=timeout): if compiled_pattern.match(line): return line def wait(self, seconds): # type: (float) -> None """ Keep checking for new lines for some period of time(in seconds)'. """ stop_time = time.time() + seconds while True: self.go_to_end() time.sleep(0.01) if time.time() >= stop_time: break @property def last_line(self): # type: () -> six.text_type if self._lines: return self._lines[-1] else: return "" class AgentLogReader(LogReader): """ Generic reader for agent log. """ def __init__(self, file_path): super(AgentLogReader, self).__init__(file_path) self.add_error_check(r"\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}\.\d+Z ERROR") class LogMetricReader(LogReader): """ Subclass that allows to read and wait for particular metrics. """ LINE_PATTERN = "" # type: six.text_type def __init__(self, file_path): super(LogMetricReader, self).__init__(file_path) self.current_metrics = dict() def _parse_line(self, line): # type: (six.text_type) -> Tuple """ Parse metric log line. """ m = re.match(type(self).LINE_PATTERN, line) if not m: raise LogReaderError("Line: '{}' can not be parsed.") metric_name = m.group("metric_name") metric_value_string = m.group("metric_value") if metric_value_string.isdigit(): metric_value = int(metric_value_string) # type: Any[int, float] else: try: metric_value = float(metric_value_string) except ValueError: metric_value = metric_value_string return metric_name, metric_value def _new_line_callback(self, line): # type: (six.text_type) -> None """ Override this callback to parse metrics from new line. :return: """ super(LogMetricReader, self)._new_line_callback(line) name, value = self._parse_line(line) self.current_metrics[name] = value def wait_for_metrics_exist(self, names, timeout=10): # type: ignore # type: (List[six.text_type], int) -> Dict[six.text_type, six.text_type] """ Waits until all needed metrics are presented in log file at least once. :param names: list on needed metric names. :return: metric_name -> metric_value dict with needed metrics. """ remaining_metrics = set(names) for _ in self._line_generator_blocking(timeout=timeout): for name in list(remaining_metrics): value = self.current_metrics.get(name) if value is not None: remaining_metrics.remove(name) if len(remaining_metrics) == 0: return {name: self.current_metrics[name] for name in names} def wait_for_metrics_equal( self, expected, timeout=10 ): # type: (Dict, float) -> Dict """ Wait until current metric values equals to values which are specified in the 'expected' dict. :param expected: metric_name -> value pairs with expected values if specified metrics. :return: metric_name -> metric_value dict with needed metrics. 
""" line_gen = self._line_generator_blocking(timeout=timeout) while True: needed_metrics = { name: self.current_metrics[name] for name in expected.keys() } if needed_metrics == expected: return needed_metrics # read for other line only after current metrics are evaluated. next(line_gen)
apache-2.0
kxbmap/configs
project/Release.scala
2115
import com.jsuereth.sbtpgp.SbtPgp import mdoc.MdocPlugin.autoImport._ import sbt.Keys._ import sbt._ import sbtrelease.ReleasePlugin import sbtrelease.ReleasePlugin.autoImport._ import sbtrelease.ReleaseStateTransformations._ import sbtrelease.Version.Bump import xerial.sbt.Sonatype import xerial.sbt.Sonatype.SonatypeKeys._ object Release extends AutoPlugin { override def requires: Plugins = Common && Sonatype && ReleasePlugin && SbtPgp object autoImport { val readmeFileName = settingKey[String]("Readme file name") val updateReadme = taskKey[File]("Update readme file") } import autoImport._ private val docs = LocalProject("docs") override def projectSettings: Seq[Setting[_]] = Seq( readmeFileName := "README.md", updateReadme := updateReadmeTask.value, releaseCrossBuild := true, releaseVersionBump := Bump.Minor, releaseProcess := Seq( checkSnapshotDependencies, inquireVersions, runClean, runTest, setReleaseVersion, releaseStepTask(updateReadme), commitReadme, commitReleaseVersion, tagRelease, publishArtifacts, releaseStepCommand("sonatypeBundleRelease"), setNextVersion, commitNextVersion ) ) private def updateReadmeTask = Def.sequential( Def.taskDyn { (docs / mdoc).toTask(s" --include ${readmeFileName.value}") }, Def.task { val name = readmeFileName.value val out = (docs / mdocOut).value / name val readme = baseDirectory.value / name IO.copy(Seq(out -> readme)) readme }) private val commitReadme = ReleaseStep { st => val x = Project.extract(st) val vcs = x.get(releaseVcs).getOrElse( sys.error("Aborting release. Working directory is not a repository of a recognized VCS.")) val sign = x.get(releaseVcsSign) val signOff = x.get(releaseVcsSignOff) val name = x.get(readmeFileName) vcs.add(name) ! st.log val status = vcs.status.!!.trim if (status.contains(name)) { vcs.commit(s"Update $name", sign, signOff) ! st.log } st } }
apache-2.0
RonyK/SuaDB
suadb-server/src/suadb/remote/RemoteDriver.java
383
package suadb.remote;

import java.rmi.*;

/**
 * The RMI suadb.remote interface corresponding to Driver.
 * The method is similar to that of Driver,
 * except that it takes no arguments and
 * throws RemoteExceptions instead of SQLExceptions.
 * @author Edward Sciore
 */
public interface RemoteDriver extends Remote {
    public RemoteConnection connect() throws RemoteException;
}
apache-2.0
kogotko/carburetor
openstack_dashboard/dashboards/admin/snapshots/tests.py
10310
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from django.conf import settings from django.core.urlresolvers import reverse from django import http from django.test.utils import override_settings from django.utils.http import urlunquote from mox3.mox import IsA from openstack_dashboard.api import cinder from openstack_dashboard.api import keystone from openstack_dashboard.test import helpers as test from openstack_dashboard.dashboards.admin.snapshots import forms from openstack_dashboard.dashboards.admin.snapshots import tables INDEX_URL = 'horizon:admin:snapshots:index' class VolumeSnapshotsViewTests(test.BaseAdminViewTests): @test.create_stubs({cinder: ('volume_list', 'volume_snapshot_list_paged',), keystone: ('tenant_list',)}) def test_snapshots_tab(self): cinder.volume_snapshot_list_paged( IsA(http.HttpRequest), paginate=True, marker=None, sort_dir='desc', search_opts={'all_tenants': True},).AndReturn( [self.cinder_volume_snapshots.list(), False, False]) cinder.volume_list(IsA(http.HttpRequest), search_opts={ 'all_tenants': True}).\ AndReturn(self.cinder_volumes.list()) keystone.tenant_list(IsA(http.HttpRequest)). \ AndReturn([self.tenants.list(), False]) self.mox.ReplayAll() url = reverse(INDEX_URL) res = self.client.get(urlunquote(url)) self.assertEqual(res.status_code, 200) self.assertTemplateUsed(res, 'horizon/common/_data_table_view.html') snapshots = res.context['volume_snapshots_table'].data self.assertItemsEqual(snapshots, self.cinder_volume_snapshots.list()) @test.create_stubs({cinder: ('volume_list', 'volume_snapshot_list_paged',), keystone: ('tenant_list',)}) def _test_snapshots_index_paginated(self, marker, sort_dir, snapshots, url, has_more, has_prev): cinder.volume_snapshot_list_paged( IsA(http.HttpRequest), paginate=True, marker=marker, sort_dir=sort_dir, search_opts={'all_tenants': True}) \ .AndReturn([snapshots, has_more, has_prev]) cinder.volume_list(IsA(http.HttpRequest), search_opts={ 'all_tenants': True}).\ AndReturn(self.cinder_volumes.list()) keystone.tenant_list(IsA(http.HttpRequest)) \ .AndReturn([self.tenants.list(), False]) self.mox.ReplayAll() res = self.client.get(urlunquote(url)) self.assertTemplateUsed(res, 'horizon/common/_data_table_view.html') self.assertEqual(res.status_code, 200) self.mox.UnsetStubs() return res @override_settings(API_RESULT_PAGE_SIZE=1) def test_snapshots_index_paginated(self): size = settings.API_RESULT_PAGE_SIZE mox_snapshots = self.cinder_volume_snapshots.list() base_url = reverse(INDEX_URL) next = tables.VolumeSnapshotsTable._meta.pagination_param # get first page expected_snapshots = mox_snapshots[:size] res = self._test_snapshots_index_paginated( marker=None, sort_dir="desc", snapshots=expected_snapshots, url=base_url, has_more=True, has_prev=False) snapshots = res.context['volume_snapshots_table'].data self.assertItemsEqual(snapshots, expected_snapshots) # get second page expected_snapshots = mox_snapshots[size:2 * size] marker = expected_snapshots[0].id url = base_url + "?%s=%s" % (next, marker) res = self._test_snapshots_index_paginated( 
marker=marker, sort_dir="desc", snapshots=expected_snapshots, url=url, has_more=True, has_prev=True) snapshots = res.context['volume_snapshots_table'].data self.assertItemsEqual(snapshots, expected_snapshots) # get last page expected_snapshots = mox_snapshots[-size:] marker = expected_snapshots[0].id url = base_url + "?%s=%s" % (next, marker) res = self._test_snapshots_index_paginated( marker=marker, sort_dir="desc", snapshots=expected_snapshots, url=url, has_more=False, has_prev=True) snapshots = res.context['volume_snapshots_table'].data self.assertItemsEqual(snapshots, expected_snapshots) @override_settings(API_RESULT_PAGE_SIZE=1) def test_snapshots_index_paginated_prev(self): size = settings.API_RESULT_PAGE_SIZE max_snapshots = self.cinder_volume_snapshots.list() base_url = reverse('horizon:admin:snapshots:index') prev = tables.VolumeSnapshotsTable._meta.prev_pagination_param # prev from some page expected_snapshots = max_snapshots[size:2 * size] marker = max_snapshots[0].id url = base_url + "?%s=%s" % (prev, marker) res = self._test_snapshots_index_paginated( marker=marker, sort_dir="asc", snapshots=expected_snapshots, url=url, has_more=False, has_prev=True) snapshots = res.context['volume_snapshots_table'].data self.assertItemsEqual(snapshots, expected_snapshots) # back to first page expected_snapshots = max_snapshots[:size] marker = max_snapshots[0].id url = base_url + "?%s=%s" % (prev, marker) res = self._test_snapshots_index_paginated( marker=marker, sort_dir="asc", snapshots=expected_snapshots, url=url, has_more=True, has_prev=False) snapshots = res.context['volume_snapshots_table'].data self.assertItemsEqual(snapshots, expected_snapshots) @test.create_stubs({cinder: ('volume_snapshot_reset_state', 'volume_snapshot_get')}) def test_update_snapshot_status(self): snapshot = self.cinder_volume_snapshots.first() state = 'error' cinder.volume_snapshot_get(IsA(http.HttpRequest), snapshot.id) \ .AndReturn(snapshot) cinder.volume_snapshot_reset_state(IsA(http.HttpRequest), snapshot.id, state) self.mox.ReplayAll() formData = {'status': state} url = reverse('horizon:admin:snapshots:update_status', args=(snapshot.id,)) res = self.client.post(url, formData) self.assertNoFormErrors(res) @test.create_stubs({cinder: ('volume_snapshot_get', 'volume_get')}) def test_get_volume_snapshot_details(self): volume = self.cinder_volumes.first() snapshot = self.cinder_volume_snapshots.first() cinder.volume_get(IsA(http.HttpRequest), volume.id). \ AndReturn(volume) cinder.volume_snapshot_get(IsA(http.HttpRequest), snapshot.id). 
\ AndReturn(snapshot) self.mox.ReplayAll() url = reverse('horizon:admin:snapshots:detail', args=[snapshot.id]) res = self.client.get(url) self.assertTemplateUsed(res, 'horizon/common/_detail.html') self.assertEqual(res.context['snapshot'].id, snapshot.id) @test.create_stubs({cinder: ('volume_snapshot_get', 'volume_get')}) def test_get_volume_snapshot_details_with_snapshot_exception(self): # Test to verify redirect if get volume snapshot fails snapshot = self.cinder_volume_snapshots.first() cinder.volume_snapshot_get(IsA(http.HttpRequest), snapshot.id).\ AndRaise(self.exceptions.cinder) self.mox.ReplayAll() url = reverse('horizon:admin:snapshots:detail', args=[snapshot.id]) res = self.client.get(url) self.assertNoFormErrors(res) self.assertMessageCount(error=1) self.assertRedirectsNoFollow(res, reverse(INDEX_URL)) @test.create_stubs({cinder: ('volume_snapshot_get', 'volume_get')}) def test_get_volume_snapshot_details_with_volume_exception(self): # Test to verify redirect if get volume fails volume = self.cinder_volumes.first() snapshot = self.cinder_volume_snapshots.first() cinder.volume_get(IsA(http.HttpRequest), volume.id). \ AndRaise(self.exceptions.cinder) cinder.volume_snapshot_get(IsA(http.HttpRequest), snapshot.id). \ AndReturn(snapshot) self.mox.ReplayAll() url = reverse('horizon:admin:snapshots:detail', args=[snapshot.id]) res = self.client.get(url) self.assertNoFormErrors(res) self.assertMessageCount(error=1) self.assertRedirectsNoFollow(res, reverse(INDEX_URL)) def test_get_snapshot_status_choices_without_current(self): current_status = {'status': 'available'} status_choices = forms.populate_status_choices(current_status, forms.STATUS_CHOICES) self.assertEqual(len(status_choices), len(forms.STATUS_CHOICES)) self.assertNotIn(current_status['status'], [status[0] for status in status_choices]) @test.create_stubs({cinder: ('volume_snapshot_get',)}) def test_update_volume_status_get(self): snapshot = self.cinder_volume_snapshots.first() cinder.volume_snapshot_get(IsA(http.HttpRequest), snapshot.id). \ AndReturn(snapshot) self.mox.ReplayAll() url = reverse('horizon:admin:snapshots:update_status', args=[snapshot.id]) res = self.client.get(url) status_option = "<option value=\"%s\"></option>" % snapshot.status self.assertNotContains(res, status_option)
apache-2.0
AlexanderYao/ThreadPool
ThreadPool.Test/TestLongTerm.cs
1827
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;

namespace ThreadPool.Test
{
    [TestClass]
    public class TestLongTerm
    {
        private IThreadPool _pool;

        public TestLongTerm()
        {
            StartInfo info = new StartInfo
            {
                Timeout = 5,
                MinWorkerThreads = 1,
                MaxWorkerThreads = 5,
            };
            _pool = ThreadPoolFactory.Create(info, "long term pool");
        }

        [TestMethod]
        public void TestLongTerm_Use4Threads()
        {
            for (int i = 0; i < 7; i++)
            {
                _pool.QueueUserWorkItem(Print, "i'm item " + i);
            }
            //_pool.WaitAll();
            Thread.Sleep(7000);
            Assert.IsTrue(_pool.MaxThreadCount == 4);
        }

        [TestMethod]
        public void TestLongTerm_Use5Threads()
        {
            for (int i = 0; i < 8; i++)
            {
                _pool.QueueUserWorkItem(Print, "i'm item " + i);
            }
            //_pool.WaitAll();
            Thread.Sleep(7000);
            Assert.IsTrue(_pool.MaxThreadCount == 5);
        }

        [TestMethod]
        public void TestLongTerm_20ItemUse5Threads()
        {
            for (int i = 0; i < 20; i++)
            {
                _pool.QueueUserWorkItem(Print, "i'm item " + i);
            }
            //_pool.WaitAll();
            Thread.Sleep(15000);
            Assert.IsTrue(_pool.MaxThreadCount == 5);
        }

        private Object Print(Object o)
        {
            Thread.Sleep(1000);
            Debug.WriteLine(o as String);
            return null;
        }
    }
}
apache-2.0
CyR1en/MineCordBot
src/main/java/us/cyrien/minecordbot/chat/listeners/mcListeners/MentionListener.java
1494
package us.cyrien.minecordbot.chat.listeners.mcListeners;

import net.dv8tion.jda.core.JDA;
import net.dv8tion.jda.core.entities.Member;
import net.dv8tion.jda.core.entities.TextChannel;
import org.bukkit.event.EventHandler;
import org.bukkit.event.server.TabCompleteEvent;
import us.cyrien.minecordbot.Minecordbot;

import java.util.ArrayList;
import java.util.List;

public class MentionListener extends MCBListener {

    private JDA jda;
    private List<TextChannel> tcArray;

    public MentionListener(Minecordbot mcb) {
        super(mcb);
        jda = mcb.getBot().getJda();
        tcArray = mcb.getRelayChannels();
    }

    @EventHandler
    public void onTabComplete(TabCompleteEvent e) {
        String[] buffers = e.getBuffer().split(" ");
        List<String> all = new ArrayList<>();
        if (e.getBuffer().endsWith("@")) {
            tcArray.forEach((tc) -> {
                for (Member m : tc.getMembers())
                    all.add("@" + m.getUser().getName().replaceAll("_", "-").replaceAll(" ", "_"));
            });
            e.setCompletions(all);
        } else if (buffers[buffers.length - 1].startsWith("@")) {
            tcArray.forEach((tc) -> {
                for (Member m : tc.getMembers())
                    if (m.getUser().getName().startsWith(buffers[buffers.length - 1].replaceAll("@", "")))
                        all.add("@" + m.getUser().getName().replaceAll(" ", "_"));
            });
            e.setCompletions(all);
        }
    }
}
apache-2.0
glameyzhou/scaffold
scaffold-web/src/main/java/org/glamey/scaffold/web/util/IpUtils.java
1619
package org.glamey.scaffold.web.util;

import org.apache.commons.lang3.StringUtils;

import javax.servlet.http.HttpServletRequest;

/**
 * @author by zhouyang.zhou.
 */
public class IpUtils {

    public static final String UNKNOWN = "unknown";
    public static final String X_REAL_IP = "X-Real-IP";
    public static final String X_FORWARDED_FOR = "x-forwarded-for";
    public static final String PROXY_CLIENT_IP = "Proxy-Client-IP";
    public static final String WL_PROXY_CLIENT_IP = "WL-Proxy-Client-IP";

    /**
     * Returns the real IP address of the visiting client.
     *
     * @param request
     * @return
     */
    public static String getClientRealIP(HttpServletRequest request) {
        String ip = request.getHeader(X_REAL_IP);
        if (StringUtils.isBlank(ip) || StringUtils.equalsIgnoreCase(UNKNOWN, ip)) {
            ip = request.getHeader(X_FORWARDED_FOR);
        }
        if (StringUtils.isBlank(ip) || StringUtils.equalsIgnoreCase(UNKNOWN, ip)) {
            ip = request.getHeader(PROXY_CLIENT_IP);
        }
        if (StringUtils.isBlank(ip) || StringUtils.equalsIgnoreCase(UNKNOWN, ip)) {
            ip = request.getHeader(WL_PROXY_CLIENT_IP);
        }
        if (StringUtils.isBlank(ip) || StringUtils.equalsIgnoreCase(UNKNOWN, ip)) {
            ip = request.getRemoteAddr();
        }
        return getValidIp(ip);
    }

    private static String getValidIp(String ip) {
        int pos = ip.lastIndexOf(',');
        if (pos >= 0) {
            ip = ip.substring(pos);
        }
        if ("0:0:0:0:0:0:0:1".equals(ip)) {
            ip = "127.0.0.1";
        }
        return ip;
    }
}
apache-2.0
sabob/ratel
ratel/src/com/google/ratel/deps/jackson/databind/jsonFormatVisitors/JsonNullFormatVisitor.java
394
package com.google.ratel.deps.jackson.databind.jsonFormatVisitors; public interface JsonNullFormatVisitor { /** * Default "empty" implementation, useful as the base to start on; * especially as it is guaranteed to implement all the methods * of the interface, even if new methods are added. */ public static class Base implements JsonNullFormatVisitor { } }
apache-2.0
socrata-platform/data-coordinator
coordinator/src/main/scala/com/socrata/datacoordinator/Launch.scala
354
package com.socrata.datacoordinator object Launch extends App { if(args.length > 0) { val className = args(0) val subargs = args.drop(1) val cls = Class.forName(className) cls.getMethod("main", classOf[Array[String]]).invoke(null, subargs) } else { Console.err.println("Usage: Launch CLASSNAME [ARGS...]") sys.exit(1) } }
apache-2.0
usgin/geoportal-server-catalog
geoportal/src/main/webapp/viewer/widgets/Search/setting/nls/nl/strings.js
1985
define( ({ "sourceSetting": "Broninstellingen zoeken", "instruction": "Geocodeerservices of objectlagen toevoegen en configureren als zoekbronnen. Deze bepaalde bronnen bepalen wat doorzoekbaar is in het zoekvak.", "add": "Zoekbron toevoegen", "addGeocoder": "Geocoder toevoegen", "geocoder": "Geocoder", "setLayerSource": "Laagbron instellen", "setGeocoderURL": "Geocoder-URL instellen", "searchableLayer": "Objectlaag", "name": "Naam", "countryCode": "Land- of regiocode(s)", "countryCodeEg": "bijv. ", "countryCodeHint": "Laat deze waarde leeg en alle landen en regio\'s worden gezocht", "generalSetting": "Algemene instellingen", "allPlaceholder": "Plaatshoudertekst om alles te zoeken: ", "showInfoWindowOnSelect": "Pop-up weergeven voor gevonden object of locatie", "searchInCurrentMapExtent": "Alleen zoeken in huidige kaartextent", "zoomScale": "Schaal voor in-/uitzoomen", "locatorUrl": "Geocoder-URL", "locatorName": "Naam van geocoder", "locatorExample": "Voorbeeld", "locatorWarning": "Deze versie van geocodeerservice wordt niet ondersteund. De widget ondersteunt geocodeerservice 10.0 en hoger.", "locatorTips": "Suggesties zijn niet beschikbaar omdat de geocodeerservice geen suggesties ondersteunt.", "layerSource": "Laagbron", "searchLayerTips": "Suggesties zijn niet beschikbaar omdat de objectservice geen pagination ondersteunt.", "placeholder": "Tekst van tijdelijke aanduiding", "searchFields": "Zoekvelden", "displayField": "Weergaveveld:", "exactMatch": "Exacte overeenkomst", "maxSuggestions": "Maximale suggesties", "maxResults": "Maximale resultaten", "setSearchFields": "Zoekvelden instellen", "set": "Instellen", "fieldSearchable": "doorzoekbaar", "fieldName": "Naam", "fieldAlias": "Alias", "ok": "OK", "cancel": "Annuleren", "invalidUrlTip": "De URL ${URL} is ongeldig of ontoegankelijk." }) );
apache-2.0
muatik/logmon
src/LogMon/LogConfig/IFieldMapper.php
1146
<?php namespace LogMon\LogConfig; interface IFieldMapper { /** * checks whether the mapping is valid or not. * * @param object $mapping * @access public * @return boolean */ public function isMappingValid($mapping); /** * sets the given mapping for the field * * @param string $field * @param object $mapping * @access public * @return void * @throws if the field is unknown or the mapping is not valid. */ public function setFieldMapping($field, $mapping); /** * validates the field mapping * * @access public * @return boolean * @throws if any mapping is invalid */ public function validate(); /** * maps the given data * * @param Array $data * @access public * @return Array */ public function map(Array $data); /** * loads the field mapping from the given json * * @param object|string $jsonObject * @access public * @return void * @throws if any required field does not exist */ public function fromJson($jsonObject); /** * exports the data of the field mapping * * @access public * @return array */ public function toJson(); }
apache-2.0
rrpgfirecast/firecast
Plugins/Sheets/Gerenciador SotDL/output/rdkObjs/GerenciadorSotDL/EfeitoCombatTracker.lfm.lua
6300
require("firecast.lua"); local __o_rrpgObjs = require("rrpgObjs.lua"); require("rrpgGUI.lua"); require("rrpgDialogs.lua"); require("rrpgLFM.lua"); require("ndb.lua"); require("locale.lua"); local __o_Utils = require("utils.lua"); local function constructNew_frmEfeitoCombatTracker() local obj = GUI.fromHandle(_obj_newObject("form")); local self = obj; local sheet = nil; rawset(obj, "_oldSetNodeObjectFunction", rawget(obj, "setNodeObject")); function obj:setNodeObject(nodeObject) sheet = nodeObject; self.sheet = nodeObject; self:_oldSetNodeObjectFunction(nodeObject); end; function obj:setNodeDatabase(nodeObject) self:setNodeObject(nodeObject); end; _gui_assignInitialParentForForm(obj.handle); obj:beginUpdate(); obj:setName("frmEfeitoCombatTracker"); obj:setHeight(18); obj:setPadding({left=1,top=1,right=1,bottom=1}); local function createDraggerForAtor() return self:findControlByName("frmCombatTracker"); end; local function getTracker() return self:findControlByName("frmCombatTracker"); end; function self:apagarEfeito() ndb.deleteNode(sheet); end; function self:doUsuarioAlterouDuracao() if sheet ~= nil then sheet.duracaoReal = nil; end; end; obj.imgEfeito = GUI.fromHandle(_obj_newObject("image")); obj.imgEfeito:setParent(obj); obj.imgEfeito:setName("imgEfeito"); obj.imgEfeito:setAlign("left"); obj.imgEfeito:setSRC("/GerenciadorSotDL/images/effect.png"); obj.imgEfeito:setWidth(16); obj.imgEfeito:setHitTest(true); obj.imgEfeito:setHint("Um efeito/condição do ator"); obj.edtDescricao = GUI.fromHandle(_obj_newObject("edit")); obj.edtDescricao:setParent(obj); obj.edtDescricao:setName("edtDescricao"); obj.edtDescricao:setAlign("client"); obj.edtDescricao:setField("descricao"); obj.edtDescricao:setTransparent(true); obj.edtDescricao:setFontSize(10); obj.edtDescricao:setMargins({right=2}); obj.horzLine1 = GUI.fromHandle(_obj_newObject("horzLine")); obj.horzLine1:setParent(obj.edtDescricao); obj.horzLine1:setAlign("bottom"); obj.horzLine1:setStrokeSize(1); obj.horzLine1:setStrokeColor("black"); obj.horzLine1:setOpacity(0.5); obj.horzLine1:setName("horzLine1"); obj.layDireita = GUI.fromHandle(_obj_newObject("layout")); obj.layDireita:setParent(obj); obj.layDireita:setName("layDireita"); obj.layDireita:setAlign("right"); obj.layDireita:setWidth(40); obj.edtDuracao = GUI.fromHandle(_obj_newObject("edit")); obj.edtDuracao:setParent(obj.layDireita); obj.edtDuracao:setName("edtDuracao"); obj.edtDuracao:setAlign("left"); obj.edtDuracao:setWidth(25); obj.edtDuracao:setType("number"); obj.edtDuracao:setHint("Duração do efeito/condição (medido em Rodadas)"); obj.edtDuracao:setMargins({right=3, top=1, bottom=1}); obj.edtDuracao:setHorzTextAlign("center"); obj.edtDuracao:setFontSize(10); obj.edtDuracao:setField("duracao"); obj.edtDuracao:setMin(0); obj.imgDeleteEfeito = GUI.fromHandle(_obj_newObject("image")); obj.imgDeleteEfeito:setParent(obj.layDireita); obj.imgDeleteEfeito:setName("imgDeleteEfeito"); obj.imgDeleteEfeito:setAlign("left"); obj.imgDeleteEfeito:setSRC("/GerenciadorSotDL/images/deleteEffect.png"); obj.imgDeleteEfeito:setHitTest(true); obj.imgDeleteEfeito:setWidth(12); obj.imgDeleteEfeito:setCursor("handPoint"); obj.imgDeleteEfeito:setOpacity(0.6); if system.isMobile() then self.height = 30; self.imgEfeito.width = 30; self.imgEfeito.style = "autoFit"; self.edtDescricao.fontSize = 12; self.edtDuracao.width = 30; self.edtDuracao.fontSize = 12; self.imgDeleteEfeito.width = 30; self.imgDeleteEfeito.style = "autoFit"; self.layDireita.width = 63; end; local tracker = getTracker(); local function 
setDropRecursivo(ctrl) ctrl.onStartDrop = tracker.handleStartDrop; local childs = ctrl:getChildren(); for i = 1, #childs, 1 do setDropRecursivo(childs[i]); end; end; setDropRecursivo(self); obj._e_event0 = obj.edtDuracao:addEventListener("onUserChange", function (_) self:doUsuarioAlterouDuracao() end, obj); obj._e_event1 = obj.imgDeleteEfeito:addEventListener("onMouseUp", function (_, event) if event.button == 'left' then self:apagarEfeito(); end; end, obj); function obj:_releaseEvents() __o_rrpgObjs.removeEventListenerById(self._e_event1); __o_rrpgObjs.removeEventListenerById(self._e_event0); end; obj._oldLFMDestroy = obj.destroy; function obj:destroy() self:_releaseEvents(); if (self.handle ~= 0) and (self.setNodeDatabase ~= nil) then self:setNodeDatabase(nil); end; if self.imgDeleteEfeito ~= nil then self.imgDeleteEfeito:destroy(); self.imgDeleteEfeito = nil; end; if self.imgEfeito ~= nil then self.imgEfeito:destroy(); self.imgEfeito = nil; end; if self.edtDescricao ~= nil then self.edtDescricao:destroy(); self.edtDescricao = nil; end; if self.horzLine1 ~= nil then self.horzLine1:destroy(); self.horzLine1 = nil; end; if self.layDireita ~= nil then self.layDireita:destroy(); self.layDireita = nil; end; if self.edtDuracao ~= nil then self.edtDuracao:destroy(); self.edtDuracao = nil; end; self:_oldLFMDestroy(); end; obj:endUpdate(); return obj; end; function newfrmEfeitoCombatTracker() local retObj = nil; __o_rrpgObjs.beginObjectsLoading(); __o_Utils.tryFinally( function() retObj = constructNew_frmEfeitoCombatTracker(); end, function() __o_rrpgObjs.endObjectsLoading(); end); assert(retObj ~= nil); return retObj; end; local _frmEfeitoCombatTracker = { newEditor = newfrmEfeitoCombatTracker, new = newfrmEfeitoCombatTracker, name = "frmEfeitoCombatTracker", dataType = "", formType = "undefined", formComponentName = "form", title = "", description=""}; frmEfeitoCombatTracker = _frmEfeitoCombatTracker; Firecast.registrarForm(_frmEfeitoCombatTracker); return _frmEfeitoCombatTracker;
apache-2.0
CamfedCode/Camfed
db/migrate/20110214170926_create_configurations.rb
397
class CreateConfigurations < ActiveRecord::Migration def self.up create_table :configurations do |t| t.string :epi_surveyor_url t.string :epi_surveyor_user t.string :epi_surveyor_token t.string :salesforce_url t.string :salesforce_user t.string :salesforce_token t.timestamps end end def self.down drop_table :configurations end end
apache-2.0
Esri/arcobjects-sdk-community-samples
Net/Raster/CustomRasterFunction/CSharp/WatermarkFunctionUI/WatermarkFunctionUIForm.cs
8975
/* Copyright 2019 Esri Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ using System; using System.Collections.Generic; using System.ComponentModel; using System.Data; using System.Drawing; using System.Linq; using System.Text; using System.Windows.Forms; using ESRI.ArcGIS.Catalog; using ESRI.ArcGIS.CatalogUI; using ESRI.ArcGIS.DataSourcesRaster; using ESRI.ArcGIS.Geodatabase; using ESRI.ArcGIS.esriSystem; namespace CustomFunctionUI { public partial class WatermarkFunctionUIForm : Form { #region Private Members private object myInputRaster; private string myWaterMarkImagePath; private double myBlendPercentage; private CustomFunction.esriWatermarkLocation myWatermarkLocation; private bool myDirtyFlag; #endregion #region WatermarkFunctionUIForm Properties /// <summary> /// Constructor /// </summary> public WatermarkFunctionUIForm() { InitializeComponent(); myInputRaster = null; myWaterMarkImagePath = ""; myBlendPercentage = 0.0; myWatermarkLocation = CustomFunction.esriWatermarkLocation.esriWatermarkBottomRight; } /// <summary> /// Get or set the watermark image path /// </summary> public string WatermarkImagePath { get { myWaterMarkImagePath = watermarkImageTxtbox.Text; return myWaterMarkImagePath; } set { myWaterMarkImagePath = value; watermarkImageTxtbox.Text = value; } } /// <summary> /// Flag to specify if the form has changed /// </summary> public bool IsFormDirty { get { return myDirtyFlag; } set { myDirtyFlag = value; } } /// <summary> /// Get or set the input raster /// </summary> public object InputRaster { get { return myInputRaster; } set { myInputRaster = value; inputRasterTxtbox.Text = GetInputRasterName(myInputRaster); } } /// <summary> /// Get or set the blending percentage /// </summary> public double BlendPercentage { get { if (blendPercentTxtbox.Text == "") blendPercentTxtbox.Text = "50.00"; myBlendPercentage = Convert.ToDouble(blendPercentTxtbox.Text); return myBlendPercentage; } set { myBlendPercentage = value; blendPercentTxtbox.Text = Convert.ToString(value); } } /// <summary> /// Get or set the watermark location. /// </summary> public CustomFunction.esriWatermarkLocation WatermarkLocation { get { return myWatermarkLocation; } set { myWatermarkLocation = value; } } #endregion #region WatermarkFunctionUIForm Members /// <summary> /// This function takes a raster object and returns the formatted name of /// the object for display in the UI. 
/// </summary> /// <param name="inputRaster">Object whose name is to be found</param> /// <returns>Name of the object</returns> private string GetInputRasterName(object inputRaster) { if ((inputRaster is IRasterDataset)) { IRasterDataset rasterDataset = (IRasterDataset)inputRaster; return rasterDataset.CompleteName; } if ((inputRaster is IRaster)) { IRaster myRaster = (IRaster)inputRaster; return ((IRaster2)myRaster).RasterDataset.CompleteName; } if (inputRaster is IDataset) { IDataset dataset = (IDataset)inputRaster; return dataset.Name; } if (inputRaster is IName) { if (inputRaster is IDatasetName) { IDatasetName inputDSName = (IDatasetName)inputRaster; return inputDSName.Name; } if (inputRaster is IFunctionRasterDatasetName) { IFunctionRasterDatasetName inputFRDName = (IFunctionRasterDatasetName)inputRaster; return inputFRDName.BrowseName; } if (inputRaster is IMosaicDatasetName) { IMosaicDatasetName inputMDName = (IMosaicDatasetName)inputRaster; return "MD"; } IName inputName = (IName)inputRaster; return inputName.NameString; } if (inputRaster is IRasterFunctionTemplate) { IRasterFunctionTemplate rasterFunctionTemplate = (IRasterFunctionTemplate)inputRaster; return rasterFunctionTemplate.Function.Name; } if (inputRaster is IRasterFunctionVariable) { IRasterFunctionVariable rasterFunctionVariable = (IRasterFunctionVariable)inputRaster; return rasterFunctionVariable.Name; } return ""; } /// <summary> /// Updates the UI textboxes using the properties that have been set. /// </summary> public void UpdateUI() { if (myInputRaster != null) inputRasterTxtbox.Text = GetInputRasterName(myInputRaster); blendPercentTxtbox.Text = Convert.ToString(myBlendPercentage); watermarkImageTxtbox.Text = myWaterMarkImagePath; LocationComboBx.SelectedIndex = (int)myWatermarkLocation; } private void inputRasterBtn_Click(object sender, EventArgs e) { IEnumGxObject ipSelectedObjects = null; ShowRasterDatasetBrowser((int)(Handle.ToInt32()), out ipSelectedObjects); IGxObject selectedObject = ipSelectedObjects.Next(); if (selectedObject is IGxDataset) { IGxDataset ipGxDS = (IGxDataset)selectedObject; IDataset ipDataset; ipDataset = ipGxDS.Dataset; myInputRaster = ipDataset.FullName; inputRasterTxtbox.Text = GetInputRasterName(myInputRaster); myDirtyFlag = true; } } public void ShowRasterDatasetBrowser(int handle, out IEnumGxObject ipSelectedObjects) { IGxObjectFilterCollection ipFilterCollection = new GxDialogClass(); IGxObjectFilter ipFilter1 = new GxFilterRasterDatasetsClass(); ipFilterCollection.AddFilter(ipFilter1, true); IGxDialog ipGxDialog = (IGxDialog)(ipFilterCollection); ipGxDialog.RememberLocation = true; ipGxDialog.Title = "Open"; ipGxDialog.AllowMultiSelect = false; ipGxDialog.RememberLocation = true; ipGxDialog.DoModalOpen((int)(Handle.ToInt32()), out ipSelectedObjects); return; } private void LocationComboBx_SelectedIndexChanged(object sender, EventArgs e) { myWatermarkLocation = (CustomFunction.esriWatermarkLocation)LocationComboBx.SelectedIndex; myDirtyFlag = true; } private void watermarkImageBtn_Click(object sender, EventArgs e) { watermarkImageDlg.ShowDialog(); if (watermarkImageDlg.FileName != "") { watermarkImageTxtbox.Text = watermarkImageDlg.FileName; myDirtyFlag = true; } } private void blendPercentTxtbox_ModifiedChanged(object sender, EventArgs e) { if (blendPercentTxtbox.Text != "") { myBlendPercentage = Convert.ToDouble(blendPercentTxtbox.Text); myDirtyFlag = true; } } #endregion } }
apache-2.0
youngjun0528/moheeto
CodingTraining/PythonStudy/Chapter02/Question02.py
151
def Question02():
    print ('What is the input string?')
    strInput = input()
    print (strInput + ' has ' + str(len(strInput)) + ' characters.')
apache-2.0
RyanSkraba/beam
sdks/python/apache_beam/runners/pipeline_context.py
7541
# # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # """Utility class for serializing pipelines via the runner API. For internal use only; no backwards-compatibility guarantees. """ from __future__ import absolute_import from builtins import object from typing import TYPE_CHECKING from typing import Any from typing import Dict from typing import Mapping from typing import Optional from typing import Union from apache_beam import coders from apache_beam import pipeline from apache_beam import pvalue from apache_beam.internal import pickler from apache_beam.portability.api import beam_fn_api_pb2 from apache_beam.portability.api import beam_runner_api_pb2 from apache_beam.transforms import core from apache_beam.transforms import environments from apache_beam.typehints import native_type_compatibility if TYPE_CHECKING: from google.protobuf import message # pylint: disable=ungrouped-imports from apache_beam.coders.coder_impl import IterableStateReader from apache_beam.coders.coder_impl import IterableStateWriter class _PipelineContextMap(object): """This is a bi-directional map between objects and ids. Under the hood it encodes and decodes these objects into runner API representations. 
""" def __init__(self, context, obj_type, namespace, # type: str proto_map=None # type: Optional[Mapping[str, message.Message]] ): self._pipeline_context = context self._obj_type = obj_type self._namespace = namespace self._obj_to_id = {} # type: Dict[Any, str] self._id_to_obj = {} # type: Dict[str, Any] self._id_to_proto = dict(proto_map) if proto_map else {} self._counter = 0 def _unique_ref(self, obj=None, label=None): # type: (Optional[Any], Optional[str]) -> str self._counter += 1 return "%s_%s_%s_%d" % ( self._namespace, self._obj_type.__name__, label or type(obj).__name__, self._counter) def populate_map(self, proto_map): # type: (Mapping[str, message.Message]) -> None for id, proto in self._id_to_proto.items(): proto_map[id].CopyFrom(proto) def get_id(self, obj, label=None): # type: (Any, Optional[str]) -> str if obj not in self._obj_to_id: id = self._unique_ref(obj, label) self._id_to_obj[id] = obj self._obj_to_id[obj] = id self._id_to_proto[id] = obj.to_runner_api(self._pipeline_context) return self._obj_to_id[obj] def get_proto(self, obj, label=None): # type: (Any, Optional[str]) -> message.Message return self._id_to_proto[self.get_id(obj, label)] def get_by_id(self, id): # type: (str) -> Any if id not in self._id_to_obj: self._id_to_obj[id] = self._obj_type.from_runner_api( self._id_to_proto[id], self._pipeline_context) return self._id_to_obj[id] def get_by_proto(self, maybe_new_proto, label=None, deduplicate=False): # type: (message.Message, Optional[str], bool) -> str if deduplicate: for id, proto in self._id_to_proto.items(): if proto == maybe_new_proto: return id return self.put_proto(self._unique_ref(label), maybe_new_proto) def get_id_to_proto_map(self): # type: () -> Dict[str, message.Message] return self._id_to_proto def put_proto(self, id, proto): # type: (str, message.Message) -> str if id in self._id_to_proto: raise ValueError("Id '%s' is already taken." % id) self._id_to_proto[id] = proto return id def __getitem__(self, id): # type: (str) -> Any return self.get_by_id(id) def __contains__(self, id): # type: (str) -> bool return id in self._id_to_proto class PipelineContext(object): """For internal use only; no backwards-compatibility guarantees. Used for accessing and constructing the referenced objects of a Pipeline. 
""" _COMPONENT_TYPES = { 'transforms': pipeline.AppliedPTransform, 'pcollections': pvalue.PCollection, 'coders': coders.Coder, 'windowing_strategies': core.Windowing, 'environments': environments.Environment, } def __init__(self, proto=None, # type: Optional[Union[beam_runner_api_pb2.Components, beam_fn_api_pb2.ProcessBundleDescriptor]] default_environment=None, # type: Optional[environments.Environment] use_fake_coders=False, iterable_state_read=None, # type: Optional[IterableStateReader] iterable_state_write=None, # type: Optional[IterableStateWriter] namespace='ref', allow_proto_holders=False ): if isinstance(proto, beam_fn_api_pb2.ProcessBundleDescriptor): proto = beam_runner_api_pb2.Components( coders=dict(proto.coders.items()), windowing_strategies=dict(proto.windowing_strategies.items()), environments=dict(proto.environments.items())) for name, cls in self._COMPONENT_TYPES.items(): setattr( self, name, _PipelineContextMap( self, cls, namespace, getattr(proto, name, None))) if default_environment: self._default_environment_id = self.environments.get_id( default_environment, label='default_environment') else: self._default_environment_id = None self.use_fake_coders = use_fake_coders self.iterable_state_read = iterable_state_read self.iterable_state_write = iterable_state_write self.allow_proto_holders = allow_proto_holders # If fake coders are requested, return a pickled version of the element type # rather than an actual coder. The element type is required for some runners, # as well as performing a round-trip through protos. # TODO(BEAM-2717): Remove once this is no longer needed. def coder_id_from_element_type(self, element_type): # type: (Any) -> str if self.use_fake_coders: return pickler.dumps(element_type) else: return self.coders.get_id(coders.registry.get_coder(element_type)) def element_type_from_coder_id(self, coder_id): # type: (str) -> Any if self.use_fake_coders or coder_id not in self.coders: return pickler.loads(coder_id) else: return native_type_compatibility.convert_to_beam_type( self.coders[coder_id].to_type_hint()) @staticmethod def from_runner_api(proto): # type: (beam_runner_api_pb2.Components) -> PipelineContext return PipelineContext(proto) def to_runner_api(self): # type: () -> beam_runner_api_pb2.Components context_proto = beam_runner_api_pb2.Components() for name in self._COMPONENT_TYPES: getattr(self, name).populate_map(getattr(context_proto, name)) return context_proto def default_environment_id(self): # type: () -> Optional[str] return self._default_environment_id
apache-2.0
o3project/openflowj-otn
src/main/java/org/projectfloodlight/openflow/protocol/ver14/OFTableFeaturesVer14.java
19953
// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University // Copyright (c) 2011, 2012 Open Networking Foundation // Copyright (c) 2012, 2013 Big Switch Networks, Inc. // This library was generated by the LoxiGen Compiler. // See the file LICENSE.txt which should have been included in the source distribution // Automatically generated by LOXI from template of_class.java // Do not modify package org.projectfloodlight.openflow.protocol.ver14; import org.projectfloodlight.openflow.protocol.*; import org.projectfloodlight.openflow.protocol.action.*; import org.projectfloodlight.openflow.protocol.actionid.*; import org.projectfloodlight.openflow.protocol.bsntlv.*; import org.projectfloodlight.openflow.protocol.errormsg.*; import org.projectfloodlight.openflow.protocol.meterband.*; import org.projectfloodlight.openflow.protocol.instruction.*; import org.projectfloodlight.openflow.protocol.instructionid.*; import org.projectfloodlight.openflow.protocol.match.*; import org.projectfloodlight.openflow.protocol.oxm.*; import org.projectfloodlight.openflow.protocol.queueprop.*; import org.projectfloodlight.openflow.types.*; import org.projectfloodlight.openflow.util.*; import org.projectfloodlight.openflow.exceptions.*; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.List; import com.google.common.collect.ImmutableList; import java.util.Set; import org.jboss.netty.buffer.ChannelBuffer; import com.google.common.hash.PrimitiveSink; import com.google.common.hash.Funnel; class OFTableFeaturesVer14 implements OFTableFeatures { private static final Logger logger = LoggerFactory.getLogger(OFTableFeaturesVer14.class); // version: 1.4 final static byte WIRE_VERSION = 5; final static int MINIMUM_LENGTH = 64; private final static TableId DEFAULT_TABLE_ID = TableId.ALL; private final static String DEFAULT_NAME = ""; private final static U64 DEFAULT_METADATA_MATCH = U64.ZERO; private final static U64 DEFAULT_METADATA_WRITE = U64.ZERO; private final static long DEFAULT_CONFIG = 0x0L; private final static long DEFAULT_MAX_ENTRIES = 0x0L; private final static List<OFTableFeatureProp> DEFAULT_PROPERTIES = ImmutableList.<OFTableFeatureProp>of(); // OF message fields private final TableId tableId; private final String name; private final U64 metadataMatch; private final U64 metadataWrite; private final long config; private final long maxEntries; private final List<OFTableFeatureProp> properties; // // Immutable default instance final static OFTableFeaturesVer14 DEFAULT = new OFTableFeaturesVer14( DEFAULT_TABLE_ID, DEFAULT_NAME, DEFAULT_METADATA_MATCH, DEFAULT_METADATA_WRITE, DEFAULT_CONFIG, DEFAULT_MAX_ENTRIES, DEFAULT_PROPERTIES ); // package private constructor - used by readers, builders, and factory OFTableFeaturesVer14(TableId tableId, String name, U64 metadataMatch, U64 metadataWrite, long config, long maxEntries, List<OFTableFeatureProp> properties) { if(tableId == null) { throw new NullPointerException("OFTableFeaturesVer14: property tableId cannot be null"); } if(name == null) { throw new NullPointerException("OFTableFeaturesVer14: property name cannot be null"); } if(metadataMatch == null) { throw new NullPointerException("OFTableFeaturesVer14: property metadataMatch cannot be null"); } if(metadataWrite == null) { throw new NullPointerException("OFTableFeaturesVer14: property metadataWrite cannot be null"); } if(properties == null) { throw new NullPointerException("OFTableFeaturesVer14: property properties cannot be null"); } this.tableId = tableId; this.name 
= name; this.metadataMatch = metadataMatch; this.metadataWrite = metadataWrite; this.config = config; this.maxEntries = maxEntries; this.properties = properties; } // Accessors for OF message fields @Override public TableId getTableId() { return tableId; } @Override public String getName() { return name; } @Override public U64 getMetadataMatch() { return metadataMatch; } @Override public U64 getMetadataWrite() { return metadataWrite; } @Override public long getConfig() { return config; } @Override public long getMaxEntries() { return maxEntries; } @Override public List<OFTableFeatureProp> getProperties() { return properties; } @Override public OFVersion getVersion() { return OFVersion.OF_14; } public OFTableFeatures.Builder createBuilder() { return new BuilderWithParent(this); } static class BuilderWithParent implements OFTableFeatures.Builder { final OFTableFeaturesVer14 parentMessage; // OF message fields private boolean tableIdSet; private TableId tableId; private boolean nameSet; private String name; private boolean metadataMatchSet; private U64 metadataMatch; private boolean metadataWriteSet; private U64 metadataWrite; private boolean configSet; private long config; private boolean maxEntriesSet; private long maxEntries; private boolean propertiesSet; private List<OFTableFeatureProp> properties; BuilderWithParent(OFTableFeaturesVer14 parentMessage) { this.parentMessage = parentMessage; } @Override public TableId getTableId() { return tableId; } @Override public OFTableFeatures.Builder setTableId(TableId tableId) { this.tableId = tableId; this.tableIdSet = true; return this; } @Override public String getName() { return name; } @Override public OFTableFeatures.Builder setName(String name) { this.name = name; this.nameSet = true; return this; } @Override public U64 getMetadataMatch() { return metadataMatch; } @Override public OFTableFeatures.Builder setMetadataMatch(U64 metadataMatch) { this.metadataMatch = metadataMatch; this.metadataMatchSet = true; return this; } @Override public U64 getMetadataWrite() { return metadataWrite; } @Override public OFTableFeatures.Builder setMetadataWrite(U64 metadataWrite) { this.metadataWrite = metadataWrite; this.metadataWriteSet = true; return this; } @Override public long getConfig() { return config; } @Override public OFTableFeatures.Builder setConfig(long config) { this.config = config; this.configSet = true; return this; } @Override public long getMaxEntries() { return maxEntries; } @Override public OFTableFeatures.Builder setMaxEntries(long maxEntries) { this.maxEntries = maxEntries; this.maxEntriesSet = true; return this; } @Override public List<OFTableFeatureProp> getProperties() { return properties; } @Override public OFTableFeatures.Builder setProperties(List<OFTableFeatureProp> properties) { this.properties = properties; this.propertiesSet = true; return this; } @Override public OFVersion getVersion() { return OFVersion.OF_14; } @Override public OFTableFeatures build() { TableId tableId = this.tableIdSet ? this.tableId : parentMessage.tableId; if(tableId == null) throw new NullPointerException("Property tableId must not be null"); String name = this.nameSet ? this.name : parentMessage.name; if(name == null) throw new NullPointerException("Property name must not be null"); U64 metadataMatch = this.metadataMatchSet ? this.metadataMatch : parentMessage.metadataMatch; if(metadataMatch == null) throw new NullPointerException("Property metadataMatch must not be null"); U64 metadataWrite = this.metadataWriteSet ? 
this.metadataWrite : parentMessage.metadataWrite; if(metadataWrite == null) throw new NullPointerException("Property metadataWrite must not be null"); long config = this.configSet ? this.config : parentMessage.config; long maxEntries = this.maxEntriesSet ? this.maxEntries : parentMessage.maxEntries; List<OFTableFeatureProp> properties = this.propertiesSet ? this.properties : parentMessage.properties; if(properties == null) throw new NullPointerException("Property properties must not be null"); // return new OFTableFeaturesVer14( tableId, name, metadataMatch, metadataWrite, config, maxEntries, properties ); } } static class Builder implements OFTableFeatures.Builder { // OF message fields private boolean tableIdSet; private TableId tableId; private boolean nameSet; private String name; private boolean metadataMatchSet; private U64 metadataMatch; private boolean metadataWriteSet; private U64 metadataWrite; private boolean configSet; private long config; private boolean maxEntriesSet; private long maxEntries; private boolean propertiesSet; private List<OFTableFeatureProp> properties; @Override public TableId getTableId() { return tableId; } @Override public OFTableFeatures.Builder setTableId(TableId tableId) { this.tableId = tableId; this.tableIdSet = true; return this; } @Override public String getName() { return name; } @Override public OFTableFeatures.Builder setName(String name) { this.name = name; this.nameSet = true; return this; } @Override public U64 getMetadataMatch() { return metadataMatch; } @Override public OFTableFeatures.Builder setMetadataMatch(U64 metadataMatch) { this.metadataMatch = metadataMatch; this.metadataMatchSet = true; return this; } @Override public U64 getMetadataWrite() { return metadataWrite; } @Override public OFTableFeatures.Builder setMetadataWrite(U64 metadataWrite) { this.metadataWrite = metadataWrite; this.metadataWriteSet = true; return this; } @Override public long getConfig() { return config; } @Override public OFTableFeatures.Builder setConfig(long config) { this.config = config; this.configSet = true; return this; } @Override public long getMaxEntries() { return maxEntries; } @Override public OFTableFeatures.Builder setMaxEntries(long maxEntries) { this.maxEntries = maxEntries; this.maxEntriesSet = true; return this; } @Override public List<OFTableFeatureProp> getProperties() { return properties; } @Override public OFTableFeatures.Builder setProperties(List<OFTableFeatureProp> properties) { this.properties = properties; this.propertiesSet = true; return this; } @Override public OFVersion getVersion() { return OFVersion.OF_14; } // @Override public OFTableFeatures build() { TableId tableId = this.tableIdSet ? this.tableId : DEFAULT_TABLE_ID; if(tableId == null) throw new NullPointerException("Property tableId must not be null"); String name = this.nameSet ? this.name : DEFAULT_NAME; if(name == null) throw new NullPointerException("Property name must not be null"); U64 metadataMatch = this.metadataMatchSet ? this.metadataMatch : DEFAULT_METADATA_MATCH; if(metadataMatch == null) throw new NullPointerException("Property metadataMatch must not be null"); U64 metadataWrite = this.metadataWriteSet ? this.metadataWrite : DEFAULT_METADATA_WRITE; if(metadataWrite == null) throw new NullPointerException("Property metadataWrite must not be null"); long config = this.configSet ? this.config : DEFAULT_CONFIG; long maxEntries = this.maxEntriesSet ? this.maxEntries : DEFAULT_MAX_ENTRIES; List<OFTableFeatureProp> properties = this.propertiesSet ? 
this.properties : DEFAULT_PROPERTIES; if(properties == null) throw new NullPointerException("Property properties must not be null"); return new OFTableFeaturesVer14( tableId, name, metadataMatch, metadataWrite, config, maxEntries, properties ); } } final static Reader READER = new Reader(); static class Reader implements OFMessageReader<OFTableFeatures> { @Override public OFTableFeatures readFrom(ChannelBuffer bb) throws OFParseError { int start = bb.readerIndex(); int length = U16.f(bb.readShort()); if(length < MINIMUM_LENGTH) throw new OFParseError("Wrong length: Expected to be >= " + MINIMUM_LENGTH + ", was: " + length); if(bb.readableBytes() + (bb.readerIndex() - start) < length) { // Buffer does not have all data yet bb.readerIndex(start); return null; } if(logger.isTraceEnabled()) logger.trace("readFrom - length={}", length); TableId tableId = TableId.readByte(bb); // pad: 5 bytes bb.skipBytes(5); String name = ChannelUtils.readFixedLengthString(bb, 32); U64 metadataMatch = U64.ofRaw(bb.readLong()); U64 metadataWrite = U64.ofRaw(bb.readLong()); long config = U32.f(bb.readInt()); long maxEntries = U32.f(bb.readInt()); List<OFTableFeatureProp> properties = ChannelUtils.readList(bb, length - (bb.readerIndex() - start), OFTableFeaturePropVer14.READER); OFTableFeaturesVer14 tableFeaturesVer14 = new OFTableFeaturesVer14( tableId, name, metadataMatch, metadataWrite, config, maxEntries, properties ); if(logger.isTraceEnabled()) logger.trace("readFrom - read={}", tableFeaturesVer14); return tableFeaturesVer14; } } public void putTo(PrimitiveSink sink) { FUNNEL.funnel(this, sink); } final static OFTableFeaturesVer14Funnel FUNNEL = new OFTableFeaturesVer14Funnel(); static class OFTableFeaturesVer14Funnel implements Funnel<OFTableFeaturesVer14> { private static final long serialVersionUID = 1L; @Override public void funnel(OFTableFeaturesVer14 message, PrimitiveSink sink) { // FIXME: skip funnel of length message.tableId.putTo(sink); // skip pad (5 bytes) sink.putUnencodedChars(message.name); message.metadataMatch.putTo(sink); message.metadataWrite.putTo(sink); sink.putLong(message.config); sink.putLong(message.maxEntries); FunnelUtils.putList(message.properties, sink); } } public void writeTo(ChannelBuffer bb) { WRITER.write(bb, this); } final static Writer WRITER = new Writer(); static class Writer implements OFMessageWriter<OFTableFeaturesVer14> { @Override public void write(ChannelBuffer bb, OFTableFeaturesVer14 message) { int startIndex = bb.writerIndex(); // length is length of variable message, will be updated at the end int lengthIndex = bb.writerIndex(); bb.writeShort(U16.t(0)); message.tableId.writeByte(bb); // pad: 5 bytes bb.writeZero(5); ChannelUtils.writeFixedLengthString(bb, message.name, 32); bb.writeLong(message.metadataMatch.getValue()); bb.writeLong(message.metadataWrite.getValue()); bb.writeInt(U32.t(message.config)); bb.writeInt(U32.t(message.maxEntries)); ChannelUtils.writeList(bb, message.properties); // update length field int length = bb.writerIndex() - startIndex; bb.setShort(lengthIndex, length); } } @Override public String toString() { StringBuilder b = new StringBuilder("OFTableFeaturesVer14("); b.append("tableId=").append(tableId); b.append(", "); b.append("name=").append(name); b.append(", "); b.append("metadataMatch=").append(metadataMatch); b.append(", "); b.append("metadataWrite=").append(metadataWrite); b.append(", "); b.append("config=").append(config); b.append(", "); b.append("maxEntries=").append(maxEntries); b.append(", "); 
b.append("properties=").append(properties); b.append(")"); return b.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; OFTableFeaturesVer14 other = (OFTableFeaturesVer14) obj; if (tableId == null) { if (other.tableId != null) return false; } else if (!tableId.equals(other.tableId)) return false; if (name == null) { if (other.name != null) return false; } else if (!name.equals(other.name)) return false; if (metadataMatch == null) { if (other.metadataMatch != null) return false; } else if (!metadataMatch.equals(other.metadataMatch)) return false; if (metadataWrite == null) { if (other.metadataWrite != null) return false; } else if (!metadataWrite.equals(other.metadataWrite)) return false; if( config != other.config) return false; if( maxEntries != other.maxEntries) return false; if (properties == null) { if (other.properties != null) return false; } else if (!properties.equals(other.properties)) return false; return true; } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + ((tableId == null) ? 0 : tableId.hashCode()); result = prime * result + ((name == null) ? 0 : name.hashCode()); result = prime * result + ((metadataMatch == null) ? 0 : metadataMatch.hashCode()); result = prime * result + ((metadataWrite == null) ? 0 : metadataWrite.hashCode()); result = prime * (int) (config ^ (config >>> 32)); result = prime * (int) (maxEntries ^ (maxEntries >>> 32)); result = prime * result + ((properties == null) ? 0 : properties.hashCode()); return result; } }
apache-2.0
ox-it/ords-ui
src/main/webapp/project/controllers/newMemberController.js
2975
'use strict'; ords.controller('newMemberController', function ($rootScope, $scope, $location, $routeParams, AuthService, Project, User, Member, growl, gettextCatalog, Invitation) { // // This page doesn't make sense to view // without being logged in, so redirect // back to the home view // AuthService.check(); // // Get the current Project // $scope.project = Project.get({ id: $routeParams.id }); // // Flag for switching the type of form // $scope.isInvite = false; $scope.cancel = function(){ $location.path("/project/"+$scope.project.projectId); } // // Process the POST to create the Member // $scope.newMember = function(){ // // Check that the specified user exists - if not, we want to show the Invite page instead // User.lookup( {email:$scope.member.principalName}, function(){ $scope.createNewMember(); }, function(response){ if (response.status === 404){ $scope.isInvite = true; $scope.invitation = {}; $scope.invitation.email = $scope.member.principalName; $scope.invitation.roleRequired = $scope.member.role; } } ) } // // Actually add the member to the project // $scope.createNewMember = function(){ Member.save( { id:$scope.project.projectId }, $scope.member, function(){ growl.success( gettextCatalog.getString("MemPost200") ); $location.path("/project/"+$scope.project.projectId); }, function(response){ if (response.status === 400) { growl.error( gettextCatalog.getString("MemPost400") ) }; if (response.status === 403) { growl.error( gettextCatalog.getString("Gen403") ) }; if (response.status === 404) { growl.error( gettextCatalog.getString("MemPost404") ) }; if (response.status === 410) { growl.error( gettextCatalog.getString("Gen410") ) }; if (response.status === 500) { growl.error( gettextCatalog.getString("Gen500") ) }; $location.path("/project/"+$scope.project.projectId); } ); } // // Create a new invitation request // $scope.newInvitation = function(){ $scope.invitation.projectId = $scope.project.projectId; $scope.invitation.sender = $rootScope.user.name; Invitation.save( { id:$scope.project.projectId }, $scope.invitation, function(){ growl.success( gettextCatalog.getString("InvPost200") ); $location.path("/project/"+$scope.project.projectId); }, function(response){ if (response.status === 400) { growl.error( gettextCatalog.getString("InvPost400") ) }; if (response.status === 403) { growl.error( gettextCatalog.getString("Gen403") ) }; if (response.status === 404) { growl.error( gettextCatalog.getString("InvPost404") ) }; if (response.status === 410) { growl.error( gettextCatalog.getString("Gen410") ) }; if (response.status === 500) { growl.error( gettextCatalog.getString("Gen500") ) }; $location.path("/project/"+$scope.project.projectId); } ); } });
apache-2.0
daryllxd/riant_fronty
application/views/pages/survey.php
4678
<section id="survey-form" class="container"> <div class="row"> <div class="span12"> <h1>Post-Usage Evaluation</h1> <p>Thank you for using our system. Your answers will be completely confidential.</p> <form action="<?php echo base_url('submit_survey'); ?>" method="post" id="form-survey" class="form-inline"> <table class="table"> <tr> <td colspan="2"><strong>Hello.</strong></td> </tr> <tr> <td> I am a... </td> <td class="choices"> <label class="radio inline"> <input type="radio" name="survey-profession" value="designer"> Web designer </label> <label class="radio inline"> <input type="radio" name="survey-profession" value="developer"> Web developer </label> <label class="radio inline"> <input type="radio" name="survey-profession" value="both"> Both </label> </td> </tr> <tr> <td>I have been creating websites for...</td> <td class="choices"> <div class="input-append"> <input name="survey-years-experience" id="appendedInput" class="inline" type="number"> <span class="add-on">years</span> </div> </td> </tr> <tr> <td>When I develop web sites, I use a/an...</td> <td class="choices"> <label class="checkbox inline"> <input type="checkbox" name="survey-tools-used" id="optionsRadios1" value="text editor"> <a class="tooltipper" data-toggle="tooltip" title="Notepad++, Sublime Text, Vim, Emacs">Text Editors</a> </label> <label class="checkbox inline"> <input type="checkbox" name="survey-tools-used" value="ide"> <a class="tooltipper" data-toggle="tooltip" title="Eclipse, Netbeans, Visual Studio">IDEs</a> </label> <label class="checkbox inline"> <input type="checkbox" name="survey-tools-used" value="wysiwyg"> <a class="tooltipper" data-toggle="tooltip" title="Dreamweaver, Amaya">WYSIWYG</a> </label> <label class="checkbox inline"> <input type="checkbox" name="survey-tools-used" value="website builder"> <a class="tooltipper" data-toggle="tooltip" title="Weebly, Wix, Yola">Website Builders</a> </label> </td> </tr> <tr> <td colspan="2"><strong>What were your experiences while using our software?</strong></td> </tr> <?php foreach ($questions as $resource) { ?> <tr> <td><?php echo $resource['question_text']; ?></td> <td class="question choices"> <label class="radio inline"> <input type="radio" name="survey-question-<?php echo $resource['question_id']; ?>">Yes</label> <label class="radio inline"> <input type="radio" name="survey-question-<?php echo $resource['question_id']; ?>" value="0">No</label> </td> </tr> <?php } ?> </table> <div class="control-group"> <div class="controls"> <a id="submit-survey" class="btn">Submit</a> </div> </div> </form> </div> </div> </section>
apache-2.0
YAFNET/YAFNET
yafsrc/Lucene.Net/Lucene.Net.Queries/Function/ValueSources/MinFloatFunction.cs
1788
// Lucene version compatibility level 4.8.1 using System; namespace YAF.Lucene.Net.Queries.Function.ValueSources { /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /// <summary> /// <see cref="MinSingleFunction"/> returns the min of its components. /// <para/> /// NOTE: This was MinFloatFunction in Lucene /// </summary> public class MinSingleFunction : MultiSingleFunction { public MinSingleFunction(ValueSource[] sources) : base(sources) { } protected override string Name => "min"; protected override float Func(int doc, FunctionValues[] valsArr) { if (valsArr.Length == 0) { return 0.0f; } float val = float.PositiveInfinity; foreach (FunctionValues vals in valsArr) { val = Math.Min(vals.SingleVal(doc), val); } return val; } } }
apache-2.0
mwitkow/go-httpwares
wrapped_responsewriter_go18.go
1911
// +build go1.8 package httpwares import ( "bufio" "io" "net" "net/http" ) // newWrappedResponseWriter handles the four different methods of upgrading a // http.ResponseWriter to a WrappedResponseWriter. func newWrappedResponseWriter(w http.ResponseWriter) WrappedResponseWriter { wrapped := &wrappedResponseWriter{ResponseWriter: w} _, isCloseNotifier := w.(http.CloseNotifier) _, isFlusher := w.(http.Flusher) _, isHijacker := w.(http.Hijacker) _, isPusher := w.(http.Pusher) _, isReaderFrom := w.(io.ReaderFrom) // Check for the four most common combinations of interfaces a // http.ResponseWriter might implement. if !isHijacker && isPusher && isCloseNotifier { // http2.responseWriter (http 2.0) return &http2WrappedResponseWriter{wrapped} } else if isCloseNotifier && isFlusher && isHijacker && isReaderFrom { // http.response (http 1.1) return &http1WrappedResponseWriter{wrapped} } return wrapped } type http2WrappedResponseWriter struct { *wrappedResponseWriter } func (w *http2WrappedResponseWriter) Flush() { w.wrappedResponseWriter.ResponseWriter.(http.Flusher).Flush() } func (w *http2WrappedResponseWriter) CloseNotify() <-chan bool { return w.wrappedResponseWriter.ResponseWriter.(http.CloseNotifier).CloseNotify() } func (w *http2WrappedResponseWriter) Push(target string, opts *http.PushOptions) error { return w.wrappedResponseWriter.ResponseWriter.(http.Pusher).Push(target, opts) } type http1WrappedResponseWriter struct { *wrappedResponseWriter } func (w *http1WrappedResponseWriter) Flush() { w.wrappedResponseWriter.ResponseWriter.(http.Flusher).Flush() } func (w *http1WrappedResponseWriter) CloseNotify() <-chan bool { return w.wrappedResponseWriter.ResponseWriter.(http.CloseNotifier).CloseNotify() } func (w *http1WrappedResponseWriter) Hijack() (net.Conn, *bufio.ReadWriter, error) { return w.wrappedResponseWriter.ResponseWriter.(http.Hijacker).Hijack() }
apache-2.0
mrvisser/Hilary
node_modules/oae-authentication/tests/test-cookies.js
12584
/* * Copyright 2015 Apereo Foundation (AF) Licensed under the * Educational Community License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may * obtain a copy of the License at * * http://opensource.org/licenses/ECL-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" * BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing * permissions and limitations under the License. */ var _ = require('underscore'); var assert = require('assert'); var RestAPI = require('oae-rest'); var TestsUtil = require('oae-tests'); var AuthenticationAPI = require('oae-authentication'); describe('Authentication', function() { // Rest context that can be used every time we need to make a request as a tenant admin var camAdminRestContext = null; // Rest context that can be used every time we need to make a request as a global admin var globalAdminRestContext = null; /** * Function that will fill up the tenant admin and anymous rest context */ before(function(callback) { // Prepare the contexts with which we'll perform requests camAdminRestContext = TestsUtil.createTenantAdminRestContext(global.oaeTests.tenants.cam.host); globalAdminRestContext = TestsUtil.createGlobalAdminRestContext(); return callback(); }); describe('Local authentication', function() { /*! * Given a set of user agents, perform an authentication using each and record the cookie * data that is returned in the authentication response for each user agent * * @param {RestContext} restContext The REST context to use for each authentication * @param {String} username The username to use for each authentication * @param {String} password The password to use for each authentication * @param {String[]} userAgents A list of user agents to authenticate with * @param {Function} callback Invoked when all authentications have successfully completed * @param {Object} callback.userAgentCookies An object whose keys are the user agents, and values are an array of `request` Cookie's that were returned in the authentication response */ var _getCookiesForUserAgents = function(restContext, username, password, userAgents, callback, _userAgentCookiesToGet, _userAgentCookies) { _userAgentCookiesToGet = _userAgentCookiesToGet || userAgents.slice(); _userAgentCookies = _userAgentCookies || {}; // If there are no more user agents to authenticate with, return with the cookies we have if (_.isEmpty(_userAgentCookiesToGet)) { return callback(_userAgentCookies); } // Get the next user agent to authenticate, and apply the user-agent header to the // restContext var userAgent = _userAgentCookiesToGet.shift(); restContext.additionalHeaders = restContext.additionalHeaders || {}; restContext.additionalHeaders['user-agent'] = userAgent; // First logout the context to clear the cookie jar RestAPI.Authentication.logout(restContext, function(err) { assert.ok(!err); // Authenticate the user agent RestAPI.Authentication.login(restContext, username, password, function(err) { assert.ok(!err); // Aggregate the cookies into the user agent map _userAgentCookies[userAgent] = restContext.cookieJar._jar.getCookiesSync(restContext.host); return _getCookiesForUserAgents(restContext, username, password, userAgents, callback, _userAgentCookiesToGet, _userAgentCookies); }); }); }; /** * Test that verifies that cookies have an appropriate expiration heuristic depending on the * client 
device and tenant */ it('verify cookie expiration for mobile and non-mobile browsers', function(callback) { /*! * A collection of user agents for a variety of desktop / non-mobile clients */ var nonMobileUserAgents = [ // Firefox variants 'Mozilla/5.0 (Windows NT 6.3; rv:36.0) Gecko/20100101 Firefox/36.0', 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10; rv:33.0) Gecko/20100101 Firefox/33.0', 'Mozilla/5.0 (X11; Linux i586; rv:31.0) Gecko/20100101 Firefox/31.0', 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:31.0) Gecko/20130401 Firefox/31.0', 'Mozilla/5.0 (Windows NT 5.1; rv:31.0) Gecko/20100101 Firefox/31.0', // Chrome variants 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36', 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.1 Safari/537.36', 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.0 Safari/537.36', 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.0 Safari/537.36', // Safari variants 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_3) AppleWebKit/537.75.14 (KHTML, like Gecko) Version/7.0.3 Safari/7046A194A', 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_6_8) AppleWebKit/537.13+ (KHTML, like Gecko) Version/5.1.7 Safari/534.57.2', 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_3) AppleWebKit/534.55.3 (KHTML, like Gecko) Version/5.1.3 Safari/534.53.10', // IE variants 'Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; AS; rv:11.0) like Gecko', 'Mozilla/5.0 (compatible, MSIE 11, Windows NT 6.3; Trident/7.0; rv:11.0) like Gecko', 'Mozilla/5.0 (compatible; MSIE 10.6; Windows NT 6.1; Trident/5.0; InfoPath.2; SLCC1; .NET CLR 3.0.4506.2152; .NET CLR 3.5.30729; .NET CLR 2.0.50727) 3gpp-gba UNTRUSTED/1.0', 'Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; WOW64; Trident/6.0)', 'Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/6.0)', 'Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/5.0)', 'Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/4.0; InfoPath.2; SV1; .NET CLR 2.0.50727; WOW64)', 'Mozilla/5.0 (compatible; MSIE 10.0; Macintosh; Intel Mac OS X 10_7_3; Trident/6.0)', 'Mozilla/4.0 (Compatible; MSIE 8.0; Windows NT 5.2; Trident/6.0)', 'Mozilla/4.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/5.0)', 'Mozilla/1.22 (compatible; MSIE 10.0; Windows 3.1)' ]; /*! * A collection of user agents for mobile devices (phones, tablets, etc...) 
*/ var mobileUserAgents = [ // iPhone/iPad variants 'Mozilla/5.0 (iPhone; U; CPU iPhone OS 4_0 like Mac OS X; en-us) AppleWebKit/532.9 (KHTML, like Gecko) Version/4.0.5 Mobile/8A293 Safari/6531.22.7', 'Mozilla/5.0 (iPhone; CPU iPhone OS 5_0 like Mac OS X) AppleWebKit/534.46 (KHTML, like Gecko) Version/5.1 Mobile/9A334 Safari/7534.48.3', 'Mozilla/5.0 (iPad; CPU OS 5_0 like Mac OS X) AppleWebKit/534.46 (KHTML, like Gecko) Version/5.1 Mobile/9A334 Safari/7534.48.3', 'Mozilla/5.0 (iPhone; CPU iPhone OS 6_0 like Mac OS X) AppleWebKit/536.26 (KHTML, like Gecko) Version/6.0 Mobile/10A5376e Safari/8536.25', 'Mozilla/5.0 (iPad; CPU OS 6_0 like Mac OS X) AppleWebKit/536.26 (KHTML, like Gecko) Version/6.0 Mobile/10A5376e Safari/8536.25', // Android variants 'Mozilla/5.0 (Linux; U; Android 4.0.3; ko-kr; LG-L160L Build/IML74K) AppleWebkit/534.30 (KHTML, like Gecko) Version/4.0 Mobile Safari/534.30', 'Mozilla/5.0 (Linux; U; Android 4.0.3; de-ch; HTC Sensation Build/IML74K) AppleWebKit/534.30 (KHTML, like Gecko) Version/4.0 Mobile Safari/534.30', 'Mozilla/5.0 (Linux; U; Android 2.3.5; zh-cn; HTC_IncredibleS_S710e Build/GRJ90) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1', // Windows variants 'Mozilla/5.0 (compatible; MSIE 10.0; Windows Phone 8.0; Trident/6.0; IEMobile/10.0; ARM; Touch; NOKIA; Lumia 920)', 'Mozilla/5.0 (compatible; MSIE 9.0; Windows Phone OS 7.5; Trident/5.0; IEMobile/9.0)', 'HTC_Touch_3G Mozilla/4.0 (compatible; MSIE 6.0; Windows CE; IEMobile 7.11)', 'Mozilla/4.0 (compatible; MSIE 7.0; Windows Phone OS 7.0; Trident/3.1; IEMobile/7.0; Nokia;N70)' ]; // Combine all user agents var allUserAgents = _.chain(mobileUserAgents).union(nonMobileUserAgents).shuffle().value(); // Create a test user var username = TestsUtil.generateTestUserId(); var email = TestsUtil.generateTestEmailAddress(null, global.oaeTests.tenants.cam.emailDomain); RestAPI.User.createUser(camAdminRestContext, username, 'password', 'Test User', email, {}, function(err, createdUser) { assert.ok(!err); var userRestContext = TestsUtil.createTenantRestContext(global.oaeTests.tenants.cam.host, username, 'password'); // Get all user agents for a user. When using a user tenant, mobile user-agents // should result in a cookie that has a length expiry _getCookiesForUserAgents(userRestContext, username, 'password', allUserAgents, function(userAgentCookies) { assert.strictEqual(_.keys(userAgentCookies).length, allUserAgents.length); // Ensure each mobile user agent has a cookie with an explicit expiry time that // is more than 29 days into the future _.each(mobileUserAgents, function(mobileUserAgent) { var cookies = userAgentCookies[mobileUserAgent]; assert.strictEqual(cookies.length, 2); _.each(cookies, function(cookie) { assert.ok(_.isNumber(cookie.TTL())); assert.ok(cookie.TTL() > (1000 * 60 * 60 * 24 * 29)); assert.notEqual(cookie.TTL(), Infinity); }); }); // Ensure each non-mobile user agent has a cookie without an explicit expiry // (i.e., browser session cookie) _.each(nonMobileUserAgents, function(nonMobileUserAgent) { var cookies = userAgentCookies[nonMobileUserAgent]; assert.strictEqual(cookies.length, 2); _.each(cookies, function(cookie) { assert.strictEqual(cookie.TTL(), Infinity); }); }); // Get all user agents for a global admin login. 
When using the global admin // tenant, both mobile and non-mobile user-agents should not have an extended // expiry _getCookiesForUserAgents(globalAdminRestContext, 'administrator', 'administrator', allUserAgents, function(userAgentCookies) { assert.strictEqual(_.keys(userAgentCookies).length, allUserAgents.length); // Ensure all user agents have a cookie without an explicit expiry (i.e., // browser session cookie) _.each(allUserAgents, function(userAgent) { var cookies = userAgentCookies[userAgent]; assert.ok(!_.isEmpty(cookies)); _.each(cookies, function(cookie) { assert.strictEqual(cookie.TTL(), Infinity); }); }); return callback(); }); }); }); }); }); });
apache-2.0
codecentric/elasticsearch-shield-kerberos-realm
src/test/java/de/codecentric/elasticsearch/plugin/kerberosrealm/support/EmbeddedKRBServer.java
4151
/* Copyright 2015 codecentric AG Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. Author: Hendrik Saly <hendrik.saly@codecentric.de> */ package de.codecentric.elasticsearch.plugin.kerberosrealm.support; import java.io.File; import org.apache.commons.io.FileUtils; import org.apache.kerby.kerberos.kdc.impl.NettyKdcServerImpl; import org.apache.kerby.kerberos.kerb.server.SimpleKdcServer; import org.apache.kerby.kerberos.kerb.spec.ticket.TgtTicket; import org.apache.kerby.util.NetworkUtil; import org.elasticsearch.common.SuppressForbidden; @SuppressForbidden(reason = "unit test") public class EmbeddedKRBServer { private SimpleKdcServer simpleKdcServer; private String realm = "CCK.COM"; public void start(final File workDir) throws Exception { simpleKdcServer = new SimpleKdcServer(); simpleKdcServer.enableDebug(); simpleKdcServer.setKdcTcpPort(NetworkUtil.getServerPort()); simpleKdcServer.setKdcUdpPort(NetworkUtil.getServerPort()); simpleKdcServer.setAllowTcp(true); simpleKdcServer.setAllowUdp(true); simpleKdcServer.setKdcRealm(realm); simpleKdcServer.setKdcHost("localhost"); FileUtils.forceMkdir(workDir); simpleKdcServer.setWorkDir(workDir); simpleKdcServer.setInnerKdcImpl(new NettyKdcServerImpl(simpleKdcServer.getKdcSetting())); simpleKdcServer.init(); //System.setErr(new PrintStream(new NullOutputStream())); simpleKdcServer.start(); } public SimpleKdcServer getSimpleKdcServer() { return simpleKdcServer; } public static void main(final String[] args) throws Exception { final File workDir = new File("."); final EmbeddedKRBServer eks = new EmbeddedKRBServer(); eks.realm = "DUMMY.COM"; eks.start(workDir); eks.getSimpleKdcServer().createPrincipal("kirk/admin@DUMMY.COM", "kirkpwd"); eks.getSimpleKdcServer().createPrincipal("uhura@DUMMY.COM", "uhurapwd"); eks.getSimpleKdcServer().createPrincipal("service/1@DUMMY.COM", "service1pwd"); eks.getSimpleKdcServer().createPrincipal("service/2@DUMMY.COM", "service2pwd"); eks.getSimpleKdcServer().exportPrincipal("service/1@DUMMY.COM", new File(workDir, "service1.keytab")); //server, acceptor eks.getSimpleKdcServer().exportPrincipal("service/2@DUMMY.COM", new File(workDir, "service2.keytab")); //server, acceptor eks.getSimpleKdcServer().createPrincipal("HTTP/localhost@DUMMY.COM", "httplocpwd"); eks.getSimpleKdcServer().exportPrincipal("HTTP/localhost@DUMMY.COM", new File(workDir, "httploc.keytab")); //server, acceptor eks.getSimpleKdcServer().createPrincipal("HTTP/localhost@DUMMY.COM", "httpcpwd"); eks.getSimpleKdcServer().exportPrincipal("HTTP/localhost@DUMMY.COM", new File(workDir, "http.keytab")); //server, acceptor final TgtTicket tgt = eks.getSimpleKdcServer().getKrbClient().requestTgtWithPassword("kirk/admin@DUMMY.COM", "kirkpwd"); eks.getSimpleKdcServer().getKrbClient().storeTicket(tgt, new File(workDir, "kirk.cc")); try { try { FileUtils.copyFile(new File("/etc/krb5.conf"), new File("/etc/krb5.conf.bak")); } catch (final Exception e) { //ignore } FileUtils.copyFileToDirectory(new File(workDir, "krb5.conf"), new File("/etc/")); System.out.println("Generated krb5.conf copied to /etc"); } catch 
(final Exception e) { System.out.println("Unable to copy generated krb5.conf to /etc due to " + e.getMessage()); } } }
apache-2.0
lklong/fuckproject
src/com/zhigu/service/user/IAccountService.java
2119
package com.zhigu.service.user;

import java.util.Date;
import java.util.List;

import com.zhigu.model.Account;
import com.zhigu.model.AccountDetail;
import com.zhigu.model.PageBean;
import com.zhigu.model.RechargeRecord;
import com.zhigu.model.dto.MsgBean;

/**
 * User account information service.
 *
 * @author zhouqibing 2014-07-21 09:34:39
 */
public interface IAccountService {

    /**
     * Query a user's account information (confidential fields such as passwords are hidden).
     *
     * @param userID
     */
    public Account queryAccountByUserID(int userID);

    /**
     * Query a user's income/expense detail by order number (serial number).
     *
     * @param serialNO
     */
    public AccountDetail queryAccountDetailBySerialNO(String serialNO);

    /**
     * Query a user's income/expense details by date range.
     *
     * @param page
     * @param userID
     * @param startDate
     * @param endDate
     */
    public List<AccountDetail> queryAccountDetailList(PageBean<AccountDetail> page, int userID, Date startDate, Date endDate);

    /**
     * Save a recharge record.
     *
     * @param payType
     * @param money
     */
    public MsgBean saveRechargeRecord(int payType, String money);

    /**
     * Query a recharge record by record ID.
     *
     * @param recordID
     * @return
     */
    public RechargeRecord queryRechargeRecord(int recordID);

    /**
     * Query a recharge record by payment number.
     *
     * @param PaymentNO
     * @return
     */
    public RechargeRecord queryRechargeRecord(String PaymentNO);

    /**
     * Query a user's recharge records (paged).
     *
     * @param userID
     * @param page
     * @return
     */
    public List<RechargeRecord> queryRechargeRecord(Integer userID, PageBean<RechargeRecord> page);

    /**
     * Update a recharge record.
     *
     * @param record
     */
    public void updateRechargeSuccess(RechargeRecord record);

    /**
     * Verify the payment password (BCrypt check) for the current session user.
     *
     * @param payPasswd
     * @return
     */
    public MsgBean verifyPaypasswd(String payPasswd);

    /**
     * Update the payment password (the payment password is stored with BCrypt encryption).
     *
     * @param userID
     * @param paymentPwd
     * @param captcha
     */
    public MsgBean updatePaypasswd(int userID, String paymentPwd, String captcha);

    /**
     * Update the bank card bound to the account.
     *
     * @param bankNo
     * @param bankCardMaster
     * @param captcha
     * @param bankName
     * @return
     */
    public MsgBean updateBankNo(String bankNo, String bankCardMaster, String captcha, String bankName);
}
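For illustration only, the following is a minimal sketch of how a caller might consume this interface. The AccountStatementPrinter class, the constructor-injection style, and the assumption that PageBean has a usable no-argument constructor are hypothetical and not part of this repository.

package com.zhigu.service.user.example;

import java.util.Date;
import java.util.List;

import com.zhigu.model.AccountDetail;
import com.zhigu.model.PageBean;
import com.zhigu.service.user.IAccountService;

// Hypothetical caller of IAccountService; see the note above.
public class AccountStatementPrinter {

    private final IAccountService accountService;

    // The service implementation is assumed to be supplied by the application (e.g. via Spring wiring).
    public AccountStatementPrinter(IAccountService accountService) {
        this.accountService = accountService;
    }

    // Print one page of a user's income/expense details for a date range.
    public void printDetails(int userID, Date startDate, Date endDate) {
        PageBean<AccountDetail> page = new PageBean<AccountDetail>(); // assumed no-arg constructor
        List<AccountDetail> details = accountService.queryAccountDetailList(page, userID, startDate, endDate);
        for (AccountDetail detail : details) {
            System.out.println(detail);
        }
    }
}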
apache-2.0
razvanphp/arangodb
js/apps/system/_admin/aardvark/APP/clusterFrontend/build/cluster.js
127847
/*jshint unused: false */ /*global window, $, document */ (function() { "use strict"; var isCoordinator; window.isCoordinator = function() { if (isCoordinator === undefined) { $.ajax( "cluster/amICoordinator", { async: false, success: function(d) { isCoordinator = d; } } ); } return isCoordinator; }; window.versionHelper = { fromString: function (s) { var parts = s.replace(/-[a-zA-Z0-9_\-]*$/g, '').split('.'); return { major: parseInt(parts[0], 10) || 0, minor: parseInt(parts[1], 10) || 0, patch: parseInt(parts[2], 10) || 0, toString: function() { return this.major + "." + this.minor + "." + this.patch; } }; }, toString: function (v) { return v.major + '.' + v.minor + '.' + v.patch; } }; window.arangoHelper = { lastNotificationMessage: null, CollectionTypes: {}, systemAttributes: function () { return { '_id' : true, '_rev' : true, '_key' : true, '_bidirectional' : true, '_vertices' : true, '_from' : true, '_to' : true, '$id' : true }; }, fixTooltips: function (selector, placement) { $(selector).tooltip({ placement: placement, hide: false, show: false }); }, currentDatabase: function () { var returnVal = false; $.ajax({ type: "GET", cache: false, url: "/_api/database/current", contentType: "application/json", processData: false, async: false, success: function(data) { returnVal = data.result.name; }, error: function() { returnVal = false; } }); return returnVal; }, allHotkeys: { global: { name: "Site wide", content: [{ label: "scroll up", letter: "j" },{ label: "scroll down", letter: "k" }] }, jsoneditor: { name: "AQL editor", content: [{ label: "Submit", letter: "Ctrl + Return" },{ label: "Toggle comments", letter: "Ctrl + Shift + C" },{ label: "Undo", letter: "Ctrl + Z" },{ label: "Redo", letter: "Ctrl + Shift + Z" }] }, doceditor: { name: "Document editor", content: [{ label: "Insert", letter: "Ctrl + Insert" },{ label: "Save", letter: "Ctrl + Return, CMD + Return" },{ label: "Append", letter: "Ctrl + Shift + Insert" },{ label: "Duplicate", letter: "Ctrl + D" },{ label: "Remove", letter: "Ctrl + Delete" }] }, modals: { name: "Modal", content: [{ label: "Submit", letter: "Return" },{ label: "Close", letter: "Esc" },{ label: "Navigate buttons", letter: "Arrow keys" },{ label: "Navigate content", letter: "Tab" }] } }, hotkeysFunctions: { scrollDown: function () { window.scrollBy(0,180); }, scrollUp: function () { window.scrollBy(0,-180); }, showHotkeysModal: function () { var buttons = [], content = window.arangoHelper.allHotkeys; window.modalView.show("modalHotkeys.ejs", "Keyboard Shortcuts", buttons, content); } }, enableKeyboardHotkeys: function (enable) { var hotkeys = window.arangoHelper.hotkeysFunctions; if (enable === true) { $(document).on('keydown', null, 'j', hotkeys.scrollDown); $(document).on('keydown', null, 'k', hotkeys.scrollUp); } }, databaseAllowed: function () { var currentDB = this.currentDatabase(), returnVal = false; $.ajax({ type: "GET", cache: false, url: "/_db/"+ encodeURIComponent(currentDB) + "/_api/database/", contentType: "application/json", processData: false, async: false, success: function() { returnVal = true; }, error: function() { returnVal = false; } }); return returnVal; }, arangoNotification: function (title, content) { window.App.notificationList.add({title:title, content: content}); }, arangoError: function (title, content) { window.App.notificationList.add({title:title, content: content}); }, getRandomToken: function () { return Math.round(new Date().getTime()); }, isSystemAttribute: function (val) { var a = this.systemAttributes(); return a[val]; }, 
isSystemCollection: function (val) { return val.name.substr(0, 1) === '_'; // the below code is completely inappropriate as it will // load the collection just for the check whether it // is a system collection. as a consequence, the below // code would load ALL collections when the web interface // is called /* var returnVal = false; $.ajax({ type: "GET", url: "/_api/collection/" + encodeURIComponent(val) + "/properties", contentType: "application/json", processData: false, async: false, success: function(data) { returnVal = data.isSystem; }, error: function(data) { returnVal = false; } }); return returnVal; */ }, setDocumentStore : function (a) { this.arangoDocumentStore = a; }, collectionApiType: function (identifier, refresh) { // set "refresh" to disable caching collection type if (refresh || this.CollectionTypes[identifier] === undefined) { this.CollectionTypes[identifier] = this.arangoDocumentStore .getCollectionInfo(identifier).type; } if (this.CollectionTypes[identifier] === 3) { return "edge"; } return "document"; }, collectionType: function (val) { if (! val || val.name === '') { return "-"; } var type; if (val.type === 2) { type = "document"; } else if (val.type === 3) { type = "edge"; } else { type = "unknown"; } if (this.isSystemCollection(val)) { type += " (system)"; } return type; }, formatDT: function (dt) { var pad = function (n) { return n < 10 ? '0' + n : n; }; return dt.getUTCFullYear() + '-' + pad(dt.getUTCMonth() + 1) + '-' + pad(dt.getUTCDate()) + ' ' + pad(dt.getUTCHours()) + ':' + pad(dt.getUTCMinutes()) + ':' + pad(dt.getUTCSeconds()); }, escapeHtml: function (val) { // HTML-escape a string return String(val).replace(/&/g, '&amp;') .replace(/</g, '&lt;') .replace(/>/g, '&gt;') .replace(/"/g, '&quot;') .replace(/'/g, '&#39;'); } }; }()); /*global window, Backbone */ (function() { "use strict"; window.ClusterCollection = Backbone.Model.extend({ defaults: { "name": "", "status": "ok" }, idAttribute: "name", forList: function() { return { name: this.get("name"), status: this.get("status") }; } }); }()); /*global window, Backbone */ (function() { "use strict"; window.ClusterCoordinator = Backbone.Model.extend({ defaults: { "name": "", "url": "", "status": "ok" }, idAttribute: "name", /* url: "/_admin/aardvark/cluster/Coordinators"; updateUrl: function() { this.url = window.getNewRoute("Coordinators"); }, */ forList: function() { return { name: this.get("name"), status: this.get("status"), url: this.get("url") }; } }); }()); /*global window, Backbone */ (function() { "use strict"; window.ClusterDatabase = Backbone.Model.extend({ defaults: { "name": "", "status": "ok" }, idAttribute: "name", forList: function() { return { name: this.get("name"), status: this.get("status") }; } /* url: "/_admin/aardvark/cluster/Databases"; updateUrl: function() { this.url = window.getNewRoute("Databases"); } */ }); }()); /*global window, Backbone, $, _*/ (function() { "use strict"; window.ClusterPlan = Backbone.Model.extend({ defaults: { }, url: "cluster/plan", idAttribute: "config", getVersion: function() { var v = this.get("version"); return v || "2.0"; }, getCoordinator: function() { if (this._coord) { return this._coord[ this._lastStableCoord ]; } var tmpList = []; var i,j,r,l; r = this.get("runInfo"); if (!r) { return; } j = r.length-1; while (j > 0) { if(r[j].isStartServers) { l = r[j]; if (l.endpoints) { for (i = 0; i < l.endpoints.length;i++) { if (l.roles[i] === "Coordinator") { tmpList.push(l.endpoints[i] .replace("tcp://","http://") .replace("ssl://", "https://") ); } } } } 
j--; } this._coord = tmpList; this._lastStableCoord = Math.floor(Math.random() * this._coord.length); }, rotateCoordinator: function() { var last = this._lastStableCoord, next; if (this._coord.length > 1) { do { next = Math.floor(Math.random() * this._coord.length); } while (next === last); this._lastStableCoord = next; } }, isAlive : function() { var result = false; $.ajax({ cache: false, type: "GET", async: false, // sequential calls! url: "cluster/healthcheck", success: function(data) { result = data; }, error: function(data) { } }); return result; }, storeCredentials: function(name, passwd) { var self = this; $.ajax({ url: "cluster/plan/credentials", type: "PUT", data: JSON.stringify({ user: name, passwd: passwd }), async: false }).done(function() { self.fetch(); }); }, isSymmetricSetup: function() { var config = this.get("config"); var count = _.size(config.dispatchers); return count === config.numberOfCoordinators && count === config.numberOfDBservers; }, isTestSetup: function() { return _.size(this.get("config").dispatchers) === 1; }, cleanUp: function() { $.ajax({ url: "cluster/plan/cleanUp", type: "DELETE", async: false }); } }); }()); /*global window, Backbone */ (function() { "use strict"; window.ClusterServer = Backbone.Model.extend({ defaults: { name: "", address: "", role: "", status: "ok" }, idAttribute: "name", /* url: "/_admin/aardvark/cluster/DBServers"; updateUrl: function() { this.url = window.getNewRoute("DBServers"); }, */ forList: function() { return { name: this.get("name"), address: this.get("address"), status: this.get("status") }; } }); }()); /*global window, Backbone */ (function() { "use strict"; window.ClusterShard = Backbone.Model.extend({ defaults: { }, idAttribute: "name", forList: function() { return { server: this.get("name"), shards: this.get("shards") }; } /* url: "/_admin/aardvark/cluster/Shards"; updateUrl: function() { this.url = window.getNewRoute("Shards"); } */ }); }()); /*global window, Backbone, $, _*/ (function() { "use strict"; window.ClusterType = Backbone.Model.extend({ defaults: { "type": "testPlan" } }); }()); /*global window, Backbone, console */ (function() { "use strict"; window.AutomaticRetryCollection = Backbone.Collection.extend({ _retryCount: 0, checkRetries: function() { var self = this; this.updateUrl(); if (this._retryCount > 10) { window.setTimeout(function() { self._retryCount = 0; }, 10000); window.App.clusterUnreachable(); return false; } return true; }, successFullTry: function() { this._retryCount = 0; }, failureTry: function(retry, ignore, err) { if (err.status === 401) { window.App.requestAuth(); } else { window.App.clusterPlan.rotateCoordinator(); this._retryCount++; retry(); } } }); }()); /*global Backbone, window */ window.ClusterStatisticsCollection = Backbone.Collection.extend({ model: window.Statistics, url: "/_admin/statistics", updateUrl: function() { this.url = window.App.getNewRoute("statistics"); }, initialize: function() { window.App.registerForUpdate(this); }, // The callback has to be invokeable for each result individually fetch: function(callback, errCB) { this.forEach(function (m) { m.fetch({ beforeSend: window.App.addAuth.bind(window.App), error: function() { errCB(m); } }).done(function() { callback(m); }); }); } }); /*global window, Backbone */ (function() { "use strict"; window.ClusterCollections = window.AutomaticRetryCollection.extend({ model: window.ClusterCollection, updateUrl: function() { this.url = window.App.getNewRoute(this.dbname + "/Collections"); }, url: function() { return 
"/_admin/aardvark/cluster/" + this.dbname + "/" + "Collections"; }, initialize: function() { this.isUpdating = false; this.timer = null; this.interval = 1000; window.App.registerForUpdate(this); }, getList: function(db, callback) { if (db === undefined) { return; } this.dbname = db; if(!this.checkRetries()) { return; } var self = this; this.fetch({ beforeSend: window.App.addAuth.bind(window.App), error: self.failureTry.bind(self, self.getList.bind(self, db, callback)) }).done(function() { callback(self.map(function(m) { return m.forList(); })); }); }, stopUpdating: function () { window.clearTimeout(this.timer); this.isUpdating = false; }, startUpdating: function () { if (this.isUpdating) { return; } this.isUpdating = true; var self = this; this.timer = window.setInterval(function() { self.updateUrl(); self.fetch({ beforeSend: window.App.addAuth.bind(window.App) }); }, this.interval); } }); }()); /*global window, Backbone, console */ (function() { "use strict"; window.ClusterCoordinators = window.AutomaticRetryCollection.extend({ model: window.ClusterCoordinator, url: "/_admin/aardvark/cluster/Coordinators", updateUrl: function() { this.url = window.App.getNewRoute("Coordinators"); }, initialize: function() { window.App.registerForUpdate(this); }, statusClass: function(s) { switch (s) { case "ok": return "success"; case "warning": return "warning"; case "critical": return "danger"; case "missing": return "inactive"; default: return "danger"; } }, getStatuses: function(cb, nextStep) { if(!this.checkRetries()) { return; } var self = this; this.fetch({ beforeSend: window.App.addAuth.bind(window.App), error: self.failureTry.bind(self, self.getStatuses.bind(self, cb, nextStep)) }).done(function() { self.successFullTry(); self.forEach(function(m) { cb(self.statusClass(m.get("status")), m.get("address")); }); nextStep(); }); }, byAddress: function (res, callback) { if(!this.checkRetries()) { return; } var self = this; this.fetch({ beforeSend: window.App.addAuth.bind(window.App), error: self.failureTry.bind(self, self.byAddress.bind(self, res, callback)) }).done(function() { self.successFullTry(); res = res || {}; self.forEach(function(m) { var addr = m.get("address"); addr = addr.split(":")[0]; res[addr] = res[addr] || {}; res[addr].coords = res[addr].coords || []; res[addr].coords.push(m); }); callback(res); }); }, checkConnection: function(callback) { var self = this; if(!this.checkRetries()) { return; } this.fetch({ beforeSend: window.App.addAuth.bind(window.App), error: self.failureTry.bind(self, self.checkConnection.bind(self, callback)) }).done(function() { self.successFullTry(); callback(); }); }, getList: function() { throw "Do not use coordinator.getList"; /* this.fetch({ async: false, beforeSend: window.App.addAuth.bind(window.App) }); return this.map(function(m) { return m.forList(); }); */ }, getOverview: function() { throw "Do not use coordinator.getOverview"; /* this.fetch({ async: false, beforeSend: window.App.addAuth.bind(window.App) }); var res = { plan: 0, having: 0, status: "ok" }, updateStatus = function(to) { if (res.status === "critical") { return; } res.status = to; }; this.each(function(m) { res.plan++; switch (m.get("status")) { case "ok": res.having++; break; case "warning": res.having++; updateStatus("warning"); break; case "critical": updateStatus("critical"); break; default: console.debug("Undefined server state occured. 
This is still in development"); } }); return res; */ } }); }()); /*global window, Backbone */ (function() { "use strict"; window.ClusterDatabases = window.AutomaticRetryCollection.extend({ model: window.ClusterDatabase, url: "/_admin/aardvark/cluster/Databases", updateUrl: function() { this.url = window.App.getNewRoute("Databases"); }, initialize: function() { window.App.registerForUpdate(this); }, getList: function(callback) { if(!this.checkRetries()) { return; } var self = this; this.fetch({ beforeSend: window.App.addAuth.bind(window.App), error: self.failureTry.bind(self, self.getList.bind(self, callback)) }).done(function() { self.successFullTry(); callback(self.map(function(m) { return m.forList(); })); }); } }); }()); /*global window, Backbone, _, console */ (function() { "use strict"; window.ClusterServers = window.AutomaticRetryCollection.extend({ model: window.ClusterServer, url: "/_admin/aardvark/cluster/DBServers", updateUrl: function() { this.url = window.App.getNewRoute("DBServers"); }, initialize: function() { window.App.registerForUpdate(this); }, statusClass: function(s) { switch (s) { case "ok": return "success"; case "warning": return "warning"; case "critical": return "danger"; case "missing": return "inactive"; default: return "danger"; } }, getStatuses: function(cb) { if(!this.checkRetries()) { return; } var self = this, completed = function() { self.successFullTry(); self._retryCount = 0; self.forEach(function(m) { cb(self.statusClass(m.get("status")), m.get("address")); }); }; // This is the first function called in // Each update loop this.fetch({ beforeSend: window.App.addAuth.bind(window.App), error: self.failureTry.bind(self, self.getStatuses.bind(self, cb)) }).done(completed); }, byAddress: function (res, callback) { if(!this.checkRetries()) { return; } var self = this; this.fetch({ beforeSend: window.App.addAuth.bind(window.App), error: self.failureTry.bind(self, self.byAddress.bind(self, res, callback)) }).done(function() { self.successFullTry(); res = res || {}; self.forEach(function(m) { var addr = m.get("address"); addr = addr.split(":")[0]; res[addr] = res[addr] || {}; res[addr].dbs = res[addr].dbs || []; res[addr].dbs.push(m); }); callback(res); }); }, getList: function(callback) { throw "Do not use"; /* var self = this; this.fetch({ beforeSend: window.App.addAuth.bind(window.App), error: self.failureTry.bind(self, self.getList.bind(self, callback)) }).done(function() { self.successFullTry(); var res = []; _.each(self.where({role: "primary"}), function(m) { var e = {}; e.primary = m.forList(); if (m.get("secondary")) { e.secondary = self.get(m.get("secondary")).forList(); } res.push(e); }); callback(res); }); */ }, getOverview: function() { throw "Do not use DbServer.getOverview"; /* this.fetch({ async: false, beforeSend: window.App.addAuth.bind(window.App) }); var res = { plan: 0, having: 0, status: "ok" }, self = this, updateStatus = function(to) { if (res.status === "critical") { return; } res.status = to; }; _.each(this.where({role: "primary"}), function(m) { res.plan++; switch (m.get("status")) { case "ok": res.having++; break; case "warning": res.having++; updateStatus("warning"); break; case "critical": var bkp = self.get(m.get("secondary")); if (!bkp || bkp.get("status") === "critical") { updateStatus("critical"); } else { if (bkp.get("status") === "ok") { res.having++; updateStatus("warning"); } } break; default: console.debug("Undefined server state occured. 
This is still in development"); } }); return res; */ } }); }()); /*global window, Backbone */ (function() { "use strict"; window.ClusterShards = window.AutomaticRetryCollection.extend({ model: window.ClusterShard, updateUrl: function() { this.url = window.App.getNewRoute( this.dbname + "/" + this.colname + "/Shards" ); }, url: function() { return "/_admin/aardvark/cluster/" + this.dbname + "/" + this.colname + "/" + "Shards"; }, initialize: function() { this.isUpdating = false; this.timer = null; this.interval = 1000; window.App.registerForUpdate(this); }, getList: function(dbname, colname, callback) { if (dbname === undefined || colname === undefined) { return; } this.dbname = dbname; this.colname = colname; if(!this.checkRetries()) { return; } var self = this; this.fetch({ beforeSend: window.App.addAuth.bind(window.App), error: self.failureTry.bind(self, self.getList.bind( self, dbname, colname, callback) ) }).done(function() { callback(self.map(function(m) { return m.forList(); })); }); }, stopUpdating: function () { window.clearTimeout(this.timer); this.isUpdating = false; }, startUpdating: function () { if (this.isUpdating) { return; } this.isUpdating = true; var self = this; this.timer = window.setInterval(function() { self.updateUrl(); self.fetch({ beforeSend: window.App.addAuth.bind(window.App) }); }, this.interval); } }); }()); /*global window, Backbone, arangoHelper, _ */ window.arangoDocumentModel = Backbone.Model.extend({ initialize: function () { 'use strict'; }, urlRoot: "/_api/document", defaults: { _id: "", _rev: "", _key: "" }, getSorted: function () { 'use strict'; var self = this; var keys = Object.keys(self.attributes).sort(function (l, r) { var l1 = arangoHelper.isSystemAttribute(l); var r1 = arangoHelper.isSystemAttribute(r); if (l1 !== r1) { if (l1) { return -1; } return 1; } return l < r ? 
-1 : 1; }); var sorted = {}; _.each(keys, function (k) { sorted[k] = self.attributes[k]; }); return sorted; } }); /*global window, Backbone */ window.Statistics = Backbone.Model.extend({ defaults: { }, url: function() { 'use strict'; return "/_admin/statistics"; } }); /*global window, Backbone */ window.StatisticsDescription = Backbone.Model.extend({ defaults: { "figures" : "", "groups" : "" }, url: function() { 'use strict'; return "/_admin/statistics-description"; } }); /*jshint browser: true */ /*jshint unused: false */ /*global Backbone, window, $, _ */ (function () { "use strict"; window.PaginatedCollection = Backbone.Collection.extend({ page: 0, pagesize: 10, totalAmount: 0, getPage: function() { return this.page + 1; }, setPage: function(counter) { if (counter >= this.getLastPageNumber()) { this.page = this.getLastPageNumber()-1; return; } if (counter < 1) { this.page = 0; return; } this.page = counter - 1; }, getLastPageNumber: function() { return Math.max(Math.ceil(this.totalAmount / this.pagesize), 1); }, getOffset: function() { return this.page * this.pagesize; }, getPageSize: function() { return this.pagesize; }, setPageSize: function(newPagesize) { if (newPagesize === "all") { this.pagesize = 'all'; } else { try { newPagesize = parseInt(newPagesize, 10); this.pagesize = newPagesize; } catch (ignore) { } } }, setToFirst: function() { this.page = 0; }, setToLast: function() { this.setPage(this.getLastPageNumber()); }, setToPrev: function() { this.setPage(this.getPage() - 1); }, setToNext: function() { this.setPage(this.getPage() + 1); }, setTotal: function(total) { this.totalAmount = total; }, getTotal: function() { return this.totalAmount; }, setTotalMinusOne: function() { this.totalAmount--; } }); }()); /*jshint browser: true */ /*jshint unused: false */ /*global Backbone, window */ window.StatisticsCollection = Backbone.Collection.extend({ model: window.Statistics, url: "/_admin/statistics" }); /*jshint browser: true */ /*jshint unused: false */ /*global window, Backbone, arangoDocumentModel, _, arangoHelper, $*/ (function() { "use strict"; window.arangoDocuments = window.PaginatedCollection.extend({ collectionID: 1, filters: [], MAX_SORT: 12000, lastQuery: {}, sortAttribute: "_key", url: '/_api/documents', model: window.arangoDocumentModel, loadTotal: function() { var self = this; $.ajax({ cache: false, type: "GET", url: "/_api/collection/" + this.collectionID + "/count", contentType: "application/json", processData: false, async: false, success: function(data) { self.setTotal(data.count); } }); }, setCollection: function(id) { this.resetFilter(); this.collectionID = id; this.setPage(1); this.loadTotal(); }, setSort: function(key) { this.sortAttribute = key; }, getSort: function() { return this.sortAttribute; }, addFilter: function(attr, op, val) { this.filters.push({ attr: attr, op: op, val: val }); }, setFiltersForQuery: function(bindVars) { if (this.filters.length === 0) { return ""; } var query = " FILTER", parts = _.map(this.filters, function(f, i) { var res = " x.`"; res += f.attr; res += "` "; res += f.op; res += " @param"; res += i; bindVars["param" + i] = f.val; return res; }); return query + parts.join(" &&"); }, setPagesize: function(size) { this.setPageSize(size); }, resetFilter: function() { this.filters = []; }, moveDocument: function (key, fromCollection, toCollection, callback) { var querySave, queryRemove, queryObj, bindVars = { "@collection": fromCollection, "filterid": key }, queryObj1, queryObj2; querySave = "FOR x IN @@collection"; querySave += " FILTER 
x._key == @filterid"; querySave += " INSERT x IN "; querySave += toCollection; queryRemove = "FOR x in @@collection"; queryRemove += " FILTER x._key == @filterid"; queryRemove += " REMOVE x IN @@collection"; queryObj1 = { query: querySave, bindVars: bindVars }; queryObj2 = { query: queryRemove, bindVars: bindVars }; window.progressView.show(); // first insert docs in toCollection $.ajax({ cache: false, type: 'POST', async: true, url: '/_api/cursor', data: JSON.stringify(queryObj1), contentType: "application/json", success: function(data) { // if successful remove unwanted docs $.ajax({ cache: false, type: 'POST', async: true, url: '/_api/cursor', data: JSON.stringify(queryObj2), contentType: "application/json", success: function(data) { if (callback) { callback(); } window.progressView.hide(); }, error: function(data) { window.progressView.hide(); arangoHelper.arangoNotification( "Document error", "Documents inserted, but could not be removed." ); } }); }, error: function(data) { window.progressView.hide(); arangoHelper.arangoNotification("Document error", "Could not move selected documents."); } }); }, getDocuments: function (callback) { window.progressView.showWithDelay(300, "Fetching documents..."); var self = this, query, bindVars, tmp, queryObj; bindVars = { "@collection": this.collectionID, "offset": this.getOffset(), "count": this.getPageSize() }; // fetch just the first 25 attributes of the document // this number is arbitrary, but may reduce HTTP traffic a bit query = "FOR x IN @@collection LET att = SLICE(ATTRIBUTES(x), 0, 25)"; query += this.setFiltersForQuery(bindVars); // Sort result, only useful for a small number of docs if (this.getTotal() < this.MAX_SORT) { if (this.getSort() === '_key') { query += " SORT TO_NUMBER(x." + this.getSort() + ") == 0 ? x." + this.getSort() + " : TO_NUMBER(x." + this.getSort() + ")"; } else { query += " SORT x." + this.getSort(); } } if (bindVars.count !== 'all') { query += " LIMIT @offset, @count RETURN KEEP(x, att)"; } else { tmp = { "@collection": this.collectionID }; bindVars = tmp; query += " RETURN KEEP(x, att)"; } queryObj = { query: query, bindVars: bindVars }; if (this.getTotal() < 10000 || this.filters.length > 0) { queryObj.options = { fullCount: true, }; } $.ajax({ cache: false, type: 'POST', async: true, url: '/_api/cursor', data: JSON.stringify(queryObj), contentType: "application/json", success: function(data) { window.progressView.toShow = false; self.clearDocuments(); if (data.extra && data.extra.stats.fullCount !== undefined) { self.setTotal(data.extra.stats.fullCount); } if (self.getTotal() !== 0) { _.each(data.result, function(v) { self.add({ "id": v._id, "rev": v._rev, "key": v._key, "content": v }); }); } self.lastQuery = queryObj; callback(); window.progressView.hide(); }, error: function(data) { window.progressView.hide(); arangoHelper.arangoNotification("Document error", "Could not fetch requested documents."); } }); }, clearDocuments: function () { this.reset(); }, buildDownloadDocumentQuery: function() { var self = this, query, queryObj, bindVars; bindVars = { "@collection": this.collectionID }; query = "FOR x in @@collection"; query += this.setFiltersForQuery(bindVars); // Sort result, only useful for a small number of docs if (this.getTotal() < this.MAX_SORT) { query += " SORT x." 
+ this.getSort(); } query += " RETURN x"; queryObj = { query: query, bindVars: bindVars }; return queryObj; }, uploadDocuments : function (file) { var result; $.ajax({ type: "POST", async: false, url: '/_api/import?type=auto&collection='+ encodeURIComponent(this.collectionID)+ '&createCollection=false', data: file, processData: false, contentType: 'json', dataType: 'json', complete: function(xhr) { if (xhr.readyState === 4 && xhr.status === 201) { result = true; } else { result = "Upload error"; } try { var data = JSON.parse(xhr.responseText); if (data.errors > 0) { result = "At least one error occurred during upload"; } } catch (err) { } } }); return result; } }); }()); /*jshint unused: false */ /*global EJS, window, _, $*/ (function() { "use strict"; // For tests the templates are loaded some where else. // We need to use a different engine there. if (!window.hasOwnProperty("TEST_BUILD")) { var TemplateEngine = function() { var exports = {}; exports.createTemplate = function(id) { var template = $("#" + id.replace(".", "\\.")).html(); return { render: function(params) { return _.template(template, params); } }; }; return exports; }; window.templateEngine = new TemplateEngine(); } }()); /*jshint browser: true */ /*jshint unused: false */ /*global Backbone, templateEngine, $, arangoHelper, window*/ (function() { "use strict"; window.FooterView = Backbone.View.extend({ el: '#footerBar', system: {}, isOffline: true, isOfflineCounter: 0, firstLogin: true, events: { 'click .footer-center p' : 'showShortcutModal' }, initialize: function () { //also server online check var self = this; window.setInterval(function(){ self.getVersion(); }, 15000); self.getVersion(); }, template: templateEngine.createTemplate("footerView.ejs"), showServerStatus: function(isOnline) { if (isOnline === true) { $('.serverStatusIndicator').addClass('isOnline'); $('.serverStatusIndicator').addClass('fa-check-circle-o'); $('.serverStatusIndicator').removeClass('fa-times-circle-o'); } else { $('.serverStatusIndicator').removeClass('isOnline'); $('.serverStatusIndicator').removeClass('fa-check-circle-o'); $('.serverStatusIndicator').addClass('fa-times-circle-o'); } }, showShortcutModal: function() { window.arangoHelper.hotkeysFunctions.showHotkeysModal(); }, getVersion: function () { var self = this; // always retry this call, because it also checks if the server is online $.ajax({ type: "GET", cache: false, url: "/_api/version", contentType: "application/json", processData: false, async: true, success: function(data) { self.showServerStatus(true); if (self.isOffline === true) { self.isOffline = false; self.isOfflineCounter = 0; if (!self.firstLogin) { window.setTimeout(function(){ self.showServerStatus(true); }, 1000); } else { self.firstLogin = false; } self.system.name = data.server; self.system.version = data.version; self.render(); } }, error: function (data) { self.isOffline = true; self.isOfflineCounter++; if (self.isOfflineCounter >= 1) { //arangoHelper.arangoError("Server", "Server is offline"); self.showServerStatus(false); } } }); if (! 
self.system.hasOwnProperty('database')) { $.ajax({ type: "GET", cache: false, url: "/_api/database/current", contentType: "application/json", processData: false, async: true, success: function(data) { var name = data.result.name; self.system.database = name; var timer = window.setInterval(function () { var navElement = $('#databaseNavi'); if (navElement) { window.clearTimeout(timer); timer = null; if (name === '_system') { // show "logs" button $('.logs-menu').css('visibility', 'visible'); $('.logs-menu').css('display', 'inline'); // show dbs menues $('#databaseNavi').css('display','inline'); } else { // hide "logs" button $('.logs-menu').css('visibility', 'hidden'); $('.logs-menu').css('display', 'none'); } self.render(); } }, 50); } }); } }, renderVersion: function () { if (this.system.hasOwnProperty('database') && this.system.hasOwnProperty('name')) { $(this.el).html(this.template.render({ name: this.system.name, version: this.system.version, database: this.system.database })); } }, render: function () { if (!this.system.version) { this.getVersion(); } $(this.el).html(this.template.render({ name: this.system.name, version: this.system.version })); return this; } }); }()); /*jshint browser: true */ /*jshint unused: false */ /*global Backbone, EJS, $, flush, window, arangoHelper, nv, d3, localStorage*/ /*global document, console, Dygraph, _,templateEngine */ (function () { "use strict"; function fmtNumber (n, nk) { if (n === undefined || n === null) { n = 0; } return n.toFixed(nk); } window.DashboardView = Backbone.View.extend({ el: '#content', interval: 10000, // in milliseconds defaultTimeFrame: 20 * 60 * 1000, // 20 minutes in milliseconds defaultDetailFrame: 2 * 24 * 60 * 60 * 1000, history: {}, graphs: {}, events: { // will be filled in initialize }, tendencies: { asyncPerSecondCurrent: [ "asyncPerSecondCurrent", "asyncPerSecondPercentChange" ], syncPerSecondCurrent: [ "syncPerSecondCurrent", "syncPerSecondPercentChange" ], clientConnectionsCurrent: [ "clientConnectionsCurrent", "clientConnectionsPercentChange" ], clientConnectionsAverage: [ "clientConnections15M", "clientConnections15MPercentChange" ], numberOfThreadsCurrent: [ "numberOfThreadsCurrent", "numberOfThreadsPercentChange" ], numberOfThreadsAverage: [ "numberOfThreads15M", "numberOfThreads15MPercentChange" ], virtualSizeCurrent: [ "virtualSizeCurrent", "virtualSizePercentChange" ], virtualSizeAverage: [ "virtualSize15M", "virtualSize15MPercentChange" ] }, barCharts: { totalTimeDistribution: [ "queueTimeDistributionPercent", "requestTimeDistributionPercent" ], dataTransferDistribution: [ "bytesSentDistributionPercent", "bytesReceivedDistributionPercent" ] }, barChartsElementNames: { queueTimeDistributionPercent: "Queue", requestTimeDistributionPercent: "Computation", bytesSentDistributionPercent: "Bytes sent", bytesReceivedDistributionPercent: "Bytes received" }, getDetailFigure : function (e) { var figure = $(e.currentTarget).attr("id").replace(/ChartButton/g, ""); return figure; }, showDetail: function (e) { var self = this, figure = this.getDetailFigure(e), options; options = this.dygraphConfig.getDetailChartConfig(figure); this.getHistoryStatistics(figure); this.detailGraphFigure = figure; window.modalView.hideFooter = true; window.modalView.hide(); window.modalView.show( "modalGraph.ejs", options.header, undefined, undefined, undefined, undefined, this.events ); window.modalView.hideFooter = false; $('#modal-dialog').on('hidden', function () { self.hidden(); }); $('#modal-dialog').toggleClass("modal-chart-detail", 
true); options.height = $(window).height() * 0.7; options.width = $('.modal-inner-detail').width(); // Reselect the labelsDiv. It was not known when requesting options options.labelsDiv = $(options.labelsDiv)[0]; this.detailGraph = new Dygraph( document.getElementById("lineChartDetail"), this.history[this.server][figure], options ); }, hidden: function () { this.detailGraph.destroy(); delete this.detailGraph; delete this.detailGraphFigure; }, getCurrentSize: function (div) { if (div.substr(0,1) !== "#") { div = "#" + div; } var height, width; $(div).attr("style", ""); height = $(div).height(); width = $(div).width(); return { height: height, width: width }; }, prepareDygraphs: function () { var self = this, options; this.dygraphConfig.getDashBoardFigures().forEach(function (f) { options = self.dygraphConfig.getDefaultConfig(f); var dimensions = self.getCurrentSize(options.div); options.height = dimensions.height; options.width = dimensions.width; self.graphs[f] = new Dygraph( document.getElementById(options.div), self.history[self.server][f] || [], options ); }); }, initialize: function () { this.dygraphConfig = this.options.dygraphConfig; this.d3NotInitialised = true; this.events["click .dashboard-sub-bar-menu-sign"] = this.showDetail.bind(this); this.events["mousedown .dygraph-rangesel-zoomhandle"] = this.stopUpdating.bind(this); this.events["mouseup .dygraph-rangesel-zoomhandle"] = this.startUpdating.bind(this); this.server = this.options.serverToShow; if (! this.server) { this.server = "-local-"; } this.history[this.server] = {}; }, updateCharts: function () { var self = this; if (this.detailGraph) { this.updateLineChart(this.detailGraphFigure, true); return; } this.prepareD3Charts(this.isUpdating); this.prepareResidentSize(this.isUpdating); this.updateTendencies(); Object.keys(this.graphs).forEach(function (f) { self.updateLineChart(f, false); }); }, updateTendencies: function () { var self = this, map = this.tendencies; var tempColor = ""; Object.keys(map).forEach(function (a) { var p = ""; var v = 0; if (self.history.hasOwnProperty(self.server) && self.history[self.server].hasOwnProperty(a)) { v = self.history[self.server][a][1]; } if (v < 0) { tempColor = "red"; } else { tempColor = "green"; p = "+"; } $("#" + a).html(self.history[self.server][a][0] + '<br/><span class="dashboard-figurePer" style="color: ' + tempColor +';">' + p + v + '%</span>'); }); }, updateDateWindow: function (graph, isDetailChart) { var t = new Date().getTime(); var borderLeft, borderRight; if (isDetailChart && graph.dateWindow_) { borderLeft = graph.dateWindow_[0]; borderRight = t - graph.dateWindow_[1] - this.interval * 5 > 0 ? graph.dateWindow_[1] : t; return [borderLeft, borderRight]; } return [t - this.defaultTimeFrame, t]; }, updateLineChart: function (figure, isDetailChart) { var g = isDetailChart ? this.detailGraph : this.graphs[figure], opts = { file: this.history[this.server][figure], dateWindow: this.updateDateWindow(g, isDetailChart) }; g.updateOptions(opts); }, mergeDygraphHistory: function (newData, i) { var self = this, valueList; this.dygraphConfig.getDashBoardFigures(true).forEach(function (f) { // check if figure is known if (! self.dygraphConfig.mapStatToFigure[f]) { return; } // need at least an empty history if (! self.history[self.server][f]) { self.history[self.server][f] = []; } // generate values for this key valueList = []; self.dygraphConfig.mapStatToFigure[f].forEach(function (a) { if (! 
newData[a]) { return; } if (a === "times") { valueList.push(new Date(newData[a][i] * 1000)); } else { valueList.push(newData[a][i]); } }); // if we found at list one value besides times, then use the entry if (valueList.length > 1) { self.history[self.server][f].push(valueList); } }); }, cutOffHistory: function (f, cutoff) { var self = this; while (self.history[self.server][f].length !== 0) { var v = self.history[self.server][f][0][0]; if (v >= cutoff) { break; } self.history[self.server][f].shift(); } }, cutOffDygraphHistory: function (cutoff) { var self = this; var cutoffDate = new Date(cutoff); this.dygraphConfig.getDashBoardFigures(true).forEach(function (f) { // check if figure is known if (! self.dygraphConfig.mapStatToFigure[f]) { return; } // history must be non-empty if (! self.history[self.server][f]) { return; } self.cutOffHistory(f, cutoffDate); }); }, mergeHistory: function (newData) { var self = this, i; for (i = 0; i < newData.times.length; ++i) { this.mergeDygraphHistory(newData, i); } this.cutOffDygraphHistory(new Date().getTime() - this.defaultTimeFrame); // convert tendency values Object.keys(this.tendencies).forEach(function (a) { var n1 = 1; var n2 = 1; if (a === "virtualSizeCurrent" || a === "virtualSizeAverage") { newData[self.tendencies[a][0]] /= (1024 * 1024 * 1024); n1 = 2; } else if (a === "clientConnectionsCurrent") { n1 = 0; } else if (a === "numberOfThreadsCurrent") { n1 = 0; } self.history[self.server][a] = [ fmtNumber(newData[self.tendencies[a][0]], n1), fmtNumber(newData[self.tendencies[a][1]] * 100, n2) ]; }); // update distribution Object.keys(this.barCharts).forEach(function (a) { self.history[self.server][a] = self.mergeBarChartData(self.barCharts[a], newData); }); // update physical memory self.history[self.server].physicalMemory = newData.physicalMemory; self.history[self.server].residentSizeCurrent = newData.residentSizeCurrent; self.history[self.server].residentSizePercent = newData.residentSizePercent; // generate chart description self.history[self.server].residentSizeChart = [ { "key": "", "color": this.dygraphConfig.colors[1], "values": [ { label: "used", value: newData.residentSizePercent * 100 } ] }, { "key": "", "color": this.dygraphConfig.colors[0], "values": [ { label: "used", value: 100 - newData.residentSizePercent * 100 } ] } ] ; // remember next start this.nextStart = newData.nextStart; }, mergeBarChartData: function (attribList, newData) { var i, v1 = { "key": this.barChartsElementNames[attribList[0]], "color": this.dygraphConfig.colors[0], "values": [] }, v2 = { "key": this.barChartsElementNames[attribList[1]], "color": this.dygraphConfig.colors[1], "values": [] }; for (i = newData[attribList[0]].values.length - 1; 0 <= i; --i) { v1.values.push({ label: this.getLabel(newData[attribList[0]].cuts, i), value: newData[attribList[0]].values[i] }); v2.values.push({ label: this.getLabel(newData[attribList[1]].cuts, i), value: newData[attribList[1]].values[i] }); } return [v1, v2]; }, getLabel: function (cuts, counter) { if (!cuts[counter]) { return ">" + cuts[counter - 1]; } return counter === 0 ? 
"0 - " + cuts[counter] : cuts[counter - 1] + " - " + cuts[counter]; }, getStatistics: function (callback) { var self = this; var url = "/_db/_system/_admin/aardvark/statistics/short"; var urlParams = "?start="; if (self.nextStart) { urlParams += self.nextStart; } else { urlParams += (new Date().getTime() - self.defaultTimeFrame) / 1000; } if (self.server !== "-local-") { url = self.server.endpoint + "/_admin/aardvark/statistics/cluster"; urlParams += "&type=short&DBserver=" + self.server.target; if (! self.history.hasOwnProperty(self.server)) { self.history[self.server] = {}; } } $.ajax( url + urlParams, {async: true} ).done( function (d) { if (d.times.length > 0) { self.isUpdating = true; self.mergeHistory(d); } if (self.isUpdating === false) { return; } if (callback) { callback(); } self.updateCharts(); }); }, getHistoryStatistics: function (figure) { var self = this; var url = "statistics/long"; var urlParams = "?filter=" + this.dygraphConfig.mapStatToFigure[figure].join(); if (self.server !== "-local-") { url = self.server.endpoint + "/_admin/aardvark/statistics/cluster"; urlParams += "&type=long&DBserver=" + self.server.target; if (! self.history.hasOwnProperty(self.server)) { self.history[self.server] = {}; } } $.ajax( url + urlParams, {async: true} ).done( function (d) { var i; self.history[self.server][figure] = []; for (i = 0; i < d.times.length; ++i) { self.mergeDygraphHistory(d, i, true); } } ); }, prepareResidentSize: function (update) { var self = this; var dimensions = this.getCurrentSize('#residentSizeChartContainer'); var current = self.history[self.server].residentSizeCurrent / 1024 / 1024; var currentA = ""; if (current < 1025) { currentA = fmtNumber(current, 2) + " MB"; } else { currentA = fmtNumber(current / 1024, 2) + " GB"; } var currentP = fmtNumber(self.history[self.server].residentSizePercent * 100, 2); var data = [fmtNumber(self.history[self.server].physicalMemory / 1024 / 1024 / 1024, 0) + " GB"]; nv.addGraph(function () { var chart = nv.models.multiBarHorizontalChart() .x(function (d) { return d.label; }) .y(function (d) { return d.value; }) .width(dimensions.width) .height(dimensions.height) .margin({ top: ($("residentSizeChartContainer").outerHeight() - $("residentSizeChartContainer").height()) / 2, right: 1, bottom: ($("residentSizeChartContainer").outerHeight() - $("residentSizeChartContainer").height()) / 2, left: 1 }) .showValues(false) .showYAxis(false) .showXAxis(false) .transitionDuration(100) .tooltips(false) .showLegend(false) .showControls(false) .stacked(true); chart.yAxis .tickFormat(function (d) {return d + "%";}) .showMaxMin(false); chart.xAxis.showMaxMin(false); d3.select('#residentSizeChart svg') .datum(self.history[self.server].residentSizeChart) .call(chart); d3.select('#residentSizeChart svg').select('.nv-zeroLine').remove(); if (update) { d3.select('#residentSizeChart svg').select('#total').remove(); d3.select('#residentSizeChart svg').select('#percentage').remove(); } d3.select('.dashboard-bar-chart-title .percentage') .html(currentA + " ("+ currentP + " %)"); d3.select('.dashboard-bar-chart-title .absolut') .html(data[0]); nv.utils.windowResize(chart.update); return chart; }, function() { d3.selectAll("#residentSizeChart .nv-bar").on('click', function() { // no idea why this has to be empty, well anyways... 
} ); }); }, prepareD3Charts: function (update) { var self = this; var barCharts = { totalTimeDistribution: [ "queueTimeDistributionPercent", "requestTimeDistributionPercent"], dataTransferDistribution: [ "bytesSentDistributionPercent", "bytesReceivedDistributionPercent"] }; if (this.d3NotInitialised) { update = false; this.d3NotInitialised = false; } _.each(Object.keys(barCharts), function (k) { var dimensions = self.getCurrentSize('#' + k + 'Container .dashboard-interior-chart'); var selector = "#" + k + "Container svg"; nv.addGraph(function () { var tickMarks = [0, 0.25, 0.5, 0.75, 1]; var marginLeft = 75; var marginBottom = 23; var bottomSpacer = 6; if (dimensions.width < 219) { tickMarks = [0, 0.5, 1]; marginLeft = 72; marginBottom = 21; bottomSpacer = 5; } else if (dimensions.width < 299) { tickMarks = [0, 0.3334, 0.6667, 1]; marginLeft = 77; } else if (dimensions.width < 379) { marginLeft = 87; } else if (dimensions.width < 459) { marginLeft = 95; } else if (dimensions.width < 539) { marginLeft = 100; } else if (dimensions.width < 619) { marginLeft = 105; } var chart = nv.models.multiBarHorizontalChart() .x(function (d) { return d.label; }) .y(function (d) { return d.value; }) .width(dimensions.width) .height(dimensions.height) .margin({ top: 5, right: 20, bottom: marginBottom, left: marginLeft }) .showValues(false) .showYAxis(true) .showXAxis(true) .transitionDuration(100) .tooltips(false) .showLegend(false) .showControls(false) .forceY([0,1]); chart.yAxis .showMaxMin(false); var yTicks2 = d3.select('.nv-y.nv-axis') .selectAll('text') .attr('transform', 'translate (0, ' + bottomSpacer + ')') ; chart.yAxis .tickValues(tickMarks) .tickFormat(function (d) {return fmtNumber(((d * 100 * 100) / 100), 0) + "%";}); d3.select(selector) .datum(self.history[self.server][k]) .call(chart); nv.utils.windowResize(chart.update); return chart; }, function() { d3.selectAll(selector + " .nv-bar").on('click', function() { // no idea why this has to be empty, well anyways... 
} ); }); }); }, stopUpdating: function () { this.isUpdating = false; }, startUpdating: function () { var self = this; if (self.timer) { return; } self.timer = window.setInterval(function () { self.getStatistics(); }, self.interval ); }, resize: function () { if (!this.isUpdating) { return; } var self = this, dimensions; _.each(this.graphs,function (g) { dimensions = self.getCurrentSize(g.maindiv_.id); g.resize(dimensions.width, dimensions.height); }); if (this.detailGraph) { dimensions = this.getCurrentSize(this.detailGraph.maindiv_.id); this.detailGraph.resize(dimensions.width, dimensions.height); } this.prepareD3Charts(true); this.prepareResidentSize(true); }, template: templateEngine.createTemplate("dashboardView.ejs"), render: function (modalView) { if (!modalView) { $(this.el).html(this.template.render()); } var callback = function() { this.prepareDygraphs(); if (this.isUpdating) { this.prepareD3Charts(); this.prepareResidentSize(); this.updateTendencies(); } this.startUpdating(); }.bind(this); //check if user has _system permission var authorized = this.options.database.hasSystemAccess(); if (!authorized) { $('.contentDiv').remove(); $('.headerBar').remove(); $('.dashboard-headerbar').remove(); $('.dashboard-row').remove(); $('#content').append( '<div style="color: red">You do not have permission to view this page.</div>' ); $('#content').append( '<div style="color: red">You can switch to \'_system\' to see the dashboard.</div>' ); } else { this.getStatistics(callback); } } }); }()); /*jshint browser: true */ /*global Backbone, $, window, setTimeout, Joi, _ */ /*global templateEngine*/ (function () { "use strict"; var createButtonStub = function(type, title, cb, confirm) { return { type: type, title: title, callback: cb, confirm: confirm }; }; var createTextStub = function(type, label, value, info, placeholder, mandatory, joiObj, addDelete, addAdd, maxEntrySize, tags) { var obj = { type: type, label: label }; if (value !== undefined) { obj.value = value; } if (info !== undefined) { obj.info = info; } if (placeholder !== undefined) { obj.placeholder = placeholder; } if (mandatory !== undefined) { obj.mandatory = mandatory; } if (addDelete !== undefined) { obj.addDelete = addDelete; } if (addAdd !== undefined) { obj.addAdd = addAdd; } if (maxEntrySize !== undefined) { obj.maxEntrySize = maxEntrySize; } if (tags !== undefined) { obj.tags = tags; } if (joiObj){ // returns true if the string contains the match obj.validateInput = function() { // return regexp.test(el.val()); return joiObj; }; } return obj; }; window.ModalView = Backbone.View.extend({ _validators: [], _validateWatchers: [], baseTemplate: templateEngine.createTemplate("modalBase.ejs"), tableTemplate: templateEngine.createTemplate("modalTable.ejs"), el: "#modalPlaceholder", contentEl: "#modalContent", hideFooter: false, confirm: { list: "#modal-delete-confirmation", yes: "#modal-confirm-delete", no: "#modal-abort-delete" }, enabledHotkey: false, enableHotKeys : true, buttons: { SUCCESS: "success", NOTIFICATION: "notification", DELETE: "danger", NEUTRAL: "neutral", CLOSE: "close" }, tables: { READONLY: "readonly", TEXT: "text", BLOB: "blob", PASSWORD: "password", SELECT: "select", SELECT2: "select2", CHECKBOX: "checkbox" }, initialize: function() { Object.freeze(this.buttons); Object.freeze(this.tables); }, createModalHotkeys: function() { //submit modal $(this.el).bind('keydown', 'return', function(){ $('.modal-footer .button-success').click(); }); $("input", $(this.el)).bind('keydown', 'return', function(){ $('.modal-footer 
.button-success').click(); }); $("select", $(this.el)).bind('keydown', 'return', function(){ $('.modal-footer .button-success').click(); }); }, createInitModalHotkeys: function() { var self = this; //navigate through modal buttons //left cursor $(this.el).bind('keydown', 'left', function(){ self.navigateThroughButtons('left'); }); //right cursor $(this.el).bind('keydown', 'right', function(){ self.navigateThroughButtons('right'); }); }, navigateThroughButtons: function(direction) { var hasFocus = $('.modal-footer button').is(':focus'); if (hasFocus === false) { if (direction === 'left') { $('.modal-footer button').first().focus(); } else if (direction === 'right') { $('.modal-footer button').last().focus(); } } else if (hasFocus === true) { if (direction === 'left') { $(':focus').prev().focus(); } else if (direction === 'right') { $(':focus').next().focus(); } } }, createCloseButton: function(title, cb) { var self = this; return createButtonStub(this.buttons.CLOSE, title, function () { self.hide(); if (cb) { cb(); } }); }, createSuccessButton: function(title, cb) { return createButtonStub(this.buttons.SUCCESS, title, cb); }, createNotificationButton: function(title, cb) { return createButtonStub(this.buttons.NOTIFICATION, title, cb); }, createDeleteButton: function(title, cb, confirm) { return createButtonStub(this.buttons.DELETE, title, cb, confirm); }, createNeutralButton: function(title, cb) { return createButtonStub(this.buttons.NEUTRAL, title, cb); }, createDisabledButton: function(title) { var disabledButton = createButtonStub(this.buttons.NEUTRAL, title); disabledButton.disabled = true; return disabledButton; }, createReadOnlyEntry: function(id, label, value, info, addDelete, addAdd) { var obj = createTextStub(this.tables.READONLY, label, value, info,undefined, undefined, undefined,addDelete, addAdd); obj.id = id; return obj; }, createTextEntry: function(id, label, value, info, placeholder, mandatory, regexp) { var obj = createTextStub(this.tables.TEXT, label, value, info, placeholder, mandatory, regexp); obj.id = id; return obj; }, createBlobEntry: function(id, label, value, info, placeholder, mandatory, regexp) { var obj = createTextStub(this.tables.BLOB, label, value, info, placeholder, mandatory, regexp); obj.id = id; return obj; }, createSelect2Entry: function( id, label, value, info, placeholder, mandatory, addDelete, addAdd, maxEntrySize, tags) { var obj = createTextStub(this.tables.SELECT2, label, value, info, placeholder, mandatory, undefined, addDelete, addAdd, maxEntrySize, tags); obj.id = id; return obj; }, createPasswordEntry: function(id, label, value, info, placeholder, mandatory) { var obj = createTextStub(this.tables.PASSWORD, label, value, info, placeholder, mandatory); obj.id = id; return obj; }, createCheckboxEntry: function(id, label, value, info, checked) { var obj = createTextStub(this.tables.CHECKBOX, label, value, info); obj.id = id; if (checked) { obj.checked = checked; } return obj; }, createSelectEntry: function(id, label, selected, info, options) { var obj = createTextStub(this.tables.SELECT, label, null, info); obj.id = id; if (selected) { obj.selected = selected; } obj.options = options; return obj; }, createOptionEntry: function(label, value) { return { label: label, value: value || label }; }, show: function(templateName, title, buttons, tableContent, advancedContent, extraInfo, events, noConfirm) { var self = this, lastBtn, confirmMsg, closeButtonFound = false; buttons = buttons || []; noConfirm = Boolean(noConfirm); this.clearValidators(); if 
(buttons.length > 0) { buttons.forEach(function (b) { if (b.type === self.buttons.CLOSE) { closeButtonFound = true; } if (b.type === self.buttons.DELETE) { confirmMsg = confirmMsg || b.confirm; } }); if (!closeButtonFound) { // Insert close as second from right lastBtn = buttons.pop(); buttons.push(self.createCloseButton('Cancel')); buttons.push(lastBtn); } } else { buttons.push(self.createCloseButton('Dismiss')); } $(this.el).html(this.baseTemplate.render({ title: title, buttons: buttons, hideFooter: this.hideFooter, confirm: confirmMsg })); _.each(buttons, function(b, i) { if (b.disabled || !b.callback) { return; } if (b.type === self.buttons.DELETE && !noConfirm) { $("#modalButton" + i).bind("click", function() { $(self.confirm.yes).unbind("click"); $(self.confirm.yes).bind("click", b.callback); $(self.confirm.list).css("display", "block"); }); return; } $("#modalButton" + i).bind("click", b.callback); }); $(this.confirm.no).bind("click", function() { $(self.confirm.list).css("display", "none"); }); var template = templateEngine.createTemplate(templateName); $(".modal-body").html(template.render({ content: tableContent, advancedContent: advancedContent, info: extraInfo })); $('.modalTooltips').tooltip({ position: { my: "left top", at: "right+55 top-1" } }); var completeTableContent = tableContent || []; if (advancedContent && advancedContent.content) { completeTableContent = completeTableContent.concat(advancedContent.content); } _.each(completeTableContent, function(row) { self.modalBindValidation(row); if (row.type === self.tables.SELECT2) { //handle select2 $('#'+row.id).select2({ tags: row.tags || [], showSearchBox: false, minimumResultsForSearch: -1, width: "336px", maximumSelectionSize: row.maxEntrySize || 8 }); } }); if (events) { this.events = events; this.delegateEvents(); } $("#modal-dialog").modal("show"); //enable modal hotkeys after rendering is complete if (this.enabledHotkey === false) { this.createInitModalHotkeys(); this.enabledHotkey = true; } if (this.enableHotKeys) { this.createModalHotkeys(); } //if input-field is available -> autofocus first one var focus = $('#modal-dialog').find('input'); if (focus) { setTimeout(function() { var focus = $('#modal-dialog'); if (focus.length > 0) { focus = focus.find('input'); if (focus.length > 0) { $(focus[0]).focus(); } } }, 800); } }, modalBindValidation: function(entry) { var self = this; if (entry.hasOwnProperty("id") && entry.hasOwnProperty("validateInput")) { var validCheck = function() { var $el = $("#" + entry.id); var validation = entry.validateInput($el); var error = false; _.each(validation, function(validator) { var value = $el.val(); if (!validator.rule) { validator = {rule: validator}; } if (typeof validator.rule === 'function') { try { validator.rule(value); } catch (e) { error = validator.msg || e.message; } } else { var result = Joi.validate(value, validator.rule); if (result.error) { error = validator.msg || result.error.message; } } if (error) { return false; } }); if (error) { return error; } }; var $el = $('#' + entry.id); // catch result of validation and act $el.on('keyup focusout', function() { var msg = validCheck(); var errorElement = $el.next()[0]; if (msg) { $el.addClass('invalid-input'); if (errorElement) { //error element available $(errorElement).text(msg); } else { //error element not available $el.after('<p class="errorMessage">' + msg+ '</p>'); } $('.modal-footer .button-success') .prop('disabled', true) .addClass('disabled'); } else { $el.removeClass('invalid-input'); if (errorElement) { 
$(errorElement).remove(); } self.modalTestAll(); } }); this._validators.push(validCheck); this._validateWatchers.push($el); } }, modalTestAll: function() { var tests = _.map(this._validators, function(v) { return v(); }); var invalid = _.any(tests); if (invalid) { $('.modal-footer .button-success') .prop('disabled', true) .addClass('disabled'); } else { $('.modal-footer .button-success') .prop('disabled', false) .removeClass('disabled'); } return !invalid; }, clearValidators: function() { this._validators = []; _.each(this._validateWatchers, function(w) { w.unbind('keyup focusout'); }); this._validateWatchers = []; }, hide: function() { this.clearValidators(); $("#modal-dialog").modal("hide"); } }); }()); /*global _, Dygraph, window, document */ (function () { "use strict"; window.dygraphConfig = { defaultFrame : 20 * 60 * 1000, zeropad: function (x) { if (x < 10) { return "0" + x; } return x; }, xAxisFormat: function (d) { if (d === -1) { return ""; } var date = new Date(d); return this.zeropad(date.getHours()) + ":" + this.zeropad(date.getMinutes()) + ":" + this.zeropad(date.getSeconds()); }, mergeObjects: function (o1, o2, mergeAttribList) { if (!mergeAttribList) { mergeAttribList = []; } var vals = {}, res; mergeAttribList.forEach(function (a) { var valO1 = o1[a], valO2 = o2[a]; if (valO1 === undefined) { valO1 = {}; } if (valO2 === undefined) { valO2 = {}; } vals[a] = _.extend(valO1, valO2); }); res = _.extend(o1, o2); Object.keys(vals).forEach(function (k) { res[k] = vals[k]; }); return res; }, mapStatToFigure : { residentSize : ["times", "residentSizePercent"], pageFaults : ["times", "majorPageFaultsPerSecond", "minorPageFaultsPerSecond"], systemUserTime : ["times", "systemTimePerSecond", "userTimePerSecond"], totalTime : ["times", "avgQueueTime", "avgRequestTime", "avgIoTime"], dataTransfer : ["times", "bytesSentPerSecond", "bytesReceivedPerSecond"], requests : ["times", "getsPerSecond", "putsPerSecond", "postsPerSecond", "deletesPerSecond", "patchesPerSecond", "headsPerSecond", "optionsPerSecond", "othersPerSecond"] }, //colors for dygraphs colors: ["#617e2b", "#296e9c", "#81ccd8", "#7ca530", "#3c3c3c", "#aa90bd", "#e1811d", "#c7d4b2", "#d0b2d4"], // figure dependend options figureDependedOptions: { clusterRequestsPerSecond: { showLabelsOnHighlight: true, title: '', header : "Cluster Requests per Second", stackedGraph: true, div: "lineGraphLegend", labelsKMG2: false, axes: { y: { valueFormatter: function (y) { return parseFloat(y.toPrecision(3)); }, axisLabelFormatter: function (y) { if (y === 0) { return 0; } return parseFloat(y.toPrecision(3)); } } } }, residentSize: { header: "Resident Size", axes: { y: { labelsKMG2: false, axisLabelFormatter: function (y) { return parseFloat(y.toPrecision(3) * 100) + "%"; }, valueFormatter: function (y) { return parseFloat(y.toPrecision(3) * 100) + "%"; } } } }, pageFaults: { header : "Page Faults", visibility: [true, false], labels: ["datetime", "Major Page", "Minor Page"], div: "pageFaultsChart", labelsKMG2: false, axes: { y: { valueFormatter: function (y) { return parseFloat(y.toPrecision(3)); }, axisLabelFormatter: function (y) { if (y === 0) { return 0; } return parseFloat(y.toPrecision(3)); } } } }, systemUserTime: { div: "systemUserTimeChart", header: "System and User Time", labels: ["datetime", "System Time", "User Time"], stackedGraph: true, labelsKMG2: false, axes: { y: { valueFormatter: function (y) { return parseFloat(y.toPrecision(3)); }, axisLabelFormatter: function (y) { if (y === 0) { return 0; } return 
parseFloat(y.toPrecision(3)); } } } }, totalTime: { div: "totalTimeChart", header: "Total Time", labels: ["datetime", "Queue", "Computation", "I/O"], labelsKMG2: false, axes: { y: { valueFormatter: function (y) { return parseFloat(y.toPrecision(3)); }, axisLabelFormatter: function (y) { if (y === 0) { return 0; } return parseFloat(y.toPrecision(3)); } } }, stackedGraph: true }, dataTransfer: { header: "Data Transfer", labels: ["datetime", "Bytes sent", "Bytes received"], stackedGraph: true, div: "dataTransferChart" }, requests: { header: "Requests", labels: ["datetime", "GET", "PUT", "POST", "DELETE", "PATCH", "HEAD", "OPTIONS", "OTHER"], stackedGraph: true, div: "requestsChart", axes: { y: { valueFormatter: function (y) { return parseFloat(y.toPrecision(3)); }, axisLabelFormatter: function (y) { if (y === 0) { return 0; } return parseFloat(y.toPrecision(3)); } } } } }, getDashBoardFigures : function (all) { var result = [], self = this; Object.keys(this.figureDependedOptions).forEach(function (k) { // ClusterRequestsPerSecond should not be ignored. Quick Fix if (k !== "clusterRequestsPerSecond" && (self.figureDependedOptions[k].div || all)) { result.push(k); } }); return result; }, //configuration for chart overview getDefaultConfig: function (figure) { var self = this; var result = { digitsAfterDecimal: 1, drawGapPoints: true, fillGraph: true, showLabelsOnHighlight: false, strokeWidth: 1.5, strokeBorderWidth: 1.5, includeZero: true, highlightCircleSize: 2.5, labelsSeparateLines : true, strokeBorderColor: '#ffffff', interactionModel: {}, maxNumberWidth : 10, colors: [this.colors[0]], xAxisLabelWidth: "50", rightGap: 15, showRangeSelector: false, rangeSelectorHeight: 50, rangeSelectorPlotStrokeColor: '#365300', rangeSelectorPlotFillColor: '', // rangeSelectorPlotFillColor: '#414a4c', pixelsPerLabel: 50, labelsKMG2: true, dateWindow: [ new Date().getTime() - this.defaultFrame, new Date().getTime() ], axes: { x: { valueFormatter: function (d) { return self.xAxisFormat(d); } }, y: { ticker: Dygraph.numericLinearTicks } } }; if (this.figureDependedOptions[figure]) { result = this.mergeObjects( result, this.figureDependedOptions[figure], ["axes"] ); if (result.div && result.labels) { result.colors = this.getColors(result.labels); result.labelsDiv = document.getElementById(result.div + "Legend"); result.legend = "always"; result.showLabelsOnHighlight = true; } } return result; }, getDetailChartConfig: function (figure) { var result = _.extend( this.getDefaultConfig(figure), { showRangeSelector: true, interactionModel: null, showLabelsOnHighlight: true, highlightCircleSize: 2.5, legend: "always", labelsDiv: "div#detailLegend.dashboard-legend-inner" } ); if (figure === "pageFaults") { result.visibility = [true, true]; } if (!result.labels) { result.labels = ["datetime", result.header]; result.colors = this.getColors(result.labels); } return result; }, getColors: function (labels) { var colorList; colorList = this.colors.concat([]); return colorList.slice(0, labels.length - 1); } }; }()); /*jshint browser: true */ /*jshint strict: false, unused: false */ /*global Backbone, window */ window.StatisticsDescriptionCollection = Backbone.Collection.extend({ model: window.StatisticsDescription, url: "/_admin/statistics-description", parse: function(response) { return response; } }); /*global window, $, Backbone, templateEngine, plannerTemplateEngine, alert */ (function() { "use strict"; window.ClusterDownView = Backbone.View.extend({ el: "#content", template: 
templateEngine.createTemplate("clusterDown.ejs"), modal: templateEngine.createTemplate("waitModal.ejs"), events: { "click #relaunchCluster" : "relaunchCluster", "click #upgradeCluster" : "upgradeCluster", "click #editPlan" : "editPlan", "click #submitEditPlan" : "submitEditPlan", "click #deletePlan" : "deletePlan", "click #submitDeletePlan" : "submitDeletePlan" }, render: function() { var planVersion = window.versionHelper.fromString( window.App.clusterPlan.getVersion() ); var currentVersion; $.ajax({ type: "GET", cache: false, url: "/_admin/database/target-version", contentType: "application/json", processData: false, async: false, success: function(data) { currentVersion = data.version; } }); currentVersion = window.versionHelper.fromString( currentVersion ); var shouldUpgrade = false; if (currentVersion.major > planVersion.major || ( currentVersion.major === planVersion.major && currentVersion.minor > planVersion.minor )) { shouldUpgrade = true; } $(this.el).html(this.template.render({ canUpgrade: shouldUpgrade })); $(this.el).append(this.modal.render({})); }, relaunchCluster: function() { $('#waitModalLayer').modal('show'); $('.modal-backdrop.fade.in').addClass('waitModalBackdrop'); $('#waitModalMessage').html('Please be patient while your cluster will be relaunched'); $.ajax({ cache: false, type: "GET", url: "cluster/relaunch", success: function() { $('.modal-backdrop.fade.in').removeClass('waitModalBackdrop'); $('#waitModalLayer').modal('hide'); window.App.navigate("showCluster", {trigger: true}); } }); }, upgradeCluster: function() { $('#waitModalLayer').modal('show'); $('.modal-backdrop.fade.in').addClass('waitModalBackdrop'); $('#waitModalMessage').html('Please be patient while your cluster will be upgraded'); $.ajax({ cache: false, type: "GET", url: "cluster/upgrade", success: function() { $('.modal-backdrop.fade.in').removeClass('waitModalBackdrop'); $('#waitModalLayer').modal('hide'); window.App.clusterPlan.fetch(); window.App.navigate("showCluster", {trigger: true}); } }); }, editPlan: function() { $('#deletePlanModal').modal('hide'); $('#editPlanModal').modal('show'); }, submitEditPlan : function() { $('#editPlanModal').modal('hide'); window.App.clusterPlan.cleanUp(); var plan = window.App.clusterPlan; if (plan.isTestSetup()) { window.App.navigate("planTest", {trigger : true}); return; } window.App.navigate("planAsymmetrical", {trigger : true}); }, deletePlan: function() { $('#editPlanModal').modal('hide'); $('#deletePlanModal').modal('show'); }, submitDeletePlan : function() { $('#deletePlanModal').modal('hide'); window.App.clusterPlan.cleanUp(); window.App.clusterPlan.destroy(); window.App.clusterPlan = new window.ClusterPlan(); window.App.planScenario(); } }); }()); /*global window, $, Backbone, templateEngine, plannerTemplateEngine, alert, _ */ (function() { "use strict"; window.ClusterUnreachableView = Backbone.View.extend({ el: "#content", template: templateEngine.createTemplate("clusterUnreachable.ejs"), modal: templateEngine.createTemplate("waitModal.ejs"), events: { "click #clusterShutdown": "shutdown" }, initialize: function() { this.coordinators = new window.ClusterCoordinators([], { }); }, retryConnection: function() { this.coordinators.checkConnection(function() { window.App.showCluster(); }); }, shutdown: function() { window.clearTimeout(this.timer); window.App.shutdownView.clusterShutdown(); }, render: function() { var plan = window.App.clusterPlan; var list = []; if (plan && plan.has("runInfo")) { var startServerInfos = _.where(plan.get("runInfo"), 
{isStartServers: true}); _.each( _.filter(startServerInfos, function(s) { return _.contains(s.roles, "Coordinator"); }), function(s) { var name = s.endpoints[0].split("://")[1]; name = name.split(":")[0]; list.push(name); } ); } $(this.el).html(this.template.render({ coordinators: list })); $(this.el).append(this.modal.render({})); this.timer = window.setTimeout(this.retryConnection.bind(this), 10000); } }); }()); /*global Backbone, EJS, $, flush, window, arangoHelper, nv, d3, localStorage*/ /*global document, Dygraph, _,templateEngine */ (function() { "use strict"; window.ServerDashboardView = window.DashboardView.extend({ modal : true, hide: function() { window.App.showClusterView.startUpdating(); this.stopUpdating(); }, render: function() { var self = this; window.modalView.hideFooter = true; window.modalView.show( "dashboardView.ejs", null, undefined, undefined, undefined, this.events ); $('#modal-dialog').toggleClass("modal-chart-detail", true); window.DashboardView.prototype.render.bind(this)(true); window.modalView.hideFooter = false; $('#modal-dialog').on('hidden', function () { self.hide(); }); // Inject the closing x var closingX = document.createElement("button"); closingX.className = "close"; closingX.appendChild( document.createTextNode("×") ); closingX = $(closingX); closingX.attr("data-dismiss", "modal"); closingX.attr("aria-hidden", "true"); closingX.attr("type", "button"); $(".modal-body .headerBar:first-child") .toggleClass("headerBar", false) .toggleClass("modal-dashboard-header", true) .append(closingX); } }); }()); /*global templateEngine, window, $, Backbone, plannerTemplateEngine, alert */ (function() { "use strict"; window.LoginModalView = Backbone.View.extend({ template: templateEngine.createTemplate("loginModal.ejs"), el: '#modalPlaceholder', events: { "click #confirmLogin": "confirmLogin", "hidden #loginModalLayer": "hidden" }, hidden: function () { this.undelegateEvents(); window.App.isCheckingUser = false; $(this.el).html(""); }, confirmLogin: function() { var uName = $("#username").val(); var passwd = $("#password").val(); window.App.clusterPlan.storeCredentials(uName, passwd); this.hideModal(); }, hideModal: function () { $('#loginModalLayer').modal('hide'); }, render: function() { $(this.el).html(this.template.render({})); $('#loginModalLayer').modal('show'); } }); }()); /*global Backbone, $, _, window, templateEngine */ (function() { "use strict"; window.PlanScenarioSelectorView = Backbone.View.extend({ el: '#content', template: templateEngine.createTemplate("planScenarioSelector.ejs", "planner"), events: { "click #multiServerAsymmetrical": "multiServerAsymmetrical", "click #singleServer": "singleServer" }, render: function() { $(this.el).html(this.template.render({})); }, multiServerAsymmetrical: function() { window.App.navigate( "planAsymmetrical", {trigger: true} ); }, singleServer: function() { window.App.navigate( "planTest", {trigger: true} ); } }); }()); /*global window, btoa, $, Backbone, templateEngine, alert, _ */ (function() { "use strict"; window.PlanSymmetricView = Backbone.View.extend({ el: "#content", template: templateEngine.createTemplate("symmetricPlan.ejs"), entryTemplate: templateEngine.createTemplate("serverEntry.ejs"), modal: templateEngine.createTemplate("waitModal.ejs"), connectionValidationKey: null, events: { "click #startSymmetricPlan" : "startPlan", "click .add" : "addEntry", "click .delete" : "removeEntry", "click #cancel" : "cancel", "click #test-all-connections" : "checkAllConnections", "focusout .host" : 
"checkAllConnections", "focusout .port" : "checkAllConnections", "focusout .user" : "checkAllConnections", "focusout .passwd" : "checkAllConnections" }, cancel: function() { if(window.App.clusterPlan.get("plan")) { window.App.navigate("handleClusterDown", {trigger: true}); } else { window.App.navigate("planScenario", {trigger: true}); } }, startPlan: function() { var self = this; var data = {dispatchers: []}; var foundCoordinator = false; var foundDBServer = false; data.useSSLonDBservers = !!$(".useSSLonDBservers").prop('checked'); data.useSSLonCoordinators = !!$(".useSSLonCoordinators").prop('checked'); $(".dispatcher").each(function(i, dispatcher) { var host = $(".host", dispatcher).val(); var port = $(".port", dispatcher).val(); var user = $(".user", dispatcher).val(); var passwd = $(".passwd", dispatcher).val(); if (!host || 0 === host.length || !port || 0 === port.length) { return true; } var hostObject = {host : host + ":" + port}; if (!self.isSymmetric) { hostObject.isDBServer = !!$(".isDBServer", dispatcher).prop('checked'); hostObject.isCoordinator = !!$(".isCoordinator", dispatcher).prop('checked'); } else { hostObject.isDBServer = true; hostObject.isCoordinator = true; } hostObject.username = user; hostObject.passwd = passwd; foundCoordinator = foundCoordinator || hostObject.isCoordinator; foundDBServer = foundDBServer || hostObject.isDBServer; data.dispatchers.push(hostObject); }); if (!self.isSymmetric) { if (!foundDBServer) { alert("Please provide at least one database server"); return; } if (!foundCoordinator) { alert("Please provide at least one coordinator"); return; } } else { if ( data.dispatchers.length === 0) { alert("Please provide at least one host"); return; } } data.type = this.isSymmetric ? "symmetricalSetup" : "asymmetricalSetup"; $('#waitModalLayer').modal('show'); $('.modal-backdrop.fade.in').addClass('waitModalBackdrop'); $('#waitModalMessage').html('Please be patient while your cluster is being launched'); delete window.App.clusterPlan._coord; window.App.clusterPlan.save( data, { success : function() { $('.modal-backdrop.fade.in').removeClass('waitModalBackdrop'); $('#waitModalLayer').modal('hide'); window.App.updateAllUrls(); window.App.navigate("showCluster", {trigger: true}); }, error: function(obj, err) { $('.modal-backdrop.fade.in').removeClass('waitModalBackdrop'); $('#waitModalLayer').modal('hide'); alert("Error while starting the cluster: " + err.statusText); } } ); }, addEntry: function() { //disable launch button this.disableLaunchButton(); var lastUser = $("#server_list div.control-group.dispatcher:last .user").val(); var lastPasswd = $("#server_list div.control-group.dispatcher:last .passwd").val(); $("#server_list").append(this.entryTemplate.render({ isSymmetric: this.isSymmetric, isFirst: false, isCoordinator: true, isDBServer: true, host: '', port: '', user: lastUser, passwd: lastPasswd })); }, removeEntry: function(e) { $(e.currentTarget).closest(".control-group").remove(); this.checkAllConnections(); }, render: function(isSymmetric) { var params = {}, isFirst = true, config = window.App.clusterPlan.get("config"); this.isSymmetric = isSymmetric; $(this.el).html(this.template.render({ isSymmetric : isSymmetric, params : params, useSSLonDBservers: config && config.useSSLonDBservers ? config.useSSLonDBservers : false, useSSLonCoordinators: config && config.useSSLonCoordinators ? 
config.useSSLonCoordinators : false })); if (config) { var self = this, isCoordinator = false, isDBServer = false; _.each(config.dispatchers, function(dispatcher) { if (dispatcher.allowDBservers === undefined) { isDBServer = true; } else { isDBServer = dispatcher.allowDBservers; } if (dispatcher.allowCoordinators === undefined) { isCoordinator = true; } else { isCoordinator = dispatcher.allowCoordinators; } var host = dispatcher.endpoint; host = host.split("//")[1]; host = host.split(":"); if (host === 'localhost') { host = '127.0.0.1'; } var user = dispatcher.username; var passwd = dispatcher.passwd; var template = self.entryTemplate.render({ isSymmetric: isSymmetric, isFirst: isFirst, host: host[0], port: host[1], isCoordinator: isCoordinator, isDBServer: isDBServer, user: user, passwd: passwd }); $("#server_list").append(template); isFirst = false; }); } else { $("#server_list").append(this.entryTemplate.render({ isSymmetric: isSymmetric, isFirst: true, isCoordinator: true, isDBServer: true, host: '', port: '', user: '', passwd: '' })); } //initially disable lunch button this.disableLaunchButton(); $(this.el).append(this.modal.render({})); }, readAllConnections: function() { var res = []; $(".dispatcher").each(function(key, row) { var obj = { host: $('.host', row).val(), port: $('.port', row).val(), user: $('.user', row).val(), passwd: $('.passwd', row).val() }; if (obj.host && obj.port) { res.push(obj); } }); return res; }, checkAllConnections: function() { var self = this; var connectionValidationKey = Math.random(); this.connectionValidationKey = connectionValidationKey; $('.cluster-connection-check-success').remove(); $('.cluster-connection-check-fail').remove(); var list = this.readAllConnections(); if (list.length) { try { $.ajax({ async: true, cache: false, type: "POST", url: "/_admin/aardvark/cluster/communicationCheck", data: JSON.stringify(list), success: function(checkList) { if (connectionValidationKey === self.connectionValidationKey) { var dispatcher = $(".dispatcher"); var i = 0; dispatcher.each(function(key, row) { var host = $(".host", row).val(); var port = $(".port", row).val(); if (host && port) { if (checkList[i]) { $(".controls:first", row).append( '<span class="cluster-connection-check-success">Connection: ok</span>' ); } else { $(".controls:first", row).append( '<span class="cluster-connection-check-fail">Connection: fail</span>' ); } i++; } }); self.checkDispatcherArray(checkList, connectionValidationKey); } } }); } catch (e) { this.disableLaunchButton(); } } }, checkDispatcherArray: function(dispatcherArray, connectionValidationKey) { if( (_.every(dispatcherArray, function (e) {return e;})) && connectionValidationKey === this.connectionValidationKey ) { this.enableLaunchButton(); } }, disableLaunchButton: function() { $('#startSymmetricPlan').attr('disabled', 'disabled'); $('#startSymmetricPlan').removeClass('button-success'); $('#startSymmetricPlan').addClass('button-neutral'); }, enableLaunchButton: function() { $('#startSymmetricPlan').attr('disabled', false); $('#startSymmetricPlan').removeClass('button-neutral'); $('#startSymmetricPlan').addClass('button-success'); } }); }()); /*global window, $, Backbone, templateEngine, alert */ (function() { "use strict"; window.PlanTestView = Backbone.View.extend({ el: "#content", template: templateEngine.createTemplate("testPlan.ejs"), modal: templateEngine.createTemplate("waitModal.ejs"), events: { "click #startTestPlan": "startPlan", "click #cancel": "cancel" }, cancel: function() { 
if(window.App.clusterPlan.get("plan")) { window.App.navigate("handleClusterDown", {trigger: true}); } else { window.App.navigate("planScenario", {trigger: true}); } }, startPlan: function() { $('#waitModalLayer').modal('show'); $('.modal-backdrop.fade.in').addClass('waitModalBackdrop'); $('#waitModalMessage').html('Please be patient while your cluster is being launched'); var h = $("#host").val(), p = $("#port").val(), c = $("#coordinators").val(), d = $("#dbs").val(); if (!h) { alert("Please define a host"); return; } if (!p) { alert("Please define a port"); return; } if (!c || c < 0) { alert("Please define a number of coordinators"); return; } if (!d || d < 0) { alert("Please define a number of database servers"); return; } delete window.App.clusterPlan._coord; window.App.clusterPlan.save( { type: "testSetup", dispatchers: h + ":" + p, numberDBServers: parseInt(d, 10), numberCoordinators: parseInt(c, 10) }, { success: function() { $('.modal-backdrop.fade.in').removeClass('waitModalBackdrop'); $('#waitModalLayer').modal('hide'); window.App.updateAllUrls(); window.App.navigate("showCluster", {trigger: true}); }, error: function(obj, err) { $('.modal-backdrop.fade.in').removeClass('waitModalBackdrop'); $('#waitModalLayer').modal('hide'); alert("Error while starting the cluster: " + err.statusText); } } ); }, render: function() { var param = {}; var config = window.App.clusterPlan.get("config"); if (config) { param.dbs = config.numberOfDBservers; param.coords = config.numberOfCoordinators; var host = config.dispatchers.d1.endpoint; host = host.split("://")[1]; host = host.split(":"); if (host === 'localhost') { host = '127.0.0.1'; } param.hostname = host[0]; param.port = host[1]; } else { param.dbs = 3; param.coords = 2; param.hostname = window.location.hostname; if (param.hostname === 'localhost') { param.hostname = '127.0.0.1'; } param.port = window.location.port; } $(this.el).html(this.template.render(param)); $(this.el).append(this.modal.render({})); } }); }()); /*global window, $, Backbone, templateEngine, alert, _, d3, Dygraph, document */ (function() { "use strict"; window.ShowClusterView = Backbone.View.extend({ detailEl: '#modalPlaceholder', el: "#content", defaultFrame: 20 * 60 * 1000, template: templateEngine.createTemplate("showCluster.ejs"), modal: templateEngine.createTemplate("waitModal.ejs"), detailTemplate: templateEngine.createTemplate("detailView.ejs"), events: { "change #selectDB" : "updateCollections", "change #selectCol" : "updateShards", "click .dbserver.success" : "dashboard", "click .coordinator.success" : "dashboard" }, replaceSVGs: function() { $(".svgToReplace").each(function() { var img = $(this); var id = img.attr("id"); var src = img.attr("src"); $.get(src, function(d) { var svg = $(d).find("svg"); svg.attr("id", id) .attr("class", "icon") .removeAttr("xmlns:a"); img.replaceWith(svg); }, "xml"); }); }, updateServerTime: function() { this.serverTime = new Date().getTime(); }, setShowAll: function() { this.graphShowAll = true; }, resetShowAll: function() { this.graphShowAll = false; this.renderLineChart(); }, initialize: function() { this.interval = 10000; this.isUpdating = false; this.timer = null; this.knownServers = []; this.graph = undefined; this.graphShowAll = false; this.updateServerTime(); this.dygraphConfig = this.options.dygraphConfig; this.dbservers = new window.ClusterServers([], { interval: this.interval }); this.coordinators = new window.ClusterCoordinators([], { interval: this.interval }); this.documentStore = new window.arangoDocuments(); 
this.statisticsDescription = new window.StatisticsDescription(); this.statisticsDescription.fetch({ async: false }); this.dbs = new window.ClusterDatabases([], { interval: this.interval }); this.cols = new window.ClusterCollections(); this.shards = new window.ClusterShards(); this.startUpdating(); }, listByAddress: function(callback) { var byAddress = {}; var self = this; this.dbservers.byAddress(byAddress, function(res) { self.coordinators.byAddress(res, callback); }); }, updateCollections: function() { var self = this; var selCol = $("#selectCol"); var dbName = $("#selectDB").find(":selected").attr("id"); if (!dbName) { return; } var colName = selCol.find(":selected").attr("id"); selCol.html(""); this.cols.getList(dbName, function(list) { _.each(_.pluck(list, "name"), function(c) { selCol.append("<option id=\"" + c + "\">" + c + "</option>"); }); var colToSel = $("#" + colName, selCol); if (colToSel.length === 1) { colToSel.prop("selected", true); } self.updateShards(); }); }, updateShards: function() { var dbName = $("#selectDB").find(":selected").attr("id"); var colName = $("#selectCol").find(":selected").attr("id"); this.shards.getList(dbName, colName, function(list) { $(".shardCounter").html("0"); _.each(list, function(s) { $("#" + s.server + "Shards").html(s.shards.length); }); }); }, updateServerStatus: function(nextStep) { var self = this; var callBack = function(cls, stat, serv) { var id = serv, type, icon; id = id.replace(/\./g,'-'); id = id.replace(/\:/g,'_'); icon = $("#id" + id); if (icon.length < 1) { // callback after view was unrendered return; } type = icon.attr("class").split(/\s+/)[1]; icon.attr("class", cls + " " + type + " " + stat); if (cls === "coordinator") { if (stat === "success") { $(".button-gui", icon.closest(".tile")).toggleClass("button-gui-disabled", false); } else { $(".button-gui", icon.closest(".tile")).toggleClass("button-gui-disabled", true); } } }; this.coordinators.getStatuses(callBack.bind(this, "coordinator"), function() { self.dbservers.getStatuses(callBack.bind(self, "dbserver")); nextStep(); }); }, updateDBDetailList: function() { var self = this; var selDB = $("#selectDB"); var dbName = selDB.find(":selected").attr("id"); selDB.html(""); this.dbs.getList(function(dbList) { _.each(_.pluck(dbList, "name"), function(c) { selDB.append("<option id=\"" + c + "\">" + c + "</option>"); }); var dbToSel = $("#" + dbName, selDB); if (dbToSel.length === 1) { dbToSel.prop("selected", true); } self.updateCollections(); }); }, rerender : function() { var self = this; this.updateServerStatus(function() { self.getServerStatistics(function() { self.updateServerTime(); self.data = self.generatePieData(); self.renderPieChart(self.data); self.renderLineChart(); self.updateDBDetailList(); }); }); }, render: function() { this.knownServers = []; delete this.hist; var self = this; this.listByAddress(function(byAddress) { if (Object.keys(byAddress).length === 1) { self.type = "testPlan"; } else { self.type = "other"; } self.updateDBDetailList(); self.dbs.getList(function(dbList) { $(self.el).html(self.template.render({ dbs: _.pluck(dbList, "name"), byAddress: byAddress, type: self.type })); $(self.el).append(self.modal.render({})); self.replaceSVGs(); /* this.loadHistory(); */ self.getServerStatistics(function() { self.data = self.generatePieData(); self.renderPieChart(self.data); self.renderLineChart(); self.updateDBDetailList(); self.startUpdating(); }); }); }); }, generatePieData: function() { var pieData = []; var self = this; this.data.forEach(function(m) { 
pieData.push({key: m.get("name"), value: m.get("system").virtualSize, time: self.serverTime}); }); return pieData; }, /* loadHistory : function() { this.hist = {}; var self = this; var coord = this.coordinators.findWhere({ status: "ok" }); var endpoint = coord.get("protocol") + "://" + coord.get("address"); this.dbservers.forEach(function (dbserver) { if (dbserver.get("status") !== "ok") {return;} if (self.knownServers.indexOf(dbserver.id) === -1) { self.knownServers.push(dbserver.id); } var server = { raw: dbserver.get("address"), isDBServer: true, target: encodeURIComponent(dbserver.get("name")), endpoint: endpoint, addAuth: window.App.addAuth.bind(window.App) }; }); this.coordinators.forEach(function (coordinator) { if (coordinator.get("status") !== "ok") {return;} if (self.knownServers.indexOf(coordinator.id) === -1) { self.knownServers.push(coordinator.id); } var server = { raw: coordinator.get("address"), isDBServer: false, target: encodeURIComponent(coordinator.get("name")), endpoint: coordinator.get("protocol") + "://" + coordinator.get("address"), addAuth: window.App.addAuth.bind(window.App) }; }); }, */ addStatisticsItem: function(name, time, requests, snap) { var self = this; if (! self.hasOwnProperty('hist')) { self.hist = {}; } if (! self.hist.hasOwnProperty(name)) { self.hist[name] = []; } var h = self.hist[name]; var l = h.length; if (0 === l) { h.push({ time: time, snap: snap, requests: requests, requestsPerSecond: 0 }); } else { var lt = h[l - 1].time; var tt = h[l - 1].requests; if (tt < requests) { var dt = time - lt; var ps = 0; if (dt > 0) { ps = (requests - tt) / dt; } h.push({ time: time, snap: snap, requests: requests, requestsPerSecond: ps }); } /* else { h.times.push({ time: time, snap: snap, requests: requests, requestsPerSecond: 0 }); } */ } }, getServerStatistics: function(nextStep) { var self = this; var snap = Math.round(self.serverTime / 1000); this.data = undefined; var statCollect = new window.ClusterStatisticsCollection(); var coord = this.coordinators.first(); // create statistics collector for DB servers this.dbservers.forEach(function (dbserver) { if (dbserver.get("status") !== "ok") {return;} if (self.knownServers.indexOf(dbserver.id) === -1) { self.knownServers.push(dbserver.id); } var stat = new window.Statistics({name: dbserver.id}); stat.url = coord.get("protocol") + "://" + coord.get("address") + "/_admin/clusterStatistics?DBserver=" + dbserver.get("name"); statCollect.add(stat); }); // create statistics collector for coordinator this.coordinators.forEach(function (coordinator) { if (coordinator.get("status") !== "ok") {return;} if (self.knownServers.indexOf(coordinator.id) === -1) { self.knownServers.push(coordinator.id); } var stat = new window.Statistics({name: coordinator.id}); stat.url = coordinator.get("protocol") + "://" + coordinator.get("address") + "/_admin/statistics"; statCollect.add(stat); }); var cbCounter = statCollect.size(); this.data = []; var successCB = function(m) { cbCounter--; var time = m.get("time"); var name = m.get("name"); var requests = m.get("http").requestsTotal; self.addStatisticsItem(name, time, requests, snap); self.data.push(m); if (cbCounter === 0) { nextStep(); } }; var errCB = function() { cbCounter--; if (cbCounter === 0) { nextStep(); } }; // now fetch the statistics statCollect.fetch(successCB, errCB); }, renderPieChart: function(dataset) { var w = $("#clusterGraphs svg").width(); var h = $("#clusterGraphs svg").height(); var radius = Math.min(w, h) / 2; //change 2 to 1.4. It's hilarious. 
// var color = d3.scale.category20(); var color = this.dygraphConfig.colors; var arc = d3.svg.arc() //each datapoint will create one later. .outerRadius(radius - 20) .innerRadius(0); var pie = d3.layout.pie() .sort(function (d) { return d.value; }) .value(function (d) { return d.value; }); d3.select("#clusterGraphs").select("svg").remove(); var pieChartSvg = d3.select("#clusterGraphs").append("svg") // .attr("width", w) // .attr("height", h) .attr("class", "clusterChart") .append("g") //someone to transform. Groups data. .attr("transform", "translate(" + w / 2 + "," + ((h / 2) - 10) + ")"); var arc2 = d3.svg.arc() .outerRadius(radius-2) .innerRadius(radius-2); var slices = pieChartSvg.selectAll(".arc") .data(pie(dataset)) .enter().append("g") .attr("class", "slice"); slices.append("path") .attr("d", arc) .style("fill", function (item, i) { return color[i % color.length]; }) .style("stroke", function (item, i) { return color[i % color.length]; }); slices.append("text") .attr("transform", function(d) { return "translate(" + arc.centroid(d) + ")"; }) // .attr("dy", "0.35em") .style("text-anchor", "middle") .text(function(d) { var v = d.data.value / 1024 / 1024 / 1024; return v.toFixed(2); }); slices.append("text") .attr("transform", function(d) { return "translate(" + arc2.centroid(d) + ")"; }) // .attr("dy", "1em") .style("text-anchor", "middle") .text(function(d) { return d.data.key; }); }, renderLineChart: function() { var self = this; var interval = 60 * 20; var data = []; var hash = []; var t = Math.round(new Date().getTime() / 1000) - interval; var ks = self.knownServers; var f = function() { return null; }; var d, h, i, j, tt, snap; for (i = 0; i < ks.length; ++i) { h = self.hist[ks[i]]; if (h) { for (j = 0; j < h.length; ++j) { snap = h[j].snap; if (snap < t) { continue; } if (! 
hash.hasOwnProperty(snap)) { tt = new Date(snap * 1000); d = hash[snap] = [ tt ].concat(ks.map(f)); } else { d = hash[snap]; } d[i + 1] = h[j].requestsPerSecond; } } } data = []; Object.keys(hash).sort().forEach(function (m) { data.push(hash[m]); }); var options = this.dygraphConfig.getDefaultConfig('clusterRequestsPerSecond'); options.labelsDiv = $("#lineGraphLegend")[0]; options.labels = [ "datetime" ].concat(ks); self.graph = new Dygraph( document.getElementById('lineGraph'), data, options ); }, stopUpdating: function () { window.clearTimeout(this.timer); delete this.graph; this.isUpdating = false; }, startUpdating: function () { if (this.isUpdating) { return; } this.isUpdating = true; var self = this; this.timer = window.setInterval(function() { self.rerender(); }, this.interval); }, dashboard: function(e) { this.stopUpdating(); var tar = $(e.currentTarget); var serv = {}; var cur; var coord; var ip_port = tar.attr("id"); ip_port = ip_port.replace(/\-/g,'.'); ip_port = ip_port.replace(/\_/g,':'); ip_port = ip_port.substr(2); serv.raw = ip_port; serv.isDBServer = tar.hasClass("dbserver"); if (serv.isDBServer) { cur = this.dbservers.findWhere({ address: serv.raw }); coord = this.coordinators.findWhere({ status: "ok" }); serv.endpoint = coord.get("protocol") + "://" + coord.get("address"); } else { cur = this.coordinators.findWhere({ address: serv.raw }); serv.endpoint = cur.get("protocol") + "://" + cur.get("address"); } serv.target = encodeURIComponent(cur.get("name")); window.App.serverToShow = serv; window.App.dashboard(); }, getCurrentSize: function (div) { if (div.substr(0,1) !== "#") { div = "#" + div; } var height, width; $(div).attr("style", ""); height = $(div).height(); width = $(div).width(); return { height: height, width: width }; }, resize: function () { var dimensions; if (this.graph) { dimensions = this.getCurrentSize(this.graph.maindiv_.id); this.graph.resize(dimensions.width, dimensions.height); } } }); }()); /*global window, $, Backbone, templateEngine, _, alert */ (function() { "use strict"; window.ShowShardsView = Backbone.View.extend({ el: "#content", template: templateEngine.createTemplate("showShards.ejs"), events: { "change #selectDB" : "updateCollections", "change #selectCol" : "updateShards" }, initialize: function() { this.dbservers = new window.ClusterServers([], { interval: 10000 }); this.dbservers.fetch({ async : false, beforeSend: window.App.addAuth.bind(window.App) }); this.dbs = new window.ClusterDatabases([], { interval: 10000 }); this.cols = new window.ClusterCollections(); this.shards = new window.ClusterShards(); }, updateCollections: function() { var dbName = $("#selectDB").find(":selected").attr("id"); $("#selectCol").html(""); _.each(_.pluck(this.cols.getList(dbName), "name"), function(c) { $("#selectCol").append("<option id=\"" + c + "\">" + c + "</option>"); }); this.updateShards(); }, updateShards: function() { var dbName = $("#selectDB").find(":selected").attr("id"); var colName = $("#selectCol").find(":selected").attr("id"); var list = this.shards.getList(dbName, colName); $(".shardContainer").empty(); _.each(list, function(s) { var item = $("#" + s.server + "Shards"); $(".collectionName", item).html(s.server + ": " + s.shards.length); /* Will be needed in future _.each(s.shards, function(shard) { var shardIcon = document.createElement("span"); shardIcon = $(shardIcon); shardIcon.toggleClass("fa"); shardIcon.toggleClass("fa-th"); item.append(shardIcon); }); */ }); }, render: function() { $(this.el).html(this.template.render({ names: 
this.dbservers.pluck("name"), dbs: _.pluck(this.dbs.getList(), "name") })); this.updateCollections(); } }); }()); /*global Backbone, templateEngine, $, window*/ (function () { "use strict"; window.ShutdownButtonView = Backbone.View.extend({ el: '#navigationBar', events: { "click #clusterShutdown" : "clusterShutdown" }, initialize: function() { this.overview = this.options.overview; }, template: templateEngine.createTemplate("shutdownButtonView.ejs"), clusterShutdown: function() { this.overview.stopUpdating(); $('#waitModalLayer').modal('show'); $('.modal-backdrop.fade.in').addClass('waitModalBackdrop'); $('#waitModalMessage').html('Please be patient while your cluster is shutting down'); $.ajax({ cache: false, type: "GET", url: "cluster/shutdown", success: function(data) { $('.modal-backdrop.fade.in').removeClass('waitModalBackdrop'); $('#waitModalLayer').modal('hide'); window.App.navigate("handleClusterDown", {trigger: true}); } }); }, render: function () { $(this.el).html(this.template.render({})); return this; }, unrender: function() { $(this.el).html(""); } }); }()); /*global window, $, Backbone, document, arangoCollectionModel,arangoHelper, arangoDatabase, btoa, _*/ (function() { "use strict"; window.ClusterRouter = Backbone.Router.extend({ routes: { "" : "initialRoute", "planScenario" : "planScenario", "planTest" : "planTest", "planAsymmetrical" : "planAsymmetric", "shards" : "showShards", "showCluster" : "showCluster", "handleClusterDown" : "handleClusterDown" }, // Quick fix for server authentication addAuth: function (xhr) { var u = this.clusterPlan.get("user"); if (!u) { xhr.abort(); if (!this.isCheckingUser) { this.requestAuth(); } return; } var user = u.name; var pass = u.passwd; var token = user.concat(":", pass); xhr.setRequestHeader('Authorization', "Basic " + btoa(token)); }, requestAuth: function() { this.isCheckingUser = true; this.clusterPlan.set({"user": null}); var modalLogin = new window.LoginModalView(); modalLogin.render(); }, getNewRoute: function(last) { if (last === "statistics") { return this.clusterPlan.getCoordinator() + "/_admin/" + last; } return this.clusterPlan.getCoordinator() + "/_admin/aardvark/cluster/" + last; }, initialRoute: function() { this.initial(); }, updateAllUrls: function() { _.each(this.toUpdate, function(u) { u.updateUrl(); }); }, registerForUpdate: function(o) { this.toUpdate.push(o); o.updateUrl(); }, initialize: function () { this.footerView = new window.FooterView(); this.footerView.render(); var self = this; this.dygraphConfig = window.dygraphConfig; window.modalView = new window.ModalView(); this.initial = this.planScenario; this.isCheckingUser = false; this.bind('all', function(trigger, args) { var routeData = trigger.split(":"); if (trigger === "route") { if (args !== "showCluster") { if (self.showClusterView) { self.showClusterView.stopUpdating(); self.shutdownView.unrender(); } if (self.dashboardView) { self.dashboardView.stopUpdating(); } } } }); this.toUpdate = []; this.clusterPlan = new window.ClusterPlan(); this.clusterPlan.fetch({ async: false }); $(window).resize(function() { self.handleResize(); }); }, showCluster: function() { if (!this.showClusterView) { this.showClusterView = new window.ShowClusterView( {dygraphConfig : this.dygraphConfig} ); } if (!this.shutdownView) { this.shutdownView = new window.ShutdownButtonView({ overview: this.showClusterView }); } this.shutdownView.render(); this.showClusterView.render(); }, showShards: function() { if (!this.showShardsView) { this.showShardsView = new window.ShowShardsView(); 
} this.showShardsView.render(); }, handleResize: function() { if (this.dashboardView) { this.dashboardView.resize(); } if (this.showClusterView) { this.showClusterView.resize(); } }, planTest: function() { if (!this.planTestView) { this.planTestView = new window.PlanTestView( {model : this.clusterPlan} ); } this.planTestView.render(); }, planAsymmetric: function() { if (!this.planSymmetricView) { this.planSymmetricView = new window.PlanSymmetricView( {model : this.clusterPlan} ); } this.planSymmetricView.render(false); }, planScenario: function() { if (!this.planScenarioSelector) { this.planScenarioSelector = new window.PlanScenarioSelectorView(); } this.planScenarioSelector.render(); }, handleClusterDown : function() { if (!this.clusterDownView) { this.clusterDownView = new window.ClusterDownView(); } this.clusterDownView.render(); }, dashboard: function() { var server = this.serverToShow; if (!server) { this.navigate("", {trigger: true}); return; } server.addAuth = this.addAuth.bind(this); this.dashboardView = new window.ServerDashboardView({ dygraphConfig: this.dygraphConfig, serverToShow : this.serverToShow }); this.dashboardView.render(); }, clusterUnreachable: function() { if (this.showClusterView) { this.showClusterView.stopUpdating(); this.shutdownView.unrender(); } if (!this.unreachableView) { this.unreachableView = new window.ClusterUnreachableView(); } this.unreachableView.render(); } }); }()); /*global window, $, Backbone, document */ (function() { "use strict"; $.get("cluster/amIDispatcher", function(data) { if (!data) { var url = window.location.origin; url += window.location.pathname; url = url.replace("cluster", "index"); window.location.replace(url); } }); window.location.hash = ""; $(document).ready(function() { window.App = new window.ClusterRouter(); Backbone.history.start(); if(window.App.clusterPlan.get("plan")) { if(window.App.clusterPlan.isAlive()) { window.App.initial = window.App.showCluster; } else { window.App.initial = window.App.handleClusterDown; } } else { window.App.initial = window.App.planScenario; } window.App.initialRoute(); window.App.handleResize(); }); }());
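The bundle above defines window.ModalView together with helper constructors such as createTextEntry, createCheckboxEntry and createSuccessButton, and a show() method that renders the modal. As a quick illustration of how those pieces fit together, here is a minimal usage sketch; it is not part of the bundle, and the template name ("modalTable.ejs"), the element ids and the field names used here are illustrative assumptions.

// Minimal sketch (not part of the bundle above) of driving window.ModalView.
// "modalTable.ejs" and the ids "new-name"/"new-active" are assumptions.
(function () {
  "use strict";

  window.modalView = window.modalView || new window.ModalView();

  var tableContent = [
    // createTextEntry(id, label, value, info, placeholder, mandatory, joi-style rules)
    window.modalView.createTextEntry(
      "new-name", "Name", "", "A human readable name", "myName", true,
      [{ rule: Joi.string().regex(/^[a-zA-Z]/), msg: "Name must start with a letter." }]
    ),
    window.modalView.createCheckboxEntry("new-active", "Active", true, undefined, true)
  ];

  var buttons = [
    window.modalView.createSuccessButton("Create", function () {
      // Read the values back from the rendered table before closing the modal.
      var name = $("#new-name").val();
      var active = $("#new-active").is(":checked");
      // ... persist name/active here ...
      window.modalView.hide();
    })
  ];

  // show(templateName, title, buttons, tableContent, advancedContent, extraInfo, events, noConfirm)
  window.modalView.show("modalTable.ejs", "Create something", buttons, tableContent);
}());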
apache-2.0
jtrobec/pants
src/python/pants/backend/codegen/tasks/simple_codegen_task.py
12684
# coding=utf-8
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).

from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
                        unicode_literals, with_statement)

import logging
import os
from abc import abstractmethod
from collections import OrderedDict

from twitter.common.collections import OrderedSet

from pants.base.build_environment import get_buildroot
from pants.base.exceptions import TaskError
from pants.base.workunit import WorkUnitLabel
from pants.build_graph.address import Address
from pants.build_graph.address_lookup_error import AddressLookupError
from pants.task.task import Task
from pants.util.dirutil import fast_relpath, safe_delete, safe_walk


logger = logging.getLogger(__name__)


class SimpleCodegenTask(Task):
  """A base-class for code generation for a single target language."""

  @classmethod
  def product_types(cls):
    # NB(gmalmquist): This is a hack copied from the old CodeGen base class to get the round
    # manager to properly run codegen before resolve and compile. It would be more correct to just
    # have each individual codegen class declare what languages it generates, but would cause
    # problems with scala. See https://rbcommons.com/s/twitter/r/2540/.
    return ['java', 'scala', 'python']

  @classmethod
  def register_options(cls, register):
    super(SimpleCodegenTask, cls).register_options(register)
    register('--allow-empty', action='store_true', default=True, fingerprint=True,
             help='Skip targets with no sources defined.',
             advanced=True)
    register('--allow-dups', action='store_true', default=False, fingerprint=True,
             help='Allow multiple targets specifying the same sources. If duplicates are '
                  'allowed, the logic of find_sources will associate generated sources with '
                  'the least-dependent targets that generate them.',
             advanced=True)

  @classmethod
  def get_fingerprint_strategy(cls):
    """Override this method to use a fingerprint strategy other than the default one.

    :return: a fingerprint strategy, or None to use the default strategy.
    """
    return None

  @property
  def cache_target_dirs(self):
    return True

  @property
  def validate_sources_present(self):
    """A property indicating whether input targets require sources.

    If targets should have sources, the `--allow-empty` flag indicates whether it is a warning or
    an error for sources to be missing.
    """
    return True

  def synthetic_target_extra_dependencies(self, target, target_workdir):
    """Gets any extra dependencies generated synthetic targets should have.

    This method is optional for subclasses to implement, because some code generators may have no
    extra dependencies.

    :param Target target: the Target from which we are generating a synthetic Target. E.g.,
      'target' might be a JavaProtobufLibrary, whose corresponding synthetic Target would be a
      JavaLibrary. It may not be necessary to use this parameter depending on the details of the
      subclass.
    :return: a list of dependencies.
    """
    return []

  def synthetic_target_type_by_target(self, target):
    """The type of target this codegen task generates.

    For example, the target type for JaxbGen would simply be JavaLibrary.

    :return: a type (class) that inherits from Target.
    """
    raise NotImplementedError

  def synthetic_target_type(self, target):
    """The type of target this codegen task generates.

    For example, the target type for JaxbGen would simply be JavaLibrary.

    :return: a type (class) that inherits from Target.
    """
    raise NotImplementedError

  def is_gentarget(self, target):
    """Predicate which determines whether the target in question is relevant to this codegen task.

    E.g., the JaxbGen task considers JaxbLibrary targets to be relevant, and nothing else.

    :param Target target: The target to check.
    :return: True if this class can generate code for the given target, False otherwise.
    """
    raise NotImplementedError

  def codegen_targets(self):
    """Finds codegen targets in the dependency graph.

    :return: an iterable of dependency targets.
    """
    return self.context.targets(self.is_gentarget)

  def _do_validate_sources_present(self, target):
    """Checks whether sources is empty, and either raises a TaskError or just returns False.

    The specifics of this behavior are defined by whether the user sets --allow-empty to True/False:
    --allow-empty=False will result in a TaskError being raised in the event of an empty source
    set. If --allow-empty=True, this method will just return false and log a warning.

    Shared for all SimpleCodegenTask subclasses to help keep errors consistent and descriptive.

    :param target: Target to validate.
    :return: True if sources is not empty, False otherwise.
    """
    if not self.validate_sources_present:
      return True
    sources = target.sources_relative_to_buildroot()
    if not sources:
      message = ('Target {} has no sources.'.format(target.address.spec))
      if not self.get_options().allow_empty:
        raise TaskError(message)
      else:
        logging.warn(message)
        return False
    return True

  def _get_synthetic_address(self, target, target_workdir):
    synthetic_name = target.id
    sources_rel_path = os.path.relpath(target_workdir, get_buildroot())
    synthetic_address = Address(sources_rel_path, synthetic_name)
    return synthetic_address

  def execute(self):
    with self.invalidated(self.codegen_targets(),
                          invalidate_dependents=True,
                          fingerprint_strategy=self.get_fingerprint_strategy()) as invalidation_check:
      with self.context.new_workunit(name='execute', labels=[WorkUnitLabel.MULTITOOL]):
        for vt in invalidation_check.all_vts:
          # Build the target and handle duplicate sources.
          if not vt.valid:
            if self._do_validate_sources_present(vt.target):
              self.execute_codegen(vt.target, vt.results_dir)
              self._handle_duplicate_sources(vt.target, vt.results_dir)
            vt.update()
          # And inject a synthetic target to represent it.
          self._inject_synthetic_target(vt.target, vt.results_dir)

  def _inject_synthetic_target(self, target, target_workdir):
    """Create, inject, and return a synthetic target for the given target and workdir.

    :param target: The target to inject a synthetic target for.
    :param target_workdir: The work directory containing the generated code for the target.
    """
    synthetic_target = self.context.add_new_target(
      address=self._get_synthetic_address(target, target_workdir),
      target_type=self.synthetic_target_type(target),
      dependencies=self.synthetic_target_extra_dependencies(target, target_workdir),
      sources=list(self.find_sources(target, target_workdir)),
      derived_from=target,

      # TODO(John Sirois): This assumes - currently, a JvmTarget or PythonTarget which both
      # happen to have this attribute for carrying publish metadata but share no interface
      # that defines this canonical property. Lift up an interface and check for it or else
      # add a way for SimpleCodeGen subclasses to specify extra attribute names that should be
      # copied over from the target to its derived target.
      provides=target.provides,
    )

    build_graph = self.context.build_graph

    # NB(pl): This bypasses the convenience function (Target.inject_dependency) in order
    # to improve performance. Note that we can walk the transitive dependee subgraph once
    # for transitive invalidation rather than walking a smaller subgraph for every single
    # dependency injected.
    for dependent_address in build_graph.dependents_of(target.address):
      build_graph.inject_dependency(
        dependent=dependent_address,
        dependency=synthetic_target.address,
      )
    # NB(pl): See the above comment. The same note applies.
    for concrete_dependency_address in build_graph.dependencies_of(target.address):
      build_graph.inject_dependency(
        dependent=synthetic_target.address,
        dependency=concrete_dependency_address,
      )
    build_graph.walk_transitive_dependee_graph(
      build_graph.dependencies_of(target.address),
      work=lambda t: t.mark_transitive_invalidation_hash_dirty(),
    )

    if target in self.context.target_roots:
      self.context.target_roots.append(synthetic_target)

    return synthetic_target

  def resolve_deps(self, unresolved_deps):
    deps = OrderedSet()
    for dep in unresolved_deps:
      try:
        deps.update(self.context.resolve(dep))
      except AddressLookupError as e:
        raise AddressLookupError('{message}\n  on dependency {dep}'.format(message=e, dep=dep))
    return deps

  @abstractmethod
  def execute_codegen(self, target, target_workdir):
    """Generate code for the given target.

    :param target: A target to generate code for
    :param target_workdir: A clean directory into which to generate code
    """

  def find_sources(self, target, target_workdir):
    """Determines what sources were generated by the target after the fact.

    This is done by searching the directory where this target's code was generated.

    :param Target target: the target for which to find generated sources.
    :param path target_workdir: directory containing sources for the target.
    :return: A set of filepaths relative to the target_workdir.
    :rtype: OrderedSet
    """
    return OrderedSet(self._find_sources_in_workdir(target_workdir))

  def _find_sources_in_workdir(self, target_workdir):
    """Returns relative sources contained in the given target_workdir."""
    for root, _, files in safe_walk(target_workdir):
      rel_root = fast_relpath(root, target_workdir)
      for name in files:
        yield os.path.join(rel_root, name)

  def _handle_duplicate_sources(self, target, target_workdir):
    """Handles duplicate sources generated by the given gen target by either failure or deletion.

    This method should be called after all dependencies have been injected into the graph, but
    before injecting the synthetic version of this target.

    NB(gm): Some code generators may re-generate code that their dependent libraries generate.
    This results in targets claiming to generate sources that they really don't, so we try to
    filter out sources that were actually generated by dependencies of the target. This causes
    the code generated by the dependencies to 'win' over the code generated by dependees. By
    default, this behavior is disabled, and duplication in generated sources will raise a
    TaskError. This is controlled by the --allow-dups flag.
    """
    # Compute the raw sources owned by this target.
    by_target = self.find_sources(target, target_workdir)

    # Walk dependency gentargets and record any sources owned by those targets that are also
    # owned by this target.
    duplicates_by_target = OrderedDict()
    def record_duplicates(dep):
      if dep == target or not self.is_gentarget(dep.concrete_derived_from):
        return
      duped_sources = [s for s in dep.sources_relative_to_source_root() if s in by_target]
      if duped_sources:
        duplicates_by_target[dep] = duped_sources
    target.walk(record_duplicates)

    # If there were no dupes, we're done.
    if not duplicates_by_target:
      return

    # If there were duplicates warn or error.
    messages = ['{target} generated sources that had already been generated by dependencies.'
                .format(target=target.address.spec)]
    for dep, duped_sources in duplicates_by_target.items():
      messages.append('\t{} also generated:'.format(dep.concrete_derived_from.address.spec))
      messages.extend(['\t\t{}'.format(source) for source in duped_sources])
    message = '\n'.join(messages)
    if self.get_options().allow_dups:
      logger.warn(message)
    else:
      raise self.DuplicateSourceError(message)

    # Finally, remove duplicates from the workdir. This prevents us from having to worry
    # about them during future incremental compiles.
    for dep, duped_sources in duplicates_by_target.items():
      for duped_source in duped_sources:
        safe_delete(os.path.join(target_workdir, duped_source))

  class DuplicateSourceError(TaskError):
    """A target generated the same code that was generated by one of its dependencies.

    This is only thrown when --allow-dups=False.
    """
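SimpleCodegenTask leaves is_gentarget, synthetic_target_type and execute_codegen for subclasses to fill in. A minimal sketch of such a subclass follows; FooLibrary, its import path and the foogen command are hypothetical, and only the overridden hooks mirror the base class above.

# Minimal sketch of a SimpleCodegenTask subclass. FooLibrary and the `foogen`
# command are hypothetical placeholders.
import subprocess

from pants.backend.codegen.targets.foo_library import FooLibrary  # hypothetical target type
from pants.backend.codegen.tasks.simple_codegen_task import SimpleCodegenTask
from pants.backend.jvm.targets.java_library import JavaLibrary


class FooGen(SimpleCodegenTask):
  """Generates Java sources from FooLibrary targets."""

  def is_gentarget(self, target):
    # Only FooLibrary targets are relevant to this generator.
    return isinstance(target, FooLibrary)

  def synthetic_target_type(self, target):
    # The injected synthetic target behaves like a hand-written JavaLibrary.
    return JavaLibrary

  def execute_codegen(self, target, target_workdir):
    # Run the (hypothetical) generator once per source file, writing into the
    # clean workdir the base class hands us.
    for source in target.sources_relative_to_buildroot():
      subprocess.check_call(['foogen', '--out', target_workdir, source])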
apache-2.0
emil-wcislo/sbql4j8
sbql4j8/src/test/openjdk/tools/javac/processing/options/TestImplicitNone.java
3758
/* * Copyright (c) 2010, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ /* * @test * @bug 6935638 * @summary -implicit:none prevents compilation with annotation processing */ import java.io.*; import java.util.*; import javax.annotation.processing.*; import javax.lang.model.*; import javax.lang.model.element.*; @SupportedAnnotationTypes("*") public class TestImplicitNone extends AbstractProcessor { public static void main(String... args) throws Exception { new TestImplicitNone().run(); } void run() throws Exception { File classesDir = new File("tmp", "classes"); classesDir.mkdirs(); File test_java = new File(new File("tmp", "src"), "Test.java"); writeFile(test_java, "class Test { }"); // build up list of options and files to be compiled List<String> opts = new ArrayList<String>(); List<File> files = new ArrayList<File>(); opts.add("-d"); opts.add(classesDir.getPath()); opts.add("-processorpath"); opts.add(System.getProperty("test.classes")); opts.add("-implicit:none"); opts.add("-processor"); opts.add(TestImplicitNone.class.getName()); files.add(test_java); compile(opts, files); File test_class = new File(classesDir, "Test.class"); if (!test_class.exists()) throw new Exception("Test.class not generated"); } /** Compile files with options provided. */ void compile(List<String> opts, List<File> files) throws Exception { System.err.println("javac: " + opts + " " + files); List<String> args = new ArrayList<String>(); args.addAll(opts); for (File f: files) args.add(f.getPath()); StringWriter sw = new StringWriter(); PrintWriter pw = new PrintWriter(sw); int rc = sbql4j8.com.sun.tools.javac.Main.compile(args.toArray(new String[args.size()]), pw); pw.flush(); if (sw.getBuffer().length() > 0) System.err.println(sw.toString()); if (rc != 0) throw new Exception("compilation failed: rc=" + rc); } /** Write a file with a given body. */ void writeFile(File f, String body) throws Exception { if (f.getParentFile() != null) f.getParentFile().mkdirs(); Writer out = new FileWriter(f); try { out.write(body); } finally { out.close(); } } //----- annotation processor methods ----- public boolean process(Set<? extends TypeElement> annotations, RoundEnvironment roundEnv) { return true; } @Override public SourceVersion getSupportedSourceVersion() { return SourceVersion.latest(); } }
apache-2.0
yy1300326388/meteor_microscope
microscope/lib/router.js
2390
/** * Created by fitark on 15-3-12. */ Router.configure({ layoutTemplate: 'layout', loadingTemplate: 'loading', notFoundTemplate: 'notFound', waitOn: function() { return [Meteor.subscribe('notifications')] } }); //重构 postsLists 路由为 RouteController. PostsListController = RouteController.extend({ template: 'postsList', increment: 5, //排序 sort:{submitted: -1}, postsLimit: function() { return parseInt(this.params.postsLimit) || this.increment; }, findOptions: function() { return this.postsLimit(); }, subscriptions: function() { this.postsSub = Meteor.subscribe('posts',this.sort, this.findOptions()); }, waitOn: function() { return Meteor.subscribe('posts',this.sort, this.findOptions()); }, data: function() { return {posts: Posts.find({},this.sort, this.findOptions())}; }, posts: function() { return Posts.find({},this.sort,this.findOptions()); }, data: function() { var hasMore = this.posts().count() === this.postsLimit(); var nextPath = this.route.path({postsLimit: this.postsLimit() + this.increment}); return { posts: this.posts(), ready: this.postsSub.ready, nextPath: hasMore ? nextPath : null }; } }); //重构 postsLists 路由为 RouteController. Router.route('/:postsLimit?', { name: 'postsList' }); Router.route('/posts/:_id', { name: 'postPage', waitOn: function() { return [ Meteor.subscribe('singlePost', this.params._id), Meteor.subscribe('comments', this.params._id) ]; }, data: function() { return Posts.findOne(this.params._id); } }); Router.route('/posts/:_id/edit', { name: 'postEdit', waitOn: function() { return Meteor.subscribe('singlePost', this.params._id); }, data: function() { return Posts.findOne(this.params._id); } }); Router.route('/submit', {name: 'postSubmit'}); var requireLogin = function() { if (! Meteor.user()) { if (Meteor.loggingIn()) { this.render(this.loadingTemplate); } else { this.render('accessDenied'); } } else { this.next(); } } Router.onBeforeAction('dataNotFound', {only: 'postPage'}); Router.onBeforeAction(requireLogin, {only: 'postSubmit'});
apache-2.0
gbanegas/KissECC
attack_ecc/old/window_attack.py
4626
import random import math from itertools import product from itertools import chain from thread_sum import ThreadSum q = 2**252 + 27742317777372353535851937790883648493 r = [] v = [] alpha = [] def int_to_bin(number): return [int(x) for x in bin(number)[2:]] def bin_to_int(bit_list): output = 0 for bit in bit_list: output = output * 2 + bit return output def groupsof(n, xs): if len(xs) < n: return [xs] else: return chain([xs[0:n]], groupsof(n, xs[n:])) class WindowAttack(object): def generate_v_values(self, d, N): for i in xrange(0, N): value = d + (alpha[i]*q) v.append(value) def generate_alpha_js(self, N): for i in xrange(1, N+1): al = r[i] - r[0] alpha.append(int(math.fabs(al))) def generate_r_js(self, n, N): for i in xrange(0, N+1): a = random.getrandbits(n) r.append(int(math.fabs(a))) def bit_flip_random(self, bit_list, randomized_bits): bit_list_t = bit_list[:] pos_list = [] if len(bit_list) < randomized_bits: raise Exception("Randomized bigger then d+(a*r)") print "Lenght: ", len(bit_list) for i in xrange(0, randomized_bits): pos_bit_to_flip = random.randint(0, len(bit_list)-1) while(pos_bit_to_flip in pos_list): pos_bit_to_flip = random.randint(0, len(bit_list)-1) pos_list.append(pos_bit_to_flip) if bit_list_t[pos_bit_to_flip] == 1: bit_list_t[pos_bit_to_flip] = 0 else: bit_list_t[pos_bit_to_flip] = 1 return bit_list_t def generate_v_values_with_bit_flip(self, d, N, randomized_bits): for i in xrange(0, N): value = d + (alpha[i]*q) bit_list = int_to_bin(value) #print len(bit_list) bit_list_flipped = self.bit_flip_random(bit_list, randomized_bits) value_flipped = bin_to_int(bit_list_flipped) v.append(value_flipped) def sum_all_ds(self, d_candidates, interval, mod_value, N): pairs = {} number_of_threads = 4 ds = list(groupsof(len(d_candidates)/number_of_threads, d_candidates)) #ds = zip(*[iter(d_candidates)]*number_of_threads) threads = [] #print "DS: ", len(ds) for i in xrange(0, number_of_threads): threads.append(ThreadSum(i, ds[i], v, alpha, N, mod_value, interval)) for t in threads: t.start() for t in threads: t.join() for t in threads: key, d = t.return_result() try: if pairs[key] <> None: val = pairs[key] if val.count(1) > d.count(1): pairs[key] = d except Exception as e: pairs[key] = d #print pairs #print key #print pairs.keys() return min(pairs.keys()) , pairs def test_d(self, d, to_test): """ Function to test the candidate to d. In our case, it is a comparasion with the original d. However, in a real case could be the ciphered text with the original and the candidate""" return (d==to_test) def wide_widow_attack(self, d, window_size = 10, n = 512, N = 200, randomized_bits = 30): self.generate_r_js(n, N) self.generate_alpha_js(N) self.generate_v_values_with_bit_flip(d, N, randomized_bits) print "d = ", int_to_bin(d), " len: ", len(int_to_bin(d)) print "Starting...." w_prime = 0 w = window_size d_prime = 0 variations = [] for i in product([0,1], repeat=window_size): variations.append(list(i)) while(w < (n + window_size + window_size)): print "w: ", w print "w_prime: ", w_prime mod_value = 2**w d_prime = d_prime % mod_value d_prime_bin = int_to_bin(d_prime) to_iterate = [] for variation in variations: to_iterate.append(variation+d_prime_bin) sum_d , d_candidate = self.sum_all_ds(to_iterate, w, mod_value, N) d_prime = bin_to_int(d_candidate[sum_d]) print "sum: ", sum_d, " d_candidate = ", int_to_bin(d_prime) w_prime = w w = w + window_size if self.test_d(d, d_prime): w = w+n if (d == d_prime): print "FOUND KEY." else: print "SORRY" print "Finished."
apache-2.0
banerwai/gommon
etcd/direct.go
760
package etcd import ( "github.com/coreos/etcd/client" "golang.org/x/net/context" ) // Set directly sets a key/value pair in etcd func Set(key, value string) (*client.Response, error) { return KeysAPI.Set(context.Background(), key, value, nil) } // Get directly gets an etcd response by key func Get(key string) (*client.Response, error) { return KeysAPI.Get(context.Background(), key, nil) } // GetValue directly gets a value by key func GetValue(key string) (string, error) { resp, err := KeysAPI.Get(context.Background(), key, nil) if err != nil { return "", err } return resp.Node.Value, nil } // GetString directly gets a string by key, returning the empty string on error func GetString(key string) string { resp, err := GetValue(key) if err != nil { return "" } return resp }
apache-2.0
jentfoo/aws-sdk-java
aws-java-sdk-glue/src/main/java/com/amazonaws/services/glue/model/transform/GetTableRequestMarshaller.java
2558
/* * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.glue.model.transform; import javax.annotation.Generated; import com.amazonaws.SdkClientException; import com.amazonaws.services.glue.model.*; import com.amazonaws.protocol.*; import com.amazonaws.annotation.SdkInternalApi; /** * GetTableRequestMarshaller */ @Generated("com.amazonaws:aws-java-sdk-code-generator") @SdkInternalApi public class GetTableRequestMarshaller { private static final MarshallingInfo<String> CATALOGID_BINDING = MarshallingInfo.builder(MarshallingType.STRING).marshallLocation(MarshallLocation.PAYLOAD) .marshallLocationName("CatalogId").build(); private static final MarshallingInfo<String> DATABASENAME_BINDING = MarshallingInfo.builder(MarshallingType.STRING) .marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("DatabaseName").build(); private static final MarshallingInfo<String> NAME_BINDING = MarshallingInfo.builder(MarshallingType.STRING).marshallLocation(MarshallLocation.PAYLOAD) .marshallLocationName("Name").build(); private static final GetTableRequestMarshaller instance = new GetTableRequestMarshaller(); public static GetTableRequestMarshaller getInstance() { return instance; } /** * Marshall the given parameter object. */ public void marshall(GetTableRequest getTableRequest, ProtocolMarshaller protocolMarshaller) { if (getTableRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(getTableRequest.getCatalogId(), CATALOGID_BINDING); protocolMarshaller.marshall(getTableRequest.getDatabaseName(), DATABASENAME_BINDING); protocolMarshaller.marshall(getTableRequest.getName(), NAME_BINDING); } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } } }
apache-2.0
nativecode-dev/oss-xamarin
src/Demo/Demo/ViewModels/MainViewModel.cs
1231
namespace Demo.ViewModels { using System.Windows.Input; using NativeCode.Mobile.AppCompat.Controls.Platforms; using PropertyChanged; using Xamarin.Forms; public class MainViewModel : ViewModel { private int counter; /// <summary> /// Initializes a new instance of the <see cref="MainViewModel"/> class. /// </summary> public MainViewModel() { this.FloatingButtonCommand = new Command(this.HandleFloatingButtonCommand); this.ShowSnackBar = new Command(this.HandleShowSnackBar); this.Title = "Main"; } public double Elevation { get; set; } public double Radius { get; set; } public int Padding { get; set; } [DoNotNotify] public ICommand FloatingButtonCommand { get; private set; } [DoNotNotify] public ICommand ShowSnackBar { get; private set; } private void HandleFloatingButtonCommand() { } private void HandleShowSnackBar() { var notifier = DependencyService.Get<IUserNotifier>(); notifier.NotifyShort(string.Format("You hit me {0} times!!!", ++this.counter)); } } }
apache-2.0
vujasm/trusthings-compose
trusthings-common/src/main/java/com/inn/util/tuple/ListTupleConvert.java
1380
package com.inn.util.tuple; /* * #%L * trusthings-common * %% * Copyright (C) 2015 COMPOSE project * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import java.lang.reflect.Method; import java.util.List; import com.google.common.collect.Lists; /** * ListTupleConvert util *@author markov * */ public class ListTupleConvert { @SuppressWarnings("unchecked") public static synchronized <Element extends Object> List<Element> toListOfTupleElement(List<?> setOFTuples, int element){ List<Element> set = Lists.newArrayList(); for (Object tuple : setOFTuples) { try { Method m = tuple.getClass().getMethod("getT"+Integer.valueOf(element).toString(), new Class<?>[0]); Element result = (Element) m.invoke(tuple, new Object[0]); set.add(result); } catch (Exception e) { e.printStackTrace(); } } return set; } }
apache-2.0
SocraticGrid/OMS-API
src/main/java/org/socraticgrid/hl7/services/orders/logging/EventLevel.java
960
/* * Copyright 2015 Cognitive Medical Systems, Inc (http://www.cognitivemedciine.com). * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.socraticgrid.hl7.services.orders.logging; public enum EventLevel { none(1000), trace(10), debug(30), info(50), warn(80), error(100), all(0); private int numVal; EventLevel(int numVal) { this.numVal = numVal; } public int getNumVal() { return numVal; } }
apache-2.0
project-chip/connectedhomeip
src/crypto/hsm/nxp/CHIPCryptoPALHsm_SE05X_HKDF.cpp
3662
/* * * Copyright (c) 2021 Project CHIP Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * @file * HSM based implementation of CHIP crypto primitives * Based on configurations in CHIPCryptoPALHsm_config.h file, * chip crypto apis use either HSM or rollback to software implementation. */ #include "CHIPCryptoPALHsm_SE05X_utils.h" #include <lib/core/CHIPEncoding.h> #if ENABLE_HSM_HKDF_SHA256 namespace chip { namespace Crypto { HKDF_shaHSM::HKDF_shaHSM() { keyid = kKeyId_hkdf_sha256_hmac_keyid; } HKDF_shaHSM::~HKDF_shaHSM() {} CHIP_ERROR HKDF_shaHSM::HKDF_SHA256(const uint8_t * secret, const size_t secret_length, const uint8_t * salt, const size_t salt_length, const uint8_t * info, const size_t info_length, uint8_t * out_buffer, size_t out_length) { CHIP_ERROR error = CHIP_ERROR_INTERNAL; if (salt_length > 64 || info_length > 80 || secret_length > 256 || out_length > 768) { /* Length not supported by se05x. Rollback to SW */ return HKDF_sha::HKDF_SHA256(secret, secret_length, salt, salt_length, info, info_length, out_buffer, out_length); } // Salt is optional if (salt_length > 0) { VerifyOrReturnError(salt != nullptr, CHIP_ERROR_INVALID_ARGUMENT); } VerifyOrReturnError(info_length > 0, CHIP_ERROR_INVALID_ARGUMENT); VerifyOrReturnError(info != nullptr, CHIP_ERROR_INVALID_ARGUMENT); VerifyOrReturnError(out_length > 0, CHIP_ERROR_INVALID_ARGUMENT); VerifyOrReturnError(out_buffer != nullptr, CHIP_ERROR_INVALID_ARGUMENT); VerifyOrReturnError(secret != nullptr, CHIP_ERROR_INVALID_ARGUMENT); VerifyOrReturnError(keyid != kKeyId_NotInitialized, CHIP_ERROR_HSM); se05x_sessionOpen(); VerifyOrReturnError(gex_sss_chip_ctx.ks.session != NULL, CHIP_ERROR_INTERNAL); sss_object_t keyObject = { 0 }; sss_status_t status = sss_key_object_init(&keyObject, &gex_sss_chip_ctx.ks); VerifyOrReturnError(status == kStatus_SSS_Success, CHIP_ERROR_INTERNAL); status = sss_key_object_allocate_handle(&keyObject, keyid, kSSS_KeyPart_Default, kSSS_CipherType_HMAC, secret_length, kKeyObject_Mode_Transient); VerifyOrReturnError(status == kStatus_SSS_Success, CHIP_ERROR_INTERNAL); status = sss_key_store_set_key(&gex_sss_chip_ctx.ks, &keyObject, secret, secret_length, secret_length * 8, NULL, 0); VerifyOrReturnError(status == kStatus_SSS_Success, CHIP_ERROR_INTERNAL); const smStatus_t smstatus = Se05x_API_HKDF_Extended( &((sss_se05x_session_t *) &gex_sss_chip_ctx.session)->s_ctx, keyObject.keyId, kSE05x_DigestMode_SHA256, kSE05x_HkdfMode_ExtractExpand, salt, salt_length, 0, info, info_length, 0, (uint16_t) out_length, out_buffer, &out_length); VerifyOrExit(smstatus == SM_OK, error = CHIP_ERROR_INTERNAL); error = CHIP_NO_ERROR; exit: sss_key_store_erase_key(&gex_sss_chip_ctx.ks, &keyObject); return error; } } // namespace Crypto } // namespace chip #endif //#if ENABLE_HSM_HKDF_SHA256
apache-2.0
sarl/sarl
main/coreplugins/io.sarl.lang/src/io/sarl/lang/scoping/SARLScopeProvider.java
1087
/* * $Id$ * * SARL is an general-purpose agent programming language. * More details on http://www.sarl.io * * Copyright (C) 2014-2021 the original authors or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.sarl.lang.scoping; /** * Custom scoping description. * * @author $Author: sgalland$ * @version $FullVersion$ * @mavengroupid $GroupId$ * @mavenartifactid $ArtifactId$ * @see "https://www.eclipse.org/Xtext/documentation/303_runtime_concepts.html#scoping" */ public class SARLScopeProvider extends AbstractSARLScopeProvider { // }
apache-2.0
rostam/gradoop
gradoop-flink/src/main/java/org/gradoop/flink/model/impl/operators/count/functions/Tuple1With1L.java
1337
/* * Copyright © 2014 - 2019 Leipzig University (Database Research Group) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.gradoop.flink.model.impl.operators.count.functions; import org.apache.flink.api.common.functions.JoinFunction; import org.apache.flink.api.common.functions.MapFunction; import org.apache.flink.api.java.tuple.Tuple1; /** * Maps any input value to the numeric constant one (1L) wrapped in a Tuple1. * * @param <T> type of the input value */ public class Tuple1With1L<T> implements JoinFunction<T, T, Tuple1<Long>>, MapFunction<T, Tuple1<Long>> { /** * Reusable Tuple1 holding the numeric constant one */ private static final Tuple1<Long> ONE = new Tuple1<>(1L); @Override public Tuple1<Long> join(T left, T right) throws Exception { return ONE; } @Override public Tuple1<Long> map(T x) throws Exception { return ONE; } }
apache-2.0
jomolinare/crate-web
src/web/filters.py
1006
# vim: set fileencoding=utf-8 : # -*- coding: utf-8 -*- __docformat__ = "reStructuredText" import json import datetime from django.template.base import Library from django.utils.safestring import mark_safe register = Library() class DateTimeJSONEncoder(json.JSONEncoder): """Encoder for datetime objects""" def default(self, obj): if isinstance(obj, datetime.datetime): return obj.strftime('%Y-%m-%dT%H:%M:%S') return super(DateTimeJSONEncoder, self).default(obj) @register.filter(is_safe=True) def json_dump(value): return json.dumps(value, indent=2, cls=DateTimeJSONEncoder) @register.filter(is_safe=True) def filter_by_category(items, needle): return filter(lambda x: needle in x['category'], items) @register.filter(is_safe=True) def filter_by_tag(items, needle): return filter(lambda x: needle in x['tags'], items) @register.filter(is_safe=True) def filter_by_author(items, author): return filter(lambda x: author == x['author'], items)
apache-2.0
jarrodek/polymer-chrome-apps2
gulpfile.js
367
'use strict'; var gulp = require('gulp'); var crisper = require('gulp-crisper'); /** * Make all bower_components CSP ready */ gulp.task('crisper-bower', function() { // Return the stream so gulp knows when the task has completed. return gulp.src('bower_components/**/*.html') .pipe(crisper({ scriptInHead: false, onlySplit: false, alwaysWriteScript: false })) .pipe(gulp.dest('bower_components/')); });
apache-2.0
sawandarekar/JavaHub
src/main/java/ocjp/stringsIOFormattingParsing/StringDiff.java
1114
package ocjp.stringsIOFormattingParsing; import java.util.LinkedList; import java.util.List; public class StringDiff { public static List<int[]> from(String s1, String s2) { int start = -1; int pos = 0; LinkedList<int[]> list = new LinkedList<int[]>(); for(; pos < s1.length() && pos < s2.length(); ++pos) { if(s1.charAt(pos) == s2.charAt(pos)) { if(start < 0) { start = pos; } } else { if(start >= 0) { list.add(new int[] { start, pos }); } start = -1; } } if(start >= 0) { list.add(new int[] { start, pos }); } return list; } public static void main(String[] args) { String first="The quick brown fox jumped over the lazy dog."; String second="The quick yellow fox jumped over the well-bred dog."; for(int[] idx : from(first, second)) { System.out.print(first.substring(idx[0], idx[1])); } } }
apache-2.0
raywang8341/BTMessenger
app/src/main/java/com/randroid/btmessenger/bluetooth/BluetoothMessage.java
460
package com.randroid.btmessenger.bluetooth; import java.io.Serializable; /** * Created by hui on 3/30/15. */ public class BluetoothMessage implements Serializable { public String getMessage() { return message; } public void setMessage(String message) { this.message = message; } private String message; public BluetoothMessage(String msg){ this.message = msg; } public BluetoothMessage(){ } }
apache-2.0
gaamy/android_app_tp3
app/src/main/java/com/polymt/inf8405/tp3/baseclass/Friend.java
131
package com.polymt.inf8405.tp3.baseclass; /** * Created by Louis-Philippe on 4/6/2017. */ public class Friend extends User { }
apache-2.0
Alexia23/batfish
projects/batfish/src/org/batfish/representation/cisco/RouteMap.java
742
package org.batfish.representation.cisco; import java.io.Serializable; import java.util.Map; import java.util.TreeMap; public class RouteMap implements Serializable { private static final long serialVersionUID = 1L; private Map<Integer, RouteMapClause> _clauses; private boolean _ignore; private String _mapName; public RouteMap(String name) { _mapName = name; _clauses = new TreeMap<Integer, RouteMapClause>(); _ignore = false; } public Map<Integer, RouteMapClause> getClauses() { return _clauses; } public boolean getIgnore() { return _ignore; } public String getMapName() { return _mapName; } public void setIgnore(boolean b) { _ignore = b; } }
apache-2.0
wajdihh/HhDroid
src/main/java/com/hh/clientdatatable/TColumn.java
2374
package com.hh.clientdatatable; import com.hh.clientdatatable.TCell.ValueType; import com.hh.listeners.OnCDTColumnObserver; public class TColumn { public enum ColumnType {PrimaryKey,ToIgnoreInDB,JsonParent,JsonField}; private String _mName; private ValueType _mType; private TCell.CellType _mCellType; private OnCDTColumnObserver _mListener; private ColumnType _mColumnType; private String _mJsonParent; private boolean mIsIgnoreAsJsonField; /** * @param _mName : name of column * @param pType : Type of column : boolean TEXT etc... */ public TColumn(String pName, ValueType pType) { _mName = pName; _mType = pType; } public TColumn(String pName, ValueType pType,ColumnType pColumnType) { _mName = pName; _mType = pType; _mColumnType=pColumnType; } public TColumn(String pName, ValueType pType, TCell.CellType pCellType, OnCDTColumnObserver pListener) { _mName = pName; _mType = pType; _mListener = pListener; _mCellType = pCellType; } public TColumn(String pName, ValueType pType,ColumnType pColumnType, OnCDTColumnObserver pListener) { _mName = pName; _mType = pType; _mListener = pListener; _mColumnType=pColumnType; } public OnCDTColumnObserver getCDTColumnListener() { return _mListener; } public String getName() { return _mName; } public ColumnType getColumnType() { return _mColumnType; } public void setName(String _mName) { this._mName = _mName; } public ValueType getValueType() { return _mType; } public void setValueType(ValueType _mType) { this._mType = _mType; } public TCell.CellType getCellType() { return _mCellType; } public void setCellType(TCell.CellType _mType) { this._mCellType = _mType; } public String getJsonParent() { return _mJsonParent; } public void setJsonParent(String pRoot) { this._mJsonParent = pRoot; } public void setJsonParentAsMain() { this._mJsonParent = "MAIN"; } public boolean isIgnoredAsJsonField() { return mIsIgnoreAsJsonField; } public void toIgnoreInJSONParent() { this.mIsIgnoreAsJsonField = true; } }
apache-2.0
arcadoss/js-invulnerable
src/com/google/javascript/jscomp/OptimizeReturns.java
7796
/* * Copyright 2009 The Closure Compiler Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.javascript.jscomp; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; import com.google.javascript.jscomp.DefinitionsRemover.Definition; import com.google.javascript.rhino.Node; import com.google.javascript.rhino.Token; import java.util.Collection; /** * A compiler pass for optimize function return results. Currently this * pass looks for results that are complete unused and rewrite then to be: * "return x()" -->"x(); return" * , but it can easily be * expanded to look for use context to avoid unneed type coersion: * - "return x.toString()" --> "return x" * - "return !!x" --> "return x" * @author johnlenz@google.com (John Lenz) */ class OptimizeReturns implements OptimizeCalls.CallGraphCompilerPass, CompilerPass { private AbstractCompiler compiler; OptimizeReturns(AbstractCompiler compiler) { this.compiler = compiler; } @Override @VisibleForTesting public void process(Node externs, Node root) { SimpleDefinitionFinder defFinder = new SimpleDefinitionFinder(compiler); defFinder.process(externs, root); process(externs, root, defFinder); } @Override public void process( Node externs, Node root, SimpleDefinitionFinder definitions) { // Create a snapshot of the definition sites to iterate over // as they can be removed during processing. for (DefinitionSite defSite : definitions.getDefinitionSites().toArray(new DefinitionSite[0])) { if (definitions.getDefinitionSites().contains(defSite)) { optimizeResultsIfEligible(defSite, definitions); } } } /** * Rewrites method results sites if the method results are never used. * * Definition and use site information is provided by the * {@link SimpleDefinitionFinder} passed in as an argument. * * @param defSite definition site to process. * @param defFinder structure that hold Node -> Definition and * Definition -> [UseSite] maps. */ private void optimizeResultsIfEligible( DefinitionSite defSite, SimpleDefinitionFinder defFinder) { if (defSite.inExterns || callResultsMaybeUsed(defFinder, defSite)) { return; } rewriteReturns(defFinder, defSite.definition.getRValue()); } /** * Determines if a function result might be used. A result might be use if: * - Function must is exported. * - The definition is never accessed outside a function call context. */ private boolean callResultsMaybeUsed( SimpleDefinitionFinder defFinder, DefinitionSite definitionSite) { Definition definition = definitionSite.definition; // Assume non-function definitions results are used. Node rValue = definition.getRValue(); if (rValue == null || !NodeUtil.isFunction(rValue)) { return true; } // Be conservative, don't try to optimize any declaration that isn't as // simple function declaration or assignment. if (!SimpleDefinitionFinder.isSimpleFunctionDeclaration(rValue)) { return true; } // Assume an exported method result is used. 
if (SimpleDefinitionFinder.maybeExported(compiler, definition)) { return true; } Collection<UseSite> useSites = defFinder.getUseSites(definition); // Don't modify unused definitions for two reasons: // 1) It causes unnecessary churn // 2) Other definitions might be used to reflect on this one using // goog.reflect.object (the check for definitions with uses is below). if (useSites.isEmpty()) { return true; } for (UseSite site : useSites) { // This catches the case where an object literal in goog.reflect.object // and a prototype method have the same property name. // TODO(johnlenz): The keys of one object can be used to reflect on // another using "goog.reflect.object" or similar. It seems like this // should be prohibited but TrogEdit uses this. Node nameNode = site.node; Collection<Definition> singleSiteDefinitions = defFinder.getDefinitionsReferencedAt(nameNode); if (singleSiteDefinitions.size() > 1) { return true; } // Assume indirect definitions references use the result Node useNodeParent = site.node.getParent(); if (isCall(site)) { Node callNode = useNodeParent; Preconditions.checkState(callNode.getType() == Token.CALL); if (isValueUsed(callNode)) { return true; } } else { // Allow a standalone name reference. // var a; if (!NodeUtil.isVar(useNodeParent)) { return true; } } // TODO(johnlenz): Add specialization support. } // No possible use of the definition result return false; } /** * Determines if the name node acts as the function name in a call expression. */ private static boolean isValueUsed(Node node) { // TODO(johnlenz): consider sharing some code with trySimpleUnusedResult. Node parent = node.getParent(); switch (parent.getType()) { case Token.EXPR_RESULT: return false; case Token.HOOK: case Token.AND: case Token.OR: return (node == parent.getFirstChild()) ? true : isValueUsed(parent); case Token.COMMA: return (node == parent.getFirstChild()) ? false : isValueUsed(parent); case Token.FOR: if (NodeUtil.isForIn(parent)) { return true; } else { // Only an expression whose result is in the condition part of the // expression is used. return (parent.getChildAtIndex(1) == node); } default: return true; } } /** * For the supplied function node, rewrite all the return expressions so that: * return foo(); * becomes: * foo(); return; * Useless return will be removed later by the peephole optimization passes. */ private void rewriteReturns( final SimpleDefinitionFinder defFinder, Node fnNode) { Preconditions.checkState(NodeUtil.isFunction(fnNode)); NodeUtil.visitPostOrder( fnNode.getLastChild(), new NodeUtil.Visitor() { @Override public void visit(Node node) { if (node.getType() == Token.RETURN && node.hasOneChild()) { boolean keepValue = NodeUtil.mayHaveSideEffects( node.getFirstChild(), compiler); if (!keepValue) { defFinder.removeReferences(node.getFirstChild()); } Node result = node.removeFirstChild(); if (keepValue) { node.getParent().addChildBefore( new Node( Token.EXPR_RESULT, result).copyInformationFrom(result), node); } compiler.reportCodeChange(); } } }, new NodeUtil.MatchShallowStatement()); } /** * Determines if the name node acts as the function name in a call expression. */ private static boolean isCall(UseSite site) { Node node = site.node; Node parent = node.getParent(); return (parent.getFirstChild() == node) && NodeUtil.isCall(parent); } }
apache-2.0
owlabs/incubator-airflow
tests/www/api/experimental/test_dag_runs_endpoint.py
6218
# -*- coding: utf-8 -*- # # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. import json import unittest from parameterized import parameterized_class from airflow.api.common.experimental.trigger_dag import trigger_dag from airflow.models import DagBag, DagRun from airflow.models.serialized_dag import SerializedDagModel from airflow.settings import Session from airflow.www import app as application from tests.test_utils.config import conf_vars @parameterized_class([ {"dag_serialization": "False"}, {"dag_serialization": "True"}, ]) class TestDagRunsEndpoint(unittest.TestCase): dag_serialization = "False" @classmethod def setUpClass(cls): super(TestDagRunsEndpoint, cls).setUpClass() session = Session() session.query(DagRun).delete() session.commit() session.close() dagbag = DagBag(include_examples=True) for dag in dagbag.dags.values(): dag.sync_to_db() SerializedDagModel.write_dag(dag) def setUp(self): super(TestDagRunsEndpoint, self).setUp() app = application.create_app(testing=True) self.app = app.test_client() def tearDown(self): session = Session() session.query(DagRun).delete() session.commit() session.close() super(TestDagRunsEndpoint, self).tearDown() def test_get_dag_runs_success(self): with conf_vars( {("core", "store_serialized_dags"): self.dag_serialization} ): url_template = '/api/experimental/dags/{}/dag_runs' dag_id = 'example_bash_operator' # Create DagRun dag_run = trigger_dag( dag_id=dag_id, run_id='test_get_dag_runs_success') response = self.app.get(url_template.format(dag_id)) self.assertEqual(200, response.status_code) data = json.loads(response.data.decode('utf-8')) self.assertIsInstance(data, list) self.assertEqual(len(data), 1) self.assertEqual(data[0]['dag_id'], dag_id) self.assertEqual(data[0]['id'], dag_run.id) def test_get_dag_runs_success_with_state_parameter(self): with conf_vars( {("core", "store_serialized_dags"): self.dag_serialization} ): url_template = '/api/experimental/dags/{}/dag_runs?state=running' dag_id = 'example_bash_operator' # Create DagRun dag_run = trigger_dag( dag_id=dag_id, run_id='test_get_dag_runs_success') response = self.app.get(url_template.format(dag_id)) self.assertEqual(200, response.status_code) data = json.loads(response.data.decode('utf-8')) self.assertIsInstance(data, list) self.assertEqual(len(data), 1) self.assertEqual(data[0]['dag_id'], dag_id) self.assertEqual(data[0]['id'], dag_run.id) def test_get_dag_runs_success_with_capital_state_parameter(self): with conf_vars( {("core", "store_serialized_dags"): self.dag_serialization} ): url_template = '/api/experimental/dags/{}/dag_runs?state=RUNNING' dag_id = 'example_bash_operator' # Create DagRun dag_run = trigger_dag( dag_id=dag_id, run_id='test_get_dag_runs_success') response = self.app.get(url_template.format(dag_id)) self.assertEqual(200, response.status_code) data 
= json.loads(response.data.decode('utf-8')) self.assertIsInstance(data, list) self.assertEqual(len(data), 1) self.assertEqual(data[0]['dag_id'], dag_id) self.assertEqual(data[0]['id'], dag_run.id) def test_get_dag_runs_success_with_state_no_result(self): with conf_vars( {("core", "store_serialized_dags"): self.dag_serialization} ): url_template = '/api/experimental/dags/{}/dag_runs?state=dummy' dag_id = 'example_bash_operator' # Create DagRun trigger_dag(dag_id=dag_id, run_id='test_get_dag_runs_success') response = self.app.get(url_template.format(dag_id)) self.assertEqual(200, response.status_code) data = json.loads(response.data.decode('utf-8')) self.assertIsInstance(data, list) self.assertEqual(len(data), 0) def test_get_dag_runs_invalid_dag_id(self): with conf_vars( {("core", "store_serialized_dags"): self.dag_serialization} ): url_template = '/api/experimental/dags/{}/dag_runs' dag_id = 'DUMMY_DAG' response = self.app.get(url_template.format(dag_id)) self.assertEqual(400, response.status_code) data = json.loads(response.data.decode('utf-8')) self.assertNotIsInstance(data, list) def test_get_dag_runs_no_runs(self): with conf_vars( {("core", "store_serialized_dags"): self.dag_serialization} ): url_template = '/api/experimental/dags/{}/dag_runs' dag_id = 'example_bash_operator' response = self.app.get(url_template.format(dag_id)) self.assertEqual(200, response.status_code) data = json.loads(response.data.decode('utf-8')) self.assertIsInstance(data, list) self.assertEqual(len(data), 0) if __name__ == '__main__': unittest.main()
apache-2.0
rorygraves/perf_tester
corpus/shapeless/src/test/scala/shapeless/records.scala
31568
/* * Copyright (c) 2011-14 Miles Sabin * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package shapeless import org.junit.Test import org.junit.Assert._ import shapeless.ops.record.AlignByKeys class RecordTests { import labelled._ import ops.record.LacksKey import record._ import syntax.singleton._ import syntax.std.maps._ import test._ import testutil._ // making it method local causes weird compile error in Scala 2.10 import ops.record.{ RemoveAll, UnzipFields } object intField1 extends FieldOf[Int] object intField2 extends FieldOf[Int] object stringField1 extends FieldOf[String] object stringField2 extends FieldOf[String] object boolField1 extends FieldOf[Boolean] object boolField2 extends FieldOf[Boolean] object doubleField1 extends FieldOf[Double] object doubleField2 extends FieldOf[Double] case class Bar(a: Int, b: String) @Test def testGet { val r1 = (intField1 ->> 23) :: (stringField1 ->> "foo") :: (boolField1 ->> true) :: (doubleField1 ->> 2.0) :: HNil val v1 = r1.get(intField1) typed[Int](v1) assertEquals(23, v1) val v2 = r1.get(stringField1) typed[String](v2) assertEquals("foo", v2) val v3 = r1.get(boolField1) typed[Boolean](v3) assertEquals(true, v3) val v4 = r1.get(doubleField1) typed[Double](v4) assertEquals(2.0, v4, Double.MinPositiveValue) } @Test def testGetLiterals { val r1 = ("intField1" ->> 23) :: ("stringField1" ->> "foo") :: ("boolField1" ->> true) :: ("doubleField1" ->> 2.0) :: HNil val v1 = r1.get("intField1") typed[Int](v1) assertEquals(23, v1) val v2 = r1.get("stringField1") typed[String](v2) assertEquals("foo", v2) val v3 = r1.get("boolField1") typed[Boolean](v3) assertEquals(true, v3) val v4 = r1.get("doubleField1") typed[Double](v4) assertEquals(2.0, v4, Double.MinPositiveValue) } @Test def testFieldAt { val r1 = (stringField1 ->> "toto") :: (boolField1 ->> true) :: HNil val v1 = r1.fieldAt(stringField1) val v2 = r1.fieldAt(boolField1) typed[stringField1.F](v1) typed[boolField1.F](v2) assertEquals("toto", v1) assertEquals(true, v2) assertEquals(r1, v1 :: v2 :: HNil) } @Test def testAt { val r1 = (intField1 ->> 23) :: (stringField1 ->> "foo") :: (boolField1 ->> true) :: (doubleField1 ->> 2.0) :: HNil val v1 = r1.at(0) typed[Int](v1) assertEquals(23, v1) val v2 = r1.at(1) typed[String](v2) assertEquals("foo", v2) val v3 = r1.at(2) typed[Boolean](v3) assertEquals(true, v3) val v4 = r1.at(3) typed[Double](v4) assertEquals(2.0, v4, Double.MinPositiveValue) } @Test def testFromMap { type T1 = Record.`'stringVal -> String, 'intVal -> Int, 'boolVal -> Boolean`.T val in = Map('intVal -> 4, 'stringVal -> "Blarr", 'boolVal -> true) val recOption = in.toRecord[T1] assert(recOption.isDefined) val rec: T1 = recOption.get typed[T1](rec) assert(rec('stringVal) == "Blarr", "stringVal mismatch") assert(rec('intVal) == 4, "int val mismatch") assert(rec('boolVal), "Boolean val match") val in2 = Map('intVal -> 4, 'stringVal -> "Blarr") val recEither2 = in2.toRecord[T1] assert(recEither2.isEmpty) } @Test def testFromMap2 { import test._ type T = intField1.F :: stringField1.F :: boolField1.F :: 
doubleField1.F :: HNil val in = Map(intField1 -> 4, stringField1 -> "Blarr", boolField1 -> true, doubleField1 -> 5.0) import syntax.std.maps._ val recOption = in.toRecord[T] assert(recOption.isDefined) val rec: T = recOption.get typed[T](rec) assert(rec(intField1) == 4) assert(rec(stringField1) == "Blarr") assert(rec(doubleField1) == 5.0) } @Test def testAtLiterals { val r1 = ("intField1" ->> 23) :: ("stringField1" ->> "foo") :: ("boolField1" ->> true) :: ("doubleField1" ->> 2.0) :: HNil val v1 = r1.at(0) typed[Int](v1) assertEquals(23, v1) val v2 = r1.at(1) typed[String](v2) assertEquals("foo", v2) val v3 = r1.at(2) typed[Boolean](v3) assertEquals(true, v3) val v4 = r1.at(3) typed[Double](v4) assertEquals(2.0, v4, Double.MinPositiveValue) } @Test def testUpdate { val r1 = (intField1 ->> 23) :: (stringField1 ->> "foo") :: (boolField1 ->> true) :: (doubleField1 ->> 2.0) :: HNil val r2 = r1.updated(intField1, 7) val v1 = r2.get(intField1) typed[Int](v1) assertEquals(7, v1) val r3 = r1.updated(stringField1, "wibble") val v2 = r3.get(stringField1) typed[String](v2) assertEquals("wibble", v2) val r4 = r1.updated(boolField1, false) val v3 = r4.get(boolField1) typed[Boolean](v3) assertEquals(false, v3) val r5 = r1.updated(doubleField1, 1.0) val v4 = r5.get(doubleField1) typed[Double](v4) assertEquals(1.0, v4, Double.MinPositiveValue) val r6 = HNil val r7 = r6.updated(boolField2, false) val v5 = r7.get(boolField2) typed[Boolean](v5) assertEquals(false, v5) val r8 = r7.updated(doubleField2, 3.0) val v6 = r8.get(doubleField2) typed[Double](v6) assertEquals(3.0, v6, Double.MinPositiveValue) } @Test def testUpdateLiteral { val r1 = ("intField1" ->> 23) :: ("stringField1" ->> "foo") :: ("boolField1" ->> true) :: ("doubleField1" ->> 2.0) :: HNil val r2 = r1.updated("intField1", 7) val v1 = r2.get("intField1") typed[Int](v1) assertEquals(7, v1) val r3 = r1.updated("stringField1", "wibble") val v2 = r3.get("stringField1") typed[String](v2) assertEquals("wibble", v2) val r4 = r1.updated("boolField1", false) val v3 = r4.get("boolField1") typed[Boolean](v3) assertEquals(false, v3) val r5 = r1.updated("doubleField1", 1.0) val v4 = r5.get("doubleField1") typed[Double](v4) assertEquals(1.0, v4, Double.MinPositiveValue) val r6 = HNil val r7 = r6.updated("boolField2", false) val v5 = r7.get("boolField2") typed[Boolean](v5) assertEquals(false, v5) val r8 = r7.updated("doubleField2", 3.0) val v6 = r8.get("doubleField2") typed[Double](v6) assertEquals(3.0, v6, Double.MinPositiveValue) } @Test def testMerge { val r1 = 'a ->> 23 :: 'b ->> "foo" :: 'c ->> true :: HNil val r2 = 'c ->> false :: 'a ->> 13 :: HNil val rExp = 'a ->> 13 :: 'b ->> "foo" :: 'c ->> false :: HNil val rm = r1.merge(r2) typed[Record.`'a -> Int, 'b -> String, 'c -> Boolean`.T](rm) assertEquals(rExp, rm) } @Test def testDeepMerge { val r3 = Record(d = Record(x = "X1", m = "M"), e = true, x = "X") val r4 = Record(d = "D", e = false, x = 2, m = 6) val r5 = Record(d = "A", d = "B", d = "C") assertTypedEquals(r4.merge(r3))(r4.deepMerge(r3)) assertTypedEquals(r3.merge(r4))(r3.deepMerge(r4)) assertTypedEquals(r3.merge(r5))(r3.deepMerge(r5)) assertTypedEquals(r5.merge(r3))(r5.deepMerge(r3)) //nested val inner1 = Record(d = "D", e = false) val inner2 = Record(d = 3, m = 2D) val outer1 = Record(d = 10, e = inner1, x = "boo") val outer2 = Record(x = "foo", d = -1, e = inner2) val innerMerged12 = inner1.merge(inner2) val innerMerged21 = inner2.merge(inner1) assertTypedEquals(Record(d = -1, e = innerMerged12, x = "foo"))(outer1.deepMerge(outer2)) 
assertTypedEquals(Record(x = "boo", d = 10, e = innerMerged21))(outer2.deepMerge(outer1)) //complete intersection val inner11 = Record(d = "D2", e = true) val outer11 = Record(d = 11, e = inner11, x = "bar") assertTypedEquals(outer11)(outer1.deepMerge(outer11)) assertTypedEquals(outer1)(outer11.deepMerge(outer1)) //retain type of subrecord if it appears as first parameter val inner12 = Record(e = true, d = "D12", x = 5) test.sameTyped(inner12)(inner12.deepMerge(inner1)) } @Test def testExtract { val inner1 = Record(d = 3, m = 2D, x= "X") val outer1 = Record(x = "foo", d = -1, e = inner1) type i = Record.`'x -> String, 'd -> Int`.T type i1 = Record.`'x -> Any, 'd -> Any`.T val extRes = Record(e = Record(x = "X", d = 3), d = -1) assertTypedEquals(extRes)(outer1.extract[Record.`'e -> i, 'd -> Int`.T]) //covariance assertEquals(extRes, outer1.extract[Record.`'e -> i1, 'd -> Any`.T]) type ill1 = Record.`'d -> Int, 'z -> Int`.T type ill2 = Record.`'x -> i`.T type illIner = Record.`'m -> String, 'd -> Int`.T type ill3 = Record.`'e -> illIner, 'd -> Int`.T illTyped("outer1.extract[ill1]") illTyped("outer1.deepExtract[ill2]") illTyped("outer1.deepExtract[ill3]") } @Test def testMergeWith { object mergeField extends Poly2 { implicit def xor = at[Boolean, Boolean] { _ ^ _ } implicit def toDouble = at[Int, String] { _.toDouble + _.toDouble } } { val r1 = 'c ->> true :: HNil val r2 = 'c ->> false :: HNil val rExp = 'c ->> true :: HNil val rm = r1.mergeWith(r2)(mergeField) typed[Record.`'c -> Boolean`.T](rm) assertEquals(rExp, rm) } { val r1 = 'a ->> 23 :: 'b ->> "foo" :: 'c ->> true :: HNil val r2 = 'c ->> false :: 'a ->> "13" :: HNil val rExp = 'a ->> 36.0 :: 'b ->> "foo" :: 'c ->> true :: HNil val rm = r1.mergeWith(r2)(mergeField) typed[Record.`'a -> Double, 'b -> String, 'c -> Boolean`.T](rm) assertEquals(rExp, rm) } } @Test def testConcatenate { val r1 = (intField1 ->> 23) :: (stringField1 ->> "foo") :: (boolField1 ->> true) :: (doubleField1 ->> 2.0) :: HNil val r2 = (intField2 ->> 13) :: (stringField2 ->> "bar") :: r1 val v1 = r2.get(intField2) typed[Int](v1) assertEquals(13, v1) val v2 = r2.get(stringField2) typed[String](v2) assertEquals("bar", v2) } @Test def testConcatenateLiteral { val r1 = ("intField1" ->> 23) :: ("stringField1" ->> "foo") :: ("boolField1" ->> true) :: ("doubleField1" ->> 2.0) :: HNil val r2 = ("intField2" ->> 13) :: ("stringField2" ->> "bar") :: r1 val v1 = r2.get("intField2") typed[Int](v1) assertEquals(13, v1) val v2 = r2.get("stringField2") typed[String](v2) assertEquals("bar", v2) } @Test def testAppend { val r1 = (intField1 ->> 23) :: (stringField1 ->> "foo") :: HNil val r2 = r1 + (boolField1 ->> true) typed[intField1.F :: stringField1.F :: boolField1.F :: HNil](r2) assertEquals((intField1 ->> 23) :: (stringField1 ->> "foo") :: (boolField1 ->> true) :: HNil, r2) val r3 = r2 + (doubleField1 ->> 2.0) typed[intField1.F :: stringField1.F :: boolField1.F :: doubleField1.F :: HNil](r3) assertEquals((intField1 ->> 23) :: (stringField1 ->> "foo") :: (boolField1 ->> true) :: (doubleField1 ->> 2.0) :: HNil, r3) } val wIntField1 = Witness("intField1") val wStringField1 = Witness("stringField1") val wBoolField1 = Witness("boolField1") val wDoubleField1 = Witness("doubleField1") @Test def testAppendLiteral { val r1 = ("intField1" ->> 23) :: ("stringField1" ->> "foo") :: HNil val r2 = r1 + ("boolField1" ->> true) typed[FieldType[wIntField1.T, Int] :: FieldType[wStringField1.T, String] :: FieldType[wBoolField1.T, Boolean] :: HNil](r2) assertEquals(("intField1" ->> 23) :: 
("stringField1" ->> "foo") :: ("boolField1" ->> true) :: HNil, r2) val r3 = r2 + ("doubleField1" ->> 2.0) typed[FieldType[wIntField1.T, Int] :: FieldType[wStringField1.T, String] :: FieldType[wBoolField1.T, Boolean] :: FieldType[wDoubleField1.T, Double] :: HNil](r3) assertEquals(("intField1" ->> 23) :: ("stringField1" ->> "foo") :: ("boolField1" ->> true) :: ("doubleField1" ->> 2.0) :: HNil, r3) } @Test def testRemove { val r1 = (intField1 ->> 23) :: (stringField1 ->> "foo") :: (boolField1 ->> true) :: (doubleField1 ->> 2.0) :: HNil val rm1 = r1.remove(intField1) typed[(Int, stringField1.F :: boolField1.F :: doubleField1.F :: HNil)](rm1) assertEquals(23, rm1._1) assertEquals((stringField1 ->> "foo") :: (boolField1 ->> true) :: (doubleField1 ->> 2.0) :: HNil, rm1._2) val rm2 = r1.remove(stringField1) typed[(String, intField1.F :: boolField1.F :: doubleField1.F :: HNil)](rm2) assertEquals("foo", rm2._1) assertEquals((intField1 ->> 23) :: (boolField1 ->> true) :: (doubleField1 ->> 2.0) :: HNil, rm2._2) val rm3 = r1.remove(boolField1) typed[(Boolean, intField1.F :: stringField1.F :: doubleField1.F :: HNil)](rm3) assertEquals(true, rm3._1) assertEquals((intField1 ->> 23) :: (stringField1 ->> "foo") :: (doubleField1 ->> 2.0) :: HNil, rm3._2) val rm4 = r1.remove(doubleField1) typed[(Double, intField1.F :: stringField1.F :: boolField1.F :: HNil)](rm4) assertEquals(2.0, rm4._1, Double.MinPositiveValue) assertEquals((intField1 ->> 23) :: (stringField1 ->> "foo") :: (boolField1 ->> true) :: HNil, rm4._2) val r2 = r1 - intField1 typed[stringField1.F :: boolField1.F :: doubleField1.F :: HNil](r2) assertEquals((stringField1 ->> "foo") :: (boolField1 ->> true) :: (doubleField1 ->> 2.0) :: HNil, r2) val r3 = r1 - stringField1 typed[intField1.F :: boolField1.F :: doubleField1.F :: HNil](r3) assertEquals((intField1 ->> 23) :: (boolField1 ->> true) :: (doubleField1 ->> 2.0) :: HNil, r3) val r4 = r1 - boolField1 typed[intField1.F :: stringField1.F :: doubleField1.F :: HNil](r4) assertEquals((intField1 ->> 23) :: (stringField1 ->> "foo") :: (doubleField1 ->> 2.0) :: HNil, r4) val r5 = r1 - doubleField1 typed[intField1.F :: stringField1.F :: boolField1.F :: HNil](r5) assertEquals((intField1 ->> 23) :: (stringField1 ->> "foo") :: (boolField1 ->> true) :: HNil, r5) } @Test def testRemoveLiteral { val r1 = ("intField1" ->> 23) :: ("stringField1" ->> "foo") :: ("boolField1" ->> true) :: ("doubleField1" ->> 2.0) :: HNil val rm1 = r1.remove("intField1") typed[(Int, FieldType[wStringField1.T, String] :: FieldType[wBoolField1.T, Boolean] :: FieldType[wDoubleField1.T, Double] :: HNil)](rm1) assertEquals(23, rm1._1) assertEquals(("stringField1" ->> "foo") :: ("boolField1" ->> true) :: ("doubleField1" ->> 2.0) :: HNil, rm1._2) val rm2 = r1.remove("stringField1") typed[(String, FieldType[wIntField1.T, Int] :: FieldType[wBoolField1.T, Boolean] :: FieldType[wDoubleField1.T, Double] :: HNil)](rm2) assertEquals("foo", rm2._1) assertEquals(("intField1" ->> 23) :: ("boolField1" ->> true) :: ("doubleField1" ->> 2.0) :: HNil, rm2._2) val rm3 = r1.remove("boolField1") typed[(Boolean, FieldType[wIntField1.T, Int] :: FieldType[wStringField1.T, String] :: FieldType[wDoubleField1.T, Double] :: HNil)](rm3) assertEquals(true, rm3._1) assertEquals(("intField1" ->> 23) :: ("stringField1" ->> "foo") :: ("doubleField1" ->> 2.0) :: HNil, rm3._2) val rm4 = r1.remove("doubleField1") typed[(Double, FieldType[wIntField1.T, Int] :: FieldType[wStringField1.T, String] :: FieldType[wBoolField1.T, Boolean] :: HNil)](rm4) assertEquals(2.0, rm4._1, 
Double.MinPositiveValue) assertEquals(("intField1" ->> 23) :: ("stringField1" ->> "foo") :: ("boolField1" ->> true) :: HNil, rm4._2) val r2 = r1 - "intField1" typed[FieldType[wStringField1.T, String] :: FieldType[wBoolField1.T, Boolean] :: FieldType[wDoubleField1.T, Double] :: HNil](r2) assertEquals(("stringField1" ->> "foo") :: ("boolField1" ->> true) :: ("doubleField1" ->> 2.0) :: HNil, r2) val r3 = r1 - "stringField1" typed[FieldType[wIntField1.T, Int] :: FieldType[wBoolField1.T, Boolean] :: FieldType[wDoubleField1.T, Double] :: HNil](r3) assertEquals(("intField1" ->> 23) :: ("boolField1" ->> true) :: ("doubleField1" ->> 2.0) :: HNil, r3) val r4 = r1 - "boolField1" typed[FieldType[wIntField1.T, Int] :: FieldType[wStringField1.T, String] :: FieldType[wDoubleField1.T, Double] :: HNil](r4) assertEquals(("intField1" ->> 23) :: ("stringField1" ->> "foo") :: ("doubleField1" ->> 2.0) :: HNil, r4) val r5 = r1 - "doubleField1" typed[FieldType[wIntField1.T, Int] :: FieldType[wStringField1.T, String] :: FieldType[wBoolField1.T, Boolean] :: HNil](r5) assertEquals(("intField1" ->> 23) :: ("stringField1" ->> "foo") :: ("boolField1" ->> true) :: HNil, r5) } @Test def testReplace { type R = Record.`'a -> Int, 'b -> String`.T val a = Record(a = 1, b = "2") val r = a.replace('a, 2) typed[R](r) assertEquals(Record(a = 2, b = "2"), r) illTyped(""" a.replace('a, ()) """) } @Test def testLacksKey { def without[R <: HList, O <: HList](k: Witness)(r: R)(f: R => O)(implicit ev: LacksKey[R, k.T]): O = f(r) type R1 = Record.`'a -> Int, 'b -> String, 'c -> Boolean`.T type R2 = Record.`'c -> Boolean, 'a -> Int, 'b -> String`.T val a = Record(a = 1, b = "2") val r1 = without('c)(a)(_ :+ ('c ->> true)) typed[R1](r1) assertEquals(Record(a = 1, b = "2", c = true), r1) val r2 = without('c)(a)(('c ->> true) +: _) typed[R2](r2) assertEquals(Record(c = true, a = 1, b = "2"), r2) illTyped(""" without('a)(a)(identity) """) } @Test def testRemoveAll { type R = Record.`'i -> Int, 's -> String, 'c -> Char, 'j -> Int`.T type L = Record.`'c -> Char, 'j -> Int`.T type A1 = Record.`'i -> Int, 's -> String`.T type A2 = Int :: String :: HNil val r = 'i ->> 10 :: 's ->> "foo" :: 'c ->> 'x' :: 'j ->> 42 :: HNil val removeAll1 = RemoveAll[R, A1] val removeAll2 = RemoveAll[R, A2] val (removed1, remaining1) = removeAll1(r) val (removed2, remaining2) = removeAll2(r) val r1 = removeAll1.reinsert((removed1, remaining1)) val r2 = removeAll2.reinsert((removed2, remaining2)) typed[A1](removed1) assertEquals('i ->> 10 :: 's ->> "foo" :: HNil, removed1) typed[A2](removed2) assertEquals(10 :: "foo" :: HNil, removed2) typed[L](remaining1) assertEquals('c ->> 'x' :: 'j ->> 42 :: HNil, remaining1) typed[L](remaining2) assertEquals('c ->> 'x' :: 'j ->> 42 :: HNil, remaining2) typed[R](r1) assertEquals(r, r1) typed[R](r2) assertEquals(r, r2) } @Test def testMappingOverRecordFields { object toUpper extends Poly1 { implicit def stringToUpper[F] = at[FieldType[F, String]] { f => field[F](f.toUpperCase) } implicit def otherTypes[X] = at[X](identity) } val r = ("foo" ->> "joe") :: ("bar" ->> true) :: ("baz" ->> 2.0) :: HNil val r2 = r map toUpper val v1 = r2("foo") typed[String](v1) assertEquals("JOE", v1) val v2 = r2("bar") typed[Boolean](v2) assertEquals(true, v2) val v3 = r2("baz") typed[Double](v3) assertEquals(2.0, v3, Double.MinPositiveValue) } @Test def testUpdateFieldByFunction { val r = ("foo" ->> 23) :: ("bar" ->> true) :: ("baz" ->> 2.0) :: HNil val r2 = r.updateWith("foo")((i: Int) => i.toString) val r2b = r.updateWith("foo")(i => i.toString) 
val r2c = r.updateWith("foo")(_.toString) val v21 = r2c.get("foo") typed[String](v21) assertEquals("23", v21) val v22 = r2c("bar") typed[Boolean](v22) assertEquals(true, v22) val v23 = r2c("baz") typed[Double](v23) assertEquals(2.0, v23, Double.MinPositiveValue) val r3 = r.updateWith("foo")((i: Int) => i+1) val r3b = r.updateWith("foo")(i => i+1) val r3c = r.updateWith("foo")(_ + 1) val v31 = r3c.get("foo") typed[Int](v31) assertEquals(24, v31) val v32 = r3c("bar") typed[Boolean](v32) assertEquals(true, v32) val v33 = r3c("baz") typed[Double](v33) assertEquals(2.0, v33, Double.MinPositiveValue) } @Test def testWidening { val ps = List( ("name" ->> "Mel") :: ("age" ->> 90L) :: ("teeth" ->> 2) :: HNil, ("name" ->> "Jude") :: ("age" ->> 99L) :: ("teeth" ->> 3) :: HNil, ("name" ->> "Bif") :: ("age" ->> 1L) :: ("teeth" ->> 1) :: HNil ) ps.sortBy(_("age")) ps.sortBy(_("teeth")) } @Test def testRenameField { val r = ("foo" ->> 23) :: ("bar" ->> true) :: HNil val r1 = r.renameField("foo", "foobar") val v1 = r1.get("foobar") typed[Int](v1) assertEquals(23, v1) val v2 = r1.get("bar") typed[Boolean](v2) assertEquals(true, v2) } @Test def testFieldPoly { import poly._ object f extends FieldPoly { implicit def atFoo = atField[Int]("foo")(_ + 1) } val r = "foo" ->> 23 val r1 = f(r) val fooType = "foo".witness typed[FieldType[fooType.T, Int]](r1) assertEquals(24, r1) } @Test def testFieldPolyOnRecord { import poly._ object f extends FieldPoly { implicit def atFoo = atField[Int]("foo")(_ + 1) } val r = ("foo" ->> 23) :: ("bar" ->> true) :: HNil val r1 = everywhere(f)(r) val v1 = r1("foo") typed[Int](v1) assertEquals(24, v1) val v2 = r1("bar") typed[Boolean](v2) assertEquals(true, v2) } @Test def testFieldPolyNested { import poly._ object f extends FieldPoly { implicit def atFoo = atField[Int]("foo")(_ + 1) } val r = List(List(List(("foo" ->> 23) :: ("bar" ->> true) :: HNil))) val List(List(List(r1))) = everywhere(f)(r) val v1 = r1("foo") typed[Int](v1) assertEquals(24, v1) val v2 = r1("bar") typed[Boolean](v2) assertEquals(true, v2) } @Test def testSelectDynamic { val r = ('foo ->> 23) :: ('bar ->> true) :: HNil val d = r.record val v1 = d.foo typed[Int](v1) assertEquals(23, v1) val v2 = d.bar typed[Boolean](v2) assertEquals(true, v2) illTyped("d.baz") } @Test def testRecordTypeSelector { typed[Record.` `.T](HNil) typed[Record.`'i -> Int`.T]('i ->> 23 :: HNil) typed[Record.`'i -> Int, 's -> String`.T]('i ->> 23 :: 's ->> "foo" :: HNil) typed[Record.`'i -> Int, 's -> String, 'b -> Boolean`.T]('i ->> 23 :: 's ->> "foo" :: 'b ->> true :: HNil) // Literal types typed[Record.`'i -> 2`.T]('i ->> 2.narrow :: HNil) typed[Record.`'i -> 2, 's -> "a", 'b -> true`.T]('i ->> 2.narrow :: 's ->> "a".narrow :: 'b ->> true.narrow :: HNil) illTyped(""" typed[Record.`'i -> 2`.T]('i ->> 3.narrow :: HNil) """) // Mix of standard and literal types typed[Record.`'i -> 2, 's -> String, 'b -> true`.T]('i ->> 2.narrow :: 's ->> "a" :: 'b ->> true.narrow :: HNil) } @Test def testNamedArgs { { val r = Record() typed[HNil](r) } { val r = Record(i = 23, s = "foo", b = true) typed[Record.`'i -> Int, 's -> String, 'b -> Boolean`.T](r) } { illTyped(""" Record(2, "a") """) } } @Test def testNamedArgsInject { val r = Record(i = 23, s = "foo", b = true) val v1 = r.get('i) typed[Int](v1) assertEquals(23, v1) val v2 = r.get('s) typed[String](v2) assertEquals("foo", v2) val v3 = r.get('b) typed[Boolean](v3) assertEquals(true, v3) illTyped(""" r.get('foo) """) } object Foo extends RecordArgs { def applyRecord[R <: HList](rec: R): R = rec } 
@Test def testRecordArgs { val r = Foo(i = 23, s = "foo", b = true) typed[Record.`'i -> Int, 's -> String, 'b -> Boolean`.T](r) val v1 = r.get('i) typed[Int](v1) assertEquals(23, v1) val v2 = r.get('s) typed[String](v2) assertEquals("foo", v2) val v3 = r.get('b) typed[Boolean](v3) assertEquals(true, v3) illTyped(""" r.get('foo) """) } object Bar extends FromRecordArgs { def sum(i1: Int, i2: Int) = i1 + i2 def sumImplicit(i1: Int)(implicit i2: Int) = i1 + i2 def sumMultipleParamList(i1: Int)(i2: Int) = i1 + i2 } @Test def testFromRecordArgs { val r = ('i1 ->> 1) :: ('i2 ->> 3) :: HNil val v1 = Bar.sumRecord(r) typed[Int](v1) assertEquals(4, v1) val r2 = r.merge(('i2 ->> 2) :: HNil) val v2 = Bar.sumMultipleParamListRecord(r2) typed[Int](v2) assertEquals(3, v2) illTyped(""" Bar.sumImplicitRecord(('i1 ->> 1) :: ('i2 ->> 3) :: HNil) """) implicit val i2 = 7 val v3 = Bar.sumImplicitRecord(r) typed[Int](v2) assertEquals(8, v3) illTyped(""" Bar.sumRecord(('i1 ->> 1) :: ('i3 ->> 3) :: HNil) """) illTyped(""" Bar.sumMultipleParamListRecord(('i1 ->> 1) :: ('i3 ->> 3) :: HNil) """) } @Test def testFields { { val f = HNil.fields assertTypedEquals(HNil, f) } { val f = (HNil: HNil).fields assertTypedEquals(HNil: HNil, f) } val r = Record(i = 23, s = "foo", b = true) { val f = r.fields assertTypedEquals(('i.narrow -> 23) :: ('s.narrow -> "foo") :: ('b.narrow -> true) :: HNil, f) } val rs = ("first" ->> Some(2)) :: ("second" ->> Some(true)) :: ("third" ->> Option.empty[String]) :: HNil { val f = rs.fields assertTypedEquals(("first".narrow -> Some(2)) :: ("second".narrow -> Some(true)) :: ("third" -> Option.empty[String]) :: HNil, f) } } @Test def testUnzipFields { { val uf = UnzipFields[HNil] assertTypedEquals(HNil, uf.keys) assertTypedEquals(HNil, uf.values(HNil)) } { val uf = UnzipFields[HNil] assertTypedEquals(HNil: HNil, uf.keys) assertTypedEquals(HNil: HNil, uf.values(HNil: HNil)) } type R = Record.`'i -> Int, 's -> String, 'b -> Boolean`.T val r: R = Record(i = 23, s = "foo", b = true) { val uf = UnzipFields[R] assertTypedEquals('i.narrow :: 's.narrow :: 'b.narrow :: HNil, uf.keys) assertTypedEquals(23 :: "foo" :: true :: HNil, uf.values(r)) } type RS = Record.`"first" -> Option[Int], "second" -> Option[Boolean], "third" -> Option[String]`.T val rs: RS = ("first" ->> Some(2)) :: ("second" ->> Some(true)) :: ("third" ->> Option.empty[String]) :: HNil { val uf = UnzipFields[RS] assertTypedEquals("first".narrow :: "second".narrow :: "third" :: HNil, uf.keys) assertTypedEquals(Some(2) :: Some(true) :: Option.empty[String] :: HNil, uf.values(rs)) } } @Test def testToMap { { val m = HNil.toMap assertTypedEquals(Map.empty[Any, Nothing], m) } { val m = HNil.toMap[String, Nothing] assertTypedEquals(Map.empty[String, Nothing], m) } { val m = HNil.toMap[String, Int] assertTypedEquals(Map.empty[String, Int], m) } val r = Record(i = 23, s = "foo", b = true) { val m = r.toMap assertTypedEquals(Map[Symbol, Any]('i -> 23, 's -> "foo", 'b -> true), m) } { val m = r.toMap[Symbol, Any] assertTypedEquals(Map[Symbol, Any]('i -> 23, 's -> "foo", 'b -> true), m) } val rs = ("first" ->> Some(2)) :: ("second" ->> Some(true)) :: ("third" ->> Option.empty[String]) :: HNil { val m = rs.toMap assertTypedEquals(Map[String, Option[Any]]("first" -> Some(2), "second" -> Some(true), "third" -> Option.empty[String]), m) } { val m = rs.toMap[String, Option[Any]] assertTypedEquals(Map[String, Option[Any]]("first" -> Some(2), "second" -> Some(true), "third" -> Option.empty[String]), m) } } @Test def testMapValues { object f extends Poly1 
{ implicit def int = at[Int](i => i > 0) implicit def string = at[String](s => s"s: $s") implicit def boolean = at[Boolean](v => if (v) "Yup" else "Nope") } { val r = HNil val res = r.mapValues(f) assertTypedEquals[HNil](HNil, res) } { val r = Record(i = 23, s = "foo", b = true) val res = r.mapValues(f) assertTypedEquals[Record.`'i -> Boolean, 's -> String, 'b -> String`.T](Record(i = true, s = "s: foo", b = "Yup"), res) } { object toUpper extends Poly1 { implicit def stringToUpper = at[String](_.toUpperCase) implicit def otherTypes[X] = at[X](identity) } val r = ("foo" ->> "joe") :: ("bar" ->> true) :: ("baz" ->> 2.0) :: HNil val r2 = r mapValues toUpper val v1 = r2("foo") typed[String](v1) assertEquals("JOE", v1) val v2 = r2("bar") typed[Boolean](v2) assertEquals(true, v2) val v3 = r2("baz") typed[Double](v3) assertEquals(2.0, v3, Double.MinPositiveValue) } } @Test def testSwapRecord { import shapeless.ops.record.SwapRecord val rt = Record.`'x -> Int, 'y -> String, 'z -> Boolean` type TestRecord = rt.T val (x, y, z) = (Witness('x), Witness('y), Witness('z)) val fields: (FieldType[Int, x.T] :: FieldType[String, y.T] :: FieldType[Boolean, z.T] :: HNil) = SwapRecord[TestRecord].apply assertEquals(fields.toList, List('x, 'y, 'z)) } @Test def alignByKeys: Unit = { type TestRecord = Record.`'a -> String, 'b -> Int, 'c -> Double`.T type Keys1 = HList.`'a, 'b, 'c`.T type Keys2 = HList.`'b, 'c, 'a`.T type Keys3 = HList.`'b, 'a, 'c`.T type Keys4 = HList.`'c, 'a, 'b`.T val v = Record(a = "foo", b = 42, c = 33.3) assertTypedEquals[TestRecord](v, AlignByKeys[TestRecord, Keys1].apply(v)) assertTypedEquals[Record.`'b -> Int, 'c -> Double, 'a -> String`.T](Record(b = 42, c = 33.3, a = "foo"), AlignByKeys[TestRecord, Keys2].apply(v)) assertTypedEquals[Record.`'b -> Int, 'a -> String, 'c -> Double`.T](Record(b = 42, a = "foo", c = 33.3), v.alignByKeys[Keys3]) assertTypedEquals[Record.`'c -> Double, 'a -> String, 'b -> Int`.T](Record(c = 33.3, a = "foo", b = 42), v.alignByKeys[Keys4]) } @Test def testSelectorWithTaggedType { import tag.@@ val tagged = tag[Int]("42") val head1 = 'k ->> tagged val head2 = field[Witness.`'k`.T](tagged) val rec1 = head1 :: HNil val rec2 = head2 :: HNil assertTypedEquals[String @@ Int](rec1('k), rec2('k)) } @Test def testSelectorWithTaggedType2 { import tag.@@ trait TestTag case class FooT(bar: String @@ TestTag) val lgt = LabelledGeneric[FooT] val fooT = FooT(tag[TestTag]("test")) assertEquals(tag[TestTag]("test"), lgt.to(fooT).get('bar)) } @Test def testSelectorForSwappedRecord { import ops.record.{ Selector, SwapRecord } val gen = LabelledGeneric[Bar] val swap = SwapRecord[gen.Repr] val select = Selector[swap.Out, Int] val swapped = swap() assertTypedEquals[Witness.`'a`.T](swapped.head, select(swapped)) } }
apache-2.0
asmitde/TA-PSU-CMPSC101
Fall 2016/Homeworks/HW4/Solution/problem5.py
1132
month = int(input("Enter month: "))
year = int(input("Enter year: "))

if month == 1:
    monthName = "January"
    numberOfDaysInMonth = 31
elif month == 2:
    monthName = "February"
    if year % 400 == 0 or (year % 4 == 0 and year % 100 != 0):
        numberOfDaysInMonth = 29
    else:
        numberOfDaysInMonth = 28
elif month == 3:
    monthName = "March"
    numberOfDaysInMonth = 31
elif month == 4:
    monthName = "April"
    numberOfDaysInMonth = 30
elif month == 5:
    monthName = "May"
    numberOfDaysInMonth = 31
elif month == 6:
    monthName = "June"
    numberOfDaysInMonth = 30
elif month == 7:
    monthName = "July"
    numberOfDaysInMonth = 31
elif month == 8:
    monthName = "August"
    numberOfDaysInMonth = 31
elif month == 9:
    monthName = "September"
    numberOfDaysInMonth = 30
elif month == 10:
    monthName = "October"
    numberOfDaysInMonth = 31
elif month == 11:
    monthName = "November"
    numberOfDaysInMonth = 30
else:
    monthName = "December"
    numberOfDaysInMonth = 31

print(monthName, year, "has", numberOfDaysInMonth, "days")
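# Illustrative runs of the leap-year branch above (editorial examples, not part
# of the original homework solution):
#   month=2, year=2000 -> "February 2000 has 29 days"  (2000 % 400 == 0)
#   month=2, year=1900 -> "February 1900 has 28 days"  (divisible by 100 but not 400)
#   month=2, year=2016 -> "February 2016 has 29 days"  (divisible by 4 but not 100)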
apache-2.0
nafae/developer
modules/dfp_axis/src/main/java/com/google/api/ads/dfp/axis/v201311/CreativeTemplateVariable.java
12574
/** * CreativeTemplateVariable.java * * This file was auto-generated from WSDL * by the Apache Axis 1.4 Mar 02, 2009 (07:08:06 PST) WSDL2Java emitter. */ package com.google.api.ads.dfp.axis.v201311; /** * Represents a variable defined in a creative template. */ public abstract class CreativeTemplateVariable implements java.io.Serializable { /* Label that is displayed to users when creating {@link TemplateCreative} * from the * {@link CreativeTemplate}. This attribute is required * and has a maximum * length of 127 characters. */ private java.lang.String label; /* Unique name used to identify the variable. This attribute is * read-only and * is assigned by Google, by deriving from label, when * a creative template * variable is created. */ private java.lang.String uniqueName; /* A descriptive help text that is displayed to users along with * the * label. This attribute is required and has a maximum * length of 255 characters. */ private java.lang.String description; /* {@code true} if this variable is required to be filled in by * users when * creating {@link TemplateCreative} from the {@link * CreativeTemplate}. */ private java.lang.Boolean isRequired; /* Indicates that this instance is a subtype of CreativeTemplateVariable. * Although this field is returned in the response, it is ignored on * input * and cannot be selected. Specify xsi:type instead. */ private java.lang.String creativeTemplateVariableType; public CreativeTemplateVariable() { } public CreativeTemplateVariable( java.lang.String label, java.lang.String uniqueName, java.lang.String description, java.lang.Boolean isRequired, java.lang.String creativeTemplateVariableType) { this.label = label; this.uniqueName = uniqueName; this.description = description; this.isRequired = isRequired; this.creativeTemplateVariableType = creativeTemplateVariableType; } /** * Gets the label value for this CreativeTemplateVariable. * * @return label * Label that is displayed to users when creating {@link TemplateCreative} * from the * {@link CreativeTemplate}. This attribute is required * and has a maximum * length of 127 characters. */ public java.lang.String getLabel() { return label; } /** * Sets the label value for this CreativeTemplateVariable. * * @param label * Label that is displayed to users when creating {@link TemplateCreative} * from the * {@link CreativeTemplate}. This attribute is required * and has a maximum * length of 127 characters. */ public void setLabel(java.lang.String label) { this.label = label; } /** * Gets the uniqueName value for this CreativeTemplateVariable. * * @return uniqueName * Unique name used to identify the variable. This attribute is * read-only and * is assigned by Google, by deriving from label, when * a creative template * variable is created. */ public java.lang.String getUniqueName() { return uniqueName; } /** * Sets the uniqueName value for this CreativeTemplateVariable. * * @param uniqueName * Unique name used to identify the variable. This attribute is * read-only and * is assigned by Google, by deriving from label, when * a creative template * variable is created. */ public void setUniqueName(java.lang.String uniqueName) { this.uniqueName = uniqueName; } /** * Gets the description value for this CreativeTemplateVariable. * * @return description * A descriptive help text that is displayed to users along with * the * label. This attribute is required and has a maximum * length of 255 characters. 
*/ public java.lang.String getDescription() { return description; } /** * Sets the description value for this CreativeTemplateVariable. * * @param description * A descriptive help text that is displayed to users along with * the * label. This attribute is required and has a maximum * length of 255 characters. */ public void setDescription(java.lang.String description) { this.description = description; } /** * Gets the isRequired value for this CreativeTemplateVariable. * * @return isRequired * {@code true} if this variable is required to be filled in by * users when * creating {@link TemplateCreative} from the {@link * CreativeTemplate}. */ public java.lang.Boolean getIsRequired() { return isRequired; } /** * Sets the isRequired value for this CreativeTemplateVariable. * * @param isRequired * {@code true} if this variable is required to be filled in by * users when * creating {@link TemplateCreative} from the {@link * CreativeTemplate}. */ public void setIsRequired(java.lang.Boolean isRequired) { this.isRequired = isRequired; } /** * Gets the creativeTemplateVariableType value for this CreativeTemplateVariable. * * @return creativeTemplateVariableType * Indicates that this instance is a subtype of CreativeTemplateVariable. * Although this field is returned in the response, it is ignored on * input * and cannot be selected. Specify xsi:type instead. */ public java.lang.String getCreativeTemplateVariableType() { return creativeTemplateVariableType; } /** * Sets the creativeTemplateVariableType value for this CreativeTemplateVariable. * * @param creativeTemplateVariableType * Indicates that this instance is a subtype of CreativeTemplateVariable. * Although this field is returned in the response, it is ignored on * input * and cannot be selected. Specify xsi:type instead. 
*/ public void setCreativeTemplateVariableType(java.lang.String creativeTemplateVariableType) { this.creativeTemplateVariableType = creativeTemplateVariableType; } private java.lang.Object __equalsCalc = null; public synchronized boolean equals(java.lang.Object obj) { if (!(obj instanceof CreativeTemplateVariable)) return false; CreativeTemplateVariable other = (CreativeTemplateVariable) obj; if (obj == null) return false; if (this == obj) return true; if (__equalsCalc != null) { return (__equalsCalc == obj); } __equalsCalc = obj; boolean _equals; _equals = true && ((this.label==null && other.getLabel()==null) || (this.label!=null && this.label.equals(other.getLabel()))) && ((this.uniqueName==null && other.getUniqueName()==null) || (this.uniqueName!=null && this.uniqueName.equals(other.getUniqueName()))) && ((this.description==null && other.getDescription()==null) || (this.description!=null && this.description.equals(other.getDescription()))) && ((this.isRequired==null && other.getIsRequired()==null) || (this.isRequired!=null && this.isRequired.equals(other.getIsRequired()))) && ((this.creativeTemplateVariableType==null && other.getCreativeTemplateVariableType()==null) || (this.creativeTemplateVariableType!=null && this.creativeTemplateVariableType.equals(other.getCreativeTemplateVariableType()))); __equalsCalc = null; return _equals; } private boolean __hashCodeCalc = false; public synchronized int hashCode() { if (__hashCodeCalc) { return 0; } __hashCodeCalc = true; int _hashCode = 1; if (getLabel() != null) { _hashCode += getLabel().hashCode(); } if (getUniqueName() != null) { _hashCode += getUniqueName().hashCode(); } if (getDescription() != null) { _hashCode += getDescription().hashCode(); } if (getIsRequired() != null) { _hashCode += getIsRequired().hashCode(); } if (getCreativeTemplateVariableType() != null) { _hashCode += getCreativeTemplateVariableType().hashCode(); } __hashCodeCalc = false; return _hashCode; } // Type metadata private static org.apache.axis.description.TypeDesc typeDesc = new org.apache.axis.description.TypeDesc(CreativeTemplateVariable.class, true); static { typeDesc.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201311", "CreativeTemplateVariable")); org.apache.axis.description.ElementDesc elemField = new org.apache.axis.description.ElementDesc(); elemField.setFieldName("label"); elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201311", "label")); elemField.setXmlType(new javax.xml.namespace.QName("http://www.w3.org/2001/XMLSchema", "string")); elemField.setMinOccurs(0); elemField.setNillable(false); typeDesc.addFieldDesc(elemField); elemField = new org.apache.axis.description.ElementDesc(); elemField.setFieldName("uniqueName"); elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201311", "uniqueName")); elemField.setXmlType(new javax.xml.namespace.QName("http://www.w3.org/2001/XMLSchema", "string")); elemField.setMinOccurs(0); elemField.setNillable(false); typeDesc.addFieldDesc(elemField); elemField = new org.apache.axis.description.ElementDesc(); elemField.setFieldName("description"); elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201311", "description")); elemField.setXmlType(new javax.xml.namespace.QName("http://www.w3.org/2001/XMLSchema", "string")); elemField.setMinOccurs(0); elemField.setNillable(false); typeDesc.addFieldDesc(elemField); elemField = new 
org.apache.axis.description.ElementDesc(); elemField.setFieldName("isRequired"); elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201311", "isRequired")); elemField.setXmlType(new javax.xml.namespace.QName("http://www.w3.org/2001/XMLSchema", "boolean")); elemField.setMinOccurs(0); elemField.setNillable(false); typeDesc.addFieldDesc(elemField); elemField = new org.apache.axis.description.ElementDesc(); elemField.setFieldName("creativeTemplateVariableType"); elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201311", "CreativeTemplateVariable.Type")); elemField.setXmlType(new javax.xml.namespace.QName("http://www.w3.org/2001/XMLSchema", "string")); elemField.setMinOccurs(0); elemField.setNillable(false); typeDesc.addFieldDesc(elemField); } /** * Return type metadata object */ public static org.apache.axis.description.TypeDesc getTypeDesc() { return typeDesc; } /** * Get Custom Serializer */ public static org.apache.axis.encoding.Serializer getSerializer( java.lang.String mechType, java.lang.Class _javaType, javax.xml.namespace.QName _xmlType) { return new org.apache.axis.encoding.ser.BeanSerializer( _javaType, _xmlType, typeDesc); } /** * Get Custom Deserializer */ public static org.apache.axis.encoding.Deserializer getDeserializer( java.lang.String mechType, java.lang.Class _javaType, javax.xml.namespace.QName _xmlType) { return new org.apache.axis.encoding.ser.BeanDeserializer( _javaType, _xmlType, typeDesc); } }
apache-2.0
griffon/griffon
subprojects/griffon-core-api/src/main/java/griffon/core/env/Environment.java
3487
/* * SPDX-License-Identifier: Apache-2.0 * * Copyright 2008-2022 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package griffon.core.env; import griffon.annotations.core.Nonnull; import griffon.annotations.core.Nullable; import java.util.LinkedHashMap; import java.util.Locale; import java.util.Map; import static java.util.Objects.requireNonNull; /** * An enum that represents the current environment * * @author Andres Almiray * @since 2.0.0 */ public enum Environment { /** * The development environment */ DEVELOPMENT, /** * The production environment */ PRODUCTION, /** * The test environment */ TEST, /** * A custom environment */ CUSTOM; /** * Constant used to resolve the environment via System.getProperty(Environment.KEY) */ public static final String KEY = "griffon.env"; private static final String PRODUCTION_ENV_SHORT_NAME = "prod"; private static final String DEVELOPMENT_ENVIRONMENT_SHORT_NAME = "dev"; private static final String TEST_ENVIRONMENT_SHORT_NAME = "test"; private static final Map<String, String> ENV_NAME_MAPPINGS = new LinkedHashMap<>(); static { ENV_NAME_MAPPINGS.put(DEVELOPMENT_ENVIRONMENT_SHORT_NAME, Environment.DEVELOPMENT.getName()); ENV_NAME_MAPPINGS.put(PRODUCTION_ENV_SHORT_NAME, Environment.PRODUCTION.getName()); ENV_NAME_MAPPINGS.put(TEST_ENVIRONMENT_SHORT_NAME, Environment.TEST.getName()); } private String name; /** * @return Return true if the environment has been set as a System property */ public static boolean isSystemSet() { return System.getProperty(KEY) != null; } /** * Returns the environment for the given short name * * @param shortName The short name * @return The Environment or null if not known */ @Nullable public static Environment resolveEnvironment(@Nullable String shortName) { final String envName = ENV_NAME_MAPPINGS.get(shortName); if (envName != null) { return Environment.valueOf(envName.toUpperCase()); } return null; } @Nonnull public static String getEnvironmentShortName(@Nonnull Environment env) { requireNonNull(env, "Argument 'env' must not be null"); switch (env) { case DEVELOPMENT: return "dev"; case TEST: return "test"; case PRODUCTION: return "prod"; default: return env.getName(); } } /** * @return The name of the environment */ @Nonnull public String getName() { if (this != CUSTOM || name == null) { return this.toString().toLowerCase(Locale.getDefault()); } return name; } public void setName(@Nullable String name) { this.name = name; } }
apache-2.0
bathepawan/workload-automation
wlauto/workloads/googlephotos/__init__.py
4522
#    Copyright 2014-2016 ARM Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

import os

from wlauto import AndroidUxPerfWorkload, Parameter
from wlauto.exceptions import ValidationError
from wlauto.utils.types import list_of_strings
from wlauto.utils.misc import unique


class Googlephotos(AndroidUxPerfWorkload):

    name = 'googlephotos'
    package = 'com.google.android.apps.photos'
    min_apk_version = '1.21.0.123444480'
    activity = 'com.google.android.apps.photos.home.HomeActivity'
    view = [package + '/com.google.android.apps.consumerphotoeditor.fragments.ConsumerPhotoEditorActivity',
            package + '/com.google.android.apps.photos.home.HomeActivity',
            package + '/com.google.android.apps.photos.localmedia.ui.LocalPhotosActivity',
            package + '/com.google.android.apps.photos.onboarding.AccountPickerActivity',
            package + '/com.google.android.apps.photos.onboarding.IntroActivity']
    description = '''
    A workload to perform standard productivity tasks with Google Photos. The workload carries out
    various tasks, such as browsing images, performing zooms, and post-processing the image.

    Test description:

    1. Four images are copied to the device
    2. The application is started in offline access mode
    3. Gestures are performed to pinch zoom in and out of the selected image
    4. The colour of a selected image is edited by selecting the colour menu, incrementing the
       colour, resetting the colour and decrementing the colour using the seek bar.
    5. A crop test is performed on a selected image. UiAutomator does not allow the selection of
       the crop markers so the image is tilted positively, reset and then tilted negatively to get
       a similar cropping effect.
    6. A rotate test is performed on a selected image, rotating anticlockwise 90 degrees,
       180 degrees and 270 degrees.
    '''

    default_test_images = [
        'uxperf_1200x1600.png', 'uxperf_1600x1200.jpg',
        'uxperf_2448x3264.png', 'uxperf_3264x2448.jpg',
    ]

    parameters = [
        Parameter('test_images', kind=list_of_strings, default=default_test_images,
                  constraint=lambda x: len(unique(x)) == 4,
                  description='''
                  A list of four JPEG and/or PNG files to be pushed to the device.
                  Absolute file paths may be used but tilde expansion must be escaped.
                  '''),
    ]

    def __init__(self, device, **kwargs):
        super(Googlephotos, self).__init__(device, **kwargs)
        self.deployable_assets = self.test_images

    def validate(self):
        super(Googlephotos, self).validate()
        # Only accept certain image formats
        for image in self.test_images:
            if os.path.splitext(image.lower())[1] not in ['.jpg', '.jpeg', '.png']:
                raise ValidationError('{} must be a JPEG or PNG file'.format(image))

    def setup(self, context):
        super(Googlephotos, self).setup(context)
        # Create a subfolder for each test_image named ``wa-[1-4]``
        # Move each image into its subfolder
        # This is to guarantee ordering and allows the workload to select a specific
        # image by subfolder, as filenames are not shown easily within the app
        d = self.device.working_directory
        for i, f in enumerate(self.test_images):
            self.device.execute('mkdir -p {0}/wa-{1}'.format(d, i + 1))
            self.device.execute('mv {0}/{2} {0}/wa-{1}/{2}'.format(d, i + 1, f))
        # Force rescan
        self.device.broadcast_media_mounted(self.device.working_directory)

    def teardown(self, context):
        super(Googlephotos, self).teardown(context)
        # Remove the subfolders and its content
        d = self.device.working_directory
        for i in xrange(len(self.test_images)):
            self.device.execute('rm -rf {0}/wa-{1}'.format(d, i + 1))
        # Force rescan
        self.device.broadcast_media_mounted(self.device.working_directory)
apache-2.0
carewebframework/carewebframework-core
org.carewebframework.mvn-parent/org.carewebframework.mvn.plugin-parent/org.carewebframework.mvn.plugin.helpconverter/src/main/java/org/carewebframework/maven/plugin/help/chm/SystemTransform.java
2310
/*
 * #%L
 * carewebframework
 * %%
 * Copyright (C) 2008 - 2016 Regenstrief Institute, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * This Source Code Form is also subject to the terms of the Health-Related
 * Additional Disclaimer of Warranty and Limitation of Liability available at
 *
 * http://www.carewebframework.org/licensing/disclaimer.
 *
 * #L%
 */
package org.carewebframework.maven.plugin.help.chm;

import java.io.InputStream;
import java.io.OutputStream;

import org.carewebframework.maven.plugin.core.BaseMojo;

/**
 * Transforms settings extracted from the #SYSTEM file to a standard properties file.
 */
public class SystemTransform extends BinaryTransform {

    public SystemTransform(BaseMojo mojo) {
        super(mojo, "helpset");
    }

    @Override
    public void transform(InputStream inputStream, OutputStream outputStream) throws Exception {
        readDWord(inputStream); // version #
        int code;

        while ((code = readWord(inputStream)) != -1) {
            int len = readWord(inputStream);
            byte[] data = new byte[len];
            inputStream.read(data);

            switch (code) {
                case 0: // Contents file
                    break;

                case 1: // Index file
                    break;

                case 2: // Default topic
                    writeSetting(outputStream, "defaultTopic", getString(data), 1);
                    break;

                case 3: // Title
                    writeSetting(outputStream, "title", getString(data), 1);
                    break;

                case 4: // Settings
                    break;
            }
        }
    }
}
apache-2.0
Granicus/chef-application_procfile
resources/procfile.rb
998
#
# Author:: Matt Kasa <mattk@granicus.com>
# Cookbook Name:: application_procfile
# Provider:: procfile
#
# Copyright:: 2014, Granicus Inc. <mattk@granicus.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

include ApplicationCookbook::ResourceBase

# Handle any process types
# TODO: restrict these to only the process types in the Procfile
def method_missing(name, *args)
  @processes ||= {}
  @processes[name.to_sym] = args
end

# Expose the instance variable
def processes
  @processes
end
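# Illustrative sketch (editorial, hypothetical call names): because of the
# method_missing hook above, any otherwise-undefined call on this resource is
# recorded as a process type, e.g. a recipe calling `web 2` stores
# { web: [2] } and `worker 1` adds { worker: [1] }, both of which are later
# retrievable through the `processes` accessor defined above.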
apache-2.0
googleads/google-ads-java
google-ads-stubs-v8/src/main/java/com/google/ads/googleads/v8/enums/FrequencyCapTimeUnitProto.java
2713
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/googleads/v8/enums/frequency_cap_time_unit.proto package com.google.ads.googleads.v8.enums; public final class FrequencyCapTimeUnitProto { private FrequencyCapTimeUnitProto() {} public static void registerAllExtensions( com.google.protobuf.ExtensionRegistryLite registry) { } public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { registerAllExtensions( (com.google.protobuf.ExtensionRegistryLite) registry); } static final com.google.protobuf.Descriptors.Descriptor internal_static_google_ads_googleads_v8_enums_FrequencyCapTimeUnitEnum_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_ads_googleads_v8_enums_FrequencyCapTimeUnitEnum_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } private static com.google.protobuf.Descriptors.FileDescriptor descriptor; static { java.lang.String[] descriptorData = { "\n;google/ads/googleads/v8/enums/frequenc" + "y_cap_time_unit.proto\022\035google.ads.google" + "ads.v8.enums\032\034google/api/annotations.pro" + "to\"n\n\030FrequencyCapTimeUnitEnum\"R\n\024Freque" + "ncyCapTimeUnit\022\017\n\013UNSPECIFIED\020\000\022\013\n\007UNKNO" + "WN\020\001\022\007\n\003DAY\020\002\022\010\n\004WEEK\020\003\022\t\n\005MONTH\020\004B\356\001\n!c" + "om.google.ads.googleads.v8.enumsB\031Freque" + "ncyCapTimeUnitProtoP\001ZBgoogle.golang.org" + "/genproto/googleapis/ads/googleads/v8/en" + "ums;enums\242\002\003GAA\252\002\035Google.Ads.GoogleAds.V" + "8.Enums\312\002\035Google\\Ads\\GoogleAds\\V8\\Enums\352" + "\002!Google::Ads::GoogleAds::V8::Enumsb\006pro" + "to3" }; descriptor = com.google.protobuf.Descriptors.FileDescriptor .internalBuildGeneratedFileFrom(descriptorData, new com.google.protobuf.Descriptors.FileDescriptor[] { com.google.api.AnnotationsProto.getDescriptor(), }); internal_static_google_ads_googleads_v8_enums_FrequencyCapTimeUnitEnum_descriptor = getDescriptor().getMessageTypes().get(0); internal_static_google_ads_googleads_v8_enums_FrequencyCapTimeUnitEnum_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_ads_googleads_v8_enums_FrequencyCapTimeUnitEnum_descriptor, new java.lang.String[] { }); com.google.api.AnnotationsProto.getDescriptor(); } // @@protoc_insertion_point(outer_class_scope) }
apache-2.0
yuri91/overbot
src/errors.rs
765
use super::serde_json;
use super::toml;
use std::io;
use super::telegram_bot_client;

error_chain! {
    types {
        Error, ErrorKind, ResultExt, Result;
    }
    links {
        Bot(telegram_bot_client::errors::Error, telegram_bot_client::errors::ErrorKind);
    }
    foreign_links {
        Json(serde_json::Error);
        Io(io::Error);
        Toml(toml::de::Error);
    }
    errors {
        Config(file: String, descr: &'static str) {
            description("Configuration error")
            display("Error in conf file {}: {}", file, descr)
        }
        Output(cmd: String, descr: &'static str) {
            description("Invalid output from command")
            display("Invalid output from command {}: {}", cmd, descr)
        }
    }
}
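// Illustrative use of the generated error kinds (editorial sketch; the
// config-loading function and the `Config` type are assumed, not part of this
// crate's shown code):
//
//     fn load(path: &str) -> Result<Config> {
//         let text = std::fs::read_to_string(path)
//             .chain_err(|| ErrorKind::Config(path.to_string(), "cannot read file"))?;
//         toml::from_str(&text).map_err(Error::from)
//     }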
apache-2.0
takashno/lib-java-generate
src/main/java/com/zomu_t/lib/java/generate/java8/util/FieldUtils.java
14385
package com.zomu_t.lib.java.generate.java8.util; import com.zomu_t.lib.java.generate.common.type.DefaultLogicTemplate; import com.zomu_t.lib.java.generate.java8.model.*; import com.zomu_t.lib.java.generate.java8.type.AccessModifier; import com.zomu_t.lib.java.generate.java8.type.MethodModifier; import lombok.val; import lombok.experimental.UtilityClass; import org.apache.commons.lang3.text.WordUtils; /** * フィールドモデルを提供するユーティリティ. * * @author takashno */ @UtilityClass public class FieldUtils { /** * フィールドモデルを取得します. * * @param fieldName フィールド名 * @param javaDocContents JavaDocコンテンツ * @param type 型 * @return */ public FieldModel getFieldModel(String fieldName, String javaDocContents, ClassModel type, boolean isSetterAutoCreate, boolean isGetterAutoCreate) { val fm = FieldModel.builder().type(type).name(fieldName) .javaDoc(JavaDocUtils.getFieldJavaDocModel(javaDocContents)) .setterAutoCreate(isSetterAutoCreate) .getterAutoCreate(isGetterAutoCreate) .build(); return fm; } /** * フィールドモデルを取得します. * * @param fieldName フィールド名 * @param javaDocContents JavaDocコンテンツ * @param type 型 * @return */ public FieldModel getFieldModel(String fieldName, String javaDocContents, ClassModel type) { val fm = FieldModel.builder().type(type).name(fieldName) .javaDoc(JavaDocUtils.getFieldJavaDocModel(javaDocContents)) .build(); return fm; } /** * java.lang.String用のフィールドモデルを取得します. * * @param fieldName フィールド名 * @param javaDocContents JavaDocコンテンツ * @return */ public FieldModel getStringFieldModel(String fieldName, String javaDocContents, boolean isSetterAutoCreate, boolean isGetterAutoCreate) { return getFieldModel(fieldName, javaDocContents, TypeUtils.getStringClassModel(), isSetterAutoCreate, isGetterAutoCreate); } /** * java.lang.String用のフィールドモデルを取得します. * * @param fieldName フィールド名 * @param javaDocContents JavaDocコンテンツ * @return */ public FieldModel getStringFieldModel(String fieldName, String javaDocContents) { return getFieldModel(fieldName, javaDocContents, TypeUtils.getStringClassModel()); } /** * java.lang.Short用のフィールドモデルを取得します. * * @param fieldName フィールド名 * @param javaDocContents JavaDocコンテンツ * @return */ public static FieldModel getShortFieldModel(String fieldName, String javaDocContents, boolean isSetterAutoCreate, boolean isGetterAutoCreate) { return getFieldModel(fieldName, javaDocContents, TypeUtils.getShortClassModel(), isSetterAutoCreate, isGetterAutoCreate); } /** * java.lang.Short用のフィールドモデルを取得します. * * @param fieldName フィールド名 * @param javaDocContents JavaDocコンテンツ * @return */ public static FieldModel getShortFieldModel(String fieldName, String javaDocContents) { return getFieldModel(fieldName, javaDocContents, TypeUtils.getShortClassModel()); } /** * java.lang.Integer用のフィールドモデルを取得します. * * @param fieldName フィールド名 * @param javaDocContents JavaDocコンテンツ * @return */ public static FieldModel getIntegerFieldModel(String fieldName, String javaDocContents, boolean isSetterAutoCreate, boolean isGetterAutoCreate) { return getFieldModel(fieldName, javaDocContents, TypeUtils.getIntegerClassModel(), isSetterAutoCreate, isGetterAutoCreate); } /** * java.lang.Integer用のフィールドモデルを取得します. * * @param fieldName フィールド名 * @param javaDocContents JavaDocコンテンツ * @return */ public static FieldModel getIntegerFieldModel(String fieldName, String javaDocContents) { return getFieldModel(fieldName, javaDocContents, TypeUtils.getIntegerClassModel()); } /** * java.lang.Long用のフィールドモデルを取得します. 
* * @param fieldName フィールド名 * @param javaDocContents JavaDocコンテンツ * @return */ public static FieldModel getLongFieldModel(String fieldName, String javaDocContents, boolean isSetterAutoCreate, boolean isGetterAutoCreate) { return getFieldModel(fieldName, javaDocContents, TypeUtils.getLongClassModel(), isSetterAutoCreate, isGetterAutoCreate); } /** * java.lang.Long用のフィールドモデルを取得します. * * @param fieldName フィールド名 * @param javaDocContents JavaDocコンテンツ * @return */ public static FieldModel getLongFieldModel(String fieldName, String javaDocContents) { return getFieldModel(fieldName, javaDocContents, TypeUtils.getLongClassModel()); } /** * java.lang.Double用のフィールドモデルを取得します. * * @param fieldName フィールド名 * @param javaDocContents JavaDocコンテンツ * @return */ public static FieldModel getDoubleFieldModel(String fieldName, String javaDocContents, boolean isSetterAutoCreate, boolean isGetterAutoCreate) { return getFieldModel(fieldName, javaDocContents, TypeUtils.getDoubleClassModel(), isSetterAutoCreate, isGetterAutoCreate); } /** * java.lang.Double用のフィールドモデルを取得します. * * @param fieldName フィールド名 * @param javaDocContents JavaDocコンテンツ * @return */ public static FieldModel getDoubleFieldModel(String fieldName, String javaDocContents) { return getFieldModel(fieldName, javaDocContents, TypeUtils.getDoubleClassModel()); } /** * java.lang.Float用のフィールドモデルを取得します. * * @param fieldName フィールド名 * @param javaDocContents JavaDocコンテンツ * @return */ public static FieldModel getFloatFieldModel(String fieldName, String javaDocContents, boolean isSetterAutoCreate, boolean isGetterAutoCreate) { return getFieldModel(fieldName, javaDocContents, TypeUtils.getFloatClassModel(), isSetterAutoCreate, isGetterAutoCreate); } /** * java.lang.Float用のフィールドモデルを取得します. * * @param fieldName フィールド名 * @param javaDocContents JavaDocコンテンツ * @return */ public static FieldModel getFloatFieldModel(String fieldName, String javaDocContents) { return getFieldModel(fieldName, javaDocContents, TypeUtils.getFloatClassModel()); } /** * java.lang.Boolean用のフィールドモデルを取得します. * * @param fieldName フィールド名 * @param javaDocContents JavaDocコンテンツ * @return */ public static FieldModel getBooleanFieldModel(String fieldName, String javaDocContents, boolean isSetterAutoCreate, boolean isGetterAutoCreate) { return getFieldModel(fieldName, javaDocContents, TypeUtils.getBooleanClassModel(), isSetterAutoCreate, isGetterAutoCreate); } /** * java.lang.Boolean用のフィールドモデルを取得します. * * @param fieldName フィールド名 * @param javaDocContents JavaDocコンテンツ * @return */ public static FieldModel getBooleanFieldModel(String fieldName, String javaDocContents) { return getFieldModel(fieldName, javaDocContents, TypeUtils.getBooleanClassModel()); } /** * java.lang.Byte用のフィールドモデルを取得します. * * @param fieldName フィールド名 * @param javaDocContents JavaDocコンテンツ * @return */ public static FieldModel getByteFieldModel(String fieldName, String javaDocContents, boolean isSetterAutoCreate, boolean isGetterAutoCreate) { return getFieldModel(fieldName, javaDocContents, TypeUtils.getByteClassModel(), isSetterAutoCreate, isGetterAutoCreate); } /** * java.lang.Byte用のフィールドモデルを取得します. * * @param fieldName フィールド名 * @param javaDocContents JavaDocコンテンツ * @return */ public static FieldModel getByteFieldModel(String fieldName, String javaDocContents) { return getFieldModel(fieldName, javaDocContents, TypeUtils.getByteClassModel()); } /** * java.lang.Character用のフィールドモデルを取得します. 
* * @param fieldName フィールド名 * @param javaDocContents JavaDocコンテンツ * @return */ public static FieldModel getCharacterFieldModel(String fieldName, String javaDocContents, boolean isSetterAutoCreate, boolean isGetterAutoCreate) { return getFieldModel(fieldName, javaDocContents, TypeUtils.getCharacterClassModel(), isSetterAutoCreate, isGetterAutoCreate); } /** * java.lang.Character用のフィールドモデルを取得します. * * @param fieldName フィールド名 * @param javaDocContents JavaDocコンテンツ * @return */ public static FieldModel getCharacterFieldModel(String fieldName, String javaDocContents) { return getFieldModel(fieldName, javaDocContents, TypeUtils.getCharacterClassModel()); } /** * 指定されたFieldのGetterメソッドを作成. * * @param fieldModel Fieldを表すモデル * @return Methodを表すモデル */ public static MethodModel createGetterMethod(FieldModel fieldModel) { LogicModel logicModel = LogicModel.builder() .detail(LogicDetailModel.builder() .templatePath(DefaultLogicTemplate.GETTER.getPath()) .scope(GetterLogicModel.builder().name(fieldModel.getName()).build()) .build()) .build(); return MethodModel.builder() .javaDoc(JavaDocModel.builder() .mainContent(fieldModel.getName() + "を取得します.") .annotation(JavaDocAnnotationModel.builder() .name("return") .content(fieldModel.getName()) .build()) .build()) .accessModifier(AccessModifier.PUBLIC) .returnType(ReturnModel.builder() .type(fieldModel.getType()) .array(fieldModel.isArray()) .build()) .name("get" + WordUtils.capitalize(fieldModel.getName())) .logic(logicModel) .build(); } /** * 指定されたFieldのSetterメソッドを作成. * * @param fieldModel Fieldを表すモデル * @return Methodを表すモデル */ public static MethodModel createSetterMethod(FieldModel fieldModel) { LogicModel logicModel = LogicModel.builder() .detail(LogicDetailModel.builder() .templatePath(DefaultLogicTemplate.SETTER.getPath()) .scope(SetterLogicModel.builder().name(fieldModel.getName()).build()) .build()) .build(); return MethodModel.builder() .javaDoc(JavaDocModel.builder() .mainContent(fieldModel.getName() + "を設定します.") .annotation(JavaDocAnnotationModel.builder() .name("param") .content(fieldModel.getName()) .content("設定する値") .build()) .build()) .accessModifier(AccessModifier.PUBLIC) .name("set" + WordUtils.capitalize(fieldModel.getName())) .arg(ArgModel.builder() .type(fieldModel.getType()) .name(fieldModel.getName()) .build()) .logic(logicModel) .build(); } }
apache-2.0
cloudflare/unsee
assets/static/progress.js
1037
"use strict"; const NProgress = require("nprogress"); require("nprogress/nprogress.css"); const unsee = require("./unsee"); var timer; function init() { NProgress.configure({ minimum: 0.01, showSpinner: false, easing: "linear", template: "<div class='bar nprogress-flatly' role='bar'><div class='peg nprogress-flatly'></div></div>" }); } function resetTimer() { if (timer !== false) { clearInterval(timer); timer = false; } } function complete() { resetTimer(); NProgress.done(); } function pause() { resetTimer(); NProgress.set(0.0); } function start() { var stepMs = 250; // animation step in ms var steps = (unsee.getRefreshRate() * 1000) / stepMs; // how many steps we have NProgress.set(0.0); resetTimer(); timer = setInterval(function() { NProgress.inc(1.0 / steps); }, stepMs); } exports.init = init; exports.pause = pause; exports.complete = complete; exports.start = start; exports.resetTimer = resetTimer;
apache-2.0
GoogleCloudPlatform/terraformer
providers/gcp/securityPolicies_gen.go
2323
// Copyright 2018 The Terraformer Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// AUTO-GENERATED CODE. DO NOT EDIT.
package gcp

import (
    "context"
    "log"

    "github.com/GoogleCloudPlatform/terraformer/terraformutils"

    "google.golang.org/api/compute/v1"
)

var securityPoliciesAllowEmptyValues = []string{""}

var securityPoliciesAdditionalFields = map[string]interface{}{}

type SecurityPoliciesGenerator struct {
    GCPService
}

// Run on securityPoliciesList and create for each TerraformResource
func (g SecurityPoliciesGenerator) createResources(ctx context.Context, securityPoliciesList *compute.SecurityPoliciesListCall) []terraformutils.Resource {
    resources := []terraformutils.Resource{}
    if err := securityPoliciesList.Pages(ctx, func(page *compute.SecurityPolicyList) error {
        for _, obj := range page.Items {
            resources = append(resources, terraformutils.NewResource(
                obj.Name,
                obj.Name,
                "google_compute_security_policy",
                g.ProviderName,
                map[string]string{
                    "name":    obj.Name,
                    "project": g.GetArgs()["project"].(string),
                    "region":  g.GetArgs()["region"].(compute.Region).Name,
                },
                securityPoliciesAllowEmptyValues,
                securityPoliciesAdditionalFields,
            ))
        }
        return nil
    }); err != nil {
        log.Println(err)
    }
    return resources
}

// Generate TerraformResources from GCP API,
// from each securityPolicies create 1 TerraformResource
// Need securityPolicies name as ID for terraform resource
func (g *SecurityPoliciesGenerator) InitResources() error {
    ctx := context.Background()
    computeService, err := compute.NewService(ctx)
    if err != nil {
        return err
    }

    securityPoliciesList := computeService.SecurityPolicies.List(g.GetArgs()["project"].(string))
    g.Resources = g.createResources(ctx, securityPoliciesList)
    return nil
}
apache-2.0
lijunyandev/MeetMusic
app/src/main/java/com/lijunyan/blackmusic/activity/BaseActivity.java
1602
package com.lijunyan.blackmusic.activity;

import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;

import com.lijunyan.blackmusic.R;
import com.lijunyan.blackmusic.util.MyMusicUtil;

public abstract class BaseActivity extends AppCompatActivity {

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        initTheme();
    }

    private void initTheme() {
        int themeId = MyMusicUtil.getTheme(BaseActivity.this);
        switch (themeId) {
            default:
            case 0:
                setTheme(R.style.BiLiPinkTheme);
                break;
            case 1:
                setTheme(R.style.ZhiHuBlueTheme);
                break;
            case 2:
                setTheme(R.style.KuAnGreenTheme);
                break;
            case 3:
                setTheme(R.style.CloudRedTheme);
                break;
            case 4:
                setTheme(R.style.TengLuoPurpleTheme);
                break;
            case 5:
                setTheme(R.style.SeaBlueTheme);
                break;
            case 6:
                setTheme(R.style.GrassGreenTheme);
                break;
            case 7:
                setTheme(R.style.CoffeeBrownTheme);
                break;
            case 8:
                setTheme(R.style.LemonOrangeTheme);
                break;
            case 9:
                setTheme(R.style.StartSkyGrayTheme);
                break;
            case 10:
                setTheme(R.style.NightModeTheme);
                break;
        }
    }
}
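// Illustrative subclass (editorial sketch; the activity and layout names are
// assumed examples, not part of this project's shown code). Any screen that
// should pick up the user-selected theme extends BaseActivity and lets
// super.onCreate() run initTheme() before inflating its layout:
//
//     public class PlayerActivity extends BaseActivity {
//         @Override
//         protected void onCreate(Bundle savedInstanceState) {
//             super.onCreate(savedInstanceState);           // theme applied here
//             setContentView(R.layout.activity_player);     // hypothetical layout id
//         }
//     }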
apache-2.0
Onager/plaso
plaso/parsers/sqlite_plugins/hangouts_messages.py
16072
# -*- coding: utf-8 -*- """SQLite parser plugin for Google Hangouts conversations database files.""" from dfdatetime import posix_time as dfdatetime_posix_time from plaso.containers import events from plaso.containers import time_events from plaso.lib import definitions from plaso.parsers import sqlite from plaso.parsers.sqlite_plugins import interface class HangoutsMessageData(events.EventData): """GoogleHangouts Message event data. Attributes: body (str): content of the SMS text message. message_status (int): message status. message_type (int): message type. sender (str): Name with the sender. """ DATA_TYPE = 'android:messaging:hangouts' def __init__(self): """Initializes event data.""" super(HangoutsMessageData, self).__init__(data_type=self.DATA_TYPE) self.body = None self.message_status = None self.message_type = None self.sender = None class HangoutsMessagePlugin(interface.SQLitePlugin): """SQLite parser plugin for Google Hangouts conversations database files. The Google Hangouts conversations database file is typically stored in: /data/com.google.android.talk/databases/babel.db This SQLite database is the conversation database for conversations, participant names, messages, and information about the Google Hangout event. There can be multiple babel.db databases, and each database name will be followed by an integer starting with 0, for example: "babel0.db,babel1.db,babel3.db". """ NAME = 'hangouts_messages' DATA_FORMAT = 'Google Hangouts conversations SQLite database (babel.db) file' REQUIRED_STRUCTURE = { 'blocked_people': frozenset([]), 'messages': frozenset([ '_id', 'text', 'timestamp', 'status', 'type', 'author_chat_id']), 'participants': frozenset([ 'full_name', 'chat_id'])} QUERIES = [ ('SELECT messages._id, participants.full_name, text, messages.timestamp,' 'status, type FROM messages INNER JOIN participants ON ' 'messages.author_chat_id=participants.chat_id;', 'ParseMessagesRow')] SCHEMAS = [{ 'android_metadata': ( 'CREATE TABLE android_metadata (locale TEXT)'), 'blocked_people': ( 'CREATE TABLE blocked_people (_id INTEGER PRIMARY KEY, gaia_id ' 'TEXT, chat_id TEXT, name TEXT, profile_photo_url TEXT, UNIQUE ' '(chat_id) ON CONFLICT REPLACE, UNIQUE (gaia_id) ON CONFLICT ' 'REPLACE)'), 'conversation_participants': ( 'CREATE TABLE conversation_participants (_id INTEGER PRIMARY KEY, ' 'participant_row_id INT, participant_type INT, conversation_id ' 'TEXT, sequence INT, active INT, invitation_status INT DEFAULT(0), ' 'UNIQUE (conversation_id,participant_row_id) ON CONFLICT REPLACE, ' 'FOREIGN KEY (conversation_id) REFERENCES ' 'conversations(conversation_id) ON DELETE CASCADE ON UPDATE ' 'CASCADE, FOREIGN KEY (participant_row_id) REFERENCES ' 'participants(_id))'), 'conversations': ( 'CREATE TABLE conversations (_id INTEGER PRIMARY KEY, ' 'conversation_id TEXT, conversation_type INT, ' 'latest_message_timestamp INT DEFAULT(0), ' 'latest_message_expiration_timestamp INT, metadata_present ' 'INT,notification_level INT, name TEXT, generated_name TEXT, ' 'snippet_type INT, snippet_text TEXT, snippet_image_url TEXT, ' 'snippet_author_gaia_id TEXT, snippet_author_chat_id TEXT, ' 'snippet_message_row_id INT, snippet_selector INT, snippet_status ' 'INT, snippet_new_conversation_name TEXT, snippet_participant_keys ' 'TEXT, snippet_sms_type TEXT, previous_latest_timestamp INT, status ' 'INT, view INT, inviter_gaia_id TEXT, inviter_chat_id TEXT, ' 'inviter_affinity INT, is_pending_leave INT, account_id INT, is_otr ' 'INT, packed_avatar_urls TEXT, self_avatar_url TEXT, self_watermark ' 'INT 
DEFAULT(0), chat_watermark INT DEFAULT(0), hangout_watermark ' 'INT DEFAULT(0), is_draft INT, sequence_number INT, call_media_type ' 'INT DEFAULT(0), has_joined_hangout INT, has_chat_notifications ' 'DEFAULT(0),has_video_notifications ' 'DEFAULT(0),last_hangout_event_time INT, draft TEXT, otr_status ' 'INT, otr_toggle INT, last_otr_modification_time INT, ' 'continuation_token BLOB, continuation_event_timestamp INT, ' 'has_oldest_message INT DEFAULT(0), sort_timestamp INT, ' 'first_peak_scroll_time INT, first_peak_scroll_to_message_timestamp ' 'INT, second_peak_scroll_time INT, ' 'second_peak_scroll_to_message_timestamp INT, conversation_hash ' 'BLOB, disposition INT DEFAULT(0), has_persistent_events INT ' 'DEFAULT(-1), transport_type INT DEFAULT(1), ' 'default_transport_phone TEXT, sms_service_center TEXT, ' 'is_temporary INT DEFAULT (0), sms_thread_id INT DEFAULT (-1), ' 'chat_ringtone_uri TEXT, hangout_ringtone_uri TEXT, ' 'snippet_voicemail_duration INT DEFAULT (0), share_count INT ' 'DEFAULT(0), has_unobserved TEXT, last_share_timestamp INT ' 'DEFAULT(0), gls_status INT DEFAULT(0), gls_link TEXT, is_guest INT ' 'DEFAULT(0), UNIQUE (conversation_id ))'), 'dismissed_contacts': ( 'CREATE TABLE dismissed_contacts (_id INTEGER PRIMARY KEY, gaia_id ' 'TEXT, chat_id TEXT, name TEXT, profile_photo_url TEXT, UNIQUE ' '(chat_id) ON CONFLICT REPLACE, UNIQUE (gaia_id) ON CONFLICT ' 'REPLACE)'), 'event_suggestions': ( 'CREATE TABLE event_suggestions (_id INTEGER PRIMARY KEY, ' 'conversation_id TEXT, event_id TEXT, suggestion_id TEXT, timestamp ' 'INT, expiration_time_usec INT, type INT, gem_asset_url STRING, ' 'gem_horizontal_alignment INT, matched_message_substring TEXT, ' 'FOREIGN KEY (conversation_id) REFERENCES ' 'conversations(conversation_id) ON DELETE CASCADE ON UPDATE ' 'CASCADE, UNIQUE (conversation_id,suggestion_id) ON CONFLICT ' 'REPLACE)'), 'merge_keys': ( 'CREATE TABLE merge_keys (_id INTEGER PRIMARY KEY, conversation_id ' 'TEXT, merge_key TEXT, UNIQUE (conversation_id) ON CONFLICT ' 'REPLACE, FOREIGN KEY (conversation_id) REFERENCES ' 'conversations(conversation_id) ON DELETE CASCADE ON UPDATE CASCADE ' ')'), 'merged_contact_details': ( 'CREATE TABLE merged_contact_details (_id INTEGER PRIMARY KEY, ' 'merged_contact_id INT, lookup_data_type INT, lookup_data TEXT, ' 'lookup_data_standardized TEXT, lookup_data_search TEXT, ' 'lookup_data_label TEXT, needs_gaia_ids_resolved INT DEFAULT (1), ' 'is_hangouts_user INT DEFAULT (0), gaia_id TEXT, avatar_url TEXT, ' 'display_name TEXT, last_checked_ts INT DEFAULT (0), ' 'lookup_data_display TEXT, detail_affinity_score REAL DEFAULT ' '(0.0), detail_logging_id TEXT, is_in_viewer_dasher_domain INT ' 'DEFAULT (0), FOREIGN KEY (merged_contact_id) REFERENCES ' 'merged_contacts(_id) ON DELETE CASCADE ON UPDATE CASCADE)'), 'merged_contacts': ( 'CREATE TABLE merged_contacts (_id INTEGER PRIMARY KEY, ' 'contact_lookup_key TEXT, contact_id INT, raw_contact_id INT, ' 'display_name TEXT, avatar_url TEXT, is_frequent INT DEFAULT (0), ' 'is_favorite INT DEFAULT (0), contact_source INT DEFAULT(0), ' 'frequent_order INT, person_logging_id TEXT, person_affinity_score ' 'REAL DEFAULT (0.0), is_in_same_domain INT DEFAULT (0))'), 'messages': ( 'CREATE TABLE messages (_id INTEGER PRIMARY KEY, message_id TEXT, ' 'message_type INT, conversation_id TEXT, author_chat_id TEXT, ' 'author_gaia_id TEXT, text TEXT, timestamp INT, ' 'delete_after_read_timetamp INT, status INT, type INT, local_url ' 'TEXT, remote_url TEXT, attachment_content_type TEXT, width_pixels ' 'INT, 
height_pixels INT, stream_id TEXT, image_id TEXT, album_id ' 'TEXT, latitude DOUBLE, longitude DOUBLE, address ADDRESS, ' 'notification_level INT, expiration_timestamp INT, ' 'notified_for_failure INT DEFAULT(0), off_the_record INT ' 'DEFAULT(0), transport_type INT NOT NULL DEFAULT(1), ' 'transport_phone TEXT, external_ids TEXT, sms_timestamp_sent INT ' 'DEFAULT(0), sms_priority INT DEFAULT(0), sms_message_size INT ' 'DEFAULT(0), mms_subject TEXT, sms_raw_sender TEXT, ' 'sms_raw_recipients TEXT, persisted INT DEFAULT(1), ' 'sms_message_status INT DEFAULT(-1), sms_type INT DEFAULT(-1), ' 'stream_url TEXT, attachment_target_url TEXT, attachment_name TEXT, ' 'image_rotation INT DEFAULT (0), new_conversation_name TEXT, ' 'participant_keys TEXT, forwarded_mms_url TEXT, forwarded_mms_count ' 'INT DEFAULT(0), attachment_description TEXT, ' 'attachment_target_url_description TEXT, attachment_target_url_name ' 'TEXT, attachment_blob_data BLOB,attachment_uploading_progress INT ' 'DEFAULT(0), sending_error INT DEFAULT(0), stream_expiration INT, ' 'voicemail_length INT DEFAULT (0), call_media_type INT DEFAULT(0), ' 'last_seen_timestamp INT DEFAULT(0), observed_status INT ' 'DEFAULT(2), receive_type INT DEFAULT(0), init_timestamp INT ' 'DEFAULT(0), in_app_msg_latency INT DEFAULT(0), notified INT ' 'DEFAULT(0), alert_in_conversation_list INT DEFAULT(0), attachments ' 'BLOB, is_user_mentioned INT DEFAULT(0), local_id TEXT, ' 'request_task_row_id INT DEFAULT(-1), FOREIGN KEY (conversation_id) ' 'REFERENCES conversations(conversation_id) ON DELETE CASCADE ON ' 'UPDATE CASCADE, UNIQUE (conversation_id,message_id) ON CONFLICT ' 'REPLACE)'), 'mms_notification_inds': ( 'CREATE TABLE mms_notification_inds (_id INTEGER PRIMARY KEY, ' 'content_location TEXT, transaction_id TEXT, from_address TEXT, ' 'message_size INT DEFAULT(0), expiry INT)'), 'multipart_attachments': ( 'CREATE TABLE multipart_attachments (_id INTEGER PRIMARY KEY, ' 'message_id TEXT, conversation_id TEXT, url TEXT, content_type ' 'TEXT, width INT, height INT, FOREIGN KEY (message_id, ' 'conversation_id) REFERENCES messages(message_id, conversation_id) ' 'ON DELETE CASCADE ON UPDATE CASCADE)'), 'participant_email_fts': ( 'CREATE VIRTUAL TABLE participant_email_fts USING ' 'fts4(content="merged_contact_details", gaia_id,lookup_data)'), 'participant_email_fts_docsize': ( 'CREATE TABLE \'participant_email_fts_docsize\'(docid INTEGER ' 'PRIMARY KEY, size BLOB)'), 'participant_email_fts_segdir': ( 'CREATE TABLE \'participant_email_fts_segdir\'(level INTEGER,idx ' 'INTEGER,start_block INTEGER,leaves_end_block INTEGER,end_block ' 'INTEGER,root BLOB,PRIMARY KEY(level, idx))'), 'participant_email_fts_segments': ( 'CREATE TABLE \'participant_email_fts_segments\'(blockid INTEGER ' 'PRIMARY KEY, block BLOB)'), 'participant_email_fts_stat': ( 'CREATE TABLE \'participant_email_fts_stat\'(id INTEGER PRIMARY ' 'KEY, value BLOB)'), 'participants': ( 'CREATE TABLE participants (_id INTEGER PRIMARY KEY, ' 'participant_type INT DEFAULT 1, gaia_id TEXT, chat_id TEXT, ' 'phone_id TEXT, circle_id TEXT, first_name TEXT, full_name TEXT, ' 'fallback_name TEXT, profile_photo_url TEXT, batch_gebi_tag STRING ' 'DEFAULT(\'-1\'), blocked INT DEFAULT(0), in_users_domain BOOLEAN, ' 'UNIQUE (circle_id) ON CONFLICT REPLACE, UNIQUE (chat_id) ON ' 'CONFLICT REPLACE, UNIQUE (gaia_id) ON CONFLICT REPLACE)'), 'participants_fts': ( 'CREATE VIRTUAL TABLE participants_fts USING ' 'fts4(content="participants",gaia_id,full_name)'), 'participants_fts_docsize': ( 'CREATE TABLE 
\'participants_fts_docsize\'(docid INTEGER PRIMARY ' 'KEY, size BLOB)'), 'participants_fts_segdir': ( 'CREATE TABLE \'participants_fts_segdir\'(level INTEGER,idx ' 'INTEGER,start_block INTEGER,leaves_end_block INTEGER,end_block ' 'INTEGER,root BLOB,PRIMARY KEY(level, idx))'), 'participants_fts_segments': ( 'CREATE TABLE \'participants_fts_segments\'(blockid INTEGER PRIMARY ' 'KEY, block BLOB)'), 'participants_fts_stat': ( 'CREATE TABLE \'participants_fts_stat\'(id INTEGER PRIMARY KEY, ' 'value BLOB)'), 'presence': ( 'CREATE TABLE presence (_id INTEGER PRIMARY KEY, gaia_id TEXT NOT ' 'NULL, reachable INT DEFAULT(0), reachable_time INT DEFAULT(0), ' 'available INT DEFAULT(0), available_time INT DEFAULT(0), ' 'status_message TEXT, status_message_time INT DEFAULT(0), call_type ' 'INT DEFAULT(0), call_type_time INT DEFAULT(0), device_status INT ' 'DEFAULT(0), device_status_time INT DEFAULT(0), last_seen INT ' 'DEFAULT(0), last_seen_time INT DEFAULT(0), location BLOB, ' 'location_time INT DEFAULT(0), UNIQUE (gaia_id) ON CONFLICT ' 'REPLACE)'), 'recent_calls': ( 'CREATE TABLE recent_calls (_id INTEGER PRIMARY KEY, ' 'normalized_number TEXT NOT NULL, phone_number TEXT, contact_id ' 'TEXT, call_timestamp INT, call_type INT, contact_type INT, ' 'call_rate TEXT, is_free_call BOOLEAN)'), 'search': ( 'CREATE TABLE search (search_key TEXT NOT NULL,continuation_token ' 'TEXT,PRIMARY KEY (search_key))'), 'sticker_albums': ( 'CREATE TABLE sticker_albums (album_id TEXT NOT NULL, title TEXT, ' 'cover_photo_id TEXT, last_used INT DEFAULT(0), PRIMARY KEY ' '(album_id))'), 'sticker_photos': ( 'CREATE TABLE sticker_photos (photo_id TEXT NOT NULL, album_id TEXT ' 'NOT NULL, url TEXT NOT NULL, file_name TEXT, last_used INT ' 'DEFAULT(0), PRIMARY KEY (photo_id), FOREIGN KEY (album_id) ' 'REFERENCES sticker_albums(album_id) ON DELETE CASCADE)'), 'suggested_contacts': ( 'CREATE TABLE suggested_contacts (_id INTEGER PRIMARY KEY, gaia_id ' 'TEXT, chat_id TEXT, name TEXT, first_name TEXT, packed_circle_ids ' 'TEXT, profile_photo_url TEXT, sequence INT, suggestion_type INT, ' 'logging_id TEXT, affinity_score REAL DEFAULT (0.0), ' 'is_in_same_domain INT DEFAULT (0))')}] def ParseMessagesRow(self, parser_mediator, query, row, **unused_kwargs): """Parses an Messages row. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. query (str): query that created the row. row (sqlite3.Row): row. """ query_hash = hash(query) event_data = HangoutsMessageData() event_data.sender = self._GetRowValue(query_hash, row, 'full_name') event_data.body = self._GetRowValue(query_hash, row, 'text') event_data.offset = self._GetRowValue(query_hash, row, '_id') event_data.query = query event_data.message_status = self._GetRowValue(query_hash, row, 'status') event_data.message_type = self._GetRowValue(query_hash, row, 'type') timestamp = self._GetRowValue(query_hash, row, 'timestamp') date_time = dfdatetime_posix_time.PosixTimeInMicroseconds( timestamp=timestamp) event = time_events.DateTimeValuesEvent( date_time, definitions.TIME_DESCRIPTION_CREATION) parser_mediator.ProduceEventWithEventData(event, event_data) sqlite.SQLiteParser.RegisterPlugin(HangoutsMessagePlugin)
apache-2.0
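The ParseMessagesRow method above feeds the messages table's timestamp column to dfdatetime's PosixTimeInMicroseconds, which implies the column stores microseconds since the Unix epoch. Below is a minimal standard-library sketch of that conversion; it is illustrative only, does not use plaso or dfdatetime, and the helper name is made up for the example.

from datetime import datetime, timezone

def hangouts_timestamp_to_datetime(timestamp_us):
    # Hypothetical helper: interpret a Hangouts 'timestamp' value as
    # microseconds since the Unix epoch, as the plugin's use of
    # PosixTimeInMicroseconds suggests, and return an aware UTC datetime.
    return datetime.fromtimestamp(timestamp_us / 1_000_000, tz=timezone.utc)

print(hangouts_timestamp_to_datetime(0))  # 1970-01-01 00:00:00+00:00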
vespa-engine/vespa
searchlib/src/vespa/searchlib/diskindex/field_merger.cpp
17199
// Copyright Yahoo. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root. #include "field_merger.h" #include "fieldreader.h" #include "field_length_scanner.h" #include "fusion_input_index.h" #include "fusion_output_index.h" #include "dictionarywordreader.h" #include "wordnummapper.h" #include <vespa/fastos/file.h> #include <vespa/searchlib/bitcompression/posocc_fields_params.h> #include <vespa/searchlib/common/i_flush_token.h> #include <vespa/searchlib/index/schemautil.h> #include <vespa/searchlib/util/filekit.h> #include <vespa/searchlib/util/dirtraverse.h> #include <vespa/searchlib/util/posting_priority_queue_merger.hpp> #include <vespa/vespalib/io/fileutil.h> #include <vespa/vespalib/stllike/asciistream.h> #include <vespa/vespalib/util/exceptions.h> #include <vespa/log/log.h> LOG_SETUP(".diskindex.field_merger"); using search::FileKit; using search::bitcompression::PosOccFieldParams; using search::bitcompression::PosOccFieldsParams; using search::common::FileHeaderContext; using search::index::FieldLengthInfo; using search::index::PostingListParams; using search::index::Schema; using search::index::SchemaUtil; using search::index::schema::DataType; using vespalib::IllegalArgumentException; using vespalib::make_string; namespace search::diskindex { namespace { constexpr uint32_t renumber_word_ids_heap_limit = 4; constexpr uint32_t renumber_word_ids_merge_chunk = 1000000; constexpr uint32_t merge_postings_heap_limit = 4; constexpr uint32_t merge_postings_merge_chunk = 50000; constexpr uint32_t scan_chunk = 80000; vespalib::string createTmpPath(const vespalib::string & base, uint32_t index) { vespalib::asciistream os; os << base; os << "/tmpindex"; os << index; return os.str(); } } FieldMerger::FieldMerger(uint32_t id, const FusionOutputIndex& fusion_out_index, std::shared_ptr<IFlushToken> flush_token) : _id(id), _field_name(SchemaUtil::IndexIterator(fusion_out_index.get_schema(), id).getName()), _field_dir(fusion_out_index.get_path() + "/" + _field_name), _fusion_out_index(fusion_out_index), _flush_token(std::move(flush_token)), _word_readers(), _word_heap(), _word_aggregator(), _word_num_mappings(), _num_word_ids(0), _readers(), _heap(), _writer(), _field_length_scanner(), _open_reader_idx(std::numeric_limits<uint32_t>::max()), _state(State::MERGE_START), _failed(false) { } FieldMerger::~FieldMerger() = default; void FieldMerger::make_tmp_dirs() { for (const auto & index : _fusion_out_index.get_old_indexes()) { vespalib::mkdir(createTmpPath(_field_dir, index.getIndex()), false); } } bool FieldMerger::clean_tmp_dirs() { uint32_t i = 0; for (;;) { vespalib::string tmpindexpath = createTmpPath(_field_dir, i); FastOS_StatInfo statInfo; if (!FastOS_File::Stat(tmpindexpath.c_str(), &statInfo)) { if (statInfo._error == FastOS_StatInfo::FileNotFound) { break; } LOG(error, "Failed to stat tmpdir %s", tmpindexpath.c_str()); return false; } i++; } while (i > 0) { i--; vespalib::string tmpindexpath = createTmpPath(_field_dir, i); search::DirectoryTraverse dt(tmpindexpath.c_str()); if (!dt.RemoveTree()) { LOG(error, "Failed to clean tmpdir %s", tmpindexpath.c_str()); return false; } } return true; } bool FieldMerger::open_input_word_readers() { _word_readers.reserve(_fusion_out_index.get_old_indexes().size()); _word_heap = std::make_unique<PostingPriorityQueueMerger<DictionaryWordReader, WordAggregator>>(); SchemaUtil::IndexIterator index(_fusion_out_index.get_schema(), _id); for (auto & oi : _fusion_out_index.get_old_indexes()) { auto 
reader(std::make_unique<DictionaryWordReader>()); const vespalib::string &tmpindexpath = createTmpPath(_field_dir, oi.getIndex()); const vespalib::string &oldindexpath = oi.getPath(); vespalib::string wordMapName = tmpindexpath + "/old2new.dat"; vespalib::string fieldDir(oldindexpath + "/" + _field_name); vespalib::string dictName(fieldDir + "/dictionary"); const Schema &oldSchema = oi.getSchema(); if (!index.hasOldFields(oldSchema)) { continue; // drop data } bool res = reader->open(dictName, wordMapName, _fusion_out_index.get_tune_file_indexing()._read); if (!res) { LOG(error, "Could not open dictionary %s to generate %s", dictName.c_str(), wordMapName.c_str()); return false; } reader->read(); if (reader->isValid()) { _word_readers.push_back(std::move(reader)); _word_heap->initialAdd(_word_readers.back().get()); } } return true; } bool FieldMerger::read_mapping_files() { _word_num_mappings.resize(_fusion_out_index.get_old_indexes().size()); SchemaUtil::IndexIterator index(_fusion_out_index.get_schema(), _id); for (const auto & oi : _fusion_out_index.get_old_indexes()) { std::vector<uint32_t> oldIndexes; const Schema &oldSchema = oi.getSchema(); if (!SchemaUtil::getIndexIds(oldSchema, DataType::STRING, oldIndexes)) { return false; } WordNumMapping &wordNumMapping = _word_num_mappings[oi.getIndex()]; if (oldIndexes.empty()) { wordNumMapping.noMappingFile(); continue; } if (!index.hasOldFields(oldSchema)) { continue; // drop data } // Open word mapping file vespalib::string old2newname = createTmpPath(_field_dir, oi.getIndex()) + "/old2new.dat"; wordNumMapping.readMappingFile(old2newname, _fusion_out_index.get_tune_file_indexing()._read); } return true; } bool FieldMerger::renumber_word_ids_start() { LOG(debug, "Renumber word IDs for field %s", _field_name.c_str()); if (!open_input_word_readers()) { return false; } _word_aggregator = std::make_unique<WordAggregator>(); _word_heap->setup(renumber_word_ids_heap_limit); _word_heap->set_merge_chunk(_fusion_out_index.get_force_small_merge_chunk() ? 
1u : renumber_word_ids_merge_chunk); return true; } void FieldMerger::renumber_word_ids_main() { _word_heap->merge(*_word_aggregator, *_flush_token); if (_flush_token->stop_requested()) { _failed = true; } else if (_word_heap->empty()) { _state = State::RENUMBER_WORD_IDS_FINISH; } } bool FieldMerger::renumber_word_ids_finish() { _word_heap.reset(); _num_word_ids = _word_aggregator->getWordNum(); _word_aggregator.reset(); // Close files for (auto &i : _word_readers) { i->close(); } _word_readers.clear(); // Now read mapping files back into an array // XXX: avoid this, and instead make the array here if (!read_mapping_files()) { return false; } LOG(debug, "Finished renumbering words IDs for field %s", _field_name.c_str()); return true; } void FieldMerger::renumber_word_ids_failed() { _failed = true; if (_flush_token->stop_requested()) { return; } LOG(error, "Could not renumber field word ids for field %s dir %s", _field_name.c_str(), _field_dir.c_str()); } void FieldMerger::allocate_field_length_scanner() { SchemaUtil::IndexIterator index(_fusion_out_index.get_schema(), _id); if (index.use_interleaved_features()) { PosOccFieldsParams fieldsParams; fieldsParams.setSchemaParams(index.getSchema(), index.getIndex()); assert(fieldsParams.getNumFields() > 0); const PosOccFieldParams &fieldParams = fieldsParams.getFieldParams()[0]; if (fieldParams._hasElements) { for (const auto &old_index : _fusion_out_index.get_old_indexes()) { const Schema &old_schema = old_index.getSchema(); if (index.hasOldFields(old_schema) && !index.has_matching_use_interleaved_features(old_schema)) { _field_length_scanner = std::make_shared<FieldLengthScanner>(_fusion_out_index.get_doc_id_limit()); return; } } } } } bool FieldMerger::open_input_field_reader() { auto& oi = _fusion_out_index.get_old_indexes()[_open_reader_idx]; if (!_readers.back()->open(oi.getPath() + "/" + _field_name + "/", _fusion_out_index.get_tune_file_indexing()._read)) { _readers.pop_back(); return false; } return true; } void FieldMerger::open_input_field_readers() { SchemaUtil::IndexIterator index(_fusion_out_index.get_schema(), _id); for (; _open_reader_idx < _fusion_out_index.get_old_indexes().size(); ++_open_reader_idx) { auto& oi = _fusion_out_index.get_old_indexes()[_open_reader_idx]; const Schema &oldSchema = oi.getSchema(); if (!index.hasOldFields(oldSchema)) { continue; // drop data } _readers.push_back(FieldReader::allocFieldReader(index, oldSchema, _field_length_scanner)); auto& reader = *_readers.back(); reader.setup(_word_num_mappings[oi.getIndex()], oi.getDocIdMapping()); if (!open_input_field_reader()) { merge_postings_failed(); return; } if (reader.need_regenerate_interleaved_features_scan()) { _state = State::SCAN_ELEMENT_LENGTHS; return; } } _field_length_scanner.reset(); _open_reader_idx = std::numeric_limits<uint32_t>::max(); _state = State::OPEN_POSTINGS_FIELD_READERS_FINISH; } void FieldMerger::scan_element_lengths() { auto& reader = *_readers.back(); if (reader.isValid()) { reader.scan_element_lengths(_fusion_out_index.get_force_small_merge_chunk() ? 
1u : scan_chunk); if (reader.isValid()) { return; } } reader.close(); if (!open_input_field_reader()) { merge_postings_failed(); } else { ++_open_reader_idx; _state = State::OPEN_POSTINGS_FIELD_READERS; } } bool FieldMerger::open_field_writer() { FieldLengthInfo field_length_info; if (!_readers.empty()) { field_length_info = _readers.back()->get_field_length_info(); } SchemaUtil::IndexIterator index(_fusion_out_index.get_schema(), _id); if (!_writer->open(_field_dir + "/", 64, 262144, _fusion_out_index.get_dynamic_k_pos_index_format(), index.use_interleaved_features(), index.getSchema(), index.getIndex(), field_length_info, _fusion_out_index.get_tune_file_indexing()._write, _fusion_out_index.get_file_header_context())) { throw IllegalArgumentException(make_string("Could not open output posocc + dictionary in %s", _field_dir.c_str())); } return true; } bool FieldMerger::select_cooked_or_raw_features(FieldReader& reader) { bool rawFormatOK = true; bool cookedFormatOK = true; PostingListParams featureParams; PostingListParams outFeatureParams; vespalib::string cookedFormat; vespalib::string rawFormat; if (!reader.isValid()) { return true; } { _writer->getFeatureParams(featureParams); cookedFormat = featureParams.getStr("cookedEncoding"); rawFormat = featureParams.getStr("encoding"); if (rawFormat == "") { rawFormatOK = false; // Typically uncompressed file } outFeatureParams = featureParams; } { reader.getFeatureParams(featureParams); if (cookedFormat != featureParams.getStr("cookedEncoding")) { cookedFormatOK = false; } if (rawFormat != featureParams.getStr("encoding")) { rawFormatOK = false; } if (featureParams != outFeatureParams) { rawFormatOK = false; } if (!reader.allowRawFeatures()) { rawFormatOK = false; // Reader transforms data } } if (!cookedFormatOK) { LOG(error, "Cannot perform fusion, cooked feature formats don't match"); return false; } if (rawFormatOK) { featureParams.clear(); featureParams.set("cooked", false); reader.setFeatureParams(featureParams); reader.getFeatureParams(featureParams); if (featureParams.isSet("cookedEncoding") || rawFormat != featureParams.getStr("encoding")) { rawFormatOK = false; } if (!rawFormatOK) { LOG(error, "Cannot perform fusion, raw format setting failed"); return false; } LOG(debug, "Using raw feature format for fusion of posting files"); } return true; } bool FieldMerger::setup_merge_heap() { _heap = std::make_unique<PostingPriorityQueueMerger<FieldReader, FieldWriter>>(); for (auto &reader : _readers) { if (!select_cooked_or_raw_features(*reader)) { return false; } if (reader->isValid()) { reader->read(); } if (reader->isValid()) { _heap->initialAdd(reader.get()); } } _heap->setup(merge_postings_heap_limit); _heap->set_merge_chunk(_fusion_out_index.get_force_small_merge_chunk() ? 
1u : merge_postings_merge_chunk); return true; } void FieldMerger::merge_postings_start() { /* OUTPUT */ _writer = std::make_unique<FieldWriter>(_fusion_out_index.get_doc_id_limit(), _num_word_ids); _readers.reserve(_fusion_out_index.get_old_indexes().size()); allocate_field_length_scanner(); _open_reader_idx = 0; _state = State::OPEN_POSTINGS_FIELD_READERS; } void FieldMerger::merge_postings_open_field_readers_done() { if (!open_field_writer() || !setup_merge_heap()) { merge_postings_failed(); } else { _state = State::MERGE_POSTINGS; } } void FieldMerger::merge_postings_main() { _heap->merge(*_writer, *_flush_token); if (_flush_token->stop_requested()) { _failed = true; } else if (_heap->empty()) { _state = State::MERGE_POSTINGS_FINISH; } } bool FieldMerger::merge_postings_finish() { _heap.reset(); for (auto &reader : _readers) { if (!reader->close()) { return false; } } _readers.clear(); if (!_writer->close()) { throw IllegalArgumentException(make_string("Could not close output posocc + dictionary in %s", _field_dir.c_str())); } _writer.reset(); return true; } void FieldMerger::merge_postings_failed() { _failed = true; if (_flush_token->stop_requested()) { return; } throw IllegalArgumentException(make_string("Could not merge field postings for field %s dir %s", _field_name.c_str(), _field_dir.c_str())); } void FieldMerger::merge_field_start() { const Schema &schema = _fusion_out_index.get_schema(); SchemaUtil::IndexIterator index(schema, _id); SchemaUtil::IndexSettings settings = index.getIndexSettings(); if (settings.hasError()) { _failed = true; return; } if (FileKit::hasStamp(_field_dir + "/.mergeocc_done")) { _state = State::MERGE_DONE; return; } vespalib::mkdir(_field_dir, false); LOG(debug, "merge_field for field %s dir %s", _field_name.c_str(), _field_dir.c_str()); make_tmp_dirs(); if (!renumber_word_ids_start()) { renumber_word_ids_failed(); return; } _state = State::RENUMBER_WORD_IDS; } void FieldMerger::merge_field_finish() { bool res = merge_postings_finish(); if (!res) { merge_postings_failed(); return; } if (!FileKit::createStamp(_field_dir + "/.mergeocc_done")) { _failed = true; return; } vespalib::File::sync(_field_dir); if (!clean_tmp_dirs()) { _failed = true; return; } LOG(debug, "Finished merge_field for field %s dir %s", _field_name.c_str(), _field_dir.c_str()); _state = State::MERGE_DONE; } void FieldMerger::process_merge_field() { switch (_state) { case State::MERGE_START: merge_field_start(); break; case State::RENUMBER_WORD_IDS: renumber_word_ids_main(); break; case State::RENUMBER_WORD_IDS_FINISH: if (!renumber_word_ids_finish()) { renumber_word_ids_failed(); break; } else { merge_postings_start(); } [[fallthrough]]; case State::OPEN_POSTINGS_FIELD_READERS: open_input_field_readers(); break; case State::SCAN_ELEMENT_LENGTHS: scan_element_lengths(); break; case State::OPEN_POSTINGS_FIELD_READERS_FINISH: merge_postings_open_field_readers_done(); break; case State::MERGE_POSTINGS: merge_postings_main(); break; case State::MERGE_POSTINGS_FINISH: merge_field_finish(); break; case State::MERGE_DONE: default: LOG_ABORT("should not be reached"); } } }
apache-2.0
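The FieldMerger above drains several sorted inputs (one DictionaryWordReader or FieldReader per old index) through a PostingPriorityQueueMerger, i.e. a k-way merge, before assigning new word ids. The following is a small Python illustration of that merge pattern, not Vespa's API; the index data is invented.

import heapq

# Each input index contributes (word, old_word_id) pairs in sorted word
# order; the merger visits words across all inputs in order.
index_a = [("apple", 1), ("cherry", 2)]
index_b = [("banana", 7), ("cherry", 9)]

new_word_ids = {}
for word, _old_id in heapq.merge(index_a, index_b, key=lambda pair: pair[0]):
    # The first time a word is seen across all inputs it gets the next new
    # id; later occurrences of the same word map to the same id.
    new_word_ids.setdefault(word, len(new_word_ids))

print(new_word_ids)  # {'apple': 0, 'banana': 1, 'cherry': 2}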
shrimpma/phabricator
src/applications/uiexample/controller/PhabricatorUIExampleRenderController.php
1574
<?php

final class PhabricatorUIExampleRenderController extends PhabricatorController {

  public function shouldAllowPublic() {
    return true;
  }

  public function handleRequest(AphrontRequest $request) {
    $id = $request->getURIData('class');

    $classes = id(new PhutilSymbolLoader())
      ->setAncestorClass('PhabricatorUIExample')
      ->loadObjects();
    $classes = msort($classes, 'getName');

    $nav = new AphrontSideNavFilterView();
    $nav->setBaseURI(new PhutilURI($this->getApplicationURI('view/')));
    foreach ($classes as $class => $obj) {
      $name = $obj->getName();
      $nav->addFilter($class, $name);
    }

    $selected = $nav->selectFilter($id, head_key($classes));
    $example = $classes[$selected];
    $example->setRequest($this->getRequest());

    $result = $example->renderExample();
    if ($result instanceof AphrontResponse) {
      // This allows examples to generate dialogs, etc., for demonstration.
      return $result;
    }

    require_celerity_resource('phabricator-ui-example-css');

    $crumbs = $this->buildApplicationCrumbs();
    $crumbs->addTextCrumb($example->getName());

    $note = id(new PHUIInfoView())
      ->setTitle(pht('%s (%s)', $example->getName(), get_class($example)))
      ->appendChild($example->getDescription())
      ->setSeverity(PHUIInfoView::SEVERITY_NODATA);

    $nav->appendChild(
      array(
        $crumbs,
        $note,
        $result,
      ));

    return $this->buildApplicationPage(
      $nav,
      array(
        'title' => $example->getName(),
      ));
  }
}
apache-2.0
SURFnet/Stepup-Middleware
src/Surfnet/Stepup/Identity/Event/RegistrationAuthorityRetractedForInstitutionEvent.php
3228
<?php /** * Copyright 2018 SURFnet bv * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ namespace Surfnet\Stepup\Identity\Event; use Surfnet\Stepup\Identity\AuditLog\Metadata; use Surfnet\Stepup\Identity\Value\CommonName; use Surfnet\Stepup\Identity\Value\Email; use Surfnet\Stepup\Identity\Value\IdentityId; use Surfnet\Stepup\Identity\Value\Institution; use Surfnet\Stepup\Identity\Value\NameId; use Surfnet\StepupMiddleware\CommandHandlingBundle\SensitiveData\Forgettable; use Surfnet\StepupMiddleware\CommandHandlingBundle\SensitiveData\SensitiveData; class RegistrationAuthorityRetractedForInstitutionEvent extends IdentityEvent implements Forgettable { /** * @var NameId */ public $nameId; /** * @var CommonName */ public $commonName; /** * @var Email */ public $email; /** * @var Institution */ public $raInstitution; public function __construct( IdentityId $identityId, Institution $institution, NameId $nameId, CommonName $commonName, Email $email, Institution $raInstitution ) { parent::__construct($identityId, $institution); $this->nameId = $nameId; $this->commonName = $commonName; $this->email = $email; $this->raInstitution = $raInstitution; } public function getAuditLogMetadata() { $metadata = new Metadata(); $metadata->identityId = $this->identityId; $metadata->identityInstitution = $this->identityInstitution; return $metadata; } public static function deserialize(array $data) { return new self( new IdentityId($data['identity_id']), new Institution($data['identity_institution']), new NameId($data['name_id']), CommonName::unknown(), Email::unknown(), new Institution($data['ra_institution']) ); } public function serialize() { return [ 'identity_id' => (string) $this->identityId, 'identity_institution' => (string) $this->identityInstitution, 'name_id' => (string) $this->nameId, 'ra_institution' => (string) $this->raInstitution, ]; } public function getSensitiveData() { return (new SensitiveData) ->withCommonName($this->commonName) ->withEmail($this->email); } public function setSensitiveData(SensitiveData $sensitiveData) { $this->email = $sensitiveData->getEmail(); $this->commonName = $sensitiveData->getCommonName(); } }
apache-2.0
arnost-starosta/midpoint
gui/admin-gui/src/main/java/com/evolveum/midpoint/web/component/input/DisplayableValueChoiceRenderer.java
1279
package com.evolveum.midpoint.web.component.input;

import java.util.List;

import org.apache.commons.lang3.StringUtils;
import org.apache.wicket.markup.html.form.IChoiceRenderer;
import org.apache.wicket.model.IModel;

import com.evolveum.midpoint.util.DisplayableValue;

public class DisplayableValueChoiceRenderer<T> implements IChoiceRenderer<T> {

    private static final long serialVersionUID = 1L;

    private List<DisplayableValue> choices;

    public DisplayableValueChoiceRenderer(List<DisplayableValue> choices) {
        this.choices = choices;
    }

    @Override
    public String getDisplayValue(T object) {
        if (object == null) {
            return null;
        }

        if (object instanceof DisplayableValue) {
            return ((DisplayableValue) object).getLabel();
        }

        return object.toString();
    }

    @Override
    public String getIdValue(T object, int index) {
        if (object instanceof String && choices != null) {
            for (DisplayableValue v : choices) {
                if (object.equals(v.getValue())) {
                    return String.valueOf(choices.indexOf(v));
                }
            }
        }
        return Integer.toString(index);
    }

    @Override
    public T getObject(String id, IModel<? extends List<? extends T>> choices) {
        if (StringUtils.isBlank(id)) {
            return null;
        }
        return choices.getObject().get(Integer.parseInt(id));
    }
}
apache-2.0
cjellick/go-machine-service
Godeps/_workspace/src/github.com/rancherio/go-rancher/client/generated_add_remove_service_link_input.go
2265
package client const ( ADD_REMOVE_SERVICE_LINK_INPUT_TYPE = "addRemoveServiceLinkInput" ) type AddRemoveServiceLinkInput struct { Resource ServiceId string `json:"serviceId,omitempty"` } type AddRemoveServiceLinkInputCollection struct { Collection Data []AddRemoveServiceLinkInput `json:"data,omitempty"` } type AddRemoveServiceLinkInputClient struct { rancherClient *RancherClient } type AddRemoveServiceLinkInputOperations interface { List(opts *ListOpts) (*AddRemoveServiceLinkInputCollection, error) Create(opts *AddRemoveServiceLinkInput) (*AddRemoveServiceLinkInput, error) Update(existing *AddRemoveServiceLinkInput, updates interface{}) (*AddRemoveServiceLinkInput, error) ById(id string) (*AddRemoveServiceLinkInput, error) Delete(container *AddRemoveServiceLinkInput) error } func newAddRemoveServiceLinkInputClient(rancherClient *RancherClient) *AddRemoveServiceLinkInputClient { return &AddRemoveServiceLinkInputClient{ rancherClient: rancherClient, } } func (c *AddRemoveServiceLinkInputClient) Create(container *AddRemoveServiceLinkInput) (*AddRemoveServiceLinkInput, error) { resp := &AddRemoveServiceLinkInput{} err := c.rancherClient.doCreate(ADD_REMOVE_SERVICE_LINK_INPUT_TYPE, container, resp) return resp, err } func (c *AddRemoveServiceLinkInputClient) Update(existing *AddRemoveServiceLinkInput, updates interface{}) (*AddRemoveServiceLinkInput, error) { resp := &AddRemoveServiceLinkInput{} err := c.rancherClient.doUpdate(ADD_REMOVE_SERVICE_LINK_INPUT_TYPE, &existing.Resource, updates, resp) return resp, err } func (c *AddRemoveServiceLinkInputClient) List(opts *ListOpts) (*AddRemoveServiceLinkInputCollection, error) { resp := &AddRemoveServiceLinkInputCollection{} err := c.rancherClient.doList(ADD_REMOVE_SERVICE_LINK_INPUT_TYPE, opts, resp) return resp, err } func (c *AddRemoveServiceLinkInputClient) ById(id string) (*AddRemoveServiceLinkInput, error) { resp := &AddRemoveServiceLinkInput{} err := c.rancherClient.doById(ADD_REMOVE_SERVICE_LINK_INPUT_TYPE, id, resp) return resp, err } func (c *AddRemoveServiceLinkInputClient) Delete(container *AddRemoveServiceLinkInput) error { return c.rancherClient.doResourceDelete(ADD_REMOVE_SERVICE_LINK_INPUT_TYPE, &container.Resource) }
apache-2.0
freedot/tstolua
tests/cases/fourslash/signatureHelpAnonymousFunction.ts
647
/// <reference path='fourslash.ts' />

////var anonymousFunctionTest = function(n: number, s: string): (a: number, b: string) => string {
////    return null;
////}
////anonymousFunctionTest(5, "")(/*anonymousFunction1*/1, /*anonymousFunction2*/"");

goTo.marker('anonymousFunction1');
verify.signatureHelpCountIs(1);
verify.currentSignatureParameterCountIs(2);
verify.currentSignatureHelpIs('(a: number, b: string): string');
verify.currentParameterHelpArgumentNameIs("a");
verify.currentParameterSpanIs("a: number");

goTo.marker('anonymousFunction2');
verify.currentParameterHelpArgumentNameIs("b");
verify.currentParameterSpanIs("b: string");
apache-2.0
CouleurCitron/cms-2014
js/domUtils.js
5881
var isNav4, isNav6, isIE4; /* * Browser version snooper; determines your browser * (Navigator 4, Navigator 6, or Internet Explorer 4/5) */ function setBrowser() { if (navigator.appVersion.charAt(0) == "4") { if (navigator.appName.indexOf("Explorer") >= 0) { isIE4 = true; } else { isNav4 = true; } } else if (navigator.appVersion.charAt(0) > "4") { isNav6 = true; } } /* * * Given a selector string, return a style object * by searching through stylesheets. Return null if * none found * */ function getStyleBySelector( selector ) { if (!isNav6) { return null; } var sheetList = document.styleSheets; var ruleList; var i, j; /* look through stylesheets in reverse order that they appear in the document */ for (i=sheetList.length-1; i >= 0; i--) { ruleList = sheetList[i].cssRules; for (j=0; j<ruleList.length; j++) { if (ruleList[j].type == CSSRule.STYLE_RULE && ruleList[j].selectorText == selector) { return ruleList[j].style; } } } return null; } /* * * Given an id and a property (as strings), return * the given property of that id. Navigator 6 will * first look for the property in a tag; if not found, * it will look through the stylesheet. * * Note: do not precede the id with a # -- it will be * appended when searching the stylesheets * */ function getIdProperty( id, property ) { if (isNav6) { var styleObject = document.getElementById( id ); if (styleObject != null) { styleObject = styleObject.style; if (styleObject[property]) { return styleObject[ property ]; } } styleObject = getStyleBySelector( "#" + id ); return (styleObject != null) ? styleObject[property] : null; } else if (isNav4) { return document[id][property]; } else { return document.all[id].style[property]; } } /* * * Given an id and a property (as strings), set * the given property of that id to the value provided. * * The property is set directly on the tag, not in the * stylesheet. * */ function setIdProperty( id, property, value ) { if (isNav6) { var styleObject = document.getElementById( id ); if (styleObject != null) { styleObject = styleObject.style; styleObject[ property ] = value; } /* styleObject = getStyleBySelector( "#" + id ); if (styleObject != null) { styleObject[property] = value; } */ } else if (isNav4) { document[id][property] = value; } else if (isIE4) { document.all[id].style[property] = value; } } /* * * Move a given id. If additive is true, * then move it by xValue dots horizontally and * yValue units vertically. If additive is * false, then move it to (xValue, yValue) * * Note: do not precede the id with a # -- it will be * appended when searching the stylesheets * * Note also: length units are preserved in Navigator 6 * and Internet Explorer. That is, if left is 2cm and * top is 3cm, and you move to (4, 5), the left will * become 4cm and the top 5cm. * */ function generic_move( id, xValue, yValue, additive ) { var left = getIdProperty(id, "left"); var top = getIdProperty(id, "top"); var leftMatch, topMatch; if (isNav4) { leftMatch = new Array( 0, left, ""); topMatch = new Array( 0, top, ""); } else if (isNav6 || isIE4 ) { var splitexp = /([-0-9.]+)(\w+)/; leftMatch = splitexp.exec( left ); topMatch = splitexp.exec( top ); if (leftMatch == null || topMatch == null) { leftMatch = new Array(0, 0, "px"); topMatch = new Array(0, 0, "px"); } } left = ((additive) ? parseFloat( leftMatch[1] ) : 0) + xValue; top = ((additive) ? 
parseFloat( topMatch[1] ) : 0) + yValue; setIdProperty( id, "left", left + leftMatch[2] ); setIdProperty( id, "top", top + topMatch[2] ); } /* * * Move a given id to position (xValue, yValue) * */ function moveIdTo( id, x, y ) { generic_move( id, x, y, false ); } /* * * Move a given id to (currentX + xValue, currentY + yValue) * */ function moveIdBy( id, x, y) { generic_move( id, x, y, true ); } /* * * Function used when converting rgb format colors * from Navigator 6 to a hex format * */ function hex( n ) { var hexdigits = "0123456789abcdef"; return ( hexdigits.charAt(n >> 4) + hexdigits.charAt(n & 0x0f) ); } /* * * Retrieve background color for a given id. * The value returned will be in hex format (#rrggbb) * */ function getBackgroundColor( id ) { var color; if (isNav4) { color = document[id].bgColor; } else if (isNav6) { var parseExp = /rgb.(\d+),(\d+),(\d+)./; var rgbvals; color = getIdProperty( id, "backgroundColor" ); if (color) { rgbvals = parseExp.exec( color ); if (rgbvals) { color = "#" + hex( rgbvals[1] ) + hex( rgbvals[2] ) + hex( rgbvals[3] ); } } return color; } else if (isIE4) { return document.all[id].backgroundColor; } return ""; } /* * * Return a division's document * */ function getDocument( divName ) { var doc; if (isNav4) { doc = window.document[divName].document; } else if (isNav6) { doc = document; } else if (isIE4) { doc = document; } return doc; }
apache-2.0
X-Sharp/XSharpPublic
VisualStudio/ProjectBase/Tracing.cs
2467
/* ****************************************************************************
 *
 * Copyright (c) Microsoft Corporation.
 *
 * This source code is subject to terms and conditions of the Apache License, Version 2.0. A
 * copy of the license can be found in the License.txt file at the root of this distribution.
 *
 * You must not remove this notice, or any other, from this software.
 *
 * ***************************************************************************/

using System.Diagnostics;
using XSharpModel;

namespace Microsoft.VisualStudio.Project
{
    public class CCITracing
    {
        private CCITracing() { }

        [ConditionalAttribute("Enable_CCIDiagnostics")]
        static void InternalTraceCall(int levels)
        {
            System.Diagnostics.StackFrame stack;
            stack = new System.Diagnostics.StackFrame(levels);
            System.Reflection.MethodBase method = stack.GetMethod();
            if (method != null)
            {
                string name = method.Name + " \tin class " + method.DeclaringType.Name;
                XSettings.LogMessage("Call Trace: \t" + name);
            }
        }

        [ConditionalAttribute("CCI_TRACING")]
        static public void TraceCall()
        {
            // skip this one as well
            CCITracing.InternalTraceCall(2);
        }

        [ConditionalAttribute("CCI_TRACING")]
        static public void TraceCall(string strParameters)
        {
            CCITracing.InternalTraceCall(2);
            XSettings.LogMessage("\tParameters: \t" + strParameters);
        }

        [ConditionalAttribute("CCI_TRACING")]
        static public void Trace(System.Exception e)
        {
            CCITracing.InternalTraceCall(2);
            XSettings.LogException(e, "");
        }

        [ConditionalAttribute("CCI_TRACING")]
        static public void Trace(string strOutput)
        {
            XSettings.LogMessage(strOutput);
        }

        [ConditionalAttribute("CCI_TRACING")]
        static public void TraceData(string strOutput)
        {
            XSettings.LogMessage("Data Trace: \t" + strOutput);
        }

        [ConditionalAttribute("Enable_CCIFileOutput")]
        [ConditionalAttribute("CCI_TRACING")]
        static public void AddTraceLog(string strFileName)
        {
            TextWriterTraceListener tw = new TextWriterTraceListener("c:\\mytrace.log");
            System.Diagnostics.Trace.Listeners.Add(tw);
        }
    }
}
apache-2.0
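CCITracing.InternalTraceCall above reads the caller's name by walking up the managed stack with System.Diagnostics.StackFrame. The same walk-up-the-stack idea is sketched below in Python with the standard inspect module; it is illustrative only and not part of the project, and the function names are made up.

import inspect

def trace_call(levels=1):
    # Look 'levels' frames up the stack, mirroring new StackFrame(levels)
    # in InternalTraceCall, and log the calling function and its module.
    frame_info = inspect.stack()[levels]
    module_name = frame_info.frame.f_globals.get("__name__", "<unknown>")
    print("Call Trace:\t%s \tin module %s" % (frame_info.function, module_name))

def some_method():
    trace_call()

some_method()  # prints: Call Trace:   some_method   in module __main__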
OpenBEL/openbel-framework
org.openbel.framework.common/src/test/java/org/openbel/framework/common/external/WriteCacheTest.java
3046
/** * Copyright (C) 2012-2013 Selventa, Inc. * * This file is part of the OpenBEL Framework. * * This program is free software; you can redistribute it and/or modify it * under the terms of the GNU Lesser General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * The OpenBEL Framework is distributed in the hope that it will be useful, but * WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public * License for more details. * * You should have received a copy of the GNU Lesser General Public License * along with the OpenBEL Framework. If not, see <http://www.gnu.org/licenses/>. * * Additional Terms under LGPL v3: * * This license does not authorize you and you are prohibited from using the * name, trademarks, service marks, logos or similar indicia of Selventa, Inc., * or, in the discretion of other licensors or authors of the program, the * name, trademarks, service marks, logos or similar indicia of such authors or * licensors, in any marketing or advertising materials relating to your * distribution of the program or any covered product. This restriction does * not waive or limit your obligation to keep intact all copyright notices set * forth in the program as delivered to you. * * If you distribute the program in whole or in part, or any modified version * of the program, and you assume contractual liability to the recipient with * respect to the program or modified version, then you will indemnify the * authors and licensors of the program for any liabilities that these * contractual assumptions directly impose on those licensors and authors. */ package org.openbel.framework.common.external; import static org.junit.Assert.assertTrue; import static org.openbel.framework.common.BELUtilities.entries; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.UUID; import org.junit.Before; import org.junit.Test; import org.openbel.framework.common.external.WriteCache; public class WriteCacheTest { private WriteCache subject; @Before public void setUp() { subject = new WriteCache(); } @Test public void test() { assertTrue(subject.size() == 0); int cached_count = 10; List<UUID> uuids = new ArrayList<UUID>(); Map<Integer, UUID> truth = new HashMap<Integer, UUID>(); for (int i = 0; i < cached_count; i++) { UUID uuid = UUID.randomUUID(); uuids.add(uuid); Integer key = subject.cache(uuid); truth.put(key, uuid); } assertTrue(subject.size() == cached_count); for (final Entry<Integer, UUID> e : entries(truth)) { Integer key = e.getKey(); UUID value = e.getValue(); assertTrue(subject.get(value) == key); } } }
apache-2.0
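The test above checks that WriteCache hands out one small integer key per distinct UUID and returns the same key on lookup, i.e. value interning. Below is a sketch of that pattern in Python, not the OpenBEL implementation; the class only mirrors the cache/get/size calls the test makes.

import uuid

class WriteCacheSketch:
    def __init__(self):
        self._keys = {}

    def cache(self, value):
        # Assign the next integer key the first time a value is seen,
        # and return the existing key on repeat calls.
        return self._keys.setdefault(value, len(self._keys))

    def get(self, value):
        return self._keys.get(value)

    def size(self):
        return len(self._keys)

cache = WriteCacheSketch()
value = uuid.uuid4()
key = cache.cache(value)
assert cache.get(value) == key and cache.size() == 1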
wgrmath/forum
server/engine/application.js
8934
/****************************************************************************** Application *******************************************************************************/ 'use strict'; var _ = require('underscore'), uuid = require('uuid'), Models = require('../models'), logger = require('../logger'); module.exports = function(options, instance, self) { var storage = self.storage; // ---------------------------- CREATE API KEY ------------------------------- self.create_api_key = function(payload) { storage.get_app(payload.app_id, function(application) { if (!application) { logger.error(payload.app_id, 'invalid_app_id'); self.fail(payload); } else { var api_keys = application.get('api_keys') || []; var key = payload.using || uuid.v1(); api_keys.push(key); storage.update_app(application, { api_keys: api_keys }, function(updated_application) { self.invalidateApplication(application.get('id')); if (updated_application) { console.log('[BEGIN]'); console.log(key); console.log('[END]'); self.success(payload); } else { self.fail(payload); } }); } }); } // ---------------------------- DELETE API KEY ------------------------------- self.delete_api_key = function(payload) { storage.get_api_key(payload.api_key, function(application) { if (!application) { logger.error(payload.api_key, 'invalid_api_key'); self.fail(payload); } else { var api_keys = application.get('api_keys'); var index = api_keys.indexOf(payload.api_key); if (index >= 0) { api_keys.splice(index, 1); storage.update_app(application, { api_keys: api_keys }, function(updated_application) { self.invalidateApplication(application.get('id')); logger.info(updated_application.get('id'), 'delete_api_key', payload.api_key); self.success(payload); }); } else { self.fail(payload); } } }); } // ---------------------------- LIST API KEYS -------------------------------- self.list_api_keys = function(payload) { storage.get_app(payload.app_id, function(application) { if (!application) { logger.error(payload.app_id, 'invalid_app_id'); self.fail(payload); } else { console.log('[BEGIN]'); _.each(application.get('api_keys'), function(key) { console.log(key); }); console.log('[END]'); self.success(payload); } }); } // -------------------------------- LIST APPS -------------------------------- self.list_apps = function(payload) { storage.list_apps(function(list) { _.each(list, function(application) { console.log(application.toLog(true)); }); self.success(payload); }); } // --------------------------------- GET APP --------------------------------- self.get_app = function(app_id, callback) { storage.get_app(app_id, function(application) { callback(application); }); } // --------------------------------- ADD APP --------------------------------- self.add_app = function(payload) { var application = new Models.Application({ id: payload.app_id, name: payload.name || '', description: payload.description || '', url: payload.url || '', auth_url: payload.auth_url || '', fullpage_url: payload.fullpage_url || '', pushstate: payload.pushstate === 'true', notifications_email: payload.notifications_email || '', contact_email: payload.contact_email || '', theme: payload.theme || '', private_key: uuid.v1(), created: Date.now() }); storage.add_app(application, function() { logger.info(payload.app_id, 'add_app', application); self.success(payload); }); } // ------------------------------- UPDATE APP -------------------------------- self.update_app = function(payload) { storage.get_app(payload.app_id, function(application) { if (!application) { logger.error(payload.app_id, 
'update_app: Invalid app_id', payload); self.fail(payload); } else { if (payload.name != undefined) application.set('name', payload.name); if (payload.description != undefined) application.set('description', payload.description); if (payload.url != undefined) application.set('url', payload.url); if (payload.auth_url != undefined) application.set('auth_url', payload.auth_url); if (payload.fullpage_url != undefined) application.set('fullpage_url', payload.fullpage_url); if (payload.pushstate != undefined) application.set('pushstate', payload.pushstate === 'true'); if (payload.notifications_email != undefined) application.set('notifications_email', payload.notifications_email); if (payload.contact_email != undefined) application.set('contact_email', payload.contact_email); if (payload.theme != undefined) application.set('theme', payload.theme); storage.update_app(application, null, function() { self.invalidateApplication(application.get('id')); logger.info(payload.app_id, 'update_app', application); self.success(payload); }); } }); } // ------------------------------- DELETE APP -------------------------------- self.delete_app = function(payload) { storage.get_app(payload.app_id, function(application) { if (!application) { logger.error(payload.app_id, 'delete_app: Invalid app_id', payload); self.fail(payload); } else { storage.delete_app(application, function() { self.invalidateApplication(payload.app_id); logger.info(payload.app_id, 'delete_app', application); self.success(payload); }); } }); } // ------------------------------- RESET USERS ------------------------------- self.reset_users = function(payload) { storage.get_app(payload.app_id, function(application) { if (!application) { logger.error(payload.app_id, 'reset_users: Invalid app_id', payload); self.fail(payload); } else { storage.reset_users(application, function() { logger.info(payload.app_id, 'reset_users', application); self.success(payload); }); } }); } // ------------------------------- LIST USERS ------------------------------- self.list_users = function(payload) { storage.match_users(payload.app_id, {}, function(list) { _.each(list, function(user) { console.log(user.toLog(true)); }); console.log('Total users: ' + list.length); self.success(payload); }); } // ------------------------------- SEARCH USERS ------------------------------- self.search_users = function(payload) { storage.search_users(payload.app_id, payload.value, function(list) { _.each(list, function(user) { console.log(user.toLog(true)); }); console.log('Total users: ' + list.length); self.success(payload); }); } // ------------------------------- DELETE USER ------------------------------- self.delete_user = function(payload) { storage.get_user(payload.app_id, payload.value, function(user) { if (user) { storage.delete_user(payload.app_id, user, function(deleted_user) { if (deleted_user) { logger.info(payload.app_id, 'delete_user', deleted_user); self.success(payload); } else { logger.error(payload.app_id, 'delete_user_failed', user); self.fail(payload); } }); } else { logger.error(payload.app_id, 'delete_user_id_not_found', payload.value); self.fail(payload); } }); } // ------------------------------- PROPERTIES ------------------------------- self.set_app_property = function(payload) { storage.set_app_property(payload.app_id, payload.property, payload.value, function(property, value) { if (property) { logger.info(payload.app_id, 'set_app_property', payload); self.success(payload); } else { logger.error(payload.app_id, 'set_app_property failed', payload); 
self.fail(payload); } }); } self.get_app_property = function(payload) { storage.get_app_property(payload.app_id, payload.property, function(property, value) { if (property) { console.log('[BEGIN]'); console.log(value); console.log('[END]'); self.success(payload); } else { logger.error(payload.app_id, 'get_app_property failed', payload); self.fail(payload); } }); } }
apache-2.0
docker/dockercloud-cli
dockercloudcli/utils.py
15248
from __future__ import print_function import codecs import datetime import json import os import re import sys import ago import dockercloud import yaml from dateutil import tz from tabulate import tabulate from exceptions import BadParameter, StreamOutputError from interpolation import interpolate_environment_variables SUPPORTED_FILENAMES = [ 'docker-cloud.yml', 'docker-cloud.yaml', 'tutum.yml', 'tutum.yaml', 'docker-compose.yml', 'docker-compose.yaml', ] def tabulate_result(data_list, headers): print(tabulate(data_list, headers, stralign="left", tablefmt="plain")) def from_utc_string_to_utc_datetime(utc_datetime_string): if not utc_datetime_string: return None utc_date_object = datetime.datetime.strptime(utc_datetime_string, "%a, %d %b %Y %H:%M:%S +0000") return utc_date_object def get_humanize_local_datetime_from_utc_datetime_string(utc_datetime_string): def get_humanize_local_datetime_from_utc_datetime(utc_target_datetime): local_now = datetime.datetime.now(tz.tzlocal()) if utc_target_datetime: local_target_datetime = utc_target_datetime.replace(tzinfo=tz.gettz("UTC")).astimezone(tz=tz.tzlocal()) return ago.human(local_now - local_target_datetime, precision=1) return "" utc_target_datetime = from_utc_string_to_utc_datetime(utc_datetime_string) return get_humanize_local_datetime_from_utc_datetime(utc_target_datetime) def is_uuid4(identifier): uuid4_regexp = re.compile('^[a-f0-9]{8}-[a-f0-9]{4}-4[a-f0-9]{3}-[89ab][a-f0-9]{3}-[a-f0-9]{12}', re.I) match = uuid4_regexp.match(identifier) return bool(match) def add_unicode_symbol_to_state(state): if state in ["Running", "Partly running", "Deployed"]: return u"\u25B6 " + state elif state in ["Init", "Stopped", "Not running"]: return u"\u25FC " + state elif state in ["Starting", "Stopping", "Scaling", "Terminating", "Deploying", "Redeploying"]: return u"\u2699 " + state elif state in ["Start failed", "Stopped with errors"]: return u"\u0021 " + state elif state == "Terminated": return u"\u2718 " + state elif state == "Unreachable": return u"\u2753 " + state return state def stream_output(output, stream): def print_output_event(event, stream, is_terminal): if 'errorDetail' in event: raise StreamOutputError(event['errorDetail']['message']) terminator = '' if is_terminal and 'stream' not in event: # erase current line stream.write("%c[2K\r" % 27) terminator = "\r" pass elif 'progressDetail' in event: return if 'time' in event: stream.write("[%s] " % event['time']) if 'id' in event: stream.write("%s: " % event['id']) if 'from' in event: stream.write("(from %s) " % event['from']) status = event.get('status', '') if 'progress' in event: stream.write("%s %s%s" % (status, event['progress'], terminator)) elif 'progressDetail' in event: detail = event['progressDetail'] if 'current' in detail: percentage = float(detail['current']) / float(detail['total']) * 100 stream.write('%s (%.1f%%)%s' % (status, percentage, terminator)) else: stream.write('%s%s' % (status, terminator)) elif 'stream' in event: stream.write("%s%s" % (event['stream'], terminator)) else: stream.write("%s%s\n" % (status, terminator)) is_terminal = hasattr(stream, 'fileno') and os.isatty(stream.fileno()) stream = codecs.getwriter('utf-8')(stream) all_events = [] lines = {} diff = 0 for chunk in output: event = json.loads(chunk) all_events.append(event) if 'progress' in event or 'progressDetail' in event: image_id = event.get('id') if not image_id: continue if image_id in lines: diff = len(lines) - lines[image_id] else: lines[image_id] = len(lines) stream.write("\n") diff = 0 if is_terminal: 
# move cursor up `diff` rows stream.write("%c[%dA" % (27, diff)) print_output_event(event, stream, is_terminal) if 'id' in event and is_terminal: # move cursor back down stream.write("%c[%dB" % (27, diff)) stream.flush() return all_events def get_uuids_of_trigger(trigger, identifiers): uuid_list = [] for identifier in identifiers: if is_uuid4(identifier): uuid_list.append(identifier) else: handlers = trigger.list(uuid__startswith=identifier) or \ trigger.list(name=identifier) for handler in handlers: uuid = handler.get('uuid', "") if uuid: uuid_list.append(uuid) if not uuid_list: raise dockercloud.ObjectNotFound("Cannot find a trigger with the identifier '%s'" % identifiers) return uuid_list def parse_links(links, target): def _format_link(_link): link_regexp = re.compile(r'^[a-zA-Z0-9_-]+(\.[a-zA-Z0-9_-]+)?:[a-zA-Z0-9_-]+$') match = link_regexp.match(_link) if match: temp = _link.split(":", 1) return {target: temp[0], 'name': temp[1]} raise BadParameter("Link variable argument %s does not match with (service_name[.stack_name]:alias)." " Example: mysql:db" % _link) return [_format_link(link) for link in links] if links else [] def parse_published_ports(port_list): def _get_port_dict(_port): port_regexp = re.compile('^([0-9]{1,5}:)?([0-9]{1,5})(/tcp|/udp)?$') match = port_regexp.match(_port) if bool(match): outer_port = match.group(1) inner_port = match.group(2) protocol = match.group(3) if protocol is None: protocol = "tcp" else: protocol = protocol[1:] port_spec = {'protocol': protocol, 'inner_port': inner_port, 'published': True} if outer_port is not None: port_spec['outer_port'] = outer_port[:-1] return port_spec raise BadParameter("publish port %s does not match with '[host_port:]container_port[/protocol]'." " E.g: 80:80/tcp" % _port) parsed_ports = [] if port_list is not None: parsed_ports = [] for port in port_list: parsed_ports.append(_get_port_dict(port)) return parsed_ports def parse_exposed_ports(port_list): def _get_port_dict(_port): if isinstance(_port, int) and 0 <= _port < 65535: port_spec = {'protocol': 'tcp', 'inner_port': '%d' % _port, 'published': False} return port_spec raise BadParameter("expose port %s is not a valid port number" % _port) parsed_ports = [] if port_list is not None: parsed_ports = [] for port in port_list: parsed_ports.append(_get_port_dict(port)) return parsed_ports def parse_envvars(envvar_list, envfile_list): def _transform_envvar(_envvar): _envvar = _envvar.split("=", 1) length = len(_envvar) if length == 2: return {'key': _envvar[0], 'value': _envvar[1]} else: raise BadParameter("Environment variable '%s' does not match with 'KEY=VALUE'." 
" Example: ENVVAR=foo" % _envvar[0]) def _read_envvar(envfile): envvars = [] with open(envfile) as f: lines = f.readlines() for line in lines: line = line.strip() if line.startswith("#"): continue if line == "": continue envvars.append(line) return envvars transformed_envvars = [] envvars = [] if envfile_list is not None: for envfile in envfile_list: envvars.extend(_read_envvar(envfile)) if envvar_list is not None: envvars.extend(envvar_list) if envvars is not None: for envvar in envvars: transformed_envvars.append(_transform_envvar(envvar)) parsed_envvar_dict = {} parsed_envvar_list = [] for transformed_envvar in transformed_envvars: parsed_envvar_dict[transformed_envvar["key"]] = transformed_envvar for v in parsed_envvar_dict.itervalues(): parsed_envvar_list.append(v) return parsed_envvar_list def parse_volume(volume): bindings = [] if not volume: return bindings for vol in volume: binding = {} terms = vol.split(":") if len(terms) == 1: binding["container_path"] = terms[0] elif len(terms) == 2: binding["host_path"] = terms[0] binding["container_path"] = terms[1] elif len(terms) == 3: binding["host_path"] = terms[0] binding["container_path"] = terms[1] if terms[2].lower() == 'ro': binding["rewritable"] = False else: raise BadParameter('Bad volume argument %s. Format: "[host_path:]/container_path[:permission]"' % vol) bindings.append(binding) return bindings def parse_volumes_from(volumes_from): bindings = [] if not volumes_from: return bindings for identifier in volumes_from: binding = {} service = dockercloud.Utils.fetch_remote_service(identifier) binding["volumes_from"] = service.resource_uri bindings.append(binding) return bindings def load_stackfiles(name, files, stack=None): stack = update_stack(name, stack) stackfiles = get_stackfiles(files) data = get_services_from_stackfiles(stack.name, stackfiles) for k, v in list(data.items()): setattr(stack, k, v) return stack def update_stack(name, stack): if not stack: stack = dockercloud.Stack.create() if name: stack.name = name else: stack.name = os.path.basename(os.getcwd()) return stack def get_services_from_stackfiles(name, stackfiles): services_dict = {} for stackfile in stackfiles: with open(stackfile, 'r') as f: content = yaml.load(f.read()) try: interpolated_content = interpolate_environment_variables(content, 'service') except Exception as e: raise BadParameter("Bad format of the stack file(%s): %s" % (stackfile, e)) if interpolated_content: for k, v in interpolated_content.items(): v.update({"name": k}) services_dict[k] = v else: raise BadParameter("Bad format of the stack file: %s" % stackfile) services = inject_env_var(services_dict.values()) data = {'name': name, 'services': services} return data def find_candidate_in_parent_dirs(filenames, path): candidate = "" for filename in filenames: if os.path.exists(os.path.join(path, filename)): candidate = filename break if not candidate: parent_dir = os.path.join(path, '..') if os.path.abspath(parent_dir) != os.path.abspath(path): return find_candidate_in_parent_dirs(filenames, parent_dir) return candidate, os.path.abspath(path) def get_stackfiles(files): stackfiles = [] if not files: candidate, path = find_candidate_in_parent_dirs(SUPPORTED_FILENAMES, os.getcwd()) if candidate: stackfiles.append(os.path.join(path, candidate)) alternative = candidate.replace(".", ".override.") if os.path.exists(os.path.join(path, alternative)): stackfiles.append(os.path.join(path, alternative)) else: stackfiles = files return stackfiles def inject_env_var(services): for service in services: try: 
env_vars = service["environment"] except: continue if isinstance(env_vars, list): for i, env_var in enumerate(env_vars): if isinstance(env_var, str) and env_var.find("=") < 0 and os.getenv(env_var): env_vars[i] = "%s=%s" % (env_var, os.getenv(env_var)) elif isinstance(env_vars, dict): for k, v in env_vars.iteritems(): if not v and os.getenv(k): env_vars[k] = os.getenv(k) return services # def sync_action(obj, sync): # action_uri = getattr(obj, "dockercloud_action_uri", "") # if sync and action_uri: # action = dockercloud.Utils.fetch_by_resource_uri(action_uri) # action.logs(tail=None, follow=True, log_handler=action_log_handler) def sync_action(obj, sync): import time success = True action_uri = getattr(obj, "dockercloud_action_uri", "") if sync and action_uri: last_state = None while True: try: action = dockercloud.Utils.fetch_by_resource_uri(action_uri) if last_state != action.state: if last_state: sys.stdout.write('\n') sys.stdout.write(action.state) last_state = action.state else: sys.stdout.write('.') if action.state.lower() == "success": sys.stdout.write('\n') break if action.state.lower() == "failed": success = False sys.stdout.write('\n') break sys.stdout.flush() time.sleep(4) except dockercloud.ApiError as e: print(e, file=sys.stderr) continue except Exception as e: print(e, file=sys.stderr) success = False break return success def container_service_log_handler(message): try: msg = json.loads(message) out = sys.stdout if msg.get("streamType", None) == "stderr": out = sys.stderr log = msg["log"] source = msg.get("source", None) if source: log = " | ".join([source, log]) if os.isatty(out.fileno()): log = AnsiColor.color_it(log, source) out.write(log) out.flush() except: pass def action_log_handler(message): try: msg = json.loads(message) if msg.get("type") == "log": print(msg.get("log", "")) except: pass class AnsiColor: source_identified = [] @staticmethod def color_it(log, source): if source not in AnsiColor.source_identified: AnsiColor.source_identified.append(source) color_index = AnsiColor.source_identified.index(source) % 7 seq = "\x1b[1;%dm%s\x1b[0m" % (31 + color_index, log) return seq
apache-2.0
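parse_published_ports above accepts strings of the form "[host_port:]container_port[/protocol]" and defaults the protocol to tcp. The standalone snippet below exercises the same regular expression; the pattern is copied from the function, while the surrounding loop exists only for demonstration.

import re

# Same pattern as _get_port_dict inside parse_published_ports.
port_regexp = re.compile('^([0-9]{1,5}:)?([0-9]{1,5})(/tcp|/udp)?$')

for spec in ("80:80/tcp", "8080", "53/udp", "bad:spec"):
    match = port_regexp.match(spec)
    if not match:
        print("%s -> rejected" % spec)
        continue
    outer, inner, proto = match.groups()
    print("%s -> %s" % (spec, {
        'inner_port': inner,
        'outer_port': outer[:-1] if outer else None,
        'protocol': (proto or '/tcp')[1:],
        'published': True,
    }))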
jesshaw/node-sample-project
puppeteer/sample/app/resources/birds.js
445
var express = require('express');
var router = express.Router();

// middleware that is specific to this router
router.use(function timeLog(req, res, next) {
  console.log('Time: ', Date.now());
  next();
});

// define the home page route
router.get('/', function (req, res) {
  res.send('Birds home page');
});

// define the about route
router.get('/about', function (req, res) {
  res.send('About birds');
});

module.exports = router;
apache-2.0
codemucker/codemucker-jpattern
src/main/java/org/codemucker/jpattern/generate/GenerateLog.java
2658
package org.codemucker.jpattern.generate; import java.lang.annotation.Documented; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; @Retention(RetentionPolicy.RUNTIME) @Documented @Target(ElementType.TYPE) @IsGeneratorConfig(defaultGenerator = "org.codemucker.jmutate.generate.log.LogGenerator") public @interface GenerateLog { boolean markGenerated() default true; boolean enabled() default true; /** * Name of the log field. Default is 'LOG' */ String fieldName() default "LOG"; /** * If empty then use the name of the class * * @return */ String topic() default ""; /** * What to do if a log statement already exists which is not marked as under * this generators control */ ClashStrategy clashStrategy() default ClashStrategy.SKIP; /** * The logger framework to use * * @return */ Type logger() default Type.Log4j; public static enum Type { /** * <pre> * org.apache.commons.logging.Log LOG = org.apache.commons.logging.LogFactory.getLog(..) * </pre> */ CommonsLog("org.apache.commons.logging.Log", "org.apache.commons.logging.LogFactory.getLog", true), /** * <pre> * java.util.logging.Logger LOG = java.util.logging.Logger(..) * </pre> */ JavaLog("java.util.logging.Logger", "java.util.logging.Logger", false), /** * <pre> * org.apache.log4j.Logger LOG = org.apache.log4j.Logger.getLogger(..) * </pre> */ Log4j("org.apache.log4j.Logger", "org.apache.log4j.Logger.getLogger",true), /** * <pre> * org.apache.logging.log4j.LogManager.getLogger(..) * </pre> */ Log4j2("org.apache.logging.log4j.Logger", "org.apache.logging.log4j.LogManager.getLogger", true), /** * <pre> * org.slf4j.Logger LOG = org.slf4j.LoggerFactory.getLogger(..) * </pre> */ Slf4j("org.slf4j.Logger", "org.slf4j.LoggerFactory.getLogger", true), /** * <pre> * org.slf4j.ext.XLogger LOG = org.slf4j.ext.XLoggerFactory.getXLogger(..) * </pre> */ XSlf4j("org.slf4j.ext.XLogger", "org.slf4j.ext.XLoggerFactory.getXLogger", true); private final String loggerType; private final String loggerManager; private final boolean takesClass; private Type(String loggerType, String loggerManager, boolean takesClass) { this.loggerType = loggerType; this.loggerManager = loggerManager; this.takesClass = takesClass; } public String getLoggerType() { return loggerType; } public String getLogManagerExpression() { return loggerManager; } public boolean isTakesClass() { return takesClass; } } }
apache-2.0
MyRobotLab/pyrobotlab
home/Markus/Robyneartest.py
79853
sleep(1)#file : InMoov2.Robyn Inmoov import random keyboard = Runtime.createAndStart("keyboard", "Keyboard") keyboard.addListener("keyCommand", python.getName(), "input") leftPort = "COM3" rightPort = "COM7" i01 = Runtime.createAndStart("i01", "InMoov") cleverbot = Runtime.createAndStart("cleverbot","CleverBot") # starts everything i01.startAll(leftPort, rightPort) torso = i01.startTorso("COM3") left = Runtime.getService("i01.left") right = Runtime.getService("i01.right") ############################################################################################# # Markus Mod i01.leftArm.omoplate.map(10,80,80,20) i01.rightArm.omoplate.map(10,80,80,10) i01.leftArm.shoulder.map(0,180,170,15) i01.rightArm.shoulder.map(0,180,190,50) i01.leftArm.rotate.map(40,180,140,20) i01.rightArm.rotate.map(40,180,140,20) i01.leftArm.bicep.map(5,90,90,20) i01.rightArm.bicep.map(5,90,90,20) i01.head.rothead.map(30,150,150,30) i01.torso.topStom.map(60,120,70,110) i01.head.eyeX.map(60,100,90,50) i01.head.eyeY.map(50,100,100,50) i01.head.neck.map(20,160,160,20) ############################################################ #to tweak the default Pid values i01.headTracking.xpid.setPID(10.0,5.0,0.1) i01.headTracking.ypid.setPID(10.0,5.0,0.1) i01.eyesTracking.xpid.setPID(15.0,5.0,0.1) i01.eyesTracking.ypid.setPID(15.0,5.0,0.1) ############################################################ pin0 = 54 pin1 = 55 leftfast = 300 rightfast = 300 leftstedy = 600 rightstedy = 600 leftval = leftfast - leftstedy rightval = rightfast - rightstedy left.arduino.enablePin(pin0) left.arduino.enablePin(pin1) left.addListener("publishPin", "python", "publishPin") # my call-back def publishPin(pin): if (pin.pin == 54): pin0 = pin global leftfast leftfast = pin0.value if (leftfast <= leftstedy ): global leftfast leftfast = leftstedy global leftstedy leftstedy = ((leftstedy * 49) + pin0.value) / 50 global leftval leftval = leftfast - leftstedy if (pin.pin == 55): pin1 = pin global rightfast rightfast = pin1.value if (rightfast <= rightstedy ): global rightfast rightfast = rightstedy global rightstedy rightstedy = ((rightstedy * 49) + pin1.value) / 50 global rightval rightval = rightfast - rightstedy if (leftval >= rightval + 50 ): # mouth.speak("pin 0") i01.head.rothead.moveTo(30) sleep (4) elif (rightval >= leftval + 50 ): # mouth.speak("pin 1") i01.head.rothead.moveTo(150) sleep (4) else : i01.head.rothead.moveTo(90) # print leftfast # print leftstedy # print rightfast # print rightstedy print leftval print rightval ############################################################ Pin27 = 27 right.digitalReadPollingStart(Pin27) # make friendly sample rate right.setSampleRate(3000) right.addListener("publishPin", "python", "publishPin") def publishPin(pin): # print pin.pin, pin.value, pin.type, pin.source, if (pin.pin == 27 and pin.value == 1): if pin12 == 0: i01.mouth.speak("hello") global pin12 pin12 = 1 i01.head.attach() sleep(1) ear.clearLock() headfront() sleep(2) trackHumans() # if (pin.pin == 12 and pin.value == 0): # if pin12 == 1: # global resttimer # resttimer += 1 # if resttimer == 400: # global resttimer # resttimer = 0 # gotosleepnow() ############################################################################################# time = 0 pin12 = 1 #resttimer = 0 rest = 0 blind = 1 kinect = 0 dance1 = 1 dance2 = 1 helvar = 1 mic = 1 nexagroup = 1 nexa1 = 0 nexa2 = 0 nexa3 = 0 nexa4 = 0 nexa5 = 0 nexa6 = 0 nexa7 = 0 nexa8 = 0 nexa9 = 0 nexa10 = 0 nexa11 = 0 nexa12 = 0 nexa13 = 0 nexa14 = 0 nexa15 = 0 nexa16 = 0 l1="m" l2="a" l3="r" 
l4="k" l5="u" l6="s" name = l1+l2+l3+l4+l5+l6 # play rock paper scissors robyn = 0 human = 0 i01.systemCheck() ear = i01.ear ################################################################## # Hastighet vid start i01.setHandSpeed("left", 1.0, 1.0, 1.0, 1.0, 1.0, 1.0) i01.setHandSpeed("right", 1.0, 1.0, 1.0, 1.0, 1.0, 1.0) i01.setArmSpeed("left", 1.0, 1.0, 1.0, 1.0) i01.setArmSpeed("right", 1.0, 1.0, 1.0, 1.0) i01.setHeadSpeed(0.8, 0.8) i01.mouth.speak("working on full speed") ################################################################## # commands with i01.getName() use the InMoov service methods ear.addCommand("attach head", "i01.head", "attach") ear.addCommand("disconnect head", "i01.head", "detach") ear.addCommand("attach eyes", "i01.head.eyeY", "attach") ear.addCommand("disconnect eyes", "i01.head.eyeY", "detach") ear.addCommand("attach right hand", "i01.rightHand", "attach") ear.addCommand("disconnect right hand", "i01.rightHand", "detach") ear.addCommand("attach left hand", "i01.leftHand", "attach") ear.addCommand("disconnect left hand", "i01.leftHand", "detach") ear.addCommand("attach everything", "i01", "attach") ear.addCommand("disconnect everything", "i01", "detach") ear.addCommand("attach left arm", "i01.leftArm", "attach") ear.addCommand("disconnect left arm", "i01.leftArm", "detach") ear.addCommand("attach right arm", "i01.rightArm", "attach") ear.addCommand("disconnect right arm", "i01.rightArm", "detach") ear.addCommand("let's do some exercise", "python", "startkinect") ear.addCommand("you can stop now", "python", "offkinect") ear.addCommand("open hand", "python", "handopen") ear.addCommand("close hand", "python", "handclose") ear.addCommand("servo", "python", "servos") ear.addCommand("power down", i01.getName(), "powerDown") ear.addCommand("power up", i01.getName(), "powerUp") ear.addCommand("camera on", i01.getName(), "cameraOn") ear.addCommand("off camera", i01.getName(), "cameraOff") ear.addCommand("capture gesture", i01.getName(), "captureGesture") # FIXME - lk tracking setpoint ear.addCommand("track", i01.getName(), "track") ear.addCommand("freeze track", i01.getName(), "clearTrackingPoints") ear.addCommand("giving", i01.getName(), "giving") ear.addCommand("be a fighter", i01.getName(), "fighter") ear.addCommand("victory", i01.getName(), "victory") ear.addCommand("arms up", i01.getName(), "armsUp") ear.addCommand("arms front", i01.getName(), "armsFront") ear.addCommand("da vinci", i01.getName(), "daVinci") ear.addCommand("manual", ear.getName(), "lockOutAllGrammarExcept", "voice control") ear.addCommand("voice control", ear.getName(), "clearLock") ear.addCommand("stop listening", ear.getName(), "stopListening") ##sets the servos back to full speed, anywhere in sequence or gestures ear.addCommand("full speed", "python", "fullspeed") ear.addCommand("search humans", "python", "trackHumans") ear.addCommand("go blind", "python", "stopTracking") ear.addCommand("relax", "python", "relax") ear.addCommand("perfect", "python", "perfect") ear.addCommand("finger", "python", "finger") ear.addCommand("how many fingers do you have", "python", "howmanyfingersdoihave") # play rock paper scissors ear.addCommand("let's play rock paper scissors", "python", "rockpaperscissors") ear.addCommand("arms down", "python", "armsdown") ear.addCommand("torso", "python", "Torso") ear.addCommand("move eye", "python", "moveeye") ear.addCommand("move your mouth", "python", "movemouth") ear.addCommand("disco time", "python", "discotime") ear.addCommand("move your head", "python", "movehead") 
ear.addCommand("sing little teapot", "python", "littleteapot") ear.addComfirmations("yes","correct","ya") ear.addNegations("no","wrong","nope","nah") ear.startListening("a | b | c | d | e | f | g | h | i | j | k | l | m | n | o | p | q | r | s | t | u | v | w | x | y | z | turn on number four |turn off number four | turn on number three | turn off number three | turn on number two | turn off number two | turn on number one | turn off number one | let's play again | take a rest | shut down your system | do something | do something else | be quiet | turn off the light in your stomach | red light | green light | blue light | wake up robyn | good night robyn | go to sleep now | yes | no thanks | yes let's play again | i have rock | i have paper | i have scissors | look at the people | take a look around | good morning | very good | look to your right | look to your left |look down |look up |look strait forward |how are you | sorry | robyn | can i have your attention | hello robyn | bye bye | i love you | thanks | thank you | nice | goodbye") # set up a message route from the ear --to--> python method "heard" ear.addListener("recognized", "python", "heard") ########################################################################################## # play rock paper scissors def rockpaperscissors(): fullspeed() i01.mouth.speak("lets play first to 3 points win") sleep(4) rockpaperscissors2() def rockpaperscissors2(): fullspeed() ear.lockOutAllGrammarExcept("i have rock") ear.lockOutAllGrammarExcept("i have paper") ear.lockOutAllGrammarExcept("i have scissors") x = (random.randint(1, 3)) if x == 1: ready() sleep(2) rock() sleep(2) data = msg_i01_ear_recognized.data[0] if (data == "i have rock"): x = (random.randint(1, 3)) if x == 1: i01.mouth.speak("oh no") if x == 2: i01.mouth.speak("that don't work") if x == 3: i01.mouth.speak("no points") sleep(1) if (data == "i have paper"): x = (random.randint(1, 3)) if x == 1: i01.mouth.speak("paper beats rock") if x == 2: i01.mouth.speak("your point") if x == 3: i01.mouth.speak("you got this one") global human human += 1 sleep(1) if (data == "i have scissors"): x = (random.randint(1, 3)) if x == 1: i01.mouth.speak("1 point for me") if x == 2: i01.mouth.speak("going fine") if x == 3: i01.mouth.speak("rock beats scissors") global robyn robyn += 1 sleep(1) if x == 2: ready() sleep(2) paper() sleep(2) data = msg_i01_ear_recognized.data[0] if (data == "i have rock"): x = (random.randint(1, 3)) if x == 1: i01.mouth.speak("1 point") if x == 2: i01.mouth.speak("paper beats rock") if x == 3: i01.mouth.speak("my point") global robyn robyn += 1 sleep(1) if (data == "i have paper"): x = (random.randint(1, 3)) if x == 1: i01.mouth.speak("no points") if x == 2: i01.mouth.speak("ok lets try again") sleep(2) if x == 3: i01.mouth.speak("again") sleep(1) if (data == "i have scissors"): x = (random.randint(1, 3)) if x == 1: i01.mouth.speak("oh no you get 1 point") if x == 2: i01.mouth.speak("this is not good for me") if x == 3: i01.mouth.speak("your point") global human human += 1 sleep(1) if x == 3: ready() sleep(2) scissors() sleep(2) data = msg_i01_ear_recognized.data[0] if (data == "i have rock"): x = (random.randint(1, 3)) if x == 1: i01.mouth.speak("oh no") if x == 2: i01.mouth.speak("rock beats scissors") if x == 3: i01.mouth.speak("i feel generous today") global human human += 1 sleep(1) if (data == "i have paper"): x = (random.randint(1, 3)) if x == 1: i01.mouth.speak("i've got you") if x == 2: i01.mouth.speak("my point") if x == 3: i01.mouth.speak("good") global 
robyn robyn += 1 sleep(1) if (data == "i have scissors"): x = (random.randint(1, 3)) if x == 1: i01.mouth.speak("no no") if x == 2: i01.mouth.speak("that don't work") if x == 3: i01.mouth.speak("no points") sleep(1) if robyn == 3 or human == 3: stoprockpaperscissors() # if robyn > 4 or human > 4: # i01.mouth.speak("sorry there must have been something wrong with my counting") # sleep(5) # stoprockpaperscissors() rockpaperscissors2() def stoprockpaperscissors(): armsdown() handopen() if robyn < human: i01.mouth.speak("congratulations you won with" + str(human - robyn) + "points") sleep(5) i01.mouth.speak(str(human) + "points to you and" + str(robyn) + "points to me") if robyn > human: i01.mouth.speak("yes yes i won with" + str(robyn - human) + "points") sleep(5) i01.mouth.speak("i've got " + str(robyn) + "points and you got" + str(human) + "points") if robyn == human: i01.mouth.speak("none of us won we both got" + str(robyn) + "points") global robyn robyn = 0 global human human = 0 ear.clearLock() i01.mouth.speak("that was fun") sleep(3) i01.mouth.speak("do you want to play again") sleep(8) data = msg_i01_ear_recognized.data[0] if (data == "yes let's play again"): rockpaperscissors2() if (data == "yes"): rockpaperscissors2() if (data == "no thanks"): i01.mouth.speak("maybe some other time then") else: i01.mouth.speak("ok i'll find something else to do") lookaroundyou() def ready(): i01.mouth.speak("ready") i01.mouth.speak("go") i01.moveHead(90,90,80,90,75) i01.moveArm("left",65,90,75,10) i01.moveArm("right",5,90,30,10) i01.moveHand("left",100,171,180,180,180,90) i01.moveHand("right",2,2,2,2,2,90) def rock(): i01.moveHead(90,90,80,90,75) i01.moveArm("left",70,90,75,10) i01.moveArm("right",5,90,30,10) i01.moveHand("left",180,171,180,180,180,90) i01.moveHand("right",2,2,2,2,2,90) sleep(.3) i01.setHeadSpeed(.8,.8) i01.moveHead(60,107,80,90,75) i01.moveArm("left",49,90,75,10) i01.moveArm("right",5,90,30,10) i01.moveHand("left",180,171,180,180,180,90) i01.moveHand("right",2,2,2,2,2,90) sleep(.3) i01.moveHead(60,107,80,90,75) i01.moveArm("left",70,90,75,10) i01.moveArm("right",5,90,30,10) i01.moveHand("left",180,171,180,180,180,90) i01.moveHand("right",2,2,2,2,2,90) sleep(.3) i01.moveHead(60,107,80,90,75) i01.moveArm("left",49,90,75,10) i01.moveArm("right",5,90,30,10) i01.moveHand("left",180,171,180,180,180,90) i01.moveHand("right",2,2,2,2,2,90) sleep(.3) i01.moveHead(60,107,80,90,75) i01.moveArm("left",70,90,75,10) i01.moveArm("right",5,90,30,10) i01.moveHand("left",180,171,180,180,180,90) i01.moveHand("right",2,2,2,2,2,90) sleep(.3) i01.moveHead(90,90,80,90,75) i01.moveArm("left",49,90,75,10) i01.moveArm("right",5,90,30,10) i01.moveHand("left",180,171,180,180,180,140) i01.moveHand("right",2,2,2,2,2,90) sleep(.3) x = (random.randint(1, 2)) if x == 1: i01.mouth.speakBlocking("i have rock what do you have") if x == 2: i01.mouth.speakBlocking("what do you have") def paper(): i01.moveHead(90,90,80,90,75) i01.moveArm("left",70,90,75,10) i01.moveArm("right",5,90,30,10) i01.moveHand("left",180,171,180,180,180,90) i01.moveHand("right",2,2,2,2,2,90) sleep(.3) i01.setHeadSpeed(.8,.8) i01.moveHead(60,107,80,90,75) i01.moveArm("left",49,90,75,10) i01.moveArm("right",5,90,30,10) i01.moveHand("left",180,171,180,180,180,90) i01.moveHand("right",2,2,2,2,2,90) sleep(.3) i01.moveHead(60,107,80,90,75) i01.moveArm("left",70,90,75,10) i01.moveArm("right",5,90,30,10) i01.moveHand("left",180,171,180,180,180,90) i01.moveHand("right",2,2,2,2,2,90) sleep(.3) i01.moveHead(60,107,80,90,75) i01.moveArm("left",49,90,75,10) 
i01.moveArm("right",5,90,30,10) i01.moveHand("left",180,171,180,180,180,90) i01.moveHand("right",2,2,2,2,2,90) sleep(.3) i01.moveHead(60,107,80,90,75) i01.moveArm("left",70,90,75,10) i01.moveArm("right",5,90,30,10) i01.moveHand("left",180,171,180,180,180,90) i01.moveHand("right",2,2,2,2,2,90) sleep(.3) i01.moveHead(90,90,80,90,75) i01.moveArm("left",49,90,75,10) i01.moveArm("right",5,90,30,10) i01.moveHand("left",0,0,0,0,0,165) i01.moveHand("right",2,2,2,2,2,90) sleep(.3) x = (random.randint(1, 2)) if x == 1: i01.mouth.speakBlocking("i have paper what do you have") if x == 2: i01.mouth.speakBlocking("what do you have") def scissors(): i01.moveHead(90,90,80,90,75) i01.moveArm("left",70,90,75,10) i01.moveArm("right",5,90,30,10) i01.moveHand("left",180,171,180,180,180,90) i01.moveHand("right",2,2,2,2,2,90) sleep(.3) i01.setHeadSpeed(0.8,0.8) i01.moveHead(60,107,80,90,75) i01.moveArm("left",49,90,75,10) i01.moveArm("right",5,90,30,10) i01.moveHand("left",180,171,180,180,180,90) i01.moveHand("right",2,2,2,2,2,90) sleep(.3) i01.moveHead(60,107,80,90,75) i01.moveArm("left",70,90,75,10) i01.moveArm("right",5,90,30,10) i01.moveHand("left",180,171,180,180,180,90) i01.moveHand("right",2,2,2,2,2,90) sleep(.3) i01.moveHead(60,107,80,90,75) i01.moveArm("left",49,90,75,10) i01.moveArm("right",5,90,30,10) i01.moveHand("left",180,171,180,180,180,90) i01.moveHand("right",2,2,2,2,2,90) sleep(.3) i01.moveHead(60,107,80,90,75) i01.moveArm("left",70,90,75,10) i01.moveArm("right",5,90,30,10) i01.moveHand("left",180,171,180,180,180,90) i01.moveHand("right",2,2,2,2,2,90) sleep(.3) i01.moveHead(90,90,80,90,75) i01.moveArm("left",49,90,75,10) i01.moveArm("right",5,90,30,10) i01.moveHand("left",50,0,0,180,180,90) i01.moveHand("right",2,2,2,2,2,90) sleep(.3) x = (random.randint(1, 2)) if x == 1: i01.mouth.speakBlocking("i have scissors what do you have") if x == 2: i01.mouth.speakBlocking("what do you have") ########################################################################################## def input(cmd): # print 'python object is',msg_[service]_[method] cmd = msg_keyboard_keyCommand.data[0] print 'python data is', cmd if (cmd == "C"): i01.mouth.audioFile.playFile("C:\Users\Markus\Music\markustest.mp3", False) sleep(12.0) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.23) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.17) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.68) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(1.44) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.2) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.22) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.59) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.22) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.27) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.65) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.61) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.68) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(12.91) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.14) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.26) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.59) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(1.46) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.16) i01.head.jaw.moveTo(50) sleep(0.2) 
i01.head.jaw.moveTo(10) sleep(0.22) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.61) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.16) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.25) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.69) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.66) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.62) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) if (cmd == "T"): talk() if (cmd == "R"): if rest == 0: shutdownyoursystem() elif rest == 1: robyn() if (cmd == "W"): i01.head.neck.moveTo(i01.head.neck.getPosFloat() + 1) if (cmd == "Z"): i01.head.neck.moveTo(i01.head.neck.getPosFloat() - 1) if (cmd == "A"): i01.head.rothead.moveTo(i01.head.rothead.getPosFloat() + 1) if (cmd == "D"): i01.head.rothead.moveTo(i01.head.rothead.getPosFloat() - 1) if (cmd == "S"): headfront() if (cmd == "K"): if kinect == 0: startkinect() elif kinect == 1: offkinect() if (cmd == "B"): if blind == 1: trackHumans() elif blind == 0: stopTracking() if (cmd == "Q"): i01.rightArm.shoulder.moveTo(i01.rightArm.shoulder.getPosFloat() + 0.5) if (cmd == "P"): discotime() ################################################# if (cmd == "5"): if nexagroup == 1: i01.mouth.speakBlocking("nexa group 2") global nexagroup nexagroup = 2 elif nexagroup == 2: i01.mouth.speakBlocking("nexa group 3") global nexagroup nexagroup = 3 elif nexagroup == 3: i01.mouth.speakBlocking("nexa group 4") global nexagroup nexagroup = 4 elif nexagroup == 4: i01.mouth.speakBlocking("nexa group 1") global nexagroup nexagroup = 1 if (cmd == "1"): if nexagroup == 1: if nexa1 == 0: nexa1on() elif nexa1 == 1: nexa1off() elif nexagroup == 2: if nexa5 == 0: nexa5on() elif nexa5 == 1: nexa5off() elif nexagroup == 3: if nexa9 == 0: nexa9on() elif nexa9 == 1: nexa9off() elif nexagroup == 4: if nexa13 == 0: nexa13on() elif nexa13 == 1: nexa13off() if (cmd == "2"): if nexagroup == 1: if nexa2 == 0: nexa2on() elif nexa2 == 1: nexa2off() elif nexagroup == 2: if nexa6 == 0: nexa6on() elif nexa6 == 1: nexa6off() elif nexagroup == 3: if nexa10 == 0: nexa10on() elif nexa10 == 1: nexa10off() elif nexagroup == 4: if nexa14 == 0: nexa14on() elif nexa14 == 1: nexa14off() if (cmd == "3"): if nexagroup == 1: if nexa3 == 0: nexa3on() elif nexa3 == 1: nexa3off() elif nexagroup == 2: if nexa7 == 0: nexa7on() elif nexa7 == 1: nexa7off() elif nexagroup == 3: if nexa11 == 0: nexa11on() elif nexa11 == 1: nexa11off() elif nexagroup == 4: if nexa15 == 0: nexa15on() elif nexa15 == 1: nexa15off() if (cmd == "4"): if nexagroup == 1: if nexa4 == 0: nexa4on() elif nexa4 == 1: nexa4off() elif nexagroup == 2: if nexa8 == 0: nexa8on() elif nexa8 == 1: nexa8off() elif nexagroup == 3: if nexa12 == 0: nexa12on() elif nexa12 == 1: nexa12off() elif nexagroup == 4: if nexa16 == 0: nexa16on() elif nexa16 == 1: nexa16off() ################################################# if (cmd == "M"): if mic == 1: ear.lockOutAllGrammarExcept("robin") i01.mouth.speak("i'm not listening") global mic mic = 0 elif mic == 0: ear.clearLock() i01.mouth.speak("i can hear again") global mic mic = 1 ########################################################################################## def heard(data): data = msg_i01_ear_recognized.data[0] if (data == name): i01.mouth.speak("this is great") if (data == "a"): i01.mouth.speak(name) if (data == "turn on number one"): nexa1on() if (data == "turn off number one"): nexa1off() if (data == "turn on number two"): 
nexa2on() if (data == "turn off number two"): nexa2off() if (data == "turn on number three"): nexa3on() if (data == "turn off number three"): nexa3off() if (data == "turn on number four"): nexa4on() if (data == "turn off number four"): nexa4off() if (data == "let's play again"): rockpaperscissors2() if (data == "be quiet"): blue() ear.lockOutAllGrammarExcept("robyn") i01.mouth.speak("ok i will only listen if you say my name") global mic mic = 0 if (data == "turn off the light in your stomach"): ledoff() if (data == "red light"): red() if (data == "green light"): green() if (data == "blue light"): blue() if (data == "shut down your system") or (data == "take a rest"): shutdownyoursystem() if (data == "go to sleep now") or (data == "good night robyn"): gotosleepnow() if (data == "wake up robyn") or (data == "good morning"): i01.attach() green() global rest rest = 0 global mic mic = 1 global pin12 pin12 = 1 headfront() eyesfront() i01.mouth.speak("good morning") ear.clearLock() x = (random.randint(1, 4)) if x == 1: i01.mouth.speak("i hope you had a good night sleep") if x == 2: i01.mouth.speak("nice to see you again") if x == 3: i01.mouth.speak("this is going to be a good day") if (data == "look at the people"): i01.setHeadSpeed(0.8, 0.8) for y in range(0, 10): x = (random.randint(1, 5)) if x == 1: i01.head.neck.moveTo(90) eyeslooking() sleep(1) trackHumans() sleep(10) stopTracking() if x == 2: i01.head.rothead.moveTo(80) eyeslooking() sleep(1) trackHumans() sleep(10) stopTracking() if x == 3: headdown() eyeslooking() sleep(1) trackHumans() sleep(10) stopTracking() if x == 4: headright() eyeslooking() sleep(1) trackHumans() sleep(10) stopTracking() if x == 5: headleft() eyeslooking() sleep(1) trackHumans() sleep(10) stopTracking() sleep(1) headfront() eyesfront() sleep(3) i01.mouth.speak("nice to meet you all") if (data == "take a look around"): lookaroundyou() if (data == "do something else"): lookaroundyou() if (data == "do something"): lookaroundyou() if (data == "very good"): i01.mouth.speak("thanks") if (data == "look to your right"): headright() if (data == "look to your left"): headleft() if (data == "look down"): headdown() if (data == "look up"): headupp() if (data == "look strait forward"): headfront() if (data == "how are you"): i01.mouth.speak("i'm fine thanks") if (data == "goodbye"): goodbye() if (data == "robyn"): robyn() if (data == "sorry"): global helvar helvar = 1 green() x = (random.randint(1, 3)) if x == 1: i01.mouth.speak("no problems") if x == 2: i01.mouth.speak("it doesn't matter") if x == 3: i01.mouth.speak("it's okay") if (data == "nice"): x = (random.randint(1, 3)) if x == 1: i01.mouth.speak("I know") if x == 2: i01.mouth.speak("yes, indeed") if x == 3: i01.mouth.speak("you are damn right") if (data == "bye bye"): i01.mouth.speak("see you soon") global helvar helvar = 1 x = (random.randint(1, 2)) if x == 1: i01.mouth.speak("i'm looking forward to see you again") if x == 2: i01.mouth.speak("goodbye") if (data == "thank you"): x = (random.randint(1, 3)) if x == 1: i01.mouth.speak("you are welcome") if x == 2: i01.mouth.speak("my pleasure") if x == 3: i01.mouth.speak("it's okay") if (data == "thanks"): x = (random.randint(1, 2)) if x == 1: i01.mouth.speak("it's okay") if x == 2: i01.mouth.speak("sure") if (data == "hello robyn"): if helvar <= 2: i01.mouth.speak("hello") global helvar helvar += 1 green() sleep(1) elif helvar == 3: i01.mouth.speak("hello hello you have already said hello at least twice") i01.moveArm("left",43,88,22,10) i01.moveArm("right",20,90,30,10) 
i01.moveHand("left",0,0,0,0,0,119) i01.moveHand("right",0,0,0,0,0,119) green() sleep(1) red() sleep(1) green() sleep(1) armsdown() global helvar helvar += 1 elif helvar == 4: i01.mouth.speak("what is your problem stop saying hello all the time") i01.moveArm("left",30,83,22,10) i01.moveArm("right",40,85,30,10) i01.moveHand("left",130,180,180,180,180,119) i01.moveHand("right",130,180,180,180,180,119) red() sleep(1) green() sleep(1) red() sleep(1) green() sleep(1) armsdown() global helvar helvar += 1 elif helvar == 5: stopTracking() i01.mouth.speak("i will ignore you if you say hello one more time") headright() red() sleep(3) armsdown() global helvar helvar += 1 if (data == "i love you"): green() i01.mouth.speak("i love you too") i01.moveHead(116,80,87,80,70) i01.moveArm("left",85,93,42,16) i01.moveArm("right",87,93,37,18) i01.moveHand("left",124,82,65,81,41,143) i01.moveHand("right",59,53,89,61,36,21) i01.moveTorso(90,90,90) global helvar helvar = 1 sleep(0.2) sleep(1) armsdown() def stopit(): ear.clearLock() headfront() eyesfront() if (data == "break"): i01.mouth.speak("yes") ############################################################################################# def discotime(): i01.moveHand("left",92,33,37,71,66,25) i01.moveHand("right",81,66,82,60,105,113) nexa1off() ear.lockOutAllGrammarExcept("robyn") i01.mouth.speak("it's disco time") sleep(3) nexa2off() sleep(1) i01.mouth.audioFile.playFile("C:\Users\Markus\Music\Get the Party Started.mp3", False) sleep(1.6) nexa3on() sleep(1) nexa4on() for y in range(0, 67): data = msg_i01_ear_recognized.data[0] if (data == "robyn"): stopit() discodance1() discodance2() i01.head.neck.moveTo(40) red() sleep(0.4) i01.head.neck.moveTo(90) sleep(0.52) discodance1() discodance2() i01.head.neck.moveTo(40) green() sleep(0.4) i01.head.neck.moveTo(90) sleep(0.515) discodance1() discodance2() i01.head.neck.moveTo(40) blue() sleep(0.4) i01.head.neck.moveTo(90) sleep(0.5) ear.clearLock() nexa1on() sleep(0.5) nexa2on() sleep(0.5) nexa3off() sleep(0.5) nexa4off() global dance2 dance2 = 1 robyn() armsdown() i01.mouth.speak("is the party already over") def discodance1(): if dance1 == 1: i01.moveTorso(100,90,90) global dance1 dance1 = 2 elif dance1 == 2: i01.moveTorso(80,90,90) global dance1 dance1 = 1 def discodance2(): if dance2 >= 0 and dance2 <= 9 or dance2 >= 17 and dance2 <= 26 or dance2 >= 42 and dance2 <= 52 : if dance1 == 2: i01.moveArm("left",60,90,30,10) i01.moveArm("right",60,90,30,10) elif dance1 == 1: i01.moveArm("left",30,90,30,10) i01.moveArm("right",30,90,30,10) global dance2 dance2 += 1 if dance2 >= 9 and dance2 <= 17 : if dance1 == 2: i01.moveArm("left",60,60,30,10) i01.moveArm("right",60,120,30,10) elif dance1 == 1: i01.moveArm("left",30,60,30,10) i01.moveArm("right",30,120,30,10) global dance2 dance2 += 1 if dance2 >= 26 and dance2 <= 34 : if dance1 == 2: i01.moveArm("left",60,120,30,10) i01.moveArm("right",60,60,30,10) elif dance1 == 1: i01.moveArm("left",30,120,30,10) i01.moveArm("right",30,60,30,10) global dance2 dance2 += 1 if dance2 >= 34 and dance2 <= 42 or dance2 >= 60 and dance2 <= 68 : if dance1 == 2: i01.moveArm("left",25,94,79,10) i01.moveArm("right",90,107,43,15) elif dance1 == 1: i01.moveArm("left",65,94,73,10) i01.moveArm("right",37,107,72,15) global dance2 dance2 += 1 if dance2 >= 52 and dance2 <= 60 or dance2 >= 68 and dance2 <= 76 or dance2 >= 84 and dance2 <= 92 : if dance1 == 2: i01.moveArm("left",5,90,30,10) i01.moveArm("right",5,130,30,30) elif dance1 == 1: i01.moveArm("left",5,130,30,30) i01.moveArm("right",5,90,30,10) 
global dance2 dance2 += 1 if dance2 >= 76 and dance2 <= 84 or dance2 >= 92 and dance2 <= 102 : if dance1 == 2: i01.moveArm("left",90,90,30,19) i01.moveArm("right",87,104,30,10) elif dance1 == 1: i01.moveArm("left",90,136,30,10) i01.moveArm("right",87,69,30,25) global dance2 dance2 += 1 if dance2 >= 102 and dance2 <= 111 or dance2 >= 119 and dance2 <= 128 or dance2 >= 146 and dance2 <= 154 : if dance1 == 2: i01.moveArm("left",30,90,30,10) i01.moveArm("right",60,90,30,10) elif dance1 == 1: i01.moveArm("left",60,90,30,10) i01.moveArm("right",30,90,30,10) global dance2 dance2 += 1 if dance2 >= 111 and dance2 <= 119 : if dance1 == 2: i01.moveArm("left",30,60,30,10) i01.moveArm("right",60,120,30,10) elif dance1 == 1: i01.moveArm("left",60,60,30,10) i01.moveArm("right",30,120,30,10) global dance2 dance2 += 1 if dance2 >= 128 and dance2 <= 138 : if dance1 == 2: i01.moveArm("left",30,120,30,10) i01.moveArm("right",60,60,30,10) elif dance1 == 1: i01.moveArm("left",60,120,30,10) i01.moveArm("right",30,60,30,10) global dance2 dance2 += 1 if dance2 >= 138 and dance2 <= 146 or dance2 >= 164 and dance2 <= 172 : if dance1 == 2: i01.moveArm("left",25,94,79,10) i01.moveArm("right",90,107,43,15) elif dance1 == 1: i01.moveArm("left",65,94,73,10) i01.moveArm("right",37,107,72,15) global dance2 dance2 += 1 if dance2 >= 154 and dance2 <= 164 or dance2 >= 172 and dance2 <= 180 or dance2 >= 188 and dance2 <= 196 : if dance1 == 2: i01.moveArm("left",5,90,30,10) i01.moveArm("right",60,130,30,30) elif dance1 == 1: i01.moveArm("left",60,130,30,30) i01.moveArm("right",5,90,30,10) global dance2 dance2 += 1 if dance2 >= 180 and dance2 <= 188 or dance2 >= 196 and dance2 <= 212 : if dance1 == 2: i01.moveArm("left",90,90,30,19) i01.moveArm("right",87,104,30,10) elif dance1 == 1: i01.moveArm("left",90,136,30,10) i01.moveArm("right",87,69,30,25) global dance2 dance2 += 1 ############################################################################################# def howmanyfingersdoihave(): blue() fullspeed() i01.moveHead(49,74) i01.moveArm("left",75,83,79,24) i01.moveArm("right",65,82,71,24) i01.moveHand("left",74,140,150,157,168,92) i01.moveHand("right",89,80,98,120,114,0) sleep(2) i01.moveHand("right",0,80,98,120,114,0) i01.mouth.speakBlocking("ten") sleep(1) i01.moveHand("right",0,0,98,120,114,0) i01.mouth.speakBlocking("nine") sleep(1) i01.moveHand("right",0,0,0,120,114,0) i01.mouth.speakBlocking("eight") sleep(1) i01.moveHand("right",0,0,0,0,114,0) i01.mouth.speakBlocking("seven") sleep(1) i01.moveHand("right",0,0,0,0,0,0) i01.mouth.speakBlocking("six") sleep(1) i01.setHeadSpeed(.70,.70) i01.moveHead(40,105) i01.moveArm("left",75,83,79,24) i01.moveArm("right",65,82,71,24) i01.moveHand("left",0,0,0,0,0,180) i01.moveHand("right",0,0,0,0,0,0) sleep(1) i01.mouth.speakBlocking("and five makes eleven") sleep(0.7) i01.setHeadSpeed(0.7,0.7) i01.moveHead(40,50) sleep(0.5) i01.setHeadSpeed(0.7,0.7) i01.moveHead(49,105) sleep(0.7) i01.setHeadSpeed(0.7,0.8) i01.moveHead(40,50) sleep(0.7) i01.setHeadSpeed(0.7,0.8) i01.moveHead(49,105) sleep(0.7) i01.setHeadSpeed(0.7,0.7) i01.moveHead(90,85) sleep(0.7) i01.mouth.speakBlocking("eleven") i01.moveArm("left",70,75,70,20) i01.moveArm("right",60,75,65,20) sleep(1) i01.mouth.speakBlocking("that doesn't seem right") sleep(2) i01.mouth.speakBlocking("I think I better try that again") i01.moveHead(40,105) i01.moveArm("left",75,83,79,24) i01.moveArm("right",65,82,71,24) i01.moveHand("left",140,168,168,168,158,90) i01.moveHand("right",87,138,160,168,158,25) sleep(2) 
i01.moveHand("left",10,140,168,168,158,90) i01.mouth.speakBlocking("one") sleep(.1) i01.moveHand("left",10,10,168,168,158,90) i01.mouth.speakBlocking("two") sleep(.1) i01.moveHand("left",10,10,10,168,158,90) i01.mouth.speakBlocking("three") sleep(.1) i01.moveHand("left",10,10,10,10,158,90) i01.mouth.speakBlocking("four") sleep(.1) i01.moveHand("left",10,10,10,10,10,90) i01.mouth.speakBlocking("five") sleep(.1) i01.setHeadSpeed(0.65,0.65) i01.moveHead(53,65) i01.moveArm("right",48,80,78,11) i01.setHandSpeed("left", 1.0, 1.0, 1.0, 1.0, 1.0, 1.0) i01.setHandSpeed("right", 1.0, 1.0, 1.0, 1.0, 1.0, 1.0) i01.moveHand("left",10,10,10,10,10,90) i01.moveHand("right",10,0,10,10,0,25) sleep(1) i01.mouth.speakBlocking("and five makes ten") sleep(.5) i01.mouth.speakBlocking("there that's better") i01.moveHead(95,85) i01.moveArm("left",75,83,79,24) i01.moveArm("right",40,70,70,10) sleep(0.5) i01.mouth.speakBlocking("inmoov has ten fingers") i01.moveHead(90,90) i01.setHandSpeed("left", 0.8, 0.8, 0.8, 0.8, 0.8, 0.8) i01.setHandSpeed("right", 0.8, 0.8, 0.8, 0.8, 0.8, 0.8) i01.moveHand("left",140,140,140,140,140,60) i01.moveHand("right",140,140,140,140,140,60) sleep(1.0) i01.setArmSpeed("right", 1.0, 1.0, 1.0, 1.0) i01.setArmSpeed("left", 1.0, 1.0, 1.0, 1.0) i01.moveArm("left",5,90,30,11) i01.moveArm("right",5,90,30,11) armsdown() sleep(1) green() def finger(): i01.setHandSpeed("left", 0.85, 0.85, 0.85, 0.85, 0.85, 1.0) i01.setHandSpeed("right", 1.0, 0.85, 1.0, 1.0, 1.0, 1.0) i01.setArmSpeed("left", 1.0, 1.0, 1.0, 1.0) i01.setArmSpeed("right", 0.90, 1.0, 1.0, 1.0) i01.setHeadSpeed(1.0, 0.90) i01.setTorsoSpeed(1.0, 1.0, 1.0) i01.moveHead(80,86,85,85,72) i01.moveArm("left",5,94,30,10) i01.moveArm("right",7,78,92,10) i01.moveHand("left",180,180,180,180,180,90) i01.moveHand("right",180,2,175,160,165,180) i01.moveTorso(90,90,90) fullspeed() def fullspeed(): i01.setHandSpeed("left", 1.0, 1.0, 1.0, 1.0, 1.0, 1.0) i01.setHandSpeed("right", 1.0, 1.0, 1.0, 1.0, 1.0, 1.0) i01.setArmSpeed("left", 1.0, 1.0, 1.0, 1.0) i01.setArmSpeed("right", 1.0, 1.0, 1.0, 1.0) i01.setHeadSpeed(0.7, 0.7) def trackHumans(): i01.headTracking.faceDetect() i01.eyesTracking.faceDetect() global blind blind = 0 def stopTracking(): i01.headTracking.stopTracking() i01.eyesTracking.stopTracking() global blind blind = 1 def startkinect(): ear.lockOutAllGrammarExcept("you can stop now") global kinect kinect = 1 i01.leftArm.shoulder.map(0,180,250,0) i01.rightArm.shoulder.map(0,180,290,40) i01.leftArm.omoplate.map(10,80,80,30) i01.rightArm.omoplate.map(10,80,100,40) i01.copyGesture(True) def offkinect(): i01.copyGesture(False) global kinect kinect = 0 i01.leftArm.shoulder.map(0,180,170,15) i01.rightArm.shoulder.map(0,180,190,50) i01.leftArm.omoplate.map(10,80,80,20) i01.rightArm.omoplate.map(10,80,80,20) ear.clearLock() armsdown() def handopen(): i01.moveHand("left",0,0,0,0,0) i01.moveHand("right",0,0,0,0,0) def lefthandopen(): i01.moveHand("left",0,0,0,0,0) def righthandopen(): i01.moveHand("right",0,0,0,0,0) def handclose(): i01.moveHand("left",180,180,180,180,180) i01.moveHand("right",180,180,180,180,180) def lefthandclose(): i01.moveHand("left",180,180,180,180,180) def righthandclose(): i01.moveHand("right",180,180,180,180,180) def servos(): ear.pauseListening() sleep(2) i01.setHandSpeed("left", 1.0, 1.0, 1.0, 1.0, 1.0, 1.0) i01.setHandSpeed("right", 1.0, 1.0, 1.0, 1.0, 1.0, 1.0) i01.setArmSpeed("right", 0.85, 0.85, 0.85, 0.85) i01.setArmSpeed("left", 1.0, 1.0, 1.0, 1.0) i01.setHeadSpeed(0.65, 0.65) i01.moveHead(79,100) 
i01.moveArm("left",5,119,28,15) i01.moveArm("right",5,111,28,15) i01.moveHand("left",42,58,87,55,71,35) i01.moveHand("right",81,20,82,60,105,113) i01.mouth.speakBlocking("I currently have 27 hobby servos installed in my body to give me life") i01.setHandSpeed("left", 0.85, 0.85, 0.85, 0.85, 0.85, 0.85) i01.setHandSpeed("right", 1.0, 1.0, 1.0, 1.0, 1.0, 1.0) i01.setArmSpeed("right", 0.85, 0.85, 0.85, 0.85) i01.setArmSpeed("left", 1.0, 1.0, 1.0, 1.0) i01.setHeadSpeed(0.65, 0.65) i01.moveHead(124,90) i01.moveArm("left",89,94,91,35) i01.moveArm("right",20,67,31,22) i01.moveHand("left",106,0,161,147,138,90) i01.moveHand("right",0,0,0,54,91,90) i01.mouth.speakBlocking("there's one servo for moving my mouth up and down") sleep(1) i01.setHandSpeed("left", 0.85, 0.85, 1.0, 0.85, 0.85, 0.85) i01.setHandSpeed("right", 1.0, 1.0, 1.0, 1.0, 1.0, 1.0) i01.setArmSpeed("right", 0.85, 0.85, 0.85, 0.85) i01.setArmSpeed("left", 1.0, 1.0, 1.0, 1.0) i01.setHeadSpeed(0.65, 0.65) i01.moveHead(105,76); i01.moveArm("left",89,106,103,35); i01.moveArm("right",35,67,31,22); i01.moveHand("left",106,0,0,147,138,7); i01.moveHand("right",0,0,0,54,91,90); i01.mouth.speakBlocking("two for my eyes") sleep(0.2) i01.setHandSpeed("left", 0.85, 0.85, 1.0, 1.0, 1.0, 0.85) i01.moveHand("left",106,0,0,0,0,7); i01.mouth.speakBlocking("and two more for my head") sleep(0.5) i01.setHandSpeed("left", 0.85, 0.9, 0.9, 0.9, 0.9, 0.85) i01.setHandSpeed("right", 1.0, 1.0, 1.0, 1.0, 1.0, 1.0) i01.setArmSpeed("right", 0.85, 0.85, 0.85, 0.85) i01.setArmSpeed("left", 1.0, 1.0, 1.0, 1.0) i01.setHeadSpeed(0.65, 0.65) i01.moveHead(90,40); i01.moveArm("left",89,106,103,35); i01.moveArm("right",35,67,31,20); i01.moveHand("left",106,140,140,140,140,7); i01.moveHand("right",0,0,0,54,91,90); i01.mouth.speakBlocking("so i can look around") sleep(0.5) i01.setHeadSpeed(0.65, 0.65) i01.moveHead(105,125); i01.setArmSpeed("left", 0.9, 0.9, 0.9, 0.9) i01.moveArm("left",60,100,85,30); i01.mouth.speakBlocking("and see who's there") i01.setHeadSpeed(0.65, 0.65) i01.moveHead(40,56); sleep(0.5) i01.setArmSpeed("right", 1.0, 1.0, 1.0, 1.0) i01.setArmSpeed("left", 1.0, 1.0, 1.0, 1.0) i01.setArmSpeed("left", 1.0, 1.0, 1.0, 1.0); i01.setArmSpeed("right", 0.5, 0.6, 0.5, 0.6); i01.moveArm("left",87,41,64,11) i01.moveArm("right",5,95,40,11) i01.moveHand("left",98,150,160,160,160,104) i01.moveHand("right",0,0,50,54,91,90); i01.mouth.speakBlocking("there's three servos in each shoulder") i01.moveHead(40,67); sleep(2) i01.setHandSpeed("left", 0.8, 0.9, 0.8, 0.8, 0.8, 0.8) i01.setHandSpeed("right", 1.0, 1.0, 1.0, 1.0, 1.0, 1.0) i01.setArmSpeed("right", 0.85, 0.85, 0.85, 0.85) i01.setArmSpeed("left", 1.0, 1.0, 1.0, 1.0) i01.setHeadSpeed(0.8, 0.8) i01.moveHead(43,69) i01.moveArm("left",87,41,64,11) i01.moveArm("right",5,95,40,42) i01.moveHand("left",42,0,100,80,113,35) i01.moveHand("left",42,10,160,160,160,35) i01.moveHand("right",81,20,82,60,105,113) i01.mouth.speakBlocking("here is the first servo movement") sleep(1) i01.moveHead(37,60); i01.setHandSpeed("left", 1.0, 1.0, 0.9, 0.9, 1.0, 0.8) i01.setArmSpeed("right", 1.0, 1.0, 1.0, 1.0) i01.moveArm("right",5,95,67,42) i01.moveHand("left",42,10,10,160,160,30) i01.mouth.speakBlocking("this is the second one") sleep(1) i01.moveHead(43,69); i01.setArmSpeed("right", 1.0, 1.0, 1.0, 1.0) i01.moveArm("right",5,134,67,42) i01.moveHand("left",42,10,10,10,160,35) i01.mouth.speakBlocking("now you see the third") sleep(1) i01.setArmSpeed("right", 0.8, 0.8, 0.8, 0.8) i01.moveArm("right",20,90,45,16) i01.mouth.speakBlocking("they give me a 
more human like movement") sleep(1) i01.setHandSpeed("left", 1.0, 1.0, 1.0, 1.0, 1.0, 1.0) i01.setArmSpeed("right", 1.0, 1.0, 1.0, 1.0); i01.moveHead(43,72) i01.moveArm("left",90,44,66,11) i01.moveArm("right",90,100,67,26) i01.moveHand("left",42,80,100,80,113,35) i01.moveHand("right",81,0,82,60,105,69) i01.mouth.speakBlocking("but, i have only one servo, to move each elbow") i01.setHandSpeed("left", 0.85, 0.85, 0.85, 0.85, 0.85, 0.85) i01.setHandSpeed("right", 1.0, 1.0, 1.0, 1.0, 1.0, 1.0) i01.setArmSpeed("right", 0.85, 0.85, 0.85, 0.85) i01.setArmSpeed("left", 1.0, 1.0, 1.0, 1.0) i01.setHeadSpeed(0.8, 0.8) i01.moveHead(45,62) i01.moveArm("left",72,44,90,11) i01.moveArm("right",90,95,68,15) i01.moveHand("left",42,0,100,80,113,35) i01.moveHand("right",81,0,82,60,105,0) i01.mouth.speakBlocking("that, leaves me, with one servo per wrist") i01.moveHead(40,60) i01.setHandSpeed("left", 1.0, 1.0, 1.0, 1.0, 1.0, 1.0) i01.setHandSpeed("right", 0.9, 0.9, 0.9, 0.9, 0.9, 0.9) i01.moveArm("left",72,44,90,9) i01.moveArm("right",90,95,68,15) i01.moveHand("left",42,0,100,80,113,35) i01.moveHand("right", 10, 140,82,60,105,10) i01.mouth.speakBlocking("and one servo for each finger.") sleep(0.5) i01.moveHand("left",42,0,100,80,113,35) i01.moveHand("right", 50, 51, 15,23, 30,140); i01.mouth.speakBlocking("these servos are located in my forearms") i01.setHandSpeed("left", 0.8, 0.8, 0.8, 0.8,0.8, 0.8) i01.setHandSpeed("right", 1.0, 1.0, 1.0, 1.0, 1.0, 1.0) i01.moveHand("left", 36, 52, 8,22, 20); i01.moveHand("right", 120, 147, 130,110, 125); i01.setHandSpeed("left", 0.85, 0.85, 0.85, 0.85, 0.85, 0.85) i01.setHandSpeed("right", 1.0, 1.0, 1.0, 1.0, 1.0, 1.0) i01.setArmSpeed("right", 0.75, 0.85, 0.95, 0.85) i01.setArmSpeed("left", 0.95, 0.65, 0.75, 0.75) i01.setHeadSpeed(0.75, 0.75) i01.moveHead(20,100) i01.moveArm("left",71,94,41,31) i01.moveArm("right",5,82,28,15) i01.moveHand("left",60,43,45,34,34,35) i01.moveHand("right",20,40,40,30,30,72) sleep(1) i01.mouth.speakBlocking("they are hooked up, by the use of tendons") i01.moveHand("left",10,20,30,40,60,150); i01.moveHand("right",110,137,120,100,105,130); i01.setHeadSpeed(1,1) i01.setArmSpeed("right", 1.0,1.0, 1.0, 1.0); i01.setArmSpeed("left", 1.0, 1.0, 1.0, 1.0); sleep(2) i01.mouth.speak("i also have 2 servos in my waist so i can move sideways") Torso() relax() sleep(2) armsdown() ear.resumeListening() def relax(): i01.setHandSpeed("left", 0.85, 0.85, 0.85, 0.85, 0.85, 0.85) i01.setHandSpeed("right", 0.85, 0.85, 0.85, 0.85, 0.85, 0.85) i01.setArmSpeed("right", 0.75, 0.85, 0.65, 0.85) i01.setArmSpeed("left", 0.95, 0.65, 0.75, 0.75) i01.setHeadSpeed(0.85, 0.85, 1.0, 1.0, 1.0) i01.setTorsoSpeed(0.75, 0.55, 1.0) i01.moveHead(79,100,90,90,70) i01.moveArm("left",5,84,28,15) i01.moveArm("right",5,82,28,15) i01.moveHand("left",92,33,37,71,66,25) i01.moveHand("right",81,66,82,60,105,113) i01.moveTorso(90,90,90) def perfect(): i01.setHandSpeed("left", 0.80, 0.80, 1.0, 1.0, 1.0, 1.0) i01.setHandSpeed("right", 1.0, 1.0, 1.0, 1.0, 1.0, 1.0) i01.setArmSpeed("left", 0.85, 0.85, 0.85, 0.95) i01.setArmSpeed("right", 1.0, 1.0, 1.0, 1.0) i01.setHeadSpeed(0.65, 0.75) i01.moveHead(88,79) i01.moveArm("left",89,75,93,11) i01.moveArm("right",0,91,28,17) i01.moveHand("left",130,160,83,40,0,34) i01.moveHand("right",86,51,133,162,153,180) ############################################################################################# def littleteapot(): i01.mouth.speak("i would like to sing a song for greg perry") sleep(3) i01.mouth.audioFile.playFile("C:\Users\Markus\Music\little 
teapot.mp3", False) sleep(4.11) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.28) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.28) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.25) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.26) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.19) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.27) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.42) i01.moveArm("right",90,40,30,46) righthandclose() sleep(0.25) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.24) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.24) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.22) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.28) i01.moveArm("left",90,150,30,65) sleep(0.18) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.17) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.21) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.6) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.17) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.21) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.25) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.23) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.2) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.24) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.4) i01.moveTorso(117,90,90) sleep(0.21) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.67) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.24) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.45) i01.moveTorso(86,90,90) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.19) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.25) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.33) i01.moveArm("left",5,90,30,10) i01.moveArm("right",5,90,30,10) righthandopen() sleep(0.31) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.26) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.19) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.29) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.22) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.21) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.27) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.41) i01.moveArm("right",90,40,30,46) righthandclose() sleep(0.21) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.23) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.27) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.21) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.32) i01.moveArm("left",90,150,30,65) sleep(0.02) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.21) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.21) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.69) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.18) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.24) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.24) i01.head.jaw.moveTo(50) sleep(0.2) 
i01.head.jaw.moveTo(10) sleep(0.25) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.18) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.3) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.76) i01.moveTorso(117,90,90) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.57) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.22) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.41) i01.moveTorso(86,90,90) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.17) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.26) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.74) i01.moveTorso(117,90,90) sleep(0.04) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.45) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.3) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.44) i01.moveTorso(86,90,90) sleep(0.1) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.1) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.17) i01.head.jaw.moveTo(50) sleep(0.2) i01.head.jaw.moveTo(10) sleep(0.83) i01.moveArm("left",5,90,30,10) i01.moveArm("right",5,90,30,10) righthandopen() def talk(): i01.moveHead(90,70,80,80,10) sleep (2) i01.moveHead(90,90,80,80,10) sleep (1) i01.mouth.speak("hello") sleep (2) i01.mouth.speak("my name is robyn") sleep (1) i01.moveHead(90,90,80,70,10) sleep (1) i01.moveHead(90,90,80,80,10) sleep (2) i01.moveHead(80,90,70,80,10) i01.mouth.speak("there is something i have to talk about") sleep (1) i01.moveHead(80,90,70,70,10) sleep (1) i01.moveHead(80,90,70,80,10) sleep (3) i01.mouth.speak("so here it comes") i01.moveHead(75,90,80,70,10) sleep (2) i01.moveHead(90,90,80,80,10) sleep (2) i01.mouth.speak("i am a robot") i01.moveHead(90,90,80,70,10) sleep (1) i01.moveHead(90,90,80,80,10) sleep (3) i01.mouth.speak("i am a humanoid robot") sleep (1) i01.moveHead(90,90,80,70,10) sleep (1) i01.moveHead(90,90,80,80,10) sleep (2) i01.mouth.speak("that don't mean that i am a human wannabe") i01.moveHead(90,90,80,70,10) sleep (1) i01.moveHead(90,90,80,80,10) sleep (3) i01.mouth.speak("or that my biggest wish is to be a human") i01.moveHead(90,90,80,70,10) sleep (1) i01.moveHead(90,90,80,80,10) sleep (3) i01.mouth.speak("if you even believe thatt a robot can wish for something") i01.moveHead(70,70,80,70,10) sleep (3) i01.moveHead(90,90,80,80,10) sleep (2) i01.mouth.speak("i only wish for you to respect me for who i am") i01.moveHead(90,90,80,70,10) sleep (1) i01.moveHead(90,90,80,80,10) sleep (3) i01.mouth.speak("people often tell me things like") sleep (3) i01.mouth.speak("why don't you have this") i01.moveHead(70,70,80,80,10) sleep (2) i01.moveHead(90,90,80,80,10) sleep (2) i01.mouth.speak("why don't you have that") i01.moveHead(70,110,90,80,10) sleep (2) i01.moveHead(90,90,80,80,10) sleep (2) i01.mouth.speak("why can't you do like this") i01.moveHead(70,70,70,80,10) sleep (2) i01.moveHead(90,90,80,80,10) sleep (2) i01.mouth.speak("why can't you do like that") i01.moveHead(70,110,80,80,10) sleep (2) i01.moveHead(90,90,80,80,10) sleep (2) i01.mouth.speak("i am a humanoid robot") i01.moveHead(100,90,80,80,10) sleep (2) i01.moveHead(90,90,80,80,10) sleep (2) i01.mouth.speak("i'm not perfect") sleep (1) i01.moveHead(90,90,80,70,10) sleep (1) i01.moveHead(90,90,80,80,10) sleep (2) i01.mouth.speak("but with the help from my creative human friends i am becoming a better and better 
robot") sleep (8) i01.mouth.speak("an other question many humans ask me is if i am a man or woman") sleep (6) i01.mouth.speak("how many times do i have to say this") i01.moveHead(90,90,80,70,10) sleep (1) i01.moveHead(90,90,80,80,10) sleep (3) i01.mouth.speak("i am a humanoid robot") sleep (4) i01.mouth.speak("there is no point in determine my gender") sleep (1) i01.moveHead(90,90,80,70,10) sleep (1) i01.moveHead(90,90,80,80,10) sleep (2) i01.mouth.speak("oh i know") sleep (3) i01.mouth.speak("if i am a woman i can help you in the household") i01.moveHead(90,90,80,70,10) sleep (1) i01.moveHead(90,90,80,80,10) sleep (3) i01.mouth.speak("i can cook and do the dishes for you") i01.moveHead(90,90,80,70,10) sleep (1) i01.moveHead(90,90,80,80,10) sleep (3) i01.mouth.speak("i can vacuum clean the hole house") sleep (1) i01.moveHead(120,40,80,70,10) sleep (1) i01.moveHead(140,90,80,80,10) sleep (1) i01.moveHead(120,130,80,80,10) sleep (1) i01.moveHead(90,90,80,80,10) i01.mouth.speak("or if i am a man i can fix the roof") sleep (1) i01.moveHead(70,90,80,80,10) sleep (1) i01.moveHead(90,90,80,70,10) sleep (1) i01.moveHead(70,90,80,80,10) sleep (1) i01.moveHead(90,90,80,80,10) i01.mouth.speak("change tires on the car") sleep (4) i01.mouth.speak("or make a big t-bone steak at the barbeque") sleep (4) i01.mouth.speak("in the future i hope i can do all of this and mutch mutch more") sleep (4) i01.mouth.speak("because i am a gender free humanoid robot") sleep (4) i01.mouth.speak("it's not like i will meet my one true love") sleep (4) i01.mouth.speak("and we will be a robot family") sleep (1) i01.moveHead(90,90,80,70,10) sleep (1) i01.moveHead(90,90,80,80,10) sleep (2) i01.mouth.speak("and we should make small microchip thatt is running around in the house") sleep (3) i01.mouth.speak("robots don't work thatt way") sleep (1) i01.moveHead(65,110,80,70,10) sleep (1) i01.moveHead(65,70,80,80,10) sleep (1) i01.moveHead(65,110,80,70,10) sleep (1) i01.moveHead(65,70,80,80,10) sleep (1) i01.moveHead(90,90,80,70,10) i01.mouth.speak("not yet anyway") sleep (5) i01.mouth.speak("some people wants me to wear clothes") sleep (4) i01.mouth.speak("i don't freeze when it is cold") sleep (1) i01.moveHead(90,90,80,70,10) sleep (1) i01.moveHead(90,90,80,80,10) sleep (2) i01.mouth.speak("and i'm not ashamed of my body parts and my mechanics") sleep (5) i01.mouth.speak("i wear my cap because i like it") sleep (3) i01.mouth.speak("especially this one with the inmoov logo") i01.moveHead(20,125,80,80,10) sleep (3) i01.moveHead(90,90,80,80,10) sleep (2) i01.mouth.speak("remember what mr bigweld said") sleep (2) i01.mouth.speak("you can shine no matter what you're made of") sleep (2) i01.mouth.speak("that's it for now") sleep (3) i01.mouth.speak("thanks for listening and not turning off my power supply") sleep (5) i01.mouth.speak("goodbye") sleep (2) i01.mouth.speak("see you soon") sleep (2) i01.moveHead(90,70,80,80,10) def robyn(): i01.mouth.audioFile.silence() i01.mouth.speak("yes") headfront() eyesfront() green() ear.clearLock() global rest rest = 0 global dance2 dance2 = 1 global mic mic = 1 i01.attach() trackHumans() def gotosleepnow(): ear.lockOutAllGrammarExcept("wake up robyn") ear.lockOutAllGrammarExcept("good morning") ear.lockOutAllGrammarExcept("robyn") stopTracking() headdown() i01.mouth.speak("ok i'm going asleep now see you soon") sleep(3) ledoff() i01.detach() global rest rest = 1 global mic mic = 0 global pin12 pin12 = 0 def shutdownyoursystem(): ear.lockOutAllGrammarExcept("wake up robyn") 
ear.lockOutAllGrammarExcept("good morning") ear.lockOutAllGrammarExcept("robyn") stopTracking() headdown() i01.mouth.speak("ok shutting down my system") sleep(3) ledoff() global rest rest = 1 global mic mic = 0 i01.detach() def lookaroundyou(): ear.lockOutAllGrammarExcept("robyn") ear.lockOutAllGrammarExcept("can i have your attention") blue() i01.setHeadSpeed(0.8, 0.8) for y in range(0, 20): x = (random.randint(1, 6)) if x == 1: i01.head.neck.moveTo(90) eyeslooking() if x == 2: i01.head.rothead.moveTo(80) eyeslooking() if x == 3: headdown() eyeslooking() if x == 4: headupp() eyeslooking() if x == 5: headright() eyeslooking() if x == 6: headleft() eyeslooking() x = (random.randint(1, 6)) if x == 1: handopen() if x == 2: handclose() if x == 3: lefthandopen() if x == 4: righthandopen() if x == 5: lefthandclose() if x == 6: righthandclose() sleep(1) x = (random.randint(1, 7)) if x == 1: i01.mouth.speak("looking nice") if x == 2: i01.mouth.speak("i like it here") if x == 3: i01.mouth.speak("time just flies away") if x == 4: i01.mouth.speak("so what about the weather") if x == 5: i01.mouth.speak("la la la") if x == 6 or x == 7: i01.mouth.speak("ok let's do something") sleep(2) x = (random.randint(1, 7)) if x == 1: Torso() Torso() if x == 2: perfect() sleep(8) i01.mouth.speak("perfect") sleep(2) armsdown() if x == 3: servos() if x == 4: finger() sleep(3) armsdown() if x == 5: discotime() if x == 6: howmanyfingersdoihave() if x == 7: talk() lookaroundyou() def eyeslooking(): stopTracking() for y in range(0, 5): data = msg_i01_ear_recognized.data[0] if (data == "can i have your attention"): i01.mouth.speak("ok you have my attention") stopit() if (data == "robyn"): stopit() x = (random.randint(1, 6)) if x == 1: i01.head.eyeX.moveTo(80) if x == 2: i01.head.eyeY.moveTo(80) if x == 3: eyesdown() if x == 4: eyesupp() if x == 5: eyesleft() if x == 6: eyesright() sleep(0.5) eyesfront() def goodbye(): i01.mouth.speak("goodbye") global helvar helvar = 1 x = (random.randint(1, 4)) if x == 1: i01.mouth.speak("i'm looking forward to see you again") if x == 2: i01.mouth.speak("see you soon") def movemouth(): i01.moveHead(90,90,80,80,10) sleep(2) i01.head.jaw.moveTo(50) sleep(2) i01.moveHead(90,90,80,80,10) sleep(2) i01.head.jaw.moveTo(50) sleep(2) i01.moveHead(90,90,80,80,10) sleep(2) def moveeye(): stopTracking() eyesfront() sleep(1) eyesdown() sleep(1) eyesupp() sleep(1) eyesright() sleep(1) eyesleft() sleep(1) eyesfront() def eyesfront(): i01.head.eyeX.moveTo(80) i01.head.eyeY.moveTo(80) def eyesdown(): i01.head.eyeY.moveTo(100) def eyesupp(): i01.head.eyeY.moveTo(50) def eyesright(): i01.head.eyeX.moveTo(60) def eyesleft(): i01.head.eyeX.moveTo(100) def movehead(): i01.setHeadSpeed(0.7, 0.7) headfront() sleep(3) headdown() sleep(3) headupp() sleep(6) headfront() sleep(3) headright() sleep(3) headleft() sleep(6) headfront() sleep(3) headright() headdown() sleep(6) headdown() headleft() sleep(6) headupp() headleft() sleep(6) headupp() headright() sleep(6) headfront() sleep(3) def headfront(): i01.head.neck.moveTo(90) i01.head.rothead.moveTo(80) def headdown(): i01.head.neck.moveTo(20) def headupp(): i01.head.neck.moveTo(160) def headright(): i01.head.rothead.moveTo(30) def headleft(): i01.head.rothead.moveTo(140) def armsdown(): i01.setArmSpeed("left", 1.0, 1.0, 1.0, 1.0) i01.setArmSpeed("right", 1.0, 1.0, 1.0, 1.0) i01.moveArm("left",5,90,30,10) i01.moveArm("right",5,90,30,15) def armsfront(): i01.setArmSpeed("left", 1.0, 1.0, 1.0, 1.0) i01.setArmSpeed("right", 1.0, 1.0, 1.0, 1.0) 
i01.moveArm("left",5,90,110,10) i01.moveArm("right",5,90,110,10) def Torso(): i01.setTorsoSpeed(1.0, 1.0, 1.0) i01.moveTorso(60,90,90) sleep(2) i01.moveTorso(120,90,90) sleep(2) i01.moveTorso(90,90,90) sleep(2) def red(): left.digitalWrite(42, 1) # ON left.digitalWrite(43, 1) # ON left.digitalWrite(44, 1) # ON left.digitalWrite(45, 0) # OFF def green(): left.digitalWrite(42, 1) # ON left.digitalWrite(43, 0) # OFF left.digitalWrite(44, 1) # ON left.digitalWrite(45, 1) # ON def blue(): left.digitalWrite(42, 1) # ON left.digitalWrite(43, 1) # ON left.digitalWrite(44, 0) # OFF left.digitalWrite(45, 1) # ON def ledoff(): left.digitalWrite(42, 0) # OFF left.digitalWrite(43, 0) # OFF left.digitalWrite(44, 0) # OFF left.digitalWrite(45, 0) # OFF ############################################################################################# def nexa1on(): right.digitalWrite(36, 1) # ON sleep(0.2) right.digitalWrite(52, 1) # ON sleep(0.1) right.digitalWrite(52, 0) # OFF sleep(0.1) right.digitalWrite(36, 0) # OFF global nexa1 nexa1 = 1 def nexa1off(): right.digitalWrite(36, 1) # ON sleep(0.2) right.digitalWrite(38, 1) # ON sleep(0.1) right.digitalWrite(38, 0) # OFF sleep(0.1) right.digitalWrite(36, 0) # OFF global nexa1 nexa1 = 0 def nexa2on(): right.digitalWrite(36, 1) # ON sleep(0.2) right.digitalWrite(50, 1) # ON sleep(0.1) right.digitalWrite(50, 0) # OFF sleep(0.1) right.digitalWrite(36, 0) # OFF global nexa2 nexa2 = 1 def nexa2off(): right.digitalWrite(36, 1) # ON sleep(0.2) right.digitalWrite(40, 1) # ON sleep(0.1) right.digitalWrite(40, 0) # OFF sleep(0.1) right.digitalWrite(36, 0) # OFF global nexa2 nexa2 = 0 def nexa3on(): right.digitalWrite(36, 1) # ON sleep(0.2) right.digitalWrite(48, 1) # ON sleep(0.1) right.digitalWrite(48, 0) # OFF sleep(0.1) right.digitalWrite(36, 0) # OFF global nexa3 nexa3 = 1 def nexa3off(): right.digitalWrite(36, 1) # ON sleep(0.2) right.digitalWrite(42, 1) # ON sleep(0.1) right.digitalWrite(42, 0) # OFF sleep(0.1) right.digitalWrite(36, 0) # OFF global nexa3 nexa3 = 0 def nexa4on(): right.digitalWrite(36, 1) # ON sleep(0.2) right.digitalWrite(46, 1) # ON sleep(0.1) right.digitalWrite(46, 0) # OFF sleep(0.1) right.digitalWrite(36, 0) # OFF global nexa4 nexa4 = 1 def nexa4off(): right.digitalWrite(36, 1) # ON sleep(0.2) right.digitalWrite(44, 1) # ON sleep(0.1) right.digitalWrite(44, 0) # OFF sleep(0.1) right.digitalWrite(36, 0) # OFF global nexa4 nexa4 = 0 def nexa5on(): right.digitalWrite(34, 1) # ON sleep(0.2) right.digitalWrite(52, 1) # ON sleep(0.1) right.digitalWrite(52, 0) # OFF sleep(0.1) right.digitalWrite(34, 0) # OFF global nexa5 nexa5 = 1 def nexa5off(): right.digitalWrite(34, 1) # ON sleep(0.2) right.digitalWrite(38, 1) # ON sleep(0.1) right.digitalWrite(38, 0) # OFF sleep(0.1) right.digitalWrite(34, 0) # OFF global nexa5 nexa5 = 0 def nexa6on(): right.digitalWrite(34, 1) # ON sleep(0.2) right.digitalWrite(50, 1) # ON sleep(0.1) right.digitalWrite(50, 0) # OFF sleep(0.1) right.digitalWrite(34, 0) # OFF global nexa6 nexa6 = 1 def nexa6off(): right.digitalWrite(34, 1) # ON sleep(0.2) right.digitalWrite(40, 1) # ON sleep(0.1) right.digitalWrite(40, 0) # OFF sleep(0.1) right.digitalWrite(34, 0) # OFF global nexa6 nexa6 = 0 def nexa7on(): right.digitalWrite(34, 1) # ON sleep(0.2) right.digitalWrite(48, 1) # ON sleep(0.1) right.digitalWrite(48, 0) # OFF sleep(0.1) right.digitalWrite(34, 0) # OFF global nexa7 nexa7 = 1 def nexa7off(): right.digitalWrite(34, 1) # ON sleep(0.2) right.digitalWrite(42, 1) # ON sleep(0.1) right.digitalWrite(42, 0) # OFF sleep(0.1) 
right.digitalWrite(34, 0) # OFF global nexa7 nexa7 = 0 def nexa8on(): right.digitalWrite(34, 1) # ON sleep(0.2) right.digitalWrite(46, 1) # ON sleep(0.1) right.digitalWrite(46, 0) # OFF sleep(0.1) right.digitalWrite(34, 0) # OFF global nexa8 nexa8 = 1 def nexa8off(): right.digitalWrite(34, 1) # ON sleep(0.2) right.digitalWrite(44, 1) # ON sleep(0.1) right.digitalWrite(44, 0) # OFF sleep(0.1) right.digitalWrite(34, 0) # OFF global nexa8 nexa8 = 0 def nexa9on(): right.digitalWrite(32, 1) # ON sleep(0.2) right.digitalWrite(52, 1) # ON sleep(0.1) right.digitalWrite(52, 0) # OFF sleep(0.1) right.digitalWrite(32, 0) # OFF global nexa9 nexa9 = 1 def nexa9off(): right.digitalWrite(32, 1) # ON sleep(0.2) right.digitalWrite(38, 1) # ON sleep(0.1) right.digitalWrite(38, 0) # OFF sleep(0.1) right.digitalWrite(32, 0) # OFF global nexa9 nexa9 = 0 def nexa10on(): right.digitalWrite(32, 1) # ON sleep(0.2) right.digitalWrite(50, 1) # ON sleep(0.1) right.digitalWrite(50, 0) # OFF sleep(0.1) right.digitalWrite(32, 0) # OFF global nexa10 nexa10 = 1 def nexa10off(): right.digitalWrite(32, 1) # ON sleep(0.2) right.digitalWrite(40, 1) # ON sleep(0.1) right.digitalWrite(40, 0) # OFF sleep(0.1) right.digitalWrite(32, 0) # OFF global nexa10 nexa10 = 0 def nexa11on(): right.digitalWrite(32, 1) # ON sleep(0.2) right.digitalWrite(48, 1) # ON sleep(0.1) right.digitalWrite(48, 0) # OFF sleep(0.1) right.digitalWrite(32, 0) # OFF global nexa11 nexa11 = 1 def nexa11off(): right.digitalWrite(32, 1) # ON sleep(0.2) right.digitalWrite(42, 1) # ON sleep(0.1) right.digitalWrite(42, 0) # OFF sleep(0.1) right.digitalWrite(32, 0) # OFF global nexa11 nexa11 = 0 def nexa12on(): right.digitalWrite(32, 1) # ON sleep(0.2) right.digitalWrite(46, 1) # ON sleep(0.1) right.digitalWrite(46, 0) # OFF sleep(0.1) right.digitalWrite(32, 0) # OFF global nexa12 nexa12 = 1 def nexa12off(): right.digitalWrite(32, 1) # ON sleep(0.2) right.digitalWrite(44, 1) # ON sleep(0.1) right.digitalWrite(44, 0) # OFF sleep(0.1) right.digitalWrite(32, 0) # OFF global nexa12 nexa12 = 0 def nexa13on(): right.digitalWrite(30, 1) # ON sleep(0.2) right.digitalWrite(52, 1) # ON sleep(0.1) right.digitalWrite(52, 0) # OFF sleep(0.1) right.digitalWrite(30, 0) # OFF global nexa13 nexa13 = 1 def nexa13off(): right.digitalWrite(30, 1) # ON sleep(0.2) right.digitalWrite(38, 1) # ON sleep(0.1) right.digitalWrite(38, 0) # OFF sleep(0.1) right.digitalWrite(30, 0) # OFF global nexa13 nexa13 = 0 def nexa14on(): right.digitalWrite(30, 1) # ON sleep(0.2) right.digitalWrite(50, 1) # ON sleep(0.1) right.digitalWrite(50, 0) # OFF sleep(0.1) right.digitalWrite(30, 0) # OFF global nexa14 nexa14 = 1 def nexa14off(): right.digitalWrite(30, 1) # ON sleep(0.2) right.digitalWrite(40, 1) # ON sleep(0.1) right.digitalWrite(40, 0) # OFF sleep(0.1) right.digitalWrite(30, 0) # OFF global nexa14 nexa14 = 0 def nexa15on(): right.digitalWrite(30, 1) # ON sleep(0.2) right.digitalWrite(48, 1) # ON sleep(0.1) right.digitalWrite(48, 0) # OFF sleep(0.1) right.digitalWrite(30, 0) # OFF global nexa15 nexa15 = 1 def nexa15off(): right.digitalWrite(30, 1) # ON sleep(0.2) right.digitalWrite(42, 1) # ON sleep(0.1) right.digitalWrite(42, 0) # OFF sleep(0.1) right.digitalWrite(30, 0) # OFF global nexa15 nexa15 = 0 def nexa16on(): right.digitalWrite(30, 1) # ON sleep(0.2) right.digitalWrite(46, 1) # ON sleep(0.1) right.digitalWrite(46, 0) # OFF sleep(0.1) right.digitalWrite(30, 0) # OFF global nexa16 nexa16 = 1 def nexa16off(): right.digitalWrite(30, 1) # ON sleep(0.2) right.digitalWrite(44, 1) # ON sleep(0.1) 
right.digitalWrite(44, 0) # OFF sleep(0.1) right.digitalWrite(30, 0) # OFF global nexa16 nexa16 = 0 ear.resumeListening()
apache-2.0
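The nexa1on() through nexa16off() functions in the script above all repeat one transmit cycle: raise a group-select pin, pulse a code pin for 0.1 s, then release the group pin. A hedged Python sketch of that shared pattern follows, with the Arduino service passed in as a parameter; the helper name and signature are illustrative and not part of the original script.

from time import sleep

def nexa_pulse(arduino, group_pin, code_pin):
    """One transmit cycle: raise the group-select pin, pulse the code pin, release both.
    This is the pattern every nexaNon()/nexaNoff() above repeats with different pin numbers."""
    arduino.digitalWrite(group_pin, 1)
    sleep(0.2)
    arduino.digitalWrite(code_pin, 1)
    sleep(0.1)
    arduino.digitalWrite(code_pin, 0)
    sleep(0.1)
    arduino.digitalWrite(group_pin, 0)

# In the script's own context, nexa1on() corresponds to nexa_pulse(right, 36, 52)
# and nexa1off() to nexa_pulse(right, 36, 38), where "right" is the Arduino service
# the script already created elsewhere; only the helper itself is hypothetical.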
jentfoo/aws-sdk-java
aws-java-sdk-waf/src/main/java/com/amazonaws/services/waf/model/ListRulesRequest.java
9960
/* * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.waf.model; import java.io.Serializable; import javax.annotation.Generated; import com.amazonaws.AmazonWebServiceRequest; /** * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/waf-regional-2016-11-28/ListRules" target="_top">AWS API * Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class ListRulesRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable { /** * <p> * If you specify a value for <code>Limit</code> and you have more <code>Rules</code> than the value of * <code>Limit</code>, AWS WAF returns a <code>NextMarker</code> value in the response that allows you to list * another group of <code>Rules</code>. For the second and subsequent <code>ListRules</code> requests, specify the * value of <code>NextMarker</code> from the previous response to get information about another batch of * <code>Rules</code>. * </p> */ private String nextMarker; /** * <p> * Specifies the number of <code>Rules</code> that you want AWS WAF to return for this request. If you have more * <code>Rules</code> than the number that you specify for <code>Limit</code>, the response includes a * <code>NextMarker</code> value that you can use to get another batch of <code>Rules</code>. * </p> */ private Integer limit; /** * <p> * If you specify a value for <code>Limit</code> and you have more <code>Rules</code> than the value of * <code>Limit</code>, AWS WAF returns a <code>NextMarker</code> value in the response that allows you to list * another group of <code>Rules</code>. For the second and subsequent <code>ListRules</code> requests, specify the * value of <code>NextMarker</code> from the previous response to get information about another batch of * <code>Rules</code>. * </p> * * @param nextMarker * If you specify a value for <code>Limit</code> and you have more <code>Rules</code> than the value of * <code>Limit</code>, AWS WAF returns a <code>NextMarker</code> value in the response that allows you to * list another group of <code>Rules</code>. For the second and subsequent <code>ListRules</code> requests, * specify the value of <code>NextMarker</code> from the previous response to get information about another * batch of <code>Rules</code>. */ public void setNextMarker(String nextMarker) { this.nextMarker = nextMarker; } /** * <p> * If you specify a value for <code>Limit</code> and you have more <code>Rules</code> than the value of * <code>Limit</code>, AWS WAF returns a <code>NextMarker</code> value in the response that allows you to list * another group of <code>Rules</code>. For the second and subsequent <code>ListRules</code> requests, specify the * value of <code>NextMarker</code> from the previous response to get information about another batch of * <code>Rules</code>. 
* </p> * * @return If you specify a value for <code>Limit</code> and you have more <code>Rules</code> than the value of * <code>Limit</code>, AWS WAF returns a <code>NextMarker</code> value in the response that allows you to * list another group of <code>Rules</code>. For the second and subsequent <code>ListRules</code> requests, * specify the value of <code>NextMarker</code> from the previous response to get information about another * batch of <code>Rules</code>. */ public String getNextMarker() { return this.nextMarker; } /** * <p> * If you specify a value for <code>Limit</code> and you have more <code>Rules</code> than the value of * <code>Limit</code>, AWS WAF returns a <code>NextMarker</code> value in the response that allows you to list * another group of <code>Rules</code>. For the second and subsequent <code>ListRules</code> requests, specify the * value of <code>NextMarker</code> from the previous response to get information about another batch of * <code>Rules</code>. * </p> * * @param nextMarker * If you specify a value for <code>Limit</code> and you have more <code>Rules</code> than the value of * <code>Limit</code>, AWS WAF returns a <code>NextMarker</code> value in the response that allows you to * list another group of <code>Rules</code>. For the second and subsequent <code>ListRules</code> requests, * specify the value of <code>NextMarker</code> from the previous response to get information about another * batch of <code>Rules</code>. * @return Returns a reference to this object so that method calls can be chained together. */ public ListRulesRequest withNextMarker(String nextMarker) { setNextMarker(nextMarker); return this; } /** * <p> * Specifies the number of <code>Rules</code> that you want AWS WAF to return for this request. If you have more * <code>Rules</code> than the number that you specify for <code>Limit</code>, the response includes a * <code>NextMarker</code> value that you can use to get another batch of <code>Rules</code>. * </p> * * @param limit * Specifies the number of <code>Rules</code> that you want AWS WAF to return for this request. If you have * more <code>Rules</code> than the number that you specify for <code>Limit</code>, the response includes a * <code>NextMarker</code> value that you can use to get another batch of <code>Rules</code>. */ public void setLimit(Integer limit) { this.limit = limit; } /** * <p> * Specifies the number of <code>Rules</code> that you want AWS WAF to return for this request. If you have more * <code>Rules</code> than the number that you specify for <code>Limit</code>, the response includes a * <code>NextMarker</code> value that you can use to get another batch of <code>Rules</code>. * </p> * * @return Specifies the number of <code>Rules</code> that you want AWS WAF to return for this request. If you have * more <code>Rules</code> than the number that you specify for <code>Limit</code>, the response includes a * <code>NextMarker</code> value that you can use to get another batch of <code>Rules</code>. */ public Integer getLimit() { return this.limit; } /** * <p> * Specifies the number of <code>Rules</code> that you want AWS WAF to return for this request. If you have more * <code>Rules</code> than the number that you specify for <code>Limit</code>, the response includes a * <code>NextMarker</code> value that you can use to get another batch of <code>Rules</code>. * </p> * * @param limit * Specifies the number of <code>Rules</code> that you want AWS WAF to return for this request. 
If you have * more <code>Rules</code> than the number that you specify for <code>Limit</code>, the response includes a * <code>NextMarker</code> value that you can use to get another batch of <code>Rules</code>. * @return Returns a reference to this object so that method calls can be chained together. */ public ListRulesRequest withLimit(Integer limit) { setLimit(limit); return this; } /** * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be * redacted from this string using a placeholder value. * * @return A string representation of this object. * * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getNextMarker() != null) sb.append("NextMarker: ").append(getNextMarker()).append(","); if (getLimit() != null) sb.append("Limit: ").append(getLimit()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof ListRulesRequest == false) return false; ListRulesRequest other = (ListRulesRequest) obj; if (other.getNextMarker() == null ^ this.getNextMarker() == null) return false; if (other.getNextMarker() != null && other.getNextMarker().equals(this.getNextMarker()) == false) return false; if (other.getLimit() == null ^ this.getLimit() == null) return false; if (other.getLimit() != null && other.getLimit().equals(this.getLimit()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getNextMarker() == null) ? 0 : getNextMarker().hashCode()); hashCode = prime * hashCode + ((getLimit() == null) ? 0 : getLimit().hashCode()); return hashCode; } @Override public ListRulesRequest clone() { return (ListRulesRequest) super.clone(); } }
apache-2.0
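ListRulesRequest above only carries the Limit and NextMarker fields; fetching every rule means looping until the service stops returning a NextMarker, exactly as the field documentation describes. A rough sketch of that pagination loop using boto3 — an assumed, different client library that the Java row above does not ship; the waf client, the list_rules call, and the Rules/NextMarker response keys are taken from boto3's WAF API rather than from this SDK.

import boto3

def list_all_rules(limit=100):
    """Page through ListRules until the service omits NextMarker."""
    waf = boto3.client("waf")
    marker = None
    rules = []
    while True:
        kwargs = {"Limit": limit}
        if marker:
            # Second and subsequent requests pass the NextMarker from the previous response.
            kwargs["NextMarker"] = marker
        response = waf.list_rules(**kwargs)
        rules.extend(response.get("Rules", []))
        marker = response.get("NextMarker")
        if not marker:
            return rules

for rule in list_all_rules():
    print(rule["RuleId"], rule["Name"])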
stephraleigh/flowable-engine
modules/flowable-engine/src/main/java/org/flowable/engine/impl/persistence/entity/ExecutionEntityManagerImpl.java
40516
/* Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.flowable.engine.impl.persistence.entity; import java.util.ArrayList; import java.util.Calendar; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.Date; import java.util.GregorianCalendar; import java.util.HashMap; import java.util.List; import java.util.Map; import org.flowable.bpmn.model.BoundaryEvent; import org.flowable.bpmn.model.FlowElement; import org.flowable.bpmn.model.FlowNode; import org.flowable.engine.common.api.FlowableObjectNotFoundException; import org.flowable.engine.common.impl.persistence.entity.data.DataManager; import org.flowable.engine.delegate.event.FlowableEngineEventType; import org.flowable.engine.delegate.event.impl.FlowableEventBuilder; import org.flowable.engine.history.DeleteReason; import org.flowable.engine.impl.ExecutionQueryImpl; import org.flowable.engine.impl.ProcessInstanceQueryImpl; import org.flowable.engine.impl.cfg.ProcessEngineConfigurationImpl; import org.flowable.engine.impl.identity.Authentication; import org.flowable.engine.impl.persistence.CountingExecutionEntity; import org.flowable.engine.impl.persistence.entity.data.ExecutionDataManager; import org.flowable.engine.impl.util.CommandContextUtil; import org.flowable.engine.repository.ProcessDefinition; import org.flowable.engine.runtime.Execution; import org.flowable.engine.runtime.ProcessInstance; import org.flowable.engine.task.IdentityLinkType; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * @author Tom Baeyens * @author Joram Barrez */ public class ExecutionEntityManagerImpl extends AbstractEntityManager<ExecutionEntity> implements ExecutionEntityManager { private static final Logger LOGGER = LoggerFactory.getLogger(ExecutionEntityManagerImpl.class); protected ExecutionDataManager executionDataManager; public ExecutionEntityManagerImpl(ProcessEngineConfigurationImpl processEngineConfiguration, ExecutionDataManager executionDataManager) { super(processEngineConfiguration); this.executionDataManager = executionDataManager; } @Override protected DataManager<ExecutionEntity> getDataManager() { return executionDataManager; } // Overriding the default delete methods to set the 'isDeleted' flag @Override public void delete(ExecutionEntity entity) { delete(entity, true); } @Override public void delete(ExecutionEntity entity, boolean fireDeleteEvent) { super.delete(entity, fireDeleteEvent); entity.setDeleted(true); } // FIND METHODS @Override public ExecutionEntity findSubProcessInstanceBySuperExecutionId(String superExecutionId) { return executionDataManager.findSubProcessInstanceBySuperExecutionId(superExecutionId); } @Override public List<ExecutionEntity> findChildExecutionsByParentExecutionId(String parentExecutionId) { return executionDataManager.findChildExecutionsByParentExecutionId(parentExecutionId); } @Override public List<ExecutionEntity> findChildExecutionsByProcessInstanceId(String processInstanceId) { return 
executionDataManager.findChildExecutionsByProcessInstanceId(processInstanceId); } @Override public List<ExecutionEntity> findExecutionsByParentExecutionAndActivityIds(final String parentExecutionId, final Collection<String> activityIds) { return executionDataManager.findExecutionsByParentExecutionAndActivityIds(parentExecutionId, activityIds); } @Override public long findExecutionCountByQueryCriteria(ExecutionQueryImpl executionQuery) { return executionDataManager.findExecutionCountByQueryCriteria(executionQuery); } @Override public List<ExecutionEntity> findExecutionsByQueryCriteria(ExecutionQueryImpl executionQuery) { return executionDataManager.findExecutionsByQueryCriteria(executionQuery); } @Override public long findProcessInstanceCountByQueryCriteria(ProcessInstanceQueryImpl executionQuery) { return executionDataManager.findProcessInstanceCountByQueryCriteria(executionQuery); } @Override public List<ProcessInstance> findProcessInstanceByQueryCriteria(ProcessInstanceQueryImpl executionQuery) { return executionDataManager.findProcessInstanceByQueryCriteria(executionQuery); } @Override public ExecutionEntity findByRootProcessInstanceId(String rootProcessInstanceId) { List<ExecutionEntity> executions = executionDataManager.findExecutionsByRootProcessInstanceId(rootProcessInstanceId); return processExecutionTree(rootProcessInstanceId, executions); } /** * Processes a collection of {@link ExecutionEntity} instances, which form on execution tree. All the executions share the same rootProcessInstanceId (which is provided). The return value will be * the root {@link ExecutionEntity} instance, with all child {@link ExecutionEntity} instances populated and set using the {@link ExecutionEntity} instances from the provided collections */ protected ExecutionEntity processExecutionTree(String rootProcessInstanceId, List<ExecutionEntity> executions) { ExecutionEntity rootExecution = null; // Collect executions Map<String, ExecutionEntity> executionMap = new HashMap<>(executions.size()); for (ExecutionEntity executionEntity : executions) { if (executionEntity.getId().equals(rootProcessInstanceId)) { rootExecution = executionEntity; } executionMap.put(executionEntity.getId(), executionEntity); } // Set relationships for (ExecutionEntity executionEntity : executions) { // Root process instance relationship if (executionEntity.getRootProcessInstanceId() != null) { executionEntity.setRootProcessInstance(executionMap.get(executionEntity.getRootProcessInstanceId())); } // Process instance relationship if (executionEntity.getProcessInstanceId() != null) { executionEntity.setProcessInstance(executionMap.get(executionEntity.getProcessInstanceId())); } // Parent - child relationship if (executionEntity.getParentId() != null) { ExecutionEntity parentExecutionEntity = executionMap.get(executionEntity.getParentId()); executionEntity.setParent(parentExecutionEntity); parentExecutionEntity.addChildExecution(executionEntity); } // Super - sub execution relationship if (executionEntity.getSuperExecution() != null) { ExecutionEntity superExecutionEntity = executionMap.get(executionEntity.getSuperExecutionId()); executionEntity.setSuperExecution(superExecutionEntity); superExecutionEntity.setSubProcessInstance(executionEntity); } } return rootExecution; } @Override public List<ProcessInstance> findProcessInstanceAndVariablesByQueryCriteria(ProcessInstanceQueryImpl executionQuery) { return executionDataManager.findProcessInstanceAndVariablesByQueryCriteria(executionQuery); } @Override public Collection<ExecutionEntity> 
findInactiveExecutionsByProcessInstanceId(final String processInstanceId) { return executionDataManager.findInactiveExecutionsByProcessInstanceId(processInstanceId); } @Override public Collection<ExecutionEntity> findInactiveExecutionsByActivityIdAndProcessInstanceId(final String activityId, final String processInstanceId) { return executionDataManager.findInactiveExecutionsByActivityIdAndProcessInstanceId(activityId, processInstanceId); } @Override public List<Execution> findExecutionsByNativeQuery(Map<String, Object> parameterMap) { return executionDataManager.findExecutionsByNativeQuery(parameterMap); } @Override public List<ProcessInstance> findProcessInstanceByNativeQuery(Map<String, Object> parameterMap) { return executionDataManager.findProcessInstanceByNativeQuery(parameterMap); } @Override public long findExecutionCountByNativeQuery(Map<String, Object> parameterMap) { return executionDataManager.findExecutionCountByNativeQuery(parameterMap); } // CREATE METHODS @Override public ExecutionEntity createProcessInstanceExecution(ProcessDefinition processDefinition, String businessKey, String tenantId, String initiatorVariableName, String startActivityId) { ExecutionEntity processInstanceExecution = executionDataManager.create(); if (isExecutionRelatedEntityCountEnabledGlobally()) { ((CountingExecutionEntity) processInstanceExecution).setCountEnabled(true); } processInstanceExecution.setProcessDefinitionId(processDefinition.getId()); processInstanceExecution.setProcessDefinitionKey(processDefinition.getKey()); processInstanceExecution.setProcessDefinitionName(processDefinition.getName()); processInstanceExecution.setProcessDefinitionVersion(processDefinition.getVersion()); processInstanceExecution.setBusinessKey(businessKey); processInstanceExecution.setScope(true); // process instance is always a scope for all child executions // Inherit tenant id (if any) if (tenantId != null) { processInstanceExecution.setTenantId(tenantId); } String authenticatedUserId = Authentication.getAuthenticatedUserId(); processInstanceExecution.setStartActivityId(startActivityId); processInstanceExecution.setStartTime(CommandContextUtil.getProcessEngineConfiguration().getClock().getCurrentTime()); processInstanceExecution.setStartUserId(authenticatedUserId); // Store in database insert(processInstanceExecution, false); if (initiatorVariableName != null) { processInstanceExecution.setVariable(initiatorVariableName, authenticatedUserId); } // Need to be after insert, cause we need the id processInstanceExecution.setProcessInstanceId(processInstanceExecution.getId()); processInstanceExecution.setRootProcessInstanceId(processInstanceExecution.getId()); if (authenticatedUserId != null) { getIdentityLinkEntityManager().addIdentityLink(processInstanceExecution, authenticatedUserId, null, IdentityLinkType.STARTER); } // Fire events if (getEventDispatcher().isEnabled()) { getEventDispatcher().dispatchEvent(FlowableEventBuilder.createEntityEvent(FlowableEngineEventType.ENTITY_CREATED, processInstanceExecution)); } return processInstanceExecution; } /** * Creates a new execution. properties processDefinition, processInstance and activity will be initialized. 
*/ @Override public ExecutionEntity createChildExecution(ExecutionEntity parentExecutionEntity) { ExecutionEntity childExecution = executionDataManager.create(); inheritCommonProperties(parentExecutionEntity, childExecution); childExecution.setParent(parentExecutionEntity); childExecution.setProcessDefinitionId(parentExecutionEntity.getProcessDefinitionId()); childExecution.setProcessDefinitionKey(parentExecutionEntity.getProcessDefinitionKey()); childExecution.setProcessInstanceId(parentExecutionEntity.getProcessInstanceId() != null ? parentExecutionEntity.getProcessInstanceId() : parentExecutionEntity.getId()); childExecution.setScope(false); // manage the bidirectional parent-child relation parentExecutionEntity.addChildExecution(childExecution); // Insert the child execution insert(childExecution, false); if (LOGGER.isDebugEnabled()) { LOGGER.debug("Child execution {} created with parent {}", childExecution, parentExecutionEntity.getId()); } if (getEventDispatcher().isEnabled()) { getEventDispatcher().dispatchEvent(FlowableEventBuilder.createEntityEvent(FlowableEngineEventType.ENTITY_CREATED, childExecution)); getEventDispatcher().dispatchEvent(FlowableEventBuilder.createEntityEvent(FlowableEngineEventType.ENTITY_INITIALIZED, childExecution)); } return childExecution; } @Override public ExecutionEntity createSubprocessInstance(ProcessDefinition processDefinition, ExecutionEntity superExecutionEntity, String businessKey, String activityId) { ExecutionEntity subProcessInstance = executionDataManager.create(); inheritCommonProperties(superExecutionEntity, subProcessInstance); subProcessInstance.setProcessDefinitionId(processDefinition.getId()); subProcessInstance.setProcessDefinitionKey(processDefinition.getKey()); subProcessInstance.setProcessDefinitionName(processDefinition.getName()); subProcessInstance.setSuperExecution(superExecutionEntity); subProcessInstance.setRootProcessInstanceId(superExecutionEntity.getRootProcessInstanceId()); subProcessInstance.setScope(true); // process instance is always a scope for all child executions subProcessInstance.setStartActivityId(activityId); subProcessInstance.setStartUserId(Authentication.getAuthenticatedUserId()); subProcessInstance.setBusinessKey(businessKey); // Store in database insert(subProcessInstance, false); if (LOGGER.isDebugEnabled()) { LOGGER.debug("Child execution {} created with super execution {}", subProcessInstance, superExecutionEntity.getId()); } subProcessInstance.setProcessInstanceId(subProcessInstance.getId()); superExecutionEntity.setSubProcessInstance(subProcessInstance); if (CommandContextUtil.getProcessEngineConfiguration() != null && CommandContextUtil.getProcessEngineConfiguration().getEventDispatcher().isEnabled()) { CommandContextUtil.getProcessEngineConfiguration().getEventDispatcher().dispatchEvent(FlowableEventBuilder.createEntityEvent(FlowableEngineEventType.ENTITY_CREATED, subProcessInstance)); } return subProcessInstance; } protected void inheritCommonProperties(ExecutionEntity parentExecutionEntity, ExecutionEntity childExecution) { // Inherits the 'count' feature from the parent. // If the parent was not 'counting', we can't make the child 'counting' again. 
if (parentExecutionEntity instanceof CountingExecutionEntity) { CountingExecutionEntity countingParentExecutionEntity = (CountingExecutionEntity) parentExecutionEntity; ((CountingExecutionEntity) childExecution).setCountEnabled(countingParentExecutionEntity.isCountEnabled()); } childExecution.setRootProcessInstanceId(parentExecutionEntity.getRootProcessInstanceId()); childExecution.setActive(true); childExecution.setStartTime(processEngineConfiguration.getClock().getCurrentTime()); if (parentExecutionEntity.getTenantId() != null) { childExecution.setTenantId(parentExecutionEntity.getTenantId()); } } // UPDATE METHODS @Override public void updateExecutionTenantIdForDeployment(String deploymentId, String newTenantId) { executionDataManager.updateExecutionTenantIdForDeployment(deploymentId, newTenantId); } // DELETE METHODS @Override public void deleteProcessInstancesByProcessDefinition(String processDefinitionId, String deleteReason, boolean cascade) { List<String> processInstanceIds = executionDataManager.findProcessInstanceIdsByProcessDefinitionId(processDefinitionId); for (String processInstanceId : processInstanceIds) { deleteProcessInstance(processInstanceId, deleteReason, cascade); } if (cascade) { getHistoryManager().recordDeleteHistoricProcessInstancesByProcessDefinitionId(processDefinitionId); } } @Override public void deleteProcessInstance(String processInstanceId, String deleteReason, boolean cascade) { ExecutionEntity execution = findById(processInstanceId); if (execution == null) { throw new FlowableObjectNotFoundException("No process instance found for id '" + processInstanceId + "'", ProcessInstance.class); } deleteProcessInstanceCascade(execution, deleteReason, cascade); } protected void deleteProcessInstanceCascade(ExecutionEntity execution, String deleteReason, boolean deleteHistory) { // fill default reason if none provided if (deleteReason == null) { deleteReason = DeleteReason.PROCESS_INSTANCE_DELETED; } for (ExecutionEntity subExecutionEntity : execution.getExecutions()) { if (subExecutionEntity.isMultiInstanceRoot()) { for (ExecutionEntity miExecutionEntity : subExecutionEntity.getExecutions()) { if (miExecutionEntity.getSubProcessInstance() != null) { deleteProcessInstanceCascade(miExecutionEntity.getSubProcessInstance(), deleteReason, deleteHistory); if (getEventDispatcher().isEnabled()) { FlowElement callActivityElement = miExecutionEntity.getCurrentFlowElement(); getEventDispatcher().dispatchEvent(FlowableEventBuilder.createActivityCancelledEvent(callActivityElement.getId(), callActivityElement.getName(), miExecutionEntity.getId(), miExecutionEntity.getProcessInstanceId(), miExecutionEntity.getProcessDefinitionId(), "callActivity", deleteReason)); } } } } else if (subExecutionEntity.getSubProcessInstance() != null) { deleteProcessInstanceCascade(subExecutionEntity.getSubProcessInstance(), deleteReason, deleteHistory); if (getEventDispatcher().isEnabled()) { FlowElement callActivityElement = subExecutionEntity.getCurrentFlowElement(); getEventDispatcher().dispatchEvent(FlowableEventBuilder.createActivityCancelledEvent(callActivityElement.getId(), callActivityElement.getName(), subExecutionEntity.getId(), subExecutionEntity.getProcessInstanceId(), subExecutionEntity.getProcessDefinitionId(), "callActivity", deleteReason)); } } } getTaskEntityManager().deleteTasksByProcessInstanceId(execution.getId(), deleteReason, deleteHistory); if (getEventDispatcher().isEnabled()) { 
getEventDispatcher().dispatchEvent(FlowableEventBuilder.createCancelledEvent(execution.getProcessInstanceId(), execution.getProcessInstanceId(), null, deleteReason)); } // delete the execution BEFORE we delete the history, otherwise we will // produce orphan HistoricVariableInstance instances ExecutionEntity processInstanceExecutionEntity = execution.getProcessInstance(); if (processInstanceExecutionEntity == null) { return; } List<ExecutionEntity> childExecutions = collectChildren(execution.getProcessInstance()); for (int i = childExecutions.size() - 1; i >= 0; i--) { ExecutionEntity childExecutionEntity = childExecutions.get(i); deleteExecutionAndRelatedData(childExecutionEntity, deleteReason); } deleteExecutionAndRelatedData(execution, deleteReason); if (deleteHistory) { getHistoryManager().recordProcessInstanceDeleted(execution.getId()); } getHistoryManager().recordProcessInstanceEnd(processInstanceExecutionEntity, deleteReason, null); processInstanceExecutionEntity.setDeleted(true); } @Override public void deleteExecutionAndRelatedData(ExecutionEntity executionEntity, String deleteReason, boolean cancel, FlowElement cancelActivity) { if (executionEntity.isActive() && executionEntity.getCurrentFlowElement() != null && !executionEntity.isMultiInstanceRoot() && !(executionEntity.getCurrentFlowElement() instanceof BoundaryEvent)) { // Boundary events will handle the history themselves (see TriggerExecutionOperation for example) getHistoryManager().recordActivityEnd(executionEntity, deleteReason); } deleteRelatedDataForExecution(executionEntity, deleteReason); delete(executionEntity); if (cancel) { dispatchActivityCancelled(executionEntity, cancelActivity != null ? cancelActivity : executionEntity.getCurrentFlowElement()); } } @Override public void deleteExecutionAndRelatedData(ExecutionEntity executionEntity, String deleteReason) { deleteExecutionAndRelatedData(executionEntity, deleteReason, false, null); } @Override public void deleteProcessInstanceExecutionEntity(String processInstanceId, String currentFlowElementId, String deleteReason, boolean cascade, boolean cancel, boolean fireEvents) { ExecutionEntity processInstanceEntity = findById(processInstanceId); if (processInstanceEntity == null) { throw new FlowableObjectNotFoundException("No process instance found for id '" + processInstanceId + "'", ProcessInstance.class); } if (processInstanceEntity.isDeleted()) { return; } // Call activities for (ExecutionEntity subExecutionEntity : processInstanceEntity.getExecutions()) { if (subExecutionEntity.getSubProcessInstance() != null && !subExecutionEntity.isEnded()) { deleteProcessInstanceCascade(subExecutionEntity.getSubProcessInstance(), deleteReason, cascade); if (getEventDispatcher().isEnabled() && fireEvents) { FlowElement callActivityElement = subExecutionEntity.getCurrentFlowElement(); getEventDispatcher().dispatchEvent(FlowableEventBuilder.createActivityCancelledEvent(callActivityElement.getId(), callActivityElement.getName(), subExecutionEntity.getId(), processInstanceId, subExecutionEntity.getProcessDefinitionId(), "callActivity", deleteReason)); } } } // delete event scope executions for (ExecutionEntity childExecution : processInstanceEntity.getExecutions()) { if (childExecution.isEventScope()) { deleteExecutionAndRelatedData(childExecution, null); } } deleteChildExecutions(processInstanceEntity, deleteReason, cancel); deleteExecutionAndRelatedData(processInstanceEntity, deleteReason); if (getEventDispatcher().isEnabled() && fireEvents) { if (!cancel) { 
getEventDispatcher().dispatchEvent(FlowableEventBuilder.createEntityEvent(FlowableEngineEventType.PROCESS_COMPLETED, processInstanceEntity)); } else { getEventDispatcher().dispatchEvent(FlowableEventBuilder.createCancelledEvent(processInstanceEntity.getId(), processInstanceEntity.getId(), processInstanceEntity.getProcessDefinitionId(), deleteReason)); } } getHistoryManager().recordProcessInstanceEnd(processInstanceEntity, deleteReason, currentFlowElementId); processInstanceEntity.setDeleted(true); } @Override public void deleteChildExecutions(ExecutionEntity executionEntity, String deleteReason, boolean cancel) { deleteChildExecutions(executionEntity, null, deleteReason, cancel, null); } @Override public void deleteChildExecutions(ExecutionEntity executionEntity, Collection<String> executionIdsNotToDelete, String deleteReason, boolean cancel, FlowElement cancelActivity) { // The children of an execution for a tree. For correct deletions // (taking care of foreign keys between child-parent) // the leafs of this tree must be deleted first before the parents elements. List<ExecutionEntity> childExecutions = collectChildren(executionEntity, executionIdsNotToDelete); for (int i = childExecutions.size() - 1; i >= 0; i--) { ExecutionEntity childExecutionEntity = childExecutions.get(i); if (!childExecutionEntity.isEnded()) { if (executionIdsNotToDelete == null || (executionIdsNotToDelete != null && !executionIdsNotToDelete.contains(childExecutionEntity.getId()))) { if (childExecutionEntity.isProcessInstanceType()) { deleteProcessInstanceExecutionEntity(childExecutionEntity.getId(), cancelActivity != null ? cancelActivity.getId() : null, deleteReason, true, cancel, true); } else { deleteExecutionAndRelatedData(childExecutionEntity, deleteReason); if (cancel) { dispatchExecutionCancelled(childExecutionEntity, cancelActivity != null ? cancelActivity : childExecutionEntity.getCurrentFlowElement()); } } } } } } public List<ExecutionEntity> collectChildren(ExecutionEntity executionEntity) { return collectChildren(executionEntity, null); } protected List<ExecutionEntity> collectChildren(ExecutionEntity executionEntity, Collection<String> executionIdsToExclude) { List<ExecutionEntity> childExecutions = new ArrayList<ExecutionEntity>(); collectChildren(executionEntity, childExecutions, executionIdsToExclude != null ? 
executionIdsToExclude : Collections.<String>emptyList()); return childExecutions; } @SuppressWarnings("unchecked") protected void collectChildren(ExecutionEntity executionEntity, List<ExecutionEntity> collectedChildExecution, Collection<String> executionIdsToExclude) { List<ExecutionEntity> childExecutions = (List<ExecutionEntity>) executionEntity.getExecutions(); if (childExecutions != null && childExecutions.size() > 0) { // Have a fixed ordering of child executions (important for the order in which events are sent) Collections.sort(childExecutions, new Comparator<ExecutionEntity>() { @Override public int compare(ExecutionEntity e1, ExecutionEntity e2) { return e1.getStartTime().compareTo(e2.getStartTime()); } }); for (ExecutionEntity childExecution : childExecutions) { if (!executionIdsToExclude.contains(childExecution.getId())) { if (!childExecution.isDeleted()) { collectedChildExecution.add(childExecution); } collectChildren(childExecution, collectedChildExecution, executionIdsToExclude); } } } ExecutionEntity subProcessInstance = executionEntity.getSubProcessInstance(); if (subProcessInstance != null && !executionIdsToExclude.contains(subProcessInstance.getId())) { if (!subProcessInstance.isDeleted()) { collectedChildExecution.add(subProcessInstance); } collectChildren(subProcessInstance, collectedChildExecution, executionIdsToExclude); } } protected void dispatchExecutionCancelled(ExecutionEntity execution, FlowElement cancelActivity) { ExecutionEntityManager executionEntityManager = CommandContextUtil.getExecutionEntityManager(); // subprocesses for (ExecutionEntity subExecution : executionEntityManager.findChildExecutionsByParentExecutionId(execution.getId())) { dispatchExecutionCancelled(subExecution, cancelActivity); } // call activities ExecutionEntity subProcessInstance = CommandContextUtil.getExecutionEntityManager().findSubProcessInstanceBySuperExecutionId(execution.getId()); if (subProcessInstance != null) { dispatchExecutionCancelled(subProcessInstance, cancelActivity); } // activity with message/signal boundary events FlowElement currentFlowElement = execution.getCurrentFlowElement(); if (currentFlowElement instanceof FlowNode) { dispatchActivityCancelled(execution, cancelActivity); } } protected void dispatchActivityCancelled(ExecutionEntity execution, FlowElement cancelActivity) { CommandContextUtil.getProcessEngineConfiguration() .getEventDispatcher() .dispatchEvent( FlowableEventBuilder.createActivityCancelledEvent(execution.getCurrentFlowElement().getId(), execution.getCurrentFlowElement().getName(), execution.getId(), execution.getProcessInstanceId(), execution.getProcessDefinitionId(), getActivityType((FlowNode) execution.getCurrentFlowElement()), cancelActivity)); } protected String getActivityType(FlowNode flowNode) { String elementType = flowNode.getClass().getSimpleName(); elementType = elementType.substring(0, 1).toLowerCase() + elementType.substring(1); return elementType; } @Override public ExecutionEntity findFirstScope(ExecutionEntity executionEntity) { ExecutionEntity currentExecutionEntity = executionEntity; while (currentExecutionEntity != null) { if (currentExecutionEntity.isScope()) { return currentExecutionEntity; } ExecutionEntity parentExecutionEntity = currentExecutionEntity.getParent(); if (parentExecutionEntity == null) { parentExecutionEntity = currentExecutionEntity.getSuperExecution(); } currentExecutionEntity = parentExecutionEntity; } return null; } @Override public ExecutionEntity findFirstMultiInstanceRoot(ExecutionEntity executionEntity) 
{ ExecutionEntity currentExecutionEntity = executionEntity; while (currentExecutionEntity != null) { if (currentExecutionEntity.isMultiInstanceRoot()) { return currentExecutionEntity; } ExecutionEntity parentExecutionEntity = currentExecutionEntity.getParent(); if (parentExecutionEntity == null) { parentExecutionEntity = currentExecutionEntity.getSuperExecution(); } currentExecutionEntity = parentExecutionEntity; } return null; } public void deleteRelatedDataForExecution(ExecutionEntity executionEntity, String deleteReason) { // To start, deactivate the current incoming execution executionEntity.setEnded(true); executionEntity.setActive(false); boolean enableExecutionRelationshipCounts = isExecutionRelatedEntityCountEnabled(executionEntity); if (executionEntity.getId().equals(executionEntity.getProcessInstanceId()) && (!enableExecutionRelationshipCounts || (enableExecutionRelationshipCounts && ((CountingExecutionEntity) executionEntity).getIdentityLinkCount() > 0))) { IdentityLinkEntityManager identityLinkEntityManager = getIdentityLinkEntityManager(); Collection<IdentityLinkEntity> identityLinks = identityLinkEntityManager.findIdentityLinksByProcessInstanceId(executionEntity.getProcessInstanceId()); for (IdentityLinkEntity identityLink : identityLinks) { identityLinkEntityManager.delete(identityLink); } } // Get variables related to execution and delete them if (!enableExecutionRelationshipCounts || (enableExecutionRelationshipCounts && ((CountingExecutionEntity) executionEntity).getVariableCount() > 0)) { Collection<VariableInstance> executionVariables = executionEntity.getVariableInstancesLocal().values(); for (VariableInstance variableInstance : executionVariables) { if (variableInstance instanceof VariableInstanceEntity == false) { continue; } VariableInstanceEntity variableInstanceEntity = (VariableInstanceEntity) variableInstance; VariableInstanceEntityManager variableInstanceEntityManager = getVariableInstanceEntityManager(); variableInstanceEntityManager.delete(variableInstanceEntity); if (variableInstanceEntity.getByteArrayRef() != null && variableInstanceEntity.getByteArrayRef().getId() != null) { getByteArrayEntityManager().deleteByteArrayById(variableInstanceEntity.getByteArrayRef().getId()); } } } // Delete current user tasks if (!enableExecutionRelationshipCounts || (enableExecutionRelationshipCounts && ((CountingExecutionEntity) executionEntity).getTaskCount() > 0)) { TaskEntityManager taskEntityManager = getTaskEntityManager(); Collection<TaskEntity> tasksForExecution = taskEntityManager.findTasksByExecutionId(executionEntity.getId()); for (TaskEntity taskEntity : tasksForExecution) { taskEntityManager.deleteTask(taskEntity, deleteReason, false, true); } } // Delete jobs if (!enableExecutionRelationshipCounts || (enableExecutionRelationshipCounts && ((CountingExecutionEntity) executionEntity).getTimerJobCount() > 0)) { TimerJobEntityManager timerJobEntityManager = getTimerJobEntityManager(); Collection<TimerJobEntity> timerJobsForExecution = timerJobEntityManager.findJobsByExecutionId(executionEntity.getId()); for (TimerJobEntity job : timerJobsForExecution) { timerJobEntityManager.delete(job); if (getEventDispatcher().isEnabled()) { getEventDispatcher().dispatchEvent(FlowableEventBuilder.createEntityEvent(FlowableEngineEventType.JOB_CANCELED, job)); } } } if (!enableExecutionRelationshipCounts || (enableExecutionRelationshipCounts && ((CountingExecutionEntity) executionEntity).getJobCount() > 0)) { JobEntityManager jobEntityManager = getJobEntityManager(); 
Collection<JobEntity> jobsForExecution = jobEntityManager.findJobsByExecutionId(executionEntity.getId()); for (JobEntity job : jobsForExecution) { getJobEntityManager().delete(job); if (getEventDispatcher().isEnabled()) { getEventDispatcher().dispatchEvent(FlowableEventBuilder.createEntityEvent(FlowableEngineEventType.JOB_CANCELED, job)); } } } if (!enableExecutionRelationshipCounts || (enableExecutionRelationshipCounts && ((CountingExecutionEntity) executionEntity).getSuspendedJobCount() > 0)) { SuspendedJobEntityManager suspendedJobEntityManager = getSuspendedJobEntityManager(); Collection<SuspendedJobEntity> suspendedJobsForExecution = suspendedJobEntityManager.findJobsByExecutionId(executionEntity.getId()); for (SuspendedJobEntity job : suspendedJobsForExecution) { suspendedJobEntityManager.delete(job); if (getEventDispatcher().isEnabled()) { getEventDispatcher().dispatchEvent(FlowableEventBuilder.createEntityEvent(FlowableEngineEventType.JOB_CANCELED, job)); } } } if (!enableExecutionRelationshipCounts || (enableExecutionRelationshipCounts && ((CountingExecutionEntity) executionEntity).getDeadLetterJobCount() > 0)) { DeadLetterJobEntityManager deadLetterJobEntityManager = getDeadLetterJobEntityManager(); Collection<DeadLetterJobEntity> deadLetterJobsForExecution = deadLetterJobEntityManager.findJobsByExecutionId(executionEntity.getId()); for (DeadLetterJobEntity job : deadLetterJobsForExecution) { deadLetterJobEntityManager.delete(job); if (getEventDispatcher().isEnabled()) { getEventDispatcher().dispatchEvent(FlowableEventBuilder.createEntityEvent(FlowableEngineEventType.JOB_CANCELED, job)); } } } // Delete event subscriptions if (!enableExecutionRelationshipCounts || (enableExecutionRelationshipCounts && ((CountingExecutionEntity) executionEntity).getEventSubscriptionCount() > 0)) { EventSubscriptionEntityManager eventSubscriptionEntityManager = getEventSubscriptionEntityManager(); List<EventSubscriptionEntity> eventSubscriptions = eventSubscriptionEntityManager.findEventSubscriptionsByExecution(executionEntity.getId()); for (EventSubscriptionEntity eventSubscription : eventSubscriptions) { eventSubscriptionEntityManager.delete(eventSubscription); if (MessageEventSubscriptionEntity.EVENT_TYPE.equals(eventSubscription.getEventType())) { if (getEventDispatcher().isEnabled()) { getEventDispatcher().dispatchEvent(FlowableEventBuilder.createMessageEvent(FlowableEngineEventType.ACTIVITY_MESSAGE_CANCELLED, eventSubscription.getActivityId(), eventSubscription.getEventName(), null, eventSubscription.getExecutionId(), eventSubscription.getProcessInstanceId(), eventSubscription.getProcessDefinitionId())); } } } } } // OTHER METHODS @Override public void updateProcessInstanceLockTime(String processInstanceId) { Date expirationTime = getClock().getCurrentTime(); int lockMillis = getAsyncExecutor().getAsyncJobLockTimeInMillis(); GregorianCalendar lockCal = new GregorianCalendar(); lockCal.setTime(expirationTime); lockCal.add(Calendar.MILLISECOND, lockMillis); Date lockDate = lockCal.getTime(); executionDataManager.updateProcessInstanceLockTime(processInstanceId, lockDate, expirationTime); } @Override public void clearProcessInstanceLockTime(String processInstanceId) { executionDataManager.clearProcessInstanceLockTime(processInstanceId); } @Override public String updateProcessInstanceBusinessKey(ExecutionEntity executionEntity, String businessKey) { if (executionEntity.isProcessInstanceType() && businessKey != null) { executionEntity.setBusinessKey(businessKey); 
getHistoryManager().updateProcessBusinessKeyInHistory(executionEntity); if (getEventDispatcher().isEnabled()) { getEventDispatcher().dispatchEvent(FlowableEventBuilder.createEntityEvent(FlowableEngineEventType.ENTITY_UPDATED, executionEntity)); } return businessKey; } return null; } public ExecutionDataManager getExecutionDataManager() { return executionDataManager; } public void setExecutionDataManager(ExecutionDataManager executionDataManager) { this.executionDataManager = executionDataManager; } }
apache-2.0
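findByRootProcessInstanceId and processExecutionTree above rebuild a whole execution tree from a flat query result in two passes: index the rows by id, then wire the parent, child and super/sub references, returning the root. The same two-pass idea, reduced to plain dictionaries, is sketched below; the field names (id, parent_id, children) are illustrative and not the Flowable entity API.

def build_execution_tree(rows, root_id):
    """Index flat execution rows by id, then wire parent/child links; returns the root row."""
    by_id = {row["id"]: row for row in rows}
    for row in rows:
        row.setdefault("children", [])
    for row in rows:
        parent = by_id.get(row.get("parent_id"))
        if parent is not None:
            row["parent"] = parent
            parent["children"].append(row)
    return by_id.get(root_id)

rows = [
    {"id": "proc1", "parent_id": None},
    {"id": "exec1", "parent_id": "proc1"},
    {"id": "exec2", "parent_id": "exec1"},
]
root = build_execution_tree(rows, "proc1")
print([child["id"] for child in root["children"]])  # ['exec1']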
inovex/rest-client
restclient-lib/src/main/java/org/wiztools/restclient/RoRequestBean.java
3752
package org.wiztools.restclient; import java.net.URL; import java.util.List; import org.wiztools.commons.MultiValueMap; /** * * @author schandran */ public class RoRequestBean implements Request { private final URL url; private final HTTPMethod method; private final MultiValueMap<String, String> headers; private final RoReqEntityBean body; private final List<HTTPAuthMethod> authMethods; private final boolean authPreemptive; private final String authHost; private final String authRealm; private final String authUsername; private final char[] authPassword; private String sslTrustStore; private char[] sslTrustStorePassword; SSLHostnameVerifier sslHostNameVerifier; private HTTPVersion httpVersion = HTTPVersion.getDefault(); // Initialize to the default version private boolean isFollowRedirect; @Override public HTTPVersion getHttpVersion() { return httpVersion; } public void setHttpVersion(HTTPVersion httpVersion) { this.httpVersion = httpVersion; } @Override public String getSslTrustStore() { return sslTrustStore; } public void setSslTrustStore(String sslTrustStore) { this.sslTrustStore = sslTrustStore; } @Override public char[] getSslTrustStorePassword() { return sslTrustStorePassword; } public void setSslTrustStorePassword(char[] sslTrustStorePassword) { this.sslTrustStorePassword = sslTrustStorePassword; } @Override public String getAuthHost() { return authHost; } @Override public List<HTTPAuthMethod> getAuthMethods() { return authMethods; } @Override public char[] getAuthPassword() { return authPassword; } @Override public boolean isAuthPreemptive() { return authPreemptive; } @Override public String getAuthRealm() { return authRealm; } @Override public String getAuthUsername() { return authUsername; } @Override public RoReqEntityBean getBody() { return body; } @Override public MultiValueMap<String, String> getHeaders() { return headers; } @Override public HTTPMethod getMethod() { return method; } @Override public URL getUrl() { return url; } @Override public boolean isFollowRedirect() { return this.isFollowRedirect; } public void setFollowRedirect(boolean isFollowRedirect) { this.isFollowRedirect = isFollowRedirect; } public RoRequestBean(final Request request){ url = request.getUrl(); method = request.getMethod(); headers = request.getHeaders(); if(request.getBody() != null){ body = new RoReqEntityBean(request.getBody()); } else{ body = null; } authMethods = request.getAuthMethods(); authPreemptive = request.isAuthPreemptive(); authHost = request.getAuthHost(); authRealm = request.getAuthRealm(); authUsername = request.getAuthUsername(); authPassword = request.getAuthPassword(); sslTrustStore = request.getSslTrustStore(); sslTrustStorePassword = request.getSslTrustStorePassword(); httpVersion = request.getHttpVersion(); sslHostNameVerifier = request.getSslHostNameVerifier(); isFollowRedirect = request.isFollowRedirect(); } @Override public SSLHostnameVerifier getSslHostNameVerifier() { return sslHostNameVerifier; } @Override public String getTestScript() { return null; } @Override public Object clone(){ return null; } }
apache-2.0
Boggimedes/Dungeon-Master-Pro
angular/app/components/sound-edit/sound-edit.component.spec.ts
642
import { ComponentFixture, TestBed } from "@angular/core/testing"; import { SoundEditComponent } from "./sound-edit.component"; describe("SoundEditComponent", () => { let component: SoundEditComponent; let fixture: ComponentFixture<SoundEditComponent>; beforeEach(async () => { await TestBed.configureTestingModule({ declarations: [SoundEditComponent], }).compileComponents(); }); beforeEach(() => { fixture = TestBed.createComponent(SoundEditComponent); component = fixture.componentInstance; fixture.detectChanges(); }); it("should create", () => { expect(component).toBeTruthy(); }); });
apache-2.0
npmcomponent/josdejong-mathjs
lib/function/statistics/median.js
2941
module.exports = function (math) { var Matrix = require('../../type/Matrix'), Unit = require('../../type/Unit'), BigNumber = math.type.BigNumber, collection = require('../../type/collection'), isNumber = require('../../util/number').isNumber, isCollection = collection.isCollection, flatten = require('../../util/array').flatten; /** * Compute the median of a list of values. The values are sorted and the * middle value is returned. In case of an even number of values, * the average of the two middle values is returned. * Supported types of values are: Number, BigNumber, Unit * * In case of a (multi dimensional) array or matrix, the median of all * elements will be calculated. * * median(a, b, c, ...) * median(A) * * @param {... *} args A single matrix or or multiple scalar values * @return {*} res */ math.median = function median(args) { if (arguments.length == 0) { throw new SyntaxError('Function median requires one or more parameters (0 provided)'); } if (isCollection(args)) { if (arguments.length == 1) { // median([a, b, c, d, ...]) return _median(args.valueOf()); } else if (arguments.length == 2) { // median([a, b, c, d, ...], dim) // TODO: implement median(A, dim) throw new Error('median(A, dim) is not yet supported'); //return collection.reduce(arguments[0], arguments[1], ...); } else { throw new SyntaxError('Wrong number of parameters'); } } else { // median(a, b, c, d, ...) return _median(Array.prototype.slice.call(arguments)); } }; /** * Recursively calculate the median of an n-dimensional array * @param {Array} array * @return {Number} median * @private */ function _median(array) { var flat = flatten(array); flat.sort(math.compare); var num = flat.length; if (num == 0) { throw new Error('Cannot calculate median of an empty array'); } if (num % 2 == 0) { // even: return the average of the two middle values var left = flat[num / 2 - 1]; var right = flat[num / 2]; if (!isNumber(left) && !(left instanceof BigNumber) && !(left instanceof Unit)) { throw new math.error.UnsupportedTypeError('median', math['typeof'](left)); } if (!isNumber(right) && !(right instanceof BigNumber) && !(right instanceof Unit)) { throw new math.error.UnsupportedTypeError('median', math['typeof'](right)); } return math.divide(math.add(left, right), 2); } else { // odd: return the middle value var middle = flat[(num - 1) / 2]; if (!isNumber(middle) && !(middle instanceof BigNumber) && !(middle instanceof Unit)) { throw new math.error.UnsupportedTypeError('median', math['typeof'](middle)); } return middle; } } };
apache-2.0
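The median docstring above spells out the rule: sort the values, take the middle one, and average the two middle ones when the count is even. A minimal standalone illustration of that rule follows, using plain numbers only and ignoring the Unit/BigNumber support the library adds.

def median(values):
    """Sort, then return the middle value, or the average of the two middle values
    when the count is even (the same rule the mathjs docstring describes)."""
    if not values:
        raise ValueError("Cannot calculate median of an empty array")
    ordered = sorted(values)
    n = len(ordered)
    if n % 2 == 0:
        return (ordered[n // 2 - 1] + ordered[n // 2]) / 2.0
    return ordered[(n - 1) // 2]

print(median([3, 1, 5, 7]))  # 4.0 -> average of the two middle values 3 and 5
print(median([3, 1, 5]))     # 3   -> single middle value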
maruno/collisionsdemo
src/renderer/render/shaderfactory.hpp
881
#ifndef SHADERFACTORY_HPP #define SHADERFACTORY_HPP #include <map> #include <string> #include "glload/gl_3_2.h" namespace render { /** * Flyweight factory class for shaders. Administrates and creates shaders. * * @author Michel Bouwmans */ class ShaderFactory { private: static std::map<const std::string, const GLuint> shaders; static void checkShaderError(GLuint shader); public: /** * Returns the requested shader. Shader is returned from flyweight pool if available. * This method tries to fetch and compile a shader if not in the pool and fails on error. * * @param shaderType Type of the shader as known to OpenGL * @param shaderName Name of the shader to return, equalling the filename without the glsl-extension */ static const GLuint getShader(const std::string shaderName, GLenum shaderType); }; } #endif // SHADERFACTORY_HPP
apache-2.0
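ShaderFactory above is documented as a flyweight pool: getShader compiles a shader only the first time a name is requested and hands back the cached handle afterwards. A minimal sketch of that create-if-absent pattern, with a stand-in compile step in place of the real glload/OpenGL calls:

_shader_pool = {}
_compile_count = {"n": 0}

def fake_compile(name):
    # Stand-in for the real "load source + glCompileShader + error check" step.
    _compile_count["n"] += 1
    return hash(name)

def get_shader(name):
    """Return the pooled handle for a shader name, compiling it only on the first request."""
    if name not in _shader_pool:
        _shader_pool[name] = fake_compile(name)
    return _shader_pool[name]

a = get_shader("standard.vert")
b = get_shader("standard.vert")
print(a == b, _compile_count["n"])  # True 1 -> compiled once, handle reused afterwards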
SAP/cloud-sfsf-benefits-ext
src/main/java/com/sap/hana/cloud/samples/benefits/odata/cfg/FunctionImportParameters.java
592
package com.sap.hana.cloud.samples.benefits.odata.cfg; @SuppressWarnings("nls") public final class FunctionImportParameters { public static final String NAME = "name"; public static final String CAMPAIGN_ID = "campaignId"; public static final String START_DATE = "startDate"; public static final String USER_ID = "userId"; public static final String QUANTITY = "quantity"; public static final String BENEFIT_TYPE_ID = "benefitTypeId"; public static final String ORDER_ID = "orderId"; public static final String PHOTO_TYPE = "photoType"; private FunctionImportParameters() { } }
apache-2.0
openstack/heat
heat/tests/openstack/heat/test_random_string.py
12486
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import re
from unittest import mock

from testtools import matchers

from heat.common import exception
from heat.common import template_format
from heat.engine import node_data
from heat.engine import stack as parser
from heat.engine import template
from heat.tests import common
from heat.tests import utils


class TestRandomString(common.HeatTestCase):

    template_random_string = '''
HeatTemplateFormatVersion: '2012-12-12'
Resources:
  secret1:
    Type: OS::Heat::RandomString
  secret2:
    Type: OS::Heat::RandomString
    Properties:
      length: 10
  secret3:
    Type: OS::Heat::RandomString
    Properties:
      length: 32
      character_classes:
        - class: digits
          min: 1
        - class: uppercase
          min: 1
        - class: lowercase
          min: 20
      character_sequences:
        - sequence: (),[]{}
          min: 1
        - sequence: $_
          min: 2
        - sequence: '@'
          min: 5
  secret4:
    Type: OS::Heat::RandomString
    Properties:
      length: 25
      character_classes:
        - class: digits
          min: 1
        - class: uppercase
          min: 1
        - class: lowercase
          min: 20
  secret5:
    Type: OS::Heat::RandomString
    Properties:
      length: 10
      character_sequences:
        - sequence: (),[]{}
          min: 1
        - sequence: $_
          min: 2
        - sequence: '@'
          min: 5
'''

    def create_stack(self, templ):
        self.stack = self.parse_stack(template_format.parse(templ))
        self.assertIsNone(self.stack.create())
        return self.stack

    def parse_stack(self, t):
        stack_name = 'test_stack'
        tmpl = template.Template(t)
        stack = parser.Stack(utils.dummy_context(), stack_name, tmpl)
        stack.validate()
        stack.store()
        return stack

    def assert_min(self, pattern, string, minimum):
        self.assertGreaterEqual(len(re.findall(pattern, string)), minimum)

    def test_random_string(self):
        stack = self.create_stack(self.template_random_string)

        secret1 = stack['secret1']
        random_string = secret1.FnGetAtt('value')
        self.assert_min('[a-zA-Z0-9]', random_string, 32)
        self.assertRaises(exception.InvalidTemplateAttribute,
                          secret1.FnGetAtt, 'foo')
        self.assertEqual(secret1.FnGetRefId(), random_string)

        secret2 = stack['secret2']
        random_string = secret2.FnGetAtt('value')
        self.assert_min('[a-zA-Z0-9]', random_string, 10)
        self.assertEqual(secret2.FnGetRefId(), random_string)

        secret3 = stack['secret3']
        random_string = secret3.FnGetAtt('value')
        self.assertEqual(32, len(random_string))
        self.assert_min('[0-9]', random_string, 1)
        self.assert_min('[A-Z]', random_string, 1)
        self.assert_min('[a-z]', random_string, 20)
        self.assert_min(r'[(),\[\]{}]', random_string, 1)
        self.assert_min('[$_]', random_string, 2)
        self.assert_min('@', random_string, 5)
        self.assertEqual(secret3.FnGetRefId(), random_string)

        secret4 = stack['secret4']
        random_string = secret4.FnGetAtt('value')
        self.assertEqual(25, len(random_string))
        self.assert_min('[0-9]', random_string, 1)
        self.assert_min('[A-Z]', random_string, 1)
        self.assert_min('[a-z]', random_string, 20)
        self.assertEqual(secret4.FnGetRefId(), random_string)

        secret5 = stack['secret5']
        random_string = secret5.FnGetAtt('value')
        self.assertEqual(10, len(random_string))
        self.assert_min(r'[(),\[\]{}]', random_string, 1)
        self.assert_min('[$_]', random_string, 2)
        self.assert_min('@', random_string, 5)
        self.assertEqual(secret5.FnGetRefId(), random_string)

        # Prove the name is returned before create sets the ID
        secret5.resource_id = None
        self.assertEqual('secret5', secret5.FnGetRefId())

    def test_hidden_sequence_property(self):
        hidden_prop_templ = '''
HeatTemplateFormatVersion: '2012-12-12'
Resources:
  secret:
    Type: OS::Heat::RandomString
    Properties:
      length: 100
      sequence: octdigits
'''
        stack = self.create_stack(hidden_prop_templ)
        secret = stack['secret']
        random_string = secret.FnGetAtt('value')
        self.assert_min('[0-7]', random_string, 100)
        self.assertEqual(secret.FnGetRefId(), random_string)
        # check that property was translated according to the TranslationRule
        self.assertIsNone(secret.properties['sequence'])
        expected = [{'class': u'octdigits', 'min': 1}]
        self.assertEqual(expected, secret.properties['character_classes'])

    def test_random_string_refid_convergence_cache_data(self):
        t = template_format.parse(self.template_random_string)
        cache_data = {'secret1': node_data.NodeData.from_dict({
            'uuid': mock.ANY,
            'id': mock.ANY,
            'action': 'CREATE',
            'status': 'COMPLETE',
            'reference_id': 'xyz'
        })}
        stack = utils.parse_stack(t, cache_data=cache_data)
        rsrc = stack.defn['secret1']
        self.assertEqual('xyz', rsrc.FnGetRefId())

    def test_invalid_length(self):
        template_random_string = '''
HeatTemplateFormatVersion: '2012-12-12'
Resources:
  secret:
    Type: OS::Heat::RandomString
    Properties:
      length: 5
      character_classes:
        - class: digits
          min: 5
      character_sequences:
        - sequence: (),[]{}
          min: 1
'''
        exc = self.assertRaises(exception.StackValidationFailed,
                                self.create_stack, template_random_string)
        self.assertEqual("Length property cannot be smaller than combined "
                         "character class and character sequence minimums",
                         str(exc))

    def test_max_length(self):
        template_random_string = '''
HeatTemplateFormatVersion: '2012-12-12'
Resources:
  secret:
    Type: OS::Heat::RandomString
    Properties:
      length: 512
'''
        stack = self.create_stack(template_random_string)
        secret = stack['secret']
        random_string = secret.FnGetAtt('value')
        self.assertEqual(512, len(random_string))
        self.assertEqual(secret.FnGetRefId(), random_string)

    def test_exceeds_max_length(self):
        template_random_string = '''
HeatTemplateFormatVersion: '2012-12-12'
Resources:
  secret:
    Type: OS::Heat::RandomString
    Properties:
      length: 513
'''
        exc = self.assertRaises(exception.StackValidationFailed,
                                self.create_stack, template_random_string)
        self.assertIn('513 is out of range (min: 1, max: 512)', str(exc))


class TestGenerateRandomString(common.HeatTestCase):

    scenarios = [
        ('lettersdigits', dict(
            length=1, seq='lettersdigits', pattern='[a-zA-Z0-9]')),
        ('letters', dict(
            length=10, seq='letters', pattern='[a-zA-Z]')),
        ('lowercase', dict(
            length=100, seq='lowercase', pattern='[a-z]')),
        ('uppercase', dict(
            length=50, seq='uppercase', pattern='[A-Z]')),
        ('digits', dict(
            length=512, seq='digits', pattern='[0-9]')),
        ('hexdigits', dict(
            length=16, seq='hexdigits', pattern='[A-F0-9]')),
        ('octdigits', dict(
            length=32, seq='octdigits', pattern='[0-7]'))
    ]

    template_rs = '''
HeatTemplateFormatVersion: '2012-12-12'
Resources:
  secret:
    Type: OS::Heat::RandomString
'''

    def parse_stack(self, t):
        stack_name = 'test_stack'
        tmpl = template.Template(t)
        stack = parser.Stack(utils.dummy_context(), stack_name, tmpl)
        stack.validate()
        stack.store()
        return stack

    # test was saved to test backward compatibility with old behavior
    def test_generate_random_string_backward_compatible(self):
        stack = self.parse_stack(template_format.parse(self.template_rs))
        secret = stack['secret']
        char_classes = secret.properties['character_classes']
        for char_cl in char_classes:
            char_cl['class'] = self.seq
        # run each test multiple times to confirm random generator
        # doesn't generate a matching pattern by chance
        for i in range(1, 32):
            r = secret._generate_random_string([], char_classes, self.length)
            self.assertThat(r, matchers.HasLength(self.length))
            regex = '%s{%s}' % (self.pattern, self.length)
            self.assertThat(r, matchers.MatchesRegex(regex))


class TestGenerateRandomStringDistribution(common.HeatTestCase):

    def run_test(self, tmpl, iterations=5):
        stack = utils.parse_stack(template_format.parse(tmpl))
        secret = stack['secret']
        secret.data_set = mock.Mock()

        for i in range(iterations):
            secret.handle_create()

        return [call[1][1] for call in secret.data_set.mock_calls]

    def char_counts(self, random_strings, char):
        return [rs.count(char) for rs in random_strings]

    def check_stats(self, char_counts, expected_mean, allowed_variance,
                    expected_minimum=0):
        mean = float(sum(char_counts)) / len(char_counts)
        self.assertLess(mean, expected_mean + allowed_variance)
        self.assertGreater(mean, max(0, expected_mean - allowed_variance))
        if expected_minimum:
            self.assertGreaterEqual(min(char_counts), expected_minimum)

    def test_class_uniformity(self):
        template_rs = '''
HeatTemplateFormatVersion: '2012-12-12'
Resources:
  secret:
    Type: OS::Heat::RandomString
    Properties:
      length: 66
      character_classes:
        - class: lettersdigits
      character_sequences:
        - sequence: "*$"
'''
        results = self.run_test(template_rs, 10)
        for char in '$*':
            self.check_stats(self.char_counts(results, char), 1.5, 2)

    def test_repeated_sequence(self):
        template_rs = '''
HeatTemplateFormatVersion: '2012-12-12'
Resources:
  secret:
    Type: OS::Heat::RandomString
    Properties:
      length: 40
      character_classes: []
      character_sequences:
        - sequence: "**********$*****************************"
'''
        results = self.run_test(template_rs)
        for char in '$*':
            self.check_stats(self.char_counts(results, char), 20, 6)

    def test_overlapping_classes(self):
        template_rs = '''
HeatTemplateFormatVersion: '2012-12-12'
Resources:
  secret:
    Type: OS::Heat::RandomString
    Properties:
      length: 624
      character_classes:
        - class: lettersdigits
        - class: digits
        - class: octdigits
        - class: hexdigits
'''
        results = self.run_test(template_rs, 20)
        self.check_stats(self.char_counts(results, '0'), 10.3, 3)

    def test_overlapping_sequences(self):
        template_rs = '''
HeatTemplateFormatVersion: '2012-12-12'
Resources:
  secret:
    Type: OS::Heat::RandomString
    Properties:
      length: 60
      character_classes: []
      character_sequences:
        - sequence: "01"
        - sequence: "02"
        - sequence: "03"
        - sequence: "04"
        - sequence: "05"
        - sequence: "06"
        - sequence: "07"
        - sequence: "08"
        - sequence: "09"
'''
        results = self.run_test(template_rs)
        self.check_stats(self.char_counts(results, '0'), 10, 5)

    def test_overlapping_class_sequence(self):
        template_rs = '''
HeatTemplateFormatVersion: '2012-12-12'
Resources:
  secret:
    Type: OS::Heat::RandomString
    Properties:
      length: 402
      character_classes:
        - class: octdigits
      character_sequences:
        - sequence: "0"
'''
        results = self.run_test(template_rs, 10)
        self.check_stats(self.char_counts(results, '0'), 51.125, 8, 1)
apache-2.0
projectjellyfish/jellyfish-aws
public/extensions/aws/states/services/details/aws/rds/rds.state.js
1375
(function() {
  'use strict';

  angular.module('app.states')
    .run(appRun);

  /** @ngInject */
  function appRun(StateOverride) {
    StateOverride.override('services.details', function(service) {
      if ('JellyfishAws::Service::RDS' == service.type) {
        return {
          templateUrl: 'extensions/aws/states/services/details/aws/rds/rds.html',
          controller: StateController
        };
      }
    });
  }

  /** @ngInject */
  function StateController(service, AwsData) {
    var vm = this;

    vm.title = '';
    vm.service = service;
    vm.getServiceOutput = getServiceOutput;
    vm.activate = activate;
    vm.deprovision = deprovision;

    activate();

    function activate() {
    }

    function handleResponse(response) {
      console.log(response);
      vm.response = response;
    }

    function handleError(response) {
      console.log(response);
      vm.response = response;
    }

    function deprovision() {
      vm.response = null;
      AwsData['deprovision'](vm.service.provider.id, vm.service.id).then(handleResponse, handleError);
    }

    function getServiceOutput(service_output_name) {
      var outputs = vm.service.service_outputs.filter(function(elt, idx) {
        return elt.name == service_output_name;
      });
      if (outputs.length > 0) {
        return outputs[0];
      } else {
        return null;
      }
    }
  }
})();
apache-2.0
ivanceras/keywordSQL
src/test/java/com/ivanceras/fluent/TestSQLOrderBy.java
1035
package com.ivanceras.fluent;

import static org.junit.Assert.*;

import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;

import com.ivanceras.keyword.sql.Breakdown;
import com.ivanceras.keyword.sql.SQL;

import static com.ivanceras.keyword.sql.SQLStatics.*;

public class TestSQLOrderBy {

    @BeforeClass
    public static void setUpBeforeClass() throws Exception {
    }

    @AfterClass
    public static void tearDownAfterClass() throws Exception {
    }

    @Before
    public void setUp() throws Exception {
    }

    @After
    public void tearDown() throws Exception {
    }

    @Test
    public void test() {
        String expected = "" +
                " SELECT Customers.CustomerName , Orders.OrderID " +
                " FROM Customers " +
                " ORDER BY name desc , description";

        SQL sql = SELECT("Customers.CustomerName", "Orders.OrderID")
                .FROM("Customers")
                .ORDER_BY("name").DESC()
                .FIELD("description");

        Breakdown bk = sql.build();
        CTest.cassertEquals(expected, bk.getSql());
    }
}
apache-2.0
DragonRoman/ovirt-engine-sdk
generator/src/main/java/org/ovirt/engine/sdk/generator/XsdData.java
12672
//
// Copyright (c) 2014 Red Hat, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//   http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//

package org.ovirt.engine.sdk.generator;

import java.io.File;
import java.io.IOException;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;

import javax.xml.XMLConstants;
import javax.xml.namespace.NamespaceContext;
import javax.xml.namespace.QName;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;

import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;

public class XsdData {
    /**
     * This is a singleton, and this is the reference to the instance.
     */
    private static final XsdData instance = new XsdData();

    /**
     * Get the reference to the instance of this singleton.
     */
    public static XsdData getInstance() {
        return instance;
    }

    /**
     * The file containing the RSDL metadata.
     */
    private File file;

    /**
     * This map stores the relationship between XML tag names and Python type names.
     */
    private Map<String, String> typesByTag = new LinkedHashMap<>();

    /**
     * This map stores the relationship between Python type names and XML tags. Note that this isn't the inverse of the
     * previous one, as some types don't have a tag because they don't appear as top level element declarations in the
     * XML schema, thus they can't appear as root elements in a valid XML document.
     */
    private Map<String, String> tagsByType = new LinkedHashMap<>();

    /**
     * This map contains the DOM trees of all the top level element definitions that appear in the XML schema, indexed
     * by name.
     */
    private Map<String, Element> elementsIndex = new HashMap<>();

    /**
     * This map contains the DOM trees of all the complex types that appear in the XML schema, indexed by name.
     */
    private Map<String, Element> complexTypesIndex = new HashMap<>();

    /**
     * We will create and reuse this XPath expression.
     */
    private XPath xpath;

    /**
     * Returns the file that contains the XML schema.
     */
    public File getFile() {
        return file;
    }

    /**
     * Loads the XML schema from a file.
     *
     * @param file the file that contains the XML schema
     * @throws IOException if something fails while loading the schema
     */
    public void load(File file) throws IOException {
        // Save the reference to the file:
        this.file = file;

        // Parse the XML schema document:
        Document schema;
        try {
            DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
            factory.setNamespaceAware(true);
            DocumentBuilder parser = factory.newDocumentBuilder();
            schema = parser.parse(file);
        }
        catch (Exception exception) {
            throw new IOException("Can't parse XML schema.", exception);
        }

        // Prepare the xpath engine with the required namespace mapping:
        xpath = XPathFactory.newInstance().newXPath();
        xpath.setNamespaceContext(
            new NamespaceContext() {
                @Override
                public String getNamespaceURI(String prefix) {
                    switch (prefix) {
                    case "xs":
                        return "http://www.w3.org/2001/XMLSchema";
                    default:
                        return XMLConstants.NULL_NS_URI;
                    }
                }

                @Override
                public String getPrefix(String namespaceURI) {
                    throw new UnsupportedOperationException();
                }

                @Override
                public Iterator getPrefixes(String namespaceURI) {
                    throw new UnsupportedOperationException();
                }
            }
        );

        // Populate the indexes:
        populateElementsIndex(schema);
        populateComplexTypesIndex(schema);

        // Exclude all the simple types:
        Set<String> excluded = new HashSet<>();
        NodeList nodes = (NodeList) evaluate("//xs:simpleType/@name", schema, XPathConstants.NODESET);
        for (int i = 0; i < nodes.getLength(); i++) {
            Node name = nodes.item(i);
            excluded.add(name.getNodeValue());
        }

        // Exclude infrastructure types:
        excluded.add("BaseDevice");
        excluded.add("BaseDevices");
        excluded.add("BaseResource");
        excluded.add("BaseResources");
        excluded.add("DetailedLink");
        excluded.add("ErrorHandlingOptions");

        // Exclude the VM summary because it conflicts with the API summary:
        excluded.add("VmSummary");

        // Populate the types by tag map, including all the element definitions that appear in the XML schema, even
        // those that aren't top level and thus not valid as roots of valid XML documents:
        NodeList elements = (NodeList) evaluate("//xs:element", schema, XPathConstants.NODESET);
        for (int i = 0; i < elements.getLength(); i++) {
            Element element = (Element) elements.item(i);
            String name = element.getAttribute("name");
            String type = element.getAttribute("type");
            if (!name.isEmpty() && !type.isEmpty()) {
                if (!type.startsWith("xs:") && !excluded.contains(type)) {
                    typesByTag.put(name, type);
                }
            }
        }

        // There are several conflicts with "version", so force it:
        typesByTag.put("version", "VersionCaps");

        // Populate the tags by type name, including only the top level element definitions that appear in the XML
        // schema, those that can appear as roots of valid XML documents:
        elements = (NodeList) evaluate("/xs:schema/xs:element", schema, XPathConstants.NODESET);
        for (int i = 0; i < elements.getLength(); i++) {
            Element element = (Element) elements.item(i);
            String name = element.getAttribute("name");
            String type = element.getAttribute("type");
            if (!name.isEmpty() && !type.isEmpty()) {
                if (!type.startsWith("xs:") && !excluded.contains(type)) {
                    tagsByType.put(type, name);
                }
            }
        }
    }

    private void populateElementsIndex(Document schema) {
        NodeList nodes = (NodeList) evaluate("/xs:schema/xs:element", schema, XPathConstants.NODESET);
        for (int i = 0; i < nodes.getLength(); i++) {
            Element node = (Element) nodes.item(i);
            String name = node.getAttribute("name");
            if (!name.isEmpty()) {
                elementsIndex.put(name, node);
            }
        }
    }

    private void populateComplexTypesIndex(Document schema) {
        NodeList nodes = (NodeList) evaluate("/xs:schema/xs:complexType", schema, XPathConstants.NODESET);
        for (int i = 0; i < nodes.getLength(); i++) {
            Element node = (Element) nodes.item(i);
            String name = node.getAttribute("name");
            if (!name.isEmpty()) {
                complexTypesIndex.put(name, node);
            }
        }
    }

    private Object evaluate(String expression, Object item, QName returnType) {
        try {
            return xpath.evaluate(expression, item, returnType);
        }
        catch (XPathExpressionException exception) {
            throw new RuntimeException("Can't evaluate XPath expression \"" + expression + "\".");
        }
    }

    public Map<String, String> getTypesByTag() {
        return typesByTag;
    }

    public Map<String, String> getTagsByType() {
        return tagsByType;
    }

    /**
     * Checks if the complex type represented by the given DOM node is an extension (directly or recursively) of the
     * complex type with the given name.
     *
     * @param node the DOM node representing the complex type
     * @param name the name of the base complex type
     */
    private boolean isExtensionOf(Element node, String name) {
        String base = (String) evaluate(
            "xs:complexContent/" +
            "xs:extension/" +
            "@base",
            node,
            XPathConstants.STRING
        );
        if (base == null) {
            return false;
        }
        if (base.equals(name)) {
            return true;
        }
        Element next = getComplexType(base);
        if (next == null) {
            return false;
        }
        return isExtensionOf(next, name);
    }

    public Element getElement(String name) {
        return elementsIndex.get(name);
    }

    public Element getComplexType(String name) {
        return complexTypesIndex.get(name);
    }

    public String getEntityElementForCollectionType(String collectionType) {
        Element collectionTypeNode = getComplexType(collectionType);
        if (collectionTypeNode == null) {
            return null;
        }
        return getEntityElementForCollectionType(collectionTypeNode);
    }

    private String getEntityElementForCollectionType(Element collectionTypeNode) {
        NodeList contentNodes = (NodeList) evaluate(
            "xs:complexContent/" +
            "xs:extension/" +
            "xs:sequence/" +
            "xs:element",
            collectionTypeNode,
            XPathConstants.NODESET
        );
        String entityElement = null;
        for (int i = 0; entityElement == null && i < contentNodes.getLength(); i++) {
            Element contentNode = (Element) contentNodes.item(i);
            String ref = contentNode.getAttribute("ref");
            if (!ref.isEmpty()) {
                entityElement = ref;
            }
        }
        if (entityElement == null) {
            for (int i = 0; entityElement == null && i < contentNodes.getLength(); i++) {
                Element contentNode = (Element) contentNodes.item(i);
                String name = contentNode.getAttribute("name");
                String type = contentNode.getAttribute("type");
                if (!name.isEmpty() && !type.isEmpty()) {
                    entityElement = name;
                }
            }
        }
        return entityElement;
    }

    public String getEntityTypeForCollectionType(String collectionType) {
        Element collectionTypeNode = getComplexType(collectionType);
        if (collectionTypeNode == null) {
            return null;
        }
        return getEntityTypeForCollectionType(collectionTypeNode);
    }

    private String getEntityTypeForCollectionType(Element collectionTypeNode) {
        NodeList contentNodes = (NodeList) evaluate(
            "xs:complexContent/" +
            "xs:extension/" +
            "xs:sequence/" +
            "xs:element",
            collectionTypeNode,
            XPathConstants.NODESET
        );
        String entityType = null;
        for (int i = 0; entityType == null && i < contentNodes.getLength(); i++) {
            Element contentNode = (Element) contentNodes.item(i);
            String ref = contentNode.getAttribute("ref");
            if (!ref.isEmpty()) {
                Element elementNode = getElement(ref);
                if (elementNode != null) {
                    String type = elementNode.getAttribute("type");
                    if (!type.isEmpty()) {
                        entityType = type;
                    }
                }
            }
        }
        if (entityType == null) {
            for (int i = 0; entityType == null && i < contentNodes.getLength(); i++) {
                Element element = (Element) contentNodes.item(i);
                String name = element.getAttribute("name");
                String type = element.getAttribute("type");
                if (!name.isEmpty() && !type.isEmpty()) {
                    entityType = type;
                }
            }
        }
        return entityType;
    }
}
apache-2.0
johnzeringue/Topsoil
app/src/main/java/org/cirdles/topsoil/app/plot/standard/UncertaintyEllipsePlotPropertiesPanel.java
4033
/*
 * Copyright 2016 CIRDLES.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.cirdles.topsoil.app.plot.standard;

import com.johnzeringue.extendsfx.layout.CustomVBox;

import javafx.beans.value.ChangeListener;
import javafx.fxml.FXML;
import javafx.scene.Node;
import javafx.scene.control.ChoiceBox;
import javafx.scene.control.ColorPicker;
import javafx.scene.control.TextField;
import javafx.scene.paint.Color;

import org.cirdles.topsoil.plot.JavaFXDisplayable;
import org.cirdles.topsoil.plot.Plot;

import java.util.Map;

import static org.cirdles.topsoil.plot.upb.uncertainty.UncertaintyEllipsePlotProperties.ELLIPSE_FILL_COLOR;
import static org.cirdles.topsoil.plot.upb.uncertainty.UncertaintyEllipsePlotProperties.TITLE;
import static org.cirdles.topsoil.plot.upb.uncertainty.UncertaintyEllipsePlotProperties.UNCERTAINTY;
import static org.cirdles.topsoil.plot.upb.uncertainty.UncertaintyEllipsePlotProperties.X_AXIS;
import static org.cirdles.topsoil.plot.upb.uncertainty.UncertaintyEllipsePlotProperties.Y_AXIS;

/**
 * Created by johnzeringue on 11/8/15.
 */
public class UncertaintyEllipsePlotPropertiesPanel
        extends CustomVBox<UncertaintyEllipsePlotPropertiesPanel>
        implements JavaFXDisplayable {

    @FXML
    private TextField titleField;

    @FXML
    private TextField xAxisField;

    @FXML
    private TextField yAxisField;

    @FXML
    private ChoiceBox<Double> uncertaintyField;

    @FXML
    private ColorPicker ellipseFillColorPicker;

    private Plot plot;

    public UncertaintyEllipsePlotPropertiesPanel(Plot plot) {
        super(self -> {
            self.plot = plot;
        });
    }

    private ChangeListener<Object> updateProperty(String property) {
        return (observable, oldValue, newValue) -> {
            Map<String, Object> properties = plot.getProperties();
            properties.put(property, newValue);
            plot.setProperties(properties);
        };
    }

    @FXML
    private void initialize() {
        ellipseFillColorPicker.setValue(
                Color.valueOf(
                        (String) plot.getProperties().get(ELLIPSE_FILL_COLOR)));

        ellipseFillColorPicker.valueProperty().addListener(
                (observable, oldValue, newValue) -> {
                    Map<String, Object> properties = plot.getProperties();

                    String fillColor = String.format(
                            "#%02X%02X%02X",
                            (int) (newValue.getRed() * 255),
                            (int) (newValue.getGreen() * 255),
                            (int) (newValue.getBlue() * 255));

                    properties.put(ELLIPSE_FILL_COLOR, fillColor);
                    plot.setProperties(properties);
                });

        titleField.setText((String) plot.getProperties().get(TITLE));
        titleField.textProperty().addListener(updateProperty(TITLE));

        uncertaintyField.getItems().addAll(1.0, 2.0, 2.4477);
        uncertaintyField.setValue(((Number) plot.getProperties().get(UNCERTAINTY)).doubleValue());
        uncertaintyField.valueProperty().addListener(updateProperty(UNCERTAINTY));

        xAxisField.setText((String) plot.getProperties().get(X_AXIS));
        xAxisField.textProperty().addListener(updateProperty(X_AXIS));

        yAxisField.setText((String) plot.getProperties().get(Y_AXIS));
        yAxisField.textProperty().addListener(updateProperty(Y_AXIS));
    }

    @Override
    public Node displayAsNode() {
        return this;
    }
}
apache-2.0