repo_name
stringlengths 4
116
| path
stringlengths 4
379
| size
stringlengths 1
7
| content
stringlengths 3
1.05M
| license
stringclasses 15
values |
---|---|---|---|---|
cmm863/HearthAttack
|
java/HearthSim/src/main/java/com/hearthsim/card/thegrandtournament/minion/rare/AlexstraszasChampion.java
|
1225
|
package com.hearthsim.card.thegrandtournament.minion.rare;
import com.hearthsim.card.CharacterIndex;
import com.hearthsim.card.minion.Minion;
import com.hearthsim.card.minion.MinionBattlecryInterface;
import com.hearthsim.event.effect.EffectCharacter;
import com.hearthsim.event.effect.conditional.Conditional;
import com.hearthsim.event.effect.conditional.EffectCharacterConditional;
import com.hearthsim.model.PlayerSide;
import com.hearthsim.util.tree.HearthTreeNode;

public class AlexstraszasChampion extends Minion implements MinionBattlecryInterface {

    // Lazily-built battlecry effect; constructed on first request and cached.
    private EffectCharacter<Minion> effect;

    public AlexstraszasChampion() {
        super();
    }

    /**
     * Battlecry: if the current player is holding a dragon, this minion
     * gains +1 Attack and Charge.
     */
    @Override
    public EffectCharacter<Minion> getBattlecryEffect() {
        if (effect != null) {
            return effect;
        }
        EffectCharacter<Minion> buffSelf = (targetSide, minionPlacementIndex, boardState) -> {
            this.addAttack((byte) 1);
            this.setCharge(true);
            return boardState;
        };
        effect = new EffectCharacterConditional<Minion>(
            buffSelf,
            Conditional.HOLDING_DRAGON,
            PlayerSide.CURRENT_PLAYER
        );
        return effect;
    }
}
|
mit
|
motephyr/realtime_demo
|
app/assets/javascripts/3d.js
|
2218
|
// Three.js demo: a red sphere the user moves/rotates with the arrow keys.

// Renderer dimensions.
var WIDTH = 800,
    HEIGHT = 600;

// Camera attributes.
var VIEW_ANGLE = 45,
    ASPECT = WIDTH / HEIGHT,
    NEAR = 0.1,
    FAR = 10000;

// DOM element the canvas is attached to.
var $container = $('#canvas-wrap');

// WebGL renderer, camera and scene.
var renderer = new THREE.WebGLRenderer({ alpha: true });
var camera = new THREE.PerspectiveCamera(VIEW_ANGLE, ASPECT, NEAR, FAR);
var scene = new THREE.Scene();

// Add the camera to the scene and pull it back so the sphere is visible.
scene.add(camera);
camera.position.z = 300;

renderer.setSize(WIDTH, HEIGHT);
$container.append(renderer.domElement);

// Red Lambert-shaded sphere.
var radius = 50,
    segments = 16,
    rings = 16;

var sphereMaterial = new THREE.MeshLambertMaterial({
    color: 0xCC0000
});

var sphere = new THREE.Mesh(
    new THREE.SphereGeometry(radius, segments, rings),
    sphereMaterial
);
scene.add(sphere);

// Single white point light.
var pointLight = new THREE.PointLight(0xFFFFFF);
pointLight.position.x = 10;
pointLight.position.y = 50;
pointLight.position.z = 130;
scene.add(pointLight);

renderer.render(scene, camera);

// BUG FIX: `updateFcts` was assigned without `var`, creating an implicit
// global (and a ReferenceError in strict mode). Declare it explicitly.
var updateFcts = [];

// Keyboard helper (THREEx); the canvas needs a tabIndex + focus to receive
// key events.
var keyboard = new THREEx.KeyboardState(renderer.domElement);
renderer.domElement.setAttribute("tabIndex", "0");
renderer.domElement.focus();

// Move the sphere horizontally with left/right, spin it with up/down.
updateFcts.push(function(delta, now) {
    if (keyboard.pressed('left')) {
        sphere.position.x -= 10 * delta;
    } else if (keyboard.pressed('right')) {
        sphere.position.x += 10 * delta;
    }
    if (keyboard.pressed('down')) {
        sphere.rotation.y += 10 * delta;
    } else if (keyboard.pressed('up')) {
        sphere.rotation.y -= 10 * delta;
    }
});

// Re-render every frame.
updateFcts.push(function() {
    renderer.render(scene, camera);
});

//////////////////////////////////////////////////////////////////////////////////
// loop runner                                                                  //
//////////////////////////////////////////////////////////////////////////////////
var lastTimeMsec = null;
requestAnimationFrame(function animate(nowMsec) {
    // Keep looping.
    requestAnimationFrame(animate);
    // Measure elapsed time; clamp to 200ms so a background tab does not
    // produce one huge jump when the page regains focus.
    lastTimeMsec = lastTimeMsec || nowMsec - 1000 / 60;
    var deltaMsec = Math.min(200, nowMsec - lastTimeMsec);
    lastTimeMsec = nowMsec;
    // Call each update function with times in seconds.
    updateFcts.forEach(function(updateFn) {
        updateFn(deltaMsec / 1000, nowMsec / 1000);
    });
});
|
mit
|
iiet/iiet-chat
|
app/models/server/models/LivechatDepartmentAgents.js
|
4409
|
import { Meteor } from 'meteor/meteor';
import _ from 'underscore';
import { Base } from './_Base';
import Users from './Users';
/**
 * Livechat department <-> agent relation model.
 *
 * Each document links one agent (`agentId`/`username`) to one department
 * (`departmentId`) and carries the routing counters (`count`, `order`)
 * used for round-robin agent selection.
 */
export class LivechatDepartmentAgents extends Base {
	constructor() {
		super('livechat_department_agents');

		this.tryEnsureIndex({ departmentId: 1 });
		this.tryEnsureIndex({ departmentEnabled: 1 });
		this.tryEnsureIndex({ agentId: 1 });
		this.tryEnsureIndex({ username: 1 });
	}

	findByDepartmentId(departmentId) {
		return this.find({ departmentId });
	}

	findByAgentId(agentId) {
		return this.find({ agentId });
	}

	findOneByAgentIdAndDepartmentId(agentId, departmentId) {
		return this.findOne({ agentId, departmentId });
	}

	/**
	 * Upsert the agent/department link keyed by (agentId, departmentId).
	 */
	saveAgent(agent) {
		return this.upsert({
			agentId: agent.agentId,
			departmentId: agent.departmentId,
		}, {
			$set: {
				username: agent.username,
				departmentEnabled: agent.departmentEnabled,
				// BUG FIX: always pass a radix to parseInt; without it,
				// strings with leading zeros or "0x" prefixes could be
				// parsed in an unexpected base.
				count: parseInt(agent.count, 10),
				order: parseInt(agent.order, 10),
			},
		});
	}

	removeByAgentId(agentId) {
		this.remove({ agentId });
	}

	removeByDepartmentIdAndAgentId(departmentId, agentId) {
		this.remove({ departmentId, agentId });
	}

	removeByDepartmentId(departmentId) {
		this.remove({ departmentId });
	}

	/**
	 * Atomically pick the next online agent for a department (round-robin:
	 * lowest `count`, then `order`, then `username`) and increment their
	 * `count`. Returns `{ agentId, username }`, `null` when no online agent
	 * matched, or `undefined` when the department has no agents at all.
	 */
	getNextAgentForDepartment(departmentId) {
		const agents = this.findByDepartmentId(departmentId).fetch();

		if (agents.length === 0) {
			return;
		}

		const onlineUsers = Users.findOnlineUserFromList(_.pluck(agents, 'username'));
		const onlineUsernames = _.pluck(onlineUsers.fetch(), 'username');

		const query = {
			departmentId,
			username: {
				$in: onlineUsernames,
			},
		};

		const sort = {
			count: 1,
			order: 1,
			username: 1,
		};

		const update = {
			$inc: {
				count: 1,
			},
		};

		// findAndModify is used (via the raw collection) so the pick and the
		// counter increment happen in a single atomic operation.
		const collectionObj = this.model.rawCollection();
		const findAndModify = Meteor.wrapAsync(collectionObj.findAndModify, collectionObj);

		const agent = findAndModify(query, sort, update);
		if (agent && agent.value) {
			return {
				agentId: agent.value.agentId,
				username: agent.value.username,
			};
		}
		return null;
	}

	/** Cursor over the department's agents that are currently online. */
	getOnlineForDepartment(departmentId) {
		const agents = this.findByDepartmentId(departmentId).fetch();

		if (agents.length === 0) {
			return;
		}

		const onlineUsers = Users.findOnlineUserFromList(_.pluck(agents, 'username'));
		const onlineUsernames = _.pluck(onlineUsers.fetch(), 'username');

		const query = {
			departmentId,
			username: {
				$in: onlineUsernames,
			},
		};

		return this.find(query);
	}

	/** Cursor over the department's agents that are bot users. */
	getBotsForDepartment(departmentId) {
		const agents = this.findByDepartmentId(departmentId).fetch();

		if (agents.length === 0) {
			return;
		}

		const botUsers = Users.findBotAgents(_.pluck(agents, 'username'));
		const botUsernames = _.pluck(botUsers.fetch(), 'username');

		const query = {
			departmentId,
			username: {
				$in: botUsernames,
			},
		};

		return this.find(query);
	}

	/**
	 * Bot counterpart of getNextAgentForDepartment: atomically pick the next
	 * bot agent for the department and increment its `count`.
	 */
	getNextBotForDepartment(departmentId) {
		const agents = this.findByDepartmentId(departmentId).fetch();

		if (agents.length === 0) {
			return;
		}

		const botUsers = Users.findBotAgents(_.pluck(agents, 'username'));
		const botUsernames = _.pluck(botUsers.fetch(), 'username');

		const query = {
			departmentId,
			username: {
				$in: botUsernames,
			},
		};

		const sort = {
			count: 1,
			order: 1,
			username: 1,
		};

		const update = {
			$inc: {
				count: 1,
			},
		};

		const collectionObj = this.model.rawCollection();
		const findAndModify = Meteor.wrapAsync(collectionObj.findAndModify, collectionObj);

		const bot = findAndModify(query, sort, update);
		if (bot && bot.value) {
			return {
				agentId: bot.value.agentId,
				username: bot.value.username,
			};
		}
		return null;
	}

	/**
	 * Cursor over queue entries, optionally restricted to `usersList`,
	 * ordered by department then round-robin priority.
	 */
	findUsersInQueue(usersList) {
		const query = {};

		if (!_.isEmpty(usersList)) {
			query.username = {
				$in: usersList,
			};
		}

		const options = {
			sort: {
				departmentId: 1,
				count: 1,
				order: 1,
				username: 1,
			},
		};

		return this.find(query, options);
	}

	/** Rename an agent across all of their department links. */
	replaceUsernameOfAgentByUserId(userId, username) {
		const query = { agentId: userId };

		const update = {
			$set: {
				username,
			},
		};

		return this.update(query, update, { multi: true });
	}

	setDepartmentEnabledByDepartmentId(departmentId, departmentEnabled) {
		return this.update({ departmentId },
			{ $set: { departmentEnabled } },
			{ multi: true });
	}
}

export default new LivechatDepartmentAgents();
|
mit
|
luyucia/CatDoc
|
web/editor.md-master/src/editormd.js
|
153677
|
;(function(factory) {
    "use strict";

    // UMD-style bootstrap: pick whichever module system is available.

    // CommonJS/Node.js
    if (typeof require === "function" && typeof exports === "object" && typeof module === "object")
    {
        // NOTE(review): exports the factory itself, not factory() — the
        // consumer is expected to invoke it; confirm against callers.
        module.exports = factory;
    }
    else if (typeof define === "function") // AMD/CMD/Sea.js
    {
        if (define.amd) // for Require.js
        {
            /* Require.js define replace */
        }
        else
        {
            define(["jquery"], factory); // for Sea.js
        }
    }
    else
    {
        // No module loader present: expose as a browser global.
        window.editormd = factory();
    }

}(function() {

    /* Require.js assignment replace */

    "use strict";

    // Prefer jQuery, fall back to Zepto; bail out silently when neither exists.
    var $ = (typeof (jQuery) !== "undefined") ? jQuery : Zepto;

    if (typeof ($) === "undefined") {
        return ;
    }
/**
* editormd
*
* @param {String} id 编辑器的ID
* @param {Object} options 配置选项 Key/Value
* @returns {Object} editormd 返回editormd对象
*/
var editormd = function (id, options) {
return new editormd.fn.init(id, options);
};
editormd.title = editormd.$name = "Editor.md";
editormd.version = "1.4.4";
editormd.homePage = "https://pandao.github.io/editor.md/";
editormd.classPrefix = "editormd-";
editormd.toolbarModes = {
full : [
"undo", "redo", "|",
"bold", "del", "italic", "quote", "ucwords", "uppercase", "lowercase", "|",
"h1", "h2", "h3", "h4", "h5", "h6", "|",
"list-ul", "list-ol", "hr", "|",
"link", "reference-link", "image", "code", "preformatted-text", "code-block", "table", "datetime", "emoji", "html-entities", "pagebreak", "|",
"goto-line", "watch", "preview", "fullscreen", "clear", "search", "|",
"help", "info"
],
simple : [
"undo", "redo", "|",
"bold", "del", "italic", "quote", "uppercase", "lowercase", "|",
"h1", "h2", "h3", "h4", "h5", "h6", "|",
"list-ul", "list-ol", "hr", "|",
"watch", "preview", "fullscreen", "|",
"help", "info"
],
mini : [
"undo", "redo", "|",
"watch", "preview", "|",
"help", "info"
]
};
editormd.defaults = {
mode : "gfm", //gfm or markdown
theme : "default",
value : "", // value for CodeMirror, if mode not gfm/markdown
markdown : "",
appendMarkdown : "", // if in init textarea value not empty, append markdown to textarea
width : "100%",
height : "100%",
path : "./lib/", // Dependents module file directory
pluginPath : "", // If this empty, default use settings.path + "../plugins/"
delay : 300, // Delay parse markdown to html, Uint : ms
autoLoadModules : true, // Automatic load dependent module files
watch : true,
placeholder : "Enjoy Markdown! coding now...",
gotoLine : true,
codeFold : false,
autoHeight : false,
autoFocus : true,
autoCloseTags : true,
searchReplace : true,
syncScrolling : true,
readOnly : false,
tabSize : 4,
indentUnit : 4,
lineNumbers : true,
lineWrapping : true,
autoCloseBrackets : true,
showTrailingSpace : true,
matchBrackets : true,
indentWithTabs : true,
styleSelectedText : true,
matchWordHighlight : true, // options: true, false, "onselected"
styleActiveLine : true, // Highlight the current line
dialogLockScreen : true,
dialogShowMask : true,
dialogDraggable : true,
dialogMaskBgColor : "#fff",
dialogMaskOpacity : 0.1,
fontSize : "13px",
saveHTMLToTextarea : false,
disabledKeyMaps : [],
onload : function() {},
onresize : function() {},
onchange : function() {},
onwatch : null,
onunwatch : null,
onpreviewing : function() {},
onpreviewed : function() {},
onfullscreen : function() {},
onfullscreenExit : function() {},
onscroll : function() {},
onpreviewscroll : function() {},
imageUpload : false,
imageFormats : ["jpg", "jpeg", "gif", "png", "bmp", "webp"],
imageUploadURL : "",
crossDomainUpload : false,
uploadCallbackURL : "",
toc : true, // Table of contents
tocm : false, // Using [TOCM], auto create ToC dropdown menu
tocTitle : "", // for ToC dropdown menu btn
tocDropdown : false,
tocContainer : "",
tocStartLevel : 1, // Said from H1 to create ToC
htmlDecode : false, // Open the HTML tag identification
pageBreak : true, // Enable parse page break [========]
atLink : true, // for @link
emailLink : true, // for email address auto link
taskList : false, // Enable Github Flavored Markdown task lists
emoji : false, // :emoji: , Support Github emoji, Twitter Emoji (Twemoji);
// Support FontAwesome icon emoji :fa-xxx: > Using fontAwesome icon web fonts;
// Support Editor.md logo icon emoji :editormd-logo: :editormd-logo-1x: > 1~8x;
tex : false, // TeX(LaTeX), based on KaTeX
flowChart : false, // flowChart.js only support IE9+
sequenceDiagram : false, // sequenceDiagram.js only support IE9+
previewCodeHighlight : true,
toolbar : true, // show/hide toolbar
toolbarAutoFixed : true, // on window scroll auto fixed position
toolbarIcons : "full",
toolbarTitles : {},
toolbarHandlers : {
ucwords : function() {
return editormd.toolbarHandlers.ucwords;
},
lowercase : function() {
return editormd.toolbarHandlers.lowercase;
}
},
toolbarCustomIcons : { // using html tag create toolbar icon, unused default <a> tag.
lowercase : "<a href=\"javascript:;\" title=\"Lowercase\" unselectable=\"on\"><i class=\"fa\" name=\"lowercase\" style=\"font-size:24px;margin-top: -10px;\">a</i></a>",
"ucwords" : "<a href=\"javascript:;\" title=\"ucwords\" unselectable=\"on\"><i class=\"fa\" name=\"ucwords\" style=\"font-size:20px;margin-top: -3px;\">Aa</i></a>"
},
toolbarIconsClass : {
undo : "fa-undo",
redo : "fa-repeat",
bold : "fa-bold",
del : "fa-strikethrough",
italic : "fa-italic",
quote : "fa-quote-left",
uppercase : "fa-font",
h1 : editormd.classPrefix + "bold",
h2 : editormd.classPrefix + "bold",
h3 : editormd.classPrefix + "bold",
h4 : editormd.classPrefix + "bold",
h5 : editormd.classPrefix + "bold",
h6 : editormd.classPrefix + "bold",
"list-ul" : "fa-list-ul",
"list-ol" : "fa-list-ol",
hr : "fa-minus",
link : "fa-link",
"reference-link" : "fa-anchor",
image : "fa-picture-o",
code : "fa-code",
"preformatted-text" : "fa-file-code-o",
"code-block" : "fa-file-code-o",
table : "fa-table",
datetime : "fa-clock-o",
emoji : "fa-smile-o",
"html-entities" : "fa-copyright",
pagebreak : "fa-newspaper-o",
"goto-line" : "fa-terminal", // fa-crosshairs
watch : "fa-eye-slash",
unwatch : "fa-eye",
preview : "fa-desktop",
search : "fa-search",
fullscreen : "fa-arrows-alt",
clear : "fa-eraser",
help : "fa-question-circle",
info : "fa-info-circle"
},
toolbarIconTexts : {},
lang : {
name : "zh-cn",
description : "开源在线Markdown编辑器<br/>Open source online Markdown editor.",
tocTitle : "目录",
toolbar : {
undo : "撤销(Ctrl+Z)",
redo : "重做(Ctrl+Y)",
bold : "粗体",
del : "删除线",
italic : "斜体",
quote : "引用",
ucwords : "将每个单词首字母转成大写",
uppercase : "将所选转换成大写",
lowercase : "将所选转换成小写",
h1 : "标题1",
h2 : "标题2",
h3 : "标题3",
h4 : "标题4",
h5 : "标题5",
h6 : "标题6",
"list-ul" : "无序列表",
"list-ol" : "有序列表",
hr : "横线",
link : "链接",
"reference-link" : "引用链接",
image : "添加图片",
code : "行内代码",
"preformatted-text" : "预格式文本 / 代码块(缩进风格)",
"code-block" : "代码块(多语言风格)",
table : "添加表格",
datetime : "日期时间",
emoji : "Emoji表情",
"html-entities" : "HTML实体字符",
pagebreak : "插入分页符",
"goto-line" : "跳转到行",
watch : "关闭实时预览",
unwatch : "开启实时预览",
preview : "全窗口预览HTML(按 Shift + ESC还原)",
fullscreen : "全屏(按ESC还原)",
clear : "清空",
search : "搜索",
help : "使用帮助",
info : "关于" + editormd.title
},
buttons : {
enter : "确定",
cancel : "取消",
close : "关闭"
},
dialog : {
link : {
title : "添加链接",
url : "链接地址",
urlTitle : "链接标题",
urlEmpty : "错误:请填写链接地址。"
},
referenceLink : {
title : "添加引用链接",
name : "引用名称",
url : "链接地址",
urlId : "链接ID",
urlTitle : "链接标题",
nameEmpty: "错误:引用链接的名称不能为空。",
idEmpty : "错误:请填写引用链接的ID。",
urlEmpty : "错误:请填写引用链接的URL地址。"
},
image : {
title : "添加图片",
url : "图片地址",
link : "图片链接",
alt : "图片描述",
uploadButton : "本地上传",
imageURLEmpty : "错误:图片地址不能为空。",
uploadFileEmpty : "错误:上传的图片不能为空。",
formatNotAllowed : "错误:只允许上传图片文件,允许上传的图片文件格式有:"
},
preformattedText : {
title : "添加预格式文本或代码块",
emptyAlert : "错误:请填写预格式文本或代码的内容。"
},
codeBlock : {
title : "添加代码块",
selectLabel : "代码语言:",
selectDefaultText : "请选择代码语言",
otherLanguage : "其他语言",
unselectedLanguageAlert : "错误:请选择代码所属的语言类型。",
codeEmptyAlert : "错误:请填写代码内容。"
},
htmlEntities : {
title : "HTML 实体字符"
},
help : {
title : "使用帮助"
}
}
}
};
    // CSS class names shared across instances.
    editormd.classNames = {
        tex : editormd.classPrefix + "tex"
    };

    // z-index applied to generated dialogs.
    editormd.dialogZindex = 99999;

    // Handles to lazily-loaded third-party modules; populated by loadQueues()
    // or by init() when the modules are already on the page.
    editormd.$katex = null;
    editormd.$marked = null;
    editormd.$CodeMirror = null;
    editormd.$prettyPrint = null;

    var timer, flowchartTimer;

    editormd.prototype = editormd.fn = {

        // Per-instance UI state flags.
        state : {
            watching   : false,
            loaded     : false,
            preview    : false,
            fullscreen : false
        },
/**
* 构造函数/实例初始化
* Constructor / instance initialization
*
* @param {String} id 编辑器的ID
* @param {Object} [options={}] 配置选项 Key/Value
* @returns {editormd} 返回editormd的实例对象
*/
init : function (id, options) {
options = options || {};
if (typeof id === "object")
{
options = id;
}
var _this = this;
var classPrefix = this.classPrefix = editormd.classPrefix;
var settings = this.settings = $.extend(true, editormd.defaults, options);
id = (typeof id === "object") ? settings.id : id;
var editor = this.editor = $("#" + id);
this.id = id;
this.lang = settings.lang;
var classNames = this.classNames = {
textarea : {
html : classPrefix + "html-textarea",
markdown : classPrefix + "markdown-textarea"
}
};
settings.pluginPath = (settings.pluginPath === "") ? settings.path + "../plugins/" : settings.pluginPath;
this.state.watching = (settings.watch) ? true : false;
if (!editor.hasClass("editormd")) {
editor.addClass("editormd");
}
editor.css({
width : (typeof settings.width === "number") ? settings.width + "px" : settings.width,
height : (typeof settings.height === "number") ? settings.height + "px" : settings.height
});
if (settings.autoHeight)
{
editor.css("height", "auto");
}
var markdownTextarea = this.markdownTextarea = editor.children("textarea");
if (markdownTextarea.length < 1)
{
editor.append("<textarea></textarea>");
markdownTextarea = this.markdownTextarea = editor.children("textarea");
}
markdownTextarea.addClass(classNames.textarea.markdown).attr("name", id + "-markdown-doc").attr("placeholder", settings.placeholder);
var appendElements = [
(!settings.readOnly) ? "<a href=\"javascript:;\" class=\"fa fa-close " + classPrefix + "preview-close-btn\"></a>" : "",
( (settings.saveHTMLToTextarea) ? "<textarea class=\"" + classNames.textarea.html + "\" name=\"" + id + "-html-code\"></textarea>" : "" ),
"<div class=\"" + classPrefix + "preview\"><div class=\"markdown-body " + classPrefix + "preview-container\"></div></div>",
"<div class=\"" + classPrefix + "container-mask\" style=\"display:block;\"></div>",
"<div class=\"" + classPrefix + "mask\"></div>"
].join("\n");
editor.append(appendElements).addClass(classPrefix + "vertical");
this.mask = editor.children("." + classPrefix + "mask");
this.containerMask = editor.children("." + classPrefix + "container-mask");
if (settings.markdown !== "")
{
markdownTextarea.val(settings.markdown);
}
if (settings.appendMarkdown !== "")
{
markdownTextarea.val(markdownTextarea.val() + settings.appendMarkdown);
}
this.htmlTextarea = editor.children("." + classNames.textarea.html);
this.preview = editor.children("." + classPrefix + "preview");
this.previewContainer = this.preview.children("." + classPrefix + "preview-container");
if (typeof define === "function" && define.amd)
{
if (typeof katex !== "undefined")
{
editormd.$katex = katex;
}
if (settings.searchReplace && !settings.readOnly)
{
editormd.loadCSS(settings.path + "codemirror/addon/dialog/dialog");
editormd.loadCSS(settings.path + "codemirror/addon/search/matchesonscrollbar");
}
}
if ((typeof define === "function" && define.amd) || !settings.autoLoadModules)
{
if (typeof CodeMirror !== "undefined") {
editormd.$CodeMirror = CodeMirror;
}
if (typeof marked !== "undefined") {
editormd.$marked = marked;
}
this.setCodeMirror().setToolbar().loadedDisplay();
}
else
{
this.loadQueues();
}
return this;
},
        /**
         * Required-components loading queue.
         *
         * Loads CodeMirror (+ modes/addons), then marked, then prettify, then
         * the optional flowchart/sequence-diagram stack — each step chained in
         * the previous step's callback so load order is guaranteed.
         *
         * @returns {editormd} this instance
         */
        loadQueues : function() {
            var _this = this;
            var settings = this.settings;
            var loadPath = settings.path;

            // Final stage: optionally load raphael/underscore plus the
            // diagram libraries, then reveal the editor.
            var loadFlowChartOrSequenceDiagram = function() {

                // These libraries don't support IE8; skip straight to display.
                if (editormd.isIE8)
                {
                    _this.loadedDisplay();

                    return ;
                }

                if (settings.flowChart || settings.sequenceDiagram)
                {
                    editormd.loadScript(loadPath + "raphael.min", function() {

                        editormd.loadScript(loadPath + "underscore.min", function() {

                            if (!settings.flowChart && settings.sequenceDiagram)
                            {
                                editormd.loadScript(loadPath + "sequence-diagram.min", function() {
                                    _this.loadedDisplay();
                                });
                            }
                            else if (settings.flowChart && !settings.sequenceDiagram)
                            {
                                editormd.loadScript(loadPath + "flowchart.min", function() {
                                    editormd.loadScript(loadPath + "jquery.flowchart.min", function() {
                                        _this.loadedDisplay();
                                    });
                                });
                            }
                            else if (settings.flowChart && settings.sequenceDiagram)
                            {
                                editormd.loadScript(loadPath + "flowchart.min", function() {
                                    editormd.loadScript(loadPath + "jquery.flowchart.min", function() {
                                        editormd.loadScript(loadPath + "sequence-diagram.min", function() {
                                            _this.loadedDisplay();
                                        });
                                    });
                                });
                            }
                        });

                    });
                }
                else
                {
                    _this.loadedDisplay();
                }

            };

            editormd.loadCSS(loadPath + "codemirror/codemirror.min");

            if (settings.searchReplace && !settings.readOnly)
            {
                editormd.loadCSS(loadPath + "codemirror/addon/dialog/dialog");
                editormd.loadCSS(loadPath + "codemirror/addon/search/matchesonscrollbar");
            }

            if (settings.codeFold)
            {
                editormd.loadCSS(loadPath + "codemirror/addon/fold/foldgutter");
            }

            editormd.loadScript(loadPath + "codemirror/codemirror.min", function() {

                editormd.$CodeMirror = CodeMirror;

                editormd.loadScript(loadPath + "codemirror/modes.min", function() {

                    editormd.loadScript(loadPath + "codemirror/addons.min", function() {

                        _this.setCodeMirror();

                        // Non-markdown modes skip the markdown toolchain entirely.
                        if (settings.mode !== "gfm" && settings.mode !== "markdown")
                        {
                            _this.loadedDisplay();

                            return false;
                        }

                        _this.setToolbar();

                        editormd.loadScript(loadPath + "marked.min", function() {

                            editormd.$marked = marked;

                            if (settings.previewCodeHighlight)
                            {
                                editormd.loadScript(loadPath + "prettify.min", function() {
                                    loadFlowChartOrSequenceDiagram();
                                });
                            }
                            else
                            {
                                loadFlowChartOrSequenceDiagram();
                            }
                        });

                    });

                });

            });

            return this;
        },
/**
* 设置CodeMirror的主题
* Setting CodeMirror theme
*
* @returns {editormd} 返回editormd的实例对象
*/
setTheme : function(theme) {
var settings = this.settings;
settings.theme = theme;
if (theme !== "default")
{
editormd.loadCSS(settings.path + "codemirror/theme/" + settings.theme);
}
this.cm.setOption("theme", theme);
return this;
},
        /**
         * Configure and initialize the CodeMirror component from the
         * instance settings, replacing the markdown textarea.
         *
         * @returns {editormd} this instance
         */
        setCodeMirror : function() {
            var settings = this.settings;
            var editor = this.editor;

            if (settings.theme !== "default")
            {
                editormd.loadCSS(settings.path + "codemirror/theme/" + settings.theme);
            }

            // Translate editormd settings into CodeMirror options.
            var codeMirrorConfig = {
                mode : settings.mode,
                theme : settings.theme,
                tabSize : settings.tabSize,
                dragDrop : false,
                autofocus : settings.autoFocus,
                autoCloseTags : settings.autoCloseTags,
                readOnly : (settings.readOnly) ? "nocursor" : false,
                indentUnit : settings.indentUnit,
                lineNumbers : settings.lineNumbers,
                lineWrapping : settings.lineWrapping,
                extraKeys : {
                    // Fold the block under the cursor.
                    "Ctrl-Q": function(cm) {
                        cm.foldCode(cm.getCursor());
                    }
                },
                foldGutter : settings.codeFold,
                gutters : ["CodeMirror-linenumbers", "CodeMirror-foldgutter"],
                matchBrackets : settings.matchBrackets,
                indentWithTabs : settings.indentWithTabs,
                styleActiveLine : settings.styleActiveLine,
                styleSelectedText : settings.styleSelectedText,
                autoCloseBrackets : settings.autoCloseBrackets,
                showTrailingSpace : settings.showTrailingSpace,
                // "onselected" highlights matches only for explicit selections.
                highlightSelectionMatches : ( (!settings.matchWordHighlight) ? false : { showToken: (settings.matchWordHighlight === "onselected") ? false : /\w/ } )
            };

            this.codeEditor = this.cm = editormd.$CodeMirror.fromTextArea(this.markdownTextarea[0], codeMirrorConfig);
            this.codeMirror = this.cmElement = editor.children(".CodeMirror");

            if (settings.value !== "")
            {
                this.cm.setValue(settings.value);
            }

            // Half width when the live preview pane is shown alongside.
            this.codeMirror.css({
                fontSize : settings.fontSize,
                width : (!settings.watch) ? "100%" : "50%"
            });

            if (settings.autoHeight)
            {
                this.codeMirror.css("height", "auto");
                this.cm.setOption("viewportMargin", Infinity);
            }

            return this;
        },
/**
* 获取CodeMirror的配置选项
* Get CodeMirror setting options
*
* @returns {Mixed} return CodeMirror setting option value
*/
getCodeMirrorOption : function(key) {
return this.cm.getOption(key);
},
/**
* 配置和重配置CodeMirror的选项
* CodeMirror setting options / resettings
*
* @returns {editormd} 返回editormd的实例对象
*/
setCodeMirrorOption : function(key, value) {
this.cm.setOption(key, value);
return this;
},
/**
* 添加 CodeMirror 键盘快捷键
* Add CodeMirror keyboard shortcuts key map
*
* @returns {editormd} 返回editormd的实例对象
*/
addKeyMap : function(map, bottom) {
this.cm.addKeyMap(map, bottom);
return this;
},
/**
* 移除 CodeMirror 键盘快捷键
* Remove CodeMirror keyboard shortcuts key map
*
* @returns {editormd} 返回editormd的实例对象
*/
removeKeyMap : function(map) {
this.cm.removeKeyMap(map);
return this;
},
/**
* 跳转到指定的行
* Goto CodeMirror line
*
* @param {String|Intiger} line line number or "first"|"last"
* @returns {editormd} 返回editormd的实例对象
*/
gotoLine : function (line) {
var settings = this.settings;
if (!settings.gotoLine)
{
return this;
}
var cm = this.cm;
var editor = this.editor;
var count = cm.lineCount();
var preview = this.preview;
if (typeof line === "string")
{
if(line === "last")
{
line = count;
}
if (line === "first")
{
line = 1;
}
}
if (typeof line !== "number")
{
alert("Error: The line number must be an integer.");
return this;
}
line = parseInt(line) - 1;
if (line > count)
{
alert("Error: The line number range 1-" + count);
return this;
}
cm.setCursor( {line : line, ch : 0} );
var scrollInfo = cm.getScrollInfo();
var clientHeight = scrollInfo.clientHeight;
var coords = cm.charCoords({line : line, ch : 0}, "local");
cm.scrollTo(null, (coords.top + coords.bottom - clientHeight) / 2);
if (settings.watch)
{
var cmScroll = this.codeMirror.find(".CodeMirror-scroll")[0];
var height = $(cmScroll).height();
var scrollTop = cmScroll.scrollTop;
var percent = (scrollTop / cmScroll.scrollHeight);
if (scrollTop === 0)
{
preview.scrollTop(0);
}
else if (scrollTop + height >= cmScroll.scrollHeight - 16)
{
preview.scrollTop(preview[0].scrollHeight);
}
else
{
preview.scrollTop(preview[0].scrollHeight * percent);
}
}
cm.focus();
return this;
},
/**
* 扩展当前实例对象,可同时设置多个或者只设置一个
* Extend editormd instance object, can mutil setting.
*
* @returns {editormd} this(editormd instance object.)
*/
extends : function() {
if (typeof arguments[1] !== "undefined")
{
if (typeof arguments[1] === "function")
{
arguments[1] = $.proxy(arguments[1], this);
}
this[arguments[0]] = arguments[1];
}
if (typeof arguments[0] === "object" && typeof arguments[0].length === "undefined")
{
$.extend(true, this, arguments[0]);
}
return this;
},
/**
* 设置或扩展当前实例对象,单个设置
* Extend editormd instance object, one by one
*
* @param {String|Object} key option key
* @param {String|Object} value option value
* @returns {editormd} this(editormd instance object.)
*/
set : function (key, value) {
if (typeof value !== "undefined" && typeof value === "function")
{
value = $.proxy(value, this);
}
this[key] = value;
return this;
},
/**
* 重新配置
* Resetting editor options
*
* @param {String|Object} key option key
* @param {String|Object} value option value
* @returns {editormd} this(editormd instance object.)
*/
config : function(key, value) {
var settings = this.settings;
if (typeof key === "object")
{
settings = $.extend(true, settings, key);
}
if (typeof key === "string")
{
settings[key] = value;
}
this.settings = settings;
this.recreate();
return this;
},
/**
* 注册事件处理方法
* Bind editor event handle
*
* @param {String} eventType event type
* @param {Function} callback 回调函数
* @returns {editormd} this(editormd instance object.)
*/
on : function(eventType, callback) {
var settings = this.settings;
if (typeof settings["on" + eventType] !== "undefined")
{
settings["on" + eventType] = $.proxy(callback, this);
}
return this;
},
/**
* 解除事件处理方法
* Unbind editor event handle
*
* @param {String} eventType event type
* @returns {editormd} this(editormd instance object.)
*/
off : function(eventType) {
var settings = this.settings;
if (typeof settings["on" + eventType] !== "undefined")
{
settings["on" + eventType] = function(){};
}
return this;
},
/**
* 显示工具栏
* Display toolbar
*
* @param {Function} [callback=function(){}] 回调函数
* @returns {editormd} 返回editormd的实例对象
*/
showToolbar : function(callback) {
var settings = this.settings;
if(settings.readOnly) {
return this;
}
if (settings.toolbar && (this.toolbar.length < 1 || this.toolbar.find("." + this.classPrefix + "menu").html() === "") )
{
this.setToolbar();
}
settings.toolbar = true;
this.toolbar.show();
this.resize();
$.proxy(callback || function(){}, this)();
return this;
},
/**
* 隐藏工具栏
* Hide toolbar
*
* @param {Function} [callback=function(){}] 回调函数
* @returns {editormd} this(editormd instance object.)
*/
hideToolbar : function(callback) {
var settings = this.settings;
settings.toolbar = false;
this.toolbar.hide();
this.resize();
$.proxy(callback || function(){}, this)();
return this;
},
        /**
         * Keep the toolbar fixed to the top of the viewport while the page
         * scrolls past the editor.
         *
         * @param   {Boolean}  [fixed] override settings.toolbarAutoFixed
         * @returns {editormd} this instance
         */
        setToolbarAutoFixed : function(fixed) {

            var state = this.state;
            var editor = this.editor;
            var toolbar = this.toolbar;
            var settings = this.settings;

            if (typeof fixed !== "undefined")
            {
                settings.toolbarAutoFixed = fixed;
            }

            var autoFixedHandle = function(){
                var $window = $(window);
                var top = $window.scrollTop();

                if (!settings.toolbarAutoFixed)
                {
                    return false;
                }

                // Fix the toolbar only while the scroll position is inside
                // the editor's vertical extent.
                if (top - editor.offset().top > 10 && top < editor.height())
                {
                    toolbar.css({
                        position : "fixed",
                        width : editor.width() + "px",
                        left : ($window.width() - editor.width()) / 2 + "px"
                    });
                }
                else
                {
                    toolbar.css({
                        position : "absolute",
                        width : "100%",
                        left : 0
                    });
                }
            };

            // NOTE(review): the scroll handler is bound but never unbound;
            // repeated calls add duplicate handlers — confirm against callers.
            if (!state.fullscreen && !state.preview && settings.toolbar && settings.toolbarAutoFixed)
            {
                $(window).bind("scroll", autoFixedHandle);
            }

            return this;
        },
        /**
         * Build (if necessary) and display the toolbar from the configured
         * icon list, wiring up handlers and auto-fixed positioning.
         *
         * @returns {editormd} this instance
         */
        setToolbar : function() {
            var settings = this.settings;

            if(settings.readOnly) {
                return this;
            }

            var editor = this.editor;
            var preview = this.preview;
            var classPrefix = this.classPrefix;

            var toolbar = this.toolbar = editor.children("." + classPrefix + "toolbar");

            if (settings.toolbar && toolbar.length < 1)
            {
                var toolbarHTML = "<div class=\"" + classPrefix + "toolbar\"><div class=\"" + classPrefix + "toolbar-container\"><ul class=\"" + classPrefix + "menu\"></ul></div></div>";

                editor.append(toolbarHTML);
                toolbar = this.toolbar = editor.children("." + classPrefix + "toolbar");
            }

            if (!settings.toolbar)
            {
                toolbar.hide();

                return this;
            }

            toolbar.show();

            // Icon list may be a function, a preset name ("full"/"simple"/"mini"),
            // or an explicit array.
            var icons = (typeof settings.toolbarIcons === "function") ? settings.toolbarIcons()
                        : ((typeof settings.toolbarIcons === "string") ? editormd.toolbarModes[settings.toolbarIcons] : settings.toolbarIcons);

            var toolbarMenu = toolbar.find("." + this.classPrefix + "menu"), menu = "";
            var pullRight = false;

            for (var i = 0, len = icons.length; i < len; i++)
            {
                var name = icons[i];

                if (name === "||")
                {
                    // "||": every icon after this is right-aligned.
                    pullRight = true;
                }
                else if (name === "|")
                {
                    menu += "<li class=\"divider\" unselectable=\"on\">|</li>";
                }
                else
                {
                    var isHeader = (/h(\d)/.test(name));
                    var index = name;

                    // The watch icon shows the *opposite* state as its label.
                    if (name === "watch" && !settings.watch) {
                        index = "unwatch";
                    }

                    var title = settings.lang.toolbar[index];
                    var iconTexts = settings.toolbarIconTexts[index];
                    var iconClass = settings.toolbarIconsClass[index];

                    title = (typeof title === "undefined") ? "" : title;
                    iconTexts = (typeof iconTexts === "undefined") ? "" : iconTexts;
                    iconClass = (typeof iconClass === "undefined") ? "" : iconClass;

                    var menuItem = pullRight ? "<li class=\"pull-right\">" : "<li>";

                    // Custom HTML icons take precedence over generated <a> tags.
                    if (typeof settings.toolbarCustomIcons[name] !== "undefined" && typeof settings.toolbarCustomIcons[name] !== "function")
                    {
                        menuItem += settings.toolbarCustomIcons[name];
                    }
                    else
                    {
                        menuItem += "<a href=\"javascript:;\" title=\"" + title + "\" unselectable=\"on\">";
                        menuItem += "<i class=\"fa " + iconClass + "\" name=\""+name+"\" unselectable=\"on\">"+((isHeader) ? name.toUpperCase() : ( (iconClass === "") ? iconTexts : "") ) + "</i>";
                        menuItem += "</a>";
                    }

                    menuItem += "</li>";

                    menu = pullRight ? menuItem + menu : menu + menuItem;
                }
            }

            toolbarMenu.html(menu);

            // Localize the tooltips of the two custom-HTML icons.
            toolbarMenu.find("[title=\"Lowercase\"]").attr("title", settings.lang.toolbar.lowercase);
            toolbarMenu.find("[title=\"ucwords\"]").attr("title", settings.lang.toolbar.ucwords);

            this.setToolbarHandler();
            this.setToolbarAutoFixed();

            return this;
        },
        /**
         * Instance-level proxies for the shared dialog helpers
         * (editormd.dialogLockScreen / editormd.dialogShowMask), bound to
         * this editor instance.
         *
         * NOTE(review): the comment block previously here described the
         * toolbar handler getter; it was misplaced.
         */
        dialogLockScreen : function() {
            $.proxy(editormd.dialogLockScreen, this)();

            return this;
        },

        // Show the modal mask behind `dialog`, bound to this instance.
        dialogShowMask : function(dialog) {
            $.proxy(editormd.dialogShowMask, this)(dialog);

            return this;
        },
getToolbarHandles : function(name) {
var toolbarHandlers = this.toolbarHandlers = editormd.toolbarHandlers;
return (name && typeof toolbarIconHandlers[name] !== "undefined") ? toolbarHandlers[name] : toolbarHandlers;
},
        /**
         * Bind click/touch handlers to the toolbar icons.
         * Built-in handlers (editormd.toolbarHandlers) take precedence over
         * user handlers supplied via settings.toolbarHandlers.
         *
         * @returns {editormd} this instance, for chaining
         */
        setToolbarHandler : function() {
            var _this = this;
            var settings = this.settings;
            // Nothing to wire up in read-only / toolbar-less mode.
            if (!settings.toolbar || settings.readOnly) {
                return this;
            }
            var toolbar = this.toolbar;
            var cm = this.cm;
            var classPrefix = this.classPrefix;
            var toolbarIcons = this.toolbarIcons = toolbar.find("." + classPrefix + "menu > li > a");
            var toolbarIconHandlers = this.getToolbarHandles();
            toolbarIcons.bind(editormd.mouseOrTouch("click", "touchend"), function(event) {
                var icon = $(this).children(".fa");
                var name = icon.attr("name");
                var cursor = cm.getCursor();
                var selection = cm.getSelection();
                // Icons without a name attribute are inert.
                if (name === "") {
                    return ;
                }
                _this.activeIcon = icon;
                if (typeof toolbarIconHandlers[name] !== "undefined")
                {
                    // Built-in handler: invoked with the editor instance as `this`.
                    $.proxy(toolbarIconHandlers[name], _this)(cm);
                }
                else
                {
                    if (typeof settings.toolbarHandlers[name] !== "undefined")
                    {
                        // User-supplied handler gets richer call context.
                        $.proxy(settings.toolbarHandlers[name], _this)(cm, icon, cursor, selection);
                    }
                }
                // Dialog-opening / mode-switching icons keep their own focus;
                // all other actions return focus to the editor.
                if (name !== "link" && name !== "reference-link" && name !== "image" && name !== "code-block" &&
                    name !== "preformatted-text" && name !== "watch" && name !== "preview" && name !== "search" && name !== "fullscreen" && name !== "info")
                {
                    cm.focus();
                }
                return false;
            });
            return this;
        },
        /**
         * Create a custom dialog (delegates to editormd.createDialog).
         *
         * @param {Object} options dialog options (key/value)
         * @returns {dialog} jQuery object of the created dialog
         */
        createDialog : function(options) {
            return $.proxy(editormd.createDialog, this)(options);
        },
        /**
         * Create the "About Editor.md" dialog and append it to the editor.
         *
         * @returns {editormd} this instance, for chaining
         */
        createInfoDialog : function() {
            var _this = this;
            var editor = this.editor;
            var classPrefix = this.classPrefix;
            var infoDialogHTML = [
                "<div class=\"" + classPrefix + "dialog " + classPrefix + "dialog-info\" style=\"\">",
                "<div class=\"" + classPrefix + "dialog-container\">",
                "<h1><i class=\"editormd-logo editormd-logo-lg editormd-logo-color\"></i> " + editormd.title + "<small>v" + editormd.version + "</small></h1>",
                "<p>" + this.lang.description + "</p>",
                "<p style=\"margin: 10px 0 20px 0;\"><a href=\"" + editormd.homePage + "\" target=\"_blank\">" + editormd.homePage + " <i class=\"fa fa-external-link\"></i></a></p>",
                "<p style=\"font-size: 0.85em;\">Copyright © 2015 <a href=\"https://github.com/pandao\" target=\"_blank\" class=\"hover-link\">Pandao</a>, The <a href=\"https://github.com/pandao/editor.md/blob/master/LICENSE\" target=\"_blank\" class=\"hover-link\">MIT</a> License.</p>",
                "</div>",
                "<a href=\"javascript:;\" class=\"fa fa-close " + classPrefix + "dialog-close\"></a>",
                "</div>"
            ].join("\n");
            editor.append(infoDialogHTML);
            var infoDialog = this.infoDialog = editor.children("." + classPrefix + "dialog-info");
            infoDialog.find("." + classPrefix + "dialog-close").bind(editormd.mouseOrTouch("click", "touchend"), function() {
                _this.hideInfoDialog();
            });
            // IE8 has no box-shadow; fall back to a plain border.
            infoDialog.css("border", (editormd.isIE8) ? "1px solid #ddd" : "").css("z-index", editormd.dialogZindex).show();
            this.infoDialogPosition();
            return this;
        },
/**
* 关于Editor.md对话居中定位
* Editor.md dialog position handle
*
* @returns {editormd} 返回editormd的实例对象
*/
infoDialogPosition : function() {
var infoDialog = this.infoDialog;
var _infoDialogPosition = function() {
infoDialog.css({
top : ($(window).height() - infoDialog.height()) / 2 + "px",
left : ($(window).width() - infoDialog.width()) / 2 + "px"
});
};
_infoDialogPosition();
$(window).resize(_infoDialogPosition);
return this;
},
        /**
         * Show the "About Editor.md" dialog (creating it on first use),
         * lock the screen and raise the mask layer.
         *
         * @returns {editormd} this instance, for chaining
         */
        showInfoDialog : function() {
            $("html,body").css("overflow-x", "hidden");
            var _this = this;
            var editor = this.editor;
            var settings = this.settings;
            var infoDialog = this.infoDialog = editor.children("." + this.classPrefix + "dialog-info");
            // Lazily build the dialog the first time it is requested.
            if (infoDialog.length < 1)
            {
                this.createInfoDialog();
            }
            this.lockScreen(true);
            this.mask.css({
                opacity : settings.dialogMaskOpacity,
                backgroundColor : settings.dialogMaskBgColor
            }).show();
            infoDialog.css("z-index", editormd.dialogZindex).show();
            this.infoDialogPosition();
            return this;
        },
        /**
         * Hide the "About Editor.md" dialog and the mask, and unlock
         * page scrolling.
         *
         * @returns {editormd} this instance, for chaining
         */
        hideInfoDialog : function() {
            $("html,body").css("overflow-x", "");
            this.infoDialog.hide();
            this.mask.hide();
            this.lockScreen(false);
            return this;
        },
        /**
         * Lock or unlock page scrolling (delegates to editormd.lockScreen).
         *
         * @param {Boolean} lock true to lock, false to unlock
         * @returns {editormd} this instance, for chaining
         */
        lockScreen : function(lock) {
            editormd.lockScreen(lock);
            return this;
        },
        /**
         * Rebuild the editor UI, e.g. after a language pack or module
         * has been loaded dynamically.
         *
         * @returns {editormd} this instance, for chaining
         */
        recreate : function() {
            var _this = this;
            var editor = this.editor;
            var settings = this.settings;
            this.codeMirror.remove();
            this.setCodeMirror();
            if (!settings.readOnly)
            {
                // Drop any dialogs that were built against the old DOM.
                if (editor.find(".editormd-dialog").length > 0) {
                    editor.find(".editormd-dialog").remove();
                }
                if (settings.toolbar)
                {
                    this.getToolbarHandles();
                    this.setToolbar();
                }
            }
            this.loadedDisplay(true);
            return this;
        },
/**
* 高亮预览HTML的pre代码部分
* highlight of preview codes
*
* @returns {editormd} 返回editormd的实例对象
*/
previewCodeHighlight : function() {
var settings = this.settings;
var previewContainer = this.previewContainer;
if (settings.previewCodeHighlight)
{
previewContainer.find("pre").addClass("prettyprint linenums");
if (typeof prettyPrint !== "undefined")
{
prettyPrint();
}
}
return this;
},
        /**
         * Render TeX/LaTeX math (KaTeX) inside the preview pane.
         * Skipped while no save is pending (shared closure `timer` is null).
         *
         * @returns {editormd} this instance, for chaining
         */
        katexRender : function() {
            if (timer === null)
            {
                return this;
            }
            this.previewContainer.find("." + editormd.classNames.tex).each(function(){
                var tex = $(this);
                editormd.$katex.render(tex.text(), tex[0]);
            });
            return this;
        },
        /**
         * Render flowcharts and sequence diagrams in the preview pane.
         * No-op on IE8.
         *
         * @returns {editormd} this instance, for chaining
         */
        flowChartAndSequenceDiagramRender : function() {
            var settings = this.settings;
            var previewContainer = this.previewContainer;
            if (editormd.isIE8) {
                return this;
            }
            if (settings.flowChart) {
                // flowchartTimer is the shared debounce handle set in save().
                if (flowchartTimer === null) {
                    return this;
                }
                previewContainer.find(".flowchart").flowChart();
            }
            if (settings.sequenceDiagram) {
                previewContainer.find(".sequence-diagram").sequenceDiagram({theme: "simple"});
            }
            return this;
        },
/**
* 注册键盘快捷键处理
* Register CodeMirror keyMaps (keyboard shortcuts).
*
* @param {Object} keyMap KeyMap key/value {"(Ctrl/Shift/Alt)-Key" : function(){}}
* @returns {editormd} return this
*/
registerKeyMaps : function(keyMap) {
var _this = this;
var cm = this.cm;
var settings = this.settings;
var toolbarHandlers = editormd.toolbarHandlers;
var disabledKeyMaps = settings.disabledKeyMaps;
keyMap = keyMap || null;
if (keyMap)
{
for (var i in keyMap)
{
if ($.inArray(i, disabledKeyMaps) < 0)
{
var map = {};
map[i] = keyMap[i];
cm.addKeyMap(keyMap);
}
}
}
else
{
for (var k in editormd.keyMaps)
{
var _keyMap = editormd.keyMaps[k];
var handle = (typeof _keyMap === "string") ? $.proxy(toolbarHandlers[_keyMap], _this) : $.proxy(_keyMap, _this);
if ($.inArray(k, ["F9", "F10", "F11"]) < 0 && $.inArray(k, disabledKeyMaps) < 0)
{
var _map = {};
_map[k] = handle;
cm.addKeyMap(_map);
}
}
$(window).keydown(function(event) {
var keymaps = {
"120" : "F9",
"121" : "F10",
"122" : "F11"
};
if ( $.inArray(keymaps[event.keyCode], disabledKeyMaps) < 0 )
{
switch (event.keyCode)
{
case 120:
$.proxy(toolbarHandlers["watch"], _this)();
return false;
break;
case 121:
$.proxy(toolbarHandlers["preview"], _this)();
return false;
break;
case 122:
$.proxy(toolbarHandlers["fullscreen"], _this)();
return false;
break;
default:
break;
}
}
});
}
return this;
},
        /**
         * Bind synchronized scrolling between the CodeMirror pane and the
         * preview pane. Scroll handlers are attached only while the pointer
         * (or touch) is over a pane, so the two panes do not feed back into
         * each other.
         *
         * @returns {editormd} this instance, for chaining
         */
        bindScrollEvent : function() {
            var _this = this;
            var preview = this.preview;
            var settings = this.settings;
            var codeMirror = this.codeMirror;
            var mouseOrTouch = editormd.mouseOrTouch;
            if (!settings.syncScrolling) {
                return this;
            }
            var cmBindScroll = function() {
                codeMirror.find(".CodeMirror-scroll").bind(mouseOrTouch("scroll", "touchmove"), function(event) {
                    var height = $(this).height();
                    var scrollTop = $(this).scrollTop();
                    var percent = (scrollTop / $(this)[0].scrollHeight);
                    if (scrollTop === 0)
                    {
                        preview.scrollTop(0);
                    }
                    // Near the bottom (16px slack): snap the preview to its end.
                    else if (scrollTop + height >= $(this)[0].scrollHeight - 16)
                    {
                        preview.scrollTop(preview[0].scrollHeight);
                    }
                    else
                    {
                        // Otherwise scroll the preview proportionally.
                        preview.scrollTop(preview[0].scrollHeight * percent);
                    }
                    $.proxy(settings.onscroll, _this)(event);
                });
            };
            var cmUnbindScroll = function() {
                codeMirror.find(".CodeMirror-scroll").unbind(mouseOrTouch("scroll", "touchmove"));
            };
            var previewBindScroll = function() {
                preview.bind(mouseOrTouch("scroll", "touchmove"), function(event) {
                    var height = $(this).height();
                    var scrollTop = $(this).scrollTop();
                    var percent = (scrollTop / $(this)[0].scrollHeight);
                    var codeView = codeMirror.find(".CodeMirror-scroll");
                    if(scrollTop === 0)
                    {
                        codeView.scrollTop(0);
                    }
                    else if (scrollTop + height >= $(this)[0].scrollHeight)
                    {
                        codeView.scrollTop(codeView[0].scrollHeight);
                    }
                    else
                    {
                        codeView.scrollTop(codeView[0].scrollHeight * percent);
                    }
                    $.proxy(settings.onpreviewscroll, _this)(event);
                });
            };
            var previewUnbindScroll = function() {
                preview.unbind(mouseOrTouch("scroll", "touchmove"));
            };
            // Attach/detach the scroll handlers as the pointer enters/leaves.
            codeMirror.bind({
                mouseover : cmBindScroll,
                mouseout : cmUnbindScroll,
                touchstart : cmBindScroll,
                touchend : cmUnbindScroll
            });
            preview.bind({
                mouseover : previewBindScroll,
                mouseout : previewUnbindScroll,
                touchstart : previewBindScroll,
                touchend : previewUnbindScroll
            });
            return this;
        },
        /**
         * Bind the CodeMirror "change" event: debounce a save()/re-render
         * by settings.delay milliseconds via the shared closure `timer`.
         *
         * NOTE(review): this early-returns when settings.syncScrolling is
         * false, which also disables the debounced save-on-change — verify
         * that is intentional and not a guard copied from bindScrollEvent.
         *
         * @returns {editormd} this instance, for chaining
         */
        bindChangeEvent : function() {
            var _this = this;
            var cm = this.cm;
            var settings = this.settings;
            if (!settings.syncScrolling) {
                return this;
            }
            cm.on("change", function(_cm, changeObj) {
                if (settings.watch)
                {
                    _this.previewContainer.css("padding", settings.autoHeight ? "20px 20px 50px 40px" : "20px");
                }
                // Debounce: restart the countdown on every change.
                timer = setTimeout(function() {
                    clearTimeout(timer);
                    _this.save();
                    timer = null;
                }, settings.delay);
            });
            return this;
        },
        /**
         * Final display pass after all module queues have loaded.
         *
         * @param {Boolean} [recreate=false] true when called from recreate()
         * @returns {editormd} this instance, for chaining
         */
        loadedDisplay : function(recreate) {
            recreate = recreate || false;
            var _this = this;
            var editor = this.editor;
            var preview = this.preview;
            var settings = this.settings;
            this.containerMask.hide();
            this.save();
            if (settings.watch) {
                preview.show();
            }
            editor.data("oldWidth", editor.width()).data("oldHeight", editor.height()); // stored via data() for Zepto compatibility
            this.resize();
            this.registerKeyMaps();
            $(window).resize(function(){
                _this.resize();
            });
            this.bindScrollEvent().bindChangeEvent();
            // onload fires only on the first real load, not on recreate().
            if (!recreate)
            {
                $.proxy(settings.onload, this)();
            }
            this.state.loaded = true;
            return this;
        },
        /**
         * Set the editor width and re-run the layout.
         *
         * @param {Number|String} width width in px (Number) or any CSS width value
         * @returns {editormd} this instance, for chaining
         */
        width : function(width) {
            this.editor.css("width", (typeof width === "number") ? width + "px" : width);
            this.resize();
            return this;
        },
        /**
         * Set the editor height and re-run the layout.
         *
         * @param {Number|String} height height in px (Number) or any CSS height value
         * @returns {editormd} this instance, for chaining
         */
        height : function(height) {
            this.editor.css("height", (typeof height === "number") ? height + "px" : height);
            this.resize();
            return this;
        },
        /**
         * Recalculate the editor layout: editor frame, CodeMirror pane,
         * toolbar offset and preview pane.
         *
         * @param {Number|String} [width=null] new editor width
         * @param {Number|String} [height=null] new editor height
         * @returns {editormd} this instance, for chaining
         */
        resize : function(width, height) {
            width = width || null;
            height = height || null;
            var state = this.state;
            var editor = this.editor;
            var preview = this.preview;
            var toolbar = this.toolbar;
            var settings = this.settings;
            var codeMirror = this.codeMirror;
            if (width)
            {
                editor.css("width", (typeof width === "number") ? width + "px" : width);
            }
            // autoHeight grows with content, except in fullscreen/preview modes.
            if (settings.autoHeight && !state.fullscreen && !state.preview)
            {
                editor.css("height", "auto");
                codeMirror.css("height", "auto");
            }
            else
            {
                if (height)
                {
                    editor.css("height", (typeof height === "number") ? height + "px" : height);
                }
                if (state.fullscreen)
                {
                    editor.height($(window).height());
                }
                // Leave room for the toolbar above the CodeMirror pane.
                if (settings.toolbar && !settings.readOnly)
                {
                    codeMirror.css("margin-top", toolbar.height() + 1).height(editor.height() - toolbar.height());
                }
                else
                {
                    codeMirror.css("margin-top", 0).height(editor.height());
                }
            }
            if(settings.watch)
            {
                // Watch mode: split the editor between source and preview.
                codeMirror.width(editor.width() / 2);
                preview.width((!state.preview) ? editor.width() / 2 : editor.width());
                this.previewContainer.css("padding", settings.autoHeight ? "20px 20px 50px 40px" : "20px");
                if (settings.toolbar && !settings.readOnly)
                {
                    preview.css("top", toolbar.height());
                }
                else
                {
                    preview.css("top", 0);
                }
                if (settings.autoHeight && !state.fullscreen && !state.preview)
                {
                    preview.height("");
                }
                else
                {
                    preview.height((settings.toolbar && !settings.readOnly) ? editor.height() - toolbar.height() : editor.height());
                }
            }
            else
            {
                codeMirror.width(editor.width());
                preview.hide();
            }
            if (state.loaded)
            {
                $.proxy(settings.onresize, this)();
            }
            return this;
        },
/**
* 解析和保存Markdown代码
* Parse & Saving Markdown source code
*
* @returns {editormd} 返回editormd的实例对象
*/
save : function() {
if (timer === null)
{
return this;
}
var _this = this;
var state = this.state;
var settings = this.settings;
var cm = this.cm;
var cmValue = cm.getValue();
var previewContainer = this.previewContainer;
if (settings.mode !== "gfm" && settings.mode !== "markdown")
{
this.markdownTextarea.val(cmValue);
return this;
}
var marked = editormd.$marked;
var markdownToC = this.markdownToC = [];
var rendererOptions = this.markedRendererOptions = {
toc : settings.toc,
tocm : settings.tocm,
tocStartLevel : settings.tocStartLevel,
pageBreak : settings.pageBreak,
taskList : settings.taskList,
emoji : settings.emoji,
tex : settings.tex,
atLink : settings.atLink, // for @link
emailLink : settings.emailLink, // for mail address auto link
flowChart : settings.flowChart,
sequenceDiagram : settings.sequenceDiagram,
previewCodeHighlight : settings.previewCodeHighlight,
};
var markedOptions = this.markedOptions = {
renderer : editormd.markedRenderer(markdownToC, rendererOptions),
gfm : true,
tables : true,
breaks : true,
pedantic : false,
sanitize : (settings.htmlDecode) ? false : true, // 关闭忽略HTML标签,即开启识别HTML标签,默认为false
smartLists : true,
smartypants : true
};
marked.setOptions(markedOptions);
cmValue = editormd.filterHTMLTags(cmValue, settings.htmlDecode);
var newMarkdownDoc = editormd.$marked(cmValue, markedOptions);
this.markdownTextarea.val(cmValue);
cm.save();
if (settings.saveHTMLToTextarea)
{
this.htmlTextarea.val(newMarkdownDoc);
}
if(settings.watch || (!settings.watch && state.preview))
{
previewContainer.html(newMarkdownDoc);
this.previewCodeHighlight();
if (settings.toc)
{
var tocContainer = (settings.tocContainer === "") ? previewContainer : $(settings.tocContainer);
var tocMenu = tocContainer.find("." + this.classPrefix + "toc-menu");
tocContainer.attr("previewContainer", (settings.tocContainer === "") ? "true" : "false");
if (settings.tocContainer !== "" && tocMenu.length > 0)
{
tocMenu.remove();
}
editormd.markdownToCRenderer(markdownToC, tocContainer, settings.tocDropdown, settings.tocStartLevel);
if (settings.tocDropdown || tocContainer.find("." + this.classPrefix + "toc-menu").length > 0)
{
editormd.tocDropdownMenu(tocContainer, (settings.tocTitle !== "") ? settings.tocTitle : this.lang.tocTitle);
}
if (settings.tocContainer !== "")
{
previewContainer.find(".markdown-toc").css("border", "none");
}
}
if (settings.tex)
{
if (!editormd.kaTeXLoaded && settings.autoLoadModules)
{
editormd.loadKaTeX(function() {
editormd.$katex = katex;
editormd.kaTeXLoaded = true;
_this.katexRender();
});
}
else
{
editormd.$katex = katex;
this.katexRender();
}
}
if (settings.flowChart || settings.sequenceDiagram)
{
flowchartTimer = setTimeout(function(){
clearTimeout(flowchartTimer);
_this.flowChartAndSequenceDiagramRender();
flowchartTimer = null;
}, 10);
}
if (state.loaded)
{
$.proxy(settings.onchange, this)();
}
}
return this;
},
        /**
         * Focus the CodeMirror editor at the current cursor position.
         *
         * @returns {editormd} this instance, for chaining
         */
        focus : function() {
            this.cm.focus();
            return this;
        },
        /**
         * Set the cursor position.
         *
         * @param {Object} cursor position object, e.g. {line:1, ch:0}
         * @returns {editormd} this instance, for chaining
         */
        setCursor : function(cursor) {
            this.cm.setCursor(cursor);
            return this;
        },
        /**
         * Get the current cursor position.
         *
         * @returns {Cursor} CodeMirror cursor object
         */
        getCursor : function() {
            return this.cm.getCursor();
        },
        /**
         * Select a range of text.
         *
         * @param {Object} from start position, e.g. {line:1, ch:0}
         * @param {Object} to end position, e.g. {line:1, ch:5}
         * @returns {editormd} this instance, for chaining
         */
        setSelection : function(from, to) {
            this.cm.setSelection(from, to);
            return this;
        },
        /**
         * Get the currently selected text.
         *
         * @returns {String} selected text (empty string when nothing is selected)
         */
        getSelection : function() {
            return this.cm.getSelection();
        },
        /**
         * Set multiple selection ranges.
         *
         * @param {Array} ranges CodeMirror selection ranges
         * @returns {editormd} this instance, for chaining
         */
        setSelections : function(ranges) {
            this.cm.setSelections(ranges);
            return this;
        },
        /**
         * Get all selection ranges.
         *
         * @returns {Array} CodeMirror selection ranges
         */
        getSelections : function() {
            return this.cm.getSelections();
        },
        /**
         * Replace the current selection (or insert at the cursor when
         * nothing is selected).
         *
         * @param {String} value replacement text
         * @returns {editormd} this instance, for chaining
         */
        replaceSelection : function(value) {
            this.cm.replaceSelection(value);
            return this;
        },
        /**
         * Insert text at the current cursor position.
         * Alias of replaceSelection().
         *
         * @param {String} value text to insert
         * @returns {editormd} this instance, for chaining
         */
        insertValue : function(value) {
            this.replaceSelection(value);
            return this;
        },
/**
* 追加markdown
* append Markdown to editor
*
* @param {String} md 要追加的markdown源文档
* @returns {editormd} 返回editormd的实例对象
*/
appendMarkdown : function(md) {
var settings = this.settings;
var cm = this.cm;
cm.setValue(cm.getValue() + md);
return this;
},
        /**
         * Replace the whole document with new markdown source.
         *
         * @param {String} md markdown source; falls back to settings.markdown when falsy
         * @returns {editormd} this instance, for chaining
         */
        setMarkdown : function(md) {
            this.cm.setValue(md || this.settings.markdown);
            return this;
        },
        /**
         * Get the markdown source (the raw CodeMirror value).
         *
         * @returns {String} markdown source
         */
        getMarkdown : function() {
            return this.cm.getValue();
        },
        /**
         * Get the raw CodeMirror value (same result as getMarkdown()).
         *
         * @returns {String} editor content
         */
        getValue : function() {
            return this.cm.getValue();
        },
        /**
         * Set the raw CodeMirror value.
         *
         * @param {String} value new editor content
         * @returns {editormd} this instance, for chaining
         */
        setValue : function(value) {
            this.cm.setValue(value);
            return this;
        },
        /**
         * Empty the editor.
         *
         * @returns {editormd} this instance, for chaining
         */
        clear : function() {
            this.cm.setValue("");
            return this;
        },
        /**
         * Get the parsed HTML stored in the hidden textarea.
         * Requires settings.saveHTMLToTextarea.
         *
         * @returns {String|Boolean} HTML source, or false (after alert) when disabled
         */
        getHTML : function() {
            if (!this.settings.saveHTMLToTextarea)
            {
                alert("Error: settings.saveHTMLToTextarea == false");
                return false;
            }
            return this.htmlTextarea.html();
        },
        /**
         * Alias of getHTML().
         *
         * @returns {String|Boolean} HTML source, or false when saving is disabled
         */
        getTextareaSavedHTML : function() {
            return this.getHTML();
        },
        /**
         * Get the HTML currently rendered in the preview container.
         * Requires settings.watch.
         *
         * @returns {String|Boolean} HTML source, or false (after alert) when watch is off
         */
        getPreviewedHTML : function() {
            if (!this.settings.watch)
            {
                alert("Error: settings.watch == false");
                return false;
            }
            return this.previewContainer.html();
        },
        /**
         * Enable real-time preview (watch mode): show the preview pane,
         * swap the toolbar icon, force an immediate save()/render and
         * fire settings.onwatch.
         *
         * @param {Function} [callback] used as settings.onwatch when none is set
         * @returns {editormd} this instance, for chaining
         */
        watch : function(callback) {
            var settings = this.settings;
            // Watching only makes sense for markdown modes.
            if ($.inArray(settings.mode, ["gfm", "markdown"]) < 0)
            {
                return this;
            }
            this.state.watching = settings.watch = true;
            this.preview.show();
            if (this.toolbar)
            {
                var watchIcon = settings.toolbarIconsClass.watch;
                var unWatchIcon = settings.toolbarIconsClass.unwatch;
                var icon = this.toolbar.find(".fa[name=watch]");
                icon.parent().attr("title", settings.lang.toolbar.watch);
                icon.removeClass(unWatchIcon).addClass(watchIcon);
            }
            this.codeMirror.css("border-right", "1px solid #ddd").width(this.editor.width() / 2);
            // Reset the shared debounce handle so save() does not early-return.
            timer = 0;
            this.save().resize();
            if (!settings.onwatch)
            {
                settings.onwatch = callback || function() {};
            }
            $.proxy(settings.onwatch, this)();
            return this;
        },
        /**
         * Disable real-time preview: hide the preview pane, swap the
         * toolbar icon back and fire settings.onunwatch.
         *
         * @param {Function} [callback] used as settings.onunwatch when none is set
         * @returns {editormd} this instance, for chaining
         */
        unwatch : function(callback) {
            var settings = this.settings;
            this.state.watching = settings.watch = false;
            this.preview.hide();
            if (this.toolbar)
            {
                var watchIcon = settings.toolbarIconsClass.watch;
                var unWatchIcon = settings.toolbarIconsClass.unwatch;
                var icon = this.toolbar.find(".fa[name=watch]");
                icon.parent().attr("title", settings.lang.toolbar.unwatch);
                icon.removeClass(watchIcon).addClass(unWatchIcon);
            }
            // Source pane reclaims the full editor width.
            this.codeMirror.css("border-right", "none").width(this.editor.width());
            this.resize();
            if (!settings.onunwatch)
            {
                settings.onunwatch = callback || function() {};
            }
            $.proxy(settings.onunwatch, this)();
            return this;
        },
        /**
         * Show the editor.
         *
         * @param {Function} [callback] invoked (bound to this) after showing
         * @returns {editormd} this instance, for chaining
         */
        show : function(callback) {
            callback = callback || function() {};
            var _this = this;
            this.editor.show(0, function() {
                $.proxy(callback, _this)();
            });
            return this;
        },
        /**
         * Hide the editor.
         *
         * @param {Function} [callback] invoked (bound to this) after hiding
         * @returns {editormd} this instance, for chaining
         */
        hide : function(callback) {
            callback = callback || function() {};
            var _this = this;
            this.editor.hide(0, function() {
                $.proxy(callback, _this)();
            });
            return this;
        },
        /**
         * Enter HTML-preview-only mode: hide the CodeMirror pane (and
         * toolbar) and stretch the preview across the editor. Calling it
         * again — or Shift+ESC / the close button — leaves the mode via
         * previewed().
         *
         * NOTE(review): unlike sibling methods this does not `return this`;
         * also, `escHandle` is a fresh closure per call, so the
         * `unbind("keyup", escHandle)` in the else-branch targets a
         * different function instance than the one previously bound —
         * confirm whether stale window handlers can accumulate.
         */
        previewing : function() {
            var _this = this;
            var editor = this.editor;
            var preview = this.preview;
            var toolbar = this.toolbar;
            var settings = this.settings;
            var codeMirror = this.codeMirror;
            if ($.inArray(settings.mode, ["gfm", "markdown"]) < 0) {
                return this;
            }
            if (settings.toolbar && toolbar) {
                toolbar.toggle();
                toolbar.find(".fa[name=preview]").toggleClass("active");
            }
            codeMirror.toggle();
            var escHandle = function(event) {
                if (event.shiftKey && event.keyCode === 27) {
                    _this.previewed();
                }
            };
            if (codeMirror.css("display") === "none") // display check instead of codeMirror.is(":hidden"), for Zepto compatibility
            {
                this.state.preview = true;
                if (this.state.fullscreen) {
                    preview.css("background", "#fff");
                }
                editor.find("." + this.classPrefix + "preview-close-btn").show().bind(editormd.mouseOrTouch("click", "touchend"), function(){
                    _this.previewed();
                });
                // Without watch mode the preview may be stale; render it now.
                if (!settings.watch)
                {
                    this.save();
                }
                preview.show().css({
                    position : "static",
                    top : 0,
                    width : editor.width(),
                    height : (settings.autoHeight && !this.state.fullscreen) ? "auto" : editor.height()
                });
                if (this.state.loaded)
                {
                    $.proxy(settings.onpreviewing, this)();
                }
                $(window).bind("keyup", escHandle);
            }
            else
            {
                $(window).unbind("keyup", escHandle);
                this.previewed();
            }
        },
        /**
         * Leave HTML-preview-only mode: restore the CodeMirror pane, the
         * toolbar and the preview pane's normal split layout.
         *
         * @returns {editormd} this instance, for chaining
         */
        previewed : function() {
            var editor = this.editor;
            var preview = this.preview;
            var toolbar = this.toolbar;
            var settings = this.settings;
            var previewCloseBtn = editor.find("." + this.classPrefix + "preview-close-btn");
            this.state.preview = false;
            this.codeMirror.show();
            if (settings.toolbar) {
                toolbar.show();
            }
            // The preview stays visible only while watch mode is on.
            preview[(settings.watch) ? "show" : "hide"]();
            previewCloseBtn.hide().unbind(editormd.mouseOrTouch("click", "touchend"));
            preview.css({
                background : null,
                position : "absolute",
                width : editor.width() / 2,
                height : (settings.autoHeight && !this.state.fullscreen) ? "auto" : editor.height() - toolbar.height(),
                top : (settings.toolbar) ? toolbar.height() : 0
            });
            if (this.state.loaded)
            {
                $.proxy(settings.onpreviewed, this)();
            }
            return this;
        },
        /**
         * Toggle fullscreen mode. ESC (without Shift) exits while active.
         *
         * NOTE(review): `escHandle` is a fresh closure per call, so the
         * `unbind("keyup", escHandle)` in the else-branch does not remove
         * the handler added by the previous invocation — confirm whether
         * stale window handlers can accumulate.
         *
         * @returns {editormd} this instance, for chaining
         */
        fullscreen : function() {
            var _this = this;
            var state = this.state;
            var editor = this.editor;
            var preview = this.preview;
            var toolbar = this.toolbar;
            var settings = this.settings;
            var fullscreenClass = this.classPrefix + "fullscreen";
            if (toolbar) {
                toolbar.find(".fa[name=fullscreen]").parent().toggleClass("active");
            }
            var escHandle = function(event) {
                if (!event.shiftKey && event.keyCode === 27)
                {
                    if (state.fullscreen)
                    {
                        _this.fullscreenExit();
                    }
                }
            };
            if (!editor.hasClass(fullscreenClass))
            {
                state.fullscreen = true;
                // Suppress page scrollbars while the editor fills the window.
                $("html,body").css("overflow", "hidden");
                editor.css({
                    position : "fixed",
                    top : 0,
                    left : 0,
                    margin : 0,
                    border : "none",
                    width : $(window).width(),
                    height : $(window).height()
                }).addClass(fullscreenClass);
                this.resize();
                $.proxy(settings.onfullscreen, this)();
                $(window).bind("keyup", escHandle);
            }
            else
            {
                $(window).unbind("keyup", escHandle);
                this.fullscreenExit();
            }
            return this;
        },
        /**
         * Leave fullscreen mode and restore the pre-fullscreen geometry
         * saved in loadedDisplay() (data("oldWidth"/"oldHeight")).
         *
         * @returns {editormd} this instance, for chaining
         */
        fullscreenExit : function() {
            var editor = this.editor;
            var settings = this.settings;
            var toolbar = this.toolbar;
            var fullscreenClass = this.classPrefix + "fullscreen";
            this.state.fullscreen = false;
            if (toolbar) {
                toolbar.find(".fa[name=fullscreen]").parent().removeClass("active");
            }
            $("html,body").css("overflow", "");
            editor.css({
                position : "",
                top : "",
                left : "",
                margin : "0 auto 15px",
                width : editor.data("oldWidth"),
                height : editor.data("oldHeight"),
                border : "1px solid #ddd"
            }).removeClass(fullscreenClass);
            this.resize();
            $.proxy(settings.onfullscreenExit, this)();
            return this;
        },
        /**
         * Load (if needed) and run a plugin against the CodeMirror instance.
         *
         * @param {String} name plugin method name on this instance
         * @param {String} path plugin path, relative to settings.pluginPath
         * @returns {editormd} this instance, for chaining
         */
        executePlugin : function(name, path) {
            var _this = this;
            var cm = this.cm;
            var settings = this.settings;
            path = settings.pluginPath + path;
            // Under AMD (RequireJS) plugins must already be loaded as deps.
            if (typeof define === "function")
            {
                if (typeof this[name] === "undefined")
                {
                    alert("Error: " + name + " plugin is not found, you are not load this plugin.");
                    return this;
                }
                this[name](cm);
                return this;
            }
            // Plain-script mode: lazy-load the plugin file once, then cache it.
            if ($.inArray(path, editormd.loadFiles.plugin) < 0)
            {
                editormd.loadPlugin(path, function() {
                    editormd.loadPlugins[name] = _this[name];
                    _this[name](cm);
                });
            }
            else
            {
                $.proxy(editormd.loadPlugins[name], this)(cm);
            }
            return this;
        },
        /**
         * Run a CodeMirror search command.
         * Requires settings.searchReplace; no-op while read-only.
         *
         * @param {String} [command="find"] one of: find, findNext, findPrev,
         *                 clearSearch, replace, replaceAll
         * @returns {editormd} this instance, for chaining
         */
        search : function(command) {
            var settings = this.settings;
            if (!settings.searchReplace)
            {
                alert("Error: settings.searchReplace == false");
                return this;
            }
            if (!settings.readOnly)
            {
                this.cm.execCommand(command || "find");
            }
            return this;
        },
        /**
         * Open the search-and-replace dialog (single replace).
         *
         * @returns {editormd} this instance, for chaining
         */
        searchReplace : function() {
            this.search("replace");
            return this;
        },
        /**
         * Open the search-and-replace-all dialog.
         *
         * @returns {editormd} this instance, for chaining
         */
        searchReplaceAll : function() {
            this.search("replaceAll");
            return this;
        }
};
editormd.fn.init.prototype = editormd.fn;
/**
* 锁屏
* lock screen when dialog opening
*
* @returns {void}
*/
editormd.dialogLockScreen = function() {
var settings = this.settings || {dialogLockScreen : true};
if (settings.dialogLockScreen)
{
$("html,body").css("overflow", "hidden");
}
};
/**
* 显示透明背景层
* Display mask layer when dialog opening
*
* @param {Object} dialog dialog jQuery object
* @returns {void}
*/
editormd.dialogShowMask = function(dialog) {
var editor = this.editor;
var settings = this.settings || {dialogShowMask : true};
dialog.css({
top : ($(window).height() - dialog.height()) / 2 + "px",
left : ($(window).width() - dialog.width()) / 2 + "px"
});
if (settings.dialogShowMask) {
editor.children("." + this.classPrefix + "mask").css("z-index", parseInt(dialog.css("z-index")) - 1).show();
}
};
editormd.toolbarHandlers = {
undo : function() {
this.cm.undo();
},
redo : function() {
this.cm.redo();
},
bold : function() {
var cm = this.cm;
var cursor = cm.getCursor();
var selection = cm.getSelection();
cm.replaceSelection("**" + selection + "**");
if(selection === "") {
cm.setCursor(cursor.line, cursor.ch + 2);
}
},
del : function() {
var cm = this.cm;
var cursor = cm.getCursor();
var selection = cm.getSelection();
cm.replaceSelection("~~" + selection + "~~");
if(selection === "") {
cm.setCursor(cursor.line, cursor.ch + 2);
}
},
italic : function() {
var cm = this.cm;
var cursor = cm.getCursor();
var selection = cm.getSelection();
cm.replaceSelection("*" + selection + "*");
if(selection === "") {
cm.setCursor(cursor.line, cursor.ch + 1);
}
},
quote : function() {
var cm = this.cm;
var cursor = cm.getCursor();
var selection = cm.getSelection();
cm.replaceSelection("> " + selection);
cm.setCursor(cursor.line, (selection === "") ? cursor.ch + 2 : cursor.ch + selection.length + 2);
},
ucfirst : function() {
var cm = this.cm;
var selection = cm.getSelection();
var selections = cm.listSelections();
cm.replaceSelection(editormd.firstUpperCase(selection));
cm.setSelections(selections);
},
ucwords : function() {
var cm = this.cm;
var selection = cm.getSelection();
var selections = cm.listSelections();
cm.replaceSelection(editormd.wordsFirstUpperCase(selection));
cm.setSelections(selections);
},
uppercase : function() {
var cm = this.cm;
var selection = cm.getSelection();
var selections = cm.listSelections();
cm.replaceSelection(selection.toUpperCase());
cm.setSelections(selections);
},
lowercase : function() {
var cm = this.cm;
var cursor = cm.getCursor();
var selection = cm.getSelection();
var selections = cm.listSelections();
cm.replaceSelection(selection.toLowerCase());
cm.setSelections(selections);
},
h1 : function() {
var cm = this.cm;
var selection = cm.getSelection();
cm.replaceSelection("# " + selection);
},
h2 : function() {
var cm = this.cm;
var selection = cm.getSelection();
cm.replaceSelection("## " + selection);
},
h3 : function() {
var cm = this.cm;
var selection = cm.getSelection();
cm.replaceSelection("### " + selection);
},
h4 : function() {
var cm = this.cm;
var selection = cm.getSelection();
cm.replaceSelection("#### " + selection);
},
h5 : function() {
var cm = this.cm;
var selection = cm.getSelection();
cm.replaceSelection("##### " + selection);
},
// --- Toolbar handlers (tail of editormd.toolbarHandlers). ---
// Each handler runs with `this` bound to the editor instance; `this.cm`
// is the underlying CodeMirror editor.

// Insert a level-6 heading marker before the current selection.
h6 : function() {
    var cm = this.cm;
    var selection = cm.getSelection();
    cm.replaceSelection("###### " + selection);
},

// Unordered list: prefix "- " to an empty selection, or to each
// non-empty line of a multi-line selection.
"list-ul" : function() {
    var cm = this.cm;
    var cursor = cm.getCursor();
    var selection = cm.getSelection();
    if (selection === "")
    {
        cm.replaceSelection("- " + selection);
    }
    else
    {
        var selectionText = selection.split("\n");
        for (var i = 0, len = selectionText.length; i < len; i++)
        {
            selectionText[i] = (selectionText[i] === "") ? "" : "- " + selectionText[i];
        }
        cm.replaceSelection(selectionText.join("\n"));
    }
},

// Ordered list: number each non-empty selected line "1.", "2.", ...
"list-ol" : function() {
    var cm = this.cm;
    var cursor = cm.getCursor();
    var selection = cm.getSelection();
    if(selection === "")
    {
        cm.replaceSelection("1. " + selection);
    }
    else
    {
        var selectionText = selection.split("\n");
        for (var i = 0, len = selectionText.length; i < len; i++)
        {
            selectionText[i] = (selectionText[i] === "") ? "" : (i+1) + ". " + selectionText[i];
        }
        cm.replaceSelection(selectionText.join("\n"));
    }
},

// Horizontal rule: the selection is read but intentionally replaced
// outright by the rule markup.
hr : function() {
    var cm = this.cm;
    var cursor = cm.getCursor();
    var selection = cm.getSelection();
    cm.replaceSelection("------------");
},

// Wrap the selection in $$...$$ (KaTeX); requires settings.tex.
tex : function() {
    if (!this.settings.tex)
    {
        alert("settings.tex === false");
        return this;
    }
    var cm = this.cm;
    var cursor = cm.getCursor();
    var selection = cm.getSelection();
    cm.replaceSelection("$$" + selection + "$$");
    if(selection === "") {
        // Place the caret between the $$ pairs.
        cm.setCursor(cursor.line, cursor.ch + 2);
    }
},

// The remaining handlers delegate to lazily-loaded dialog plugins.
link : function() {
    this.executePlugin("linkDialog", "link-dialog/link-dialog");
},
"reference-link" : function() {
    this.executePlugin("referenceLinkDialog", "reference-link-dialog/reference-link-dialog");
},

// Insert a page-break marker ("[========]"); requires settings.pageBreak.
pagebreak : function() {
    if (!this.settings.pageBreak)
    {
        alert("settings.pageBreak === false");
        return this;
    }
    var cm = this.cm;
    var selection = cm.getSelection();
    cm.replaceSelection("\r\n[========]\r\n");
},
image : function() {
    this.executePlugin("imageDialog", "image-dialog/image-dialog");
},

// Inline code span: wrap selection in backticks.
code : function() {
    var cm = this.cm;
    var cursor = cm.getCursor();
    var selection = cm.getSelection();
    cm.replaceSelection("`" + selection + "`");
    if (selection === "") {
        cm.setCursor(cursor.line, cursor.ch + 1);
    }
},
"code-block" : function() {
    this.executePlugin("codeBlockDialog", "code-block-dialog/code-block-dialog");
},
"preformatted-text" : function() {
    this.executePlugin("preformattedTextDialog", "preformatted-text-dialog/preformatted-text-dialog");
},
table : function() {
    this.executePlugin("tableDialog", "table-dialog/table-dialog");
},

// Insert the current date/time; week-day label is localized for zh-cn/zh-tw.
datetime : function() {
    var cm = this.cm;
    var selection = cm.getSelection();
    var date = new Date();
    var langName = this.settings.lang.name;
    var datefmt = editormd.dateFormat() + " " + editormd.dateFormat((langName === "zh-cn" || langName === "zh-tw") ? "cn-week-day" : "week-day");
    cm.replaceSelection(datefmt);
},
emoji : function() {
    this.executePlugin("emojiDialog", "emoji-dialog/emoji-dialog");
},
"html-entities" : function() {
    this.executePlugin("htmlEntitiesDialog", "html-entities-dialog/html-entities-dialog");
},
"goto-line" : function() {
    this.executePlugin("gotoLineDialog", "goto-line-dialog/goto-line-dialog");
},

// Toggle live-preview watching: calls unwatch() when currently watching
// and vice versa.
watch : function() {
    this[this.settings.watch ? "unwatch" : "watch"]();
},
preview : function() {
    this.previewing();
},
fullscreen : function() {
    this.fullscreen();
},
clear : function() {
    this.clear();
},
search : function() {
    this.search();
},
help : function() {
    this.executePlugin("helpDialog", "help-dialog/help-dialog");
},
info : function() {
    this.showInfoDialog();
}
};
// Keyboard shortcut map. A string value names an editormd.toolbarHandlers
// entry; a function value is invoked directly with the editor instance as
// `this`. Note: no trailing comma after the last entry — IE8 (which this
// code base still supports, see editormd.isIE8) treats it as a syntax error.
editormd.keyMaps = {
    "Ctrl-1" : "h1",
    "Ctrl-2" : "h2",
    "Ctrl-3" : "h3",
    "Ctrl-4" : "h4",
    "Ctrl-5" : "h5",
    "Ctrl-6" : "h6",
    "Ctrl-B" : "bold", // if this is string == editormd.toolbarHandlers.xxxx
    "Ctrl-D" : "datetime",

    // Emoji: wrap the selection in colons, e.g. :smile:
    "Ctrl-E" : function() {
        var cm = this.cm;
        var cursor = cm.getCursor();
        var selection = cm.getSelection();

        if (!this.settings.emoji)
        {
            alert("Error: settings.emoji == false");
            return ;
        }

        cm.replaceSelection(":" + selection + ":");

        if (selection === "") {
            cm.setCursor(cursor.line, cursor.ch + 1);
        }
    },
    "Ctrl-Alt-G" : "goto-line",
    "Ctrl-H" : "hr",
    "Ctrl-I" : "italic",
    "Ctrl-K" : "code",

    // Quick link: [selection]( "selection") with the caret left inside
    // the brackets when nothing was selected.
    "Ctrl-L" : function() {
        var cm = this.cm;
        var cursor = cm.getCursor();
        var selection = cm.getSelection();
        var title = (selection === "") ? "" : " \""+selection+"\"";

        cm.replaceSelection("[" + selection + "]("+title+")");

        if (selection === "") {
            cm.setCursor(cursor.line, cursor.ch + 1);
        }
    },
    "Ctrl-U" : "list-ul",

    // @-mention: prefix the selection with "@"; requires settings.atLink.
    "Shift-Ctrl-A" : function() {
        var cm = this.cm;
        var cursor = cm.getCursor();
        var selection = cm.getSelection();

        if (!this.settings.atLink)
        {
            alert("Error: settings.atLink == false");
            return ;
        }

        cm.replaceSelection("@" + selection);

        if (selection === "") {
            cm.setCursor(cursor.line, cursor.ch + 1);
        }
    },
    "Shift-Ctrl-C" : "code",
    "Shift-Ctrl-Q" : "quote",
    "Shift-Ctrl-S" : "del",
    "Shift-Ctrl-K" : "tex", // KaTeX

    // Fenced code block: wrap the selection in ``` fences.
    "Shift-Alt-C" : function() {
        var cm = this.cm;
        var cursor = cm.getCursor();
        var selection = cm.getSelection();

        cm.replaceSelection(["```", selection, "```"].join("\n"));

        if (selection === "") {
            cm.setCursor(cursor.line, cursor.ch + 3);
        }
    },
    "Shift-Ctrl-Alt-C" : "code-block",
    "Shift-Ctrl-H" : "html-entities",
    "Shift-Alt-H" : "help",
    "Shift-Ctrl-E" : "emoji",
    "Shift-Ctrl-U" : "uppercase",
    "Shift-Alt-U" : "ucwords",
    "Shift-Ctrl-Alt-U" : "ucfirst",
    "Shift-Alt-L" : "lowercase",

    // Quick image: insert Markdown image syntax. With an empty selection
    // the caret is placed inside the parentheses (offset +4, i.e. after
    // "![](") ready for the URL.
    "Shift-Ctrl-I" : function() {
        var cm = this.cm;
        var cursor = cm.getCursor();
        var selection = cm.getSelection();
        var title = (selection === "") ? "" : " \""+selection+"\"";

        cm.replaceSelection("![" + selection + "](" + title + ")");

        if (selection === "") {
            cm.setCursor(cursor.line, cursor.ch + 4);
        }
    },
    "Shift-Ctrl-Alt-I" : "image",
    "Shift-Ctrl-L" : "link",
    "Shift-Ctrl-O" : "list-ol",
    "Shift-Ctrl-P" : "preformatted-text",
    "Shift-Ctrl-T" : "table",
    "Shift-Alt-P" : "pagebreak",
    "F9" : "watch",
    "F10" : "preview",
    "F11" : "fullscreen"
};
/**
* 清除字符串两边的空格
* Clear the space of strings both sides.
*
* @param {String} str string
* @returns {String} trimed string
*/
// Strip leading/trailing whitespace (incl. BOM and NBSP). Prefers the
// native String#trim when the runtime provides it; falls back to a regex
// for very old engines.
var trim = function(str) {
    if (String.prototype.trim) {
        return str.trim();
    }
    return str.replace(/^[\s\uFEFF\xA0]+|[\s\uFEFF\xA0]+$/g, "");
};
editormd.trim = trim;
/**
* 所有单词首字母大写
* Words first to uppercase
*
* @param {String} str string
* @returns {String} string
*/
// Lower-case the whole string, then capitalize the first letter of every
// word (a letter at a word boundary, or one that follows a space).
var ucwords = function (str) {
    var lowered = str.toLowerCase();
    return lowered.replace(/\b(\w)|\s(\w)/g, function (chunk) {
        return chunk.toUpperCase();
    });
};
editormd.ucwords = editormd.wordsFirstUpperCase = ucwords;
/**
* 字符串首字母大写
* Only string first char to uppercase
*
* @param {String} str string
* @returns {String} string
*/
// Lower-case the whole string, then capitalize only its first word
// character (non-global replace).
var firstUpperCase = function(str) {
    var lowered = str.toLowerCase();
    return lowered.replace(/\b(\w)/, function (ch) {
        return ch.toUpperCase();
    });
};
// PHP-style alias, exposed on the namespace under both names.
var ucfirst = firstUpperCase;
editormd.firstUpperCase = editormd.ucfirst = firstUpperCase;
// Base URL used when linkifying "@user" mentions.
editormd.urls = {
    atLinkBase : "https://github.com/"
};
// Shared regular expressions used by the custom marked renderer below.
editormd.regexs = {
    // "@user" mention
    atLink : /@(\w+)/g,
    // bare e-mail address (2 or 3 domain labels)
    email : /(\w+)@(\w+)\.(\w+)\.?(\w+)?/g,
    // e-mail, optionally already prefixed with "mailto:"
    emailLink : /(mailto:)?([\w\.\_]+)@(\w+)\.(\w+)\.?(\w+)?/g,
    // :name: emoji token
    emoji : /:([\+-\w]+):/g,
    // hh:mm:ss — used to shield datetimes from emoji parsing
    emojiDatetime : /(\d{2}:\d{2}:\d{2})/g,
    // :tw-xxx: Twitter emoji token
    twemoji : /:(tw-([\w]+)-?(\w+)?):/g,
    // :fa-xxx: FontAwesome icon token
    fontAwesome : /:(fa-([\w]+)(-(\w+)){0,}):/g,
    // :editormd-logo-xxx: token
    editormdLogo : /:(editormd-logo-?(\w+)?):/g,
    // whole-line "[========]" page-break marker (8+ equals signs)
    pageBreak : /^\[[=]{8,}\]$/
};
// Emoji graphics files url path
editormd.emoji = {
    path : "http://www.emoji-cheat-sheet.com/graphics/emojis/",
    ext : ".png"
};
// Twitter Emoji (Twemoji) graphics files url path
editormd.twemoji = {
    path : "http://twemoji.maxcdn.com/36x36/",
    ext : ".png"
};
/**
* 自定义marked的解析器
* Custom Marked renderer rules
*
* @param {Array} markdownToC 传入用于接收TOC的数组
* @returns {Renderer} markedRenderer 返回marked的Renderer自定义对象
*/
// Build a customized marked.Renderer. Collects headings into markdownToC
// and adds emoji / @-mention / TOC / page-break / task-list handling.
editormd.markedRenderer = function(markdownToC, options) {
    var defaults = {
        toc : true, // Table of contents
        tocm : false,
        tocStartLevel : 1, // Said from H1 to create ToC
        pageBreak : true,
        atLink : true, // for @link
        emailLink : true, // for mail address auto link
        taskList : false, // Enable Github Flavored Markdown task lists
        emoji : false, // :emoji: , Support Twemoji, fontAwesome, Editor.md logo emojis.
        tex : false, // TeX(LaTeX), based on KaTeX
        flowChart : false, // flowChart.js only support IE9+
        sequenceDiagram : false // sequenceDiagram.js only support IE9+ (no trailing comma: IE8 compatibility)
    };

    var settings = $.extend(defaults, options || {});
    var marked = editormd.$marked;
    var markedRenderer = new marked.Renderer();
    markdownToC = markdownToC || [];

    var regexs = editormd.regexs;
    var atLinkReg = regexs.atLink;
    var emojiReg = regexs.emoji;
    var emailReg = regexs.email;
    var emailLinkReg = regexs.emailLink;
    var twemojiReg = regexs.twemoji;
    var faIconReg = regexs.fontAwesome;
    var editormdLogoReg = regexs.editormdLogo;
    var pageBreakReg = regexs.pageBreak;

    // Replace :name:, :fa-xxx:, :editormd-logo-xxx: and :tw-xxx: tokens
    // with the corresponding <img>/<i> markup.
    markedRenderer.emoji = function(text) {
        // Escape the colons of hh:mm:ss datetimes to their HTML entity
        // (&#58;) so they are not mistaken for emoji delimiters.
        text = text.replace(editormd.regexs.emojiDatetime, function($1) {
            return $1.replace(/:/g, "&#58;");
        });

        var matchs = text.match(emojiReg);

        if (!matchs || !settings.emoji) {
            return text;
        }

        for (var i = 0, len = matchs.length; i < len; i++)
        {
            // "+" is a regex metacharacter; escape it before the match is
            // fed to new RegExp() below.
            if (matchs[i] === ":+1:") {
                matchs[i] = ":\\+1:";
            }

            text = text.replace(new RegExp(matchs[i]), function($1, $2){
                var faMatchs = $1.match(faIconReg);
                var name = $1.replace(/:/g, "");

                if (faMatchs)
                {
                    for (var fa = 0, len1 = faMatchs.length; fa < len1; fa++)
                    {
                        var faName = faMatchs[fa].replace(/:/g, "");
                        return "<i class=\"fa " + faName + " fa-emoji\" title=\"" + faName.replace("fa-", "") + "\"></i>";
                    }
                }
                else
                {
                    var emdlogoMathcs = $1.match(editormdLogoReg);
                    var twemojiMatchs = $1.match(twemojiReg);

                    if (emdlogoMathcs)
                    {
                        for (var x = 0, len2 = emdlogoMathcs.length; x < len2; x++)
                        {
                            var logoName = emdlogoMathcs[x].replace(/:/g, "");
                            return "<i class=\"" + logoName + "\" title=\"Editor.md logo (" + logoName + ")\"></i>";
                        }
                    }
                    else if (twemojiMatchs)
                    {
                        for (var t = 0, len3 = twemojiMatchs.length; t < len3; t++)
                        {
                            var twe = twemojiMatchs[t].replace(/:/g, "").replace("tw-", "");
                            return "<img src=\"" + editormd.twemoji.path + twe + editormd.twemoji.ext + "\" title=\"twemoji-" + twe + "\" alt=\"twemoji-" + twe + "\" class=\"emoji twemoji\" />";
                        }
                    }
                    else
                    {
                        // Some emoji image files are named differently from
                        // their tokens.
                        var src = (name === "+1") ? "plus1" : name;
                        src = (src === "black_large_square") ? "black_square" : src;
                        return "<img src=\"" + editormd.emoji.path + src + editormd.emoji.ext + "\" class=\"emoji\" title=\":" + name + ":\" alt=\":" + name + ":\" />";
                    }
                }
            });
        }

        return text;
    };

    // Linkify "@user" mentions and (optionally) bare e-mail addresses.
    markedRenderer.atLink = function(text) {
        if (atLinkReg.test(text))
        {
            if (settings.atLink)
            {
                // Temporarily mask the "@" of e-mail addresses so they are
                // not linkified as @-mentions.
                text = text.replace(emailReg, function($1, $2, $3, $4) {
                    return $1.replace(/@/g, "_#_&#64;_#_");
                });

                text = text.replace(atLinkReg, function($1, $2) {
                    return "<a href=\"" + editormd.urls.atLinkBase + "" + $2 + "\" title=\"@" + $2 + "\" class=\"at-link\">" + $1 + "</a>";
                }).replace(/_#_&#64;_#_/g, "@");
            }

            if (settings.emailLink)
            {
                // Skip addresses that already carry "mailto:" or look like
                // image/file names.
                text = text.replace(emailLinkReg, function($1, $2, $3, $4, $5) {
                    return (!$2 && $.inArray($5, "jpg|jpeg|png|gif|webp|ico|icon|pdf".split("|")) < 0) ? "<a href=\"mailto:" + $1 + "\">"+$1+"</a>" : $1;
                });
            }

            return text;
        }

        return text;
    };

    // Anchor renderer with javascript: sanitization and @-escaping.
    markedRenderer.link = function (href, title, text) {
        if (this.options.sanitize) {
            try {
                var prot = decodeURIComponent(unescape(href)).replace(/[^\w:]/g,"").toLowerCase();
            } catch(e) {
                return "";
            }

            if (prot.indexOf("javascript:") === 0) {
                return "";
            }
        }

        var out = "<a href=\"" + href + "\"";

        if (atLinkReg.test(title) || atLinkReg.test(text))
        {
            if (title)
            {
                // Encode "@" as &#64; so atLink() does not re-process the
                // link text/title as a mention.
                out += " title=\"" + title.replace(/@/g, "&#64;");
            }

            return out + "\">" + text.replace(/@/g, "&#64;") + "</a>";
        }

        if (title) {
            out += " title=\"" + title + "\"";
        }

        out += ">" + text + "</a>";

        return out;
    };

    // Heading renderer: strips embedded <a> tags, records the entry in
    // markdownToC and emits an id + reference-link anchor.
    markedRenderer.heading = function(text, level, raw) {
        var linkText = text;
        var hasLinkReg = /\s*\<a\s*href\=\"(.*)\"\s*([^\>]*)\>(.*)\<\/a\>\s*/;
        var getLinkTextReg = /\s*\<a\s*([^\>]+)\>([^\>]*)\<\/a\>\s*/g;

        if (hasLinkReg.test(text))
        {
            var tempText = [];
            text = text.split(/\<a\s*([^\>]+)\>([^\>]*)\<\/a\>/);

            for (var i = 0, len = text.length; i < len; i++)
            {
                tempText.push(text[i].replace(/\s*href\=\"(.*)\"\s*/g, ""));
            }

            text = tempText.join(" ");
        }

        text = trim(text);

        var escapedText = text.toLowerCase().replace(/[^\w]+/g, "-");
        var toc = {
            text : text,
            level : level,
            slug : escapedText
        };

        // escape() is used (not encodeURIComponent) to build ids for
        // CJK-only headings; kept for backward-compatible anchors.
        var isChinese = /^[\u4e00-\u9fa5]+$/.test(text);
        var id = (isChinese) ? escape(text).replace(/\%/g, "") : text.toLowerCase().replace(/[^\w]+/g, "-");

        markdownToC.push(toc);

        var headingHTML = "<h" + level + " id=\"h"+ level + "-" + this.options.headerPrefix + id +"\">";

        headingHTML += "<a name=\"" + text + "\" class=\"reference-link\"></a>";
        headingHTML += "<span class=\"header-link octicon octicon-link\"></span>";
        // NOTE(review): hasLinkReg is a RegExp object and therefore always
        // truthy, so linkText is always chosen; preserved as-is to keep the
        // historical rendering.
        headingHTML += (hasLinkReg) ? this.atLink(this.emoji(linkText)) : this.atLink(this.emoji(text));
        headingHTML += "</h" + level + ">";

        return headingHTML;
    };

    // Replace a whole-line "[========]" marker with a printing page break.
    markedRenderer.pageBreak = function(text) {
        if (pageBreakReg.test(text) && settings.pageBreak)
        {
            text = "<hr style=\"page-break-after:always;\" class=\"page-break editormd-page-break\" />";
        }

        return text;
    };

    // Paragraph renderer: handles $$TeX$$ spans/lines, [TOC]/[TOCM]
    // placeholders and page breaks.
    markedRenderer.paragraph = function(text) {
        var isTeXInline = /\$\$(.*)\$\$/g.test(text);
        var isTeXLine = /^\$\$(.*)\$\$$/.test(text);
        var isTeXAddClass = (isTeXLine) ? " class=\"" + editormd.classNames.tex + "\"" : "";
        var isToC = (settings.tocm) ? /^(\[TOC\]|\[TOCM\])$/.test(text) : /^\[TOC\]$/.test(text);
        var isToCMenu = /^\[TOCM\]$/.test(text);

        if (!isTeXLine && isTeXInline)
        {
            text = text.replace(/(\$\$([^\$]*)\$\$)+/g, function($1, $2) {
                return "<span class=\"" + editormd.classNames.tex + "\">" + $2.replace(/\$/g, "") + "</span>";
            });
        }
        else
        {
            text = (isTeXLine) ? text.replace(/\$/g, "") : text;
        }

        var tocHTML = "<div class=\"markdown-toc editormd-markdown-toc\">" + text + "</div>";

        return (isToC) ? ( (isToCMenu) ? "<div class=\"editormd-toc-menu\">" + tocHTML + "</div><br/>" : tocHTML )
                       : ( (pageBreakReg.test(text)) ? this.pageBreak(text) : "<p" + isTeXAddClass + ">" + this.atLink(this.emoji(text)) + "</p>\n" );
    };

    // Fenced-code renderer: "seq"/"sequence" and "flow" languages become
    // containers for the diagram plugins; everything else falls through to
    // the stock marked renderer.
    markedRenderer.code = function (code, lang, escaped) {
        if (lang === "seq" || lang === "sequence")
        {
            return "<div class=\"sequence-diagram\">" + code + "</div>";
        }
        else if ( lang === "flow")
        {
            return "<div class=\"flowchart\">" + code + "</div>";
        }
        else
        {
            return marked.Renderer.prototype.code.apply(this, arguments);
        }
    };

    // Table cells run emoji/@-mention processing and honor column alignment.
    markedRenderer.tablecell = function(content, flags) {
        var type = (flags.header) ? "th" : "td";
        var tag = (flags.align) ? "<" + type +" style=\"text-align:" + flags.align + "\">" : "<" + type + ">";

        return tag + this.atLink(this.emoji(content)) + "</" + type + ">\n";
    };

    // GFM task-list items: "[ ]"/"[x]" prefixes become (disabled) checkboxes.
    markedRenderer.listitem = function(text) {
        if (settings.taskList && /^\s*\[[x\s]\]\s*/.test(text))
        {
            text = text.replace(/^\s*\[\s\]\s*/, "<input type=\"checkbox\" class=\"task-list-item-checkbox\" /> ")
                       .replace(/^\s*\[x\]\s*/, "<input type=\"checkbox\" class=\"task-list-item-checkbox\" checked disabled /> ");

            return "<li style=\"list-style: none;\">" + this.atLink(this.emoji(text)) + "</li>";
        }
        else
        {
            return "<li>" + this.atLink(this.emoji(text)) + "</li>";
        }
    };

    return markedRenderer;
};
/**
*
* 生成TOC(Table of Contents)
* Creating ToC (Table of Contents)
*
* @param {Array} toc 从marked获取的TOC数组列表
* @param {Element} container 插入TOC的容器元素
* @param {Integer} startLevel Hx 起始层级
* @returns {Object} tocContainer 返回ToC列表容器层的jQuery对象元素
*/
editormd.markdownToCRenderer = function(toc, container, tocDropdown, startLevel) {
    var html = "";
    var lastLevel = 0;
    var classPrefix = this.classPrefix;

    startLevel = startLevel || 1;

    // Build a nested <li><a>...<ul> structure; closing tags are emitted
    // when the heading level decreases or stays the same.
    for (var i = 0, len = toc.length; i < len; i++)
    {
        var text = toc[i].text;
        var level = toc[i].level;

        if (level < startLevel) {
            continue;
        }

        if (level > lastLevel)
        {
            // Going deeper: nothing to close (the previous item's <ul> is
            // still open). NOTE(review): intentionally a no-op append.
            html += "";
        }
        else if (level < lastLevel)
        {
            // Coming back up: close one </ul></li> pair per level skipped.
            html += (new Array(lastLevel - level + 2)).join("</ul></li>");
        }
        else
        {
            html += "</ul></li>";
        }

        html += "<li><a class=\"toc-level-" + level + "\" href=\"#" + text + "\" level=\"" + level + "\">" + text + "</a><ul>";
        lastLevel = level;
    }

    var tocContainer = container.find(".markdown-toc");

    // When the preview container has no [TOC] placeholder, create one.
    if (tocContainer.length < 1 && container.attr("previewContainer") === "false")
    {
        var tocHTML = "<div class=\"markdown-toc " + classPrefix + "markdown-toc\"></div>";
        tocHTML = (tocDropdown) ? "<div class=\"" + classPrefix + "toc-menu\">" + tocHTML + "</div>" : tocHTML;
        container.html(tocHTML);
        tocContainer = container.find(".markdown-toc");
    }

    if (tocDropdown)
    {
        tocContainer.wrap("<div class=\"" + classPrefix + "toc-menu\"></div><br/>");
    }

    // Drop empty <ul></ul> leftovers before injecting the list.
    tocContainer.html("<ul class=\"markdown-toc-list\"></ul>").children(".markdown-toc-list").html(html.replace(/\r?\n?\<ul\>\<\/ul\>/g, ""));

    return tocContainer;
};
/**
*
* 生成TOC下拉菜单
* Creating ToC dropdown menu
*
* @param {Object} container 插入TOC的容器jQuery对象元素
* @param {String} tocTitle ToC title
* @returns {Object} return toc-menu object
*/
editormd.tocDropdownMenu = function(container, tocTitle) {
    tocTitle = tocTitle || "Table of Contents";

    // z-index counter shared by all sub-menus so later-opened menus stack
    // above earlier ones.
    var zindex = 400;
    var tocMenus = container.find("." + this.classPrefix + "toc-menu");

    tocMenus.each(function() {
        var $this = $(this);
        var toc = $this.children(".markdown-toc");
        var icon = "<i class=\"fa fa-angle-down\"></i>";
        var btn = "<a href=\"javascript:;\" class=\"toc-menu-btn\">" + icon + tocTitle + "</a>";
        var menu = toc.children("ul");
        var list = menu.find("li");

        toc.append(btn);
        // Prepend a title row to the dropdown list.
        list.first().before("<li><h1>" + tocTitle + " " + icon + "</h1></li>");

        $this.mouseover(function(){
            menu.show();

            list.each(function(){
                var li = $(this);
                var ul = li.children("ul");

                // Remove empty sub-lists; decorate items that do have
                // children with a drop-down arrow.
                if (ul.html() === "")
                {
                    ul.remove();
                }

                if (ul.length > 0 && ul.html() !== "")
                {
                    var firstA = li.children("a").first();

                    if (firstA.children(".fa").length < 1)
                    {
                        firstA.append( $(icon).css({ float:"right", paddingTop:"4px" }) );
                    }
                }

                li.mouseover(function(){
                    ul.css("z-index", zindex).show();
                    zindex += 1;
                }).mouseleave(function(){
                    ul.hide();
                });
            });
        }).mouseleave(function(){
            menu.hide();
        });
    });

    return tocMenus;
};
/**
* 简单地过滤指定的HTML标签
* Filter custom html tags
*
* @param {String} html 要过滤HTML
* @param {String} filters 要过滤的标签
* @returns {String} html 返回过滤的HTML
*/
// Strip the given HTML tags and/or attributes from a string.
// `filters` is "tag1,tag2,...|attrList" where attrList is "*" (drop all
// attributes), "on*" (drop event-handler attributes) or a comma list of
// attribute names to remove. Returns `html` unchanged when `filters` is
// not a string (e.g. when htmlDecode is false).
editormd.filterHTMLTags = function(html, filters) {
    if (typeof html !== "string") {
        html = new String(html);
    }

    if (typeof filters !== "string") {
        return html;
    }

    var expression = filters.split("|");
    var filterTags = expression[0].split(",");
    var attrs = expression[1];

    for (var i = 0, len = filterTags.length; i < len; i++)
    {
        var tag = filterTags[i];

        // Backslashes must be doubled inside the string literal: "\s"
        // collapses to a plain "s" and the pattern silently stops matching
        // whitespace around the tag name.
        // NOTE(review): `tag` is interpolated unescaped; filter lists are
        // expected to contain plain tag names only.
        html = html.replace(new RegExp("<\\s*" + tag + "\\s*([^>]*)>([^>]*)<\\s*\\/" + tag + "\\s*>", "igm"), "");
    }

    if (typeof attrs !== "undefined")
    {
        var htmlTagRegex = /\<(\w+)\s*([^\>]*)\>([^\>]*)\<\/(\w+)\>/ig;

        if (attrs === "*")
        {
            // Rebuild every element without any attributes.
            html = html.replace(htmlTagRegex, function($1, $2, $3, $4, $5) {
                return "<" + $2 + ">" + $4 + "</" + $5 + ">";
            });
        }
        else if (attrs === "on*")
        {
            // Drop only on* (event handler) attributes, keep the rest.
            html = html.replace(htmlTagRegex, function($1, $2, $3, $4, $5) {
                var el = $("<" + $2 + ">" + $4 + "</" + $5 + ">");
                var _attrs = $($1)[0].attributes;
                var $attrs = {};

                $.each(_attrs, function(i, e) {
                    $attrs[e.nodeName] = e.nodeValue;
                });

                $.each($attrs, function(i) {
                    if (i.indexOf("on") === 0) {
                        delete $attrs[i];
                    }
                });

                el.attr($attrs);

                return el[0].outerHTML;
            });
        }
        else
        {
            // Remove the explicitly listed attributes from every element.
            html = html.replace(htmlTagRegex, function($1, $2, $3, $4) {
                var filterAttrs = attrs.split(",");
                var el = $($1);
                el.html($4);

                $.each(filterAttrs, function(i) {
                    el.attr(filterAttrs[i], null);
                });

                return el[0].outerHTML;
            });
        }
    }

    return html;
};
/**
* 将Markdown文档解析为HTML用于前台显示
* Parse Markdown to HTML for Font-end preview.
*
* @param {String} id 用于显示HTML的对象ID
* @param {Object} [options={}] 配置选项,可选
* @returns {Object} div 返回jQuery对象元素
*/
// Parse the Markdown held by element #id (or options.markdown) and render
// the HTML preview into it. Returns the jQuery element, extended with
// settings, getMarkdown() and (when TOC is enabled) tocContainer.
editormd.markdownToHTML = function(id, options) {
    var defaults = {
        gfm : true,
        toc : true,
        tocm : false,
        tocStartLevel : 1,
        tocTitle : "目录",
        tocDropdown : false,
        markdown : "",
        htmlDecode : false,
        autoLoadKaTeX : true,
        pageBreak : true,
        atLink : true, // for @link
        emailLink : true, // for mail address auto link
        tex : false,
        taskList : false, // Github Flavored Markdown task lists
        emoji : false,
        flowChart : false,
        sequenceDiagram : false,
        previewCodeHighlight : true
    };

    editormd.$marked = marked;

    var div = $("#" + id);
    var settings = div.settings = $.extend(true, defaults, options || {});
    // The rendered Markdown source is mirrored into a <textarea> so
    // getMarkdown() can return it later.
    var saveTo = div.find("textarea");

    if (saveTo.length < 1)
    {
        div.append("<textarea></textarea>");
        saveTo = div.find("textarea");
    }

    var markdownDoc = (settings.markdown === "") ? saveTo.val() : settings.markdown;
    var markdownToC = [];

    var rendererOptions = {
        toc : settings.toc,
        tocm : settings.tocm,
        tocStartLevel : settings.tocStartLevel,
        taskList : settings.taskList,
        emoji : settings.emoji,
        tex : settings.tex,
        pageBreak : settings.pageBreak,
        atLink : settings.atLink, // for @link
        emailLink : settings.emailLink, // for mail address auto link
        flowChart : settings.flowChart,
        sequenceDiagram : settings.sequenceDiagram,
        previewCodeHighlight : settings.previewCodeHighlight // no trailing comma: IE8 compatibility
    };

    var markedOptions = {
        renderer : editormd.markedRenderer(markdownToC, rendererOptions),
        gfm : settings.gfm,
        tables : true,
        breaks : true,
        pedantic : false,
        sanitize : (settings.htmlDecode) ? false : true, // Parse raw HTML only when htmlDecode is enabled; sanitized by default for safety.
        smartLists : true,
        smartypants : true
    };

    markdownDoc = new String(markdownDoc);
    // Apply the tag/attribute whitelist configured via htmlDecode.
    markdownDoc = editormd.filterHTMLTags(markdownDoc, settings.htmlDecode);

    var markdownParsed = marked(markdownDoc, markedOptions);

    saveTo.val(markdownDoc);

    div.addClass("markdown-body " + this.classPrefix + "html-preview").append(markdownParsed);

    if (settings.toc)
    {
        div.tocContainer = this.markdownToCRenderer(markdownToC, div, settings.tocDropdown, settings.tocStartLevel);

        if (settings.tocDropdown || div.find("." + this.classPrefix + "toc-menu").length > 0)
        {
            this.tocDropdownMenu(div, settings.tocTitle);
        }
    }

    if (settings.previewCodeHighlight)
    {
        div.find("pre").addClass("prettyprint linenums");
        prettyPrint();
    }

    // Diagram plugins require IE9+.
    if (!editormd.isIE8)
    {
        if (settings.flowChart) {
            div.find(".flowchart").flowChart();
        }

        if (settings.sequenceDiagram) {
            div.find(".sequence-diagram").sequenceDiagram({theme: "simple"});
        }
    }

    if (settings.tex)
    {
        var katexHandle = function() {
            div.find("." + editormd.classNames.tex).each(function(){
                var tex = $(this);
                katex.render(tex.html(), tex[0]);
            });
        };

        // Lazily fetch KaTeX on first use, then render all TeX spans.
        if (settings.autoLoadKaTeX && !editormd.$katex && !editormd.kaTeXLoaded)
        {
            this.loadKaTeX(function() {
                editormd.$katex = katex;
                editormd.kaTeXLoaded = true;
                katexHandle();
            });
        }
        else
        {
            katexHandle();
        }
    }

    div.getMarkdown = function() {
        return saveTo.val();
    };

    return div;
};
// CodeMirror theme names bundled with the editor (used by the theme picker).
editormd.themes = [
    "default", "3024-day", "3024-night",
    "ambiance", "ambiance-mobile",
    "base16-dark", "base16-light", "blackboard",
    "cobalt",
    "eclipse", "elegant", "erlang-dark",
    "lesser-dark",
    "mbo", "mdn-like", "midnight", "monokai",
    "neat", "neo", "night",
    "paraiso-dark", "paraiso-light", "pastel-on-dark",
    "rubyblue",
    "solarized",
    "the-matrix", "tomorrow-night-eighties", "twilight",
    "vibrant-ink",
    "xq-dark", "xq-light"
];
// Registry of dynamically loaded plugin constructors.
editormd.loadPlugins = {};
// Bookkeeping of every js/css/plugin file already fetched (pushed by the
// loadScript/loadCSS/loadPlugin helpers below).
editormd.loadFiles = {
    js : [],
    css : [],
    plugin : []
};
/**
* 动态加载Editor.md插件,但不立即执行
* Load editor.md plugins
*
* @param {String} fileName 插件文件路径
* @param {Function} [callback=function()] 加载成功后执行的回调函数
* @param {String} [into="head"] 嵌入页面的位置
*/
// Dynamically load an Editor.md plugin script (without executing it) and
// record it in editormd.loadFiles.plugin before invoking the callback.
editormd.loadPlugin = function(fileName, callback, into) {
    var done = callback || function() {};

    this.loadScript(fileName, function() {
        editormd.loadFiles.plugin.push(fileName);
        done();
    }, into);
};
/**
* 动态加载CSS文件的方法
* Load css file method
*
* @param {String} fileName CSS文件名
* @param {Function} [callback=function()] 加载成功后执行的回调函数
* @param {String} [into="head"] 嵌入页面的位置
*/
// Dynamically load a stylesheet (fileName + ".css") into <head> or <body>
// and invoke `callback` exactly once when it is ready.
editormd.loadCSS = function(fileName, callback, into) {
    into = into || "head";
    callback = callback || function() {};

    var css = document.createElement("link");
    css.type = "text/css";
    css.rel = "stylesheet";

    // Old IE fires onreadystatechange repeatedly; only treat the sheet as
    // ready on "loaded"/"complete", and detach both handlers afterwards so
    // the callback cannot run twice (once via onload, once via
    // onreadystatechange) — mirrors the guard used in loadScript.
    css.onload = css.onreadystatechange = function() {
        if (css.readyState && css.readyState !== "loaded" && css.readyState !== "complete") {
            return;
        }

        css.onload = css.onreadystatechange = null;
        editormd.loadFiles.css.push(fileName);
        callback();
    };

    css.href = fileName + ".css";

    if(into === "head") {
        document.getElementsByTagName("head")[0].appendChild(css);
    } else {
        document.body.appendChild(css);
    }
};
// Legacy user-agent sniffing kept from the original code base.
editormd.isIE = (navigator.appName == "Microsoft Internet Explorer");
// NOTE(review): /8./ has an unescaped dot and the array returned by
// match() is compared to "8." via string coercion — fragile, but kept
// as-is since other code keys off this exact behavior.
editormd.isIE8 = (editormd.isIE && navigator.appVersion.match(/8./i) == "8.");
/**
* 动态加载JS文件的方法
* Load javascript file method
*
* @param {String} fileName JS文件名
* @param {Function} [callback=function()] 加载成功后执行的回调函数
* @param {String} [into="head"] 嵌入页面的位置
*/
editormd.loadScript = function(fileName, callback, into) {
    into = into || "head";
    callback = callback || function() {};

    var script = null;
    script = document.createElement("script");
    // Derive a DOM id from the path so the same file is identifiable later.
    script.id = fileName.replace(/[\./]+/g, "-");
    script.type = "text/javascript";
    script.src = fileName + ".js";

    if (editormd.isIE8)
    {
        // IE8 has no onload for <script>; poll readyState and fire once on
        // "loaded"/"complete", then detach the handler.
        script.onreadystatechange = function() {
            if(script.readyState)
            {
                if (script.readyState === "loaded" || script.readyState === "complete")
                {
                    script.onreadystatechange = null;
                    editormd.loadFiles.js.push(fileName);
                    callback();
                }
            }
        };
    }
    else
    {
        script.onload = function() {
            editormd.loadFiles.js.push(fileName);
            callback();
        };
    }

    if (into === "head") {
        document.getElementsByTagName("head")[0].appendChild(script);
    } else {
        document.body.appendChild(script);
    }
};
// 使用国外的CDN,加载速度有时会很慢,或者自定义URL
// You can custom KaTeX load url.
// CDN locations for KaTeX (without the .css/.js extension, which the
// loadCSS/loadScript helpers append). Protocol-relative URLs.
editormd.katexURL = {
    css : "//cdnjs.cloudflare.com/ajax/libs/KaTeX/0.3.0/katex.min",
    js : "//cdnjs.cloudflare.com/ajax/libs/KaTeX/0.3.0/katex.min"
};
// Set to true once loadKaTeX() has finished, to avoid reloading.
editormd.kaTeXLoaded = false;
/**
* 加载KaTeX文件
* load KaTeX files
*
* @param {Function} [callback=function()] 加载成功后执行的回调函数
*/
// Fetch the KaTeX stylesheet first, then its script, then run `callback`.
editormd.loadKaTeX = function (callback) {
    var onReady = callback || function() {};

    editormd.loadCSS(editormd.katexURL.css, function () {
        editormd.loadScript(editormd.katexURL.js, onReady);
    });
};
/**
* 锁屏
* lock screen
*
* @param {Boolean} lock Boolean 布尔值,是否锁屏
* @returns {void}
*/
// Lock (or unlock) page scrolling by toggling overflow:hidden on
// <html> and <body>.
editormd.lockScreen = function(lock) {
    var overflow = lock ? "hidden" : "";
    $("html,body").css("overflow", overflow);
};
/**
* 动态创建对话框
* Creating custom dialogs
*
* @param {Object} options 配置项键值对 Key/Value
* @returns {dialog} 返回创建的dialog的jQuery实例对象
*/
editormd.createDialog = function(options) {
    var defaults = {
        name : "",
        width : 420,
        height: 240,
        title : "",
        drag : true,          // draggable via the header
        closed : true,        // show a close button
        content : "",
        mask : true,
        maskStyle : {
            backgroundColor : "#fff",
            opacity : 0.1
        },
        lockScreen : true,
        footer : true,        // true/false, or an HTML string
        buttons : false       // { key: [label, handler], ... }
    };

    options = $.extend(true, defaults, options);

    var editor = this.editor;
    var classPrefix = editormd.classPrefix;
    // Timestamp-based name keeps anonymous dialogs unique.
    var guid = (new Date()).getTime();
    var dialogName = ( (options.name === "") ? classPrefix + "dialog-" + guid : options.name);
    var mouseOrTouch = editormd.mouseOrTouch;

    // --- Assemble the dialog markup (header, close button, body, footer,
    // --- two mask layers) and append it to the editor container.
    var html = "<div class=\"" + classPrefix + "dialog " + dialogName + "\">";

    if (options.title !== "")
    {
        html += "<div class=\"" + classPrefix + "dialog-header\"" + ( (options.drag) ? " style=\"cursor: move;\"" : "" ) + ">";
        html += "<strong class=\"" + classPrefix + "dialog-title\">" + options.title + "</strong>";
        html += "</div>";
    }

    if (options.closed)
    {
        html += "<a href=\"javascript:;\" class=\"fa fa-close " + classPrefix + "dialog-close\"></a>";
    }

    html += "<div class=\"" + classPrefix + "dialog-container\">" + options.content;

    if (options.footer || typeof options.footer === "string")
    {
        html += "<div class=\"" + classPrefix + "dialog-footer\">" + ( (typeof options.footer === "boolean") ? "" : options.footer) + "</div>";
    }

    html += "</div>";

    html += "<div class=\"" + classPrefix + "dialog-mask " + classPrefix + "dialog-mask-bg\"></div>";
    html += "<div class=\"" + classPrefix + "dialog-mask " + classPrefix + "dialog-mask-con\"></div>";
    html += "</div>";

    editor.append(html);

    var dialog = editor.find("." + dialogName);

    // Chainable helpers attached to the dialog element.
    dialog.lockScreen = function(lock) {
        if (options.lockScreen)
        {
            $("html,body").css("overflow", (lock) ? "hidden" : "");
        }

        return dialog;
    };

    dialog.showMask = function() {
        if (options.mask)
        {
            editor.find("." + classPrefix + "mask").css(options.maskStyle).css("z-index", editormd.dialogZindex - 1).show();
        }
        return dialog;
    };

    dialog.hideMask = function() {
        if (options.mask)
        {
            editor.find("." + classPrefix + "mask").hide();
        }
        return dialog;
    };

    dialog.loading = function(show) {
        var loading = dialog.find("." + classPrefix + "dialog-mask");
        loading[(show) ? "show" : "hide"]();
        return dialog;
    };

    dialog.lockScreen(true).showMask();

    dialog.show().css({
        zIndex : editormd.dialogZindex,
        border : (editormd.isIE8) ? "1px solid #ddd" : "",
        width : (typeof options.width === "number") ? options.width + "px" : options.width,
        height : (typeof options.height === "number") ? options.height + "px" : options.height
    });

    // Keep the dialog centered in the viewport, also on window resize.
    var dialogPosition = function(){
        dialog.css({
            top : ($(window).height() - dialog.height()) / 2 + "px",
            left : ($(window).width() - dialog.width()) / 2 + "px"
        });
    };

    dialogPosition();

    $(window).resize(dialogPosition);

    dialog.children("." + classPrefix + "dialog-close").bind(mouseOrTouch("click", "touchend"), function() {
        dialog.hide().lockScreen(false).hideMask();
    });

    // Render footer buttons; each handler is proxied so `this` is the dialog.
    if (typeof options.buttons === "object")
    {
        var footer = dialog.footer = dialog.find("." + classPrefix + "dialog-footer");

        for (var key in options.buttons)
        {
            var btn = options.buttons[key];
            var btnClassName = classPrefix + key + "-btn";

            footer.append("<button class=\"" + classPrefix + "btn " + btnClassName + "\">" + btn[0] + "</button>");
            btn[1] = $.proxy(btn[1], dialog);
            footer.children("." + btnClassName).bind(mouseOrTouch("click", "touchend"), btn[1]);
        }
    }

    // --- Mouse/touch drag support, active only when there is a header. ---
    if (options.title !== "" && options.drag)
    {
        var posX, posY;
        var dialogHeader = dialog.children("." + classPrefix + "dialog-header");

        // Without a mask, clicking a dialog raises it above its siblings.
        if (!options.mask) {
            dialogHeader.bind(mouseOrTouch("click", "touchend"), function(){
                editormd.dialogZindex += 2;
                dialog.css("z-index", editormd.dialogZindex);
            });
        }

        dialogHeader.mousedown(function(e) {
            e = e || window.event; //IE
            // Remember the pointer offset inside the dialog.
            posX = e.clientX - parseInt(dialog[0].style.left);
            posY = e.clientY - parseInt(dialog[0].style.top);

            document.onmousemove = moveAction;
        });

        var userCanSelect = function (obj) {
            obj.removeClass(classPrefix + "user-unselect").off("selectstart");
        };

        var userUnselect = function (obj) {
            obj.addClass(classPrefix + "user-unselect").on("selectstart", function(event) { // selectstart for IE
                return false;
            });
        };

        // Clamp the dialog to the viewport while dragging; dragging stops
        // (onmousemove cleared) once an edge is reached.
        var moveAction = function (e) {
            e = e || window.event; //IE

            var left, top, nowLeft = parseInt(dialog[0].style.left), nowTop = parseInt(dialog[0].style.top);

            if( nowLeft >= 0 ) {
                if( nowLeft + dialog.width() <= $(window).width()) {
                    left = e.clientX - posX;
                } else {
                    left = $(window).width() - dialog.width();
                    document.onmousemove = null;
                }
            } else {
                left = 0;
                document.onmousemove = null;
            }

            if( nowTop >= 0 ) {
                top = e.clientY - posY;
            } else {
                top = 0;
                document.onmousemove = null;
            }

            // Suppress text selection while the drag is in progress.
            document.onselectstart = function() {
                return false;
            };

            userUnselect($("body"));
            userUnselect(dialog);
            dialog[0].style.left = left + "px";
            dialog[0].style.top = top + "px";
        };

        document.onmouseup = function() {
            userCanSelect($("body"));
            userCanSelect(dialog);

            document.onselectstart = null;
            document.onmousemove = null;
        };

        // Touch-device drag: track the finger offset relative to the
        // dialog's position at touchstart.
        dialogHeader.touchDraggable = function() {
            var offset = null;
            var start = function(e) {
                var orig = e.originalEvent;
                var pos = $(this).parent().position();

                offset = {
                    x : orig.changedTouches[0].pageX - pos.left,
                    y : orig.changedTouches[0].pageY - pos.top
                };
            };

            var move = function(e) {
                e.preventDefault();
                var orig = e.originalEvent;

                $(this).parent().css({
                    top : orig.changedTouches[0].pageY - offset.y,
                    left : orig.changedTouches[0].pageX - offset.x
                });
            };

            this.bind("touchstart", start).bind("touchmove", move);
        };

        dialogHeader.touchDraggable();
    }

    editormd.dialogZindex += 2;

    return dialog;
};
/**
* 鼠标和触摸事件的判断/选择方法
* MouseEvent or TouchEvent type switch
*
* @param {String} [mouseEventType="click"] 供选择的鼠标事件
* @param {String} [touchEventType="touchend"] 供选择的触摸事件
* @returns {String} EventType 返回事件类型名称
*/
// Pick the touch event name on touch-capable devices, otherwise the mouse
// event name. Feature detection via document.createEvent("TouchEvent"),
// which throws where touch events are unsupported.
editormd.mouseOrTouch = function(mouseEventType, touchEventType) {
    var mouseType = mouseEventType || "click";
    var touchType = touchEventType || "touchend";
    var eventType = mouseType;

    try {
        document.createEvent("TouchEvent");
        eventType = touchType;
    } catch(e) {}

    return eventType;
};
/**
* 日期时间的格式化方法
* Datetime format method
*
* @param {String} [format=""] 日期时间的格式,类似PHP的格式
* @returns {String} datefmt 返回格式化后的日期时间字符串
*/
editormd.dateFormat = function(format) {
    format = format || "";

    // Zero-pad single-digit date components.
    var addZero = function(d) {
        return (d < 10) ? "0" + d : d;
    };

    var date = new Date();
    var year = date.getFullYear();
    var year2 = year.toString().slice(2, 4); // two-digit year
    var month = addZero(date.getMonth() + 1);
    var day = addZero(date.getDate());
    var weekDay = date.getDay();
    var hour = addZero(date.getHours());
    var min = addZero(date.getMinutes());
    var second = addZero(date.getSeconds());
    var ms = addZero(date.getMilliseconds());
    var datefmt = "";

    var ymd = year2 + "-" + month + "-" + day;
    var fymd = year + "-" + month + "-" + day;
    var hms = hour + ":" + min + ":" + second;

    // PHP-style format selector; unknown/empty formats fall through to the
    // full "yyyy-mm-dd h:i:s" default.
    switch (format)
    {
        case "UNIX Time" :
            datefmt = date.getTime();
        break;

        case "UTC" :
            datefmt = date.toUTCString();
        break;

        case "yy" :
            datefmt = year2;
        break;

        case "year" :
        case "yyyy" :
            datefmt = year;
        break;

        case "month" :
        case "mm" :
            datefmt = month;
        break;

        // Chinese week-day name ("星期X").
        case "cn-week-day" :
        case "cn-wd" :
            var cnWeekDays = ["日", "一", "二", "三", "四", "五", "六"];
            datefmt = "星期" + cnWeekDays[weekDay];
        break;

        case "week-day" :
        case "wd" :
            var weekDays = ["Sunday", "Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday"];
            datefmt = weekDays[weekDay];
        break;

        case "day" :
        case "dd" :
            datefmt = day;
        break;

        case "hour" :
        case "hh" :
            datefmt = hour;
        break;

        case "min" :
        case "ii" :
            datefmt = min;
        break;

        case "second" :
        case "ss" :
            datefmt = second;
        break;

        case "ms" :
            datefmt = ms;
        break;

        case "yy-mm-dd" :
            datefmt = ymd;
        break;

        case "yyyy-mm-dd" :
            datefmt = fymd;
        break;

        case "yyyy-mm-dd h:i:s ms" :
        case "full + ms" :
            datefmt = fymd + " " + hms + " " + ms;
        break;

        case "full" :
        case "yyyy-mm-dd h:i:s" :
        default:
            datefmt = fymd + " " + hms;
        break;
    }

    return datefmt;
};
return editormd;
}));
|
mit
|
theodi/british_values
|
test/integration/experimental/serverspec/nginx_spec.rb
|
635
|
# Serverspec checks for the nginx reverse proxy on the experimental host.
require 'serverspec'
# Run the checks by exec'ing commands directly on the machine under test.
set :backend, :exec
# nginx must be installed and running as a service.
describe package 'nginx' do
  it { should be_installed }
end
describe service 'nginx' do
  it { should be_running }
end
# The enabled vhost must be a symlink and must proxy
# experimental.certificates.theodi.org to the local app on port 8001,
# serving static assets from the current release's public directory.
describe file '/etc/nginx/sites-enabled/certificates.theodi.org' do
  it { should be_symlink }
  its(:content) { should match /server 127.0.0.1:8001;/ }
  its(:content) { should match /listen 80 default;/ }
  its(:content) { should match /server_name experimental.certificates.theodi.org;/ }
  its(:content) { should match /root \/home\/certificates\/certificates.theodi.org\/current\/public\/;/ }
  its(:content) { should match /proxy_pass http:\/\/certificates;/ }
end
|
mit
|
stephaneAG/PengPod700
|
QtEsrc/qt-everywhere-opensource-src-4.8.5/src/plugins/inputmethods/imsw-multi/qmultiinputcontext.cpp
|
6408
|
/****************************************************************************
**
** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
** Contact: http://www.qt-project.org/legal
**
** This file is part of the plugins of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:LGPL$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and Digia. For licensing terms and
** conditions see http://qt.digia.com/licensing. For further information
** use the contact form at http://qt.digia.com/contact-us.
**
** GNU Lesser General Public License Usage
** Alternatively, this file may be used under the terms of the GNU Lesser
** General Public License version 2.1 as published by the Free Software
** Foundation and appearing in the file LICENSE.LGPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU Lesser General Public License version 2.1 requirements
** will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
**
** In addition, as a special exception, Digia gives you certain additional
** rights. These rights are described in the Digia Qt LGPL Exception
** version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 3.0 as published by the Free Software
** Foundation and appearing in the file LICENSE.GPL included in the
** packaging of this file. Please review the following information to
** ensure the GNU General Public License version 3.0 requirements will be
** met: http://www.gnu.org/copyleft/gpl.html.
**
**
** $QT_END_LICENSE$
**
****************************************************************************/
/****************************************************************************
**
** Implementation of QMultiInputContext class
**
** Copyright (C) 2004 immodule for Qt Project. All rights reserved.
**
** This file is written to contribute to Nokia Corporation and/or its subsidiary(-ies) under their own
** license. You may use this file under your Qt license. Following
** description is copied from their original file headers. Contact
** immodule-qt@freedesktop.org if any conditions of this licensing are
** not clear to you.
**
****************************************************************************/
#ifndef QT_NO_IM
#include "qmultiinputcontext.h"
#include <qinputcontextfactory.h>
#include <qstringlist.h>
#include <qaction.h>
#include <qsettings.h>
#include <qmenu.h>
#include <stdlib.h>
QT_BEGIN_NAMESPACE
/*!
    Builds the multiplexing input context.

    Collects every available input-method key (dropping the "imsw"
    switcher plugins themselves), picks the initial slave from the
    QT4_IM_MODULE / QT_IM_MODULE environment variables or, failing
    that, the Trolltech/Qt "DefaultInputMethod" setting (default
    "xim"), and builds the checkable "Select IM" menu. Only the
    initially selected slave is instantiated here; the others are
    created lazily in changeSlave().
*/
QMultiInputContext::QMultiInputContext()
    : QInputContext(), current(-1)
{
    keys = QInputContextFactory::keys();
    // Remove the switcher plugins ("imsw-*") from the candidate list so
    // the switcher cannot select itself.
    for (int i = keys.size()-1; i >= 0; --i)
        if (keys.at(i).contains(QLatin1String("imsw")))
            keys.removeAt(i);

    QString def = QLatin1String(getenv("QT4_IM_MODULE"));
    if (def.isEmpty())
        def = QLatin1String(getenv("QT_IM_MODULE"));
    if (def.isEmpty()) {
        QSettings settings(QSettings::UserScope, QLatin1String("Trolltech"));
        settings.beginGroup(QLatin1String("Qt"));
        def = settings.value(QLatin1String("DefaultInputMethod"), QLatin1String("xim")).toString();
    }
    current = keys.indexOf(def);
    // Fall back to the first available input method if the requested
    // one is unknown.
    if (current < 0)
        current = 0;

    menu = new QMenu(tr("Select IM"));
    separator = new QAction(this);
    separator->setSeparator(true);
    QActionGroup *group = new QActionGroup(this);
    for (int i = 0; i < keys.size(); ++i) {
        // One lazily-filled slot per key; only the current key's slave
        // is constructed now (parented to this for QObject cleanup).
        slaves.append(0);
        const QString key = keys.at(i);
        QAction *a = menu->addAction(QInputContextFactory::displayName(key));
        a->setData(key);
        a->setCheckable(true);
        group->addAction(a);
        if (i == current) {
            slaves.replace(current, QInputContextFactory::create(key, this));
            a->setChecked(true);
        }
    }
    connect(group, SIGNAL(triggered(QAction*)), this, SLOT(changeSlave(QAction*)));
}
/*!
    Destroys the context. Only the menu is deleted explicitly; the
    slave contexts and the separator action were created with this
    object as their parent, so Qt's QObject ownership reclaims them.
*/
QMultiInputContext::~QMultiInputContext()
{
    delete menu;
}
// Reports the identifier of the active slave input method, or an empty
// string when no slave exists.
QString QMultiInputContext::identifierName()
{
    QInputContext *ic = slave();
    if (!ic)
        return QLatin1String("");
    return ic->identifierName();
}
// Reports the language of the active slave input method, or an empty
// string when no slave exists.
QString QMultiInputContext::language()
{
    QInputContext *ic = slave();
    if (!ic)
        return QLatin1String("");
    return ic->language();
}
#if defined(Q_WS_X11)
// On X11, give the active slave first crack at raw XEvents. Returns
// false (event not consumed) when there is no slave.
bool QMultiInputContext::x11FilterEvent(QWidget *keywidget, XEvent *event)
{
    return (slave()) ? slave()->x11FilterEvent(keywidget, event) : false;
}
#endif // Q_WS_X11
bool QMultiInputContext::filterEvent(const QEvent *event)
{
return (slave()) ? slave()->filterEvent(event) : false;
}
void QMultiInputContext::reset()
{
if (slave())
slave()->reset();
}
void QMultiInputContext::update()
{
if (slave())
slave()->update();
}
void QMultiInputContext::mouseHandler(int x, QMouseEvent *event)
{
if (slave())
slave()->mouseHandler(x, event);
}
// Returns the active slave's font, falling back to the base-class
// font when no slave exists.
QFont QMultiInputContext::font() const
{
    if (slave())
        return slave()->font();
    return QInputContext::font();
}
// Records the focus widget in the base class first, then mirrors the
// change into the active slave so it tracks the same widget.
void QMultiInputContext::setFocusWidget(QWidget *w)
{
    QInputContext::setFocusWidget(w);
    QInputContext *ic = slave();
    if (ic)
        ic->setFocusWidget(w);
}
// The multiplexer itself owns the focus-widget bookkeeping; slaves are
// kept in sync via setFocusWidget, so the base-class value is
// authoritative here.
QWidget *QMultiInputContext::focusWidget() const
{
    return QInputContext::focusWidget();
}
// Lets the active slave clean up any state tied to a widget that is
// being destroyed.
void QMultiInputContext::widgetDestroyed(QWidget *w)
{
    QInputContext *ic = slave();
    if (ic)
        ic->widgetDestroyed(w);
}
bool QMultiInputContext::isComposing() const
{
return (slave()) ? slave()->isComposing() : false;
}
/*!
    Returns the slave's actions followed by a separator and the
    "Select IM" menu action.

    Fix: every other method in this class null-checks slave() before
    dereferencing it, but this one called slave()->actions()
    unconditionally; guard it so a missing slave yields just the
    separator and menu instead of a crash.
*/
QList<QAction *> QMultiInputContext::actions()
{
    QList<QAction *> a;
    if (slave())
        a = slave()->actions();
    a.append(separator);
    a.append(menu->menuAction());
    return a;
}
/*!
    Menu handler: switches the active slave to the input method whose
    key is carried in \a a's data(). The new slave is created on first
    use; focus is moved from the old slave (after resetting its
    composition state) to the new one.
*/
void QMultiInputContext::changeSlave(QAction *a)
{
    for (int i = 0; i < slaves.size(); ++i) {
        if (keys.at(i) == a->data().toString()) {
            // Lazily instantiate the requested input context.
            if (slaves.at(i) == 0)
                slaves.replace(i, QInputContextFactory::create(keys.at(i), this));
            QInputContext *qic = slaves.at(current);
            QWidget *oldWidget = qic->focusWidget();
            // Abort any in-progress composition before taking focus
            // away from the old slave.
            qic->reset();
            qic->setFocusWidget(0);
            current = i;
            qic = slaves.at(current);
            qic->setFocusWidget(oldWidget);
            return;
        }
    }
}
QT_END_NAMESPACE
#endif // QT_NO_IM
|
mit
|
pendo324/Cobalt
|
Cobalt/Services/IServiceLocator.cs
|
125
|
namespace Cobalt.Services
{
    /// <summary>
    /// Resolves service instances by type, abstracting the underlying
    /// container/registry from consumers.
    /// </summary>
    public interface IServiceLocator
    {
        /// <summary>
        /// Returns the instance registered for reference type
        /// <typeparamref name="T"/>.
        /// </summary>
        T GetInstance<T>() where T : class;
    }
}
|
mit
|
glerchundi/confd
|
pkg/util/logs.go
|
3407
|
/*
Copyright 2014 The Kubernetes Authors All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package util
import (
"fmt"
"runtime"
"flag"
"log"
"time"
"github.com/golang/glog"
"github.com/spf13/pflag"
)
//
// kubernetes/pkg/util/util.go
//
// For testing, bypass HandleCrash.
var ReallyCrash bool
// PanicHandlers is a list of functions which will be invoked when a panic happens.
var PanicHandlers = []func(interface{}){logPanic}
// HandleCrash simply catches a crash and logs an error. Meant to be called via defer.
// Additional context-specific handlers can be provided, and will be called in case of panic.
// When ReallyCrash is set (for testing), recovery is skipped entirely so
// the panic propagates as normal.
func HandleCrash(additionalHandlers ...func(interface{})) {
	if ReallyCrash {
		return
	}
	if r := recover(); r != nil {
		// Global handlers run first, then the call-site-specific ones.
		for _, fn := range PanicHandlers {
			fn(r)
		}
		for _, fn := range additionalHandlers {
			fn(r)
		}
	}
}
// logPanic logs the recovered panic value together with the full
// caller tree (file:line per frame) via glog.
func logPanic(r interface{}) {
	var callers string
	for i := 0; ; i++ {
		_, file, line, ok := runtime.Caller(i)
		if !ok {
			break
		}
		callers += fmt.Sprintf("%v:%v\n", file, line)
	}
	glog.Errorf("Recovered from panic: %#v (%v)\n%v", r, r, callers)
}
// NeverStop may be passed to Until to make it never stop.
var NeverStop <-chan struct{} = make(chan struct{})

// Until loops until stop channel is closed, running f every period.
// Catches any panics, and keeps going. f may not be invoked if
// stop channel is already closed. Pass NeverStop to Until if you
// don't want it stop.
//
// Note: the delay is armed after f returns (fixed delay between
// invocations, not a fixed rate).
func Until(f func(), period time.Duration, stopCh <-chan struct{}) {
	// Bail out immediately if the stop channel is already closed.
	select {
	case <-stopCh:
		return
	default:
	}
	for {
		func() {
			// Recover from panics in f so one bad iteration does not
			// kill the loop.
			defer HandleCrash()
			f()
		}()
		select {
		case <-stopCh:
			return
		case <-time.After(period):
		}
	}
}
//
// kubernetes/pkg/util/logs.go
//
var (
logFlushFreq = pflag.Duration("log-flush-frequency", 5*time.Second, "Maximum number of seconds between log flushes")
_ = pflag.Int("log-level", 0, "Enable V-leveled logging at the specified level.")
)
// TODO(thockin): This is temporary until we agree on log dirs and put those into each cmd.
func init() {
flag.Set("logtostderr", "true")
}
// GlogWriter serves as a bridge between the standard log package and the glog package.
type GlogWriter struct{}

// Write implements the io.Writer interface.
// Each call becomes one glog.Info record; the byte count is always
// reported as fully written and the error is always nil.
func (writer GlogWriter) Write(data []byte) (n int, err error) {
	glog.Info(string(data))
	return len(data), nil
}
// InitLogs initializes logs the way we want for kubernetes:
// the stdlib logger is redirected into glog, and a goroutine flushes
// glog every *logFlushFreq for the lifetime of the process (NeverStop).
func InitLogs() {
	log.SetOutput(GlogWriter{})
	// Drop the stdlib logger's own date/time prefixes; glog adds its own.
	log.SetFlags(0)
	// The default glog flush interval is 30 seconds, which is frighteningly long.
	go Until(glog.Flush, *logFlushFreq, NeverStop)
}
// FlushLogs flushes any buffered glog output immediately; call this
// before process exit so pending log lines are not lost.
func FlushLogs() {
	glog.Flush()
}
// NewLogger creates a new log.Logger which sends logs to glog.Info,
// prefixing each line with the given prefix.
func NewLogger(prefix string) *log.Logger {
	var sink GlogWriter
	return log.New(sink, prefix, 0)
}
|
mit
|
envato/nulldb
|
lib/active_record/connection_adapters/nulldb_adapter/column.rb
|
124
|
class ActiveRecord::ConnectionAdapters::NullDBAdapter
  # Column for the NullDB adapter. Inherits everything from the
  # standard ActiveRecord column; presumably exists so the adapter has
  # its own column class to extend later — confirm against the adapter.
  class Column < ::ActiveRecord::ConnectionAdapters::Column
  end
end
|
mit
|
danilovesky/workcraft
|
workcraft/DfsPlugin/src/org/workcraft/plugins/dfs/commands/InsertPopRegisterTransformationCommand.java
|
377
|
package org.workcraft.plugins.dfs.commands;
import org.workcraft.plugins.dfs.PopRegister;
/**
 * Insertion command that adds a {@link PopRegister} component to a DFS
 * model, specializing the generic insert-transformation base class.
 */
public class InsertPopRegisterTransformationCommand extends AbstractInsertTransformationCommand {
    /** Human-readable name of the inserted component type. */
    @Override
    public String getTypeName() {
        return "pop register";
    }
    /** Factory hook: creates the component instance to insert. */
    @Override
    public PopRegister createComponent() {
        return new PopRegister();
    }
}
|
mit
|
nym/oilandwater
|
lib/Dynamics/b2World.js
|
35046
|
/**
 * Class b2World
 *
 * The physics world: owns all bodies, joints, contacts and controllers
 * and steps the simulation.
 *
 * @param gravity global gravity vector applied to dynamic bodies
 * @param doSleep whether bodies are allowed to fall asleep
 *
 */
b2World = Box2D.Dynamics.b2World = function b2World(gravity, doSleep) {
    this.m_stack = [];
    this.m_contactManager = new b2ContactManager();
    this.m_contactSolver = new b2ContactSolver();
    this.m_island = new b2Island();
    // Fix: m_flags was never initialized, so Step/IsLocked relied on
    // JS coercion of undefined in bitwise ops. Explicit 0 is
    // behavior-identical and makes the flag arithmetic sound.
    this.m_flags = 0;
    this.m_allowSleep = doSleep;
    this.m_gravity = gravity;
    this.m_contactManager.m_world = this;
    // Every world carries a static ground body that joints can attach to.
    var bd = new b2BodyDef();
    this.m_groundBody = this.CreateBody(bd);
};
// Static scratch objects shared by all worlds to avoid per-step allocation.
b2World.s_timestep2 = new b2TimeStep();
b2World.s_xf = new b2Transform();
b2World.s_backupA = new b2Sweep();
b2World.s_backupB = new b2Sweep();
b2World.s_timestep = new b2TimeStep();
b2World.s_queue = [];
b2World.s_jointColor = new b2Color(0.5, 0.8, 0.8);
// Flag bits stored in a world's m_flags field.
b2World.e_newFixture = 0x0001;
b2World.e_locked = 0x0002;
// NOTE(review): this assigns `constructor` on the b2World function
// object itself, not on b2World.prototype — presumably the prototype
// was intended; confirm before changing.
b2World.constructor = b2World;
b2World.prototype = {
m_stack: null,
m_contactManager: null,
m_contactSolver: null,
m_island: null,
m_destructionListener: null,
m_debugDraw: null,
m_bodyList: null,
m_contactList: null,
m_jointList: null,
m_controllerList: null,
m_bodyCount: 0,
m_contactCount: 0,
m_jointCount: 0,
m_controllerCount: 0,
m_allowSleep: null,
m_gravity: null,
m_inv_dt0: 0.0,
m_groundBody: null,
m_warmStarting: true,
m_continuousPhysics: true,
/**
* SetDestructionListener
*
* @param listener
*
*/
SetDestructionListener: function (listener) {
this.m_destructionListener = listener;
},
/**
* SetContactFilter
*
* @param filter
*
*/
SetContactFilter: function (filter) {
this.m_contactManager.m_contactFilter = filter;
},
/**
* SetContactListener
*
* @param listener
*
*/
SetContactListener: function (listener) {
this.m_contactManager.m_contactListener = listener;
},
/**
* SetDebugDraw
*
* @param debugDraw
*
*/
SetDebugDraw: function (debugDraw) {
this.m_debugDraw = debugDraw;
},
/**
* SetBroadPhase
*
* @param broadPhase
*
*/
SetBroadPhase: function (broadPhase) {
var oldBroadPhase = this.m_contactManager.m_broadPhase;
this.m_contactManager.m_broadPhase = broadPhase;
for (var b = this.m_bodyList; b; b = b.m_next) {
for (var f = b.m_fixtureList; f; f = f.m_next) {
f.m_proxy = broadPhase.CreateProxy(oldBroadPhase.GetFatAABB(f.m_proxy), f);
}
}
},
/**
* Validate
*
* @param
*
*/
Validate: function () {
this.m_contactManager.m_broadPhase.Validate();
},
/**
* GetProxyCount
*
* @param
*
*/
GetProxyCount: function () {
return this.m_contactManager.m_broadPhase.GetProxyCount();
},
/**
 * CreateBody
 *
 * Creates a rigid body from a definition and links it at the head of
 * the world's body list.
 *
 * @param def b2BodyDef describing the new body
 * @returns the new b2Body, or null when the world is locked (mid-Step)
 *
 */
CreateBody: function (def) {
    if (this.IsLocked() === true) {
        return null;
    }
    var b = new b2Body(def, this);
    // Push onto the head of the doubly linked body list.
    b.m_prev = null;
    b.m_next = this.m_bodyList;
    if (this.m_bodyList) {
        this.m_bodyList.m_prev = b;
    }
    this.m_bodyList = b;
    ++this.m_bodyCount;
    return b;
},
/**
* DestroyBody
*
* @param b
*
*/
DestroyBody: function (b) {
if (this.IsLocked() === true) {
return;
}
var jn = b.m_jointList;
while (jn) {
var jn0 = jn;
jn = jn.next;
if (this.m_destructionListener) {
this.m_destructionListener.SayGoodbyeJoint(jn0.joint);
}
this.DestroyJoint(jn0.joint);
}
var coe = b.m_controllerList;
while (coe) {
var coe0 = coe;
coe = coe.nextController;
coe0.controller.RemoveBody(b);
}
var ce = b.m_contactList;
while (ce) {
var ce0 = ce;
ce = ce.next;
this.m_contactManager.Destroy(ce0.contact);
}
b.m_contactList = null;
var f = b.m_fixtureList;
while (f) {
var f0 = f;
f = f.m_next;
if (this.m_destructionListener) {
this.m_destructionListener.SayGoodbyeFixture(f0);
}
f0.DestroyProxy(this.m_contactManager.m_broadPhase);
f0.Destroy();
}
b.m_fixtureList = null;
b.m_fixtureCount = 0;
if (b.m_prev) {
b.m_prev.m_next = b.m_next;
}
if (b.m_next) {
b.m_next.m_prev = b.m_prev;
}
if (b === this.m_bodyList) {
this.m_bodyList = b.m_next;
}
--this.m_bodyCount;
},
/**
* CreateJoint
*
* @param def
*
*/
CreateJoint: function (def) {
var j = b2Joint.Create(def, null);
j.m_prev = null;
j.m_next = this.m_jointList;
if (this.m_jointList) {
this.m_jointList.m_prev = j;
}
this.m_jointList = j;
++this.m_jointCount;
j.m_edgeA.joint = j;
j.m_edgeA.other = j.m_bodyB;
j.m_edgeA.prev = null;
j.m_edgeA.next = j.m_bodyA.m_jointList;
if (j.m_bodyA.m_jointList) j.m_bodyA.m_jointList.prev = j.m_edgeA;
j.m_bodyA.m_jointList = j.m_edgeA;
j.m_edgeB.joint = j;
j.m_edgeB.other = j.m_bodyA;
j.m_edgeB.prev = null;
j.m_edgeB.next = j.m_bodyB.m_jointList;
if (j.m_bodyB.m_jointList) j.m_bodyB.m_jointList.prev = j.m_edgeB;
j.m_bodyB.m_jointList = j.m_edgeB;
var bodyA = def.bodyA,
bodyB = def.bodyB;
if (def.collideConnected === false) {
var edge = bodyB.GetContactList();
while (edge) {
if (edge.other === bodyA) {
edge.contact.FlagForFiltering();
}
edge = edge.next;
}
}
return j;
},
/**
* DestroyJoint
*
* @param j
*
*/
DestroyJoint: function (j) {
var collideConnected = j.m_collideConnected;
if (j.m_prev) {
j.m_prev.m_next = j.m_next;
}
if (j.m_next) {
j.m_next.m_prev = j.m_prev;
}
if (j === this.m_jointList) {
this.m_jointList = j.m_next;
}
var bodyA = j.m_bodyA,
bodyB = j.m_bodyB;
bodyA.SetAwake(true);
bodyB.SetAwake(true);
if (j.m_edgeA.prev) {
j.m_edgeA.prev.next = j.m_edgeA.next;
}
if (j.m_edgeA.next) {
j.m_edgeA.next.prev = j.m_edgeA.prev;
}
if (j.m_edgeA === bodyA.m_jointList) {
bodyA.m_jointList = j.m_edgeA.next;
}
j.m_edgeA.prev = null;
j.m_edgeA.next = null;
if (j.m_edgeB.prev) {
j.m_edgeB.prev.next = j.m_edgeB.next;
}
if (j.m_edgeB.next) {
j.m_edgeB.next.prev = j.m_edgeB.prev;
}
if (j.m_edgeB === bodyB.m_jointList) {
bodyB.m_jointList = j.m_edgeB.next;
}
j.m_edgeB.prev = null;
j.m_edgeB.next = null;
b2Joint.Destroy(j, null);
--this.m_jointCount;
if (collideConnected === false) {
var edge = bodyB.GetContactList();
while (edge) {
if (edge.other === bodyA) {
edge.contact.FlagForFiltering();
}
edge = edge.next;
}
}
},
/**
* AddController
*
* @param c
*
*/
AddController: function (c) {
c.m_next = this.m_controllerList;
c.m_prev = null;
this.m_controllerList = c;
c.m_world = this;
this.m_controllerCount++;
return c;
},
/**
* RemoveController
*
* @param c
*
*/
RemoveController: function (c) {
if (c.m_prev) c.m_prev.m_next = c.m_next;
if (c.m_next) c.m_next.m_prev = c.m_prev;
if (this.m_controllerList === c) this.m_controllerList = c.m_next;
this.m_controllerCount--;
},
/**
* CreateController
*
* @param controller
*
*/
CreateController: function (controller) {
if (controller.m_world !== this) throw new Error("Controller can only be a member of one world");
controller.m_next = this.m_controllerList;
controller.m_prev = null;
if (this.m_controllerList) this.m_controllerList.m_prev = controller;
this.m_controllerList = controller;
++this.m_controllerCount;
controller.m_world = this;
return controller;
},
/**
* DestroyController
*
* @param controller
*
*/
DestroyController: function (controller) {
controller.Clear();
if (controller.m_next) controller.m_next.m_prev = controller.m_prev;
if (controller.m_prev) controller.m_prev.m_next = controller.m_next;
if (controller === this.m_controllerList) this.m_controllerList = controller.m_next;
--this.m_controllerCount;
},
/**
* SetWarmStarting
*
* @param flag
*
*/
SetWarmStarting: function (flag) {
this.m_warmStarting = flag;
},
/**
* SetContinuousPhysics
*
* @param flag
*
*/
SetContinuousPhysics: function (flag) {
this.m_continuousPhysics = flag;
},
/**
* GetBodyCount
*
* @param
*
*/
GetBodyCount: function () {
return this.m_bodyCount;
},
/**
* GetJointCount
*
* @param
*
*/
GetJointCount: function () {
return this.m_jointCount;
},
/**
* GetContactCount
*
* @param
*
*/
GetContactCount: function () {
return this.m_contactCount;
},
/**
* SetGravity
*
* @param gravity
*
*/
SetGravity: function (gravity) {
this.m_gravity = gravity;
},
/**
* GetGravity
*
* @param
*
*/
GetGravity: function () {
return this.m_gravity;
},
/**
* GetGroundBody
*
* @param
*
*/
GetGroundBody: function () {
return this.m_groundBody;
},
/**
 * Step
 *
 * Advances the simulation by dt seconds: broad/narrow-phase collision,
 * island velocity/position solving, and (optionally) continuous
 * time-of-impact solving.
 *
 * @param dt timestep in seconds (0 skips integration)
 * @param velocityIterations solver iterations for velocities
 * @param positionIterations solver iterations for positions
 *
 */
Step: function (dt, velocityIterations, positionIterations) {
    dt = dt || 0;
    velocityIterations = velocityIterations || 0;
    positionIterations = positionIterations || 0;
    // Fixtures created since the last step need broad-phase pairs.
    if (this.m_flags & b2World.e_newFixture) {
        this.m_contactManager.FindNewContacts();
        this.m_flags &= ~b2World.e_newFixture;
    }
    // Lock the world: structural changes are rejected while stepping
    // (see IsLocked / CreateBody / DestroyBody).
    this.m_flags |= b2World.e_locked;
    var step = b2World.s_timestep2;
    step.dt = dt;
    step.velocityIterations = velocityIterations;
    step.positionIterations = positionIterations;
    if (dt > 0.0) {
        step.inv_dt = 1.0 / dt;
    }
    else {
        step.inv_dt = 0.0;
    }
    // dtRatio lets the solver rescale warm-start impulses when dt varies.
    step.dtRatio = this.m_inv_dt0 * dt;
    step.warmStarting = this.m_warmStarting;
    // Narrow phase: update contact manifolds.
    this.m_contactManager.Collide();
    if (step.dt > 0.0) {
        this.Solve(step);
    }
    // Continuous physics: handle time-of-impact events for fast bodies.
    if (this.m_continuousPhysics && step.dt > 0.0) {
        this.SolveTOI(step);
    }
    if (step.dt > 0.0) {
        this.m_inv_dt0 = step.inv_dt;
    }
    this.m_flags &= ~b2World.e_locked;
},
/**
* ClearForces
*
* @param
*
*/
ClearForces: function () {
for (var body = this.m_bodyList; body; body = body.m_next) {
body.m_force.SetZero();
body.m_torque = 0.0;
}
},
/**
* DrawDebugData
*
* @param
*
*/
DrawDebugData: function () {
if (this.m_debugDraw == null) {
return;
}
this.m_debugDraw.m_sprite.graphics.clear();
var flags = this.m_debugDraw.GetFlags(),
i = 0,
b,
f,
s,
j,
bp,
invQ = new b2Vec2,
x1 = new b2Vec2,
x2 = new b2Vec2,
xf,
b1 = new b2AABB(),
b2 = new b2AABB(),
vs = [new b2Vec2(0, 0), new b2Vec2(0, 0), new b2Vec2(0, 0), new b2Vec2(0, 0)],
color = new b2Color(0, 0, 0);
if (flags & b2DebugDraw.e_shapeBit) {
for (b = this.m_bodyList;
b; b = b.m_next) {
xf = b.m_xf;
for (f = b.GetFixtureList();
f; f = f.m_next) {
s = f.GetShape();
if (b.IsActive() === false) {
color.Set(0.5, 0.5, 0.3);
this.DrawShape(s, xf, color);
}
else if (b.GetType() === b2Body.b2_staticBody) {
color.Set(0.5, 0.9, 0.5);
this.DrawShape(s, xf, color);
}
else if (b.GetType() === b2Body.b2_kinematicBody) {
color.Set(0.5, 0.5, 0.9);
this.DrawShape(s, xf, color);
}
else if (b.IsAwake() === false) {
color.Set(0.6, 0.6, 0.6);
this.DrawShape(s, xf, color);
}
else {
color.Set(0.9, 0.7, 0.7);
this.DrawShape(s, xf, color);
}
}
}
}
if (flags & b2DebugDraw.e_jointBit) {
for (j = this.m_jointList;
j; j = j.m_next) {
this.DrawJoint(j);
}
}
if (flags & b2DebugDraw.e_controllerBit) {
for (var c = this.m_controllerList; c; c = c.m_next) {
c.Draw(this.m_debugDraw);
}
}
if (flags & b2DebugDraw.e_pairBit) {
color.Set(0.3, 0.9, 0.9);
for (var contact = this.m_contactManager.m_contactList; contact; contact = contact.GetNext()) {
var fixtureA = contact.GetFixtureA(),
fixtureB = contact.GetFixtureB(),
cA = fixtureA.GetAABB().GetCenter(),
cB = fixtureB.GetAABB().GetCenter();
this.m_debugDraw.DrawSegment(cA, cB, color);
}
}
if (flags & b2DebugDraw.e_aabbBit) {
bp = this.m_contactManager.m_broadPhase;
vs = [new b2Vec2(0, 0), new b2Vec2(0, 0), new b2Vec2(0, 0), new b2Vec2(0, 0)];
for (b = this.m_bodyList;
b; b = b.GetNext()) {
if (b.IsActive() === false) {
continue;
}
for (f = b.GetFixtureList();
f; f = f.GetNext()) {
var aabb = bp.GetFatAABB(f.m_proxy);
vs[0].Set(aabb.lowerBound.x, aabb.lowerBound.y);
vs[1].Set(aabb.upperBound.x, aabb.lowerBound.y);
vs[2].Set(aabb.upperBound.x, aabb.upperBound.y);
vs[3].Set(aabb.lowerBound.x, aabb.upperBound.y);
this.m_debugDraw.DrawPolygon(vs, 4, color);
}
}
}
if (flags & b2DebugDraw.e_centerOfMassBit) {
for (b = this.m_bodyList;
b; b = b.m_next) {
xf = b2World.s_xf;
xf.R = b.m_xf.R;
xf.position = b.GetWorldCenter();
this.m_debugDraw.DrawTransform(xf);
}
}
},
/**
* QueryAABB
*
* @param callback
* @param aabb
*
*/
QueryAABB: function (callback, aabb) {
var __this = this,
broadPhase = __this.m_contactManager.m_broadPhase;
function WorldQueryWrapper(proxy) {
return callback(broadPhase.GetUserData(proxy));
}
broadPhase.Query(WorldQueryWrapper, aabb);
},
/**
* QueryShape
*
* @param callback
* @param shape
* @param transform
*
*/
QueryShape: function (callback, shape, transform) {
var __this = this;
transform = transform || null;
if (transform == null) {
transform = new b2Transform();
transform.SetIdentity();
}
var broadPhase = __this.m_contactManager.m_broadPhase;
function WorldQueryWrapper(proxy) {
var fixture = (broadPhase.GetUserData(proxy) instanceof b2Fixture ? broadPhase.GetUserData(proxy) : null);
if (b2Shape.TestOverlap(shape, transform, fixture.GetShape(), fixture.GetBody().GetTransform())) return callback(fixture);
return true;
}
var aabb = new b2AABB();
shape.ComputeAABB(aabb, transform);
broadPhase.Query(WorldQueryWrapper, aabb);
},
/**
* QueryPoint
*
* @param callback
* @param p
*
*/
QueryPoint: function (callback, p) {
var __this = this,
broadPhase = __this.m_contactManager.m_broadPhase;
function WorldQueryWrapper(proxy) {
var fixture = (broadPhase.GetUserData(proxy) instanceof b2Fixture ? broadPhase.GetUserData(proxy) : null);
if (fixture.TestPoint(p)) return callback(fixture);
return true;
}
var aabb = new b2AABB();
aabb.lowerBound.Set(p.x - b2Settings.b2_linearSlop, p.y - b2Settings.b2_linearSlop);
aabb.upperBound.Set(p.x + b2Settings.b2_linearSlop, p.y + b2Settings.b2_linearSlop);
broadPhase.Query(WorldQueryWrapper, aabb);
},
/**
* RayCast
*
* @param callback
* @param point1
* @param point2
*
*/
RayCast: function (callback, point1, point2) {
var __this = this,
broadPhase = __this.m_contactManager.m_broadPhase,
output = new b2RayCastOutput;
function RayCastWrapper(input, proxy) {
var userData = broadPhase.GetUserData(proxy),
fixture = (userData instanceof b2Fixture ? userData : null),
hit = fixture.RayCast(output, input);
if (hit) {
var fraction = output.fraction,
point = new b2Vec2((1.0 - fraction) * point1.x + fraction * point2.x, (1.0 - fraction) * point1.y + fraction * point2.y);
return callback(fixture, point, output.normal, fraction);
}
return input.maxFraction;
}
var input = new b2RayCastInput(point1, point2);
broadPhase.RayCast(RayCastWrapper, input);
},
/**
* RayCastOne
*
* @param point1
* @param point2
*
*/
RayCastOne: function (point1, point2) {
var __this = this,
result;
function RayCastOneWrapper(fixture, point, normal, fraction) {
fraction = fraction || 0;
result = fixture;
return fraction;
}
__this.RayCast(RayCastOneWrapper, point1, point2);
return result;
},
/**
* RayCastAll
*
* @param point1
* @param point2
*
*/
RayCastAll: function (point1, point2) {
var __this = this,
result = [];
function RayCastAllWrapper(fixture, point, normal, fraction) {
fraction = fraction || 0;
result[result.length] = fixture;
return 1;
}
__this.RayCast(RayCastAllWrapper, point1, point2);
return result;
},
/**
* GetBodyList
*
* @param
*
*/
GetBodyList: function () {
return this.m_bodyList;
},
/**
* GetJointList
*
* @param
*
*/
GetJointList: function () {
return this.m_jointList;
},
/**
* GetContactList
*
* @param
*
*/
GetContactList: function () {
return this.m_contactList;
},
/**
* IsLocked
*
* @param
*
*/
IsLocked: function () {
return (this.m_flags & b2World.e_locked) > 0;
},
/**
* Solve
*
* @param step
*
*/
Solve: function (step) {
var b;
for (var controller = this.m_controllerList; controller; controller = controller.m_next) {
controller.Step(step);
}
var island = this.m_island;
island.Initialize(this.m_bodyCount, this.m_contactCount, this.m_jointCount, null, this.m_contactManager.m_contactListener, this.m_contactSolver);
for (b = this.m_bodyList;
b; b = b.m_next) {
b.m_flags &= ~b2Body.e_islandFlag;
}
for (var c = this.m_contactList; c; c = c.m_next) {
c.m_flags &= ~b2Contact.e_islandFlag;
}
for (var j = this.m_jointList; j; j = j.m_next) {
j.m_islandFlag = false;
}
var stackSize = this.m_bodyCount,
stack = this.m_stack;
for (var seed = this.m_bodyList; seed; seed = seed.m_next) {
if (seed.m_flags & b2Body.e_islandFlag) {
continue;
}
if (seed.IsAwake() === false || seed.IsActive() === false) {
continue;
}
if (seed.GetType() === b2Body.b2_staticBody) {
continue;
}
island.Clear();
var stackCount = 0;
stack[stackCount++] = seed;
seed.m_flags |= b2Body.e_islandFlag;
while (stackCount > 0) {
b = stack[--stackCount];
island.AddBody(b);
if (b.IsAwake() === false) {
b.SetAwake(true);
}
if (b.GetType() === b2Body.b2_staticBody) {
continue;
}
var other;
for (var ce = b.m_contactList; ce; ce = ce.next) {
if (ce.contact.m_flags & b2Contact.e_islandFlag) {
continue;
}
if (ce.contact.IsSensor() === true || ce.contact.IsEnabled() === false || ce.contact.IsTouching() === false) {
continue;
}
island.AddContact(ce.contact);
ce.contact.m_flags |= b2Contact.e_islandFlag;
other = ce.other;
if (other.m_flags & b2Body.e_islandFlag) {
continue;
}
stack[stackCount++] = other;
other.m_flags |= b2Body.e_islandFlag;
}
for (var jn = b.m_jointList; jn; jn = jn.next) {
if (jn.joint.m_islandFlag === true) {
continue;
}
other = jn.other;
if (other.IsActive() === false) {
continue;
}
island.AddJoint(jn.joint);
jn.joint.m_islandFlag = true;
if (other.m_flags & b2Body.e_islandFlag) {
continue;
}
stack[stackCount++] = other;
other.m_flags |= b2Body.e_islandFlag;
}
}
island.Solve(step, this.m_gravity, this.m_allowSleep);
for (var i = 0; i < island.m_bodyCount; ++i) {
b = island.m_bodies[i];
if (b.GetType() === b2Body.b2_staticBody) {
b.m_flags &= ~b2Body.e_islandFlag;
}
}
}
for (i = 0; i < stack.length; ++i) {
if (!stack[i]) break;
stack[i] = null;
}
for (b = this.m_bodyList; b; b = b.m_next) {
if (b.IsAwake() === false || b.IsActive() === false) {
continue;
}
if (b.GetType() === b2Body.b2_staticBody) {
continue;
}
b.SynchronizeFixtures();
}
this.m_contactManager.FindNewContacts();
},
/**
* SolveTOI
*
* @param step
*
*/
SolveTOI: function (step) {
var b,
fA,
fB,
bA,
bB,
cEdge,
j,
island = this.m_island;
island.Initialize(this.m_bodyCount, b2Settings.b2_maxTOIContactsPerIsland, b2Settings.b2_maxTOIJointsPerIsland, null, this.m_contactManager.m_contactListener, this.m_contactSolver);
var queue = b2World.s_queue;
for (b = this.m_bodyList; b; b = b.m_next) {
b.m_flags &= ~b2Body.e_islandFlag;
b.m_sweep.t0 = 0.0;
}
var c;
for (c = this.m_contactList; c; c = c.m_next) {
c.m_flags &= ~(b2Contact.e_toiFlag | b2Contact.e_islandFlag);
}
for (j = this.m_jointList; j; j = j.m_next) {
j.m_islandFlag = false;
}
for (; ;) {
var minContact = null,
minTOI = 1.0;
for (c = this.m_contactList; c; c = c.m_next) {
if (c.IsSensor() === true || c.IsEnabled() === false || c.IsContinuous() === false) {
continue;
}
var toi = 1.0;
if (c.m_flags & b2Contact.e_toiFlag) {
toi = c.m_toi;
}
else {
fA = c.m_fixtureA;
fB = c.m_fixtureB;
bA = fA.m_body;
bB = fB.m_body;
if ((bA.GetType() !== b2Body.b2_dynamicBody || bA.IsAwake() === false) && (bB.GetType() !== b2Body.b2_dynamicBody || bB.IsAwake() === false)) {
continue;
}
var t0 = bA.m_sweep.t0;
if (bA.m_sweep.t0 < bB.m_sweep.t0) {
t0 = bB.m_sweep.t0;
bA.m_sweep.Advance(t0);
}
else if (bB.m_sweep.t0 < bA.m_sweep.t0) {
t0 = bA.m_sweep.t0;
bB.m_sweep.Advance(t0);
}
toi = c.ComputeTOI(bA.m_sweep, bB.m_sweep);
b2Assert(0.0 <= toi && toi <= 1.0);
if (toi > 0.0 && toi < 1.0) {
toi = (1.0 - toi) * t0 + toi;
if (toi > 1) toi = 1;
}
c.m_toi = toi;
c.m_flags |= b2Contact.e_toiFlag;
}
if (b2Settings.b2_epsilon < toi && toi < minTOI) {
minContact = c;
minTOI = toi;
}
}
if (minContact == null || 1.0 - 100.0 * b2Settings.b2_epsilon < minTOI) {
break;
}
fA = minContact.m_fixtureA;
fB = minContact.m_fixtureB;
bA = fA.m_body;
bB = fB.m_body;
b2World.s_backupA.Set(bA.m_sweep);
b2World.s_backupB.Set(bB.m_sweep);
bA.Advance(minTOI);
bB.Advance(minTOI);
minContact.Update(this.m_contactManager.m_contactListener);
minContact.m_flags &= ~b2Contact.e_toiFlag;
if (minContact.IsSensor() === true || minContact.IsEnabled() === false) {
bA.m_sweep.Set(b2World.s_backupA);
bB.m_sweep.Set(b2World.s_backupB);
bA.SynchronizeTransform();
bB.SynchronizeTransform();
continue;
}
if (minContact.IsTouching() === false) {
continue;
}
var seed = bA;
if (seed.GetType() !== b2Body.b2_dynamicBody) {
seed = bB;
}
island.Clear();
var queueStart = 0,
queueSize = 0;
queue[queueStart + queueSize++] = seed;
seed.m_flags |= b2Body.e_islandFlag;
while (queueSize > 0) {
b = queue[queueStart++];
--queueSize;
island.AddBody(b);
if (b.IsAwake() === false) {
b.SetAwake(true);
}
if (b.GetType() !== b2Body.b2_dynamicBody) {
continue;
}
for (cEdge = b.m_contactList;
cEdge; cEdge = cEdge.next) {
if (island.m_contactCount === island.m_contactCapacity) {
break;
}
if (cEdge.contact.m_flags & b2Contact.e_islandFlag) {
continue;
}
if (cEdge.contact.IsSensor() === true || cEdge.contact.IsEnabled() === false || cEdge.contact.IsTouching() === false) {
continue;
}
island.AddContact(cEdge.contact);
cEdge.contact.m_flags |= b2Contact.e_islandFlag;
var other = cEdge.other;
if (other.m_flags & b2Body.e_islandFlag) {
continue;
}
if (other.GetType() !== b2Body.b2_staticBody) {
other.Advance(minTOI);
other.SetAwake(true);
}
queue[queueStart + queueSize] = other;
++queueSize;
other.m_flags |= b2Body.e_islandFlag;
}
for (var jEdge = b.m_jointList; jEdge; jEdge = jEdge.next) {
if (island.m_jointCount === island.m_jointCapacity) continue;
if (jEdge.joint.m_islandFlag === true) continue;
other = jEdge.other;
if (other.IsActive() === false) {
continue;
}
island.AddJoint(jEdge.joint);
jEdge.joint.m_islandFlag = true;
if (other.m_flags & b2Body.e_islandFlag) continue;
if (other.GetType() !== b2Body.b2_staticBody) {
other.Advance(minTOI);
other.SetAwake(true);
}
queue[queueStart + queueSize] = other;
++queueSize;
other.m_flags |= b2Body.e_islandFlag;
}
}
var subStep = b2World.s_timestep;
subStep.warmStarting = false;
subStep.dt = (1.0 - minTOI) * step.dt;
subStep.inv_dt = 1.0 / subStep.dt;
subStep.dtRatio = 0.0;
subStep.velocityIterations = step.velocityIterations;
subStep.positionIterations = step.positionIterations;
island.SolveTOI(subStep);
var i = 0;
for (i = 0; i < island.m_bodyCount; ++i) {
b = island.m_bodies[i];
b.m_flags &= ~b2Body.e_islandFlag;
if (b.IsAwake() === false) {
continue;
}
if (b.GetType() !== b2Body.b2_dynamicBody) {
continue;
}
b.SynchronizeFixtures();
for (cEdge = b.m_contactList;
cEdge; cEdge = cEdge.next) {
cEdge.contact.m_flags &= ~b2Contact.e_toiFlag;
}
}
for (i = 0; i < island.m_contactCount; ++i) {
c = island.m_contacts[i];
c.m_flags &= ~(b2Contact.e_toiFlag | b2Contact.e_islandFlag);
}
for (i = 0; i < island.m_jointCount; ++i) {
j = island.m_joints[i];
j.m_islandFlag = false;
}
this.m_contactManager.FindNewContacts();
}
},
/**
* DrawJoint
*
* @param joint
*
*/
DrawJoint: function (joint) {
var b1 = joint.GetBodyA(),
b2 = joint.GetBodyB(),
xf1 = b1.m_xf,
xf2 = b2.m_xf,
x1 = xf1.position,
x2 = xf2.position,
p1 = joint.GetAnchorA(),
p2 = joint.GetAnchorB(),
color = b2World.s_jointColor;
switch (joint.m_type) {
case b2Joint.e_distanceJoint:
this.m_debugDraw.DrawSegment(p1, p2, color);
break;
case b2Joint.e_pulleyJoint:
{
var pulley = ((joint instanceof b2PulleyJoint ? joint : null)),
s1 = pulley.GetGroundAnchorA(),
s2 = pulley.GetGroundAnchorB();
this.m_debugDraw.DrawSegment(s1, p1, color);
this.m_debugDraw.DrawSegment(s2, p2, color);
this.m_debugDraw.DrawSegment(s1, s2, color);
}
break;
case b2Joint.e_mouseJoint:
this.m_debugDraw.DrawSegment(p1, p2, color);
break;
default:
if (b1 !== this.m_groundBody) this.m_debugDraw.DrawSegment(x1, p1, color);
this.m_debugDraw.DrawSegment(p1, p2, color);
if (b2 !== this.m_groundBody) this.m_debugDraw.DrawSegment(x2, p2, color);
}
},
/**
* DrawShape
*
* @param shape
* @param xf
* @param color
*
*/
DrawShape: function (shape, xf, color) {
switch (shape.m_type) {
case b2Shape.e_circleShape:
{
var circle = ((shape instanceof b2CircleShape ? shape : null)),
center = b2Math.MulX(xf, circle.m_p),
radius = circle.m_radius,
axis = xf.R.col1;
this.m_debugDraw.DrawSolidCircle(center, radius, axis, color);
}
break;
case b2Shape.e_polygonShape:
{
var i = 0,
poly = ((shape instanceof b2PolygonShape ? shape : null)),
vertexCount = poly.GetVertexCount(),
localVertices = poly.GetVertices(),
vertices = [];
for (i = 0; i < vertexCount; ++i) {
vertices.push(b2Math.MulX(xf, localVertices[i]));
}
this.m_debugDraw.DrawSolidPolygon(vertices, vertexCount, color);
}
break;
case b2Shape.e_edgeShape:
{
var edge = (shape instanceof b2EdgeShape ? shape : null);
this.m_debugDraw.DrawSegment(b2Math.MulX(xf, edge.GetVertex1()), b2Math.MulX(xf, edge.GetVertex2()), color);
}
break;
}
}
}
|
mit
|
mz121star/Crawler-chrome
|
js/plugins.js
|
734
|
// Avoid `console` errors in browsers that lack a console.
(function() {
    // Stand-in that silently swallows all arguments.
    var noop = function () {};
    var methods = [
        'assert', 'clear', 'count', 'debug', 'dir', 'dirxml', 'error',
        'exception', 'group', 'groupCollapsed', 'groupEnd', 'info', 'log',
        'markTimeline', 'profile', 'profileEnd', 'table', 'time', 'timeEnd',
        'timeStamp', 'trace', 'warn'
    ];
    var console = (window.console = window.console || {});
    for (var i = methods.length - 1; i >= 0; i--) {
        var method = methods[i];
        // Only stub methods the host browser does not already provide.
        if (!console[method]) {
            console[method] = noop;
        }
    }
}());
// Place any jQuery/helper plugins in here.
|
mit
|
maurer/tiamat
|
samples/Juliet/testcases/CWE590_Free_Memory_Not_on_Heap/s01/CWE590_Free_Memory_Not_on_Heap__delete_array_long_static_82a.cpp
|
2989
|
/* TEMPLATE GENERATED TESTCASE FILE
Filename: CWE590_Free_Memory_Not_on_Heap__delete_array_long_static_82a.cpp
Label Definition File: CWE590_Free_Memory_Not_on_Heap__delete_array.label.xml
Template File: sources-sink-82a.tmpl.cpp
*/
/*
* @description
* CWE: 590 Free Memory Not on Heap
* BadSource: static Data buffer is declared static on the stack
* GoodSource: Allocate memory on the heap
* Sinks:
* BadSink : Print then free data
* Flow Variant: 82 Data flow: data passed in a parameter to an virtual method called via a pointer
*
* */
#include "std_testcase.h"
#include "CWE590_Free_Memory_Not_on_Heap__delete_array_long_static_82.h"
namespace CWE590_Free_Memory_Not_on_Heap__delete_array_long_static_82
{
#ifndef OMITBAD
/* CWE-590 "bad" variant: data ends up pointing at a static (non-heap)
 * buffer, which the BadSink later releases with delete[] -- undefined
 * behavior. Do not "fix" this: the flaw is the point of the testcase. */
void bad()
{
    long * data;
    data = NULL; /* Initialize data */
    {
        /* FLAW: data is allocated on the stack and deallocated in the BadSink */
        static long dataBuffer[100];
        {
            size_t i;
            for (i = 0; i < 100; i++)
            {
                dataBuffer[i] = 5L;
            }
        }
        data = dataBuffer;
    }
    /* Flow variant 82: dispatch through a base-class pointer so analysis
     * tools must follow the virtual call to reach the sink. */
    CWE590_Free_Memory_Not_on_Heap__delete_array_long_static_82_base* baseObject = new CWE590_Free_Memory_Not_on_Heap__delete_array_long_static_82_bad;
    baseObject->action(data);
    delete baseObject;
}
#endif /* OMITBAD */
#ifndef OMITGOOD
/* goodG2B uses the GoodSource with the BadSink */
static void goodG2B()
{
    long * data;
    data = NULL; /* Initialize data */
    {
        /* FIX: data is allocated on the heap and deallocated in the BadSink */
        long * dataBuffer = new long[100];
        {
            size_t i;
            for (i = 0; i < 100; i++)
            {
                dataBuffer[i] = 5L;
            }
        }
        data = dataBuffer;
    }
    CWE590_Free_Memory_Not_on_Heap__delete_array_long_static_82_base* baseObject = new CWE590_Free_Memory_Not_on_Heap__delete_array_long_static_82_goodG2B;
    baseObject->action(data);
    delete baseObject;
}
/* good() runs every good variant defined in this file. */
void good()
{
    goodG2B();
}
#endif /* OMITGOOD */
} /* close namespace */
/* Below is the main(). It is only used when building this testcase on
its own for testing or for building a binary to use in testing binary
analysis tools. It is not used when compiling all the testcases as one
application, which is how source code analysis tools are tested. */
#ifdef INCLUDEMAIN
using namespace CWE590_Free_Memory_Not_on_Heap__delete_array_long_static_82; /* so that we can use good and bad easily */
/* Standalone driver used only when a testcase is built as its own binary
 * (e.g. for binary-analysis tools); source-analysis builds omit it. */
int main(int argc, char * argv[])
{
    /* seed randomness */
    srand( (unsigned)time(NULL) );
#ifndef OMITGOOD
    printLine("Calling good()...");
    good();
    printLine("Finished good()");
#endif /* OMITGOOD */
#ifndef OMITBAD
    printLine("Calling bad()...");
    bad();
    printLine("Finished bad()");
#endif /* OMITBAD */
    return 0;
}
#endif
|
mit
|
zhenkyle/shadowsocks_ruby
|
spec/integration/encrypted_tls_ticket_tunnel_spec.rb
|
1532
|
require "spec_helper"
require 'evented-spec'
require 'em-http'
RSpec.describe "encrypted tunnel proxy server" do
  include EventedSpec::SpecHelper
  # Generous timeout: the test makes a real HTTP round trip to example.com
  # through two locally chained proxies inside the EventMachine reactor.
  default_timeout 30
  it "should be act like a proxy server" do
    em do
      # Backend side: unwraps tls_ticket + iv_cipher framing, speaks the
      # shadowsocks protocol, and forwards as plain TCP upstream.
      stack3 = ShadowsocksRuby::Protocols::ProtocolStack.new([
        ["shadowsocks", {}],
        ["iv_cipher", {}],
        ["tls_ticket", {}]
      ], "aes-256-cfb", "secret")
      stack4 = ShadowsocksRuby::Protocols::ProtocolStack.new([
        ["plain", {}]
      ], "aes-256-cfb", "secret")
      server_args = [stack3, {}, stack4, {} ]
      EventMachine.start_server '127.0.0.1', 8388, ShadowsocksRuby::Connections::TCP::LocalBackendConnection, *server_args
      # Client side: accepts SOCKS5 locally on 10800 and tunnels to the
      # backend started above, applying the same cipher/obfuscation stack.
      stack1 = ShadowsocksRuby::Protocols::ProtocolStack.new([
        ["socks5", {}]
      ], "aes-256-cfb", "secret")
      stack2 = ShadowsocksRuby::Protocols::ProtocolStack.new([
        ["shadowsocks", {}],
        ["iv_cipher", {}],
        ["tls_ticket", {:host => '127.0.0.1'}]
      ], "aes-256-cfb", "secret")
      local_args = [stack1, {:host => '127.0.0.1', :port => 8388}, stack2, {} ]
      EventMachine.start_server '127.0.0.1', 10800, ShadowsocksRuby::Connections::TCP::ClientConnection, *local_args
      # Drive an HTTP GET through the SOCKS5 entry point; a 200 status
      # proves the whole encrypted chain works end to end.
      connection_opts = {:proxy => {:host => '127.0.0.1', :port => 10800, :type => :socks5 }}
      http = EventMachine::HttpRequest.new('http://example.com/', connection_opts).get
      http.callback {
        expect(http.response_header.status).to eq(200)
        done
      }
    end
  end
end
|
mit
|
programmerpeter/OpenBazaar-Client
|
js/views/storeWizardVw.js
|
7814
|
var __ = require('underscore'),
Backbone = require('backbone'),
$ = require('jquery'),
is = require('is_js'),
loadTemplate = require('../utils/loadTemplate'),
saveToAPI = require('../utils/saveToAPI'),
MediumEditor = require('medium-editor'),
validateMediumEditor = require('../utils/validateMediumEditor'),
Taggle = require('taggle'),
userShortView = require('./userShortVw'),
userShortModel = require('../models/userShortMd');
module.exports = Backbone.View.extend({
classname: "storeWizard",
events: {
'click .js-storeWizardModal': 'blockClicks',
'click .js-closeStoreWizardModal': 'closeWizard',
'click .js-storeWizardSave': 'saveWizard',
'click .js-accordionNext': 'validateDescription',
'blur input': 'validateInput',
'blur textarea': 'validateInput'
},
  // Set up the wizard view: caches options, precomputes the header image URL
  // when one exists, subscribes to socket messages, and renders immediately.
  initialize: function(options) {
    "use strict";
    this.options = options || {};
    this.parentEl = $(options.parentEl);
    this.socketView = options.socketView;
    // If the profile already has a header image, expose its URL to the template.
    if(this.model.get('page').profile.header_hash){
      this.model.set('headerURL', this.model.get('user').serverUrl+"get_image?hash="+this.model.get('page').profile.header_hash);
    }
    this.listenTo(window.obEventBus, "socketMessageReceived", function(response){
      this.handleSocketMessage(response);
    });
    // Random token used to match incoming moderator replies to this instance.
    this.socketModeratorID = Math.random().toString(36).slice(2);
    this.moderatorCount = 0;
    this.render();
  },
  // Turn the element at `targ` into a horizontally sliding accordion: panes
  // sit side by side inside a wide "window" whose left offset is shifted by
  // one pane width per next/prev click. Next only advances when the wizard
  // form currently passes HTML5 validation.
  initAccordion: function(targ){
    "use strict";
    var acc = $(targ),
        accWidth = acc.width(),
        accHeight = acc.height(),
        accChildren = acc.find('.accordion-child'),
        accNum = accChildren.length,
        accWin = acc.find('.accordion-window');
    accWin.css({'left':0, 'width': function(){return accWidth * accNum;}});
    accChildren.css({'width':accWidth, 'height':accHeight});
    acc.find('.js-accordionNext').on('click', function(){
      var oldPos = accWin.css('left').replace("px","");
      if($('#storeWizardForm')[0].checkValidity()) {
        // Stop once the last pane is fully in view.
        if (oldPos > (accWidth * accNum * -1 + accWidth)) {
          accWin.css('left', function () {
            return parseInt(accWin.css('left').replace("px", "")) - accWidth;
          });
          // switch active tab
          var curActive = acc.find('.accordion-active');
          curActive.addClass('accordion-inactive').removeClass('accordion-active');
          var newActive = curActive.next('.accordion-child');
          newActive.addClass('accordion-active').removeClass('accordion-inactive');
          // focus search input
          newActive.find('input:visible:first').focus();
        }
      }
    });
    acc.find('.js-accordionPrev').on('click', function(){
      var oldPos = accWin.css('left').replace("px","");
      // Never slide past the first pane.
      if(oldPos < (0)){
        accWin.css('left', function(){
          return parseInt(accWin.css('left').replace("px","")) + accWidth;
        });
        // switch active tab
        var curActive = acc.find('.accordion-active');
        curActive.addClass('accordion-inactive').removeClass('accordion-active');
        var newActive = curActive.prev('.accordion-child');
        newActive.addClass('accordion-active').removeClass('accordion-inactive');
        // focus search input
        newActive.find('input:visible:first').focus();
      }
    });
  },
  // Load the wizard template, mount it into parentEl, wire up the accordion,
  // request moderators over the socket, and attach a MediumEditor to the
  // description field.
  render: function() {
    "use strict";
    var self = this;
    loadTemplate('./js/templates/storeWizard.html', function(loadedTemplate) {
      self.$el.html(loadedTemplate(self.model.toJSON()));
      //append the view to the passed in parent
      self.parentEl.append(self.$el);
      self.initAccordion('.js-storeWizardAccordion');
      self.setValues();
      // add blur to container
      $('#obContainer').addClass('blur');
      // fade the modal in after it loads and focus the input
      self.$el.find('.js-storeWizardModal').removeClass('fadeOut');
      self.$el.find('#storeNameInput').focus();
      // Ask connected peers for moderators; replies arrive asynchronously
      // via handleSocketMessage, keyed by socketModeratorID.
      self.socketView.getModerators(self.socketModeratorID);
      var editor = new MediumEditor('#aboutInput', {
        placeholder: {
          text: ''
        },
        toolbar: {
          imageDragging: false
        },
        paste: {
          cleanPastedHTML: false,
          forcePlainText: false
        }
      });
      // Re-validate the rich-text description whenever the editor loses focus.
      editor.subscribe('blur', self.validateDescription);
    });
  },
  // Validate the MediumEditor-backed description field (looked up by id, so
  // this also works when invoked as a detached editor callback).
  validateDescription: function(e) {
    validateMediumEditor.checkVal($('#aboutInput'));
  },
  // Pre-fill form controls from the model and activate the tag input.
  setValues: function() {
    "use strict";
    var self = this;
    this.$el.find('#locationSelect').val(this.model.get('user').country);
    //activate tags plugin
    // Comma (188), tab (9), enter (13) and space (32) all commit a tag.
    this.categoriesInput = new Taggle('categoriesInput', {
      submitKeys: [188, 9, 13, 32],
      preserveCase: true,
      saveOnBlur: true
    });
  },
handleSocketMessage: function(response) {
"use strict";
var data = JSON.parse(response.data);
if(data.id == this.socketModeratorID && data.moderator.guid != this.model.get('user').guid && this.model.get('user').blocked_guids.indexOf(data.moderator.guid) == -1){
this.renderModerator(data.moderator);
}
},
  // Decorate the raw moderator payload with view flags and URLs, then append
  // a userShort subview for it to the moderator list.
  renderModerator: function(moderator){
    "use strict";
    var self = this;
    moderator.serverUrl = this.model.get('user').serverUrl;
    moderator.userID = moderator.guid;
    moderator.avatarURL = this.model.get('user').serverUrl + "get_image?hash=" + moderator.avatar_hash + "&guid=" + moderator.guid;
    moderator.isModerator = true; //flag for template
    moderator.micro = true; //flag for template
    moderator.userCount = this.moderatorCount;
    var newModModel = new userShortModel(moderator);
    var modShort = new userShortView({model: newModModel});
    this.$el.find('.js-storeWizardModeratorList').append(modShort.el);
    this.moderatorCount++;
  },
blockClicks: function(e) {
"use strict";
if(!$(e.target).hasClass('js-externalLink')){
e.stopPropagation();
}
},
  // Close-button handler; delegates to close() which also removes the blur.
  closeWizard: function() {
    "use strict";
    this.close();
  },
validateInput: function(e) {
"use strict";
e.target.checkValidity();
$(e.target).closest('.flexRow').addClass('formChecked');
},
  // Persist the wizard: collect form values, checked moderators and the
  // profile colors, then save profile followed by settings via the API.
  // Fires 'storeCreated' once both saves succeed.
  saveWizard: function() {
    "use strict";
    var self = this,
        profileForm = this.$el.find('#storeWizardForm'),
        moderatorsChecked = $('.js-storeWizardModeratorList input:checked'),
        userProfile = this.model.get('page').profile,
        modList = [],
        wizData = {},
        modData = {};
    validateMediumEditor.checkVal($('#aboutInput'));
    //convert taggle tags to data in the form
    this.$el.find('#realCategoriesInput').val(this.categoriesInput.getTagValues().join(","));
    wizData.vendor = true;
    moderatorsChecked.each(function() {
      modList.push($(this).data('guid'));
    });
    // Empty string (not empty array) signals "no moderators" to the API.
    modData.moderators = modList.length > 0 ? modList : "";
    modData.name = this.model.get('page').profile.name;
    modData.location = this.model.get('page').profile.location;
    // Colors are stored as integers; strip the leading '#' and parse as hex.
    wizData.primary_color = parseInt(userProfile.primary_color.slice(1), 16);
    wizData.secondary_color = parseInt(userProfile.secondary_color.slice(1), 16);
    wizData.background_color = parseInt(userProfile.background_color.slice(1), 16);
    wizData.text_color = parseInt(userProfile.text_color.slice(1), 16);
    saveToAPI(profileForm, '', self.model.get('user').serverUrl + "profile", function(){
      saveToAPI('', self.model.get('user'), self.model.get('user').serverUrl + "settings", function(){
        window.obEventBus.trigger("updateProfile");
        window.obEventBus.trigger("updateUserModel");
        self.trigger('storeCreated');
      }, '', modData);
    }, '', wizData);
  },
  // Remove the container blur, fade the modal holder out, then detach this
  // view (Backbone's remove() also unbinds its listeners).
  close: function(){
    $('#obContainer').removeClass('blur');
    $('#modalHolder').fadeOut(300, ()=> {
      this.remove();
    });
  }
});
|
mit
|
Djamy/platform
|
src/Oro/Bundle/NavigationBundle/Tests/Unit/Menu/NavigationMostviewedBuilderTest.php
|
4599
|
<?php
namespace Oro\Bundle\NavigationBundle\Tests\Unit\Menu;
use Symfony\Bundle\FrameworkBundle\Routing\Router;
use Oro\Bundle\FeatureToggleBundle\Checker\FeatureChecker;
use Oro\Bundle\NavigationBundle\Entity\NavigationHistoryItem;
use Oro\Bundle\NavigationBundle\Menu\NavigationMostviewedBuilder;
use Oro\Bundle\OrganizationBundle\Entity\Organization;
class NavigationMostviewedBuilderTest extends \PHPUnit_Framework_TestCase
{
/**
* @var \Doctrine\ORM\EntityManager
*/
protected $em;
/**
* @var \Symfony\Component\Security\Core\SecurityContextInterface
*/
protected $securityContext;
/**
* @var NavigationMostviewedBuilder
*/
protected $builder;
/**
* @var Router
*/
protected $router;
/**
* @var FeatureChecker
*/
protected $featureChecker;
/**
* @var \Oro\Bundle\NavigationBundle\Entity\Builder\ItemFactory
*/
protected $factory;
    /**
     * Builds the system under test with mocked security context, entity
     * manager, item factory and router, and enables the 'email' feature
     * via a mocked feature checker.
     */
    protected function setUp()
    {
        $this->securityContext = $this->createMock('Symfony\Component\Security\Core\SecurityContextInterface');
        $this->em = $this->getMockBuilder('Doctrine\ORM\EntityManager')
            ->disableOriginalConstructor()
            ->getMock();
        $this->factory = $this->createMock('Oro\Bundle\NavigationBundle\Entity\Builder\ItemFactory');
        $this->router = $this->getMockBuilder('Symfony\Bundle\FrameworkBundle\Routing\Router')
            ->disableOriginalConstructor()
            ->getMock();
        $this->featureChecker = $this->getMockBuilder(FeatureChecker::class)
            ->disableOriginalConstructor()
            ->getMock();
        $this->builder = new NavigationMostviewedBuilder(
            $this->securityContext,
            $this->em,
            $this->factory,
            $this->router
        );
        $this->builder->setFeatureChecker($this->featureChecker);
        $this->builder->addFeature('email');
    }
    /**
     * Verifies build() fetches navigation items for the current user and
     * organization, ordered by visit count and capped by the configured
     * 'oro_navigation.max_items' setting.
     */
    public function testBuild()
    {
        $organization = new Organization();
        $type = 'mostviewed';
        $maxItems = 20;
        $userId = 1;
        // Minimal stand-in for a user entity: only getId() is consumed.
        $user = $this->getMockBuilder('stdClass')
            ->setMethods(array('getId'))
            ->getMock();
        $user->expects($this->once())
            ->method('getId')
            ->will($this->returnValue($userId));
        // Token supplies both the user and the organization context.
        $token = $this->getMockBuilder(
            'Oro\Bundle\SecurityBundle\Authentication\Token\UsernamePasswordOrganizationToken'
        )
            ->disableOriginalConstructor()
            ->getMock();
        $token->expects($this->once())
            ->method('getUser')
            ->will($this->returnValue($user));
        $token->expects($this->once())
            ->method('getOrganizationContext')
            ->will($this->returnValue($organization));
        $this->securityContext->expects($this->atLeastOnce())
            ->method('getToken')
            ->will($this->returnValue($token));
        $item = $this->createMock('Oro\Bundle\NavigationBundle\Entity\NavigationItemInterface');
        $this->factory->expects($this->once())
            ->method('createItem')
            ->with($type, array())
            ->will($this->returnValue($item));
        // The repository must be queried with the max-items cap and a
        // visit-count ordering.
        $repository = $this->getMockBuilder('Oro\Bundle\NavigationBundle\Entity\Repository\HistoryItemRepository')
            ->disableOriginalConstructor()
            ->getMock();
        $repository->expects($this->once())
            ->method('getNavigationItems')
            ->with(
                $userId,
                $organization,
                $type,
                array(
                    'max_items' => $maxItems,
                    'order_by' => array(array('field' => NavigationHistoryItem::NAVIGATION_HISTORY_COLUMN_VISIT_COUNT))
                )
            )
            ->will($this->returnValue(array()));
        $this->em->expects($this->once())
            ->method('getRepository')
            ->with(get_class($item))
            ->will($this->returnValue($repository));
        // System configuration provides the item cap.
        $configMock = $this->getMockBuilder('Oro\Bundle\ConfigBundle\Config\ConfigManager')
            ->disableOriginalConstructor()
            ->getMock();
        $configMock->expects($this->once())
            ->method('get')
            ->with($this->equalTo('oro_navigation.max_items'))
            ->will($this->returnValue($maxItems));
        $menu = $this->getMockBuilder('Knp\Menu\ItemInterface')->getMock();
        $this->builder->setOptions($configMock);
        $this->builder->build($menu, array(), $type);
    }
}
|
mit
|
djeik/goto
|
programs/valid/print.go
|
128
|
package main
// Compiler fixture: exercises the builtin print/println forms the language
// must accept — plain strings, escapes, embedded quotes, and a rune literal.
func main(){
	print("hello")
	println("hellO")
	print("hello\n")
	print("hello'pop")
	print('h')
}
|
mit
|
MalloyPower/parsing-python
|
front-end/testsuite-python-lib/Python-2.4/Lib/test/test_support.py
|
10506
|
"""Supporting definitions for the Python regression tests."""
if __name__ != 'test.test_support':
raise ImportError, 'test_support must be imported from the test package'
import sys
# Exception hierarchy used by regrtest and individual tests.
class Error(Exception):
    """Base class for regression test exceptions."""
class TestFailed(Error):
    """Test failed."""
class TestSkipped(Error):
    """Test skipped.
    This can be raised to indicate that a test was deliberatly
    skipped, but not because a feature wasn't available.  For
    example, if some resource can't be used, such as the network
    appears to be unavailable, this should be raised instead of
    TestFailed.
    """
class ResourceDenied(TestSkipped):
    """Test skipped because it requested a disallowed resource.
    This is raised when a test calls requires() for a resource that
    has not be enabled.  It is used to distinguish between expected
    and unexpected skips.
    """
verbose = 1 # Flag set to 0 by regrtest.py
use_resources = None # Flag set to [] by regrtest.py
# _original_stdout is meant to hold stdout at the time regrtest began.
# This may be "the real" stdout, or IDLE's emulation of stdout, or whatever.
# The point is to have some flavor of stdout the user can actually see.
_original_stdout = None
def record_original_stdout(stdout):
    """Remember the stdout that was active when regrtest started."""
    global _original_stdout
    _original_stdout = stdout
def get_original_stdout():
    """Return the recorded original stdout, or sys.stdout if none recorded."""
    return _original_stdout or sys.stdout
def unload(name):
    """Drop a module from sys.modules so the next import re-executes it.

    Names that are not currently loaded are silently ignored.
    """
    sys.modules.pop(name, None)
def forget(modname):
    '''"Forget" a module was ever imported by removing it from sys.modules and
    deleting any .pyc and .pyo files.'''
    unload(modname)
    import os
    # Remove stale bytecode from every sys.path entry so a later import
    # recompiles from source; a missing .pyc does not prevent checking .pyo.
    for dirname in sys.path:
        for extension in ('pyc', 'pyo'):
            try:
                os.unlink(os.path.join(dirname, modname + os.extsep + extension))
            except os.error:
                pass
def is_resource_enabled(resource):
    """Test whether a resource is enabled.  Known resources are set by
    regrtest.py."""
    # use_resources stays None until regrtest installs its list; in that
    # state no resource counts as enabled.
    return use_resources is not None and resource in use_resources
def requires(resource, msg=None):
    """Raise ResourceDenied if the specified resource is not available.
    If the caller's module is __main__ then automatically return True. The
    possibility of False being returned occurs when regrtest.py is executing."""
    # see if the caller's module is __main__ - if so, treat as if
    # the resource was set (frame introspection looks one level up).
    if sys._getframe().f_back.f_globals.get("__name__") == "__main__":
        return
    if not is_resource_enabled(resource):
        if msg is None:
            msg = "Use of the `%s' resource not enabled" % resource
        raise ResourceDenied(msg)
FUZZ = 1e-6
def fcmp(x, y): # fuzzy comparison function
    # Floats compare equal when they differ by less than FUZZ relative to
    # their combined magnitude; tuples/lists compare elementwise then by
    # length. Everything else falls through to plain cmp(). (Python 2 only:
    # relies on coerce() and cmp().)
    if type(x) == type(0.0) or type(y) == type(0.0):
        try:
            x, y = coerce(x, y)
            fuzz = (abs(x) + abs(y)) * FUZZ
            if abs(x-y) <= fuzz:
                return 0
        except:
            pass
    elif type(x) == type(y) and type(x) in (type(()), type([])):
        for i in range(min(len(x), len(y))):
            outcome = fcmp(x[i], y[i])
            if outcome != 0:
                return outcome
        return cmp(len(x), len(y))
    return cmp(x, y)
# Feature probes: unicode support (absent when built --disable-unicode)
# and whether we are running under Jython.
try:
    unicode
    have_unicode = 1
except NameError:
    have_unicode = 0
is_jython = sys.platform.startswith('java')
import os
# Filename used for testing
if os.name == 'java':
    # Jython disallows @ in module names
    TESTFN = '$test'
elif os.name == 'riscos':
    TESTFN = 'testfile'
else:
    TESTFN = '@test'
# Unicode name only used if TEST_FN_ENCODING exists for the platform.
if have_unicode:
# Assuming sys.getfilesystemencoding()!=sys.getdefaultencoding()
# TESTFN_UNICODE is a filename that can be encoded using the
# file system encoding, but *not* with the default (ascii) encoding
if isinstance('', unicode):
# python -U
# XXX perhaps unicode() should accept Unicode strings?
TESTFN_UNICODE = "@test-\xe0\xf2"
else:
# 2 latin characters.
TESTFN_UNICODE = unicode("@test-\xe0\xf2", "latin-1")
TESTFN_ENCODING = sys.getfilesystemencoding()
# TESTFN_UNICODE_UNENCODEABLE is a filename that should *not* be
# able to be encoded by *either* the default or filesystem encoding.
# This test really only makes sense on Windows NT platforms
# which have special Unicode support in posixmodule.
if (not hasattr(sys, "getwindowsversion") or
sys.getwindowsversion()[3] < 2): # 0=win32s or 1=9x/ME
TESTFN_UNICODE_UNENCODEABLE = None
else:
# Japanese characters (I think - from bug 846133)
TESTFN_UNICODE_UNENCODEABLE = u"@test-\u5171\u6709\u3055\u308c\u308b"
try:
# XXX - Note - should be using TESTFN_ENCODING here - but for
# Windows, "mbcs" currently always operates as if in
# errors=ignore' mode - hence we get '?' characters rather than
# the exception. 'Latin1' operates as we expect - ie, fails.
# See [ 850997 ] mbcs encoding ignores errors
TESTFN_UNICODE_UNENCODEABLE.encode("Latin1")
except UnicodeEncodeError:
pass
else:
print \
'WARNING: The filename %r CAN be encoded by the filesystem. ' \
'Unicode filename tests may not be effective' \
% TESTFN_UNICODE_UNENCODEABLE
# Make sure we can write to TESTFN, try in /tmp if we can't
fp = None
try:
fp = open(TESTFN, 'w+')
except IOError:
TMP_TESTFN = os.path.join('/tmp', TESTFN)
try:
fp = open(TMP_TESTFN, 'w+')
TESTFN = TMP_TESTFN
del TMP_TESTFN
except IOError:
print ('WARNING: tests will fail, unable to write to: %s or %s' %
(TESTFN, TMP_TESTFN))
if fp is not None:
fp.close()
try:
os.unlink(TESTFN)
except:
pass
del os, fp
from os import unlink
def findfile(file, here=__file__):
    """Try to find a file on sys.path and the working directory.  If it is not
    found the argument passed to the function is returned (this does not
    necessarily signal failure; could still be the legitimate path)."""
    import os
    if os.path.isabs(file):
        return file
    # Search this module's directory first, then every sys.path entry.
    for dn in [os.path.dirname(here)] + sys.path:
        fn = os.path.join(dn, file)
        if os.path.exists(fn):
            return fn
    return file
def verify(condition, reason='test failed'):
    """Verify that condition is true. If not, raise TestFailed.

    The optional argument reason can be given to provide
    a better error text.
    """
    if condition:
        return
    raise TestFailed(reason)
def vereq(a, b):
    """Raise TestFailed if a == b is false.
    This is better than verify(a == b) because, in case of failure, the
    error message incorporates repr(a) and repr(b) so you can see the
    inputs.
    Note that "not (a == b)" isn't necessarily the same as "a != b"; the
    former is tested.
    """
    # Deliberately exercises __eq__ only (Python 2 raise syntax).
    if not (a == b):
        raise TestFailed, "%r == %r" % (a, b)
def sortdict(dict):
    "Like repr(dict), but in sorted order."
    # Sort the (key, value) pairs so output is deterministic across runs.
    pairs = sorted(dict.items())
    return "{%s}" % ", ".join(["%r: %r" % pair for pair in pairs])
def check_syntax(statement):
    # Verify that compiling `statement` raises SyntaxError; complain on
    # stdout (which regrtest diffs) when it compiles cleanly instead.
    try:
        compile(statement, '<string>', 'exec')
    except SyntaxError:
        pass
    else:
        print 'Missing SyntaxError: "%s"' % statement
#=======================================================================
# Preliminary PyUNIT integration.
import unittest
class BasicTestRunner:
    """Minimal unittest runner: collects results without printing anything."""
    def run(self, test):
        result = unittest.TestResult()
        test(result)
        return result
def run_suite(suite, testclass=None):
    """Run tests from a unittest.TestSuite-derived class."""
    if verbose:
        runner = unittest.TextTestRunner(sys.stdout, verbosity=2)
    else:
        runner = BasicTestRunner()
    result = runner.run(suite)
    if not result.wasSuccessful():
        # A single error or failure: re-raise its traceback text directly.
        # Multiple problems: summarize, naming the test class when known.
        if len(result.errors) == 1 and not result.failures:
            err = result.errors[0][1]
        elif len(result.failures) == 1 and not result.errors:
            err = result.failures[0][1]
        else:
            if testclass is None:
                msg = "errors occurred; run in verbose mode for details"
            else:
                msg = "errors occurred in %s.%s" \
                      % (testclass.__module__, testclass.__name__)
            raise TestFailed(msg)
        raise TestFailed(err)
def run_unittest(*classes):
    """Run tests from unittest.TestCase-derived classes."""
    suite = unittest.TestSuite()
    for cls in classes:
        # Accept ready-made suites/cases as-is; wrap plain classes.
        if isinstance(cls, (unittest.TestSuite, unittest.TestCase)):
            suite.addTest(cls)
        else:
            suite.addTest(unittest.makeSuite(cls))
    # Only forward a class name to run_suite when it is unambiguous.
    if len(classes)==1:
        testclass = classes[0]
    else:
        testclass = None
    run_suite(suite, testclass)
#=======================================================================
# doctest driver.
def run_doctest(module, verbosity=None):
    """Run doctest on the given module.  Return (#failures, #tests).
    If optional argument verbosity is not specified (or is None), pass
    test_support's belief about verbosity on to doctest.  Else doctest's
    usual behavior is used (it searches sys.argv for -v).
    """
    import doctest
    if verbosity is None:
        verbosity = verbose
    else:
        verbosity = None
    # Direct doctest output (normally just errors) to real stdout; doctest
    # output shouldn't be compared by regrtest.
    save_stdout = sys.stdout
    sys.stdout = get_original_stdout()
    try:
        f, t = doctest.testmod(module, verbose=verbosity)
        if f:
            raise TestFailed("%d of %d doctests failed" % (f, t))
    finally:
        # Always restore stdout, even when doctests fail.
        sys.stdout = save_stdout
    if verbose:
        print 'doctest (%s) ... %d tests with zero failures' % (module.__name__, t)
    return f, t
|
mit
|
gnuine/ubiquo_categories
|
lib/ubiquo_categories/filters/category_filter.rb
|
783
|
module UbiquoCategories
  module Filters
    # Ubiquo list filter that filters records by the categories belonging to
    # a given category set.
    class CategoryFilter < Ubiquo::Filters::LinksOrSelectFilter
      # Configures the filter for +set+; any option overrides the defaults,
      # and :caption falls back to the set's i18n label.
      def configure(set, options = {})
        defaults = {
          :collection => categories_for_select(set),
          :caption => options[:caption] || I18n.t("ubiquo.category_sets.#{set}"),
          :field => "filter_#{set.to_s}",
          :id_field => :name,
          :name_field => :name
        }
        @options = defaults.merge(options)
      end
      private
      # Prepares a collection
      def categories_for_select key
        @context.uhook_categories_for_set category_set(key)
      end
      # Looks up the CategorySet for the pluralized +key+; raises
      # SetNotFoundError when no such set exists.
      def category_set(key)
        key = key.to_s.pluralize
        CategorySet.find_by_key(key) || raise(SetNotFoundError.new(key))
      end
    end
  end
end
|
mit
|
Kunstmaan/BootstrapCK4-Skin
|
plugins/fakeobjects/lang/mk.js
|
377
|
/*
Copyright (c) 2003-2014, CKSource - Frederico Knabben. All rights reserved.
For licensing, see LICENSE.md or http://ckeditor.com/license
*/
// Macedonian ('mk') UI strings for the fakeobjects plugin; entries marked
// MISSING still carry the untranslated English fallback text.
CKEDITOR.plugins.setLang( 'fakeobjects', 'mk', {
	anchor: 'Anchor',
	flash: 'Flash Animation', // MISSING
	hiddenfield: 'Hidden Field', // MISSING
	iframe: 'IFrame', // MISSING
	unknown: 'Unknown Object' // MISSING
} );
|
mit
|
raadhuis/modx-basic
|
core/components/migx/processors/mgr/migxconfigs/remove.php
|
2507
|
<?php
/**
* XdbEdit
*
* Copyright 2010 by Bruno Perner <b.perner@gmx.de>
*
* This file is part of XdbEdit, for editing custom-tables in MODx Revolution CMP.
*
* XdbEdit is free software; you can redistribute it and/or modify it under the
* terms of the GNU General Public License as published by the Free Software
* Foundation; either version 2 of the License, or (at your option) any later
* version.
*
* XdbEdit is distributed in the hope that it will be useful, but WITHOUT ANY
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
* A PARTICULAR PURPOSE. See the GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along with
* XdbEdit; if not, write to the Free Software Foundation, Inc., 59 Temple Place,
* Suite 330, Boston, MA 02111-1307 USA
*
* @package xdbedit
*/
/**
* Update and Create-processor for xdbedit
*
* @package xdbedit
* @subpackage processors
*/
//if (!$modx->hasPermission('quip.thread_view')) return $modx->error->failure($modx->lexicon('access_denied'));
//return $modx->error->failure('huhu');
// A target id is required; bail out early with a localized error otherwise.
if (empty($scriptProperties['object_id'])){
    return $modx->error->failure($modx->lexicon('quip.thread_err_ns'));
}
// Register the xPDO package/model configured for this MIGX grid so the
// custom table class used below can be resolved.
$config = $modx->migx->customconfigs;
$prefix = $config['prefix'];
$packageName = $config['packageName'];
$packagepath = $modx->getOption('core_path') . 'components/' . $packageName .
    '/';
$modelpath = $packagepath . 'model/';
$modx->addPackage($packageName, $modelpath, $prefix);
$classname = $config['classname'];
if ($modx->lexicon)
{
    $modx->lexicon->load($packageName.':default');
}
switch ($scriptProperties['task']) {
    case 'removeone':
        // getObject() returns null when the id does not exist; calling
        // remove() on null would be a fatal error, so fail gracefully.
        $object = $modx->getObject($classname, $scriptProperties['object_id']);
        if (!$object) {
            return $modx->error->failure($modx->lexicon('quip.thread_err_ns'));
        }
        if ($object->remove() === false) {
            return $modx->error->failure($modx->lexicon('quip.comment_err_remove'));
        }
        break;
    default:
        // Unknown tasks fall through to the cache clear below.
        break;
}
//clear cache
$paths = array(
    'config.cache.php',
    'sitePublishing.idx.php',
    'registry/mgr/workspace/',
    'lexicon/',
);
// Also clear each context's cache partition.
$contexts = $modx->getCollection('modContext');
foreach ($contexts as $context) {
    $paths[] = $context->get('key') . '/';
}
$options = array(
    'publishing' => 1,
    'extensions' => array('.cache.php', '.msg.php', '.tpl.php'),
);
// When DB caching is enabled, flush cached objects as well.
if ($modx->getOption('cache_db')) $options['objects'] = '*';
$results= $modx->cacheManager->clearCache($paths, $options);
return $modx->error->success();
?>
|
mit
|
timkrentz/SunTracker
|
IMU/VTK-6.2.0/Wrapping/Python/vtk/gtk/GtkGLExtVTKRenderWindow.py
|
18331
|
"""
Description:
This provides a VTK widget for pyGtk. This embeds a vtkRenderWindow
inside a GTK widget. This is based on GtkVTKRenderWindow.py.
The extensions here allow the use of gtkglext rather than gtkgl and
pygtk-2 rather than pygtk-0. It requires pygtk-2.0.0 or later.
There is a working example at the bottom.
Credits:
John Hunter <jdhunter@ace.bsd.uchicago.edu> developed and tested
this code based on VTK's GtkVTKRenderWindow.py and extended it to
work with pygtk-2.0.0.
License:
VTK license.
"""
import math, sys
import pygtk
pygtk.require('2.0')
import gtk
import gtk.gtkgl
from gtk import gdk
import vtk
class GtkGLExtVTKRenderWindowBase(gtk.gtkgl.DrawingArea):
    """ A base class that enables one to embed a vtkRenderWindow into
    a pyGTK widget. This class embeds the RenderWindow correctly.
    Provided are some empty methods that can be overloaded to provide
    a user defined interaction behaviour. The event handling
    functions have names that are somewhat similar to the ones in the
    vtkInteractorStyle class included with VTK. """
    def __init__(self, *args):
        gtk.gtkgl.DrawingArea.__init__(self)
        # GTK double buffering would blit over what VTK draws directly
        # into the window — presumably why it is disabled here; confirm
        # against the gtkglext documentation.
        self.set_double_buffered(gtk.FALSE)
        self._RenderWindow = vtk.vtkRenderWindow()
        # private attributes
        self.__Created = 0
        # used by the LOD actors
        self._DesiredUpdateRate = 15
        self._StillUpdateRate = 0.0001
        self.ConnectSignals()
        # need this to be able to handle key_press events.
        self.set_flags(gtk.CAN_FOCUS)
        # default size
        self.set_size_request(300, 300)
    def ConnectSignals(self):
        # Wire each GTK signal of interest to an On* handler below;
        # subclasses customise behaviour by overriding the handlers,
        # not this wiring.
        self.connect("realize", self.OnRealize)
        self.connect("expose_event", self.OnExpose)
        self.connect("configure_event", self.OnConfigure)
        self.connect("button_press_event", self.OnButtonDown)
        self.connect("button_release_event", self.OnButtonUp)
        self.connect("motion_notify_event", self.OnMouseMove)
        self.connect("enter_notify_event", self.OnEnter)
        self.connect("leave_notify_event", self.OnLeave)
        self.connect("key_press_event", self.OnKeyPress)
        self.connect("delete_event", self.OnDestroy)
        # the event mask must cover every signal connected above,
        # otherwise GTK never delivers them
        self.add_events(gdk.EXPOSURE_MASK|
                        gdk.BUTTON_PRESS_MASK |
                        gdk.BUTTON_RELEASE_MASK |
                        gdk.KEY_PRESS_MASK |
                        gdk.POINTER_MOTION_MASK |
                        gdk.POINTER_MOTION_HINT_MASK |
                        gdk.ENTER_NOTIFY_MASK |
                        gdk.LEAVE_NOTIFY_MASK)
    def GetRenderWindow(self):
        # The embedded vtkRenderWindow instance.
        return self._RenderWindow
    def GetRenderer(self):
        # Returns the render window's first renderer (None if none added).
        self._RenderWindow.GetRenderers().InitTraversal()
        return self._RenderWindow.GetRenderers().GetNextItem()
    def SetDesiredUpdateRate(self, rate):
        """Mirrors the method with the same name in
        vtkRenderWindowInteractor."""
        self._DesiredUpdateRate = rate
    def GetDesiredUpdateRate(self):
        """Mirrors the method with the same name in
        vtkRenderWindowInteractor."""
        return self._DesiredUpdateRate
    def SetStillUpdateRate(self, rate):
        """Mirrors the method with the same name in
        vtkRenderWindowInteractor."""
        self._StillUpdateRate = rate
    def GetStillUpdateRate(self):
        """Mirrors the method with the same name in
        vtkRenderWindowInteractor."""
        return self._StillUpdateRate
    def Render(self):
        # Only render once the native window id has been attached,
        # otherwise VTK would try to create its own window.
        if self.__Created:
            self._RenderWindow.Render()
    def OnRealize(self, *args):
        # Attach the GTK widget's native window id to the vtkRenderWindow;
        # this is only possible after the widget is realized.
        if self.__Created == 0:
            # you can't get the xid without the window being realized.
            self.realize()
            if sys.platform=='win32':
                win_id = str(self.widget.window.handle)
            else:
                win_id = str(self.widget.window.xid)
            # NOTE(review): self.widget is only assigned in OnConfigure; if a
            # realize ever preceded every configure this would raise
            # AttributeError — confirm the expected GTK event order.
            self._RenderWindow.SetWindowInfo(win_id)
            self.__Created = 1
        return gtk.TRUE
    def Created(self):
        # 1 once the render window is bound to a native window, else 0.
        return self.__Created
    def OnConfigure(self, widget, event):
        # Widget was resized: propagate the new size to VTK and redraw.
        self.widget=widget
        self._RenderWindow.SetSize(event.width, event.height)
        self.Render()
        return gtk.TRUE
    def OnExpose(self, *args):
        self.Render()
        return gtk.TRUE
    def OnDestroy(self, *args):
        # Drop the VTK window before destroying the GTK widget.
        self.hide()
        del self._RenderWindow
        self.destroy()
        return gtk.TRUE
    def OnButtonDown(self, wid, event):
        """Mouse button pressed."""
        # switch to the (coarser) interactive update rate while dragging
        self._RenderWindow.SetDesiredUpdateRate(self._DesiredUpdateRate)
        return gtk.TRUE
    def OnButtonUp(self, wid, event):
        """Mouse button released."""
        # restore the high-quality still-render rate
        self._RenderWindow.SetDesiredUpdateRate(self._StillUpdateRate)
        return gtk.TRUE
    def OnMouseMove(self, wid, event):
        """Mouse has moved."""
        return gtk.TRUE
    def OnEnter(self, wid, event):
        """Entering the vtkRenderWindow."""
        return gtk.TRUE
    def OnLeave(self, wid, event):
        """Leaving the vtkRenderWindow."""
        return gtk.TRUE
    def OnKeyPress(self, wid, event):
        """Key pressed."""
        return gtk.TRUE
    def OnKeyRelease(self, wid, event):
        "Key released."
        return gtk.TRUE
class GtkGLExtVTKRenderWindow(GtkGLExtVTKRenderWindowBase):
    """ An example of a fully functional GtkGLExtVTKRenderWindow that
    is based on the vtkRenderWidget.py provided with the VTK
    sources.

    Interaction: button-1 rotates (shift-button-1 pans), button-2 pans,
    button-3 zooms; keys r/w/s/p reset, wireframe, surface and pick."""
    def __init__(self, *args):
        GtkGLExtVTKRenderWindowBase.__init__(self)
        self._CurrentRenderer = None
        self._CurrentCamera = None
        self._CurrentZoom = 1.0
        self._CurrentLight = None
        self._ViewportCenterX = 0
        self._ViewportCenterY = 0
        # picking: the picked prop is shown in red and its original
        # property restored on the next pick
        self._Picker = vtk.vtkCellPicker()
        self._PickedAssembly = None
        self._PickedProperty = vtk.vtkProperty()
        self._PickedProperty.SetColor(1, 0, 0)
        self._PrePickedProperty = None
        self._OldFocus = None
        # these record the previous mouse position
        self._LastX = 0
        self._LastY = 0
    def OnButtonDown(self, wid, event):
        """Mouse button pressed: drop to the interactive update rate
        and start a motion gesture."""
        self._RenderWindow.SetDesiredUpdateRate(self._DesiredUpdateRate)
        # (the original had an unreachable 'return gtk.TRUE' after this)
        return self.StartMotion(wid, event)
    def OnButtonUp(self, wid, event):
        """Mouse button released: restore the still update rate and end
        the motion gesture."""
        self._RenderWindow.SetDesiredUpdateRate(self._StillUpdateRate)
        # (the original had an unreachable 'return gtk.TRUE' after this)
        return self.EndMotion(wid, event)
    def OnMouseMove(self, wid, event=None):
        """Dispatch a drag to Rotate/Pan/Zoom depending on the buttons
        and modifiers held."""
        if ((event.state & gdk.BUTTON1_MASK) == gdk.BUTTON1_MASK):
            if ((event.state & gdk.SHIFT_MASK) == gdk.SHIFT_MASK):
                m = self.get_pointer()
                self.Pan(m[0], m[1])
            else:
                m = self.get_pointer()
                self.Rotate(m[0], m[1])
        elif ((event.state & gdk.BUTTON2_MASK) == gdk.BUTTON2_MASK):
            m = self.get_pointer()
            self.Pan(m[0], m[1])
        elif ((event.state & gdk.BUTTON3_MASK) == gdk.BUTTON3_MASK):
            m = self.get_pointer()
            self.Zoom(m[0], m[1])
        else:
            return gtk.FALSE
        return gtk.TRUE
    def OnEnter(self, wid, event=None):
        """Pointer entered the widget: grab focus (for key events) and
        pick the renderer under the pointer."""
        # a render hack because grab_focus blanks the renderwin
        self.grab_focus()
        w = self.get_pointer()
        self.UpdateRenderer(w[0], w[1])
        return gtk.TRUE
    def OnKeyPress(self, wid, event=None):
        """Keyboard shortcuts: r=reset camera, w=wireframe, s=surface,
        p=pick actor under the pointer."""
        #if (event.keyval == gdk.keyval_from_name("q") or
        #    event.keyval == gdk.keyval_from_name("Q")):
        #    gtk.mainquit()
        if (event.keyval == gdk.keyval_from_name('r') or
            event.keyval == gdk.keyval_from_name('R')):
            self.Reset()
            return gtk.TRUE
        elif (event.keyval == gdk.keyval_from_name('w') or
              event.keyval == gdk.keyval_from_name('W')):
            self.Wireframe()
            return gtk.TRUE
        elif (event.keyval == gdk.keyval_from_name('s') or
              event.keyval == gdk.keyval_from_name('S')):
            self.Surface()
            return gtk.TRUE
        elif (event.keyval == gdk.keyval_from_name('p') or
              event.keyval == gdk.keyval_from_name('P')):
            m = self.get_pointer()
            self.PickActor(m[0], m[1])
            return gtk.TRUE
        else:
            return gtk.FALSE
    def GetZoomFactor(self):
        return self._CurrentZoom
    def SetZoomFactor(self, zf):
        self._CurrentZoom = zf
    def GetPicker(self):
        return self._Picker
    def Render(self):
        # keep the headlight glued to the camera before rendering
        if (self._CurrentLight):
            light = self._CurrentLight
            light.SetPosition(self._CurrentCamera.GetPosition())
            light.SetFocalPoint(self._CurrentCamera.GetFocalPoint())
        GtkGLExtVTKRenderWindowBase.Render(self)
    def UpdateRenderer(self,x,y):
        """
        UpdateRenderer will identify the renderer under the mouse and set
        up _CurrentRenderer, _CurrentCamera, and _CurrentLight.
        """
        windowX,windowY  = self.widget.window.get_size()
        renderers = self._RenderWindow.GetRenderers()
        numRenderers = renderers.GetNumberOfItems()
        self._CurrentRenderer = None
        renderers.InitTraversal()
        for i in range(0,numRenderers):
            renderer = renderers.GetNextItem()
            vx,vy = (0,0)
            # map pixel position to normalized viewport coordinates
            if (windowX > 1):
                vx = float(x)/(windowX-1)
            if (windowY > 1):
                vy = (windowY-float(y)-1)/(windowY-1)
            (vpxmin,vpymin,vpxmax,vpymax) = renderer.GetViewport()
            if (vx >= vpxmin and vx <= vpxmax and
                vy >= vpymin and vy <= vpymax):
                self._CurrentRenderer = renderer
                self._ViewportCenterX = float(windowX)*(vpxmax-vpxmin)/2.0\
                                        +vpxmin
                self._ViewportCenterY = float(windowY)*(vpymax-vpymin)/2.0\
                                        +vpymin
                self._CurrentCamera = self._CurrentRenderer.GetActiveCamera()
                lights = self._CurrentRenderer.GetLights()
                lights.InitTraversal()
                self._CurrentLight = lights.GetNextItem()
                break
        self._LastX = x
        self._LastY = y
    def GetCurrentRenderer(self):
        """Return the cached current renderer, defaulting to the render
        window's first renderer.  Returns None when the window has no
        renderers (the original raised NameError in that case)."""
        if self._CurrentRenderer is None:
            renderers = self._RenderWindow.GetRenderers()
            renderers.InitTraversal()
            renderer = renderers.GetNextItem()
            if renderer is not None:
                self._CurrentRenderer = renderer
        return self._CurrentRenderer
    def GetCurrentCamera(self):
        # Lazily resolve the active camera of the current renderer.
        if self._CurrentCamera is None:
            renderer = self.GetCurrentRenderer()
            self._CurrentCamera = renderer.GetActiveCamera()
        return self._CurrentCamera
    def StartMotion(self, wid, event=None):
        # Begin a drag: remember the renderer/camera under the pointer.
        x = event.x
        y = event.y
        self.UpdateRenderer(x,y)
        return gtk.TRUE
    def EndMotion(self, wid, event=None):
        # Finish a drag with one final (high-quality) render.
        if self._CurrentRenderer:
            self.Render()
        return gtk.TRUE
    def Rotate(self,x,y):
        """Azimuth/elevate the camera by the mouse delta."""
        if self._CurrentRenderer:
            self._CurrentCamera.Azimuth(self._LastX - x)
            self._CurrentCamera.Elevation(y - self._LastY)
            self._CurrentCamera.OrthogonalizeViewUp()
            self._LastX = x
            self._LastY = y
            self._CurrentRenderer.ResetCameraClippingRange()
            self.Render()
    def Pan(self,x,y):
        """Translate camera and focal point together by the mouse delta,
        handling parallel and perspective projections separately."""
        if self._CurrentRenderer:
            renderer = self._CurrentRenderer
            camera = self._CurrentCamera
            (pPoint0,pPoint1,pPoint2) = camera.GetPosition()
            (fPoint0,fPoint1,fPoint2) = camera.GetFocalPoint()
            if (camera.GetParallelProjection()):
                # shift both focal point and position by the same
                # display-space offset
                renderer.SetWorldPoint(fPoint0,fPoint1,fPoint2,1.0)
                renderer.WorldToDisplay()
                fx,fy,fz = renderer.GetDisplayPoint()
                renderer.SetDisplayPoint(fx-x+self._LastX,
                                         fy+y-self._LastY,
                                         fz)
                renderer.DisplayToWorld()
                fx,fy,fz,fw = renderer.GetWorldPoint()
                camera.SetFocalPoint(fx,fy,fz)
                renderer.SetWorldPoint(pPoint0,pPoint1,pPoint2,1.0)
                renderer.WorldToDisplay()
                fx,fy,fz = renderer.GetDisplayPoint()
                renderer.SetDisplayPoint(fx-x+self._LastX,
                                         fy+y-self._LastY,
                                         fz)
                renderer.DisplayToWorld()
                fx,fy,fz,fw = renderer.GetWorldPoint()
                camera.SetPosition(fx,fy,fz)
            else:
                (fPoint0,fPoint1,fPoint2) = camera.GetFocalPoint()
                # Specify a point location in world coordinates
                renderer.SetWorldPoint(fPoint0,fPoint1,fPoint2,1.0)
                renderer.WorldToDisplay()
                # Convert world point coordinates to display coordinates
                dPoint = renderer.GetDisplayPoint()
                focalDepth = dPoint[2]
                aPoint0 = self._ViewportCenterX + (x - self._LastX)
                aPoint1 = self._ViewportCenterY - (y - self._LastY)
                renderer.SetDisplayPoint(aPoint0,aPoint1,focalDepth)
                renderer.DisplayToWorld()
                (rPoint0,rPoint1,rPoint2,rPoint3) = renderer.GetWorldPoint()
                if (rPoint3 != 0.0):
                    rPoint0 = rPoint0/rPoint3
                    rPoint1 = rPoint1/rPoint3
                    rPoint2 = rPoint2/rPoint3
                camera.SetFocalPoint((fPoint0 - rPoint0) + fPoint0,
                                     (fPoint1 - rPoint1) + fPoint1,
                                     (fPoint2 - rPoint2) + fPoint2)
                camera.SetPosition((fPoint0 - rPoint0) + pPoint0,
                                   (fPoint1 - rPoint1) + pPoint1,
                                   (fPoint2 - rPoint2) + pPoint2)
            self._LastX = x
            self._LastY = y
            self.Render()
    def Zoom(self,x,y):
        """Dolly (or scale, in parallel projection) by an exponential
        factor of the vertical mouse delta."""
        if self._CurrentRenderer:
            renderer = self._CurrentRenderer
            camera = self._CurrentCamera
            zoomFactor = math.pow(1.02,(0.5*(self._LastY - y)))
            self._CurrentZoom = self._CurrentZoom * zoomFactor
            if camera.GetParallelProjection():
                parallelScale = camera.GetParallelScale()/zoomFactor
                camera.SetParallelScale(parallelScale)
            else:
                camera.Dolly(zoomFactor)
                renderer.ResetCameraClippingRange()
            self._LastX = x
            self._LastY = y
            self.Render()
    def Reset(self):
        """Reset the current renderer's camera to show all actors."""
        if self._CurrentRenderer:
            self._CurrentRenderer.ResetCamera()
        self.Render()
    def Wireframe(self):
        """Switch every actor of the current renderer to wireframe."""
        # guard added for consistency with Reset/Rotate/Zoom: before the
        # first pointer Enter there is no current renderer yet
        if self._CurrentRenderer:
            actors = self._CurrentRenderer.GetActors()
            numActors = actors.GetNumberOfItems()
            actors.InitTraversal()
            for i in range(0,numActors):
                actor = actors.GetNextItem()
                actor.GetProperty().SetRepresentationToWireframe()
            self.Render()
    def Surface(self):
        """Switch every actor of the current renderer to surface."""
        # guard added for consistency with Reset/Rotate/Zoom (see above)
        if self._CurrentRenderer:
            actors = self._CurrentRenderer.GetActors()
            numActors = actors.GetNumberOfItems()
            actors.InitTraversal()
            for i in range(0,numActors):
                actor = actors.GetNextItem()
                actor.GetProperty().SetRepresentationToSurface()
            self.Render()
    def PickActor(self,x,y):
        """Cell-pick at the given widget coordinates, restoring the
        previously picked prop's property and painting the new one red."""
        if self._CurrentRenderer:
            renderer = self._CurrentRenderer
            picker = self._Picker
            windowX,windowY  = self.widget.window.get_size()
            # flip y: GTK has origin top-left, VTK bottom-left
            picker.Pick(x,(windowY - y - 1),0.0,renderer)
            assembly = picker.GetAssembly()
            if (self._PickedAssembly != None and
                self._PrePickedProperty != None):
                self._PickedAssembly.SetProperty(self._PrePickedProperty)
                # release hold of the property
                self._PrePickedProperty.UnRegister(self._PrePickedProperty)
                self._PrePickedProperty = None
            if (assembly != None):
                self._PickedAssembly = assembly
                self._PrePickedProperty = self._PickedAssembly.GetProperty()
                # hold onto the property
                self._PrePickedProperty.Register(self._PrePickedProperty)
                self._PickedAssembly.SetProperty(self._PickedProperty)
            self.Render()
def main():
    """Demo entry point: a cone rendered inside a GtkGLExtVTKRenderWindow
    embedded in a small GTK window with a dummy button."""
    # Build the VTK pipeline first: an 80-sided cone, plain actor.
    cone_source = vtk.vtkConeSource()
    cone_source.SetResolution(80)
    mapper = vtk.vtkPolyDataMapper()
    mapper.SetInputConnection(cone_source.GetOutputPort())
    actor = vtk.vtkActor()
    actor.SetMapper(mapper)
    actor.GetProperty().SetColor(0.5, 0.5, 1.0)

    # Top-level window hosting the render widget plus a button.
    win = gtk.Window()
    win.set_title("A GtkGLExtVTKRenderWindow Demo!")
    win.connect("destroy", gtk.mainquit)
    win.connect("delete_event", gtk.mainquit)
    win.set_border_width(10)

    render_widget = GtkGLExtVTKRenderWindow()
    render_widget.show()

    box = gtk.VBox(spacing=3)
    box.show()
    box.pack_start(render_widget)

    btn = gtk.Button('My Button')
    btn.show()
    box.pack_start(btn)
    win.add(box)
    win.set_size_request(400, 400)

    # Attach a renderer with the cone actor to the embedded render window.
    renderer = vtk.vtkRenderer()
    render_widget.GetRenderWindow().AddRenderer(renderer)
    renderer.AddActor(actor)

    # Show the window and enter the GTK event loop.
    win.show()
    gtk.mainloop()
if __name__ == "__main__":
    main()
|
mit
|
jmarconi/saleprode
|
src/Jm/SaleBundle/Controller/EquipoController.php
|
5339
|
<?php
namespace Jm\SaleBundle\Controller;
use Symfony\Component\HttpFoundation\Request;
use Symfony\Bundle\FrameworkBundle\Controller\Controller;
use Sensio\Bundle\FrameworkExtraBundle\Configuration\Method;
use Sensio\Bundle\FrameworkExtraBundle\Configuration\Route;
use Sensio\Bundle\FrameworkExtraBundle\Configuration\Template;
use Jm\SaleBundle\Entity\Equipo;
use Jm\SaleBundle\Form\EquipoType;
/**
 * Equipo controller.
 *
 * CRUD controller for Equipo entities, mounted under /admin/equipo.
 * Views are resolved from the method names via the @Template annotation.
 *
 * @Route("/admin/equipo")
 */
class EquipoController extends Controller
{
    /**
     * Lists all Equipo entities.
     *
     * @Route("/", name="admin_equipo")
     * @Template()
     */
    public function indexAction()
    {
        $em = $this->getDoctrine()->getManager();
        $entities = $em->getRepository('JmSaleBundle:Equipo')->findAll();
        // array is passed as template variables to index.html.twig
        return array(
            'entities' => $entities,
        );
    }
    /**
     * Finds and displays a Equipo entity.
     *
     * 404s when the id does not exist.  Also builds the delete form so
     * the show page can offer a delete button.
     *
     * @Route("/{id}/show", name="admin_equipo_show")
     * @Template()
     */
    public function showAction($id)
    {
        $em = $this->getDoctrine()->getManager();
        $entity = $em->getRepository('JmSaleBundle:Equipo')->find($id);
        if (!$entity) {
            throw $this->createNotFoundException('Unable to find Equipo entity.');
        }
        $deleteForm = $this->createDeleteForm($id);
        return array(
            'entity'      => $entity,
            'delete_form' => $deleteForm->createView(),
        );
    }
    /**
     * Displays a form to create a new Equipo entity.
     *
     * GET half of the new/create pair; submission posts to createAction.
     *
     * @Route("/new", name="admin_equipo_new")
     * @Template()
     */
    public function newAction()
    {
        $entity = new Equipo();
        $form   = $this->createForm(new EquipoType(), $entity);
        return array(
            'entity' => $entity,
            'form'   => $form->createView(),
        );
    }
    /**
     * Creates a new Equipo entity.
     *
     * On validation failure the "new" template is re-rendered with the
     * bound form; on success redirects to the show page.
     *
     * @Route("/create", name="admin_equipo_create")
     * @Method("POST")
     * @Template("JmSaleBundle:Equipo:new.html.twig")
     */
    public function createAction(Request $request)
    {
        $entity  = new Equipo();
        $form = $this->createForm(new EquipoType(), $entity);
        // bind() is the Symfony 2.0/2.1-era submission API
        $form->bind($request);
        if ($form->isValid()) {
            $em = $this->getDoctrine()->getManager();
            $em->persist($entity);
            $em->flush();
            return $this->redirect($this->generateUrl('admin_equipo_show', array('id' => $entity->getId())));
        }
        return array(
            'entity' => $entity,
            'form'   => $form->createView(),
        );
    }
    /**
     * Displays a form to edit an existing Equipo entity.
     *
     * 404s when the id does not exist; submission posts to updateAction.
     *
     * @Route("/{id}/edit", name="admin_equipo_edit")
     * @Template()
     */
    public function editAction($id)
    {
        $em = $this->getDoctrine()->getManager();
        $entity = $em->getRepository('JmSaleBundle:Equipo')->find($id);
        if (!$entity) {
            throw $this->createNotFoundException('Unable to find Equipo entity.');
        }
        $editForm = $this->createForm(new EquipoType(), $entity);
        $deleteForm = $this->createDeleteForm($id);
        return array(
            'entity'      => $entity,
            'edit_form'   => $editForm->createView(),
            'delete_form' => $deleteForm->createView(),
        );
    }
    /**
     * Edits an existing Equipo entity.
     *
     * On success redirects back to the edit page; on validation failure
     * the edit template is re-rendered with the bound form.
     *
     * @Route("/{id}/update", name="admin_equipo_update")
     * @Method("POST")
     * @Template("JmSaleBundle:Equipo:edit.html.twig")
     */
    public function updateAction(Request $request, $id)
    {
        $em = $this->getDoctrine()->getManager();
        $entity = $em->getRepository('JmSaleBundle:Equipo')->find($id);
        if (!$entity) {
            throw $this->createNotFoundException('Unable to find Equipo entity.');
        }
        $deleteForm = $this->createDeleteForm($id);
        $editForm = $this->createForm(new EquipoType(), $entity);
        $editForm->bind($request);
        if ($editForm->isValid()) {
            // persist() is redundant for an already-managed entity but harmless
            $em->persist($entity);
            $em->flush();
            return $this->redirect($this->generateUrl('admin_equipo_edit', array('id' => $id)));
        }
        return array(
            'entity'      => $entity,
            'edit_form'   => $editForm->createView(),
            'delete_form' => $deleteForm->createView(),
        );
    }
    /**
     * Deletes a Equipo entity.
     *
     * Only removes the entity when the CSRF-protected delete form
     * validates; always redirects back to the index afterwards.
     *
     * @Route("/{id}/delete", name="admin_equipo_delete")
     * @Method("POST")
     */
    public function deleteAction(Request $request, $id)
    {
        $form = $this->createDeleteForm($id);
        $form->bind($request);
        if ($form->isValid()) {
            $em = $this->getDoctrine()->getManager();
            $entity = $em->getRepository('JmSaleBundle:Equipo')->find($id);
            if (!$entity) {
                throw $this->createNotFoundException('Unable to find Equipo entity.');
            }
            $em->remove($entity);
            $em->flush();
        }
        return $this->redirect($this->generateUrl('admin_equipo'));
    }
    /**
     * Builds the minimal (hidden id only) form used by the delete routes.
     */
    private function createDeleteForm($id)
    {
        return $this->createFormBuilder(array('id' => $id))
            ->add('id', 'hidden')
            ->getForm()
        ;
    }
}
|
mit
|
kaizer04/SoftUni
|
ASP.NET MVC/toSend/SportSystem/SportSystem.Web/App_Start/NinjectWebCommon.cs
|
2201
|
// WebActivator invokes Start/Stop around the application lifecycle, so no
// Global.asax wiring is needed for the Ninject bootstrap.
[assembly: WebActivatorEx.PreApplicationStartMethod(typeof(SportSystem.Web.App_Start.NinjectWebCommon), "Start")]
[assembly: WebActivatorEx.ApplicationShutdownMethodAttribute(typeof(SportSystem.Web.App_Start.NinjectWebCommon), "Stop")]
namespace SportSystem.Web.App_Start
{
    using System;
    using System.Web;
    using Microsoft.Web.Infrastructure.DynamicModuleHelper;
    using Ninject;
    using Ninject.Web.Common;
    using SportSystem.Data;
    public static class NinjectWebCommon
    {
        // Single bootstrapper shared by Start and Stop.
        private static readonly Bootstrapper bootstrapper = new Bootstrapper();
        /// <summary>
        /// Starts the application
        /// </summary>
        public static void Start()
        {
            // Register Ninject's HTTP modules before initializing the kernel.
            DynamicModuleUtility.RegisterModule(typeof(OnePerRequestHttpModule));
            DynamicModuleUtility.RegisterModule(typeof(NinjectHttpModule));
            bootstrapper.Initialize(CreateKernel);
        }
        /// <summary>
        /// Stops the application.
        /// </summary>
        public static void Stop()
        {
            bootstrapper.ShutDown();
        }
        /// <summary>
        /// Creates the kernel that will manage your application.
        /// </summary>
        /// <returns>The created kernel.</returns>
        private static IKernel CreateKernel()
        {
            var kernel = new StandardKernel();
            try
            {
                // standard Ninject.Web.Common plumbing bindings
                kernel.Bind<Func<IKernel>>().ToMethod(ctx => () => new Bootstrapper().Kernel);
                kernel.Bind<IHttpModule>().To<HttpApplicationInitializationHttpModule>();
                RegisterServices(kernel);
                return kernel;
            }
            catch
            {
                // dispose the partially built kernel, then let the error surface
                kernel.Dispose();
                throw;
            }
        }
        /// <summary>
        /// Load your modules or register your services here!
        /// </summary>
        /// <param name="kernel">The kernel.</param>
        private static void RegisterServices(IKernel kernel)
        {
            // application bindings: data-access layer abstractions
            kernel.Bind<ISportSystemData>().To<SportSystemData>();
            kernel.Bind<ISportSystemDbContext>().To<SportSystemDbContext>();
        }
    }
}
|
mit
|
chenke91/ckPermission
|
app/decorators.py
|
16
|
#encoding: utf-8
|
mit
|
Nyholm/symfony
|
src/Symfony/Bundle/FrameworkBundle/DependencyInjection/Configuration.php
|
109287
|
<?php
/*
* This file is part of the Symfony package.
*
* (c) Fabien Potencier <fabien@symfony.com>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
namespace Symfony\Bundle\FrameworkBundle\DependencyInjection;
use Doctrine\Common\Annotations\Annotation;
use Doctrine\Common\Annotations\PsrCachedReader;
use Doctrine\Common\Cache\Cache;
use Doctrine\DBAL\Connection;
use Symfony\Bundle\FullStack;
use Symfony\Component\Asset\Package;
use Symfony\Component\Config\Definition\Builder\ArrayNodeDefinition;
use Symfony\Component\Config\Definition\Builder\NodeBuilder;
use Symfony\Component\Config\Definition\Builder\TreeBuilder;
use Symfony\Component\Config\Definition\ConfigurationInterface;
use Symfony\Component\Config\Definition\Exception\InvalidConfigurationException;
use Symfony\Component\DependencyInjection\ContainerBuilder;
use Symfony\Component\DependencyInjection\Exception\LogicException;
use Symfony\Component\Form\Form;
use Symfony\Component\HttpClient\HttpClient;
use Symfony\Component\HttpFoundation\Cookie;
use Symfony\Component\Lock\Lock;
use Symfony\Component\Lock\Store\SemaphoreStore;
use Symfony\Component\Mailer\Mailer;
use Symfony\Component\Messenger\MessageBusInterface;
use Symfony\Component\Notifier\Notifier;
use Symfony\Component\PropertyAccess\PropertyAccessor;
use Symfony\Component\PropertyInfo\PropertyInfoExtractorInterface;
use Symfony\Component\RateLimiter\Policy\TokenBucketLimiter;
use Symfony\Component\Serializer\Serializer;
use Symfony\Component\Translation\Translator;
use Symfony\Component\Uid\Factory\UuidFactory;
use Symfony\Component\Validator\Validation;
use Symfony\Component\WebLink\HttpHeaderSerializer;
use Symfony\Component\Workflow\WorkflowEvents;
/**
* FrameworkExtension configuration structure.
*/
class Configuration implements ConfigurationInterface
{
    // Kernel debug flag captured at construction; not referenced in the
    // sections visible here — presumably consulted by other add*Section
    // methods of this class.
    private $debug;
    /**
     * @param bool $debug Whether debugging is enabled or not
     */
    public function __construct(bool $debug)
    {
        $this->debug = $debug;
    }
    /**
     * Generates the configuration tree builder.
     *
     * Declares the top-level "framework" options inline, then delegates
     * each sub-section ("csrf_protection", "form", ...) to a dedicated
     * add*Section() method.
     *
     * @return TreeBuilder The tree builder
     */
    public function getConfigTreeBuilder()
    {
        $treeBuilder = new TreeBuilder('framework');
        $rootNode = $treeBuilder->getRootNode();
        $rootNode
            ->beforeNormalization()
                // legacy bridge: a "templating" section implies assets support
                ->ifTrue(function ($v) { return !isset($v['assets']) && isset($v['templating']) && class_exists(Package::class); })
                ->then(function ($v) {
                    $v['assets'] = [];
                    return $v;
                })
            ->end()
            ->children()
                ->scalarNode('secret')->end()
                ->scalarNode('http_method_override')
                    ->info("Set true to enable support for the '_method' request parameter to determine the intended HTTP method on POST requests. Note: When using the HttpCache, you need to call the method in your front controller instead")
                    ->defaultTrue()
                ->end()
                ->scalarNode('ide')->defaultNull()->end()
                ->booleanNode('test')->end()
                ->scalarNode('default_locale')->defaultValue('en')->end()
                ->arrayNode('trusted_hosts')
                    // accept a single host string as well as a list
                    ->beforeNormalization()->ifString()->then(function ($v) { return [$v]; })->end()
                    ->prototype('scalar')->end()
                ->end()
                ->scalarNode('trusted_proxies')->end()
                ->arrayNode('trusted_headers')
                    ->fixXmlConfig('trusted_header')
                    ->performNoDeepMerging()
                    ->defaultValue(['x-forwarded-for', 'x-forwarded-port', 'x-forwarded-proto'])
                    // accept a comma-separated string as well as a list
                    ->beforeNormalization()->ifString()->then(function ($v) { return $v ? array_map('trim', explode(',', $v)) : []; })->end()
                    ->enumPrototype()
                        ->values([
                            'forwarded',
                            'x-forwarded-for', 'x-forwarded-host', 'x-forwarded-proto', 'x-forwarded-port', 'x-forwarded-prefix',
                        ])
                    ->end()
                ->end()
                ->scalarNode('error_controller')
                    ->defaultValue('error_controller')
                ->end()
            ->end()
        ;
        // true when $package/$class can be resolved for this bundle (or the
        // given parent package), per ContainerBuilder::willBeAvailable()
        $willBeAvailable = static function (string $package, string $class, string $parentPackage = null) {
            $parentPackages = (array) $parentPackage;
            $parentPackages[] = 'symfony/framework-bundle';
            return ContainerBuilder::willBeAvailable($package, $class, $parentPackages);
        };
        // sections default to enabled only for standalone (non-FullStack)
        // installs where the component is actually available
        $enableIfStandalone = static function (string $package, string $class) use ($willBeAvailable) {
            return !class_exists(FullStack::class) && $willBeAvailable($package, $class) ? 'canBeDisabled' : 'canBeEnabled';
        };
        $this->addCsrfSection($rootNode);
        $this->addFormSection($rootNode, $enableIfStandalone);
        $this->addHttpCacheSection($rootNode);
        $this->addEsiSection($rootNode);
        $this->addSsiSection($rootNode);
        $this->addFragmentsSection($rootNode);
        $this->addProfilerSection($rootNode);
        $this->addWorkflowSection($rootNode);
        $this->addRouterSection($rootNode);
        $this->addSessionSection($rootNode);
        $this->addRequestSection($rootNode);
        $this->addAssetsSection($rootNode, $enableIfStandalone);
        $this->addTranslatorSection($rootNode, $enableIfStandalone);
        $this->addValidationSection($rootNode, $enableIfStandalone, $willBeAvailable);
        $this->addAnnotationsSection($rootNode, $willBeAvailable);
        $this->addSerializerSection($rootNode, $enableIfStandalone, $willBeAvailable);
        $this->addPropertyAccessSection($rootNode, $willBeAvailable);
        $this->addPropertyInfoSection($rootNode, $enableIfStandalone);
        $this->addCacheSection($rootNode, $willBeAvailable);
        $this->addPhpErrorsSection($rootNode);
        $this->addWebLinkSection($rootNode, $enableIfStandalone);
        $this->addLockSection($rootNode, $enableIfStandalone);
        $this->addMessengerSection($rootNode, $enableIfStandalone);
        $this->addRobotsIndexSection($rootNode);
        $this->addHttpClientSection($rootNode, $enableIfStandalone);
        $this->addMailerSection($rootNode, $enableIfStandalone);
        $this->addSecretsSection($rootNode);
        $this->addNotifierSection($rootNode, $enableIfStandalone);
        $this->addRateLimiterSection($rootNode, $enableIfStandalone);
        $this->addUidSection($rootNode, $enableIfStandalone);
        return $treeBuilder;
    }
    // Declares the "secrets" section: vault location, local .env override
    // file and the env var holding the decryption key.
    private function addSecretsSection(ArrayNodeDefinition $rootNode)
    {
        $rootNode
            ->children()
                ->arrayNode('secrets')
                    ->canBeDisabled()
                    ->children()
                        ->scalarNode('vault_directory')->defaultValue('%kernel.project_dir%/config/secrets/%kernel.runtime_environment%')->cannotBeEmpty()->end()
                        ->scalarNode('local_dotenv_file')->defaultValue('%kernel.project_dir%/.env.%kernel.environment%.local')->end()
                        ->scalarNode('decryption_env_var')->defaultValue('base64:default::SYMFONY_DECRYPTION_SECRET')->end()
                    ->end()
                ->end()
            ->end()
        ;
    }
    // Declares the "csrf_protection" section; true/false/null shorthands
    // are normalized to the {enabled: ...} form.
    private function addCsrfSection(ArrayNodeDefinition $rootNode)
    {
        $rootNode
            ->children()
                ->arrayNode('csrf_protection')
                    ->treatFalseLike(['enabled' => false])
                    ->treatTrueLike(['enabled' => true])
                    ->treatNullLike(['enabled' => true])
                    ->addDefaultsIfNotSet()
                    ->children()
                        // defaults to framework.session.enabled && !class_exists(FullStack::class) && interface_exists(CsrfTokenManagerInterface::class)
                        ->booleanNode('enabled')->defaultNull()->end()
                    ->end()
                ->end()
            ->end()
        ;
    }
    // Declares the "form" section (enabled by default only for standalone
    // installs with symfony/form available), including its own
    // csrf_protection sub-section and the deprecated legacy_error_messages flag.
    private function addFormSection(ArrayNodeDefinition $rootNode, callable $enableIfStandalone)
    {
        $rootNode
            ->children()
                ->arrayNode('form')
                    ->info('form configuration')
                    ->{$enableIfStandalone('symfony/form', Form::class)}()
                    ->children()
                        ->arrayNode('csrf_protection')
                            ->treatFalseLike(['enabled' => false])
                            ->treatTrueLike(['enabled' => true])
                            ->treatNullLike(['enabled' => true])
                            ->addDefaultsIfNotSet()
                            ->children()
                                ->booleanNode('enabled')->defaultNull()->end() // defaults to framework.csrf_protection.enabled
                                ->scalarNode('field_name')->defaultValue('_token')->end()
                            ->end()
                        ->end()
                        // to be set to false in Symfony 6.0
                        ->booleanNode('legacy_error_messages')
                            ->defaultTrue()
                            ->validate()
                                ->ifTrue()
                                ->then(function ($v) {
                                    trigger_deprecation('symfony/framework-bundle', '5.2', 'Setting the "framework.form.legacy_error_messages" option to "true" is deprecated. It will have no effect as of Symfony 6.0.');
                                    return $v;
                                })
                            ->end()
                        ->end()
                    ->end()
                ->end()
            ->end()
        ;
    }
    // Declares the "http_cache" section (disabled by default) configuring
    // the built-in HttpCache reverse proxy: tracing, TTLs, revalidation.
    private function addHttpCacheSection(ArrayNodeDefinition $rootNode)
    {
        $rootNode
            ->children()
                ->arrayNode('http_cache')
                    ->info('HTTP cache configuration')
                    ->canBeEnabled()
                    ->fixXmlConfig('private_header')
                    ->children()
                        ->booleanNode('debug')->defaultValue('%kernel.debug%')->end()
                        ->enumNode('trace_level')
                            ->values(['none', 'short', 'full'])
                        ->end()
                        ->scalarNode('trace_header')->end()
                        ->integerNode('default_ttl')->end()
                        ->arrayNode('private_headers')
                            ->performNoDeepMerging()
                            ->scalarPrototype()->end()
                        ->end()
                        ->booleanNode('allow_reload')->end()
                        ->booleanNode('allow_revalidate')->end()
                        ->integerNode('stale_while_revalidate')->end()
                        ->integerNode('stale_if_error')->end()
                    ->end()
                ->end()
            ->end()
        ;
    }
private function addEsiSection(ArrayNodeDefinition $rootNode)
{
$rootNode
->children()
->arrayNode('esi')
->info('esi configuration')
->canBeEnabled()
->end()
->end()
;
}
private function addSsiSection(ArrayNodeDefinition $rootNode)
{
$rootNode
->children()
->arrayNode('ssi')
->info('ssi configuration')
->canBeEnabled()
->end()
->end();
}
    // Declares the "fragments" section (disabled by default): path prefix
    // and default hinclude template for sub-fragment rendering.
    private function addFragmentsSection(ArrayNodeDefinition $rootNode)
    {
        $rootNode
            ->children()
                ->arrayNode('fragments')
                    ->info('fragments configuration')
                    ->canBeEnabled()
                    ->children()
                        ->scalarNode('hinclude_default_template')->defaultNull()->end()
                        ->scalarNode('path')->defaultValue('/_fragment')->end()
                    ->end()
                ->end()
            ->end()
        ;
    }
    // Declares the "profiler" section (disabled by default): collection
    // toggles, storage DSN, and the deprecated only_master_requests alias.
    private function addProfilerSection(ArrayNodeDefinition $rootNode)
    {
        $rootNode
            ->children()
                ->arrayNode('profiler')
                    ->info('profiler configuration')
                    ->canBeEnabled()
                    ->children()
                        ->booleanNode('collect')->defaultTrue()->end()
                        ->booleanNode('only_exceptions')->defaultFalse()->end()
                        ->booleanNode('only_main_requests')->defaultFalse()->end()
                        ->booleanNode('only_master_requests')->setDeprecated('symfony/framework-bundle', '5.3', 'Option "%node%" at "%path%" is deprecated, use "only_main_requests" instead.')->defaultFalse()->end()
                        ->scalarNode('dsn')->defaultValue('file:%kernel.cache_dir%/profiler')->end()
                    ->end()
                ->end()
            ->end()
        ;
    }
private function addWorkflowSection(ArrayNodeDefinition $rootNode)
{
$rootNode
->fixXmlConfig('workflow')
->children()
->arrayNode('workflows')
->canBeEnabled()
->beforeNormalization()
->always(function ($v) {
if (\is_array($v) && true === $v['enabled']) {
$workflows = $v;
unset($workflows['enabled']);
if (1 === \count($workflows) && isset($workflows[0]['enabled']) && 1 === \count($workflows[0])) {
$workflows = [];
}
if (1 === \count($workflows) && isset($workflows['workflows']) && array_keys($workflows['workflows']) !== range(0, \count($workflows) - 1) && !empty(array_diff(array_keys($workflows['workflows']), ['audit_trail', 'type', 'marking_store', 'supports', 'support_strategy', 'initial_marking', 'places', 'transitions']))) {
$workflows = $workflows['workflows'];
}
foreach ($workflows as $key => $workflow) {
if (isset($workflow['enabled']) && false === $workflow['enabled']) {
throw new LogicException(sprintf('Cannot disable a single workflow. Remove the configuration for the workflow "%s" instead.', $workflow['name']));
}
unset($workflows[$key]['enabled']);
}
$v = [
'enabled' => true,
'workflows' => $workflows,
];
}
return $v;
})
->end()
->children()
->arrayNode('workflows')
->useAttributeAsKey('name')
->prototype('array')
->fixXmlConfig('support')
->fixXmlConfig('place')
->fixXmlConfig('transition')
->fixXmlConfig('event_to_dispatch', 'events_to_dispatch')
->children()
->arrayNode('audit_trail')
->canBeEnabled()
->end()
->enumNode('type')
->values(['workflow', 'state_machine'])
->defaultValue('state_machine')
->end()
->arrayNode('marking_store')
->children()
->enumNode('type')
->values(['method'])
->end()
->scalarNode('property')
->defaultValue('marking')
->end()
->scalarNode('service')
->cannotBeEmpty()
->end()
->end()
->end()
->arrayNode('supports')
->beforeNormalization()
->ifString()
->then(function ($v) { return [$v]; })
->end()
->prototype('scalar')
->cannotBeEmpty()
->validate()
->ifTrue(function ($v) { return !class_exists($v) && !interface_exists($v, false); })
->thenInvalid('The supported class or interface "%s" does not exist.')
->end()
->end()
->end()
->scalarNode('support_strategy')
->cannotBeEmpty()
->end()
->arrayNode('initial_marking')
->beforeNormalization()->castToArray()->end()
->defaultValue([])
->prototype('scalar')->end()
->end()
->variableNode('events_to_dispatch')
->defaultValue(null)
->validate()
->ifTrue(function ($v) {
if (null === $v) {
return false;
}
if (!\is_array($v)) {
return true;
}
foreach ($v as $value) {
if (!\is_string($value)) {
return true;
}
if (class_exists(WorkflowEvents::class) && !\in_array($value, WorkflowEvents::ALIASES)) {
return true;
}
}
return false;
})
->thenInvalid('The value must be "null" or an array of workflow events (like ["workflow.enter"]).')
->end()
->info('Select which Transition events should be dispatched for this Workflow')
->example(['workflow.enter', 'workflow.transition'])
->end()
->arrayNode('places')
->beforeNormalization()
->always()
->then(function ($places) {
// It's an indexed array of shape ['place1', 'place2']
if (isset($places[0]) && \is_string($places[0])) {
return array_map(function (string $place) {
return ['name' => $place];
}, $places);
}
// It's an indexed array, we let the validation occur
if (isset($places[0]) && \is_array($places[0])) {
return $places;
}
foreach ($places as $name => $place) {
if (\is_array($place) && \array_key_exists('name', $place)) {
continue;
}
$place['name'] = $name;
$places[$name] = $place;
}
return array_values($places);
})
->end()
->isRequired()
->requiresAtLeastOneElement()
->prototype('array')
->children()
->scalarNode('name')
->isRequired()
->cannotBeEmpty()
->end()
->arrayNode('metadata')
->normalizeKeys(false)
->defaultValue([])
->example(['color' => 'blue', 'description' => 'Workflow to manage article.'])
->prototype('variable')
->end()
->end()
->end()
->end()
->end()
->arrayNode('transitions')
->beforeNormalization()
->always()
->then(function ($transitions) {
// It's an indexed array, we let the validation occur
if (isset($transitions[0]) && \is_array($transitions[0])) {
return $transitions;
}
foreach ($transitions as $name => $transition) {
if (\is_array($transition) && \array_key_exists('name', $transition)) {
continue;
}
$transition['name'] = $name;
$transitions[$name] = $transition;
}
return $transitions;
})
->end()
->isRequired()
->requiresAtLeastOneElement()
->prototype('array')
->children()
->scalarNode('name')
->isRequired()
->cannotBeEmpty()
->end()
->scalarNode('guard')
->cannotBeEmpty()
->info('An expression to block the transition')
->example('is_fully_authenticated() and is_granted(\'ROLE_JOURNALIST\') and subject.getTitle() == \'My first article\'')
->end()
->arrayNode('from')
->beforeNormalization()
->ifString()
->then(function ($v) { return [$v]; })
->end()
->requiresAtLeastOneElement()
->prototype('scalar')
->cannotBeEmpty()
->end()
->end()
->arrayNode('to')
->beforeNormalization()
->ifString()
->then(function ($v) { return [$v]; })
->end()
->requiresAtLeastOneElement()
->prototype('scalar')
->cannotBeEmpty()
->end()
->end()
->arrayNode('metadata')
->normalizeKeys(false)
->defaultValue([])
->example(['color' => 'blue', 'description' => 'Workflow to manage article.'])
->prototype('variable')
->end()
->end()
->end()
->end()
->end()
->arrayNode('metadata')
->normalizeKeys(false)
->defaultValue([])
->example(['color' => 'blue', 'description' => 'Workflow to manage article.'])
->prototype('variable')
->end()
->end()
->end()
->validate()
->ifTrue(function ($v) {
return $v['supports'] && isset($v['support_strategy']);
})
->thenInvalid('"supports" and "support_strategy" cannot be used together.')
->end()
->validate()
->ifTrue(function ($v) {
return !$v['supports'] && !isset($v['support_strategy']);
})
->thenInvalid('"supports" or "support_strategy" should be configured.')
->end()
->beforeNormalization()
->always()
->then(function ($values) {
// Special case to deal with XML when the user wants an empty array
if (\array_key_exists('event_to_dispatch', $values) && null === $values['event_to_dispatch']) {
$values['events_to_dispatch'] = [];
unset($values['event_to_dispatch']);
}
return $values;
})
->end()
->end()
->end()
->end()
->end()
->end()
;
}
/**
 * Adds the "framework.router" configuration subtree.
 *
 * Disabled by default; enabling it requires a routing "resource".
 *
 * @param ArrayNodeDefinition $rootNode The "framework" root node to attach to
 */
private function addRouterSection(ArrayNodeDefinition $rootNode)
{
        $rootNode
            ->children()
                ->arrayNode('router')
                    ->info('router configuration')
                    ->canBeEnabled()
                    ->children()
                        ->scalarNode('resource')->isRequired()->end()
                        ->scalarNode('type')->end()
                        ->scalarNode('default_uri')
                            ->info('The default URI used to generate URLs in a non-HTTP context')
                            ->defaultNull()
                        ->end()
                        // Ports used by the URL generator when forcing a scheme switch.
                        ->scalarNode('http_port')->defaultValue(80)->end()
                        ->scalarNode('https_port')->defaultValue(443)->end()
                        ->scalarNode('strict_requirements')
                            ->info(
                                "set to true to throw an exception when a parameter does not match the requirements\n".
                                "set to false to disable exceptions when a parameter does not match the requirements (and return null instead)\n".
                                "set to null to disable parameter checks against requirements\n".
                                "'true' is the preferred configuration in development mode, while 'false' or 'null' might be preferred in production"
                            )
                            ->defaultTrue()
                        ->end()
                        ->booleanNode('utf8')->defaultNull()->end()
                    ->end()
                ->end()
            ->end()
        ;
}
/**
 * Adds the "framework.session" configuration subtree.
 *
 * "storage_id" (legacy) and "storage_factory_id" are mutually exclusive;
 * the beforeNormalization() guard rejects configs that set both.
 *
 * @param ArrayNodeDefinition $rootNode The "framework" root node to attach to
 */
private function addSessionSection(ArrayNodeDefinition $rootNode)
{
        $rootNode
            ->children()
                ->arrayNode('session')
                    ->info('session configuration')
                    ->canBeEnabled()
                    ->beforeNormalization()
                        ->ifTrue(function ($v) {
                            return \is_array($v) && isset($v['storage_id']) && isset($v['storage_factory_id']);
                        })
                        ->thenInvalid('You cannot use both "storage_id" and "storage_factory_id" at the same time under "framework.session"')
                    ->end()
                    ->children()
                        ->scalarNode('storage_id')->defaultValue('session.storage.native')->end()
                        ->scalarNode('storage_factory_id')->defaultNull()->end()
                        ->scalarNode('handler_id')->defaultValue('session.handler.native_file')->end()
                        ->scalarNode('name')
                            ->validate()
                                // parse_str() mangles characters such as "." or "[" in keys:
                                // if a round-trip through it changes the name, it is not a
                                // safe session cookie name.
                                ->ifTrue(function ($v) {
                                    parse_str($v, $parsed);
                                    return implode('&', array_keys($parsed)) !== (string) $v;
                                })
                                ->thenInvalid('Session name %s contains illegal character(s)')
                            ->end()
                        ->end()
                        ->scalarNode('cookie_lifetime')->end()
                        ->scalarNode('cookie_path')->end()
                        ->scalarNode('cookie_domain')->end()
                        ->enumNode('cookie_secure')->values([true, false, 'auto'])->end()
                        ->booleanNode('cookie_httponly')->defaultTrue()->end()
                        ->enumNode('cookie_samesite')->values([null, Cookie::SAMESITE_LAX, Cookie::SAMESITE_STRICT, Cookie::SAMESITE_NONE])->defaultNull()->end()
                        ->booleanNode('use_cookies')->end()
                        ->scalarNode('gc_divisor')->end()
                        ->scalarNode('gc_probability')->defaultValue(1)->end()
                        ->scalarNode('gc_maxlifetime')->end()
                        ->scalarNode('save_path')->defaultValue('%kernel.cache_dir%/sessions')->end()
                        ->integerNode('metadata_update_threshold')
                            ->defaultValue(0)
                            ->info('seconds to wait between 2 session metadata updates')
                        ->end()
                        // Bounds mirror PHP's session.sid_length / session.sid_bits_per_character ini limits.
                        ->integerNode('sid_length')
                            ->min(22)
                            ->max(256)
                        ->end()
                        ->integerNode('sid_bits_per_character')
                            ->min(4)
                            ->max(6)
                        ->end()
                    ->end()
                ->end()
            ->end()
        ;
}
/**
 * Adds the "framework.request" configuration subtree.
 *
 * "formats" maps a format name to one or more MIME types, registered on the
 * Request class at runtime.
 *
 * @param ArrayNodeDefinition $rootNode The "framework" root node to attach to
 */
private function addRequestSection(ArrayNodeDefinition $rootNode)
{
        $rootNode
            ->children()
                ->arrayNode('request')
                    ->info('request configuration')
                    ->canBeEnabled()
                    ->fixXmlConfig('format')
                    ->children()
                        ->arrayNode('formats')
                            ->useAttributeAsKey('name')
                            ->prototype('array')
                                // XML config yields ['mime_type' => [...]]; unwrap it so both
                                // XML and YAML normalize to a plain list of MIME types.
                                ->beforeNormalization()
                                    ->ifTrue(function ($v) { return \is_array($v) && isset($v['mime_type']); })
                                    ->then(function ($v) { return $v['mime_type']; })
                                ->end()
                                ->beforeNormalization()->castToArray()->end()
                                ->prototype('scalar')->end()
                            ->end()
                        ->end()
                    ->end()
                ->end()
            ->end()
        ;
}
/**
 * Adds the "framework.assets" configuration subtree.
 *
 * Defines versioning options for the default asset package and for named
 * sub-packages. "version_strategy", "version" and "json_manifest_path" are
 * mutually exclusive at both levels, enforced by the validate() guards.
 *
 * @param ArrayNodeDefinition $rootNode           The "framework" root node to attach to
 * @param callable            $enableIfStandalone Returns the enable-strategy method name
 *                                                depending on whether symfony/asset is installed
 */
private function addAssetsSection(ArrayNodeDefinition $rootNode, callable $enableIfStandalone)
{
        $rootNode
            ->children()
                ->arrayNode('assets')
                    ->info('assets configuration')
                    ->{$enableIfStandalone('symfony/asset', Package::class)}()
                    ->fixXmlConfig('base_url')
                    ->children()
                        ->scalarNode('version_strategy')->defaultNull()->end()
                        ->scalarNode('version')->defaultNull()->end()
                        ->scalarNode('version_format')->defaultValue('%%s?%%s')->end()
                        ->scalarNode('json_manifest_path')->defaultNull()->end()
                        ->scalarNode('base_path')->defaultValue('')->end()
                        ->arrayNode('base_urls')
                            ->requiresAtLeastOneElement()
                            ->beforeNormalization()->castToArray()->end()
                            ->prototype('scalar')->end()
                        ->end()
                    ->end()
                    ->validate()
                        ->ifTrue(function ($v) {
                            return isset($v['version_strategy']) && isset($v['version']);
                        })
                        ->thenInvalid('You cannot use both "version_strategy" and "version" at the same time under "assets".')
                    ->end()
                    ->validate()
                        ->ifTrue(function ($v) {
                            return isset($v['version_strategy']) && isset($v['json_manifest_path']);
                        })
                        ->thenInvalid('You cannot use both "version_strategy" and "json_manifest_path" at the same time under "assets".')
                    ->end()
                    ->validate()
                        ->ifTrue(function ($v) {
                            return isset($v['version']) && isset($v['json_manifest_path']);
                        })
                        ->thenInvalid('You cannot use both "version" and "json_manifest_path" at the same time under "assets".')
                    ->end()
                    ->fixXmlConfig('package')
                    ->children()
                        ->arrayNode('packages')
                            ->normalizeKeys(false)
                            ->useAttributeAsKey('name')
                            ->prototype('array')
                                ->fixXmlConfig('base_url')
                                ->children()
                                    ->scalarNode('version_strategy')->defaultNull()->end()
                                    ->scalarNode('version')
                                        // Treat an empty string as "not set" so an empty XML
                                        // attribute does not become a literal '' version.
                                        ->beforeNormalization()
                                        ->ifTrue(function ($v) { return '' === $v; })
                                        ->then(function ($v) { return; })
                                        ->end()
                                    ->end()
                                    ->scalarNode('version_format')->defaultNull()->end()
                                    ->scalarNode('json_manifest_path')->defaultNull()->end()
                                    ->scalarNode('base_path')->defaultValue('')->end()
                                    ->arrayNode('base_urls')
                                        ->requiresAtLeastOneElement()
                                        ->beforeNormalization()->castToArray()->end()
                                        ->prototype('scalar')->end()
                                    ->end()
                                ->end()
                                ->validate()
                                    ->ifTrue(function ($v) {
                                        return isset($v['version_strategy']) && isset($v['version']);
                                    })
                                    ->thenInvalid('You cannot use both "version_strategy" and "version" at the same time under "assets" packages.')
                                ->end()
                                ->validate()
                                    ->ifTrue(function ($v) {
                                        return isset($v['version_strategy']) && isset($v['json_manifest_path']);
                                    })
                                    ->thenInvalid('You cannot use both "version_strategy" and "json_manifest_path" at the same time under "assets" packages.')
                                ->end()
                                ->validate()
                                    ->ifTrue(function ($v) {
                                        return isset($v['version']) && isset($v['json_manifest_path']);
                                    })
                                    ->thenInvalid('You cannot use both "version" and "json_manifest_path" at the same time under "assets" packages.')
                                ->end()
                            ->end()
                        ->end()
                    ->end()
                ->end()
            ->end()
        ;
}
/**
 * Adds the "framework.translator" configuration subtree.
 *
 * Covers fallback locales, translation paths, pseudo-localization options
 * and remote translation providers.
 *
 * @param ArrayNodeDefinition $rootNode           The "framework" root node to attach to
 * @param callable            $enableIfStandalone Returns the enable-strategy method name
 *                                                depending on whether symfony/translation is installed
 */
private function addTranslatorSection(ArrayNodeDefinition $rootNode, callable $enableIfStandalone)
{
        $rootNode
            ->children()
                ->arrayNode('translator')
                    ->info('translator configuration')
                    ->{$enableIfStandalone('symfony/translation', Translator::class)}()
                    ->fixXmlConfig('fallback')
                    ->fixXmlConfig('path')
                    ->fixXmlConfig('enabled_locale')
                    ->fixXmlConfig('provider')
                    ->children()
                        ->arrayNode('fallbacks')
                            ->info('Defaults to the value of "default_locale".')
                            // Allow a single locale string as a shortcut for a one-element list.
                            ->beforeNormalization()->ifString()->then(function ($v) { return [$v]; })->end()
                            ->prototype('scalar')->end()
                            ->defaultValue([])
                        ->end()
                        ->booleanNode('logging')->defaultValue(false)->end()
                        ->scalarNode('formatter')->defaultValue('translator.formatter.default')->end()
                        ->scalarNode('cache_dir')->defaultValue('%kernel.cache_dir%/translations')->end()
                        ->scalarNode('default_path')
                            ->info('The default path used to load translations')
                            ->defaultValue('%kernel.project_dir%/translations')
                        ->end()
                        ->arrayNode('paths')
                            ->prototype('scalar')->end()
                        ->end()
                        ->arrayNode('enabled_locales')
                            ->prototype('scalar')->end()
                            ->defaultValue([])
                        ->end()
                        ->arrayNode('pseudo_localization')
                            ->canBeEnabled()
                            ->fixXmlConfig('localizable_html_attribute')
                            ->children()
                                ->booleanNode('accents')->defaultTrue()->end()
                                ->floatNode('expansion_factor')
                                    ->min(1.0)
                                    ->defaultValue(1.0)
                                ->end()
                                ->booleanNode('brackets')->defaultTrue()->end()
                                ->booleanNode('parse_html')->defaultFalse()->end()
                                ->arrayNode('localizable_html_attributes')
                                    ->prototype('scalar')->end()
                                ->end()
                            ->end()
                        ->end()
                        ->arrayNode('providers')
                            ->info('Translation providers you can read/write your translations from')
                            ->useAttributeAsKey('name')
                            ->prototype('array')
                                ->fixXmlConfig('domain')
                                ->fixXmlConfig('locale')
                                ->children()
                                    ->scalarNode('dsn')->end()
                                    ->arrayNode('domains')
                                        ->prototype('scalar')->end()
                                        ->defaultValue([])
                                    ->end()
                                    ->arrayNode('locales')
                                        ->prototype('scalar')->end()
                                        ->defaultValue([])
                                        ->info('If not set, all locales listed under framework.translator.enabled_locales are used.')
                                    ->end()
                                ->end()
                            ->end()
                            ->defaultValue([])
                        ->end()
                    ->end()
                ->end()
            ->end()
        ;
}
/**
 * Adds the "framework.validation" configuration subtree.
 *
 * "enable_annotations" defaults to true only when doctrine/annotations is
 * (or will be) available and the full-stack meta-package is not in use.
 *
 * @param ArrayNodeDefinition $rootNode           The "framework" root node to attach to
 * @param callable            $enableIfStandalone Returns the enable-strategy method name
 *                                                depending on whether symfony/validator is installed
 * @param callable            $willBeAvailable    Predicate telling whether an optional package is/will be installed
 */
private function addValidationSection(ArrayNodeDefinition $rootNode, callable $enableIfStandalone, callable $willBeAvailable)
{
        $rootNode
            ->children()
                ->arrayNode('validation')
                    ->info('validation configuration')
                    ->{$enableIfStandalone('symfony/validator', Validation::class)}()
                    ->children()
                        ->scalarNode('cache')->end()
                        ->booleanNode('enable_annotations')->{!class_exists(FullStack::class) && $willBeAvailable('doctrine/annotations', Annotation::class, 'symfony/validator') ? 'defaultTrue' : 'defaultFalse'}()->end()
                        ->arrayNode('static_method')
                            ->defaultValue(['loadValidatorMetadata'])
                            ->prototype('scalar')->end()
                            ->treatFalseLike([])
                            ->validate()->castToArray()->end()
                        ->end()
                        ->scalarNode('translation_domain')->defaultValue('validators')->end()
                        ->enumNode('email_validation_mode')->values(['html5', 'loose', 'strict'])->end()
                        ->arrayNode('mapping')
                            ->addDefaultsIfNotSet()
                            ->fixXmlConfig('path')
                            ->children()
                                ->arrayNode('paths')
                                    ->prototype('scalar')->end()
                                ->end()
                            ->end()
                        ->end()
                        ->arrayNode('not_compromised_password')
                            ->canBeDisabled()
                            ->children()
                                ->booleanNode('enabled')
                                    ->defaultTrue()
                                    ->info('When disabled, compromised passwords will be accepted as valid.')
                                ->end()
                                ->scalarNode('endpoint')
                                    ->defaultNull()
                                    ->info('API endpoint for the NotCompromisedPassword Validator.')
                                ->end()
                            ->end()
                        ->end()
                        ->arrayNode('auto_mapping')
                            ->info('A collection of namespaces for which auto-mapping will be enabled by default, or null to opt-in with the EnableAutoMapping constraint.')
                            ->example([
                                'App\\Entity\\' => [],
                                'App\\WithSpecificLoaders\\' => ['validator.property_info_loader'],
                            ])
                            ->useAttributeAsKey('namespace')
                            ->normalizeKeys(false)
                            // Normalize the several accepted input shapes (bare namespace
                            // strings, namespace => loader-list, XML "namespace"/"service"
                            // attributes) to a uniform namespace => ['services' => [...]] map.
                            ->beforeNormalization()
                                ->ifArray()
                                ->then(function (array $values): array {
                                    foreach ($values as $k => $v) {
                                        if (isset($v['service'])) {
                                            continue;
                                        }
                                        if (isset($v['namespace'])) {
                                            $values[$k]['services'] = [];
                                            continue;
                                        }
                                        if (!\is_array($v)) {
                                            // Bare string entry: the value is the namespace.
                                            $values[$v]['services'] = [];
                                            unset($values[$k]);
                                            continue;
                                        }
                                        $tmp = $v;
                                        unset($values[$k]);
                                        $values[$k]['services'] = $tmp;
                                    }
                                    return $values;
                                })
                            ->end()
                            ->arrayPrototype()
                                ->fixXmlConfig('service')
                                ->children()
                                    ->arrayNode('services')
                                        ->prototype('scalar')->end()
                                    ->end()
                                ->end()
                            ->end()
                        ->end()
                    ->end()
                ->end()
            ->end()
        ;
}
/**
 * Adds the "framework.annotations" configuration subtree.
 *
 * The default "cache" value depends on whether a cache implementation
 * (doctrine/cache or symfony/cache) is available.
 *
 * @param ArrayNodeDefinition $rootNode        The "framework" root node to attach to
 * @param callable            $willBeAvailable Predicate telling whether an optional package is/will be installed
 */
private function addAnnotationsSection(ArrayNodeDefinition $rootNode, callable $willBeAvailable)
{
        // Fix: the parent-package hint previously used the non-existent package
        // name "doctrine/annotation"; the actual Composer package is
        // "doctrine/annotations" (as used below for the enable default).
        $doctrineCache = $willBeAvailable('doctrine/cache', Cache::class, 'doctrine/annotations');
        $psr6Cache = $willBeAvailable('symfony/cache', PsrCachedReader::class, 'doctrine/annotations');
        $rootNode
            ->children()
                ->arrayNode('annotations')
                    ->info('annotation configuration')
                    ->{$willBeAvailable('doctrine/annotations', Annotation::class) ? 'canBeDisabled' : 'canBeEnabled'}()
                    ->children()
                        ->scalarNode('cache')->defaultValue(($doctrineCache || $psr6Cache) ? 'php_array' : 'none')->end()
                        ->scalarNode('file_cache_dir')->defaultValue('%kernel.cache_dir%/annotations')->end()
                        ->booleanNode('debug')->defaultValue($this->debug)->end()
                    ->end()
                ->end()
            ->end()
        ;
}
/**
 * Adds the "framework.serializer" configuration subtree.
 *
 * @param ArrayNodeDefinition $rootNode           The "framework" root node to attach to
 * @param callable            $enableIfStandalone Returns the enable-strategy method name
 *                                                depending on whether symfony/serializer is installed
 * @param callable            $willBeAvailable    Predicate telling whether an optional package is/will be installed
 */
private function addSerializerSection(ArrayNodeDefinition $rootNode, callable $enableIfStandalone, callable $willBeAvailable)
{
        // Fix: $willBeAvailable was the only add*Section() parameter missing its
        // "callable" type declaration; it is invoked as a callable below, and every
        // sibling method (addValidationSection, addAnnotationsSection, ...) declares it.
        $rootNode
            ->children()
                ->arrayNode('serializer')
                    ->info('serializer configuration')
                    ->{$enableIfStandalone('symfony/serializer', Serializer::class)}()
                    ->children()
                        ->booleanNode('enable_annotations')->{!class_exists(FullStack::class) && $willBeAvailable('doctrine/annotations', Annotation::class, 'symfony/serializer') ? 'defaultTrue' : 'defaultFalse'}()->end()
                        ->scalarNode('name_converter')->end()
                        ->scalarNode('circular_reference_handler')->end()
                        ->scalarNode('max_depth_handler')->end()
                        ->arrayNode('mapping')
                            ->addDefaultsIfNotSet()
                            ->fixXmlConfig('path')
                            ->children()
                                ->arrayNode('paths')
                                    ->prototype('scalar')->end()
                                ->end()
                            ->end()
                        ->end()
                    ->end()
                ->end()
            ->end()
        ;
}
/**
 * Adds the "framework.property_access" configuration subtree.
 *
 * Enabled by default when symfony/property-access is installed, otherwise
 * disabled by default.
 *
 * @param ArrayNodeDefinition $rootNode        The "framework" root node to attach to
 * @param callable            $willBeAvailable Predicate telling whether an optional package is/will be installed
 */
private function addPropertyAccessSection(ArrayNodeDefinition $rootNode, callable $willBeAvailable)
{
        $rootNode
            ->children()
                ->arrayNode('property_access')
                    ->addDefaultsIfNotSet()
                    ->info('Property access configuration')
                    ->{$willBeAvailable('symfony/property-access', PropertyAccessor::class) ? 'canBeDisabled' : 'canBeEnabled'}()
                    ->children()
                        ->booleanNode('magic_call')->defaultFalse()->end()
                        ->booleanNode('magic_get')->defaultTrue()->end()
                        ->booleanNode('magic_set')->defaultTrue()->end()
                        ->booleanNode('throw_exception_on_invalid_index')->defaultFalse()->end()
                        ->booleanNode('throw_exception_on_invalid_property_path')->defaultTrue()->end()
                    ->end()
                ->end()
            ->end()
        ;
}
/**
 * Adds the "framework.property_info" configuration subtree (on/off only).
 *
 * @param ArrayNodeDefinition $rootNode           The "framework" root node to attach to
 * @param callable            $enableIfStandalone Returns the enable-strategy method name
 *                                                depending on whether symfony/property-info is installed
 */
private function addPropertyInfoSection(ArrayNodeDefinition $rootNode, callable $enableIfStandalone)
{
        $rootNode
            ->children()
                ->arrayNode('property_info')
                    ->info('Property info configuration')
                    ->{$enableIfStandalone('symfony/property-info', PropertyInfoExtractorInterface::class)}()
                ->end()
            ->end()
        ;
}
/**
 * Adds the "framework.cache" configuration subtree.
 *
 * Defines default providers per backend type and the user-defined cache
 * pools, each of which may chain one or more adapters.
 *
 * @param ArrayNodeDefinition $rootNode        The "framework" root node to attach to
 * @param callable            $willBeAvailable Predicate telling whether an optional package is/will be installed
 */
private function addCacheSection(ArrayNodeDefinition $rootNode, callable $willBeAvailable)
{
        $rootNode
            ->children()
                ->arrayNode('cache')
                    ->info('Cache configuration')
                    ->addDefaultsIfNotSet()
                    ->fixXmlConfig('pool')
                    ->children()
                        ->scalarNode('prefix_seed')
                            ->info('Used to namespace cache keys when using several apps with the same shared backend')
                            ->defaultValue('_%kernel.project_dir%.%kernel.container_class%')
                            ->example('my-application-name/%kernel.environment%')
                        ->end()
                        ->scalarNode('app')
                            ->info('App related cache pools configuration')
                            ->defaultValue('cache.adapter.filesystem')
                        ->end()
                        ->scalarNode('system')
                            ->info('System related cache pools configuration')
                            ->defaultValue('cache.adapter.system')
                        ->end()
                        ->scalarNode('directory')->defaultValue('%kernel.cache_dir%/pools')->end()
                        ->scalarNode('default_doctrine_provider')->end()
                        ->scalarNode('default_psr6_provider')->end()
                        ->scalarNode('default_redis_provider')->defaultValue('redis://localhost')->end()
                        ->scalarNode('default_memcached_provider')->defaultValue('memcached://localhost')->end()
                        // Only default to the DBAL connection when doctrine/dbal is (or will be) installed.
                        ->scalarNode('default_pdo_provider')->defaultValue($willBeAvailable('doctrine/dbal', Connection::class) ? 'database_connection' : null)->end()
                        ->arrayNode('pools')
                            ->useAttributeAsKey('name')
                            ->prototype('array')
                                ->fixXmlConfig('adapter')
                                // A pool-level "provider" only makes sense with a single adapter.
                                ->beforeNormalization()
                                    ->ifTrue(function ($v) { return (isset($v['adapters']) || \is_array($v['adapter'] ?? null)) && isset($v['provider']); })
                                    ->thenInvalid('Pool cannot have a "provider" while "adapter" is set to a map')
                                ->end()
                                ->children()
                                    ->arrayNode('adapters')
                                        ->performNoDeepMerging()
                                        ->info('One or more adapters to chain for creating the pool, defaults to "cache.app".')
                                        // Accept a bare string, a list of strings, or XML
                                        // adapter entries ({name, provider}) and normalize
                                        // them all to provider => adapter / list entries.
                                        ->beforeNormalization()
                                            ->always()->then(function ($values) {
                                                if ([0] === array_keys($values) && \is_array($values[0])) {
                                                    return $values[0];
                                                }
                                                $adapters = [];
                                                foreach ($values as $k => $v) {
                                                    if (\is_int($k) && \is_string($v)) {
                                                        $adapters[] = $v;
                                                    } elseif (!\is_array($v)) {
                                                        $adapters[$k] = $v;
                                                    } elseif (isset($v['provider'])) {
                                                        $adapters[$v['provider']] = $v['name'] ?? $v;
                                                    } else {
                                                        $adapters[] = $v['name'] ?? $v;
                                                    }
                                                }
                                                return $adapters;
                                            })
                                        ->end()
                                        ->prototype('scalar')->end()
                                    ->end()
                                    ->scalarNode('tags')->defaultNull()->end()
                                    ->booleanNode('public')->defaultFalse()->end()
                                    ->scalarNode('default_lifetime')
                                        ->info('Default lifetime of the pool')
                                        ->example('"600" for 5 minutes expressed in seconds, "PT5M" for five minutes expressed as ISO 8601 time interval, or "5 minutes" as a date expression')
                                    ->end()
                                    ->scalarNode('provider')
                                        ->info('Overwrite the setting from the default provider for this adapter.')
                                    ->end()
                                    ->scalarNode('early_expiration_message_bus')
                                        ->example('"messenger.default_bus" to send early expiration events to the default Messenger bus.')
                                    ->end()
                                    ->scalarNode('clearer')->end()
                                ->end()
                            ->end()
                            ->validate()
                                ->ifTrue(function ($v) { return isset($v['cache.app']) || isset($v['cache.system']); })
                                ->thenInvalid('"cache.app" and "cache.system" are reserved names')
                            ->end()
                        ->end()
                    ->end()
                ->end()
            ->end()
        ;
}
/**
 * Adds the "framework.php_errors" configuration subtree.
 *
 * "log" accepts a boolean, an E_* bit field, or a map of E_* constants to
 * log levels; both "log" and "throw" default to the kernel debug flag.
 *
 * @param ArrayNodeDefinition $rootNode The "framework" root node to attach to
 */
private function addPhpErrorsSection(ArrayNodeDefinition $rootNode)
{
        $rootNode
            ->children()
                ->arrayNode('php_errors')
                    ->info('PHP errors handling configuration')
                    ->addDefaultsIfNotSet()
                    ->children()
                        ->variableNode('log')
                            ->info('Use the application logger instead of the PHP logger for logging PHP errors.')
                            ->example('"true" to use the default configuration: log all errors. "false" to disable. An integer bit field of E_* constants, or an array mapping E_* constants to log levels.')
                            ->defaultValue($this->debug)
                            ->treatNullLike($this->debug)
                            ->beforeNormalization()
                                ->ifArray()
                                ->then(function (array $v): array {
                                    // Plain maps (type => level) pass through untouched;
                                    // only XML's list-of-{type, logLevel} entries need folding.
                                    if (!($v[0]['type'] ?? false)) {
                                        return $v;
                                    }
                                    // Fix XML normalization
                                    $ret = [];
                                    foreach ($v as ['type' => $type, 'logLevel' => $logLevel]) {
                                        $ret[$type] = $logLevel;
                                    }
                                    return $ret;
                                })
                            ->end()
                            ->validate()
                                ->ifTrue(function ($v) { return !(\is_int($v) || \is_bool($v) || \is_array($v)); })
                                ->thenInvalid('The "php_errors.log" parameter should be either an integer, a boolean, or an array')
                            ->end()
                        ->end()
                        ->booleanNode('throw')
                            ->info('Throw PHP errors as \ErrorException instances.')
                            ->defaultValue($this->debug)
                            ->treatNullLike($this->debug)
                        ->end()
                    ->end()
                ->end()
            ->end()
        ;
}
/**
 * Adds the "framework.lock" configuration subtree.
 *
 * The chained beforeNormalization() calls accept a bare DSN string, an
 * array without "enabled", or an array without "resources", and normalize
 * all of them to ['enabled' => bool, 'resources' => [...]]. The default
 * store is "semaphore" when supported, "flock" otherwise.
 *
 * @param ArrayNodeDefinition $rootNode           The "framework" root node to attach to
 * @param callable            $enableIfStandalone Returns the enable-strategy method name
 *                                                depending on whether symfony/lock is installed
 */
private function addLockSection(ArrayNodeDefinition $rootNode, callable $enableIfStandalone)
{
        $rootNode
            ->children()
                ->arrayNode('lock')
                    ->info('Lock configuration')
                    ->{$enableIfStandalone('symfony/lock', Lock::class)}()
                    ->beforeNormalization()
                        ->ifString()->then(function ($v) { return ['enabled' => true, 'resources' => $v]; })
                    ->end()
                    ->beforeNormalization()
                        ->ifTrue(function ($v) { return \is_array($v) && !isset($v['enabled']); })
                        ->then(function ($v) { return $v + ['enabled' => true]; })
                    ->end()
                    ->beforeNormalization()
                        ->ifTrue(function ($v) { return \is_array($v) && !isset($v['resources']) && !isset($v['resource']); })
                        ->then(function ($v) {
                            $e = $v['enabled'];
                            unset($v['enabled']);
                            return ['enabled' => $e, 'resources' => $v];
                        })
                    ->end()
                    ->addDefaultsIfNotSet()
                    ->fixXmlConfig('resource')
                    ->children()
                        ->arrayNode('resources')
                            ->normalizeKeys(false)
                            ->useAttributeAsKey('name')
                            ->requiresAtLeastOneElement()
                            ->defaultValue(['default' => [class_exists(SemaphoreStore::class) && SemaphoreStore::isSupported() ? 'semaphore' : 'flock']])
                            ->beforeNormalization()
                                ->ifString()->then(function ($v) { return ['default' => $v]; })
                            ->end()
                            ->beforeNormalization()
                                // A plain list of stores (or XML {name, value} entries) is
                                // grouped into named resources, unnamed ones under "default".
                                ->ifTrue(function ($v) { return \is_array($v) && array_keys($v) === range(0, \count($v) - 1); })
                                ->then(function ($v) {
                                    $resources = [];
                                    foreach ($v as $resource) {
                                        $resources = array_merge_recursive(
                                            $resources,
                                            \is_array($resource) && isset($resource['name'])
                                                ? [$resource['name'] => $resource['value']]
                                                : ['default' => $resource]
                                        );
                                    }
                                    return $resources;
                                })
                            ->end()
                            ->prototype('array')
                                ->performNoDeepMerging()
                                ->beforeNormalization()->ifString()->then(function ($v) { return [$v]; })->end()
                                ->prototype('scalar')->end()
                            ->end()
                        ->end()
                    ->end()
                ->end()
            ->end()
        ;
}
/**
 * Adds the "framework.web_link" configuration subtree (on/off only).
 *
 * @param ArrayNodeDefinition $rootNode           The "framework" root node to attach to
 * @param callable            $enableIfStandalone Returns the enable-strategy method name
 *                                                depending on whether symfony/weblink is installed
 */
private function addWebLinkSection(ArrayNodeDefinition $rootNode, callable $enableIfStandalone)
{
        $rootNode
            ->children()
                ->arrayNode('web_link')
                    ->info('web links configuration')
                    ->{$enableIfStandalone('symfony/weblink', HttpHeaderSerializer::class)}()
                ->end()
            ->end()
        ;
}
/**
 * Adds the "framework.messenger" configuration subtree.
 *
 * Defines message routing, serializers, transports (with per-transport
 * retry strategy and failure transport), and the message buses with their
 * middleware stacks. When several buses are configured, "default_bus" is
 * mandatory and must reference one of them.
 *
 * @param ArrayNodeDefinition $rootNode           The "framework" root node to attach to
 * @param callable            $enableIfStandalone Returns the enable-strategy method name
 *                                                depending on whether symfony/messenger is installed
 */
private function addMessengerSection(ArrayNodeDefinition $rootNode, callable $enableIfStandalone)
{
        $rootNode
            ->children()
                ->arrayNode('messenger')
                    ->info('Messenger configuration')
                    ->{$enableIfStandalone('symfony/messenger', MessageBusInterface::class)}()
                    ->fixXmlConfig('transport')
                    ->fixXmlConfig('bus', 'buses')
                    ->validate()
                        ->ifTrue(function ($v) { return isset($v['buses']) && \count($v['buses']) > 1 && null === $v['default_bus']; })
                        ->thenInvalid('You must specify the "default_bus" if you define more than one bus.')
                    ->end()
                    ->validate()
                        ->ifTrue(static function ($v): bool { return isset($v['buses']) && null !== $v['default_bus'] && !isset($v['buses'][$v['default_bus']]); })
                        ->then(static function (array $v): void { throw new InvalidConfigurationException(sprintf('The specified default bus "%s" is not configured. Available buses are "%s".', $v['default_bus'], implode('", "', array_keys($v['buses'])))); })
                    ->end()
                    ->children()
                        ->arrayNode('routing')
                            ->normalizeKeys(false)
                            ->useAttributeAsKey('message_class')
                            // Normalize routing to message-class => ['senders' => [...]]:
                            // string keys may map to a sender or list of senders, while XML
                            // produces integer-keyed entries with 'message-class'/'sender'.
                            ->beforeNormalization()
                                ->always()
                                ->then(function ($config) {
                                    if (!\is_array($config)) {
                                        return [];
                                    }
                                    // If XML config with only one routing attribute
                                    if (2 === \count($config) && isset($config['message-class']) && isset($config['sender'])) {
                                        $config = [0 => $config];
                                    }
                                    $newConfig = [];
                                    foreach ($config as $k => $v) {
                                        if (!\is_int($k)) {
                                            $newConfig[$k] = [
                                                'senders' => $v['senders'] ?? (\is_array($v) ? array_values($v) : [$v]),
                                            ];
                                        } else {
                                            $newConfig[$v['message-class']]['senders'] = array_map(
                                                function ($a) {
                                                    return \is_string($a) ? $a : $a['service'];
                                                },
                                                array_values($v['sender'])
                                            );
                                        }
                                    }
                                    return $newConfig;
                                })
                            ->end()
                            ->prototype('array')
                                ->performNoDeepMerging()
                                ->children()
                                    ->arrayNode('senders')
                                        ->requiresAtLeastOneElement()
                                        ->prototype('scalar')->end()
                                    ->end()
                                ->end()
                            ->end()
                        ->end()
                        ->arrayNode('serializer')
                            ->addDefaultsIfNotSet()
                            ->children()
                                ->scalarNode('default_serializer')
                                    ->defaultValue('messenger.transport.native_php_serializer')
                                    ->info('Service id to use as the default serializer for the transports.')
                                ->end()
                                ->arrayNode('symfony_serializer')
                                    ->addDefaultsIfNotSet()
                                    ->children()
                                        ->scalarNode('format')->defaultValue('json')->info('Serialization format for the messenger.transport.symfony_serializer service (which is not the serializer used by default).')->end()
                                        ->arrayNode('context')
                                            ->normalizeKeys(false)
                                            ->useAttributeAsKey('name')
                                            ->defaultValue([])
                                            ->info('Context array for the messenger.transport.symfony_serializer service (which is not the serializer used by default).')
                                            ->prototype('variable')->end()
                                        ->end()
                                    ->end()
                                ->end()
                            ->end()
                        ->end()
                        ->arrayNode('transports')
                            ->normalizeKeys(false)
                            ->useAttributeAsKey('name')
                            ->arrayPrototype()
                                // Allow "transport_name: 'dsn://...'" as a shortcut.
                                ->beforeNormalization()
                                    ->ifString()
                                    ->then(function (string $dsn) {
                                        return ['dsn' => $dsn];
                                    })
                                ->end()
                                ->fixXmlConfig('option')
                                ->children()
                                    ->scalarNode('dsn')->end()
                                    ->scalarNode('serializer')->defaultNull()->info('Service id of a custom serializer to use.')->end()
                                    ->arrayNode('options')
                                        ->normalizeKeys(false)
                                        ->defaultValue([])
                                        ->prototype('variable')
                                        ->end()
                                    ->end()
                                    ->scalarNode('failure_transport')
                                        ->defaultNull()
                                        ->info('Transport name to send failed messages to (after all retries have failed).')
                                    ->end()
                                    ->arrayNode('retry_strategy')
                                        ->addDefaultsIfNotSet()
                                        ->beforeNormalization()
                                            // "service" replaces the whole strategy, so it
                                            // cannot be combined with the inline options.
                                            ->always(function ($v) {
                                                if (isset($v['service']) && (isset($v['max_retries']) || isset($v['delay']) || isset($v['multiplier']) || isset($v['max_delay']))) {
                                                    throw new \InvalidArgumentException('The "service" cannot be used along with the other "retry_strategy" options.');
                                                }
                                                return $v;
                                            })
                                        ->end()
                                        ->children()
                                            ->scalarNode('service')->defaultNull()->info('Service id to override the retry strategy entirely')->end()
                                            ->integerNode('max_retries')->defaultValue(3)->min(0)->end()
                                            ->integerNode('delay')->defaultValue(1000)->min(0)->info('Time in ms to delay (or the initial value when multiplier is used)')->end()
                                            ->floatNode('multiplier')->defaultValue(2)->min(1)->info('If greater than 1, delay will grow exponentially for each retry: this delay = (delay * (multiple ^ retries))')->end()
                                            ->integerNode('max_delay')->defaultValue(0)->min(0)->info('Max time in ms that a retry should ever be delayed (0 = infinite)')->end()
                                        ->end()
                                    ->end()
                                ->end()
                            ->end()
                        ->end()
                        // Global fallback failure transport, used when a transport does
                        // not define its own "failure_transport" above.
                        ->scalarNode('failure_transport')
                            ->defaultNull()
                            ->info('Transport name to send failed messages to (after all retries have failed).')
                        ->end()
                        ->scalarNode('default_bus')->defaultNull()->end()
                        ->arrayNode('buses')
                            ->defaultValue(['messenger.bus.default' => ['default_middleware' => true, 'middleware' => []]])
                            ->normalizeKeys(false)
                            ->useAttributeAsKey('name')
                            ->arrayPrototype()
                                ->addDefaultsIfNotSet()
                                ->children()
                                    ->enumNode('default_middleware')
                                        ->values([true, false, 'allow_no_handlers'])
                                        ->defaultTrue()
                                    ->end()
                                    ->arrayNode('middleware')
                                        ->performNoDeepMerging()
                                        // A single middleware (string or one-entry map) is
                                        // wrapped into a list before per-item normalization.
                                        ->beforeNormalization()
                                            ->ifTrue(function ($v) { return \is_string($v) || (\is_array($v) && !\is_int(key($v))); })
                                            ->then(function ($v) { return [$v]; })
                                        ->end()
                                        ->defaultValue([])
                                        ->arrayPrototype()
                                            ->beforeNormalization()
                                                ->always()
                                                ->then(function ($middleware): array {
                                                    if (!\is_array($middleware)) {
                                                        return ['id' => $middleware];
                                                    }
                                                    if (isset($middleware['id'])) {
                                                        return $middleware;
                                                    }
                                                    if (1 < \count($middleware)) {
                                                        throw new \InvalidArgumentException('Invalid middleware at path "framework.messenger": a map with a single factory id as key and its arguments as value was expected, '.json_encode($middleware).' given.');
                                                    }
                                                    return [
                                                        'id' => key($middleware),
                                                        'arguments' => current($middleware),
                                                    ];
                                                })
                                            ->end()
                                            ->fixXmlConfig('argument')
                                            ->children()
                                                ->scalarNode('id')->isRequired()->cannotBeEmpty()->end()
                                                ->arrayNode('arguments')
                                                    ->normalizeKeys(false)
                                                    ->defaultValue([])
                                                    ->prototype('variable')
                                                    ->end()
                                                ->end()
                                            ->end()
                                        ->end()
                                    ->end()
                                ->end()
                            ->end()
                        ->end()
                    ->end()
                ->end()
            ->end()
        ;
}
/**
 * Adds the "framework.disallow_search_engine_index" boolean option.
 *
 * Defaults to the kernel debug flag (and treats null the same way), so
 * debug environments are not indexed by search engines by default.
 *
 * @param ArrayNodeDefinition $rootNode The "framework" root node to attach to
 */
private function addRobotsIndexSection(ArrayNodeDefinition $rootNode)
{
        $rootNode
            ->children()
                ->booleanNode('disallow_search_engine_index')
                    ->info('Enabled by default when debug is enabled.')
                    ->defaultValue($this->debug)
                    ->treatNullLike($this->debug)
                ->end()
            ->end()
        ;
}
// Defines the "framework.http_client" tree: global "default_options" plus
// named "scoped_clients" whose options apply only to URLs matching a
// "scope" regex or "base_uri". The node is enabled automatically when
// symfony/http-client is installed (via $enableIfStandalone).
private function addHttpClientSection(ArrayNodeDefinition $rootNode, callable $enableIfStandalone)
{
$rootNode
->children()
->arrayNode('http_client')
->info('HTTP Client configuration')
->{$enableIfStandalone('symfony/http-client', HttpClient::class)}()
->fixXmlConfig('scoped_client')
// Propagate default_options.retry_failed into every scoped client that
// did not configure its own policy; scoped keys win when both arrays
// are present (array union keeps left-hand entries).
->beforeNormalization()
->always(function ($config) {
if (empty($config['scoped_clients']) || !\is_array($config['default_options']['retry_failed'] ?? null)) {
return $config;
}
foreach ($config['scoped_clients'] as &$scopedConfig) {
if (!isset($scopedConfig['retry_failed']) || true === $scopedConfig['retry_failed']) {
$scopedConfig['retry_failed'] = $config['default_options']['retry_failed'];
continue;
}
if (\is_array($scopedConfig['retry_failed'])) {
$scopedConfig['retry_failed'] = $scopedConfig['retry_failed'] + $config['default_options']['retry_failed'];
}
}
return $config;
})
->end()
->children()
->integerNode('max_host_connections')
->info('The maximum number of connections to a single host.')
->end()
// Options applied to every request unless overridden per scoped client.
->arrayNode('default_options')
->fixXmlConfig('header')
->children()
->arrayNode('headers')
->info('Associative array: header => value(s).')
->useAttributeAsKey('name')
->normalizeKeys(false)
->variablePrototype()->end()
->end()
->integerNode('max_redirects')
->info('The maximum number of redirects to follow.')
->end()
->scalarNode('http_version')
->info('The default HTTP version, typically 1.1 or 2.0, leave to null for the best version.')
->end()
->arrayNode('resolve')
->info('Associative array: domain => IP.')
// Accepts the XML shorthand <resolve host="..." value="..."/> and
// converts it to the canonical host => IP map.
->useAttributeAsKey('host')
->beforeNormalization()
->always(function ($config) {
if (!\is_array($config)) {
return [];
}
if (!isset($config['host'], $config['value']) || \count($config) > 2) {
return $config;
}
return [$config['host'] => $config['value']];
})
->end()
->normalizeKeys(false)
->scalarPrototype()->end()
->end()
->scalarNode('proxy')
->info('The URL of the proxy to pass requests through or null for automatic detection.')
->end()
->scalarNode('no_proxy')
->info('A comma separated list of hosts that do not require a proxy to be reached.')
->end()
->floatNode('timeout')
->info('The idle timeout, defaults to the "default_socket_timeout" ini parameter.')
->end()
->floatNode('max_duration')
->info('The maximum execution time for the request+response as a whole.')
->end()
->scalarNode('bindto')
->info('A network interface name, IP address, a host name or a UNIX socket to bind to.')
->end()
->booleanNode('verify_peer')
->info('Indicates if the peer should be verified in an SSL/TLS context.')
->end()
->booleanNode('verify_host')
->info('Indicates if the host should exist as a certificate common name.')
->end()
->scalarNode('cafile')
->info('A certificate authority file.')
->end()
->scalarNode('capath')
->info('A directory that contains multiple certificate authority files.')
->end()
->scalarNode('local_cert')
->info('A PEM formatted certificate file.')
->end()
->scalarNode('local_pk')
->info('A private key file.')
->end()
->scalarNode('passphrase')
->info('The passphrase used to encrypt the "local_pk" file.')
->end()
->scalarNode('ciphers')
->info('A list of SSL/TLS ciphers separated by colons, commas or spaces (e.g. "RC3-SHA:TLS13-AES-128-GCM-SHA256"...)')
->end()
->arrayNode('peer_fingerprint')
->info('Associative array: hashing algorithm => hash(es).')
->normalizeKeys(false)
->children()
->variableNode('sha1')->end()
->variableNode('pin-sha256')->end()
->variableNode('md5')->end()
->end()
->end()
// Shared "retry_failed" subtree (see addHttpClientRetrySection()).
->append($this->addHttpClientRetrySection())
->end()
->end()
->scalarNode('mock_response_factory')
->info('The id of the service that should generate mock responses. It should be either an invokable or an iterable.')
->end()
// Named clients limited to a URL scope; a bare string value is treated
// as the client's base_uri.
->arrayNode('scoped_clients')
->useAttributeAsKey('name')
->normalizeKeys(false)
->arrayPrototype()
->fixXmlConfig('header')
->beforeNormalization()
->always()
->then(function ($config) {
if (!class_exists(HttpClient::class)) {
throw new LogicException('HttpClient support cannot be enabled as the component is not installed. Try running "composer require symfony/http-client".')
;
}
return \is_array($config) ? $config : ['base_uri' => $config];
})
->end()
->validate()
->ifTrue(function ($v) { return !isset($v['scope']) && !isset($v['base_uri']); })
->thenInvalid('Either "scope" or "base_uri" should be defined.')
->end()
->validate()
->ifTrue(function ($v) { return !empty($v['query']) && !isset($v['base_uri']); })
->thenInvalid('"query" applies to "base_uri" but no base URI is defined.')
->end()
->children()
->scalarNode('scope')
->info('The regular expression that the request URL must match before adding the other options. When none is provided, the base URI is used instead.')
->cannotBeEmpty()
->end()
->scalarNode('base_uri')
->info('The URI to resolve relative URLs, following rules in RFC 3985, section 2.')
->cannotBeEmpty()
->end()
->scalarNode('auth_basic')
->info('An HTTP Basic authentication "username:password".')
->end()
->scalarNode('auth_bearer')
->info('A token enabling HTTP Bearer authorization.')
->end()
->scalarNode('auth_ntlm')
->info('A "username:password" pair to use Microsoft NTLM authentication (requires the cURL extension).')
->end()
->arrayNode('query')
->info('Associative array of query string values merged with the base URI.')
// Accepts the XML shorthand <query key="..." value="..."/>.
->useAttributeAsKey('key')
->beforeNormalization()
->always(function ($config) {
if (!\is_array($config)) {
return [];
}
if (!isset($config['key'], $config['value']) || \count($config) > 2) {
return $config;
}
return [$config['key'] => $config['value']];
})
->end()
->normalizeKeys(false)
->scalarPrototype()->end()
->end()
// The options below mirror default_options and override them for
// requests handled by this scoped client.
->arrayNode('headers')
->info('Associative array: header => value(s).')
->useAttributeAsKey('name')
->normalizeKeys(false)
->variablePrototype()->end()
->end()
->integerNode('max_redirects')
->info('The maximum number of redirects to follow.')
->end()
->scalarNode('http_version')
->info('The default HTTP version, typically 1.1 or 2.0, leave to null for the best version.')
->end()
->arrayNode('resolve')
->info('Associative array: domain => IP.')
->useAttributeAsKey('host')
->beforeNormalization()
->always(function ($config) {
if (!\is_array($config)) {
return [];
}
if (!isset($config['host'], $config['value']) || \count($config) > 2) {
return $config;
}
return [$config['host'] => $config['value']];
})
->end()
->normalizeKeys(false)
->scalarPrototype()->end()
->end()
->scalarNode('proxy')
->info('The URL of the proxy to pass requests through or null for automatic detection.')
->end()
->scalarNode('no_proxy')
->info('A comma separated list of hosts that do not require a proxy to be reached.')
->end()
->floatNode('timeout')
->info('The idle timeout, defaults to the "default_socket_timeout" ini parameter.')
->end()
->floatNode('max_duration')
->info('The maximum execution time for the request+response as a whole.')
->end()
->scalarNode('bindto')
->info('A network interface name, IP address, a host name or a UNIX socket to bind to.')
->end()
->booleanNode('verify_peer')
->info('Indicates if the peer should be verified in an SSL/TLS context.')
->end()
->booleanNode('verify_host')
->info('Indicates if the host should exist as a certificate common name.')
->end()
->scalarNode('cafile')
->info('A certificate authority file.')
->end()
->scalarNode('capath')
->info('A directory that contains multiple certificate authority files.')
->end()
->scalarNode('local_cert')
->info('A PEM formatted certificate file.')
->end()
->scalarNode('local_pk')
->info('A private key file.')
->end()
->scalarNode('passphrase')
->info('The passphrase used to encrypt the "local_pk" file.')
->end()
->scalarNode('ciphers')
->info('A list of SSL/TLS ciphers separated by colons, commas or spaces (e.g. "RC3-SHA:TLS13-AES-128-GCM-SHA256"...)')
->end()
->arrayNode('peer_fingerprint')
->info('Associative array: hashing algorithm => hash(es).')
->normalizeKeys(false)
->children()
->variableNode('sha1')->end()
->variableNode('pin-sha256')->end()
->variableNode('md5')->end()
->end()
->end()
->append($this->addHttpClientRetrySection())
->end()
->end()
->end()
->end()
->end()
->end()
;
}
// Builds the reusable "retry_failed" subtree appended to both the default
// options and each scoped client. A custom "retry_strategy" service id is
// mutually exclusive with the inline http_codes/delay/multiplier/max_delay/
// jitter settings.
private function addHttpClientRetrySection()
{
$root = new NodeBuilder();
return $root
->arrayNode('retry_failed')
->fixXmlConfig('http_code')
->canBeEnabled()
->addDefaultsIfNotSet()
->beforeNormalization()
->always(function ($v) {
if (isset($v['retry_strategy']) && (isset($v['http_codes']) || isset($v['delay']) || isset($v['multiplier']) || isset($v['max_delay']) || isset($v['jitter']))) {
throw new \InvalidArgumentException('The "retry_strategy" option cannot be used along with the "http_codes", "delay", "multiplier", "max_delay" or "jitter" options.');
}
return $v;
})
->end()
->children()
->scalarNode('retry_strategy')->defaultNull()->info('service id to override the retry strategy')->end()
->arrayNode('http_codes')
->performNoDeepMerging()
// Normalizes the several accepted shorthands (a list of codes, a
// code => methods map, a code => true/null map) into a uniform list
// of ['code' => ..., 'methods' => ...] entries.
->beforeNormalization()
->ifArray()
->then(static function ($v) {
$list = [];
foreach ($v as $key => $val) {
if (is_numeric($val)) {
$list[] = ['code' => $val];
} elseif (\is_array($val)) {
if (isset($val['code']) || isset($val['methods'])) {
$list[] = $val;
} else {
$list[] = ['code' => $key, 'methods' => $val];
}
} elseif (true === $val || null === $val) {
$list[] = ['code' => $key];
}
}
return $list;
})
->end()
->useAttributeAsKey('code')
->arrayPrototype()
->fixXmlConfig('method')
->children()
->integerNode('code')->end()
->arrayNode('methods')
->beforeNormalization()
->ifArray()
->then(function ($v) {
return array_map('strtoupper', $v);
})
->end()
->prototype('scalar')->end()
->info('A list of HTTP methods that triggers a retry for this status code. When empty, all methods are retried')
->end()
->end()
->end()
->info('A list of HTTP status code that triggers a retry')
->end()
->integerNode('max_retries')->defaultValue(3)->min(0)->end()
->integerNode('delay')->defaultValue(1000)->min(0)->info('Time in ms to delay (or the initial value when multiplier is used)')->end()
->floatNode('multiplier')->defaultValue(2)->min(1)->info('If greater than 1, delay will grow exponentially for each retry: delay * (multiple ^ retries)')->end()
->integerNode('max_delay')->defaultValue(0)->min(0)->info('Max time in ms that a retry should ever be delayed (0 = infinite)')->end()
->floatNode('jitter')->defaultValue(0.1)->min(0)->max(1)->info('Randomness in percent (between 0 and 1) to apply to the delay')->end()
->end()
;
}
// Defines the "framework.mailer" tree: a single "dsn" OR multiple named
// "transports" (the validate() call rejects using both), plus envelope
// defaults (sender/recipients) and headers added to every outgoing email.
private function addMailerSection(ArrayNodeDefinition $rootNode, callable $enableIfStandalone)
{
$rootNode
->children()
->arrayNode('mailer')
->info('Mailer configuration')
->{$enableIfStandalone('symfony/mailer', Mailer::class)}()
->validate()
->ifTrue(function ($v) { return isset($v['dsn']) && \count($v['transports']); })
->thenInvalid('"dsn" and "transports" cannot be used together.')
->end()
->fixXmlConfig('transport')
->fixXmlConfig('header')
->children()
->scalarNode('message_bus')->defaultNull()->info('The message bus to use. Defaults to the default bus if the Messenger component is installed.')->end()
->scalarNode('dsn')->defaultNull()->end()
->arrayNode('transports')
->useAttributeAsKey('name')
->prototype('scalar')->end()
->end()
->arrayNode('envelope')
->info('Mailer Envelope configuration')
->children()
->scalarNode('sender')->end()
->arrayNode('recipients')
->performNoDeepMerging()
// Drop empty entries and reindex the recipient list.
->beforeNormalization()
->ifArray()
->then(function ($v) {
return array_filter(array_values($v));
})
->end()
->prototype('scalar')->end()
->end()
->end()
->end()
->arrayNode('headers')
->normalizeKeys(false)
->useAttributeAsKey('name')
->prototype('array')
->normalizeKeys(false)
// Allow plain scalar values as a shorthand for ['value' => ...].
->beforeNormalization()
->ifTrue(function ($v) { return !\is_array($v) || array_keys($v) !== ['value']; })
->then(function ($v) { return ['value' => $v]; })
->end()
->children()
->variableNode('value')->end()
->end()
->end()
->end()
->end()
->end()
->end()
;
}
// Defines the "framework.notifier" tree: named chatter/texter transports,
// per-channel notification policies, and admin recipients (email + phone)
// for administrative notifications.
private function addNotifierSection(ArrayNodeDefinition $rootNode, callable $enableIfStandalone)
{
$rootNode
->children()
->arrayNode('notifier')
->info('Notifier configuration')
->{$enableIfStandalone('symfony/notifier', Notifier::class)}()
->fixXmlConfig('chatter_transport')
->children()
->arrayNode('chatter_transports')
->useAttributeAsKey('name')
->prototype('scalar')->end()
->end()
->end()
->fixXmlConfig('texter_transport')
->children()
->arrayNode('texter_transports')
->useAttributeAsKey('name')
->prototype('scalar')->end()
->end()
->end()
->children()
->booleanNode('notification_on_failed_messages')->defaultFalse()->end()
->end()
->children()
->arrayNode('channel_policy')
->useAttributeAsKey('name')
->prototype('array')
// A single channel may be given as a bare string.
->beforeNormalization()->ifString()->then(function (string $v) { return [$v]; })->end()
->prototype('scalar')->end()
->end()
->end()
->end()
->fixXmlConfig('admin_recipient')
->children()
->arrayNode('admin_recipients')
->prototype('array')
->children()
->scalarNode('email')->cannotBeEmpty()->end()
->scalarNode('phone')->defaultValue('')->end()
->end()
->end()
->end()
->end()
->end()
->end()
;
}
// Defines the "framework.rate_limiter" tree. Accepts the shorthand where
// limiters are declared directly under "rate_limiter" (no "limiters" key)
// and normalizes it into the canonical ['enabled' => ..., 'limiters' => ...]
// shape before validation.
private function addRateLimiterSection(ArrayNodeDefinition $rootNode, callable $enableIfStandalone)
{
$rootNode
->children()
->arrayNode('rate_limiter')
->info('Rate limiter configuration')
->{$enableIfStandalone('symfony/rate-limiter', TokenBucketLimiter::class)}()
->fixXmlConfig('limiter')
->beforeNormalization()
->ifTrue(function ($v) { return \is_array($v) && !isset($v['limiters']) && !isset($v['limiter']); })
->then(function (array $v) {
$newV = [
'enabled' => $v['enabled'] ?? true,
];
unset($v['enabled']);
$newV['limiters'] = $v;
return $newV;
})
->end()
->children()
->arrayNode('limiters')
->useAttributeAsKey('name')
->arrayPrototype()
->children()
->scalarNode('lock_factory')
->info('The service ID of the lock factory used by this limiter (or null to disable locking)')
->defaultValue('lock.factory')
->end()
->scalarNode('cache_pool')
->info('The cache pool to use for storing the current limiter state')
->defaultValue('cache.rate_limiter')
->end()
->scalarNode('storage_service')
->info('The service ID of a custom storage implementation, this precedes any configured "cache_pool"')
->defaultNull()
->end()
->enumNode('policy')
->info('The algorithm to be used by this limiter')
->isRequired()
->values(['fixed_window', 'token_bucket', 'sliding_window', 'no_limit'])
->end()
->integerNode('limit')
->info('The maximum allowed hits in a fixed interval or burst')
->isRequired()
->end()
->scalarNode('interval')
->info('Configures the fixed interval if "policy" is set to "fixed_window" or "sliding_window". The value must be a number followed by "second", "minute", "hour", "day", "week" or "month" (or their plural equivalent).')
->end()
->arrayNode('rate')
->info('Configures the fill rate if "policy" is set to "token_bucket"')
->children()
->scalarNode('interval')
->info('Configures the rate interval. The value must be a number followed by "second", "minute", "hour", "day", "week" or "month" (or their plural equivalent).')
->end()
->integerNode('amount')->info('Amount of tokens to add each interval')->defaultValue(1)->end()
->end()
->end()
->end()
->end()
->end()
->end()
->end()
->end()
;
}
// Defines the "framework.uid" tree: which UUID versions the UuidFactory
// uses by default, for name-based and for time-based generation, plus the
// optional namespace/node inputs those versions require.
private function addUidSection(ArrayNodeDefinition $rootNode, callable $enableIfStandalone)
{
$rootNode
->children()
->arrayNode('uid')
->info('Uid configuration')
->{$enableIfStandalone('symfony/uid', UuidFactory::class)}()
->addDefaultsIfNotSet()
->children()
->enumNode('default_uuid_version')
->defaultValue(6)
->values([6, 4, 1])
->end()
->enumNode('name_based_uuid_version')
->defaultValue(5)
->values([5, 3])
->end()
->scalarNode('name_based_uuid_namespace')
->cannotBeEmpty()
->end()
->enumNode('time_based_uuid_version')
->defaultValue(6)
->values([6, 1])
->end()
->scalarNode('time_based_uuid_node')
->cannotBeEmpty()
->end()
->end()
->end()
->end()
;
}
}
|
mit
|
jobdoc/selections-app
|
tests/routes/Selections/modules/selections.spec.js
|
8349
|
import {
ADD_SELECTION,
POST_SELECTION_REQUEST,
POST_SELECTION_FAILURE,
POST_SELECTION_SUCCESS,
FETCH_SELECTIONS_REQUEST,
FETCH_SELECTIONS_FAILURE,
FETCH_SELECTIONS_SUCCESS,
addSelection,
loadSelections,
default as selectionsReducer
} from 'routes/Selections/modules/selections'
import fetchMock from 'fetch-mock'
import configureMockStore from 'redux-mock-store'
import thunk from 'redux-thunk'
import {
API_ROOT,
default as api
} from 'middleware/api'
// Test harness: a mock redux store wired with the thunk middleware and the
// app's api middleware, so dispatched thunks run for real while dispatched
// actions are only recorded (and HTTP is stubbed by fetch-mock below).
const middlewares = [thunk, api]
const mockStore = configureMockStore(middlewares)
// Unit tests for the Selections redux module: action-type constants, the
// reducer's action handlers, and the async (thunk) action creators.
describe('(Redux Module) Selections', () => {
// Shared fixture: a selection as entered by the user (no id yet).
const selection = {
item: 'Door knob downstairs',
product: 'Fancy one from the cool store'
}
const selectionId = '1234'
it('Should export a constant ADD_SELECTION.', () => {
expect(ADD_SELECTION).to.equal('ADD_SELECTION')
})
describe('(Reducer)', () => {
it('Should be a function.', () => {
expect(selectionsReducer).to.be.a('function')
})
it('Should initialize with a state of {} (Object).', () => {
expect(selectionsReducer(undefined, {})).to.deep.equal({})
})
it('Should return the previous state if an action was not matched.', () => {
let state = selectionsReducer(undefined, {})
expect(state).to.deep.equal({})
state = selectionsReducer(state, { type: '@@@@@@@' })
expect(state).to.deep.equal({})
state = selectionsReducer(state, { type: ADD_SELECTION, payload: selection })
expect(state).to.deep.equal({
justAdded: selection
})
state = selectionsReducer(state, { type: '@@@@@@@' })
expect(state).to.deep.equal({
justAdded: selection
})
})
})
// Thunk tests: fetch-mock intercepts the HTTP call, and the mock store
// records the request/success/failure action sequence for comparison.
describe('(Action Creator) loadSelections', () => {
afterEach(() => fetchMock.restore())
it('Should be exported as a function.', () => {
expect(loadSelections).to.be.a('function')
})
it('Should return a function (is a thunk).', () => {
expect(loadSelections()).to.be.a('function')
})
it('Should create "FETCH_SELECTIONS_SUCCESS" when fetching selections has been done.', () => {
const selectionWithId = {
...selection,
id: selectionId
}
fetchMock.get(`${API_ROOT}getSelections`, { body: [ selectionWithId ] })
// The success action carries the normalized (entities/result) payload.
const expectedActions = [
{ type: FETCH_SELECTIONS_REQUEST },
{
type: FETCH_SELECTIONS_SUCCESS,
response: {
entities: {
selections: {
[selectionId]: selectionWithId
}
},
result: [
selectionId
],
nextPageUrl: null
}
}
]
const store = mockStore({ selections: {} })
return store.dispatch(loadSelections())
.then(() => {
expect(store.getActions()).to.deep.equal(expectedActions)
})
})
it('Should create "FETCH_SELECTIONS_FAILURE" when fetching selections has failed.', () => {
// A 500 with an empty body makes the JSON parse fail; that parse error
// becomes the failure action's error message.
fetchMock.get(`${API_ROOT}getSelections`, 500)
const expectedActions = [
{ type: FETCH_SELECTIONS_REQUEST },
{
type: FETCH_SELECTIONS_FAILURE,
error: 'JSON Parse error: Unexpected EOF'
}
]
const store = mockStore({ selections: {} })
return store.dispatch(loadSelections())
.then(() => {
expect(store.getActions()).to.deep.equal(expectedActions)
})
})
})
describe('(Action Creator) addSelection', () => {
afterEach(() => fetchMock.restore())
it('Should be exported as a function.', () => {
expect(addSelection).to.be.a('function')
})
it('Should return a function (is a thunk).', () => {
expect(addSelection()).to.be.a('function')
})
it('Should create "POST_SELECTION_SUCCESS" when adding selection has been done.', () => {
const selectionWithId = {
...selection,
id: selectionId
}
fetchMock.post(`${API_ROOT}addSelection`, { body: selectionWithId })
// addSelection dispatches ADD_SELECTION optimistically before posting.
const expectedActions = [
{
type: ADD_SELECTION,
payload: selection
},
{ type: POST_SELECTION_REQUEST },
{
type: POST_SELECTION_SUCCESS,
response: {
entities: {
selections: {
[selectionId]: selectionWithId
}
},
result: selectionId,
nextPageUrl: null
}
}
]
const store = mockStore({ selections: {} })
return store.dispatch(addSelection(selection))
.then(() => {
expect(store.getActions()).to.deep.equal(expectedActions)
})
})
it('Should create "FETCH_SELECTIONS_FAILURE" when fetching selections has failed.', () => {
fetchMock.post(`${API_ROOT}addSelection`, 500)
const expectedActions = [
{
type: ADD_SELECTION,
payload: selection
},
{ type: POST_SELECTION_REQUEST },
{
type: POST_SELECTION_FAILURE,
error: 'JSON Parse error: Unexpected EOF'
}
]
const store = mockStore({ selections: {} })
return store.dispatch(addSelection(selection))
.then(() => {
expect(store.getActions()).to.deep.equal(expectedActions)
})
})
})
// Per-action-handler reducer tests below: each drives the reducer with a
// single action type and checks the resulting state slice.
describe('(Action Handler) ADD_SELECTION', () => {
it('Should update the state\'s justAdded property with the action payload.', () => {
let state = selectionsReducer(undefined, {})
expect(state).to.deep.equal({})
state = selectionsReducer(state, { type: ADD_SELECTION, payload: selection })
expect(state).to.deep.equal({ justAdded: selection })
state = selectionsReducer(state, { type: ADD_SELECTION, payload: selection })
expect(state).to.deep.equal({ justAdded: selection })
})
})
describe('(Action Handler) FETCH_SELECTIONS_REQUEST', () => {
it('Should update the state\'s isFetching property to `true`.', () => {
let state = selectionsReducer(undefined, {})
expect(state).to.deep.equal({})
state = selectionsReducer(state, { type: FETCH_SELECTIONS_REQUEST })
expect(state).to.deep.equal({ isFetching: true })
})
})
describe('(Action Handler) FETCH_SELECTIONS_SUCCESS', () => {
it('Should update the state\'s isFetching property to `false`.', () => {
let state = selectionsReducer(undefined, {})
expect(state).to.deep.equal({})
state = selectionsReducer(state, { type: FETCH_SELECTIONS_SUCCESS })
expect(state).to.deep.equal({ isFetching: false })
})
})
describe('(Action Handler) FETCH_SELECTIONS_FAILURE', () => {
it('Should update the state\'s isFetching property to `false` and the error property to error payload.', () => {
const error = 'Something terrible happened'
let state = selectionsReducer(undefined, {})
expect(state).to.deep.equal({})
state = selectionsReducer(state, { type: FETCH_SELECTIONS_FAILURE, error })
expect(state).to.deep.equal({
isFetching: false,
error
})
})
})
describe('(Action Handler) POST_SELECTION_REQUEST', () => {
it('Should update the state\'s isPosting property to `true`.', () => {
let state = selectionsReducer(undefined, {})
expect(state).to.deep.equal({})
state = selectionsReducer(state, { type: POST_SELECTION_REQUEST })
expect(state).to.deep.equal({ isPosting: true })
})
})
describe('(Action Handler) POST_SELECTION_SUCCESS', () => {
it('Should update the state\'s isPosting property to `false`.', () => {
let state = selectionsReducer(undefined, {})
expect(state).to.deep.equal({})
state = selectionsReducer(state, { type: POST_SELECTION_SUCCESS })
expect(state).to.deep.equal({ isPosting: false })
})
})
describe('(Action Handler) POST_SELECTION_FAILURE', () => {
it('Should update the state\'s isPosting property to `false` and the error property to error payload.', () => {
const error = 'Something terrible happened'
let state = selectionsReducer(undefined, {})
expect(state).to.deep.equal({})
state = selectionsReducer(state, { type: POST_SELECTION_FAILURE, error })
expect(state).to.deep.equal({
isPosting: false,
error
})
})
})
})
|
mit
|
spirinvladimir/connect-nodejs-samples
|
lib/subscribe_for_spots.js
|
370
|
'use strict';
/**
 * Sends a ProtoOASubscribeForSpotsReq guaranteed command, subscribing the
 * given account to spot price events for one symbol.
 *
 * Must be invoked with `this` bound to a connection object exposing
 * `sendGuaranteedCommand(payloadType, payload)` and
 * `protocol.getPayloadTypeByName(name)`.
 *
 * @param {Object} params
 * @param {number|string} params.accountId - trading account id
 * @param {string} params.accessToken - OAuth access token
 * @param {string} [params.symbolName] - symbol to subscribe to (preferred spelling)
 * @param {string} [params.symblolName] - legacy misspelled key, still honored
 *   for backward compatibility with existing callers
 * @returns {*} whatever sendGuaranteedCommand returns (typically a promise)
 */
var subscribeForSpots = function (params) {
  return this.sendGuaranteedCommand(
    this.protocol.getPayloadTypeByName('ProtoOASubscribeForSpotsReq'),
    {
      accountId: params.accountId,
      accessToken: params.accessToken,
      // NOTE(review): the payload key "symblolName" looks misspelled, but it
      // may mirror the protobuf field name exactly — confirm against the
      // .proto definition before renaming it. Callers may use either spelling.
      symblolName: params.symblolName !== undefined ? params.symblolName : params.symbolName
    }
  );
};
module.exports = subscribeForSpots;
|
mit
|
huoxudong125/Z.ExtensionMethods
|
test/Z.Core.Test/System.String/String.ExtractInt16.cs
|
1294
|
// Copyright (c) 2015 ZZZ Projects. All rights reserved
// Licensed under MIT License (MIT) (https://github.com/zzzprojects/Z.ExtensionMethods)
// Website: http://www.zzzprojects.com/
// Feedback / Feature Requests / Issues : http://zzzprojects.uservoice.com/forums/283927
// All ZZZ Projects products: Entity Framework Extensions / Bulk Operations / Extension Methods /Icon Library
using Microsoft.VisualStudio.TestTools.UnitTesting;
namespace Z.Core.Test
{
    /// <summary>
    /// Tests for the String.ExtractInt16 extension method: it extracts the
    /// digits scattered through a string into a single Int16, with a '-'
    /// immediately preceding the first digit making the result negative.
    /// </summary>
    [TestClass]
    public class System_String_ExtractInt16
    {
        [TestMethod]
        public void ExtractInt16()
        {
            // Simple embedded number.
            Assert.AreEqual(123, "Fizz 123 Buzz".ExtractInt16());
            // '-' attached to the digits yields a negative value.
            Assert.AreEqual(-123, "Fizz -123 Buzz".ExtractInt16());
            // A detached leading '-' is ignored.
            Assert.AreEqual(123, "-Fizz 123 Buzz".ExtractInt16());
            // Non-digit separators between digits are skipped, not treated
            // as a decimal point.
            Assert.AreEqual(1234, "Fizz 123.4 Buzz".ExtractInt16());
            Assert.AreEqual(-1234, "Fizz -123Fizz.Buzz4 Buzz".ExtractInt16());
        }
    }
}
|
mit
|
yuleska/wapmadrid
|
public/modules/routes/config/routes.client.config.js
|
369
|
'use strict';

// Registers the Routes module's entries in the application's top bar menu.
angular.module('routes').run(['Menus',
  function (Menus) {
    // Parent dropdown, shown while the URL matches /routes or /routes/create.
    Menus.addMenuItem('topbar', 'Routes', 'routes', 'dropdown', '/routes(/create)?');

    // Child links under the dropdown, registered table-style.
    var subMenuItems = [
      ['List Routes', 'routes'],
      ['New Route', 'routes/create']
    ];
    subMenuItems.forEach(function (entry) {
      Menus.addSubMenuItem('topbar', 'routes', entry[0], entry[1]);
    });
  }
]);
|
mit
|
cloudbearings/virtual-assistant
|
routes/index.php
|
43
|
<?php
// Index route: render the landing page template through the Smarty instance
// owned by the enclosing router/controller ($this).
$this->smarty->display('index.tpl');
|
mit
|
MilesGithub/tor_ibin
|
web/assets/js/jquery.qtip.js
|
102233
|
/*
* qTip2 - Pretty powerful tooltips - v2.2.0
* http://qtip2.com
*
* Copyright (c) 2013 Craig Michael Thompson
* Released under the MIT, GPL licenses
* http://jquery.org/license
*
* Date: Thu Nov 21 2013 08:34 GMT+0000
* Plugins: tips modal viewport svg imagemap ie6
* Styles: basic css3
*/
/*global window: false, jQuery: false, console: false, define: false */
/* Cache window, document, undefined */
(function( window, document, undefined ) {
// Uses AMD or browser globals to create a jQuery plugin.
(function( factory ) {
"use strict";
if(typeof define === 'function' && define.amd) {
define(['jquery'], factory);
}
else if(jQuery && !jQuery.fn.qtip) {
factory(jQuery);
}
}
(function($) {
"use strict"; // Enable ECMAScript "strict" operation for this function. See more: http://ejohn.org/blog/ecmascript-5-strict-mode-json-and-more/
// Module-wide constants, class names and browser sniffing shared by all
// qTip internals inside this closure.
;// Munge the primitives - Paul Irish tip
var TRUE = true,
FALSE = false,
NULL = null,
// Common variables
X = 'x', Y = 'y',
WIDTH = 'width',
HEIGHT = 'height',
// Positioning sides
TOP = 'top',
LEFT = 'left',
BOTTOM = 'bottom',
RIGHT = 'right',
CENTER = 'center',
// Position adjustment types
FLIP = 'flip',
FLIPINVERT = 'flipinvert',
SHIFT = 'shift',
// Shortcut vars
QTIP, PROTOTYPE, CORNER, CHECKS,
PLUGINS = {},
NAMESPACE = 'qtip',
ATTR_HAS = 'data-hasqtip',
ATTR_ID = 'data-qtip-id',
WIDGET = ['ui-widget', 'ui-tooltip'],
SELECTOR = '.'+NAMESPACE,
INACTIVE_EVENTS = 'click dblclick mousedown mouseup mousemove mouseleave mouseenter'.split(' '),
CLASS_FIXED = NAMESPACE+'-fixed',
CLASS_DEFAULT = NAMESPACE + '-default',
CLASS_FOCUS = NAMESPACE + '-focus',
CLASS_HOVER = NAMESPACE + '-hover',
CLASS_DISABLED = NAMESPACE+'-disabled',
replaceSuffix = '_replacedByqTip',
oldtitle = 'oldtitle',
trackingBound,
// Browser detection
BROWSER = {
/*
 * IE version detection
 *
 * Adapted from: http://ajaxian.com/archives/attack-of-the-ie-conditional-comment
 * Credit to James Padolsey for the original implemntation!
 */
ie: (function(){
var v = 3, div = document.createElement('div');
while ((div.innerHTML = '<!--[if gt IE '+(++v)+']><i></i><![endif]-->')) {
if(!div.getElementsByTagName('i')[0]) { break; }
}
return v > 4 ? v : NaN;
}()),
/*
 * iOS version detection
 */
iOS: parseFloat(
('' + (/CPU.*OS ([0-9_]{1,5})|(CPU like).*AppleWebKit.*Mobile/i.exec(navigator.userAgent) || [0,''])[1])
.replace('undefined', '3_2').replace('_', '.').replace('_', '')
) || FALSE
};
// Tooltip instance constructor. `target` is the jQuery-wrapped element the
// tooltip belongs to, `options` the sanitized options object, `id` the
// per-page qTip id, and `attr` the element's original title attribute value.
// Only initializes state; DOM creation happens later in render().
;function QTip(target, options, id, attr) {
// Elements and ID
this.id = id;
this.target = target;
this.tooltip = NULL;
this.elements = { target: target };
// Internal constructs
this._id = NAMESPACE + '-' + id;
this.timers = { img: {} };
this.options = options;
this.plugins = {};
// Cache object
this.cache = {
event: {},
target: $(),
disabled: FALSE,
attr: attr,
onTooltip: FALSE,
lastClass: ''
};
// Set the initial flags
this.rendered = this.destroyed = this.disabled = this.waiting =
this.hiddenDuringWait = this.positioning = this.triggering = FALSE;
}
PROTOTYPE = QTip.prototype;
// Helper: combine an array of deferreds into a single jQuery promise.
PROTOTYPE._when = function(deferreds) {
return $.when.apply($, deferreds);
};
// Builds the tooltip DOM (container, content, optional title/button), runs
// 'render'-stage plugins, binds events, and — once any deferred content has
// resolved — fires the 'render' event and optionally shows the tooltip.
// Returns `this` for chaining; no-op if already rendered or destroyed.
PROTOTYPE.render = function(show) {
if(this.rendered || this.destroyed) { return this; } // If tooltip has already been rendered, exit
var self = this,
options = this.options,
cache = this.cache,
elements = this.elements,
text = options.content.text,
title = options.content.title,
button = options.content.button,
posOptions = options.position,
namespace = '.'+this._id+' ',
deferreds = [],
tooltip;
// Add ARIA attributes to target
$.attr(this.target[0], 'aria-describedby', this._id);
// Create tooltip element
this.tooltip = elements.tooltip = tooltip = $('<div/>', {
'id': this._id,
'class': [ NAMESPACE, CLASS_DEFAULT, options.style.classes, NAMESPACE + '-pos-' + options.position.my.abbrev() ].join(' '),
'width': options.style.width || '',
'height': options.style.height || '',
'tracking': posOptions.target === 'mouse' && posOptions.adjust.mouse,
/* ARIA specific attributes */
'role': 'alert',
'aria-live': 'polite',
'aria-atomic': FALSE,
'aria-describedby': this._id + '-content',
'aria-hidden': TRUE
})
.toggleClass(CLASS_DISABLED, this.disabled)
.attr(ATTR_ID, this.id)
.data(NAMESPACE, this)
.appendTo(posOptions.container)
.append(
// Create content element
elements.content = $('<div />', {
'class': NAMESPACE + '-content',
'id': this._id + '-content',
'aria-atomic': TRUE
})
);
// Set rendered flag and prevent redundant reposition calls for now
// (-1 marks "mid-render": rendered enough to update, not yet complete).
this.rendered = -1;
this.positioning = TRUE;
// Create title...
if(title) {
this._createTitle();
// Update title only if its not a callback (called in toggle if so)
if(!$.isFunction(title)) {
deferreds.push( this._updateTitle(title, FALSE) );
}
}
// Create button
if(button) { this._createButton(); }
// Set proper rendered flag and update content if not a callback function (called in toggle)
if(!$.isFunction(text)) {
deferreds.push( this._updateContent(text, FALSE) );
}
this.rendered = TRUE;
// Setup widget classes
this._setWidget();
// Initialize 'render' plugins
$.each(PLUGINS, function(name) {
var instance;
if(this.initialize === 'render' && (instance = this(self))) {
self.plugins[name] = instance;
}
});
// Unassign initial events and assign proper events
this._unassignEvents();
this._assignEvents();
// When deferreds have completed (e.g. async title/content updates)
this._when(deferreds).then(function() {
// tooltiprender event
self._trigger('render');
// Reset flags
self.positioning = FALSE;
// Show tooltip if not hidden during wait period
if(!self.hiddenDuringWait && (options.show.ready || show)) {
self.toggle(TRUE, cache.event, FALSE);
}
self.hiddenDuringWait = FALSE;
});
// Expose API
QTIP.api[this.id] = this;
return this;
};
// Tears the tooltip down: removes DOM, plugins, timers, events and ARIA
// attributes, restores the original title attribute, and deletes the API
// entry. Unless `immediate` is true, waits for the hide animation to finish
// before destroying. Returns the original target element.
PROTOTYPE.destroy = function(immediate) {
// Set flag to signify destroy is taking place to plugins
// and ensure it only gets destroyed once!
if(this.destroyed) { return this.target; }
function process() {
if(this.destroyed) { return; }
this.destroyed = TRUE;
var target = this.target,
title = target.attr(oldtitle);
// Destroy tooltip if rendered
if(this.rendered) {
this.tooltip.stop(1,0).find('*').remove().end().remove();
}
// Destroy all plugins
$.each(this.plugins, function(name) {
this.destroy && this.destroy();
});
// Clear timers and remove bound events
clearTimeout(this.timers.show);
clearTimeout(this.timers.hide);
this._unassignEvents();
// Remove api object and ARIA attributes
target.removeData(NAMESPACE)
.removeAttr(ATTR_ID)
.removeAttr(ATTR_HAS)
.removeAttr('aria-describedby');
// Reset old title attribute if removed
if(this.options.suppress && title) {
target.attr('title', title).removeAttr(oldtitle);
}
// Remove qTip events associated with this API
this._unbind(target);
// Remove ID from used id objects, and delete object references
// for better garbage collection and leak protection
this.options = this.elements = this.cache = this.timers =
this.plugins = this.mouse = NULL;
// Delete exposed API object
delete QTIP.api[this.id];
}
// If an immediate destroy is NOT needed (and we're rendered), destroy
// only after the tooltip has finished hiding.
if((immediate !== TRUE || this.triggering === 'hide') && this.rendered) {
this.tooltip.one('tooltiphidden', $.proxy(process, this));
!this.triggering && this.hide();
}
// If we're not in the process of hiding... process
else { process.call(this); }
return this.target;
};
;// Returns TRUE when `a` cannot serve as an option object (NULL or any
// non-object value).
function invalidOpt(a) {
    if(a === NULL) { return true; }
    return $.type(a) !== 'object';
}
// Returns TRUE when `c` cannot be used as tooltip content: not a
// function, has no .attr method, has no length, and is neither a jQuery
// object nor a thenable.
function invalidContent(c) {
    var usable = $.isFunction(c) ||
        (c && c.attr) ||
        c.length ||
        ($.type(c) === 'object' && (c.jquery || c.then));
    return !usable;
}
// Option object sanitizer
// Normalizes shorthand option values (strings, jQuery objects, booleans)
// into their full object forms, and converts the deprecated content.ajax
// syntax into a Deferred-based content.text function. Returns FALSE for
// invalid input, otherwise the (mutated) opts object.
function sanitizeOptions(opts) {
    var content, text, ajax, once;
    if(invalidOpt(opts)) { return FALSE; }
    if(invalidOpt(opts.metadata)) {
        opts.metadata = { type: opts.metadata };
    }
    if('content' in opts) {
        content = opts.content;
        // Wrap bare values (string/jQuery/promise) into { text: ... } form
        if(invalidOpt(content) || content.jquery || content.done) {
            content = opts.content = {
                text: (text = invalidContent(content) ? FALSE : content)
            };
        }
        else { text = content.text; }
        // DEPRECATED - Old content.ajax plugin functionality
        // Converts it into the proper Deferred syntax
        if('ajax' in content) {
            ajax = content.ajax;
            once = ajax && ajax.once !== FALSE;
            delete content.ajax;
            content.text = function(event, api) {
                // Placeholder content shown while the request is in flight
                var loading = text || $(this).attr(api.options.content.attr) || 'Loading...',
                    deferred = $.ajax(
                        $.extend({}, ajax, { context: api })
                    )
                    .then(ajax.success, NULL, ajax.error)
                    .then(function(content) {
                        if(content && once) { api.set('content.text', content); }
                        return content;
                    },
                    function(xhr, status, error) {
                        // Ignore failures after destroy or aborted requests (status 0)
                        if(api.destroyed || xhr.status === 0) { return; }
                        api.set('content.text', status + ': ' + error);
                    });
                return !once ? (api.set('content.text', loading), deferred) : loading;
            };
        }
        if('title' in content) {
            // Split legacy { text, button } title object into separate options
            if(!invalidOpt(content.title)) {
                content.button = content.title.button;
                content.title = content.title.text;
            }
            if(invalidContent(content.title || FALSE)) {
                content.title = FALSE;
            }
        }
    }
    // Shorthand: a bare position value is used for both `my` and `at`
    if('position' in opts && invalidOpt(opts.position)) {
        opts.position = { my: opts.position, at: opts.position };
    }
    // Shorthand: jQuery object = target, TRUE = show on render, else event name(s)
    if('show' in opts && invalidOpt(opts.show)) {
        opts.show = opts.show.jquery ? { target: opts.show } :
            opts.show === TRUE ? { ready: TRUE } : { event: opts.show };
    }
    if('hide' in opts && invalidOpt(opts.hide)) {
        opts.hide = opts.hide.jquery ? { target: opts.hide } : { event: opts.hide };
    }
    // Shorthand: a bare style value is treated as a class string
    if('style' in opts && invalidOpt(opts.style)) {
        opts.style = { classes: opts.style };
    }
    // Sanitize plugin options
    $.each(PLUGINS, function() {
        this.sanitize && this.sanitize(opts);
    });
    return opts;
}
// Setup builtin .set() option checks
// Each key is a regex matched (case-insensitively) against the dotted
// option name passed to .set(); the callback receives
// (containerObj, finalKey, newValue, previousValue) and applies whatever
// side effects the option change requires.
CHECKS = PROTOTYPE.checks = {
    builtin: {
        // Core checks
        '^id$': function(obj, o, v, prev) {
            var id = v === TRUE ? QTIP.nextid : v,
                new_id = NAMESPACE + '-' + id;
            // Only accept the id when non-empty and not already used in the DOM;
            // otherwise restore the previous value
            if(id !== FALSE && id.length > 0 && !$('#'+new_id).length) {
                this._id = new_id;
                if(this.rendered) {
                    this.tooltip[0].id = this._id;
                    this.elements.content[0].id = this._id + '-content';
                    this.elements.title[0].id = this._id + '-title';
                }
            }
            else { obj[o] = prev; }
        },
        '^prerender': function(obj, o, v) {
            // Render immediately when prerender enabled and not yet rendered
            v && !this.rendered && this.render(this.options.show.ready);
        },
        // Content checks
        '^content.text$': function(obj, o, v) {
            this._updateContent(v);
        },
        '^content.attr$': function(obj, o, v, prev) {
            // Refresh content only if it was sourced from the old attribute
            if(this.options.content.text === this.target.attr(prev)) {
                this._updateContent( this.target.attr(v) );
            }
        },
        '^content.title$': function(obj, o, v) {
            // Remove title if content is null
            if(!v) { return this._removeTitle(); }
            // If title isn't already created, create it now and update
            v && !this.elements.title && this._createTitle();
            this._updateTitle(v);
        },
        '^content.button$': function(obj, o, v) {
            this._updateButton(v);
        },
        '^content.title.(text|button)$': function(obj, o, v) {
            this.set('content.'+o, v); // Backwards title.text/button compat
        },
        // Position checks
        '^position.(my|at)$': function(obj, o, v){
            // Convert string corners into CORNER instances
            'string' === typeof v && (obj[o] = new CORNER(v, o === 'at'));
        },
        '^position.container$': function(obj, o, v){
            this.rendered && this.tooltip.appendTo(v);
        },
        // Show checks
        '^show.ready$': function(obj, o, v) {
            // Render (which shows when ready) or show immediately
            v && (!this.rendered && this.render(TRUE) || this.toggle(TRUE));
        },
        // Style checks
        '^style.classes$': function(obj, o, v, p) {
            this.rendered && this.tooltip.removeClass(p).addClass(v);
        },
        '^style.(width|height)': function(obj, o, v) {
            this.rendered && this.tooltip.css(o, v);
        },
        '^style.widget|content.title': function() {
            this.rendered && this._setWidget();
        },
        '^style.def': function(obj, o, v) {
            this.rendered && this.tooltip.toggleClass(CLASS_DEFAULT, !!v);
        },
        // Events check
        '^events.(render|show|move|hide|focus|blur)$': function(obj, o, v) {
            // (Re)bind when a function was given, unbind otherwise
            this.rendered && this.tooltip[($.isFunction(v) ? '' : 'un') + 'bind']('tooltip'+o, v);
        },
        // Properties which require event reassignment
        '^(show|hide|position).(event|target|fixed|inactive|leave|distance|viewport|adjust)': function() {
            if(!this.rendered) { return; }
            // Set tracking flag
            var posOptions = this.options.position;
            this.tooltip.attr('tracking', posOptions.target === 'mouse' && posOptions.adjust.mouse);
            // Reassign events
            this._unassignEvents();
            this._assignEvents();
        }
    }
};
// Dot notation converter
// Walks `options` along the dotted `notation` string and returns a
// [parentObject, finalKey] pair suitable for get/set access. Falls back
// to the root object when intermediate levels are missing or falsy.
function convertNotation(options, notation) {
    var levels = notation.split('.'),
        depth = levels.length,
        parent, node = options,
        idx = 0;
    // Descend one level per segment, remembering the deepest parent seen
    while( node = node[ levels[idx++] ] ) {
        if(idx < depth) { parent = node; }
    }
    return [parent || options, levels.pop()];
}
// Reads a single option via dot notation; Corner instances are returned
// in their serialized string form.
PROTOTYPE.get = function(notation) {
    // No-op once destroyed
    if(this.destroyed) { return this; }
    var pair = convertNotation(this.options, notation.toLowerCase()),
        value = pair[0][ pair[1] ];
    if(value.precedance) { return value.string(); }
    return value;
};
// Invoked (via $.each) for each option changed through .set(); runs any
// CHECKS callbacks whose rule regex matches the option's dot-notation.
// Every regex match is appended to `args` before the callback fires.
function setCallback(notation, args) {
    var category, rule, match;
    for(category in this.checks) {
        for(rule in this.checks[category]) {
            match = (new RegExp(rule, 'i')).exec(notation);
            if(!match) { continue; }
            args.push(match);
            // Builtin checks always run; plugin checks only when loaded
            if(category === 'builtin' || this.plugins[category]) {
                this.checks[category][rule].apply(
                    this.plugins[category] || this, args
                );
            }
        }
    }
}
// Option-name patterns: rmove matches options whose change requires a
// reposition; rrender matches options only honored at render time.
var rmove = /^position\.(my|at|adjust|target|container|viewport)|style|content|show\.ready/i,
    rrender = /^prerender|show\.ready/i;
// Sets one option ("notation", value) or many ({ notation: value, ... }),
// re-sanitizes the option tree, runs the matching CHECKS callbacks, and
// repositions when a position-affecting option changed.
PROTOTYPE.set = function(option, value) {
    if(this.destroyed) { return this; }
    var rendered = this.rendered,
        reposition = FALSE,
        options = this.options,
        checks = this.checks,
        name;
    // Convert singular option/value pair into object form
    if('string' === typeof option) {
        name = option; option = {}; option[name] = value;
    }
    else { option = $.extend({}, option); }
    // Set all of the defined options to their new values
    $.each(option, function(notation, value) {
        // Render-time-only options are ignored once rendered
        if(rendered && rrender.test(notation)) {
            delete option[notation]; return;
        }
        // Set new obj value (DOM nodes are wrapped in jQuery)
        var obj = convertNotation(options, notation.toLowerCase()), previous;
        previous = obj[0][ obj[1] ];
        obj[0][ obj[1] ] = value && value.nodeType ? $(value) : value;
        // Also check if we need to reposition
        reposition = rmove.test(notation) || reposition;
        // Set the new params for the callback
        option[notation] = [obj[0], obj[1], value, previous];
    });
    // Re-sanitize options
    sanitizeOptions(options);
    /*
     * Execute any valid callbacks for the set options
     * Also set positioning flag so we don't get loads of redundant repositioning calls.
     */
    this.positioning = TRUE;
    $.each(option, $.proxy(setCallback, this));
    this.positioning = FALSE;
    // Update position if needed (only when visible)
    if(this.rendered && this.tooltip[0].offsetWidth > 0 && reposition) {
        this.reposition( options.position.target === 'mouse' ? NULL : this.cache.event );
    }
    return this;
};
;PROTOTYPE._update = function(content, element, reposition) {
var self = this,
cache = this.cache;
// Make sure tooltip is rendered and content is defined. If not return
if(!this.rendered || !content) { return FALSE; }
// Use function to parse content
if($.isFunction(content)) {
content = content.call(this.elements.target, cache.event, this) || '';
}
// Handle deferred content
if($.isFunction(content.then)) {
cache.waiting = TRUE;
return content.then(function(c) {
cache.waiting = FALSE;
return self._update(c, element);
}, NULL, function(e) {
return self._update(e, element);
});
}
// If content is null... return false
if(content === FALSE || (!content && content !== '')) { return FALSE; }
// Append new content if its a DOM array and show it if hidden
if(content.jquery && content.length > 0) {
element.empty().append(
content.css({ display: 'block', visibility: 'visible' })
);
}
// Content is a regular string, insert the new content
else { element.html(content); }
// Wait for content to be loaded, and reposition
return this._waitForContent(element).then(function(images) {
if(images.images && images.images.length && self.rendered && self.tooltip[0].offsetWidth > 0) {
self.reposition(cache.event, !images.length);
}
});
};
// Returns a promise resolved once `element`'s images have loaded (via
// the optional imagesLoaded plugin); resolves immediately when the
// plugin is absent. The cache.waiting flag is held while pending.
PROTOTYPE._waitForContent = function(element) {
    var cache = this.cache;
    // Flag that we're waiting on (possibly async) content
    cache.waiting = TRUE;
    var pending = $.fn.imagesLoaded ?
        element.imagesLoaded() :
        $.Deferred().resolve([]);
    return pending
        .done(function() { cache.waiting = FALSE; })
        .promise();
};
// Updates the main content element via the shared _update() pipeline.
PROTOTYPE._updateContent = function(content, reposition) {
    var contentElem = this.elements.content;
    this._update(content, contentElem, reposition);
};
// Updates the title element; removes the title bar entirely when the
// update pipeline rejects the content (returns FALSE).
PROTOTYPE._updateTitle = function(content, reposition) {
    var result = this._update(content, this.elements.title, reposition);
    if(result === FALSE) {
        this._removeTitle(FALSE);
    }
};
// Builds the titlebar/title elements (plus the close button when
// enabled) and inserts them before the content element, wiring up the
// jQuery UI state classes on the close button.
PROTOTYPE._createTitle = function()
{
    var elements = this.elements,
        id = this._id+'-title';
    // Destroy previous title element, if present
    if(elements.titlebar) { this._removeTitle(); }
    // Create title bar and title elements
    elements.titlebar = $('<div />', {
        'class': NAMESPACE + '-titlebar ' + (this.options.style.widget ? createWidgetClass('header') : '')
    })
    .append(
        elements.title = $('<div />', {
            'id': id,
            'class': NAMESPACE + '-title',
            'aria-atomic': TRUE
        })
    )
    .insertBefore(elements.content)
    // Button-specific events
    .delegate('.qtip-close', 'mousedown keydown mouseup keyup mouseout', function(event) {
        // Mirror jQuery UI active/focus states on press/release
        $(this).toggleClass('ui-state-active ui-state-focus', event.type.substr(-4) === 'down');
    })
    .delegate('.qtip-close', 'mouseover mouseout', function(event){
        $(this).toggleClass('ui-state-hover', event.type === 'mouseover');
    });
    // Create button if enabled
    if(this.options.content.button) { this._createButton(); }
};
// Removes the titlebar (title + button) if present; repositions
// afterwards unless `reposition` is explicitly FALSE.
PROTOTYPE._removeTitle = function(reposition)
{
    var elements = this.elements;
    // Nothing to do if no title is present
    if(!elements.title) { return; }
    elements.titlebar.remove();
    elements.titlebar = elements.title = elements.button = NULL;
    // Reposition unless explicitly disabled
    if(reposition !== FALSE) { this.reposition(); }
};
;PROTOTYPE.reposition = function(event, effect) {
if(!this.rendered || this.positioning || this.destroyed) { return this; }
// Set positioning flag
this.positioning = TRUE;
var cache = this.cache,
tooltip = this.tooltip,
posOptions = this.options.position,
target = posOptions.target,
my = posOptions.my,
at = posOptions.at,
viewport = posOptions.viewport,
container = posOptions.container,
adjust = posOptions.adjust,
method = adjust.method.split(' '),
tooltipWidth = tooltip.outerWidth(FALSE),
tooltipHeight = tooltip.outerHeight(FALSE),
targetWidth = 0,
targetHeight = 0,
type = tooltip.css('position'),
position = { left: 0, top: 0 },
visible = tooltip[0].offsetWidth > 0,
isScroll = event && event.type === 'scroll',
win = $(window),
doc = container[0].ownerDocument,
mouse = this.mouse,
pluginCalculations, offset;
// Check if absolute position was passed
if($.isArray(target) && target.length === 2) {
// Force left top and set position
at = { x: LEFT, y: TOP };
position = { left: target[0], top: target[1] };
}
// Check if mouse was the target
else if(target === 'mouse') {
// Force left top to allow flipping
at = { x: LEFT, y: TOP };
// Use the cached mouse coordinates if available, or passed event has no coordinates
if(mouse && mouse.pageX && (adjust.mouse || !event || !event.pageX) ) {
event = mouse;
}
// If the passed event has no coordinates (such as a scroll event)
else if(!event || !event.pageX) {
// Use the mouse origin that caused the show event, if distance hiding is enabled
if((!adjust.mouse || this.options.show.distance) && cache.origin && cache.origin.pageX) {
event = cache.origin;
}
// Use cached event for resize/scroll events
else if(!event || (event && (event.type === 'resize' || event.type === 'scroll'))) {
event = cache.event;
}
}
// Calculate body and container offset and take them into account below
if(type !== 'static') { position = container.offset(); }
if(doc.body.offsetWidth !== (window.innerWidth || doc.documentElement.clientWidth)) {
offset = $(document.body).offset();
}
// Use event coordinates for position
position = {
left: event.pageX - position.left + (offset && offset.left || 0),
top: event.pageY - position.top + (offset && offset.top || 0)
};
// Scroll events are a pain, some browsers
if(adjust.mouse && isScroll && mouse) {
position.left -= (mouse.scrollX || 0) - win.scrollLeft();
position.top -= (mouse.scrollY || 0) - win.scrollTop();
}
}
// Target wasn't mouse or absolute...
else {
// Check if event targetting is being used
if(target === 'event') {
if(event && event.target && event.type !== 'scroll' && event.type !== 'resize') {
cache.target = $(event.target);
}
else if(!event.target) {
cache.target = this.elements.target;
}
}
else if(target !== 'event'){
cache.target = $(target.jquery ? target : this.elements.target);
}
target = cache.target;
// Parse the target into a jQuery object and make sure there's an element present
target = $(target).eq(0);
if(target.length === 0) { return this; }
// Check if window or document is the target
else if(target[0] === document || target[0] === window) {
targetWidth = BROWSER.iOS ? window.innerWidth : target.width();
targetHeight = BROWSER.iOS ? window.innerHeight : target.height();
if(target[0] === window) {
position = {
top: (viewport || target).scrollTop(),
left: (viewport || target).scrollLeft()
};
}
}
// Check if the target is an <AREA> element
else if(PLUGINS.imagemap && target.is('area')) {
pluginCalculations = PLUGINS.imagemap(this, target, at, PLUGINS.viewport ? method : FALSE);
}
// Check if the target is an SVG element
else if(PLUGINS.svg && target && target[0].ownerSVGElement) {
pluginCalculations = PLUGINS.svg(this, target, at, PLUGINS.viewport ? method : FALSE);
}
// Otherwise use regular jQuery methods
else {
targetWidth = target.outerWidth(FALSE);
targetHeight = target.outerHeight(FALSE);
position = target.offset();
}
// Parse returned plugin values into proper variables
if(pluginCalculations) {
targetWidth = pluginCalculations.width;
targetHeight = pluginCalculations.height;
offset = pluginCalculations.offset;
position = pluginCalculations.position;
}
// Adjust position to take into account offset parents
position = this.reposition.offset(target, position, container);
// Adjust for position.fixed tooltips (and also iOS scroll bug in v3.2-4.0 & v4.3-4.3.2)
if((BROWSER.iOS > 3.1 && BROWSER.iOS < 4.1) ||
(BROWSER.iOS >= 4.3 && BROWSER.iOS < 4.33) ||
(!BROWSER.iOS && type === 'fixed')
){
position.left -= win.scrollLeft();
position.top -= win.scrollTop();
}
// Adjust position relative to target
if(!pluginCalculations || (pluginCalculations && pluginCalculations.adjustable !== FALSE)) {
position.left += at.x === RIGHT ? targetWidth : at.x === CENTER ? targetWidth / 2 : 0;
position.top += at.y === BOTTOM ? targetHeight : at.y === CENTER ? targetHeight / 2 : 0;
}
}
// Adjust position relative to tooltip
position.left += adjust.x + (my.x === RIGHT ? -tooltipWidth : my.x === CENTER ? -tooltipWidth / 2 : 0);
position.top += adjust.y + (my.y === BOTTOM ? -tooltipHeight : my.y === CENTER ? -tooltipHeight / 2 : 0);
// Use viewport adjustment plugin if enabled
if(PLUGINS.viewport) {
position.adjusted = PLUGINS.viewport(
this, position, posOptions, targetWidth, targetHeight, tooltipWidth, tooltipHeight
);
// Apply offsets supplied by positioning plugin (if used)
if(offset && position.adjusted.left) { position.left += offset.left; }
if(offset && position.adjusted.top) { position.top += offset.top; }
}
// Viewport adjustment is disabled, set values to zero
else { position.adjusted = { left: 0, top: 0 }; }
// tooltipmove event
if(!this._trigger('move', [position, viewport.elem || viewport], event)) { return this; }
delete position.adjusted;
// If effect is disabled, target it mouse, no animation is defined or positioning gives NaN out, set CSS directly
if(effect === FALSE || !visible || isNaN(position.left) || isNaN(position.top) || target === 'mouse' || !$.isFunction(posOptions.effect)) {
tooltip.css(position);
}
// Use custom function if provided
else if($.isFunction(posOptions.effect)) {
posOptions.effect.call(tooltip, this, $.extend({}, position));
tooltip.queue(function(next) {
// Reset attributes to avoid cross-browser rendering bugs
$(this).css({ opacity: '', height: '' });
if(BROWSER.ie) { this.style.removeAttribute('filter'); }
next();
});
}
// Set positioning flag
this.positioning = FALSE;
return this;
};
// Custom (more correct for qTip!) offset calculator
// Walks the offsetParent chain from `container` upward, compensating
// `pos` for every positioned ancestor's offset, borders, margins and
// scrolling. Mutates and returns `pos`.
PROTOTYPE.reposition.offset = function(elem, pos, container) {
    if(!container[0]) { return pos; }
    var ownerDocument = $(elem[0].ownerDocument),
        quirks = !!BROWSER.ie && document.compatMode !== 'CSS1Compat',
        parent = container[0],
        scrolled, position, parentOffset, overflow;
    // Adds (i=1) or subtracts (i=-1) an element's scroll offsets
    function scroll(e, i) {
        pos.left += i * e.scrollLeft();
        pos.top += i * e.scrollTop();
    }
    // Compensate for non-static containers offset
    do {
        if((position = $.css(parent, 'position')) !== 'static') {
            if(position === 'fixed') {
                parentOffset = parent.getBoundingClientRect();
                scroll(ownerDocument, -1);
            }
            else {
                parentOffset = $(parent).position();
                parentOffset.left += (parseFloat($.css(parent, 'borderLeftWidth')) || 0);
                parentOffset.top += (parseFloat($.css(parent, 'borderTopWidth')) || 0);
            }
            pos.left -= parentOffset.left + (parseFloat($.css(parent, 'marginLeft')) || 0);
            pos.top -= parentOffset.top + (parseFloat($.css(parent, 'marginTop')) || 0);
            // If this is the first parent element with an overflow of "scroll" or "auto", store it
            if(!scrolled && (overflow = $.css(parent, 'overflow')) !== 'hidden' && overflow !== 'visible') { scrolled = $(parent); }
        }
    }
    while((parent = parent.offsetParent));
    // Compensate for containers scroll if it also has an offsetParent (or in IE quirks mode)
    if(scrolled && (scrolled[0] !== ownerDocument[0] || quirks)) {
        scroll(scrolled, 1);
    }
    return pos;
};
// Corner class
// Parses corner strings like "top left", "bottomRight" or "center" into
// x/y components; `precedance` records whether the y axis (t/b prefix)
// or the x axis leads. `C` is the shared prototype the methods below
// attach to; CORNER is the constructor itself.
var C = (CORNER = PROTOTYPE.reposition.Corner = function(corner, forceY) {
    // Split camelCase, normalize "middle" to center, lowercase everything
    corner = ('' + corner).replace(/([A-Z])/, ' $1').replace(/middle/gi, CENTER).toLowerCase();
    this.x = (corner.match(/left|right/i) || corner.match(/center/) || ['inherit'])[0].toLowerCase();
    this.y = (corner.match(/top|bottom|center/i) || ['inherit'])[0].toLowerCase();
    this.forceY = !!forceY;
    var f = corner.charAt(0);
    this.precedance = (f === 't' || f === 'b' ? Y : X);
}).prototype;
// Flips the given axis ('x'/'y') left<->right; non-left/right values
// become `center` when provided, otherwise stay unchanged.
C.invert = function(z, center) {
    var current = this[z];
    if(current === LEFT) { this[z] = RIGHT; }
    else if(current === RIGHT) { this[z] = LEFT; }
    else if(center) { this[z] = center; }
};
// Serializes the corner back into its "y x" / "x y" string form.
C.string = function() {
    var x = this.x, y = this.y;
    if(x === y) { return x; }
    // Vertical precedance (or a forced, non-center y) puts y first
    if(this.precedance === Y || (this.forceY && y !== 'center')) {
        return y + ' ' + x;
    }
    return x + ' ' + y;
};
// Abbreviates the corner string, e.g. "top left" -> "tl".
C.abbrev = function() {
    var parts = this.string().split(' '),
        first = parts[0].charAt(0),
        second = parts[1] ? parts[1].charAt(0) : '';
    return first + second;
};
// Produces an independent copy of this corner.
C.clone = function() {
    var serialized = this.string();
    return new CORNER(serialized, this.forceY);
};
// Core show/hide driver. `state` TRUE shows, FALSE hides; a non-boolean
// toggles based on current visibility. Fires cancelable tooltip(show|hide)
// events, manages mouse tracking, solo mode, ARIA state and the
// show/hide animation, then fires tooltip(visible|hidden).
PROTOTYPE.toggle = function(state, event) {
    var cache = this.cache,
        options = this.options,
        tooltip = this.tooltip;
    // Try to prevent flickering when tooltip overlaps show element
    if(event) {
        if((/over|enter/).test(event.type) && (/out|leave/).test(cache.event.type) &&
            options.show.target.add(event.target).length === options.show.target.length &&
            tooltip.has(event.relatedTarget).length) {
            return this;
        }
        // Cache event
        cache.event = cloneEvent(event);
    }
    // If we're currently waiting and we've just hidden... stop it
    this.waiting && !state && (this.hiddenDuringWait = TRUE);
    // Render the tooltip if showing and it isn't already
    if(!this.rendered) { return state ? this.render(1) : this; }
    else if(this.destroyed || this.disabled) { return this; }
    var type = state ? 'show' : 'hide',
        opts = this.options[type],
        otherOpts = this.options[ !state ? 'show' : 'hide' ],
        posOptions = this.options.position,
        contentOptions = this.options.content,
        width = this.tooltip.css('width'),
        visible = this.tooltip.is(':visible'),
        animate = state || opts.target.length === 1,
        sameTarget = !event || opts.target.length < 2 || cache.target[0] === event.target,
        identicalState, allow, showEvent, delay, after;
    // Detect state if valid one isn't provided
    // (String.search returns -1, which is truthy, when the type string
    // contains neither "boolean" nor "number")
    if((typeof state).search('boolean|number')) { state = !visible; }
    // Check if the tooltip is in an identical state to the new would-be state
    identicalState = !tooltip.is(':animated') && visible === state && sameTarget;
    // Fire tooltip(show/hide) event and check if destroyed
    allow = !identicalState ? !!this._trigger(type, [90]) : NULL;
    // Check to make sure the tooltip wasn't destroyed in the callback
    if(this.destroyed) { return this; }
    // If the user didn't stop the method prematurely and we're showing the tooltip, focus it
    if(allow !== FALSE && state) { this.focus(event); }
    // If the state hasn't changed or the user stopped it, return early
    if(!allow || identicalState) { return this; }
    // Set ARIA hidden attribute
    $.attr(tooltip[0], 'aria-hidden', !!!state);
    // Execute state specific properties
    if(state) {
        // Store show origin coordinates
        cache.origin = cloneEvent(this.mouse);
        // Update tooltip content & title if it's a dynamic function
        if($.isFunction(contentOptions.text)) { this._updateContent(contentOptions.text, FALSE); }
        if($.isFunction(contentOptions.title)) { this._updateTitle(contentOptions.title, FALSE); }
        // Cache mousemove events for positioning purposes (if not already tracking)
        if(!trackingBound && posOptions.target === 'mouse' && posOptions.adjust.mouse) {
            $(document).bind('mousemove.'+NAMESPACE, this._storeMouse);
            trackingBound = TRUE;
        }
        // Update the tooltip position (set width first to prevent viewport/max-width issues)
        if(!width) { tooltip.css('width', tooltip.outerWidth(FALSE)); }
        this.reposition(event, arguments[2]);
        if(!width) { tooltip.css('width', ''); }
        // Hide other tooltips if tooltip is solo
        if(!!opts.solo) {
            (typeof opts.solo === 'string' ? $(opts.solo) : $(SELECTOR, opts.solo))
                .not(tooltip).not(opts.target).qtip('hide', $.Event('tooltipsolo'));
        }
    }
    else {
        // Clear show timer if we're hiding
        clearTimeout(this.timers.show);
        // Remove cached origin on hide
        delete cache.origin;
        // Remove mouse tracking event if not needed (all tracking qTips are hidden)
        if(trackingBound && !$(SELECTOR+'[tracking="true"]:visible', opts.solo).not(tooltip).length) {
            $(document).unbind('mousemove.'+NAMESPACE);
            trackingBound = FALSE;
        }
        // Blur the tooltip
        this.blur(event);
    }
    // Define post-animation, state specific properties
    after = $.proxy(function() {
        if(state) {
            // Prevent antialias from disappearing in IE by removing filter
            if(BROWSER.ie) { tooltip[0].style.removeAttribute('filter'); }
            // Remove overflow setting to prevent tip bugs
            tooltip.css('overflow', '');
            // Autofocus elements if enabled
            if('string' === typeof opts.autofocus) {
                $(this.options.show.autofocus, tooltip).focus();
            }
            // If set, hide tooltip when inactive for delay period
            this.options.show.target.trigger('qtip-'+this.id+'-inactive');
        }
        else {
            // Reset CSS states
            tooltip.css({
                display: '',
                visibility: '',
                opacity: '',
                left: '',
                top: ''
            });
        }
        // tooltipvisible/tooltiphidden events
        this._trigger(state ? 'visible' : 'hidden');
    }, this);
    // If no effect type is supplied, use a simple toggle
    if(opts.effect === FALSE || animate === FALSE) {
        tooltip[ type ]();
        after();
    }
    // Use custom function if provided
    else if($.isFunction(opts.effect)) {
        tooltip.stop(1, 1);
        opts.effect.call(tooltip, this);
        tooltip.queue('fx', function(n) {
            after(); n();
        });
    }
    // Use basic fade function by default
    else { tooltip.fadeTo(90, state ? 1 : 0, after); }
    // If inactive hide method is set, active it
    if(state) { opts.target.trigger('qtip-'+this.id+'-inactive'); }
    return this;
};
// Convenience wrappers around toggle()
PROTOTYPE.show = function(event) {
    return this.toggle(TRUE, event);
};
PROTOTYPE.hide = function(event) {
    return this.toggle(FALSE, event);
};
;PROTOTYPE.focus = function(event) {
if(!this.rendered || this.destroyed) { return this; }
var qtips = $(SELECTOR),
tooltip = this.tooltip,
curIndex = parseInt(tooltip[0].style.zIndex, 10),
newIndex = QTIP.zindex + qtips.length,
focusedElem;
// Only update the z-index if it has changed and tooltip is not already focused
if(!tooltip.hasClass(CLASS_FOCUS)) {
// tooltipfocus event
if(this._trigger('focus', [newIndex], event)) {
// Only update z-index's if they've changed
if(curIndex !== newIndex) {
// Reduce our z-index's and keep them properly ordered
qtips.each(function() {
if(this.style.zIndex > curIndex) {
this.style.zIndex = this.style.zIndex - 1;
}
});
// Fire blur event for focused tooltip
qtips.filter('.' + CLASS_FOCUS).qtip('blur', event);
}
// Set the new z-index
tooltip.addClass(CLASS_FOCUS)[0].style.zIndex = newIndex;
}
}
return this;
};
// Removes focus from this tooltip and fires the tooltipblur event with
// the current z-index.
PROTOTYPE.blur = function(event) {
    if(!this.rendered || this.destroyed) { return this; }
    var tooltip = this.tooltip;
    // Drop the focused class, then notify listeners
    tooltip.removeClass(CLASS_FOCUS);
    this._trigger('blur', [ tooltip.css('zIndex') ], event);
    return this;
};
;// Enables/disables the tooltip. `state` may be TRUE, FALSE or 'toggle';
// any other value is treated as TRUE (disable).
PROTOTYPE.disable = function(state) {
    if(this.destroyed) { return this; }
    var newState = state;
    // 'toggle' flips the current disabled status
    if(newState === 'toggle') {
        newState = !(this.rendered ? this.tooltip.hasClass(CLASS_DISABLED) : this.disabled);
    }
    // Any non-boolean argument means "disable"
    else if(typeof newState !== 'boolean') {
        newState = TRUE;
    }
    if(this.rendered) {
        this.tooltip.toggleClass(CLASS_DISABLED, newState)
            .attr('aria-disabled', newState);
    }
    this.disabled = !!newState;
    return this;
};
// Enabling is simply disabling with a FALSE state
PROTOTYPE.enable = function() {
    return this.disable(FALSE);
};
;PROTOTYPE._createButton = function()
{
var self = this,
elements = this.elements,
tooltip = elements.tooltip,
button = this.options.content.button,
isString = typeof button === 'string',
close = isString ? button : 'Close tooltip';
if(elements.button) { elements.button.remove(); }
// Use custom button if one was supplied by user, else use default
if(button.jquery) {
elements.button = button;
}
else {
elements.button = $('<a />', {
'class': 'qtip-close ' + (this.options.style.widget ? '' : NAMESPACE+'-icon'),
'title': close,
'aria-label': close
})
.prepend(
$('<span />', {
'class': 'ui-icon ui-icon-close',
'html': '×'
})
);
}
// Create button and setup attributes
elements.button.appendTo(elements.titlebar || tooltip)
.attr('role', 'button')
.click(function(event) {
if(!tooltip.hasClass(CLASS_DISABLED)) { self.hide(event); }
return FALSE;
});
};
// Creates/recreates the close button when `button` is truthy, otherwise
// removes the existing one. Returns FALSE when the tooltip isn't rendered.
PROTOTYPE._updateButton = function(button)
{
    // Make sure tooltip is rendered and if not, return
    if(!this.rendered) { return FALSE; }
    var elem = this.elements.button;
    if(button) { this._createButton(); }
    // Guard against a TypeError: only remove a button that actually
    // exists (elements.button is undefined/NULL until _createButton runs)
    else if(elem) { elem.remove(); }
};
;// Widget class creator
// Joins the base widget class names, each optionally suffixed with
// "-<cls>" (e.g. header/content theme variants).
function createWidgetClass(cls) {
    var separator = cls ? '-'+cls+' ' : ' ';
    return WIDGET.concat('').join(separator);
}
// Widget class setter method
PROTOTYPE._setWidget = function()
{
var on = this.options.style.widget,
elements = this.elements,
tooltip = elements.tooltip,
disabled = tooltip.hasClass(CLASS_DISABLED);
tooltip.removeClass(CLASS_DISABLED);
CLASS_DISABLED = on ? 'ui-state-disabled' : 'qtip-disabled';
tooltip.toggleClass(CLASS_DISABLED, disabled);
tooltip.toggleClass('ui-helper-reset '+createWidgetClass(), on).toggleClass(CLASS_DEFAULT, this.options.style.def && !on);
if(elements.content) {
elements.content.toggleClass( createWidgetClass('content'), on);
}
if(elements.titlebar) {
elements.titlebar.toggleClass( createWidgetClass('header'), on);
}
if(elements.button) {
elements.button.toggleClass(NAMESPACE+'-icon', !on);
}
};;function cloneEvent(event) {
return event && {
type: event.type,
pageX: event.pageX,
pageY: event.pageY,
target: event.target,
relatedTarget: event.relatedTarget,
scrollX: event.scrollX || window.pageXOffset || document.body.scrollLeft || document.documentElement.scrollLeft,
scrollY: event.scrollY || window.pageYOffset || document.body.scrollTop || document.documentElement.scrollTop
} || {};
}
// Runs `callback` (bound to `this`) after `duration` ms, or synchronously
// when no positive duration is given. Returns the timer id when deferred.
function delay(callback, duration) {
    if(duration > 0) {
        return setTimeout($.proxy(callback, this), duration);
    }
    callback.call(this);
}
// Show-event handler: (re)starts the show timer unless disabled.
function showMethod(event) {
    if(this.tooltip.hasClass(CLASS_DISABLED)) { return FALSE; }
    // Cancel any pending show/hide before scheduling
    clearTimeout(this.timers.show);
    clearTimeout(this.timers.hide);
    var self = this;
    this.timers.show = delay.call(this,
        function() { self.toggle(TRUE, event); },
        this.options.show.delay
    );
}
// Hide-event handler: decides whether the pointer movement should really
// hide the tooltip, then schedules the hide after options.hide.delay.
function hideMethod(event) {
    if(this.tooltip.hasClass(CLASS_DISABLED)) { return FALSE; }
    // Check if new target was actually the tooltip element
    var relatedTarget = $(event.relatedTarget),
        ontoTooltip = relatedTarget.closest(SELECTOR)[0] === this.tooltip[0],
        ontoTarget = relatedTarget[0] === this.options.show.target[0];
    // Clear timers and stop animation queue
    clearTimeout(this.timers.show);
    clearTimeout(this.timers.hide);
    // Prevent hiding if tooltip is fixed and event target is the tooltip.
    // Or if mouse positioning is enabled and cursor momentarily overlaps
    // NOTE(review): && binds tighter than ||, so this groups as
    // (this !== relatedTarget[0] && mouse-positioned-onto-tooltip) OR
    // (hide.fixed && mouse(out|leave|move) onto tooltip/target) —
    // confirm this grouping is intended before restructuring.
    if(this !== relatedTarget[0] &&
        (this.options.position.target === 'mouse' && ontoTooltip) ||
        (this.options.hide.fixed && (
            (/mouse(out|leave|move)/).test(event.type) && (ontoTooltip || ontoTarget))
        ))
    {
        try {
            event.preventDefault();
            event.stopImmediatePropagation();
        } catch(e) {}
        return;
    }
    // If tooltip has displayed, start hide timer
    // (the trailing `this` argument is ignored by delay())
    this.timers.hide = delay.call(this,
        function() { this.toggle(FALSE, event); },
        this.options.hide.delay,
        this
    );
}
// Restarts the inactivity hide timer each time the user interacts.
function inactiveMethod(event) {
    var inactive = this.options.hide.inactive;
    if(this.tooltip.hasClass(CLASS_DISABLED) || !inactive) { return FALSE; }
    // Restart the inactivity countdown
    clearTimeout(this.timers.inactive);
    this.timers.inactive = delay.call(this,
        function(){ this.hide(event); },
        inactive
    );
}
// Repositions on scroll/resize, but only while the tooltip is visible.
function repositionMethod(event) {
    var visible = this.rendered && this.tooltip[0].offsetWidth > 0;
    if(visible) { this.reposition(event); }
}
// Store mouse coordinates
// Caches a snapshot of the latest mouse event, normalized to type
// 'mousemove', for mouse-targeted positioning.
PROTOTYPE._storeMouse = function(event) {
    var snapshot = cloneEvent(event);
    snapshot.type = 'mousemove';
    this.mouse = snapshot;
};
// Bind events
// Binds `events` (array, or a single pre-joined string) on `targets`,
// namespaced per instance (".<id>[-suffix]") so _unbind() can remove
// them selectively. `context` overrides the handler's `this` (defaults
// to this API instance).
PROTOTYPE._bind = function(targets, events, method, suffix, context) {
    var ns = '.' + this._id + (suffix ? '-'+suffix : '');
    // NOTE(review): when `events` is a string, the namespace is appended
    // only once at the very end — a multi-event string would leave all
    // but the last event un-namespaced. Callers appear to pass arrays or
    // single event names; confirm before relying on string input.
    events.length && $(targets).bind(
        (events.split ? events : events.join(ns + ' ')) + ns,
        $.proxy(method, context || this)
    );
};
// Removes all handlers bound via _bind() under this instance's
// (optionally suffixed) namespace.
PROTOTYPE._unbind = function(targets, suffix) {
    var ns = '.' + this._id + (suffix ? '-'+suffix : '');
    $(targets).unbind(ns);
};
// Apply common event handlers using delegate (avoids excessive .bind calls!)
var ns = '.'+NAMESPACE;
// Delegates `events` on `selector` (from document.body) to `method`,
// invoked with the owning qTip API — looked up via the element's
// ATTR_ID attribute — as context. Disabled APIs are skipped.
function delegate(selector, events, method) {
    $(document.body).delegate(selector,
        (events.split ? events : events.join(ns + ' ')) + ns,
        function() {
            var api = QTIP.api[ $.attr(this, ATTR_ID) ];
            api && !api.disabled && method.apply(api, arguments);
        }
    );
}
// Document-ready: installs the shared delegated handlers used by every
// qTip instance on the page (focus/hover management and inactivity reset).
$(function() {
    delegate(SELECTOR, ['mouseenter', 'mouseleave'], function(event) {
        var state = event.type === 'mouseenter',
            tooltip = $(event.currentTarget),
            target = $(event.relatedTarget || event.target),
            options = this.options;
        // On mouseenter...
        if(state) {
            // Focus the tooltip on mouseenter (z-index stacking)
            this.focus(event);
            // Clear hide timer on tooltip hover to prevent it from closing
            tooltip.hasClass(CLASS_FIXED) && !tooltip.hasClass(CLASS_DISABLED) && clearTimeout(this.timers.hide);
        }
        // On mouseleave...
        else {
            // Hide when we leave the tooltip and not onto the show target (if a hide event is set)
            if(options.position.target === 'mouse' && options.hide.event &&
                options.show.target && !target.closest(options.show.target[0]).length) {
                this.hide(event);
            }
        }
        // Add hover class
        tooltip.toggleClass(CLASS_HOVER, state);
    });
    // Define events which reset the 'inactive' event handler
    delegate('['+ATTR_ID+']', INACTIVE_EVENTS, inactiveMethod);
});
// Event trigger
// Fires a "tooltip<type>" event on the tooltip element (with this API
// prepended to the handler arguments) and reports whether the default
// action was left unprevented.
PROTOTYPE._trigger = function(type, args, event) {
    var evt = $.Event('tooltip'+type),
        payload = [this].concat(args || []);
    evt.originalEvent = (event && $.extend({}, event)) || this.cache.event || NULL;
    // Expose which event is mid-flight (checked by destroy/toggle)
    this.triggering = type;
    this.tooltip.trigger(evt, payload);
    this.triggering = FALSE;
    return !evt.isDefaultPrevented();
};
// Bind the show and hide handlers to their targets. When both target sets
// contain exactly the same elements, any event type appearing in BOTH lists
// is treated as a "toggle" event: one handler that shows or hides depending
// on current visibility.
PROTOTYPE._bindEvents = function(showEvents, hideEvents, showTarget, hideTarget, showMethod, hideMethod) {
    // If hide and show targets are the same...
    if(hideTarget.add(showTarget).length === hideTarget.length) {
        var toggleEvents = [];
        // Filter identical show/hide events
        hideEvents = $.map(hideEvents, function(type) {
            var showIndex = $.inArray(type, showEvents);
            // Both events are identical, remove from both hide and show events
            // and append to toggleEvents
            if(showIndex > -1) {
                toggleEvents.push( showEvents.splice( showIndex, 1 )[0] );
                return;
            }
            return type;
        });
        // Toggle events are special case of identical show/hide events, which happen in sequence
        toggleEvents.length && this._bind(showTarget, toggleEvents, function(event) {
            // Visible (rendered with non-zero width) -> hide; otherwise show
            var state = this.rendered ? this.tooltip[0].offsetWidth > 0 : false;
            (state ? hideMethod : showMethod).call(this, event);
        });
    }
    // Apply show/hide/toggle events
    this._bind(showTarget, showEvents, showMethod);
    this._bind(hideTarget, hideEvents, hideMethod);
};
// Pre-render event assignment: wires up the minimal handlers needed before
// the tooltip element exists — mouse-coordinate caching and the hoverIntent
// show sequence that triggers the first render() after show.delay.
PROTOTYPE._assignInitialEvents = function(event) {
    var options = this.options,
        showTarget = options.show.target,
        hideTarget = options.hide.target,
        showEvents = options.show.event ? $.trim('' + options.show.event).split(' ') : [],
        hideEvents = options.hide.event ? $.trim('' + options.hide.event).split(' ') : [];
    /*
     * Make sure hoverIntent functions properly by using mouseleave as a hide event if
     * mouseenter/mouseout is used for show.event, even if it isn't in the users options.
     */
    if(/mouse(over|enter)/i.test(options.show.event) && !/mouse(out|leave)/i.test(options.hide.event)) {
        hideEvents.push('mouseleave');
    }
    /*
     * Also make sure initial mouse targeting works correctly by caching mousemove coords
     * on show targets before the tooltip has rendered. Also set onTarget when triggered to
     * keep mouse tracking working.
     */
    this._bind(showTarget, 'mousemove', function(event) {
        this._storeMouse(event);
        this.cache.onTarget = TRUE;
    });
    // Define hoverIntent function
    function hoverIntent(event) {
        // Only continue if tooltip isn't disabled
        if(this.disabled || this.destroyed) { return FALSE; }
        // Cache the event data
        this.cache.event = cloneEvent(event);
        this.cache.target = event ? $(event.target) : [undefined];
        // Start the event sequence: (re)start the delayed render timer
        clearTimeout(this.timers.show);
        this.timers.show = delay.call(this,
            function() { this.render(typeof event === 'object' || options.show.ready); },
            options.show.delay
        );
    }
    // Filter and bind events
    this._bindEvents(showEvents, hideEvents, showTarget, hideTarget, hoverIntent, function() {
        clearTimeout(this.timers.show);
    });
    // Prerendering is enabled, create tooltip now
    if(options.show.ready || options.prerender) { hoverIntent.call(this, event); }
};
// Event assignment method
// Wires up all runtime handlers for a rendered tooltip: user event callbacks,
// window-leave hiding, hide.fixed support, 'unfocus' hiding, inactivity
// timers, the main show/hide triggers, mouse tracking/positioning, and
// scroll/resize repositioning.
PROTOTYPE._assignEvents = function() {
    var self = this,
        options = this.options,
        posOptions = options.position,
        tooltip = this.tooltip,
        showTarget = options.show.target,
        hideTarget = options.hide.target,
        containerTarget = posOptions.container,
        viewportTarget = posOptions.viewport,
        documentTarget = $(document),
        // NOTE(review): bodyTarget appears unused in this method
        bodyTarget = $(document.body),
        windowTarget = $(window),
        showEvents = options.show.event ? $.trim('' + options.show.event).split(' ') : [],
        hideEvents = options.hide.event ? $.trim('' + options.hide.event).split(' ') : [];
    // Assign passed event callbacks ('toggle' listens to both show and hide)
    $.each(options.events, function(name, callback) {
        self._bind(tooltip, name === 'toggle' ? ['tooltipshow','tooltiphide'] : ['tooltip'+name], callback, null, tooltip);
    });
    // Hide tooltips when leaving current window/frame (but not select/option elements)
    if(/mouse(out|leave)/i.test(options.hide.event) && options.hide.leave === 'window') {
        this._bind(documentTarget, ['mouseout', 'blur'], function(event) {
            if(!/select|option/.test(event.target.nodeName) && !event.relatedTarget) {
                this.hide(event);
            }
        });
    }
    // Enable hide.fixed by adding appropriate class
    if(options.hide.fixed) {
        hideTarget = hideTarget.add( tooltip.addClass(CLASS_FIXED) );
    }
    /*
     * Make sure hoverIntent functions properly by using mouseleave to clear show timer if
     * mouseenter/mouseout is used for show.event, even if it isn't in the users options.
     */
    else if(/mouse(over|enter)/i.test(options.show.event)) {
        this._bind(hideTarget, 'mouseleave', function() {
            clearTimeout(this.timers.show);
        });
    }
    // Hide tooltip on document mousedown if unfocus events are enabled
    if(('' + options.hide.event).indexOf('unfocus') > -1) {
        this._bind(containerTarget.closest('html'), ['mousedown', 'touchstart'], function(event) {
            var elem = $(event.target),
                enabled = this.rendered && !this.tooltip.hasClass(CLASS_DISABLED) && this.tooltip[0].offsetWidth > 0,
                isAncestor = elem.parents(SELECTOR).filter(this.tooltip[0]).length > 0;
            // Hide only for presses outside the target, the tooltip and its descendants
            if(elem[0] !== this.target[0] && elem[0] !== this.tooltip[0] && !isAncestor &&
                !this.target.has(elem[0]).length && enabled
            ) {
                this.hide(event);
            }
        });
    }
    // Check if the tooltip hides when inactive
    if('number' === typeof options.hide.inactive) {
        // Bind inactive method to show target(s) as a custom event
        this._bind(showTarget, 'qtip-'+this.id+'-inactive', inactiveMethod);
        // Define events which reset the 'inactive' event handler
        this._bind(hideTarget.add(tooltip), QTIP.inactiveEvents, inactiveMethod, '-inactive');
    }
    // Filter and bind events
    this._bindEvents(showEvents, hideEvents, showTarget, hideTarget, showMethod, hideMethod);
    // Mouse movement bindings
    this._bind(showTarget.add(tooltip), 'mousemove', function(event) {
        // Check if the tooltip hides when mouse is moved a certain distance
        if('number' === typeof options.hide.distance) {
            var origin = this.cache.origin || {},
                limit = this.options.hide.distance,
                abs = Math.abs;
            // Check if the movement has gone beyond the limit, and hide it if so
            if(abs(event.pageX - origin.pageX) >= limit || abs(event.pageY - origin.pageY) >= limit) {
                this.hide(event);
            }
        }
        // Cache mousemove coords on show targets
        this._storeMouse(event);
    });
    // Mouse positioning events
    if(posOptions.target === 'mouse') {
        // If mouse adjustment is on...
        if(posOptions.adjust.mouse) {
            // Apply a mouseleave event so we don't get problems with overlapping
            if(options.hide.event) {
                // Track if we're on the target or not
                this._bind(showTarget, ['mouseenter', 'mouseleave'], function(event) {
                    this.cache.onTarget = event.type === 'mouseenter';
                });
            }
            // Update tooltip position on mousemove
            this._bind(documentTarget, 'mousemove', function(event) {
                // Update the tooltip position only if the tooltip is visible and adjustment is enabled
                if(this.rendered && this.cache.onTarget && !this.tooltip.hasClass(CLASS_DISABLED) && this.tooltip[0].offsetWidth > 0) {
                    this.reposition(event);
                }
            });
        }
    }
    // Adjust positions of the tooltip on window resize if enabled
    if(posOptions.adjust.resize || viewportTarget.length) {
        this._bind( $.event.special.resize ? viewportTarget : windowTarget, 'resize', repositionMethod );
    }
    // Adjust tooltip position on scroll of the window or viewport element if present
    if(posOptions.adjust.scroll) {
        this._bind( windowTarget.add(posOptions.container), 'scroll', repositionMethod );
    }
};
// Un-assignment method
// Removes every namespaced handler this tooltip attached during
// _assignEvents()/_assignInitialEvents(). Entries that never resolved to an
// element (e.g. `this.rendered && this.tooltip[0]` when not yet rendered)
// are non-objects and are filtered out before unbinding.
PROTOTYPE._unassignEvents = function() {
    var candidates = [
        this.options.show.target[0],
        this.options.hide.target[0],
        this.rendered && this.tooltip[0],
        this.options.position.container[0],
        this.options.position.viewport[0],
        this.options.position.container.closest('html')[0], // unfocus
        window,
        document
    ];
    var targets = $.grep(candidates, function(target) {
        return typeof target === 'object';
    });
    this._unbind( $([]).pushStack(targets) );
};
;// Initialization method
// Builds the merged configuration for one element (defaults + user options +
// metadata/data attributes), resolves FALSE placeholder targets/containers,
// stashes the native title attribute, and constructs the QTip instance.
// Returns FALSE when no valid content is found or when an existing tooltip
// must not be overwritten.
function init(elem, id, opts) {
    var obj, posOptions, attr, config, title,
        // Setup element references
        docBody = $(document.body),
        // Use document body instead of document element if needed
        newTarget = elem[0] === document ? docBody : elem,
        // Grab metadata from element if plugin is present
        metadata = (elem.metadata) ? elem.metadata(opts.metadata) : NULL,
        // If metadata type is HTML5, grab 'name' from the object instead, or use the regular data object otherwise
        metadata5 = opts.metadata.type === 'html5' && metadata ? metadata[opts.metadata.name] : NULL,
        // Grab data from metadata.name (or data-qtipopts as fallback) using .data() method,
        html5 = elem.data(opts.metadata.name || 'qtipopts');
    // If we don't get an object returned attempt to parse it manually without parseJSON
    try { html5 = typeof html5 === 'string' ? $.parseJSON(html5) : html5; } catch(e) {}
    // Merge in and sanitize metadata
    config = $.extend(TRUE, {}, QTIP.defaults, opts,
        typeof html5 === 'object' ? sanitizeOptions(html5) : NULL,
        sanitizeOptions(metadata5 || metadata));
    // Re-grab our positioning options now we've merged our metadata and set id to passed value
    posOptions = config.position;
    config.id = id;
    // Setup missing content if none is detected
    if('boolean' === typeof config.content.text) {
        attr = elem.attr(config.content.attr);
        // Grab from supplied attribute if available
        if(config.content.attr !== FALSE && attr) { config.content.text = attr; }
        // No valid content was found, abort render
        else { return FALSE; }
    }
    // Setup target options: FALSE placeholders resolve to the bound element
    if(!posOptions.container.length) { posOptions.container = docBody; }
    if(posOptions.target === FALSE) { posOptions.target = newTarget; }
    if(config.show.target === FALSE) { config.show.target = newTarget; }
    if(config.show.solo === TRUE) { config.show.solo = posOptions.container.closest('body'); }
    if(config.hide.target === FALSE) { config.hide.target = newTarget; }
    if(config.position.viewport === TRUE) { config.position.viewport = posOptions.container; }
    // Ensure we only use a single container
    posOptions.container = posOptions.container.eq(0);
    // Convert position corner values into x and y strings
    posOptions.at = new CORNER(posOptions.at, TRUE);
    posOptions.my = new CORNER(posOptions.my);
    // Destroy previous tooltip if overwrite is enabled, or skip element if not
    if(elem.data(NAMESPACE)) {
        if(config.overwrite) {
            elem.qtip('destroy', true);
        }
        else if(config.overwrite === FALSE) {
            return FALSE;
        }
    }
    // Add has-qtip attribute
    elem.attr(ATTR_HAS, id);
    // Remove title attribute and store it if present
    if(config.suppress && (title = elem.attr('title'))) {
        // Final attr call fixes event delegation and IE default tooltip showing problem
        elem.removeAttr('title').attr(oldtitle, title).attr('title', '');
    }
    // Initialize the tooltip and add API reference
    obj = new QTip(elem, config, id, !!attr);
    elem.data(NAMESPACE, obj);
    // Catch remove/removeqtip events on target element to destroy redundant tooltip
    elem.one('remove.qtip-'+id+' removeqtip.qtip-'+id, function() {
        var api; if((api = $(this).data(NAMESPACE))) { api.destroy(true); }
    });
    return obj;
}
// jQuery $.fn extension method
// Main $.fn.qtip entry point. With no args (or 'api') it returns the existing
// API of the first element; with a string it executes an API command
// ('option'/'options' support get/set notation); with an options object (or
// nothing) it creates a new tooltip per matched element.
QTIP = $.fn.qtip = function(options, notation, newValue)
{
    var command = ('' + options).toLowerCase(), // Parse command
        returned = NULL,
        args = $.makeArray(arguments).slice(1),
        event = args[args.length - 1],
        opts = this[0] ? $.data(this[0], NAMESPACE) : NULL;
    // Check for API request
    if((!arguments.length && opts) || command === 'api') {
        return opts;
    }
    // Execute API command if present
    else if('string' === typeof options) {
        this.each(function() {
            var api = $.data(this, NAMESPACE);
            if(!api) { return TRUE; }
            // Cache the event if possible
            if(event && event.timeStamp) { api.cache.event = event; }
            // Check for specific API commands
            if(notation && (command === 'option' || command === 'options')) {
                if(newValue !== undefined || $.isPlainObject(notation)) {
                    api.set(notation, newValue);
                }
                else {
                    // Getter: capture the value and break out of .each()
                    returned = api.get(notation);
                    return FALSE;
                }
            }
            // Execute API command
            else if(api[command]) {
                api[command].apply(api, args);
            }
        });
        return returned !== NULL ? returned : this;
    }
    // No API commands. validate provided options and setup qTips
    else if('object' === typeof options || !arguments.length) {
        // Sanitize options first
        opts = sanitizeOptions($.extend(TRUE, {}, options));
        return this.each(function(i) {
            var api, id;
            // Find next available ID, or use custom ID if provided
            id = $.isArray(opts.id) ? opts.id[i] : opts.id;
            id = !id || id === FALSE || id.length < 1 || QTIP.api[id] ? QTIP.nextid++ : id;
            // Initialize the qTip and re-grab newly sanitized options
            api = init($(this), id, opts);
            if(api === FALSE) { return TRUE; }
            else { QTIP.api[id] = api; }
            // Initialize plugins flagged for creation-time setup
            $.each(PLUGINS, function() {
                if(this.initialize === 'initialize') { this(api); }
            });
            // Assign initial pre-render events
            api._assignInitialEvents(event);
        });
    }
};
// Expose class for external use/subclassing
$.qtip = QTip;
// Lookup of live qTip APIs keyed by id — populated in render method
QTIP.api = {};
;$.each({
/* Allow other plugins to successfully retrieve the title of an element with a qTip applied */
attr: function(attr, val) {
if(this.length) {
var self = this[0],
title = 'title',
api = $.data(self, 'qtip');
if(attr === title && api && 'object' === typeof api && api.options.suppress) {
if(arguments.length < 2) {
return $.attr(self, oldtitle);
}
// If qTip is rendered and title was originally used as content, update it
if(api && api.options.content.attr === title && api.cache.attr) {
api.set('content.text', val);
}
// Use the regular attr method to set, then cache the result
return this.attr(oldtitle, val);
}
}
return $.fn['attr'+replaceSuffix].apply(this, arguments);
},
/* Allow clone to correctly retrieve cached title attributes */
clone: function(keepData) {
var titles = $([]), title = 'title',
// Clone our element using the real clone method
elems = $.fn['clone'+replaceSuffix].apply(this, arguments);
// Grab all elements with an oldtitle set, and change it to regular title attribute, if keepData is false
if(!keepData) {
elems.filter('['+oldtitle+']').attr('title', function() {
return $.attr(this, oldtitle);
})
.removeAttr(oldtitle);
}
return elems;
}
}, function(name, func) {
if(!func || $.fn[name+replaceSuffix]) { return TRUE; }
var old = $.fn[name+replaceSuffix] = $.fn[name];
$.fn[name] = function() {
return func.apply(this, arguments) || old.apply(this, arguments);
};
});
/* Fire off 'removeqtip' handler in $.cleanData if jQuery UI not present (it already does similar).
 * This snippet is taken directly from jQuery UI source code found here:
 * http://code.jquery.com/ui/jquery-ui-git.js
 *
 * Ensures tooltips are destroyed (via the 'removeqtip' handler bound in
 * init()) when their target element is removed from the DOM.
 */
if(!$.ui) {
    $['cleanData'+replaceSuffix] = $.cleanData;
    $.cleanData = function( elems ) {
        for(var i = 0, elem; (elem = $( elems[i] )).length; i++) {
            if(elem.attr(ATTR_HAS)) {
                // Best-effort: ignore handler errors during teardown
                try { elem.triggerHandler('removeqtip'); }
                catch( e ) {}
            }
        }
        $['cleanData'+replaceSuffix].apply(this, arguments);
    };
}
;// qTip version
QTIP.version = '2.2.0';
// Base ID for all qTips
QTIP.nextid = 0;
// Inactive events array
QTIP.inactiveEvents = INACTIVE_EVENTS;
// Base z-index for all qTips
QTIP.zindex = 15000;
// Define configuration defaults
// Many FALSE placeholders are resolved at creation time by init() — e.g.
// show.target/hide.target FALSE become the bound element itself.
QTIP.defaults = {
    prerender: FALSE,
    id: FALSE,
    overwrite: TRUE,
    suppress: TRUE,
    content: {
        // TRUE means "pull text from the attribute named by content.attr"
        text: TRUE,
        attr: 'title',
        title: FALSE,
        button: FALSE
    },
    position: {
        my: 'top left',
        at: 'bottom right',
        target: FALSE,
        container: FALSE,
        viewport: FALSE,
        adjust: {
            x: 0, y: 0,
            mouse: TRUE,
            scroll: TRUE,
            resize: TRUE,
            method: 'flipinvert flipinvert'
        },
        // Default positioning animation
        effect: function(api, pos, viewport) {
            $(this).animate(pos, {
                duration: 200,
                queue: FALSE
            });
        }
    },
    show: {
        target: FALSE,
        event: 'mouseenter',
        effect: TRUE,
        delay: 90,
        solo: FALSE,
        ready: FALSE,
        autofocus: FALSE
    },
    hide: {
        target: FALSE,
        event: 'mouseleave',
        effect: TRUE,
        delay: 0,
        fixed: FALSE,
        inactive: FALSE,
        leave: 'window',
        distance: FALSE
    },
    style: {
        classes: '',
        widget: FALSE,
        width: FALSE,
        height: FALSE,
        def: TRUE
    },
    // User event callbacks (bound in _assignEvents)
    events: {
        render: NULL,
        move: NULL,
        show: NULL,
        hide: NULL,
        toggle: NULL,
        visible: NULL,
        hidden: NULL,
        focus: NULL,
        blur: NULL
    }
};
;var TIP,
// .bind()/.on() namespace
TIPNS = '.qtip-tip',
// Common CSS strings
MARGIN = 'margin',
BORDER = 'border',
COLOR = 'color',
BG_COLOR = 'background-color',
TRANSPARENT = 'transparent',
IMPORTANT = ' !important',
// Check if the browser supports <canvas/> elements
HASCANVAS = !!document.createElement('canvas').getContext,
// Invalid colour values used in parseColours()
INVALID = /rgba?\(0, 0, 0(, 0)?\)|transparent|#123456/i;
// Camel-case method, taken from jQuery source
// http://code.jquery.com/jquery-1.8.0.js
function camel(s) {
    var first = s.charAt(0);
    return first.toUpperCase() + s.slice(1);
}
/*
 * Modified from Modernizr's testPropsAll()
 * http://modernizr.com/downloads/modernizr-latest.js
 */
// Cache of resolved property names, plus the vendor prefixes to probe
var cssProps = {}, cssPrefixes = ["Webkit", "O", "Moz", "ms"];
// Reads a CSS property from `elem`, trying the unprefixed name first and then
// each vendor-prefixed variant. The first name yielding a defined value is
// memoised in cssProps so later lookups skip the probe loop. Returns
// undefined when no variant is defined.
function vendorCss(elem, prop) {
    // Fast path: property name resolved on a previous call
    if(cssProps[prop]) { return elem.css(cssProps[prop]); }
    var ucProp = camel(prop),
        candidates = (prop + ' ' + cssPrefixes.join(ucProp + ' ') + ucProp).split(' '),
        i, name, val;
    for(i = 0; i < candidates.length; i++) {
        name = candidates[i];
        val = elem.css(name);
        if(val !== undefined) {
            cssProps[prop] = name;
            return val;
        }
    }
}
// Parse a given elements CSS property into an int (rounded up)
function intCss(elem, prop) {
    var value = vendorCss(elem, prop);
    return Math.ceil(parseFloat(value));
}
// VML creation (for IE only)
if(!HASCANVAS) {
    // Builds a VML (IE's legacy vector markup) element string for the given
    // tag with the supplied attributes and inline style
    var createVML = function(tag, props, style) {
        return '<qtipvml:'+tag+' xmlns="urn:schemas-microsoft.com:vml" class="qtip-vml" '+(props||'')+
            ' style="behavior: url(#default#VML); '+(style||'')+ '" />';
    };
}
// Canvas only definitions
else {
    // Device-pixel vs. canvas backing-store ratio; SCALE is used to draw the
    // tip crisply on high-DPI displays
    var PIXEL_RATIO = window.devicePixelRatio || 1,
        BACKING_STORE_RATIO = (function() {
            var context = document.createElement('canvas').getContext('2d');
            return context.backingStorePixelRatio || context.webkitBackingStorePixelRatio || context.mozBackingStorePixelRatio ||
                context.msBackingStorePixelRatio || context.oBackingStorePixelRatio || 1;
        }()),
        SCALE = PIXEL_RATIO / BACKING_STORE_RATIO;
}
// Tip plugin constructor: stores the tip options, user offset and the initial
// [width, height] size, then builds the tip element(s) via init().
function Tip(qtip, options) {
    this._ns = 'tip';
    this.qtip = qtip;
    this.options = options;
    this.offset = options.offset;
    this.size = [ options.width, options.height ];
    // Initialize
    this.init(qtip);
}
// Tip drawing methods — one Tip instance per tooltip owns the .qtip-tip
// element created here.
$.extend(Tip.prototype, {
    // Build the tip element, set up the drawing surface (canvas, or VML on
    // old IE), and hook tooltipmove so the tip follows repositioning.
    init: function(qtip) {
        var context, tip;
        // Create tip element and prepend to the tooltip
        tip = this.element = qtip.elements.tip = $('<div />', { 'class': NAMESPACE+'-tip' }).prependTo(qtip.tooltip);
        // Create tip drawing element(s)
        if(HASCANVAS) {
            // save() as soon as we create the canvas element so FF2 doesn't bork on our first restore()!
            context = $('<canvas />').appendTo(this.element)[0].getContext('2d');
            // Setup constant parameters
            context.lineJoin = 'miter';
            context.miterLimit = 100000;
            context.save();
        }
        else {
            context = createVML('shape', 'coordorigin="0,0"', 'position:absolute;');
            // Two identical shapes: update() uses one for the stroke, one for the fill
            this.element.html(context + context);
            // Prevent mousing down on the tip since it causes problems with .live() handling in IE due to VML
            qtip._bind( $('*', tip).add(tip), ['click', 'mousedown'], function(event) { event.stopPropagation(); }, this._ns);
        }
        // Bind update events
        qtip._bind(qtip.tooltip, 'tooltipmove', this.reposition, this._ns, this);
        // Create it
        this.create();
    },
_swapDimensions: function() {
this.size[0] = this.options.height;
this.size[1] = this.options.width;
},
_resetDimensions: function() {
this.size[0] = this.options.width;
this.size[1] = this.options.height;
},
_useTitle: function(corner) {
var titlebar = this.qtip.elements.titlebar;
return titlebar && (
corner.y === TOP || (corner.y === CENTER && this.element.position().top + (this.size[1] / 2) + this.options.offset < titlebar.outerHeight(TRUE))
);
},
_parseCorner: function(corner) {
var my = this.qtip.options.position.my;
// Detect corner and mimic properties
if(corner === FALSE || my === FALSE) {
corner = FALSE;
}
else if(corner === TRUE) {
corner = new CORNER( my.string() );
}
else if(!corner.string) {
corner = new CORNER(corner);
corner.fixed = TRUE;
}
return corner;
},
_parseWidth: function(corner, side, use) {
var elements = this.qtip.elements,
prop = BORDER + camel(side) + 'Width';
return (use ? intCss(use, prop) : (
intCss(elements.content, prop) ||
intCss(this._useTitle(corner) && elements.titlebar || elements.content, prop) ||
intCss(elements.tooltip, prop)
)) || 0;
},
_parseRadius: function(corner) {
var elements = this.qtip.elements,
prop = BORDER + camel(corner.y) + camel(corner.x) + 'Radius';
return BROWSER.ie < 9 ? 0 :
intCss(this._useTitle(corner) && elements.titlebar || elements.content, prop) ||
intCss(elements.tooltip, prop) || 0;
},
_invalidColour: function(elem, prop, compare) {
var val = elem.css(prop);
return !val || (compare && val === elem.css(compare)) || INVALID.test(val) ? FALSE : val;
},
    // Detect the tip's fill and border colours by probing the tip element,
    // then titlebar/content, then the tooltip itself. Returns [fill, border].
    // Side effect: resets the tip's (and its children's) background/border
    // CSS so the probed values don't leak into the drawn tip.
    _parseColours: function(corner) {
        var elements = this.qtip.elements,
            tip = this.element.css('cssText', ''),
            borderSide = BORDER + camel(corner[ corner.precedance ]) + camel(COLOR),
            colorElem = this._useTitle(corner) && elements.titlebar || elements.content,
            css = this._invalidColour, color = [];
        // Attempt to detect the background colour from various elements, left-to-right precedance
        color[0] = css(tip, BG_COLOR) || css(colorElem, BG_COLOR) || css(elements.content, BG_COLOR) ||
            css(elements.tooltip, BG_COLOR) || tip.css(BG_COLOR);
        // Attempt to detect the correct border side colour from various elements, left-to-right precedance
        color[1] = css(tip, borderSide, COLOR) || css(colorElem, borderSide, COLOR) ||
            css(elements.content, borderSide, COLOR) || css(elements.tooltip, borderSide, COLOR) || elements.tooltip.css(borderSide);
        // Reset background and border colours
        $('*', tip).add(tip).css('cssText', BG_COLOR+':'+TRANSPARENT+IMPORTANT+';'+BORDER+':0'+IMPORTANT+';');
        return color;
    },
    // Compute the outer [width, height] of the tip once the border stroke is
    // accounted for, using right-triangle geometry (sqrt of squared sides)
    // on the configured tip dimensions. Result is ordered for the corner's
    // precedance: x-precedance corners get the pair reversed.
    _calculateSize: function(corner) {
        var y = corner.precedance === Y,
            width = this.options['width'],
            height = this.options['height'],
            isCenter = corner.abbrev() === 'c',
            // Center tips are symmetric, so only half the base is used
            base = (y ? width: height) * (isCenter ? 0.5 : 1),
            pow = Math.pow,
            round = Math.round,
            bigHyp, ratio, result,
            smallHyp = Math.sqrt( pow(base, 2) + pow(height, 2) ),
            // Border-stroke hypotenuses along each slanted side
            hyp = [ (this.border / base) * smallHyp, (this.border / height) * smallHyp ];
        hyp[2] = Math.sqrt( pow(hyp[0], 2) - pow(this.border, 2) );
        hyp[3] = Math.sqrt( pow(hyp[1], 2) - pow(this.border, 2) );
        bigHyp = smallHyp + hyp[2] + hyp[3] + (isCenter ? 0 : hyp[0]);
        ratio = bigHyp / smallHyp;
        result = [ round(ratio * width), round(ratio * height) ];
        return y ? result : result.reverse();
    },
    // Tip coordinates calculator
    // Produces the tip triangle's three vertices as a flattened
    // [x1,y1, x2,y2, x3,y3] array for the given corner abbreviation, scaled
    // by `scale` (used for high-DPI canvas rendering).
    _calculateTip: function(corner, size, scale) {
        scale = scale || 1;
        size = size || this.size;
        var width = size[0] * scale,
            height = size[1] * scale,
            width2 = Math.ceil(width / 2), height2 = Math.ceil(height / 2),
            // Define tip coordinates in terms of height and width values
            tips = {
                br: [0,0, width,height, width,0],
                bl: [0,0, width,0, 0,height],
                tr: [0,height, width,0, width,height],
                tl: [0,0, 0,height, width,height],
                tc: [0,height, width2,0, width,height],
                bc: [0,0, width,0, width2,height],
                rc: [0,0, width,height2, 0,height],
                lc: [width,0, width,height, 0,height2]
            };
        // Set common side shapes (sides share the corner triangles)
        tips.lt = tips.br; tips.rt = tips.bl;
        tips.lb = tips.tr; tips.rb = tips.tl;
        return tips[ corner.abbrev() ];
    },
// Tip coordinates drawer (canvas)
_drawCoords: function(context, coords) {
context.beginPath();
context.moveTo(coords[0], coords[1]);
context.lineTo(coords[2], coords[3]);
context.lineTo(coords[4], coords[5]);
context.closePath();
},
create: function() {
// Determine tip corner
var c = this.corner = (HASCANVAS || BROWSER.ie) && this._parseCorner(this.options.corner);
// If we have a tip corner...
if( (this.enabled = !!this.corner && this.corner.abbrev() !== 'c') ) {
// Cache it
this.qtip.cache.corner = c.clone();
// Create it
this.update();
}
// Toggle tip element
this.element.toggle(this.enabled);
return this.corner;
},
update: function(corner, position) {
if(!this.enabled) { return this; }
var elements = this.qtip.elements,
tip = this.element,
inner = tip.children(),
options = this.options,
curSize = this.size,
mimic = options.mimic,
round = Math.round,
color, precedance, context,
coords, bigCoords, translate, newSize, border, BACKING_STORE_RATIO;
// Re-determine tip if not already set
if(!corner) { corner = this.qtip.cache.corner || this.corner; }
// Use corner property if we detect an invalid mimic value
if(mimic === FALSE) { mimic = corner; }
// Otherwise inherit mimic properties from the corner object as necessary
else {
mimic = new CORNER(mimic);
mimic.precedance = corner.precedance;
if(mimic.x === 'inherit') { mimic.x = corner.x; }
else if(mimic.y === 'inherit') { mimic.y = corner.y; }
else if(mimic.x === mimic.y) {
mimic[ corner.precedance ] = corner[ corner.precedance ];
}
}
precedance = mimic.precedance;
// Ensure the tip width.height are relative to the tip position
if(corner.precedance === X) { this._swapDimensions(); }
else { this._resetDimensions(); }
// Update our colours
color = this.color = this._parseColours(corner);
// Detect border width, taking into account colours
if(color[1] !== TRANSPARENT) {
// Grab border width
border = this.border = this._parseWidth(corner, corner[corner.precedance]);
// If border width isn't zero, use border color as fill if it's not invalid (1.0 style tips)
if(options.border && border < 1 && !INVALID.test(color[1])) { color[0] = color[1]; }
// Set border width (use detected border width if options.border is true)
this.border = border = options.border !== TRUE ? options.border : border;
}
// Border colour was invalid, set border to zero
else { this.border = border = 0; }
// Determine tip size
newSize = this.size = this._calculateSize(corner);
tip.css({
width: newSize[0],
height: newSize[1],
lineHeight: newSize[1]+'px'
});
// Calculate tip translation
if(corner.precedance === Y) {
translate = [
round(mimic.x === LEFT ? border : mimic.x === RIGHT ? newSize[0] - curSize[0] - border : (newSize[0] - curSize[0]) / 2),
round(mimic.y === TOP ? newSize[1] - curSize[1] : 0)
];
}
else {
translate = [
round(mimic.x === LEFT ? newSize[0] - curSize[0] : 0),
round(mimic.y === TOP ? border : mimic.y === BOTTOM ? newSize[1] - curSize[1] - border : (newSize[1] - curSize[1]) / 2)
];
}
// Canvas drawing implementation
if(HASCANVAS) {
// Grab canvas context and clear/save it
context = inner[0].getContext('2d');
context.restore(); context.save();
context.clearRect(0,0,6000,6000);
// Calculate coordinates
coords = this._calculateTip(mimic, curSize, SCALE);
bigCoords = this._calculateTip(mimic, this.size, SCALE);
// Set the canvas size using calculated size
inner.attr(WIDTH, newSize[0] * SCALE).attr(HEIGHT, newSize[1] * SCALE);
inner.css(WIDTH, newSize[0]).css(HEIGHT, newSize[1]);
// Draw the outer-stroke tip
this._drawCoords(context, bigCoords);
context.fillStyle = color[1];
context.fill();
// Draw the actual tip
context.translate(translate[0] * SCALE, translate[1] * SCALE);
this._drawCoords(context, coords);
context.fillStyle = color[0];
context.fill();
}
// VML (IE Proprietary implementation)
else {
// Calculate coordinates
coords = this._calculateTip(mimic);
// Setup coordinates string
coords = 'm' + coords[0] + ',' + coords[1] + ' l' + coords[2] +
',' + coords[3] + ' ' + coords[4] + ',' + coords[5] + ' xe';
// Setup VML-specific offset for pixel-perfection
translate[2] = border && /^(r|b)/i.test(corner.string()) ?
BROWSER.ie === 8 ? 2 : 1 : 0;
// Set initial CSS
inner.css({
coordsize: (newSize[0]+border) + ' ' + (newSize[1]+border),
antialias: ''+(mimic.string().indexOf(CENTER) > -1),
left: translate[0] - (translate[2] * Number(precedance === X)),
top: translate[1] - (translate[2] * Number(precedance === Y)),
width: newSize[0] + border,
height: newSize[1] + border
})
.each(function(i) {
var $this = $(this);
// Set shape specific attributes
$this[ $this.prop ? 'prop' : 'attr' ]({
coordsize: (newSize[0]+border) + ' ' + (newSize[1]+border),
path: coords,
fillcolor: color[0],
filled: !!i,
stroked: !i
})
.toggle(!!(border || i));
// Check if border is enabled and add stroke element
!i && $this.html( createVML(
'stroke', 'weight="'+(border*2)+'px" color="'+color[1]+'" miterlimit="1000" joinstyle="miter"'
) );
});
}
// Opera bug #357 - Incorrect tip position
// https://github.com/Craga89/qTip2/issues/367
window.opera && setTimeout(function() {
elements.tip.css({
display: 'inline-block',
visibility: 'visible'
});
}, 1);
// Position if needed
if(position !== FALSE) { this.calculate(corner, newSize); }
},
    // Compute the CSS position of the tip element along the tooltip edge for
    // the given corner, accounting for border widths, border-radius and the
    // user offset. Applies the position to the tip element and returns the
    // CSS map (FALSE when the tip is disabled).
    calculate: function(corner, size) {
        if(!this.enabled) { return FALSE; }
        var self = this,
            elements = this.qtip.elements,
            tip = this.element,
            userOffset = this.options.offset,
            // NOTE(review): isWidget appears unused in this method
            isWidget = elements.tooltip.hasClass('ui-widget'),
            position = { },
            precedance, corners;
        // Inherit corner if not provided
        corner = corner || this.corner;
        precedance = corner.precedance;
        // Determine which tip dimension to use for adjustment
        size = size || this._calculateSize(corner);
        // Setup corners and offset array
        corners = [ corner.x, corner.y ];
        if(precedance === X) { corners.reverse(); }
        // Calculate tip position
        $.each(corners, function(i, side) {
            var b, bc, br;
            if(side === CENTER) {
                // Center along this axis: 50% minus half the tip size via margin
                b = precedance === Y ? LEFT : TOP;
                position[ b ] = '50%';
                position[MARGIN+'-' + b] = -Math.round(size[ precedance === Y ? 0 : 1 ] / 2) + userOffset;
            }
            else {
                // Offset by tooltip/content border widths and corner radius
                b = self._parseWidth(corner, side, elements.tooltip);
                bc = self._parseWidth(corner, side, elements.content);
                br = self._parseRadius(corner);
                position[ side ] = Math.max(-self.border, i ? bc : (userOffset + (br > b ? br : -b)));
            }
        });
        // Adjust for tip size
        position[ corner[precedance] ] -= size[ precedance === X ? 0 : 1 ];
        // Set and return new position
        tip.css({ margin: '', top: '', bottom: '', left: '', right: '' }).css(position);
        return position;
    },
    // tooltipmove handler: reacts to the positioner's shift/flip viewport
    // adjustments (pos.adjusted). May flip the tip to another corner, slide
    // it along an edge, hide it entirely, and compensates the tooltip
    // coordinates in `pos` for the tip's size/border.
    reposition: function(event, api, pos, viewport) {
        if(!this.enabled) { return; }
        var cache = api.cache,
            newCorner = this.corner.clone(),
            adjust = pos.adjusted,
            method = api.options.position.adjust.method.split(' '),
            horizontal = method[0],
            vertical = method[1] || method[0],
            shift = { left: FALSE, top: FALSE, x: 0, y: 0 },
            offset, css = {}, props;
        // Flip/realign the corner on an axis the positioner adjusted
        function shiftflip(direction, precedance, popposite, side, opposite) {
            // Horizontal - Shift or flip method
            if(direction === SHIFT && newCorner.precedance === precedance && adjust[side] && newCorner[popposite] !== CENTER) {
                newCorner.precedance = newCorner.precedance === X ? Y : X;
            }
            else if(direction !== SHIFT && adjust[side]){
                newCorner[precedance] = newCorner[precedance] === CENTER ?
                    (adjust[side] > 0 ? side : opposite) : (newCorner[precedance] === side ? opposite : side);
            }
        }
        // Slide the tip along its edge to follow a shift adjustment
        function shiftonly(xy, side, opposite) {
            if(newCorner[xy] === CENTER) {
                css[MARGIN+'-'+side] = shift[xy] = offset[MARGIN+'-'+side] - adjust[side];
            }
            else {
                props = offset[opposite] !== undefined ?
                    [ adjust[side], -offset[side] ] : [ -adjust[side], offset[side] ];
                if( (shift[xy] = Math.max(props[0], props[1])) > props[0] ) {
                    pos[side] -= adjust[side];
                    shift[side] = FALSE;
                }
                css[ offset[opposite] !== undefined ? opposite : side ] = shift[xy];
            }
        }
        // If our tip position isn't fixed e.g. doesn't adjust with viewport...
        if(this.corner.fixed !== TRUE) {
            // Perform shift/flip adjustments
            shiftflip(horizontal, X, Y, LEFT, RIGHT);
            shiftflip(vertical, Y, X, TOP, BOTTOM);
            // Update and redraw the tip if needed (check cached details of last drawn tip)
            if(newCorner.string() !== cache.corner.string() && (cache.cornerTop !== adjust.top || cache.cornerLeft !== adjust.left)) {
                this.update(newCorner, FALSE);
            }
        }
        // Setup tip offset properties
        offset = this.calculate(newCorner);
        // Readjust offset object to make it left/top
        if(offset.right !== undefined) { offset.left = -offset.right; }
        if(offset.bottom !== undefined) { offset.top = -offset.bottom; }
        offset.user = this.offset;
        // Perform shift adjustments (deliberate assignments inside the conditions)
        if(shift.left = (horizontal === SHIFT && !!adjust.left)) { shiftonly(X, LEFT, RIGHT); }
        if(shift.top = (vertical === SHIFT && !!adjust.top)) { shiftonly(Y, TOP, BOTTOM); }
        /*
         * If the tip is adjusted in both dimensions, or in a
         * direction that would cause it to be anywhere but the
         * outer border, hide it!
         */
        this.element.css(css).toggle(
            !((shift.x && shift.y) || (newCorner.x === CENTER && shift.y) || (newCorner.y === CENTER && shift.x))
        );
        // Adjust position to accommodate tip dimensions
        // (a string offset — '50%' — means the user offset applies instead)
        pos.left -= offset.left.charAt ? offset.user :
            horizontal !== SHIFT || shift.top || !shift.left && !shift.top ? offset.left + this.border : 0;
        pos.top -= offset.top.charAt ? offset.user :
            vertical !== SHIFT || shift.left || !shift.left && !shift.top ? offset.top + this.border : 0;
        // Cache details
        cache.cornerLeft = adjust.left; cache.cornerTop = adjust.top;
        cache.corner = newCorner.clone();
    },
destroy: function() {
// Unbind events
this.qtip._unbind(this.qtip.tooltip, this._ns);
// Remove the tip element(s)
if(this.qtip.elements.tip) {
this.qtip.elements.tip.find('*')
.remove().end().remove();
}
}
});
// Plugin factory: builds a Tip instance for the given qTip API,
// configured from the tooltip's style.tip options.
TIP = PLUGINS.tip = function(api) {
    return new Tip(api, api.options.style.tip);
};

// Initialize tip on render (the tip element attaches to the rendered tooltip)
TIP.initialize = 'render';
// Normalise user-supplied style.tip options into the canonical object form
TIP.sanitize = function(options) {
    var style = options.style;
    if(!style || !('tip' in style)) { return; }

    var opts = style.tip;

    // Shorthand: style.tip = 'corner string' or boolean
    if(typeof opts !== 'object') {
        opts = style.tip = { corner: opts };
    }

    // Corner must be a string or boolean; anything else defaults to true
    if(!(/string|boolean/i).test(typeof opts.corner)) {
        opts.corner = TRUE;
    }
};
// Add new option checks for the plugin: when these option keys change at
// runtime, re-create/redraw the tip and reposition the tooltip as needed.
CHECKS.tip = {
    '^position.my|style.tip.(corner|mimic|border)$': function() {
        // Make sure a tip can be drawn
        this.create();
        // Reposition the tooltip
        this.qtip.reposition();
    },
    '^style.tip.(height|width)$': function(obj) {
        // Re-set dimensions and redraw the tip
        this.size = [ obj.width, obj.height ];
        this.update();
        // Reposition the tooltip
        this.qtip.reposition();
    },
    '^content.title|style.(classes|widget)$': function() {
        // Title/class changes only need the tip repainted
        this.update();
    }
};
// Extend original qTip defaults with the tip plugin's style defaults
// (see TIP.sanitize above for how shorthand values are normalised)
$.extend(TRUE, QTIP.defaults, {
    style: {
        tip: {
            corner: TRUE,
            mimic: FALSE,
            width: 6,
            height: 6,
            border: TRUE,
            offset: 0
        }
    }
});
;var MODAL, OVERLAY,
MODALCLASS = 'qtip-modal',
MODALSELECTOR = '.'+MODALCLASS;

/*
 * Shared modal backdrop singleton. Manages the single #qtip-overlay element,
 * keyboard focus containment and the show/hide animation used by every
 * modal tooltip on the page.
 */
OVERLAY = function()
{
    var self = this,
        focusableElems = {},
        current, onLast,
        prevState, elem;

    // Modified code from jQuery UI 1.10.0 source
    // http://code.jquery.com/ui/1.10.0/jquery-ui.js
    function focusable(element) {
        // Use the defined focusable checker when possible
        // NOTE(review): this returns the selector *function* itself (always
        // truthy) instead of invoking it on `element` -- looks like it should
        // be $.expr[':'].focusable(element); confirm against jQuery UI usage.
        if($.expr[':'].focusable) { return $.expr[':'].focusable; }
        var isTabIndexNotNaN = !isNaN($.attr(element, 'tabindex')),
            nodeName = element.nodeName && element.nodeName.toLowerCase(),
            map, mapName, img;
        if('area' === nodeName) {
            map = element.parentNode;
            mapName = map.name;
            if(!element.href || !mapName || map.nodeName.toLowerCase() !== 'map') {
                return false;
            }
            // NOTE(review): `img` is a raw DOM node here, so img.is(':visible')
            // would throw if reached -- jQuery UI wraps it first; verify.
            img = $('img[usemap=#' + mapName + ']')[0];
            return !!img && img.is(':visible');
        }
        return (/input|select|textarea|button|object/.test( nodeName ) ?
            !element.disabled :
            'a' === nodeName ?
                element.href || isTabIndexNotNaN :
                isTabIndexNotNaN
        );
    }

    // Focus inputs using cached focusable elements (see update())
    function focusInputs(blurElems) {
        // Blurring body element in IE causes window.open windows to unfocus!
        if(focusableElems.length < 1 && blurElems.length) { blurElems.not('body').blur(); }
        // Focus the inputs
        else { focusableElems.first().focus(); }
    }

    // Steal focus from elements outside tooltip
    function stealFocus(event) {
        if(!elem.is(':visible')) { return; }
        var target = $(event.target),
            tooltip = current.tooltip,
            container = target.closest(SELECTOR),
            targetOnTop;
        // Determine if input container target is above this
        targetOnTop = container.length < 1 ? FALSE :
            (parseInt(container[0].style.zIndex, 10) > parseInt(tooltip[0].style.zIndex, 10));
        // If we're showing a modal, but focus has landed on an input below
        // this modal, divert focus to the first visible input in this modal
        // or if we can't find one... the tooltip itself
        if(!targetOnTop && target.closest(SELECTOR)[0] !== tooltip[0]) {
            focusInputs(target);
        }
        // Detect when we leave the last focusable element...
        onLast = event.target === focusableElems[focusableElems.length - 1];
    }

    $.extend(self, {
        init: function() {
            // Create document overlay
            elem = self.elem = $('<div />', {
                id: 'qtip-overlay',
                html: '<div></div>',
                mousedown: function() { return FALSE; }
            })
            .hide();
            // Make sure we can't focus anything outside the tooltip
            $(document.body).bind('focusin'+MODALSELECTOR, stealFocus);
            // Apply keyboard "Escape key" close handler
            $(document).bind('keydown'+MODALSELECTOR, function(event) {
                if(current && current.options.show.modal.escape && event.keyCode === 27) {
                    current.hide(event);
                }
            });
            // Apply click handler for blur option
            elem.bind('click'+MODALSELECTOR, function(event) {
                if(current && current.options.show.modal.blur) {
                    current.hide(event);
                }
            });
            return self;
        },
        update: function(api) {
            // Update current API reference
            current = api;
            // Update focusable elements if enabled
            if(api.options.show.modal.stealfocus !== FALSE) {
                focusableElems = api.tooltip.find('*').filter(function() {
                    return focusable(this);
                });
            }
            else { focusableElems = []; }
        },
        toggle: function(api, state, duration) {
            var docBody = $(document.body),
                tooltip = api.tooltip,
                options = api.options.show.modal,
                effect = options.effect,
                type = state ? 'show': 'hide',
                visible = elem.is(':visible'),
                visibleModals = $(MODALSELECTOR).filter(':visible:not(:animated)').not(tooltip),
                zindex;
            // Set active tooltip API reference
            self.update(api);
            // If the modal can steal the focus...
            // Blur the current item and focus anything in the modal we can
            if(state && options.stealfocus !== FALSE) {
                focusInputs( $(':focus') );
            }
            // Toggle backdrop cursor style on show
            elem.toggleClass('blurs', options.blur);
            // Append to body on show
            if(state) {
                elem.appendTo(document.body);
            }
            // Prevent modal from conflicting with show.solo, and don't hide backdrop if other modals are visible
            if((elem.is(':animated') && visible === state && prevState !== FALSE) || (!state && visibleModals.length)) {
                return self;
            }
            // Stop all animations
            elem.stop(TRUE, FALSE);
            // Use custom function if provided
            if($.isFunction(effect)) {
                effect.call(elem, state);
            }
            // If no effect type is supplied, use a simple toggle
            else if(effect === FALSE) {
                elem[ type ]();
            }
            // Use basic fade function
            else {
                elem.fadeTo( parseInt(duration, 10) || 90, state ? 1 : 0, function() {
                    if(!state) { elem.hide(); }
                });
            }
            // Reset position and detach from body on hide
            if(!state) {
                elem.queue(function(next) {
                    elem.css({ left: '', top: '' });
                    if(!$(MODALSELECTOR).length) { elem.detach(); }
                    next();
                });
            }
            // Cache the state
            prevState = state;
            // If the tooltip is destroyed, set reference to null
            if(current.destroyed) { current = NULL; }
            return self;
        }
    });

    self.init();
};
// Replace the constructor with the page-wide singleton instance
OVERLAY = new OVERLAY();
/*
 * Per-tooltip modal plugin instance. Stores the owning qTip API and the
 * show.modal options, then wires itself up via init().
 */
function Modal(api, options) {
    this.options = options;
    this._ns = '-modal';

    // Cache the API reference before initialising against it
    this.qtip = api;
    this.init(this.qtip);
}
// Modal plugin instance methods: wires the shared OVERLAY backdrop into a
// single tooltip's show/hide/focus lifecycle.
$.extend(Modal.prototype, {
    init: function(qtip) {
        var tooltip = qtip.tooltip;
        // If modal is disabled... return
        if(!this.options.on) { return this; }
        // Set overlay reference
        qtip.elements.overlay = OVERLAY.elem;
        // Add unique attribute so we can grab modal tooltips easily via a SELECTOR, and set z-index
        tooltip.addClass(MODALCLASS).css('z-index', QTIP.modal_zindex + $(MODALSELECTOR).length);
        // Apply our show/hide/focus modal events
        qtip._bind(tooltip, ['tooltipshow', 'tooltiphide'], function(event, api, duration) {
            var oEvent = event.originalEvent;
            // Make sure mouseout doesn't trigger a hide when showing the modal and mousing onto backdrop
            if(event.target === tooltip[0]) {
                if(oEvent && event.type === 'tooltiphide' && /mouse(leave|enter)/.test(oEvent.type) && $(oEvent.relatedTarget).closest(OVERLAY.elem[0]).length) {
                    try { event.preventDefault(); } catch(e) {}
                }
                else if(!oEvent || (oEvent && oEvent.type !== 'tooltipsolo')) {
                    this.toggle(event, event.type === 'tooltipshow', duration);
                }
            }
        }, this._ns, this);
        // Adjust modal z-index on tooltip focus
        qtip._bind(tooltip, 'tooltipfocus', function(event, api) {
            // If focus was cancelled before it reached us, don't do anything
            if(event.isDefaultPrevented() || event.target !== tooltip[0]) { return; }
            var qtips = $(MODALSELECTOR),
                // Keep the modal's lower than other, regular qtips
                newIndex = QTIP.modal_zindex + qtips.length,
                curIndex = parseInt(tooltip[0].style.zIndex, 10);
            // Set overlay z-index (just beneath the focused modal)
            OVERLAY.elem[0].style.zIndex = newIndex - 1;
            // Reduce modal z-index's and keep them properly ordered
            qtips.each(function() {
                if(this.style.zIndex > curIndex) {
                    this.style.zIndex -= 1;
                }
            });
            // Fire blur event for focused tooltip
            qtips.filter('.' + CLASS_FOCUS).qtip('blur', event.originalEvent);
            // Set the new z-index
            tooltip.addClass(CLASS_FOCUS)[0].style.zIndex = newIndex;
            // Set current
            OVERLAY.update(api);
            // Prevent default handling
            try { event.preventDefault(); } catch(e) {}
        }, this._ns, this);
        // Focus any other visible modals when this one hides
        qtip._bind(tooltip, 'tooltiphide', function(event) {
            if(event.target === tooltip[0]) {
                $(MODALSELECTOR).filter(':visible').not(tooltip).last().qtip('focus', event);
            }
        }, this._ns, this);
    },
    toggle: function(event, state, duration) {
        // Make sure default event hasn't been prevented
        if(event && event.isDefaultPrevented()) { return this; }
        // Toggle the shared backdrop for this tooltip
        OVERLAY.toggle(this.qtip, !!state, duration);
    },
    destroy: function() {
        // Remove modal class
        this.qtip.tooltip.removeClass(MODALCLASS);
        // Remove bound events
        this.qtip._unbind(this.qtip.tooltip, this._ns);
        // Hide the backdrop and delete element reference
        OVERLAY.toggle(this.qtip, FALSE);
        delete this.qtip.elements.overlay;
    }
});
// Plugin factory: builds a Modal instance from the tooltip's show.modal options
MODAL = PLUGINS.modal = function(api) {
    return new Modal(api, api.options.show.modal);
};
// Coerce the user-supplied show.modal option into its canonical object form
MODAL.sanitize = function(opts) {
    var show = opts.show;
    if(!show) { return; }

    if(typeof show.modal !== 'object') {
        // Shorthand truthy/falsy value => { on: <bool> }
        show.modal = { on: !!show.modal };
    }
    else if(typeof show.modal.on === 'undefined') {
        // Object form without an explicit "on" flag defaults to enabled
        show.modal.on = TRUE;
    }
};
// Base z-index for all modal tooltips (use qTip core z-index as a base,
// kept lower than regular tooltips -- see the tooltipfocus handler above)
QTIP.modal_zindex = QTIP.zindex - 200;
// Plugin needs to be initialized on render
MODAL.initialize = 'render';
// Option-change handlers for the modal plugin
CHECKS.modal = {
    '^show.modal.(on|blur)$': function() {
        // Tear down and re-initialise with the new option values
        this.destroy();
        this.init();

        // Show the modal backdrop if the tooltip is currently visible.
        // FIX: was `this.qtip.elems.overlay`, but the API stores element
        // references under `elements` (see Modal.prototype.init), so `elems`
        // was undefined and this line threw a TypeError.
        this.qtip.elements.overlay.toggle(
            this.qtip.tooltip[0].offsetWidth > 0
        );
    }
};
// Extend original api defaults with the modal plugin's show.modal defaults
// (MODAL.sanitize above normalises shorthand values into this shape)
$.extend(TRUE, QTIP.defaults, {
    show: {
        modal: {
            on: FALSE,
            effect: TRUE,
            blur: TRUE,
            stealfocus: TRUE,
            escape: TRUE
        }
    }
});
/*
 * Viewport positioning plugin: constrains the tooltip within the configured
 * viewport element using the shift / flip / flipinvert adjustment methods,
 * and returns the {left, top} pixel adjustment that was applied.
 */
;PLUGINS.viewport = function(api, position, posOptions, targetWidth, targetHeight, elemWidth, elemHeight)
{
    var target = posOptions.target,
        tooltip = api.elements.tooltip,
        my = posOptions.my,
        at = posOptions.at,
        adjust = posOptions.adjust,
        method = adjust.method.split(' '),
        methodX = method[0],
        methodY = method[1] || method[0],
        viewport = posOptions.viewport,
        container = posOptions.container,
        cache = api.cache,
        adjusted = { left: 0, top: 0 },
        fixed, newMy, newClass, containerOffset, containerStatic,
        viewportWidth, viewportHeight, viewportScroll, viewportOffset;

    // If viewport is not a jQuery element, or it's the window/document, or no adjustment method is used... return
    if(!viewport.jquery || target[0] === window || target[0] === document.body || adjust.method === 'none') {
        return adjusted;
    }

    // Cache container details
    containerOffset = container.offset() || adjusted;
    containerStatic = container.css('position') === 'static';

    // Cache our viewport details
    fixed = tooltip.css('position') === 'fixed';
    viewportWidth = viewport[0] === window ? viewport.width() : viewport.outerWidth(FALSE);
    viewportHeight = viewport[0] === window ? viewport.height() : viewport.outerHeight(FALSE);
    viewportScroll = { left: fixed ? 0 : viewport.scrollLeft(), top: fixed ? 0 : viewport.scrollTop() };
    viewportOffset = viewport.offset() || adjusted;

    // Generic per-axis calculation; returns the applied delta for that axis
    function calculate(side, otherSide, type, adjust, side1, side2, lengthName, targetLength, elemLength) {
        var initialPos = position[side1],
            mySide = my[side],
            atSide = at[side],
            isShift = type === SHIFT,
            myLength = mySide === side1 ? elemLength : mySide === side2 ? -elemLength : -elemLength / 2,
            atLength = atSide === side1 ? targetLength : atSide === side2 ? -targetLength : -targetLength / 2,
            sideOffset = viewportScroll[side1] + viewportOffset[side1] - (containerStatic ? 0 : containerOffset[side1]),
            overflow1 = sideOffset - initialPos,
            overflow2 = initialPos + elemLength - (lengthName === WIDTH ? viewportWidth : viewportHeight) - sideOffset,
            offset = myLength - (my.precedance === side || mySide === my[otherSide] ? atLength : 0) - (atSide === CENTER ? targetLength / 2 : 0);

        // shift
        if(isShift) {
            offset = (mySide === side1 ? 1 : -1) * myLength;

            // Adjust position but keep it within viewport dimensions
            position[side1] += overflow1 > 0 ? overflow1 : overflow2 > 0 ? -overflow2 : 0;
            position[side1] = Math.max(
                -containerOffset[side1] + viewportOffset[side1],
                initialPos - offset,
                Math.min(
                    Math.max(
                        -containerOffset[side1] + viewportOffset[side1] + (lengthName === WIDTH ? viewportWidth : viewportHeight),
                        initialPos + offset
                    ),
                    position[side1],

                    // Make sure we don't adjust complete off the element when using 'center'
                    mySide === 'center' ? initialPos - myLength : 1E9
                )
            );
        }
        // flip/flipinvert
        else {
            // Update adjustment amount depending on if using flipinvert or flip
            adjust *= (type === FLIPINVERT ? 2 : 0);

            // Check for overflow on the left/top
            if(overflow1 > 0 && (mySide !== side1 || overflow2 > 0)) {
                position[side1] -= offset + adjust;
                newMy.invert(side, side1);
            }
            // Check for overflow on the bottom/right
            else if(overflow2 > 0 && (mySide !== side2 || overflow1 > 0) ) {
                position[side1] -= (mySide === CENTER ? -offset : offset) + adjust;
                newMy.invert(side, side2);
            }

            // Make sure we haven't made things worse with the adjustment and reset if so.
            // FIX: the original compared against the `viewportScroll` *object*
            // ({left, top}), which coerces to NaN so the reset never fired;
            // the side-specific scroll offset is the intended operand.
            if(position[side1] < viewportScroll[side1] && -position[side1] > overflow2) {
                position[side1] = initialPos; newMy = my.clone();
            }
        }

        return position[side1] - initialPos;
    }

    // Set newMy if using flip or flipinvert methods
    if(methodX !== 'shift' || methodY !== 'shift') { newMy = my.clone(); }

    // Adjust position based on viewport and adjustment options
    adjusted = {
        left: methodX !== 'none' ? calculate( X, Y, methodX, adjust.x, LEFT, RIGHT, WIDTH, targetWidth, elemWidth ) : 0,
        top: methodY !== 'none' ? calculate( Y, X, methodY, adjust.y, TOP, BOTTOM, HEIGHT, targetHeight, elemHeight ) : 0
    };

    // Set tooltip position class if it's changed
    if(newMy && cache.lastClass !== (newClass = NAMESPACE + '-pos-' + newMy.abbrev())) {
        tooltip.removeClass(api.cache.lastClass).addClass( (api.cache.lastClass = newClass) );
    }

    return adjusted;
};
// Geometric helpers that reduce a shape to a {width, height, position}
// descriptor for a given corner.
;PLUGINS.polys = {
    // POLY area coordinate calculator
    // Special thanks to Ed Cradock for helping out with this.
    // Uses a binary search algorithm to find suitable coordinates.
    polygon: function(baseCoords, corner) {
        var result = {
            width: 0, height: 0,
            position: {
                top: 1e10, right: 0,
                bottom: 0, left: 1e10
            },
            adjustable: FALSE
        },
        i = 0, next,
        coords = [],
        compareX = 1, compareY = 1,
        realX = 0, realY = 0,
        newWidth, newHeight;
        // First pass, sanitize coords and determine outer edges
        // (walks the flat [x,y,x,y,...] list backwards two items at a time)
        i = baseCoords.length; while(i--) {
            next = [ parseInt(baseCoords[--i], 10), parseInt(baseCoords[i+1], 10) ];
            if(next[0] > result.position.right){ result.position.right = next[0]; }
            if(next[0] < result.position.left){ result.position.left = next[0]; }
            if(next[1] > result.position.bottom){ result.position.bottom = next[1]; }
            if(next[1] < result.position.top){ result.position.top = next[1]; }
            coords.push(next);
        }
        // Calculate height and width from outer edges
        newWidth = result.width = Math.abs(result.position.right - result.position.left);
        newHeight = result.height = Math.abs(result.position.bottom - result.position.top);
        // If it's the center corner...
        if(corner.abbrev() === 'c') {
            result.position = {
                left: result.position.left + (result.width / 2),
                top: result.position.top + (result.height / 2)
            };
        }
        else {
            // Second pass, use a binary search algorithm to locate most suitable coordinate
            while(newWidth > 0 && newHeight > 0 && compareX > 0 && compareY > 0)
            {
                newWidth = Math.floor(newWidth / 2);
                newHeight = Math.floor(newHeight / 2);
                if(corner.x === LEFT){ compareX = newWidth; }
                else if(corner.x === RIGHT){ compareX = result.width - newWidth; }
                else{ compareX += Math.floor(newWidth / 2); }
                if(corner.y === TOP){ compareY = newHeight; }
                else if(corner.y === BOTTOM){ compareY = result.height - newHeight; }
                else{ compareY += Math.floor(newHeight / 2); }
                // Discard candidate points on the wrong side of the pivot
                i = coords.length; while(i--)
                {
                    if(coords.length < 2){ break; }
                    realX = coords[i][0] - result.position.left;
                    realY = coords[i][1] - result.position.top;
                    if((corner.x === LEFT && realX >= compareX) ||
                        (corner.x === RIGHT && realX <= compareX) ||
                        (corner.x === CENTER && (realX < compareX || realX > (result.width - compareX))) ||
                        (corner.y === TOP && realY >= compareY) ||
                        (corner.y === BOTTOM && realY <= compareY) ||
                        (corner.y === CENTER && (realY < compareY || realY > (result.height - compareY)))) {
                        coords.splice(i, 1);
                    }
                }
            }
            result.position = { left: coords[0][0], top: coords[0][1] };
        }
        return result;
    },
    // Axis-aligned rectangle from two opposite corner points
    rect: function(ax, ay, bx, by) {
        return {
            width: Math.abs(bx - ax),
            height: Math.abs(by - ay),
            position: {
                left: Math.min(ax, bx),
                top: Math.min(ay, by)
            }
        };
    },
    // Fractions of PI mapping each corner abbreviation to an angle
    _angles: {
        tc: 3 / 2, tr: 7 / 4, tl: 5 / 4,
        bc: 1 / 2, br: 1 / 4, bl: 3 / 4,
        rc: 2, lc: 1, c: 0
    },
    ellipse: function(cx, cy, rx, ry, corner) {
        var c = PLUGINS.polys._angles[ corner.abbrev() ],
            rxc = c === 0 ? 0 : rx * Math.cos( c * Math.PI ),
            rys = ry * Math.sin( c * Math.PI );
        return {
            width: (rx * 2) - Math.abs(rxc),
            height: (ry * 2) - Math.abs(rys),
            position: {
                left: cx + rxc,
                top: cy + rys
            },
            adjustable: FALSE
        };
    },
    // Circle is just an ellipse with equal radii
    circle: function(cx, cy, r, corner) {
        return PLUGINS.polys.ellipse(cx, cy, r, r, corner);
    }
};
// Computes a position/dimension descriptor for an SVG element so tooltips
// can be anchored to SVG shapes (circle/ellipse/line/poly*, or bounding box).
;PLUGINS.svg = function(api, svg, corner)
{
    var doc = $(document),
        elem = svg[0],
        root = $(elem.ownerSVGElement),
        xScale = 1, yScale = 1,
        complex = true,
        rootWidth, rootHeight,
        mtx, transformed, viewBox,
        len, next, i, points,
        result, position, dimensions;
    // Ascend the parentNode chain until we find an element with getBBox()
    while(!elem.getBBox) { elem = elem.parentNode; }
    if(!elem.getBBox || !elem.parentNode) { return FALSE; }
    // Determine dimensions where possible
    rootWidth = root.attr('width') || root.width() || parseInt(root.css('width'), 10);
    rootHeight = root.attr('height') || root.height() || parseInt(root.css('height'), 10);
    // Add stroke characteristics to scaling
    var strokeWidth2 = (parseInt(svg.css('stroke-width'), 10) || 0) / 2;
    if(strokeWidth2) {
        xScale += strokeWidth2 / rootWidth;
        yScale += strokeWidth2 / rootHeight;
    }
    // Determine which shape calculation to use
    switch(elem.nodeName) {
        case 'ellipse':
        case 'circle':
            result = PLUGINS.polys.ellipse(
                elem.cx.baseVal.value,
                elem.cy.baseVal.value,
                (elem.rx || elem.r).baseVal.value + strokeWidth2,
                (elem.ry || elem.r).baseVal.value + strokeWidth2,
                corner
            );
            break;
        case 'line':
        case 'polygon':
        case 'polyline':
            // Determine points object (line has none, so mimic using array)
            points = elem.points || [
                { x: elem.x1.baseVal.value, y: elem.y1.baseVal.value },
                { x: elem.x2.baseVal.value, y: elem.y2.baseVal.value }
            ];
            // Flatten point list into [x, y, x, y, ...] for the polygon calc
            for(result = [], i = -1, len = points.numberOfItems || points.length; ++i < len;) {
                next = points.getItem ? points.getItem(i) : points[i];
                result.push.apply(result, [next.x, next.y]);
            }
            result = PLUGINS.polys.polygon(result, corner);
            break;
        // Unknown shape or rectangle? Use bounding box
        default:
            result = elem.getBoundingClientRect();
            result = {
                width: result.width, height: result.height,
                position: {
                    left: result.left,
                    top: result.top
                }
            };
            complex = false;
            break;
    }
    // Shortcut assignments
    position = result.position;
    root = root[0];
    // If the shape was complex (i.e. not using bounding box calculations)
    if(complex) {
        // Convert position into a pixel value using the screen transform matrix
        if(root.createSVGPoint) {
            mtx = elem.getScreenCTM();
            points = root.createSVGPoint();
            points.x = position.left;
            points.y = position.top;
            transformed = points.matrixTransform( mtx );
            position.left = transformed.x;
            position.top = transformed.y;
        }
        // Calculate viewBox characteristics
        if(root.viewBox && (viewBox = root.viewBox.baseVal) && viewBox.width && viewBox.height) {
            xScale *= rootWidth / viewBox.width;
            yScale *= rootHeight / viewBox.height;
        }
    }
    // Adjust by scroll offset
    position.left += doc.scrollLeft();
    position.top += doc.scrollTop();
    return result;
};
// Computes a position/dimension descriptor for an image-map <area>, by
// delegating to the matching PLUGINS.polys shape calculator.
;PLUGINS.imagemap = function(api, area, corner, adjustMethod)
{
    if(!area.jquery) { area = $(area); }
    var shape = area.attr('shape').toLowerCase().replace('poly', 'polygon'),
        image = $('img[usemap="#'+area.parent('map').attr('name')+'"]'),
        coordsString = $.trim(area.attr('coords')),
        coordsArray = coordsString.replace(/,$/, '').split(','),
        imageOffset, coords, i, next, result, len;
    // If we can't find the image using the map...
    if(!image.length) { return FALSE; }
    // Pass coordinates string if polygon
    if(shape === 'polygon') {
        result = PLUGINS.polys.polygon(coordsArray, corner);
    }
    // Otherwise parse the coordinates and pass them as arguments
    else if(PLUGINS.polys[shape]) {
        for(i = -1, len = coordsArray.length, coords = []; ++i < len;) {
            coords.push( parseInt(coordsArray[i], 10) );
        }
        result = PLUGINS.polys[shape].apply(
            this, coords.concat(corner)
        );
    }
    // If no shape calculation method was found, return false
    else { return FALSE; }
    // Make sure we account for padding and borders on the image
    imageOffset = image.offset();
    imageOffset.left += Math.ceil((image.outerWidth(FALSE) - image.width()) / 2);
    imageOffset.top += Math.ceil((image.outerHeight(FALSE) - image.height()) / 2);
    // Add image position to offset coordinates
    result.position.left += imageOffset.left;
    result.position.top += imageOffset.top;
    return result;
};;var IE6,
/*
 * BGIFrame adaption (http://plugins.jquery.com/project/bgiframe)
 * Special thanks to Brandon Aaron
 */
// Transparent iframe shim layered behind the tooltip so windowed controls
// (select/object) don't bleed through the tooltip in IE6.
BGIFRAME = '<iframe class="qtip-bgiframe" frameborder="0" tabindex="-1" src="javascript:\'\';" ' +
    ' style="display:block; position:absolute; z-index:-1; filter:alpha(opacity=0); ' +
    '-ms-filter:"progid:DXImageTransform.Microsoft.Alpha(Opacity=0)";"></iframe>';
/*
 * IE6 compatibility plugin constructor: stores the owning qTip API under
 * its own namespace and runs init().
 * FIX: dropped the unused second parameter `qtip` -- the IE6 factory below
 * only ever passes `api`, so the extra parameter was dead weight.
 */
function Ie6(api) {
    this._ns = 'ie6';
    this.init( (this.qtip = api) );
}
// IE6 plugin instance methods: bgiframe shim management, fake position:fixed
// for the modal overlay, and max/min-width simulation.
$.extend(Ie6.prototype, {
    // Pin the modal overlay to the viewport top (IE6 lacks position:fixed)
    _scroll : function() {
        var overlay = this.qtip.elements.overlay;
        overlay && (overlay[0].style.top = $(window).scrollTop() + 'px');
    },
    init: function(qtip) {
        var tooltip = qtip.tooltip;
        // Create the BGIFrame element if needed
        if($('select, object').length < 1) {
            this.bgiframe = qtip.elements.bgiframe = $(BGIFRAME).appendTo(tooltip);
            // Update BGIFrame on tooltip move
            qtip._bind(tooltip, 'tooltipmove', this.adjustBGIFrame, this._ns, this);
        }
        // redraw() container for width/height calculations
        this.redrawContainer = $('<div/>', { id: NAMESPACE+'-rcontainer' })
            .appendTo(document.body);
        // Fixup modal plugin if present too
        if( qtip.elements.overlay && qtip.elements.overlay.addClass('qtipmodal-ie6fix') ) {
            qtip._bind(window, ['scroll', 'resize'], this._scroll, this._ns, this);
            qtip._bind(tooltip, ['tooltipshow'], this._scroll, this._ns, this);
        }
        // Set dimensions
        this.redraw();
    },
    // Resize/offset the bgiframe shim to exactly cover the tooltip (and tip)
    adjustBGIFrame: function() {
        var tooltip = this.qtip.tooltip,
            dimensions = {
                height: tooltip.outerHeight(FALSE),
                width: tooltip.outerWidth(FALSE)
            },
            plugin = this.qtip.plugins.tip,
            tip = this.qtip.elements.tip,
            tipAdjust, offset;
        // Adjust border offset
        offset = parseInt(tooltip.css('borderLeftWidth'), 10) || 0;
        offset = { left: -offset, top: -offset };
        // Adjust for tips plugin
        if(plugin && tip) {
            tipAdjust = (plugin.corner.precedance === 'x') ? [WIDTH, LEFT] : [HEIGHT, TOP];
            offset[ tipAdjust[1] ] -= tip[ tipAdjust[0] ]();
        }
        // Update bgiframe
        this.bgiframe.css(offset).css(dimensions);
    },
    // Max/min width simulator function (IE6 ignores max/min-width in CSS)
    redraw: function() {
        // Re-entrancy guard: bail while a redraw is already in progress
        if(this.qtip.rendered < 1 || this.drawing) { return this; }
        var tooltip = this.qtip.tooltip,
            style = this.qtip.options.style,
            container = this.qtip.options.position.container,
            perc, width, max, min;
        // Set drawing flag.
        // FIX: was `this.qtip.drawing = 1` -- but the guard above and the
        // reset below both use `this.drawing`, so the flag never engaged
        // (and the stray property leaked onto the API object).
        this.drawing = 1;
        // If tooltip has a set height/width, just set it... like a boss!
        if(style.height) { tooltip.css(HEIGHT, style.height); }
        if(style.width) { tooltip.css(WIDTH, style.width); }
        // Simulate max/min width if not set width present...
        else {
            // Reset width and add fluid class
            tooltip.css(WIDTH, '').appendTo(this.redrawContainer);
            // Grab our tooltip width (add 1 if odd so we don't get wrapping problems.. huzzah!)
            width = tooltip.width();
            if(width % 2 < 1) { width += 1; }
            // Grab our max/min properties
            max = tooltip.css('maxWidth') || '';
            min = tooltip.css('minWidth') || '';
            // Parse into proper pixel values
            perc = (max + min).indexOf('%') > -1 ? container.width() / 100 : 0;
            max = ((max.indexOf('%') > -1 ? perc : 1) * parseInt(max, 10)) || width;
            min = ((min.indexOf('%') > -1 ? perc : 1) * parseInt(min, 10)) || 0;
            // Determine new dimension size based on max/min/current values
            width = max + min ? Math.min(Math.max(width, min), max) : width;
            // Set the newly calculated width and remove fluid class
            tooltip.css(WIDTH, Math.round(width)).appendTo(container);
        }
        // Reset drawing flag
        this.drawing = 0;
        return this;
    },
    destroy: function() {
        // Remove iframe
        this.bgiframe && this.bgiframe.remove();
        // Remove bound events
        this.qtip._unbind([window, this.qtip.tooltip], this._ns);
    }
});
// Plugin factory: only instantiates the plugin when the browser is IE6
IE6 = PLUGINS.ie6 = function(api) {
    // Proceed only if the browser is IE6
    return BROWSER.ie === 6 ? new Ie6(api) : FALSE;
};
// Plugin needs a rendered tooltip before it can initialize
IE6.initialize = 'render';
// Option-change handler: any content/style change requires a width redraw
CHECKS.ie6 = {
    '^content|style$': function() {
        this.redraw();
    }
};;}));
}( window, document ));
|
mit
|
ihorlaitan/poet
|
server.py
|
10080
|
#!/usr/bin/python2.7
import os
import sys
import zlib
import base64
import socket
import os.path
import argparse
from datetime import datetime
import debug
import module
import config as CFG
from poetsocket import *
__version__ = '0.4.4'
POSH_PROMPT = 'posh > '
FAKEOK = """HTTP/1.1 200 OK\r
Date: Tue, 19 Mar 2013 22:12:25 GMT\r
Server: Apache\r
X-Powered-By: PHP/5.3.10-1ubuntu3.2\r
Content-Length: 364\r
Content-Type: text/plain\r
\r
body{background-color:#f0f0f2;margin:0;padding:0;font-family:"Open Sans","Helvetica Neue",Helvetica,Arial,sans-serif}div{width:600px;margin:5em auto;padding:50px;background-color:#fff;border-radius:1em}a:link,a:visited{color:#38488f;text-decoration:none}@media (max-width:700px){body{background-color:#fff}div{width:auto;margin:0 auto;border-radius:0;padding:1em}}"""
class PoetSocketServer(PoetSocket):
    """Listening TCP socket bound to all interfaces on the given port."""

    def __init__(self, port):
        listener = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        # allow quick restarts without waiting for TIME_WAIT to expire
        listener.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        listener.bind(('', port))
        listener.listen(1)
        self.s = listener

    def accept(self):
        """Block until a client connects; returns (conn, address)."""
        return self.s.accept()
class PoetServer(object):
"""Core server functionality.
Implements control shell, and necessary helper functions.
Attributes:
s: socket instance for initial client connection
conn: socket instance for actual client communication
cmds: list of supported control shell commands
"""
def __init__(self, s):
self.s = s
self.conn = None
self.builtins = ['exit', 'help']
# exists so modules can stop server (used by selfdestruct)
self.continue_ = True
def start(self):
"""Poet server control shell."""
debug.info('Entering control shell')
self.conn = PoetSocket(self.s.accept()[0])
print 'Welcome to posh, the Poet Shell!'
print 'Running `help\' will give you a list of supported commands.'
while True:
try:
found = False
argv = raw_input(POSH_PROMPT).split()
#
# builtins
#
if argv == []:
continue
if argv[0] == 'exit':
break
elif argv[0] == 'help':
found = True
print 'Commands:\n {}'.format('\n '.join(sorted(self.builtins + module.server_commands.keys())))
#
# modules
#
# try to find command in registered modules
for cmd, func in module.server_commands.iteritems():
if argv[0] == cmd:
found = True
try:
func(self, argv)
except Exception as e:
self.info(str(e.args))
# see comment above for self.continue_ for why this is here
if not self.continue_:
return
if not found:
self.info('{}: command not found'.format(argv[0]))
except KeyboardInterrupt:
print
continue
except EOFError:
print
break
self.conn.send('fin')
debug.info('Exiting control shell')
def info(self, msg):
print 'posh : {}'.format(msg)
def generic(self, req, write_flag=False, write_file=None):
"""Abstraction layer for exchanging with client and writing to file.
Args:
req: command to send to client
write_flag: whether client response should be written
write_file: optional filename to use for file
"""
resp = self.conn.exchange(req)
# TODO: this hardcoding is bad, should be some generic way to see
# if response should be decompressed. maybe a list of all keywords
# which cause a compressed response to come back
if req == 'recon':
resp = zlib.decompress(resp)
print resp
if write_flag:
self.write(resp, req.split()[0], write_file)
def write(self, response, prefix, write_file=None):
"""Write to server archive.
Args:
response: data to write
prefix: directory to write file to (usually named after command
executed)
write_file: optional filename to use for file
"""
ts = datetime.now().strftime('%Y%m%d%M%S')
out_ts_dir = '{}/{}'.format(CFG.ARCHIVE_DIR, ts[:len('yyyymmdd')])
out_prefix_dir = '{}/{}'.format(out_ts_dir, prefix)
# create filename to write to
if write_file:
chunks = write_file.split('.')
# separate the file extension from the file name, default to .txt
ext = '.{}'.format('.'.join(chunks[1:])) if chunks[1:] else '.txt'
outfile = '{}/{}-{}{}'.format(out_prefix_dir, chunks[0], ts, ext)
else:
outfile = '{}/{}-{}.txt'.format(out_prefix_dir, prefix, ts)
# create directories if they don't exist
if not os.path.isdir(CFG.ARCHIVE_DIR):
os.mkdir(CFG.ARCHIVE_DIR)
if not os.path.isdir(out_ts_dir):
os.mkdir(out_ts_dir)
if not os.path.isdir(out_prefix_dir):
os.mkdir(out_prefix_dir)
# if file already exists, append unique digit to the end
if os.path.exists(outfile):
count = 1
orig_outfile = outfile
outfile = orig_outfile + '.{}'.format(count)
while os.path.exists(outfile):
outfile = orig_outfile + '.{}'.format(count)
count += 1
with open(outfile, 'w') as f:
f.write(response)
print 'posh : {} written to {}'.format(prefix, outfile)
def exec_preproc(self, inp):
"""Parse posh `exec' command line.
Args:
inp: raw `exec' command line
Returns:
Tuple suitable for expansion into as self.generic() parameters.
"""
tmp = inp.split()
write_file = None
write_flag = tmp[1] == '-o'
if write_flag:
if '"' not in tmp[2]:
write_file = tmp[2]
del tmp[2]
del tmp[1]
tmp = ' '.join(tmp)
return tmp, write_flag, write_file
def get_args():
    """Parse command-line options into an argparse namespace.

    Returns:
        Namespace with `port` (str or None) and `version` (bool).
    """
    ap = argparse.ArgumentParser()
    ap.add_argument('-p', '--port')
    ap.add_argument('-v', '--version', action='store_true',
                    help='prints the Poet version number and exits')
    return ap.parse_args()
def print_header():
    """ Prints big ASCII logo and other info. """
    # NOTE(review): the logo's internal spacing appears whitespace-mangled in
    # this copy of the file; the literal is preserved byte-for-byte below
    # rather than guessed at -- confirm against the upstream source.
    print """
_
____ ____ ___ / /_
/ __ \/ __ \/ _ \/ __/
/ /_/ / /_/ / __/ /
/ .___/\____/\___/\__/ v{}
/_/
""".format(__version__)
def die(msg=None):
    """Optionally log a final error message, then exit with status 0.

    Args:
        msg: optional message passed to debug.err() before shutdown.
    """
    if msg:
        debug.err(msg)
    debug.err('Poet server terminated')
    sys.exit(0)
def authenticate(ping):
    """Verify that the client is in fact connecting by checking the request
    path and the auth token contained in the cookie.

    Args:
        ping: http request sent from client (string)

    Returns:
        None: client authenticated successfully
        str: the reason authentication failed
    """
    if not ping.startswith('GET /style.css HTTP/1.1'):
        return 'REQUEST'
    expected = 'Cookie: c={};'.format(base64.b64encode(CFG.AUTH))
    return None if expected in ping else 'AUTH TOKEN'
def drop_privs():
    """Drop root privileges down to the sudo-invoking user, if possible.

    Reads SUDO_UID/SUDO_GID from the environment; if absent (server was run
    directly as root), asks the operator whether to continue as root.
    """
    try:
        new_uid = int(os.getenv('SUDO_UID'))
        new_gid = int(os.getenv('SUDO_GID'))
    except TypeError:
        # they were running directly from a root user and didn't have
        # sudo env variables
        print """[!] WARNING: Couldn't drop privileges! To avoid this error, run from a non-root user.
You may also use sudo, from a non-root user. Continue? (y/n)""",
        if raw_input().lower()[0] == 'y':
            return
        die()
    debug.info('Dropping privileges to uid: {}, gid: {}'.format(new_uid,
    new_gid))
    # drop group before user, because otherwise you're not privileged enough
    # to drop group
    os.setgroups([])
    os.setregid(new_gid, new_gid)
    os.setreuid(new_uid, new_uid)
    # check to make sure we can't re-escalate; seteuid(0) raising OSError
    # means the drop actually took effect
    try:
        os.seteuid(0)
        print '[!] WARNING: Failed to drop privileges! Continue? (y/n)',
        if raw_input().lower()[0] != 'y':
            die()
    except OSError:
        return
def main():
args = get_args()
if args.version:
print 'Poet version {}'.format(__version__)
sys.exit(0)
print_header()
PORT = int(args.port) if args.port else 443
try:
s = PoetSocketServer(PORT)
except socket.error as e:
if e.errno == 13:
die('You need to be root!')
if os.geteuid() == 0:
drop_privs()
debug.info('Poet server started on port: {}'.format(PORT))
module.load_modules()
while True:
try:
conn, addr = s.accept()
except KeyboardInterrupt:
die()
conntime = datetime.now().strftime(debug.DATE_FMT)
ping = conn.recv(SIZE)
if not ping:
die('Socket error: {}'.format(e.message))
auth_err = authenticate(ping)
if auth_err:
print '[!] ({}) Connected By: {} -> INVALID! ({})'.format(conntime, addr, auth_err)
conn.close()
else:
print '[+] ({}) Connected By: {} -> VALID'.format(conntime, addr)
conn.send(FAKEOK)
conn.close()
try:
PoetServer(s).start()
break
except Exception as e:
print e
die('Fatal error: {}'.format(e.message))
die()
if __name__ == '__main__':
main()
|
mit
|
The-Acronym-Coders/ContentTweaker
|
src/main/java/com/teamacronymcoders/contenttweaker/modules/vanilla/utils/commands/CommandSenderWrapper.java
|
2321
|
package com.teamacronymcoders.contenttweaker.modules.vanilla.utils.commands;
import net.minecraft.command.CommandResultStats;
import net.minecraft.command.ICommandSender;
import net.minecraft.entity.Entity;
import net.minecraft.server.MinecraftServer;
import net.minecraft.util.math.BlockPos;
import net.minecraft.util.math.Vec3d;
import net.minecraft.util.text.ITextComponent;
import net.minecraft.world.World;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
/**
 * {@link ICommandSender} decorator that forwards every call to a wrapped
 * sender while optionally bypassing permission checks and/or suppressing
 * chat output and command feedback.
 */
public class CommandSenderWrapper implements ICommandSender {
    // Sender that all calls are delegated to.
    private final ICommandSender parent;
    // When true, canUseCommand() always succeeds regardless of the parent.
    private final boolean bypassPermissions;
    // When true, messages/feedback are forwarded; when false they are dropped.
    private final boolean sendMessages;

    /**
     * @param parent            the sender to delegate to
     * @param bypassPermissions if true, all permission checks pass
     * @param sendMessages      if false, chat messages and command feedback are silenced
     */
    public CommandSenderWrapper(ICommandSender parent, boolean bypassPermissions, boolean sendMessages) {
        this.parent = parent;
        this.bypassPermissions = bypassPermissions;
        this.sendMessages = sendMessages;
    }

    @Nonnull
    @Override
    public String getName() {
        return parent.getName();
    }

    @Nonnull
    @Override
    public ITextComponent getDisplayName() {
        return parent.getDisplayName();
    }

    /** Forwards the message only when message sending is enabled. */
    @Override
    public void sendMessage(@Nonnull ITextComponent component) {
        if (sendMessages) {
            parent.sendMessage(component);
        }
    }

    /** Always true when bypassPermissions is set; otherwise defers to the parent. */
    @Override
    public boolean canUseCommand(int permLevel, @Nonnull String commandName) {
        return bypassPermissions || parent.canUseCommand(permLevel, commandName);
    }

    @Nonnull
    @Override
    public BlockPos getPosition() {
        return parent.getPosition();
    }

    @Nonnull
    @Override
    public Vec3d getPositionVector() {
        return parent.getPositionVector();
    }

    @Nonnull
    @Override
    public World getEntityWorld() {
        return parent.getEntityWorld();
    }

    @Override
    @Nullable
    public Entity getCommandSenderEntity() {
        return parent.getCommandSenderEntity();
    }

    /** Feedback is only produced when sendMessages is set AND the parent wants it. */
    @Override
    public boolean sendCommandFeedback() {
        return sendMessages && parent.sendCommandFeedback();
    }

    @Override
    public void setCommandStat(@Nonnull CommandResultStats.Type type, int amount) {
        parent.setCommandStat(type, amount);
    }

    @Override
    @Nullable
    public MinecraftServer getServer() {
        return parent.getServer();
    }
}
|
mit
|
gianthat/carrieburnett
|
web/app/plugins/wp-rocket/inc/functions/posts.php
|
4359
|
<?php
defined( 'ABSPATH' ) || die( 'Cheatin’ uh?' );
/**
 * Get all terms archives urls associated to a specific post
 *
 * @since 1.0
 *
 * @param int $post_id The post ID.
 * @return array $urls List of taxonomies URLs
 */
function get_rocket_post_terms_urls( $post_id ) {
	$urls       = array();
	$taxonomies = get_object_taxonomies( get_post_type( $post_id ), 'objects' );

	foreach ( $taxonomies as $taxonomy ) {
		// Private taxonomies and WooCommerce shipping classes have no archive worth purging.
		if ( ! $taxonomy->public || 'product_shipping_class' === $taxonomy->name ) {
			continue;
		}

		// Get the terms related to post.
		$terms = get_the_terms( $post_id, $taxonomy->name );

		if ( empty( $terms ) ) {
			continue;
		}

		foreach ( $terms as $term ) {
			$term_url = get_term_link( $term->slug, $taxonomy->name );

			if ( is_wp_error( $term_url ) ) {
				continue;
			}

			$urls[] = $term_url;
		}
	}

	/**
	 * Filter the list of taxonomies URLs
	 *
	 * @since 1.1.0
	 *
	 * @param array $urls List of taxonomies URLs
	 */
	return apply_filters( 'rocket_post_terms_urls', $urls );
}
/**
 * Get all dates archives urls associated to a specific post
 *
 * @since 1.0
 *
 * @param int $post_id The post ID.
 * @return array $urls List of dates URLs
 */
function get_rocket_post_dates_urls( $post_id ) {
	// Get the day and month of the post.
	list( $year, $month, $day ) = explode( '-', get_the_time( 'Y-m-d', $post_id ) );

	$year_base  = trailingslashit( get_year_link( $year ) );
	$month_base = trailingslashit( get_month_link( $year, $month ) );

	$urls = array(
		$year_base . 'index.html',
		$year_base . 'index.html_gzip',
		$year_base . $GLOBALS['wp_rewrite']->pagination_base,
		$month_base . 'index.html',
		$month_base . 'index.html_gzip',
		$month_base . $GLOBALS['wp_rewrite']->pagination_base,
		get_day_link( $year, $month, $day ),
	);

	/**
	 * Filter the list of dates URLs
	 *
	 * @since 1.1.0
	 *
	 * @param array $urls List of dates URLs
	 */
	return apply_filters( 'rocket_post_dates_urls', $urls );
}
/**
 * Get the permalink post
 *
 * @since 1.3.1
 *
 * @source : get_sample_permalink() in wp-admin/includes/post.php
 *
 * @param int    $id    The post ID.
 * @param string $title The post title.
 * @param string $name  The post name.
 * @return array The permalink template and the editable post slug
 *               (note: despite the function name, an array is returned, not a string).
 */
function get_rocket_sample_permalink( $id, $title = null, $name = null ) {
	$post = get_post( $id );
	if ( ! $post ) {
		return array( '', '' );
	}

	$ptype = get_post_type_object( $post->post_type );

	// Remember the original fields so they can be restored before returning.
	$original_status = $post->post_status;
	$original_date   = $post->post_date;
	$original_name   = $post->post_name;

	// Hack: get_permalink() would return ugly permalink for drafts, so we will fake that our post is published.
	if ( in_array( $post->post_status, array( 'draft', 'pending' ), true ) ) {
		$post->post_status = 'publish';
		$post->post_name   = sanitize_title( $post->post_name ? $post->post_name : $post->post_title, $post->ID );
	}

	// If the user wants to set a new name -- override the current one.
	// Note: if empty name is supplied -- use the title instead, see #6072.
	if ( ! is_null( $name ) ) {
		$post->post_name = sanitize_title( $name ? $name : $title, $post->ID );
	}

	$post->post_name = wp_unique_post_slug( $post->post_name, $post->ID, $post->post_status, $post->post_type, $post->post_parent );

	$post->filter = 'sample';

	$permalink = get_permalink( $post, false );

	// Replace custom post_type Token with generic pagename token for ease of use.
	$permalink = str_replace( "%$post->post_type%", '%pagename%', $permalink );

	// Handle page hierarchy.
	if ( $ptype->hierarchical ) {
		$uri = get_page_uri( $post );
		$uri = untrailingslashit( $uri );
		// Strip the last path segment: keep only the ancestor portion of the URI.
		$uri = strrev( stristr( strrev( $uri ), '/' ) );
		$uri = untrailingslashit( $uri );

		/** This filter is documented in wp-admin/edit-tag-form.php */
		$uri = apply_filters( 'editable_slug', $uri );
		if ( ! empty( $uri ) ) {
			$uri .= '/';
		}
		$permalink = str_replace( '%pagename%', "{$uri}%pagename%", $permalink );
	}

	/** This filter is documented in wp-admin/edit-tag-form.php */
	$permalink         = array( $permalink, apply_filters( 'editable_slug', $post->post_name ) );
	$post->post_status = $original_status;
	$post->post_date   = $original_date;
	$post->post_name   = $original_name;
	unset( $post->filter );

	return $permalink;
}
|
mit
|
jenkinsci/blueocean-plugin
|
blueocean-dashboard/src/main/js/creation/CreatePipeline.js
|
5079
|
import React, { PropTypes } from 'react';
import { Page } from '@jenkins-cd/design-language';
import { ContentPageHeader, i18nTranslator, loadingIndicator } from '@jenkins-cd/blueocean-core-js';
import Extensions from '@jenkins-cd/js-extensions';
import { ClassicCreationLink } from './ClassicCreationLink';
import { CreatePipelineScmListRenderer } from './CreatePipelineScmListRenderer';
import { CreatePipelineStepsRenderer } from './CreatePipelineStepsRenderer';
import VerticalStep from './flow2/VerticalStep';
import StepStatus from './flow2/FlowStepStatus';
import creationUtils from './creation-status-utils';
const Sandbox = Extensions.SandboxedComponent;
const t = i18nTranslator('blueocean-dashboard');
/**
 * Top-level "Create Pipeline" page.
 *
 * Step 1 lets the user pick an SCM provider via an extension-point list;
 * the chosen provider's creation flow is then rendered inside a sandboxed
 * extension renderer. Error panels are shown when pipeline creation is
 * disabled or hidden for the current user (see creation-status-utils).
 */
export default class CreatePipeline extends React.Component {
    constructor(props) {
        super(props);
        this.state = {
            selectedProvider: null,
        };
    }

    componentWillMount() {
        // Hide any global loading indicator left over from navigation.
        loadingIndicator.hide();
    }

    componentWillUnmount() {
        // Let the active provider tear down its flow manager.
        if (this.state.selectedProvider) {
            this.state.selectedProvider.destroyFlowManager();
        }
    }

    // Called when the user picks an SCM provider in step 1.
    _onSelection(selectedProvider) {
        // Destroy the previous provider's flow before switching.
        if (this.state.selectedProvider) {
            this.state.selectedProvider.destroyFlowManager();
        }
        this.setState({
            selectedProvider,
        });
    }

    _onCompleteFlow(path) {
        this._onExit(path);
    }

    _onNavigatePipelines() {
        this._onExit({ url: '/pipelines' });
    }

    // Leave the creation page: go to an explicit url, or fall back to the
    // pipelines list when the browser history is too short to go back.
    // NOTE(review): reads the global browser `history` object directly.
    _onExit({ url } = {}) {
        if (url) {
            this.context.router.replace(url);
        } else if (history && history.length <= 2) {
            this.context.router.replace('/pipelines');
        } else {
            this.context.router.goBack();
        }
    }

    render() {
        // Step 1 is "complete" once a provider has been chosen.
        const firstStepStatus = this.state.selectedProvider ? StepStatus.COMPLETE : StepStatus.ACTIVE;
        return (
            <Page>
                <div className="create-pipeline">
                    <ContentPageHeader>
                        <h1>{t('creation.core.header.title')}</h1>
                        <ClassicCreationLink />
                    </ContentPageHeader>
                    {creationUtils.isEnabled() && (
                        <main>
                            <article className="content-area">
                                <VerticalStep className="first-step" status={firstStepStatus}>
                                    <h1>{t('creation.core.intro.scm_provider')}</h1>
                                    <CreatePipelineScmListRenderer
                                        extensionPoint="jenkins.pipeline.create.scm.provider"
                                        onSelection={provider => this._onSelection(provider)}
                                        selectedProvider={this.state.selectedProvider}
                                    />
                                </VerticalStep>
                                <Sandbox>
                                    <CreatePipelineStepsRenderer
                                        selectedProvider={this.state.selectedProvider}
                                        onCompleteFlow={data => this._onCompleteFlow(data)}
                                    />
                                </Sandbox>
                            </article>
                        </main>
                    )}
                    {creationUtils.isDisabled() && (
                        <main>
                            <article className="content-area">
                                <VerticalStep className="first-step" status={StepStatus.ERROR}>
                                    <h1>{t('creation.core.intro.invalid_security_title')}</h1>
                                    <p>
                                        <span>{t('creation.core.intro.invalid_security_message')} - </span>
                                        <a href={t('creation.core.intro.invalid_security_linkhref')} target="_blank">
                                            {t('creation.core.intro.invalid_security_linktext')}
                                        </a>
                                    </p>
                                </VerticalStep>
                            </article>
                        </main>
                    )}
                    {creationUtils.isHidden() && (
                        <main>
                            <article className="content-area">
                                <VerticalStep className="first-step" status={StepStatus.ERROR}>
                                    <h1>{t('creation.core.intro.invalid_permission_title')}</h1>
                                    <button onClick={() => this._onNavigatePipelines()}>{t('creation.core.intro.invalid_permission_button')}</button>
                                </VerticalStep>
                            </article>
                        </main>
                    )}
                </div>
            </Page>
        );
    }
}
// The component navigates via the router injected into React context.
CreatePipeline.contextTypes = {
    router: PropTypes.object,
};
|
mit
|
IjzerenHein/famous-flex
|
src/FlexScrollView.js
|
27446
|
/**
* This Source Code is licensed under the MIT license. If a copy of the
* MIT-license was not distributed with this file, You can obtain one at:
* http://opensource.org/licenses/mit-license.html.
*
* @author: Hein Rutjes (IjzerenHein)
* @license MIT
* @copyright Gloey Apps, 2015
*/
/**
* Flexible FlexScrollView for famo.us.
*
* Key features:
* - Customizable layout (uses ListLayout by default)
* - Insert/remove at any position using animations
* - Support for `true` size renderables
* - Pull to refresh (header & footer)
* - Horizontal/vertical direction
* - Top/left or bottom/right alignment
* - Pagination
* - Option to embed in a ContainerSurface
* - FlexScrollView linking
*
* Inherited from: [ScrollController](./ScrollController.md)
* @module
*/
define(function(require, exports, module) {
// import dependencies
var LayoutUtility = require('./LayoutUtility');
var ScrollController = require('./ScrollController');
var ListLayout = require('./layouts/ListLayout');
//
// Pull to refresh states
//
var PullToRefreshState = {
HIDDEN: 0,
PULLING: 1,
ACTIVE: 2,
COMPLETED: 3,
HIDDING: 4
};
/**
 * @class
 * @extends ScrollController
 * @param {Object} options Configurable options (see ScrollController for all inherited options).
 * @param {Renderable} [options.pullToRefreshHeader] Pull to refresh renderable that is displayed when pulling down from the top.
 * @param {Renderable} [options.pullToRefreshFooter] Pull to refresh renderable that is displayed when pulling up from the bottom.
 * @param {FlexScrollView} [options.leadingScrollView] Leading scrollview into which input events are piped (see Tutorial)
 * @param {FlexScrollView} [options.trailingScrollView] Trailing scrollview into which input events are piped (see Tutorial)
 * @alias module:FlexScrollView
 */
function FlexScrollView(options) {
    ScrollController.call(this, LayoutUtility.combineOptions(FlexScrollView.DEFAULT_OPTIONS, options));
    // Per-scrollview bookkeeping of how much scroll-force delta has been
    // routed where; used by applyScrollForce/updateScrollForce/releaseScrollForce
    // when leading/trailing scrollviews are linked.
    this._thisScrollViewDelta = 0;
    this._leadingScrollViewDelta = 0;
    this._trailingScrollViewDelta = 0;
}
FlexScrollView.prototype = Object.create(ScrollController.prototype);
FlexScrollView.prototype.constructor = FlexScrollView;
// Re-export enums on the class so callers need not import ScrollController.
FlexScrollView.PullToRefreshState = PullToRefreshState;
FlexScrollView.Bounds = ScrollController.Bounds;
FlexScrollView.PaginationMode = ScrollController.PaginationMode;

FlexScrollView.DEFAULT_OPTIONS = {
    layout: ListLayout,         // sequential layout, uses width/height from renderable
    direction: undefined,       // 0 = X, 1 = Y, undefined = use default from layout
    paginated: false,           // pagination on/off
    alignment: 0,               // 0 = top/left, 1 = bottom/right
    flow: false,                // allow renderables to flow between layouts when not scrolling
    mouseMove: false,           // allow mouse to hold and move the view
    useContainer: false,        // embeds inside a ContainerSurface for clipping and capturing input events
    visibleItemThresshold: 0.5, // by default, when an item is 50% visible, it is considered visible by `getFirstVisibleItem`
    pullToRefreshHeader: undefined, // assign pull-to-refresh renderable here (renderable must have a size)
    pullToRefreshFooter: undefined, // assign pull-to-refresh renderable here (renderable must have a size)
    leadingScrollView: undefined,
    trailingScrollView: undefined
    // see ScrollController for all other options
};
/**
 * Patches the FlexScrollView instance's options with the passed-in ones.
 *
 * @param {Object} options Configurable options (see ScrollController for all inherited options).
 * @param {Renderable} [options.pullToRefreshHeader] Pull to refresh renderable that is displayed when pulling down from the top.
 * @param {Renderable} [options.pullToRefreshFooter] Pull to refresh renderable that is displayed when pulling up from the bottom.
 * @param {FlexScrollView} [options.leadingScrollView] Leading scrollview into which input events are piped (see Tutorial).
 * @param {FlexScrollView} [options.trailingScrollView] Trailing scrollview into which input events are piped (see Tutorial).
 * @return {FlexScrollView} this
 */
FlexScrollView.prototype.setOptions = function(options) {
    ScrollController.prototype.setOptions.call(this, options);

    // Update pull to refresh renderables.
    // this._pullToRefresh is a 2-slot array: [0] = header state, [1] = footer state.
    if (options.pullToRefreshHeader || options.pullToRefreshFooter || this._pullToRefresh) {
        if (options.pullToRefreshHeader) {
            this._pullToRefresh = this._pullToRefresh || [undefined, undefined];
            if (!this._pullToRefresh[0]) {
                // fresh header slot starts hidden
                this._pullToRefresh[0] = {
                    state: PullToRefreshState.HIDDEN,
                    prevState: PullToRefreshState.HIDDEN,
                    footer: false
                };
            }
            this._pullToRefresh[0].node = options.pullToRefreshHeader;
        }
        else if (!this.options.pullToRefreshHeader && this._pullToRefresh) {
            // header renderable was removed
            this._pullToRefresh[0] = undefined;
        }
        if (options.pullToRefreshFooter) {
            this._pullToRefresh = this._pullToRefresh || [undefined, undefined];
            if (!this._pullToRefresh[1]) {
                // fresh footer slot starts hidden
                this._pullToRefresh[1] = {
                    state: PullToRefreshState.HIDDEN,
                    prevState: PullToRefreshState.HIDDEN,
                    footer: true
                };
            }
            this._pullToRefresh[1].node = options.pullToRefreshFooter;
        }
        else if (!this.options.pullToRefreshFooter && this._pullToRefresh) {
            // footer renderable was removed
            this._pullToRefresh[1] = undefined;
        }
        // drop the array entirely when both slots are gone
        if (this._pullToRefresh && !this._pullToRefresh[0] && !this._pullToRefresh[1]) {
            this._pullToRefresh = undefined;
        }
    }
    return this;
};
/**
 * Sets the data-source (alias for setDataSource).
 *
 * This function is a shim provided for compatibility with the stock famo.us Scrollview.
 *
 * @param {Array|LinkedListViewSequence} node Either an array of renderables or a viewSequence.
 * @return {FlexScrollView} this
 */
FlexScrollView.prototype.sequenceFrom = function(node) {
    return this.setDataSource(node);
};
/**
 * Returns the index of the first visible renderable.
 *
 * This function is a shim provided for compatibility with the stock famo.us Scrollview.
 *
 * @return {Number} Index of the first visible renderable, or -1 when nothing is visible.
 */
FlexScrollView.prototype.getCurrentIndex = function() {
    var firstVisible = this.getFirstVisibleItem();
    if (!firstVisible) {
        return -1;
    }
    return firstVisible.viewSequence.getIndex();
};
/**
 * Paginates the Scrollview to an absolute page index. This function is a shim provided
 * for compatibility with the stock famo.us Scrollview.
 *
 * @param {Number} index view-sequence index to go to.
 * @param {Bool} [noAnimation] When set to true, immediately shows the node without scrolling animation.
 * @return {FlexScrollView} this
 */
FlexScrollView.prototype.goToPage = function(index, noAnimation) {
    var viewSequence = this._viewSequence;
    if (!viewSequence) {
        return this;
    }
    // Walk the sequence towards the requested index, one node at a time,
    // bailing out when the sequence ends before reaching it.
    var currentIndex = viewSequence.getIndex();
    while (currentIndex !== index) {
        viewSequence = (currentIndex < index) ? viewSequence.getNext() : viewSequence.getPrevious();
        if (!viewSequence) {
            return this;
        }
        currentIndex = viewSequence.getIndex();
    }
    this.goToRenderNode(viewSequence.get(), noAnimation);
    return this;
};
/**
 * Returns the offset associated with the Scrollview instance's current node
 * (generally the node currently at the top).
 *
 * This function is a shim provided for compatibility with the stock famo.us Scrollview.
 *
 * @return {number} The position of either the specified node, or the Scrollview's current Node,
 * in pixels translated.
 */
FlexScrollView.prototype.getOffset = function() {
    // cached by the underlying ScrollController during layout
    return this._scrollOffsetCache;
};

/**
 * Returns the position associated with the Scrollview instance's current node
 * (generally the node currently at the top).
 *
 * This function is a shim provided for compatibility with the stock famo.us Scrollview.
 *
 * @deprecated
 * @param {number} [node] If specified, returns the position of the node at that index in the
 * Scrollview instance's currently managed collection.
 * @return {number} The position of either the specified node, or the Scrollview's current Node,
 * in pixels translated.
 */
FlexScrollView.prototype.getPosition = FlexScrollView.prototype.getOffset;

/**
 * Returns the absolute position associated with the Scrollview instance.
 *
 * This function is a shim provided for compatibility with the stock famo.us Scrollview.
 *
 * @return {number} The position of the Scrollview's current Node, in pixels translated.
 */
FlexScrollView.prototype.getAbsolutePosition = function() {
    return -(this._scrollOffsetCache + this._scroll.groupStart);
};
/**
 * Helper function for setting the pull-to-refresh status.
 *
 * Updates `pullToRefresh.state` and notifies the renderable (when it
 * implements `setPullToRefreshStatus`). No-op when the state is unchanged.
 */
function _setPullToRefreshState(pullToRefresh, state) {
    if (pullToRefresh.state === state) {
        return;
    }
    pullToRefresh.state = state;
    var node = pullToRefresh.node;
    if (node && node.setPullToRefreshStatus) {
        node.setPullToRefreshStatus(state);
    }
}
/**
 * Helper function for getting the pull-to-refresh data.
 *
 * Returns the header slot (index 0) or footer slot (index 1) of
 * `this._pullToRefresh`, or undefined when pull-to-refresh is not configured.
 */
function _getPullToRefresh(footer) {
    if (!this._pullToRefresh) {
        return undefined;
    }
    return footer ? this._pullToRefresh[1] : this._pullToRefresh[0];
}
/**
 * Post-layout function that adds the pull-to-refresh renderables.
 *
 * For each configured slot (header/footer) it computes how far the content
 * has been pulled past its edge, drives the pull-to-refresh state machine,
 * and injects the renderable into the layout context while it is visible.
 * @private
 */
FlexScrollView.prototype._postLayout = function(size, scrollOffset) {
    // Exit immediately when pull to refresh is not configured
    if (!this._pullToRefresh) {
        return;
    }

    // Adjust scroll-offset for alignment
    if (this.options.alignment) {
        scrollOffset += size[this._direction];
    }

    // Prepare (lazily computed scroll heights, shared between header & footer)
    var prevHeight;
    var nextHeight;
    var totalHeight;

    // Show/activate pull to refresh renderables
    for (var i = 0; i < 2; i++) {
        var pullToRefresh = this._pullToRefresh[i];
        if (pullToRefresh) {

            // Calculate offset: how many pixels of the indicator are exposed.
            var length = pullToRefresh.node.getSize()[this._direction];
            var pullLength = pullToRefresh.node.getPullToRefreshSize ? pullToRefresh.node.getPullToRefreshSize()[this._direction] : length;
            var offset;
            if (!pullToRefresh.footer) {
                // header
                prevHeight = this._calcScrollHeight(false);
                prevHeight = (prevHeight === undefined) ? -1 : prevHeight;
                offset = (prevHeight >= 0) ? (scrollOffset - prevHeight) : prevHeight;
                if (this.options.alignment) {
                    // bottom/right alignment: compensate when all content fits on screen
                    nextHeight = this._calcScrollHeight(true);
                    nextHeight = (nextHeight === undefined) ? -1 : nextHeight;
                    totalHeight = ((prevHeight >= 0) && (nextHeight >= 0)) ? (prevHeight + nextHeight) : -1;
                    if ((totalHeight >= 0) && (totalHeight < size[this._direction])) {
                        offset = Math.round((scrollOffset - size[this._direction]) + nextHeight);
                    }
                }
            }
            else {
                // footer (mirror of the header math, relative to the far edge)
                nextHeight = (nextHeight === undefined) ? nextHeight = this._calcScrollHeight(true) : nextHeight;
                nextHeight = (nextHeight === undefined) ? -1 : nextHeight;
                offset = (nextHeight >= 0) ? (scrollOffset + nextHeight) : (size[this._direction] + 1);
                if (!this.options.alignment) {
                    prevHeight = (prevHeight === undefined) ? this._calcScrollHeight(false) : prevHeight;
                    prevHeight = (prevHeight === undefined) ? -1 : prevHeight;
                    totalHeight = ((prevHeight >= 0) && (nextHeight >= 0)) ? (prevHeight + nextHeight) : -1;
                    if ((totalHeight >= 0) && (totalHeight < size[this._direction])) {
                        offset = Math.round((scrollOffset - prevHeight) + size[this._direction]);
                    }
                }
                offset = -(offset - size[this._direction]);
            }

            // Determine current state.
            // The 0.2px threshold is a small dead-zone so the indicator
            // doesn't flicker between HIDDEN and PULLING near the edge.
            var visiblePerc = Math.max(Math.min(offset / pullLength, 1), 0);
            switch (pullToRefresh.state) {
                case PullToRefreshState.HIDDEN:
                    if (this._scroll.scrollForceCount) {
                        if (visiblePerc >= 1) {
                            _setPullToRefreshState(pullToRefresh, PullToRefreshState.ACTIVE);
                        }
                        else if (offset >= 0.2) {
                            _setPullToRefreshState(pullToRefresh, PullToRefreshState.PULLING);
                        }
                    }
                    break;
                case PullToRefreshState.PULLING:
                    if (this._scroll.scrollForceCount && (visiblePerc >= 1)) {
                        _setPullToRefreshState(pullToRefresh, PullToRefreshState.ACTIVE);
                    }
                    else if (offset < 0.2) {
                        _setPullToRefreshState(pullToRefresh, PullToRefreshState.HIDDEN);
                    }
                    break;
                case PullToRefreshState.ACTIVE:
                    // nothing to do, wait for completed
                    break;
                case PullToRefreshState.COMPLETED:
                    if (!this._scroll.scrollForceCount) {
                        if (offset >= 0.2) {
                            _setPullToRefreshState(pullToRefresh, PullToRefreshState.HIDDING);
                        }
                        else {
                            _setPullToRefreshState(pullToRefresh, PullToRefreshState.HIDDEN);
                        }
                    }
                    break;
                case PullToRefreshState.HIDDING:
                    if (offset < 0.2) {
                        _setPullToRefreshState(pullToRefresh, PullToRefreshState.HIDDEN);
                    }
                    break;
            }

            // Show pull to refresh node: inject it into the layout-context
            // just outside the regular sequence (prev for header, next for footer).
            if (pullToRefresh.state !== PullToRefreshState.HIDDEN) {
                var contextNode = {
                    renderNode: pullToRefresh.node,
                    prev: !pullToRefresh.footer,
                    next: pullToRefresh.footer,
                    index: !pullToRefresh.footer ? --this._nodes._contextState.prevGetIndex : ++this._nodes._contextState.nextGetIndex
                };
                var scrollLength;
                if (pullToRefresh.state === PullToRefreshState.ACTIVE) {
                    scrollLength = length;
                }
                else if (this._scroll.scrollForceCount) {
                    scrollLength = Math.min(offset, length);
                }
                var set = {
                    size: [size[0], size[1]],
                    translate: [0, 0, -1e-3], // transform.behind
                    scrollLength: scrollLength
                };
                set.size[this._direction] = Math.max(Math.min(offset, pullLength), 0);
                set.translate[this._direction] = pullToRefresh.footer ? (size[this._direction] - length) : 0;
                this._nodes._context.set(contextNode, set);
            }
        }
    }
};
/**
 * Shows the pulls-to-refresh renderable indicating that a refresh is in progress.
 *
 * @param {Bool} [footer] set to true to show pull-to-refresh at the footer (default: false).
 * @return {FlexScrollView} this
 */
FlexScrollView.prototype.showPullToRefresh = function(footer) {
    var pullToRefresh = _getPullToRefresh.call(this, footer);
    if (pullToRefresh) {
        _setPullToRefreshState(pullToRefresh, PullToRefreshState.ACTIVE);
        // force a re-layout so the indicator becomes visible
        this._scroll.scrollDirty = true;
    }
    // bugfix: the JSDoc promises `this` (and hidePullToRefresh returns it),
    // but this method previously returned undefined, breaking chaining
    return this;
};
/**
 * Hides the pull-to-refresh renderable in case it was visible.
 *
 * @param {Bool} [footer] set to true to hide the pull-to-refresh at the footer (default: false).
 * @return {FlexScrollView} this
 */
FlexScrollView.prototype.hidePullToRefresh = function(footer) {
    var pullToRefresh = _getPullToRefresh.call(this, footer);
    var isActive = pullToRefresh && (pullToRefresh.state === PullToRefreshState.ACTIVE);
    if (isActive) {
        _setPullToRefreshState(pullToRefresh, PullToRefreshState.COMPLETED);
        // force a re-layout so the indicator can animate away
        this._scroll.scrollDirty = true;
    }
    return this;
};
/**
 * Get the visible state of the pull-to-refresh renderable.
 *
 * @param {Bool} [footer] set to true to get the state of the pull-to-refresh footer (default: false).
 * @return {Bool} true when the indicator is in the ACTIVE (refreshing) state.
 */
FlexScrollView.prototype.isPullToRefreshVisible = function(footer) {
    var pullToRefresh = _getPullToRefresh.call(this, footer);
    if (!pullToRefresh) {
        return false;
    }
    return pullToRefresh.state === PullToRefreshState.ACTIVE;
};
/**
 * Delegates any scroll force to leading/trailing scrollviews.
 *
 * When linked scrollviews are configured, the incoming delta is split:
 * each scrollview consumes only as much as it can scroll (canScroll),
 * and the remainder is piped to the next one. The consumed amounts are
 * tracked in this._*ScrollViewDelta so updateScrollForce/releaseScrollForce
 * can address each scrollview with its own accumulated delta.
 * @private
 */
FlexScrollView.prototype.applyScrollForce = function(delta) {
    var leadingScrollView = this.options.leadingScrollView;
    var trailingScrollView = this.options.trailingScrollView;
    // fast path: no linked scrollviews, behave like a plain ScrollController
    if (!leadingScrollView && !trailingScrollView) {
        return ScrollController.prototype.applyScrollForce.call(this, delta);
    }
    var partialDelta;
    if (delta < 0) {
        // scrolling towards the leading edge: leading view first, then self, then trailing
        if (leadingScrollView) {
            partialDelta = leadingScrollView.canScroll(delta);
            this._leadingScrollViewDelta += partialDelta;
            leadingScrollView.applyScrollForce(partialDelta);
            delta -= partialDelta;
        }
        if (trailingScrollView) {
            partialDelta = this.canScroll(delta);
            ScrollController.prototype.applyScrollForce.call(this, partialDelta);
            this._thisScrollViewDelta += partialDelta;
            delta -= partialDelta;
            trailingScrollView.applyScrollForce(delta);
            this._trailingScrollViewDelta += delta;
        }
        else {
            // no trailing view: self absorbs whatever is left
            ScrollController.prototype.applyScrollForce.call(this, delta);
            this._thisScrollViewDelta += delta;
        }
    }
    else {
        // scrolling towards the trailing edge: trailing view first, then self, then leading
        if (trailingScrollView) {
            partialDelta = trailingScrollView.canScroll(delta);
            trailingScrollView.applyScrollForce(partialDelta);
            this._trailingScrollViewDelta += partialDelta;
            delta -= partialDelta;
        }
        if (leadingScrollView) {
            partialDelta = this.canScroll(delta);
            ScrollController.prototype.applyScrollForce.call(this, partialDelta);
            this._thisScrollViewDelta += partialDelta;
            delta -= partialDelta;
            leadingScrollView.applyScrollForce(delta);
            this._leadingScrollViewDelta += delta;
        }
        else {
            // no leading view: self absorbs whatever is left
            ScrollController.prototype.applyScrollForce.call(this, delta);
            this._thisScrollViewDelta += delta;
        }
    }
    return this;
};
/**
 * Delegates any scroll force to leading/trailing scrollviews.
 *
 * Same splitting strategy as applyScrollForce, but operates on the
 * difference between the new and previous delta, updating each linked
 * scrollview with its own previous/new accumulated delta pair.
 * @private
 */
FlexScrollView.prototype.updateScrollForce = function(prevDelta, newDelta) {
    var leadingScrollView = this.options.leadingScrollView;
    var trailingScrollView = this.options.trailingScrollView;
    // fast path: no linked scrollviews
    if (!leadingScrollView && !trailingScrollView) {
        return ScrollController.prototype.updateScrollForce.call(this, prevDelta, newDelta);
    }
    var partialDelta;
    var delta = newDelta - prevDelta;
    if (delta < 0) {
        // towards the leading edge: leading view first, then self, then trailing
        if (leadingScrollView) {
            partialDelta = leadingScrollView.canScroll(delta);
            leadingScrollView.updateScrollForce(this._leadingScrollViewDelta, this._leadingScrollViewDelta + partialDelta);
            this._leadingScrollViewDelta += partialDelta;
            delta -= partialDelta;
        }
        if (trailingScrollView && delta) {
            partialDelta = this.canScroll(delta);
            ScrollController.prototype.updateScrollForce.call(this, this._thisScrollViewDelta, this._thisScrollViewDelta + partialDelta);
            this._thisScrollViewDelta += partialDelta;
            delta -= partialDelta;
            this._trailingScrollViewDelta += delta;
            trailingScrollView.updateScrollForce(this._trailingScrollViewDelta, this._trailingScrollViewDelta + delta);
        }
        else if (delta) {
            // no trailing view: self absorbs whatever is left
            ScrollController.prototype.updateScrollForce.call(this, this._thisScrollViewDelta, this._thisScrollViewDelta + delta);
            this._thisScrollViewDelta += delta;
        }
    }
    else {
        // towards the trailing edge: trailing view first, then self, then leading
        if (trailingScrollView) {
            partialDelta = trailingScrollView.canScroll(delta);
            trailingScrollView.updateScrollForce(this._trailingScrollViewDelta, this._trailingScrollViewDelta + partialDelta);
            this._trailingScrollViewDelta += partialDelta;
            delta -= partialDelta;
        }
        if (leadingScrollView) {
            partialDelta = this.canScroll(delta);
            ScrollController.prototype.updateScrollForce.call(this, this._thisScrollViewDelta, this._thisScrollViewDelta + partialDelta);
            this._thisScrollViewDelta += partialDelta;
            delta -= partialDelta;
            leadingScrollView.updateScrollForce(this._leadingScrollViewDelta, this._leadingScrollViewDelta + delta);
            this._leadingScrollViewDelta += delta;
        }
        else {
            // no leading view: self absorbs whatever is left
            ScrollController.prototype.updateScrollForce.call(this, this._thisScrollViewDelta, this._thisScrollViewDelta + delta);
            this._thisScrollViewDelta += delta;
        }
    }
    return this;
};
/**
 * Delegates any scroll force to leading/trailing scrollviews.
 *
 * Unwinds the deltas accumulated by applyScrollForce/updateScrollForce:
 * each linked scrollview releases the portion it consumed, and the
 * velocity is handed to the scrollview that absorbed the final remainder.
 * @private
 */
FlexScrollView.prototype.releaseScrollForce = function(delta, velocity, detectSwipes) {
    var leadingScrollView = this.options.leadingScrollView;
    var trailingScrollView = this.options.trailingScrollView;
    // fast path: no linked scrollviews
    if (!leadingScrollView && !trailingScrollView) {
        return ScrollController.prototype.releaseScrollForce.call(this, delta, velocity, detectSwipes);
    }
    var partialDelta;
    if (delta < 0) {
        if (leadingScrollView) {
            partialDelta = Math.max(this._leadingScrollViewDelta, delta);
            this._leadingScrollViewDelta -= partialDelta;
            delta -= partialDelta;
            leadingScrollView.releaseScrollForce(this._leadingScrollViewDelta, delta ? 0 : velocity, detectSwipes);
        }
        if (trailingScrollView) {
            partialDelta = Math.max(this._thisScrollViewDelta, delta);
            this._thisScrollViewDelta -= partialDelta;
            delta -= partialDelta;
            ScrollController.prototype.releaseScrollForce.call(this, this._thisScrollViewDelta, delta ? 0 : velocity, detectSwipes);
            this._trailingScrollViewDelta -= delta;
            trailingScrollView.releaseScrollForce(this._trailingScrollViewDelta, delta ? velocity : 0, detectSwipes);
        }
        else {
            this._thisScrollViewDelta -= delta;
            ScrollController.prototype.releaseScrollForce.call(this, this._thisScrollViewDelta, delta ? velocity : 0, detectSwipes);
        }
    }
    else {
        if (trailingScrollView) {
            partialDelta = Math.min(this._trailingScrollViewDelta, delta);
            this._trailingScrollViewDelta -= partialDelta;
            delta -= partialDelta;
            trailingScrollView.releaseScrollForce(this._trailingScrollViewDelta, delta ? 0 : velocity, detectSwipes);
        }
        if (leadingScrollView) {
            partialDelta = Math.min(this._thisScrollViewDelta, delta);
            this._thisScrollViewDelta -= partialDelta;
            delta -= partialDelta;
            ScrollController.prototype.releaseScrollForce.call(this, this._thisScrollViewDelta, delta ? 0 : velocity, detectSwipes);
            this._leadingScrollViewDelta -= delta;
            leadingScrollView.releaseScrollForce(this._leadingScrollViewDelta, delta ? velocity : 0, detectSwipes);
        }
        else {
            this._thisScrollViewDelta -= delta;
            // bugfix: this branch previously called updateScrollForce (a
            // copy-paste slip) — every parallel branch releases, and the
            // arguments here are release-style (delta, velocity, detectSwipes)
            ScrollController.prototype.releaseScrollForce.call(this, this._thisScrollViewDelta, delta ? velocity : 0, detectSwipes);
        }
    }
    return this;
};
/**
 * Overriden commit, in order to emit pull-to-refresh event after
 * all the rendering has been done.
 * @private
 */
FlexScrollView.prototype.commit = function(context) {

    // Call base class
    var result = ScrollController.prototype.commit.call(this, context);

    // Emit pull to refresh events after the whole commit call has been executed
    // so that when the refresh event is received, the FlexScrollView is in a valid state
    // and can be queried.
    if (this._pullToRefresh) {
        for (var i = 0; i < 2; i++) {
            var pullToRefresh = this._pullToRefresh[i];
            if (pullToRefresh) {
                // emit 'refresh' exactly once, on the HIDDEN/PULLING -> ACTIVE transition
                if ((pullToRefresh.state === PullToRefreshState.ACTIVE) &&
                    (pullToRefresh.prevState !== PullToRefreshState.ACTIVE)) {
                    this._eventOutput.emit('refresh', {
                        target: this,
                        footer: pullToRefresh.footer
                    });
                }
                pullToRefresh.prevState = pullToRefresh.state;
            }
        }
    }
    return result;
};
module.exports = FlexScrollView;
});
|
mit
|
Rainbowcoin/Rainbowcoin
|
src/qt/qrcodedialog.cpp
|
4315
|
#include "qrcodedialog.h"
#include "ui_qrcodedialog.h"
#include "bitcoinunits.h"
#include "guiconstants.h"
#include "guiutil.h"
#include "optionsmodel.h"
#include <QPixmap>
#include <QUrl>
#include <qrencode.h>
/**
 * Constructs the QR-code dialog for a wallet address.
 *
 * @param addr      Address encoded into the payment URI / QR code; also used
 *                  as the window title.
 * @param label     Initial text for the label line edit.
 * @param enableReq When false, the "request payment" checkbox and the amount
 *                  widgets are hidden (plain address QR only).
 * @param parent    Parent widget.
 */
QRCodeDialog::QRCodeDialog(const QString &addr, const QString &label, bool enableReq, QWidget *parent) :
    QDialog(parent),
    ui(new Ui::QRCodeDialog),
    model(0),
    address(addr)
{
    ui->setupUi(this);

    setWindowTitle(QString("%1").arg(address));

    // The payment-request widgets are optional; hide them when disabled.
    ui->chkReqPayment->setVisible(enableReq);
    ui->lblAmount->setVisible(enableReq);
    ui->lnReqAmount->setVisible(enableReq);

    ui->lnLabel->setText(label);

    // Saving is only enabled once a QR code has been generated successfully.
    ui->btnSaveAs->setEnabled(false);

    genCode();
}
/// Destroys the dialog. Only the Designer-generated UI is heap-allocated by
/// this class; `model` is merely a borrowed pointer set via setModel().
QRCodeDialog::~QRCodeDialog()
{
    delete ui;
}
/**
 * Attaches the options model so the amount field follows the configured
 * display unit.
 *
 * @param model Options model (may be null; then no unit updates occur).
 */
void QRCodeDialog::setModel(OptionsModel *model)
{
    this->model = model;

    if (model)
        connect(model, SIGNAL(displayUnitChanged(int)), this, SLOT(updateDisplayUnit()));

    // update the display unit immediately, to not use the default ("LGBT")
    updateDisplayUnit();
}
/**
 * Regenerates the QR-code pixmap from the current URI.
 *
 * Does nothing when getURI() returns an empty string (invalid amount or
 * over-long URI); in that case getURI() has already placed an error message
 * in the dialog and disabled the Save button.
 */
void QRCodeDialog::genCode()
{
    QString uri = getURI();

    if (uri != "")
    {
        ui->lblQRCode->setText("");

        QRcode *code = QRcode_encodeString(uri.toUtf8().constData(), 0, QR_ECLEVEL_L, QR_MODE_8, 1);
        if (!code)
        {
            ui->lblQRCode->setText(tr("Error encoding URI into QR Code."));
            return;
        }
        // Render the module matrix into an RGB32 image with a 4-pixel quiet
        // zone on every side (hence width + 8).
        myImage = QImage(code->width + 8, code->width + 8, QImage::Format_RGB32);
        myImage.fill(0xffffff);
        unsigned char *p = code->data;
        for (int y = 0; y < code->width; y++)
        {
            for (int x = 0; x < code->width; x++)
            {
                // Bit 0 of each data byte is the module color (1 = black).
                myImage.setPixel(x + 4, y + 4, ((*p & 1) ? 0x0 : 0xffffff));
                p++;
            }
        }
        QRcode_free(code);

        ui->lblQRCode->setPixmap(QPixmap::fromImage(myImage).scaled(300, 300));
        ui->outUri->setPlainText(uri);
    }
}
/**
 * Builds the BIP21-style payment URI ("rainbowcoin:<address>?...") from the
 * current form state.
 *
 * Side effects: clears the URI text box, toggles the Save button, and shows
 * an error message in the QR label when the amount is invalid or the URI
 * exceeds MAX_URI_LENGTH.
 *
 * @return The URI, or an empty string on the error cases above.
 */
QString QRCodeDialog::getURI()
{
    QString ret = QString("rainbowcoin:%1").arg(address);

    // Tracks whether the next parameter needs "?" (first) or "&" (later).
    int paramCount = 0;
    ui->outUri->clear();

    if (ui->chkReqPayment->isChecked())
    {
        if (ui->lnReqAmount->validate())
        {
            // even if we allow a non BTC unit input in lnReqAmount, we generate the URI with BTC as unit (as defined in BIP21)
            ret += QString("?amount=%1").arg(BitcoinUnits::format(BitcoinUnits::BTC, ui->lnReqAmount->value()));
            paramCount++;
        }
        else
        {
            ui->btnSaveAs->setEnabled(false);
            ui->lblQRCode->setText(tr("The entered amount is invalid, please check."));
            return QString("");
        }
    }

    if (!ui->lnLabel->text().isEmpty())
    {
        // Percent-encode so arbitrary label text stays URI-safe.
        QString lbl(QUrl::toPercentEncoding(ui->lnLabel->text()));
        ret += QString("%1label=%2").arg(paramCount == 0 ? "?" : "&").arg(lbl);
        paramCount++;
    }

    if (!ui->lnMessage->text().isEmpty())
    {
        QString msg(QUrl::toPercentEncoding(ui->lnMessage->text()));
        ret += QString("%1message=%2").arg(paramCount == 0 ? "?" : "&").arg(msg);
        paramCount++;
    }

    // limit URI length to prevent a DoS against the QR-Code dialog
    if (ret.length() > MAX_URI_LENGTH)
    {
        ui->btnSaveAs->setEnabled(false);
        ui->lblQRCode->setText(tr("Resulting URI too long, try to reduce the text for label / message."));
        return QString("");
    }

    ui->btnSaveAs->setEnabled(true);
    return ret;
}
/// Re-generates the QR code whenever the requested amount changes.
void QRCodeDialog::on_lnReqAmount_textChanged()
{
    genCode();
}
/// Re-generates the QR code whenever the label text changes.
void QRCodeDialog::on_lnLabel_textChanged()
{
    genCode();
}
/// Re-generates the QR code whenever the message text changes.
void QRCodeDialog::on_lnMessage_textChanged()
{
    genCode();
}
/// Prompts for a file name and saves the current QR code image as a PNG,
/// scaled up to EXPORT_IMAGE_SIZE x EXPORT_IMAGE_SIZE pixels.
void QRCodeDialog::on_btnSaveAs_clicked()
{
    QString fn = GUIUtil::getSaveFileName(this, tr("Save QR Code"), QString(), tr("PNG Images (*.png)"));
    if (!fn.isEmpty())
        myImage.scaled(EXPORT_IMAGE_SIZE, EXPORT_IMAGE_SIZE).save(fn);
}
/// Keeps the QR code in sync with the "request payment" checkbox.
void QRCodeDialog::on_chkReqPayment_toggled(bool fChecked)
{
    if (!fChecked)
        // if chkReqPayment is not active, don't display lnReqAmount as invalid
        ui->lnReqAmount->setValid(true);

    genCode();
}
/// Pushes the currently configured display unit into the amount field.
/// No-op while no options model is attached.
void QRCodeDialog::updateDisplayUnit()
{
    if (model)
    {
        // Update lnReqAmount with the current unit
        ui->lnReqAmount->setDisplayUnit(model->getDisplayUnit());
    }
}
|
mit
|
alucidwolf/alucidwolf.github.io
|
ps-practical-design-patterns/structure/Facade/task.js
|
1425
|
var Task = function (data) {
    // A task copies exactly these fields from the supplied record; any
    // extra keys on `data` are deliberately ignored.
    var fields = ['name', 'priority', 'project', 'user', 'completed'];
    for (var i = 0; i < fields.length; i++) {
        var key = fields[i];
        this[key] = data[key];
    }
}
var TaskService = (function () {
    // Facade target: the low-level task operations. Each one logs what it
    // does so the demo output shows the call sequence.
    return {
        // Mark the task as done.
        complete(task) {
            task.completed = true;
            console.log(`completing task: ${task.name}`);
        },
        // Stamp the task with the moment it was completed.
        setCompleteDate(task) {
            task.completedDate = new Date();
            console.log(`${task.name} completed on ${task.completedDate}`);
        },
        // Tell the owning user the task is finished.
        notifyCompletion(task, user) {
            console.log(`Notifying ${user} of the completion of ${task.name}`);
        },
        // Persist the task (stubbed out: log only).
        save(task) {
            console.log(`saving Task: ${task.name}`);
        }
    };
})();
var TaskServiceWrapper = function () {
    /**
     * Facade over TaskService: completes the given task and, on success,
     * runs the whole follow-up sequence (date stamp, notification, save).
     *
     * BUG FIX: the original body ignored its `task` parameter and operated
     * on the global `myTask`, so the facade only ever worked for that one
     * object. It now uses the task that is actually passed in.
     *
     * @param {Task} task - the task to complete and process.
     */
    var completeAndNotify = function (task) {
        TaskService.complete(task);
        if (task.completed === true) {
            TaskService.setCompleteDate(task);
            TaskService.notifyCompletion(task, task.user);
            TaskService.save(task);
        }
    }
    return {
        completeAndNotify: completeAndNotify
    }
}();
// Demo driver: build a task and push it through the facade, logging the
// task before-less/after state so the mutations are visible.
var myTask = new Task({
    name: 'MyTask',
    priority: 1,
    project: 'Courses',
    user: 'Jon',
    completed: false
});
//console.log(myTask);
TaskServiceWrapper.completeAndNotify(myTask);
// After the call the task is completed, date-stamped and "saved".
console.log(myTask);
//
|
mit
|
cmars/tools
|
src/github.com/Masterminds/glide/action/get.go
|
6515
|
package action
import (
"fmt"
"path/filepath"
"strings"
"github.com/Masterminds/glide/cache"
"github.com/Masterminds/glide/cfg"
"github.com/Masterminds/glide/godep"
"github.com/Masterminds/glide/msg"
gpath "github.com/Masterminds/glide/path"
"github.com/Masterminds/glide/repo"
"github.com/Masterminds/glide/util"
"github.com/Masterminds/semver"
)
// Get fetches one or more dependencies and installs.
//
// This includes resolving dependency resolution and re-generating the lock file.
func Get(names []string, installer *repo.Installer, insecure, skipRecursive, strip, stripVendor, nonInteract bool) {
	// Take the system-wide cache lock so concurrent glide invocations don't
	// race on the shared cache.
	if installer.UseCache {
		cache.SystemLock()
	}

	base := gpath.Basepath()
	EnsureGopath()
	EnsureVendorDir()
	conf := EnsureConfig()
	glidefile, err := gpath.Glide()
	if err != nil {
		msg.Die("Could not find Glide file: %s", err)
	}

	// Add the packages to the config.
	if count, err2 := addPkgsToConfig(conf, names, insecure, nonInteract); err2 != nil {
		msg.Die("Failed to get new packages: %s", err2)
	} else if count == 0 {
		// Everything requested was already present or ignored.
		msg.Warn("Nothing to do")
		return
	}

	// Fetch the new packages. Can't resolve versions via installer.Update if
	// get is called while the vendor/ directory is empty so we checkout
	// everything.
	err = installer.Checkout(conf, false)
	if err != nil {
		msg.Die("Failed to checkout packages: %s", err)
	}

	// Prior to resolving dependencies we need to start working with a clone
	// of the conf because we'll be making real changes to it.
	confcopy := conf.Clone()

	if !skipRecursive {
		// Get all repos and update them.
		// TODO: Can we streamline this in any way? The reason that we update all
		// of the dependencies is that we need to re-negotiate versions. For example,
		// if an existing dependency has the constraint >1.0 and this new package
		// adds the constraint <2.0, then this may re-resolve the existing dependency
		// to be between 1.0 and 2.0. But changing that dependency may then result
		// in that dependency's dependencies changing... so we sorta do the whole
		// thing to be safe.
		err = installer.Update(confcopy)
		if err != nil {
			msg.Die("Could not update packages: %s", err)
		}
	}

	// Set Reference
	if err := repo.SetReference(confcopy); err != nil {
		msg.Err("Failed to set references: %s", err)
	}

	// VendoredCleanup
	// When stripping VCS happens this will happen as well. No need for double
	// effort.
	if installer.UpdateVendored && !strip {
		repo.VendoredCleanup(confcopy)
	}

	// Write YAML
	if err := conf.WriteFile(glidefile); err != nil {
		msg.Die("Failed to write glide YAML file: %s", err)
	}
	if !skipRecursive {
		// Write lock
		if stripVendor {
			confcopy = godep.RemoveGodepSubpackages(confcopy)
		}
		writeLock(conf, confcopy, base)
	} else {
		msg.Warn("Skipping lockfile generation because full dependency tree is not being calculated")
	}

	if strip {
		msg.Info("Removing version control data from vendor directory...")
		gpath.StripVcs()
	}

	if stripVendor {
		msg.Info("Removing nested vendor and Godeps/_workspace directories...")
		err := gpath.StripVendor()
		if err != nil {
			msg.Err("Unable to strip vendor directories: %s", err)
		}
	}
}
// writeLock hashes the on-disk config and writes a lock file for the
// (resolved) confcopy imports into the project base directory.
// It aborts the process on any failure.
func writeLock(conf, confcopy *cfg.Config, base string) {
	digest, hashErr := conf.Hash()
	if hashErr != nil {
		msg.Die("Failed to generate config hash. Unable to generate lock file.")
	}
	lockfile := cfg.NewLockfile(confcopy.Imports, digest)
	target := filepath.Join(base, gpath.LockFile)
	if writeErr := lockfile.WriteFile(target); writeErr != nil {
		msg.Die("Failed to write glide lock file: %s", writeErr)
	}
}
// addPkgsToConfig adds the given packages to the config file.
//
// Along the way it:
// - ensures that this package is not in the ignore list
// - checks to see if this is already in the dependency list.
// - splits the version off of the package name and adds the version attribute
// - separates repo from packages
// - sets up insecure repo URLs where necessary
// - generates a list of subpackages
//
// It returns the number of packages (or subpackages) actually added.
func addPkgsToConfig(conf *cfg.Config, names []string, insecure, nonInteract bool) (int, error) {

	if len(names) == 1 {
		msg.Info("Preparing to install %d package.", len(names))
	} else {
		msg.Info("Preparing to install %d packages.", len(names))
	}
	numAdded := 0
	for _, name := range names {
		var version string
		// A "#" suffix pins the package to a specific version/reference.
		parts := strings.Split(name, "#")
		if len(parts) > 1 {
			name = parts[0]
			version = parts[1]
		}

		msg.Info("Attempting to get package %s", name)

		root, subpkg := util.NormalizeName(name)
		if len(root) == 0 {
			return 0, fmt.Errorf("Package name is required for %q.", name)
		}

		if conf.HasDependency(root) {
			// Check if the subpackage is present.
			if subpkg != "" {
				dep := conf.Imports.Get(root)
				if dep.HasSubpackage(subpkg) {
					msg.Warn("--> Package %q is already in glide.yaml. Skipping", name)
				} else {
					// Root import exists but the subpackage is new: record it.
					dep.Subpackages = append(dep.Subpackages, subpkg)
					msg.Info("--> Adding sub-package %s to existing import %s", subpkg, root)
					numAdded++
				}
			} else {
				msg.Warn("--> Package %q is already in glide.yaml. Skipping", root)
			}
			continue
		}

		if conf.HasIgnore(root) {
			msg.Warn("--> Package %q is set to be ignored in glide.yaml. Skipping", root)
			continue
		}

		dep := &cfg.Dependency{
			Name: root,
		}

		// When retrieving from an insecure location set the repo to the
		// insecure location.
		if insecure {
			dep.Repository = "http://" + root
		}

		if version != "" {
			dep.Reference = version
		} else if !nonInteract {
			// No explicit version given; ask the user interactively.
			getWizard(dep)
		}

		if len(subpkg) > 0 {
			dep.Subpackages = []string{subpkg}
		}

		if dep.Reference != "" {
			msg.Info("--> Adding %s to your configuration with the version %s", dep.Name, dep.Reference)
		} else {
			// BUG FIX: this format string previously contained a stray second
			// %s with no matching argument, printing "%!s(MISSING)".
			msg.Info("--> Adding %s to your configuration", dep.Name)
		}

		conf.Imports = append(conf.Imports, dep)
		numAdded++
	}
	return numAdded, nil
}
// getWizard interactively prompts the user for a version/range for dep.
//
// It gathers release information for the dependency (cached), offers the
// latest known version, and — when that version parses as semver — optionally
// widens the reference to a minor (^) or patch (~) range based on the
// user's answer.
func getWizard(dep *cfg.Dependency) {
	var remote string
	if dep.Repository != "" {
		remote = dep.Repository
	} else {
		remote = "https://" + dep.Name
	}

	// Lookup dependency info and store in cache.
	msg.Info("--> Gathering release information for %s", dep.Name)
	wizardFindVersions(dep)

	memlatest := cache.MemLatest(remote)
	if memlatest != "" {
		dres := wizardAskLatest(memlatest, dep)
		if dres {
			dep.Reference = memlatest

			sv, err := semver.NewVersion(dep.Reference)
			// Non-semver references are kept verbatim (err != nil branch).
			if err == nil {
				res := wizardAskRange(sv, dep)
				if res == "m" {
					// ^x.y.z: allow minor-version updates.
					dep.Reference = "^" + sv.String()
				} else if res == "p" {
					// ~x.y.z: allow patch-level updates only.
					dep.Reference = "~" + sv.String()
				}
			}
		}
	}
}
|
mit
|
cdnjs/cdnjs
|
ajax/libs/openplayerjs/2.2.4/esm/controls.js
|
11802
|
import Captions from './controls/captions';
import Fullscreen from './controls/fullscreen';
import Levels from './controls/levels';
import Play from './controls/play';
import Progress from './controls/progress';
import Settings from './controls/settings';
import Time from './controls/time';
import Volume from './controls/volume';
import { EVENT_OPTIONS, IS_ANDROID, IS_IOS } from './utils/constants';
import { addEvent } from './utils/events';
import { isAudio, isVideo, removeElement } from './utils/general';
/**
 * Control-bar manager for an OpenPlayer instance.
 *
 * Builds the DOM layers that hold the individual controls (play, progress,
 * volume, ...), wires auto-hide behavior for video elements, supports
 * user-supplied custom controls, and tears everything down in destroy().
 */
class Controls {
    /**
     * @param {object} player Owning player; queried for its media element,
     *     container, options and custom controls.
     */
    constructor(player) {
        // Handler registries keyed by event name, kept so the exact same
        // references can be removed again in destroy().
        this.events = {
            media: {},
            mouse: {},
        };
        // Auto-hide timeout id (0 = not running).
        this.timer = 0;
        // Map from capitalized control name to its constructor.
        this.controlEls = {
            Captions,
            Fullscreen,
            Levels,
            Play,
            Progress,
            Settings,
            Time,
            Volume,
        };
        this.player = player;
        this._setElements();
        return this;
    }
    /**
     * Builds the control DOM and attaches all media/mouse listeners.
     * Auto-hide behavior is only installed for video on non-mobile when the
     * `alwaysVisible` option is off.
     */
    create() {
        // Native browser controls are replaced by this control bar.
        this.player.getElement().controls = false;
        const isMediaVideo = isVideo(this.player.getElement());
        this._createControlsLayer();
        this._buildElements();
        // Rebuild everything from scratch when the control set changes.
        this.events.controlschanged = () => {
            this.destroy();
            this._setElements();
            this.create();
        };
        this.events.ended = () => {
            this.player.getContainer().classList.remove('op-controls--hidden');
        };
        this.player.getElement().addEventListener('controlschanged', this.events.controlschanged, EVENT_OPTIONS);
        this.player.getElement().addEventListener('ended', this.events.ended, EVENT_OPTIONS);
        const { alwaysVisible } = this.player.getOptions().controls;
        if (!alwaysVisible && !IS_ANDROID && !IS_IOS) {
            // Show controls on pointer entry while playing, then re-arm the
            // auto-hide timer.
            this.events.mouse.mouseenter = () => {
                if (isMediaVideo && !this.player.activeElement().paused) {
                    this._stopControlTimer();
                    if (this.player.activeElement().currentTime) {
                        this.player.playBtn.setAttribute('aria-hidden', this.player.isMedia() ? 'false' : 'true');
                        this.player.loader.setAttribute('aria-hidden', 'true');
                    }
                    else if (this.player.getOptions().showLoaderOnInit) {
                        this.player.playBtn.setAttribute('aria-hidden', 'true');
                        this.player.loader.setAttribute('aria-hidden', 'false');
                    }
                    this.player.getContainer().classList.remove('op-controls--hidden');
                    this._startControlTimer(2500);
                }
            };
            this.events.mouse.mousemove = () => {
                if (isMediaVideo) {
                    // Before playback starts, loader/play-button visibility
                    // depends on the showLoaderOnInit option.
                    if (this.player.activeElement().currentTime) {
                        this.player.loader.setAttribute('aria-hidden', 'true');
                        this.player.playBtn.setAttribute('aria-hidden', this.player.isMedia() ? 'false' : 'true');
                    }
                    else {
                        this.player.playBtn.setAttribute('aria-hidden', this.player.getOptions().showLoaderOnInit ? 'true' : 'false');
                        this.player.loader.setAttribute('aria-hidden', this.player.getOptions().showLoaderOnInit ? 'false' : 'true');
                    }
                    this.player.getContainer().classList.remove('op-controls--hidden');
                    this._startControlTimer(2500);
                }
            };
            this.events.mouse.mouseleave = () => {
                if (isMediaVideo && !this.player.activeElement().paused) {
                    this._startControlTimer(1000);
                }
            };
            this.events.media.play = () => {
                if (isMediaVideo) {
                    this._startControlTimer(this.player.getOptions().hidePlayBtnTimer);
                }
            };
            // Controls always stay visible while paused.
            this.events.media.pause = () => {
                this.player.getContainer().classList.remove('op-controls--hidden');
                this._stopControlTimer();
            };
            Object.keys(this.events.media).forEach(event => {
                this.player.getElement().addEventListener(event, this.events.media[event], EVENT_OPTIONS);
            });
            Object.keys(this.events.mouse).forEach(event => {
                this.player.getContainer().addEventListener(event, this.events.mouse[event], EVENT_OPTIONS);
            });
            this._startControlTimer(3000);
        }
    }
    /**
     * Removes every listener registered in create(), destroys each control
     * (or custom control) and removes the control-bar DOM node.
     */
    destroy() {
        if (!IS_ANDROID && !IS_IOS) {
            Object.keys(this.events.mouse).forEach(event => {
                this.player.getContainer().removeEventListener(event, this.events.mouse[event]);
            });
            Object.keys(this.events.media).forEach(event => {
                this.player.getElement().removeEventListener(event, this.events.media[event]);
            });
            this._stopControlTimer();
        }
        this.player.getElement().removeEventListener('controlschanged', this.events.controlschanged);
        this.player.getElement().removeEventListener('ended', this.events.ended);
        Object.keys(this.items).forEach((position) => {
            this.items[position].forEach((item) => {
                if (item.custom) {
                    this._destroyCustomControl(item);
                }
                else if (typeof item.destroy === 'function') {
                    item.destroy();
                }
            });
        });
        removeElement(this.controls);
    }
    /** @returns {HTMLElement} the root control-bar element. */
    getContainer() {
        return this.controls;
    }
    /**
     * Returns the layer element for the given name, falling back to the
     * control-bar root when that layer does not exist.
     */
    getLayer(layer) {
        return this.controls.querySelector(`.op-controls-layer__${layer}`) || this.controls;
    }
    // Lazily creates the root .op-controls element inside the player container.
    _createControlsLayer() {
        if (!this.controls) {
            this.controls = document.createElement('div');
            this.controls.className = 'op-controls';
            this.player.getContainer().appendChild(this.controls);
        }
    }
    /**
     * (Re)arms the auto-hide timer: after `time` ms the controls are hidden
     * and a 'controlshidden' event is dispatched on the media element.
     */
    _startControlTimer(time) {
        const el = this.player.activeElement();
        this._stopControlTimer();
        if (typeof window !== 'undefined') {
            this.timer = window.setTimeout(() => {
                // NOTE(review): `!el.paused || !el.ended` is true unless the
                // media is both paused AND ended; possibly `&&` was intended.
                // Left as-is to preserve released behavior.
                if ((!el.paused || !el.ended) && isVideo(this.player.getElement())) {
                    this.player.getContainer().classList.add('op-controls--hidden');
                    this.player.playBtn.setAttribute('aria-hidden', 'true');
                    this._stopControlTimer();
                    const event = addEvent('controlshidden');
                    this.player.getElement().dispatchEvent(event);
                }
            }, time);
        }
    }
    // Cancels a pending auto-hide, if any.
    _stopControlTimer() {
        if (this.timer !== 0) {
            clearTimeout(this.timer);
            this.timer = 0;
        }
    }
    /**
     * Translates the `controls.layers` option plus any registered custom
     * controls into `this.items`, a map of position -> control instances,
     * creating the layer DOM containers along the way.
     */
    _setElements() {
        const controls = this.player.getOptions().controls.layers;
        this.items = {
            'bottom-left': [],
            'bottom-middle': [],
            'bottom-right': [],
            'left': [],
            'main': [],
            'middle': [],
            'right': [],
            'top-left': [],
            'top-middle': [],
            'top-right': [],
        };
        const isVideoEl = isVideo(this.player.getElement());
        const isAudioEl = isAudio(this.player.getElement());
        const controlPositions = Object.keys(controls);
        // If any top-*/bottom-* layer is configured, plain positions go into
        // a dedicated "center" layer instead of the root.
        const layersExist = controlPositions.find(item => /^(top|bottom)/.test(item));
        this._createControlsLayer();
        controlPositions.forEach((position) => {
            const [layer, pos] = position.split('-');
            if (pos) {
                const className = `op-controls-layer__${layer}`;
                if (!this.controls.querySelector(`.${className}`)) {
                    const controlLayer = document.createElement('div');
                    controlLayer.className = className;
                    this.controls.appendChild(controlLayer);
                }
            }
            else if (layersExist) {
                const className = 'op-controls-layer__center';
                if (!this.controls.querySelector(`.${className}`)) {
                    const controlLayer = document.createElement('div');
                    controlLayer.className = className;
                    this.controls.appendChild(controlLayer);
                }
            }
            controls[position]
                .filter((v, i, a) => a.indexOf(v) === i)
                .forEach((el) => {
                    const currentLayer = layersExist && !pos ? 'center' : layer;
                    const className = `${el.charAt(0).toUpperCase()}${el.slice(1)}`;
                    const item = new this.controlEls[className](this.player, pos || layer, currentLayer);
                    if (el === 'settings') {
                        this.settings = item;
                    }
                    // Fullscreen makes no sense for audio-only elements.
                    if (isVideoEl || (el !== 'fullscreen' && isAudioEl)) {
                        this.items[position].push(item);
                    }
                });
        });
        this.player.getCustomControls().forEach(item => {
            const [layer, pos] = item.position.split('-');
            const currentLayer = layersExist && !pos ? 'center' : layer;
            item.layer = currentLayer;
            item.position = pos || layer;
            // Right-positioned custom controls are prepended so they appear
            // before the built-in right-side controls.
            if (item.position === 'right') {
                this.items[item.position].unshift(item);
            }
            else {
                this.items[item.position].push(item);
            }
        });
    }
    /**
     * Instantiates every control in `this.items`, then registers their
     * settings-menu entries, and finally announces 'controlschanged'.
     */
    _buildElements() {
        Object.keys(this.items).forEach((position) => {
            this.items[position].forEach((item) => {
                if (item.custom) {
                    this._createCustomControl(item);
                }
                else {
                    item.create();
                }
            });
        });
        Object.keys(this.items).forEach((position) => {
            this.items[position].forEach((item) => {
                // With detached menus, only the Settings control itself may
                // contribute entries to the settings menu.
                const allowDefault = !this.player.getOptions().detachMenus || item instanceof Settings;
                if (allowDefault && !item.custom && typeof item.addSettings === 'function') {
                    const menuItem = item.addSettings();
                    if (this.settings && Object.keys(menuItem).length) {
                        this.settings.addItem(menuItem.name, menuItem.key, menuItem.default, menuItem.subitems, menuItem.className);
                    }
                }
            });
        });
        const e = addEvent('controlschanged');
        this.controls.dispatchEvent(e);
    }
    /**
     * Renders a user-supplied control as a <button> (icon may be an image
     * URL or inline markup) and mounts it in its configured layer.
     */
    _createCustomControl(item) {
        const control = document.createElement('button');
        const icon = /\.(jpg|png|svg|gif)$/.test(item.icon) ? `<img src="${item.icon}">` : item.icon;
        control.className = `op-controls__${item.id} op-control__${item.position} ${item.showInAds ? '' : 'op-control__hide-in-ad'}`;
        control.tabIndex = 0;
        control.id = item.id;
        control.title = item.title;
        control.innerHTML = `${icon} <span class="op-sr">${item.title}</span>`;
        control.addEventListener('click', item.click, EVENT_OPTIONS);
        if (item.layer) {
            if (item.layer === 'main') {
                this.player.getContainer().appendChild(control);
            }
            else {
                this.getLayer(item.layer).appendChild(control);
            }
        }
    }
    /**
     * Unwires and removes a custom control.
     * NOTE(review): the lookup key is derived from `item.title` here but the
     * element class uses `item.id` in _createCustomControl — confirm ids and
     * titles match in practice; left as-is to preserve released behavior.
     */
    _destroyCustomControl(item) {
        const key = item.title.toLowerCase().replace(' ', '-');
        const control = this.getContainer().querySelector(`.op-controls__${key}`);
        if (control) {
            control.removeEventListener('click', item.click);
            removeElement(control);
        }
    }
}
export default Controls;
|
mit
|
cdnjs/cdnjs
|
ajax/libs/simple-icons/1.13.0/zerply.js
|
913
|
module.exports={title:'Zerply',slug:'zerply',svg:'<svg role="img" viewBox="0 0 24 24" xmlns="http://www.w3.org/2000/svg"><title>Zerply icon</title><path d="M20.779 18.746c-.747.714-1.562 1.017-2.543 1.017-1.32 0-3.322-.61-4.846-1.66-1.629-1.119-3.765-2.237-5.562-2.271 1.323-1.798 3.39-3.628 5.322-5.798.713-.78 4.983-5.7 5.73-6.586.54-.645.813-1.424.813-2.205 0-.3-.033-.585-.101-.855-2.035.405-3.561.601-6.001.601-2.677.015-4.607-.314-5.73-.989-.78 1.018-1.56 2.373-1.56 3.12 0 .948.918 1.728 3.189 1.728.746 0 1.965-.034 3.66-.169-3.492 4.5-6.949 8.16-9.016 10.47-.713.781-1.121 1.83-1.121 2.881 0 .405.075.81.18 1.185.645-.104 1.291-.179 1.965-.179 1.395 0 2.79.299 4.081.839C11.805 21.014 14.205 24 16.921 24c2.204 0 4.065-1.741 4.065-4.036 0-.404-.061-.825-.195-1.229l-.012.011z"/></svg>',get path(){return this.svg.match(/<path\s+d="([^"]*)/)[1];},source:'https://zerply.com/about/resources',hex:'9DBC7A'};
|
mit
|
cdnjs/cdnjs
|
ajax/libs/highcharts/8.0.0/es-modules/masters/modules/exporting.src.js
|
316
|
/**
* @license Highcharts JS v8.0.0 (2019-12-10)
* @module highcharts/modules/exporting
* @requires highcharts
*
* Exporting module
*
* (c) 2010-2019 Torstein Honsi
*
* License: www.highcharts.com/license
*/
'use strict';
import '../../modules/full-screen.src.js';
import '../../modules/exporting.src.js';
|
mit
|
cdnjs/cdnjs
|
ajax/libs/vue/3.0.0-beta.3/vue.runtime.esm-bundler.js
|
693
|
import { warn } from '@vue/runtime-dom';
export * from '@vue/runtime-dom';
// Dev-build banner: `process.env.NODE_ENV` is statically replaced by the
// bundler, so this whole branch is dropped from production bundles.
if ( (process.env.NODE_ENV !== 'production')) {
    console[console.info ? 'info' : 'log'](`You are running a development build of Vue.\n` +
        `Make sure to use the production build (*.prod.js) when deploying for production.`);
}
// This entry exports the runtime only, and is built as
// a runtime-only bundle: `compile` is a stub that warns in development.
const compile = () => {
    if ((process.env.NODE_ENV !== 'production')) {
        warn(`Runtime compilation is not supported in this build of Vue.` +
            ( ` Configure your bundler to alias "vue" to "vue/dist/vue.esm-bundler.js".`
            ) /* should not happen */);
    }
};
export { compile };
|
mit
|
cdnjs/cdnjs
|
ajax/libs/jquery.sumoselect/3.3.9/jquery.sumoselect.js
|
32062
|
/*!
* jquery.sumoselect
* http://hemantnegi.github.io/jquery.sumoselect
* 2016-12-12
*
* Copyright 2015 Hemant Negi
* Email : hemant.frnz@gmail.com
* Compressor http://refresh-sf.com/
*/
(function (factory) {
'use strict';
if (typeof define === 'function' && define.amd) {
define(['jquery'], factory);
} else if (typeof exports !== 'undefined') {
module.exports = factory(require('jquery'));
} else {
factory(jQuery);
}
})(function ($) {
'namespace sumo';
$.fn.SumoSelect = function (options) {
// This is the easiest way to have default options.
var defaultOptions = {
placeholder: 'Select Here', // Dont change it here.
csvDispCount: 3, // display no. of items in multiselect. 0 to display all.
captionFormat: '{0} Selected', // format of caption text. you can set your locale.
captionFormatAllSelected: '{0} all selected!', // format of caption text when all elements are selected. set null to use captionFormat. It will not work if there are disabled elements in select.
floatWidth: 400, // Screen width of device at which the list is rendered in floating popup fashion.
forceCustomRendering: false, // force the custom modal on all devices below floatWidth resolution.
nativeOnDevice: ['Android', 'BlackBerry', 'iPhone', 'iPad', 'iPod', 'Opera Mini', 'IEMobile', 'Silk'], //
outputAsCSV: false, // true to POST data as csv ( false for Html control array ie. default select )
csvSepChar: ',', // separation char in csv mode
okCancelInMulti: false, // display ok cancel buttons in desktop mode multiselect also.
isClickAwayOk: false, // for okCancelInMulti=true. sets whether click outside will trigger Ok or Cancel (default is cancel).
triggerChangeCombined: true, // im multi select mode whether to trigger change event on individual selection or combined selection.
selectAll: false, // to display select all button in multiselect mode.|| also select all will not be available on mobile devices.
search: false, // to display input for filtering content. selectAlltext will be input text placeholder
searchText: 'Search...', // placeholder for search input
searchFn: function (haystack, needle) { // search function
return haystack.toLowerCase().indexOf(needle.toLowerCase()) < 0;
},
noMatch: 'No matches for "{0}"',
prefix: '', // some prefix usually the field name. eg. '<b>Hello</b>'
locale: ['OK', 'Cancel', 'Select All'], // all text that is used. don't change the index.
up: false, // set true to open upside.
showTitle: true, // set to false to prevent title (tooltip) from appearing
max: null, // Maximum number of options selected (when multiple)
// eslint-disable-next-line no-unused-vars
renderLi: (li, _originalOption) => li // Custom <li> item renderer
};
var ret = this.each(function () {
var selObj = this; // the original select object.
if (this.sumo || !$(this).is('select')) return; //already initialized
var settings = $.extend({}, defaultOptions, options, $(this).data());
this.sumo = {
E: $(selObj), //the jquery object of original select element.
is_multi: $(selObj).attr('multiple'), //if its a multiple select
select: '',
caption: '',
placeholder: '',
optDiv: '',
CaptionCont: '',
ul: '',
is_floating: false,
is_opened: false,
//backdrop: '',
mob: false, // if to open device default select
Pstate: [],
lastUnselected: null,
selectedCount: 0,
createElems: function () {
var O = this;
const selectedIndex = O.E[0].selectedIndex;
O.E.wrap('<div class="SumoSelect" tabindex="0" role="button" aria-expanded="false">');
O.E[0].selectedIndex = selectedIndex; // Fix for IE resetting index to 0 when -1
O.select = O.E.parent();
O.caption = $('<span>');
O.CaptionCont = $(`<p class="CaptionCont SelectBox ${O.E.attr('class')}" ><label><i></i></label></p>`)
.attr('style', O.E.attr('style'))
.prepend(O.caption);
O.select.append(O.CaptionCont);
// default turn off if no multiselect
if (!O.is_multi) settings.okCancelInMulti = false;
if (O.E.attr('disabled'))
O.select.addClass('disabled').removeAttr('tabindex');
//if output as csv and is a multiselect.
if (settings.outputAsCSV && O.is_multi && O.E.attr('name')) {
//create a hidden field to store csv value.
O.select.append($('<input class="HEMANT123" type="hidden" />').attr('name', O.E.attr('name')).val(O.getSelStr()));
// so it can not post the original select.
O.E.removeAttr('name');
}
//break for mobile rendring.. if forceCustomRendering is false
if (O.isMobile() && !settings.forceCustomRendering) {
O.setNativeMobile();
return;
}
// if there is a name attr in select add a class to container div
if (O.E.attr('name')) O.select.addClass('sumo_' + O.E.attr('name').replace(/\[\]/, ''));
//hide original select
O.E.addClass('SumoUnder').attr('tabindex', '-1');
//## Creating the list...
O.optDiv = $('<div class="optWrapper ' + (settings.up ? 'up' : '') + '">');
//branch for floating list in low res devices.
O.floatingList();
//Creating the markup for the available options
O.ul = $('<ul class="options">');
O.optDiv.append(O.ul);
// Select all functionality
if (settings.selectAll && O.is_multi && !settings.max) O.SelAll();
// search functionality
if (settings.search) O.Search();
O.ul.append(O.prepItems(O.E.children()));
//if multiple then add the class multiple and add OK / CANCEL button
if (O.is_multi) O.multiSelelect();
O.select.append(O.optDiv);
O._handleMax();
O.basicEvents();
O.selAllState();
},
prepItems: function (opts, d) {
var lis = [], O = this;
$(opts).each(function (i, opt) { // parsing options to li
opt = $(opt);
lis.push(opt.is('optgroup') ?
$('<li class="group ' + (opt[0].disabled ? 'disabled' : '') + '"><label></label><ul></ul></li>')
.find('label')
.text(opt.attr('label'))
.end()
.find('ul')
.append(O.prepItems(opt.children(), opt[0].disabled))
.end()
:
O.createLi(opt, d)
);
});
return lis;
},
//## Creates a LI element from a given option and binds events to it
//## returns the jquery instance of li (not inserted in dom)
createLi: function (opt, d) {
var O = this;
if (!opt.attr('value')) opt.attr('value', opt.val());
var li = $('<li class="opt"><label>' + opt.html() + '</label></li>');
li.data('opt', opt); // store a direct reference to option.
opt.data('li', li); // store a direct reference to list item.
if (O.is_multi) li.prepend('<span><i></i></span>');
if (opt[0].disabled || d)
li = li.addClass('disabled');
O.onOptClick(li);
if (opt[0].selected) {
li.addClass('selected');
O.selectedCount++;
}
if (opt.attr('class'))
li.addClass(opt.attr('class'));
if (opt.attr('title'))
li.attr('title', opt.attr('title'));
return settings.renderLi(li, opt);
},
//## Returns the selected items as string in a Multiselect.
getSelStr: function () {
// get the pre selected items.
var sopt = [];
this.E.find('option:checked').each(function () { sopt.push($(this).val()); });
return sopt.join(settings.csvSepChar);
},
//## THOSE OK/CANCEL BUTTONS ON MULTIPLE SELECT.
multiSelelect: function () {
var O = this;
O.optDiv.addClass('multiple');
O.okbtn = $('<p tabindex="0" class="btnOk"></p>').click(function () {
//if combined change event is set.
O._okbtn();
O.hideOpts();
});
O.okbtn[0].innerText = settings.locale[0];
O.cancelBtn = $('<p tabindex="0" class="btnCancel"></p>').click(function () {
O._cnbtn();
O.hideOpts();
});
O.cancelBtn[0].innerText = settings.locale[1];
var btns = O.okbtn.add(O.cancelBtn);
O.optDiv.append($('<div class="MultiControls">').append(btns));
// handling keyboard navigation on ok cancel buttons.
btns.on('keydown.sumo', function (e) {
var el = $(this);
switch (e.which) {
case 32: // space
case 13: // enter
el.trigger('click');
break;
case 9: //tab
if (el.hasClass('btnOk')) return;
break;
case 27: // esc
O._cnbtn();
O.hideOpts();
return;
}
e.stopPropagation();
e.preventDefault();
});
},
_okbtn: function () {
var O = this, cg = 0;
//if combined change event is set.
if (settings.triggerChangeCombined) {
//check for a change in the selection.
if (O.E.find('option:checked').length !== O.Pstate.length) {
cg = 1;
}
else {
O.E.find('option').each(function (i, e) {
if (e.selected && O.Pstate.indexOf(i) < 0) cg = 1;
});
}
if (cg) {
O.callChange();
O.setText();
}
}
},
_cnbtn: function () {
var O = this;
//remove all selections
O.E.find('option:checked').each(function () { this.selected = false; });
O.optDiv.find('li.selected').removeClass('selected');
//restore selections from saved state.
for (var i = 0; i < O.Pstate.length; i++) {
O.E.find('option')[O.Pstate[i]].selected = true;
O.ul.find('li.opt').eq(O.Pstate[i]).addClass('selected');
}
O.selAllState();
},
_handleMax: function () {
// Disable options if max reached
if (settings.max) {
if (this.selectedCount >= +settings.max) {
this.optDiv.find('li.opt').not('.hidden').each(function (ix, e) {
if (!$(e).hasClass('selected')) {
$(e).addClass('temporary-disabled disabled');
}
});
} else {
// Enable options back
this.optDiv.find('li.opt').not('.hidden').each(function (ix, e) {
if ($(e).hasClass('temporary-disabled')) {
$(e).removeClass('temporary-disabled disabled');
}
});
}
}
},
SelAll: function () {
var O = this;
if (!O.is_multi) return;
O.selAll = $('<p class="select-all"><span><i></i></span><label></label></p>');
O.selAll.find('label')[0].innerText = settings.locale[2];
O.optDiv.addClass('selall');
O.selAll.on('click', function () {
O.selAll.toggleClass('selected');
O.toggSelAll(O.selAll.hasClass('selected'), 1);
//O.selAllState();
});
O.optDiv.prepend(O.selAll);
},
// search module (can be removed if not required.)
Search: function () {
var O = this,
cc = O.CaptionCont.addClass('search'),
P = $('<p class="no-match">'),
fn = (options.searchFn && typeof options.searchFn == 'function') ? options.searchFn : settings.searchFn;
O.ftxt = $('<input type="text" class="search-txt" value="" autocomplete="off">')
.on('click', function (e) {
e.stopPropagation();
});
O.ftxt.placeholder = settings.searchText;
cc.append(O.ftxt);
O.optDiv.children('ul').after(P);
O.ftxt.on('keyup.sumo', function () {
var hid = O.optDiv.find('ul.options li.opt').each(function (ix, e) {
var el = $(e),
opt = el.data('opt')[0];
opt.hidden = fn(el.text(), O.ftxt.val());
el.toggleClass('hidden', opt.hidden);
}).not('.hidden');
// Hide opt-groups with no options matched
O.optDiv[0].querySelectorAll('li.group').forEach(optGroup => {
if(optGroup.querySelector('li:not(.hidden)')){
optGroup.classList.remove('hidden');
}else{
optGroup.classList.add('hidden');
}
});
P.html(settings.noMatch.replace(/\{0\}/g, '<em></em>')).toggle(!hid.length);
P.find('em').text(O.ftxt.val());
O.selAllState();
});
},
// Syncs the tri-state "select all" header checkbox with the current
// option selection (all / none / partial).
selAllState: function () {
    var O = this;
    if (settings.selectAll && O.is_multi) {
        var sc = 0, vc = 0; // sc = selected count, vc = counted options
        // NOTE(review): '.hidden .disabled' is a *descendant* selector, so it
        // only skips .disabled nodes nested inside .hidden ones. It looks like
        // '.hidden, .disabled' (or just '.hidden') was intended — confirm.
        O.optDiv.find('li.opt').not('.hidden .disabled').each(function (ix, e) {
            if ($(e).hasClass('selected')) sc++;
            vc++;
        });
        //select all checkbox state change.
        if (sc === vc) O.selAll.removeClass('partial').addClass('selected'); // everything selected
        else if (sc === 0) O.selAll.removeClass('selected partial'); // nothing selected
        else O.selAll.addClass('partial');//.removeClass('selected');  // some selected
    }
},
// Opens the dropdown: fires sumo:opening/opened, focuses the search box
// (or the control), installs a document-level handler that closes the
// dropdown on outside clicks, fixes the popup height in floating
// (mobile) mode, and snapshots the selection for cancel support.
showOpts: function () {
    var O = this;
    if (O.E.attr('disabled')) return; // if select is disabled then return
    O.E.trigger('sumo:opening', O);
    O.is_opened = true;
    O.select.addClass('open').attr('aria-expanded', 'true');
    O.E.trigger('sumo:opened', O);
    if (O.ftxt) O.ftxt.focus();
    else O.select.focus();
    // hide options on click outside.
    $(document).on('click.sumo', function (e) {
        if (!O.select.is(e.target) // if the target of the click isn't the container...
            && O.select.has(e.target).length === 0) { // ... nor a descendant of the container
            if (!O.is_opened) return;
            O.hideOpts();
            // With ok/cancel buttons present, a click-away either commits or
            // reverts depending on settings.isClickAwayOk.
            if (settings.okCancelInMulti) {
                if (settings.isClickAwayOk)
                    O._okbtn();
                else
                    O._cnbtn();
            }
        }
    });
    if (O.is_floating) {
        var H = O.optDiv.children('ul').outerHeight() + 2; // +2 is clear fix
        if (O.is_multi) H = H + parseInt(O.optDiv.css('padding-bottom'));
        O.optDiv.css('height', H);
        $('body').addClass('sumoStopScroll'); // lock page scroll behind the popup
    }
    O.setPstate();
},
//maintain state when ok/cancel buttons are available storing the indexes.
setPstate: function () {
var O = this;
if (O.is_multi && (O.is_floating || settings.okCancelInMulti)) {
O.Pstate = [];
// assuming that find returns elements in tree order
O.E.find('option').each(function (i, e) { if (e.selected) O.Pstate.push(i); });
}
},
callChange: function () {
this.E.get().forEach(e => {
e.dispatchEvent(new Event('change'));
e.dispatchEvent(new Event('click'));
});
},
// Closes the dropdown: fires sumo:closing/closed, removes the
// outside-click handler, re-enables body scrolling, and resets the
// search filter so every option is visible next time.
hideOpts: function () {
    var O = this;
    if (O.is_opened) {
        O.E.trigger('sumo:closing', O);
        O.is_opened = false;
        O.select.removeClass('open').attr('aria-expanded', 'false').find('ul li.sel').removeClass('sel');
        O.E.trigger('sumo:closed', O);
        $(document).off('click.sumo');
        $('body').removeClass('sumoStopScroll');
        // clear the search box and re-run the filter so all options reappear
        if (settings.search) {
            O.ftxt.val('');
            O.ftxt.trigger('keyup.sumo');
        }
    }
},
// Marks the option that should carry keyboard focus when the dropdown
// opens (first visible option when searching, otherwise the currently
// selected one) and then opens the dropdown.
setOnOpen: function () {
    var O = this,
        li = O.optDiv.find('li.opt:not(.hidden)').eq(settings.search ? 0 : O.E[0].selectedIndex);
    if (li.hasClass('disabled')) {
        li = li.next(':not(disabled)'); // skip to the next enabled option
        if (!li.length) return; // nothing focusable
    }
    O.optDiv.find('li.sel').removeClass('sel');
    li.addClass('sel');
    O.showOpts();
},
// Keyboard navigation: moves the .sel highlight one enabled, visible
// option up (up=true) or down, scrolling the list so the highlight
// stays in view. Opens the dropdown if it is currently closed.
nav: function (up) {
    var O = this, c,
        s = O.ul.find('li.opt:not(.disabled):not(.hidden)'),
        sel = O.ul.find('li.opt.sel:not(.hidden)'),
        idx = s.index(sel);
    if (O.is_opened && sel.length) {
        if (up && idx > 0)
            c = s.eq(idx - 1);
        else if (!up && idx < s.length - 1 && idx > -1)
            c = s.eq(idx + 1);
        else return; // if no items before or after
        sel.removeClass('sel');
        sel = c.addClass('sel');
        // setting sel item to visible view.
        var ul = O.ul,
            st = ul.scrollTop(),
            t = sel.position().top + st;
        if (t >= st + ul.height() - sel.outerHeight())
            ul.scrollTop(t - ul.height() + sel.outerHeight()); // highlight fell below the viewport
        if (t < st)
            ul.scrollTop(t); // highlight rose above the viewport
    }
    else
        O.setOnOpen();
},
// Wires the core interactions: caption click toggles the dropdown,
// keyboard handling on the control (arrows / enter / esc / tab /
// ctrl+a), and window resize re-evaluates the floating layout.
basicEvents: function () {
    var O = this;
    O.CaptionCont.click(function (evt) {
        O.E.trigger('click');
        if (O.is_opened) O.hideOpts(); else O.showOpts();
        evt.stopPropagation();
    });
    O.select.on('keydown.sumo', function (e) {
        switch (e.which) {
            case 38: // up
                O.nav(true);
                break;
            case 40: // down
                O.nav(false);
                break;
            case 65: // shortcut ctrl + a to select all and ctrl + shift + a to unselect all.
                if (O.is_multi && !settings.max && e.ctrlKey) {
                    O.toggSelAll(!e.shiftKey, 1);
                    break;
                }
                else
                    return;
            case 32: // space
                if (settings.search && O.ftxt.is(e.target)) return; // let the user type spaces into the search box
                break;
            case 13: // enter
                if (O.is_opened)
                    O.optDiv.find('ul li.sel').trigger('click'); // activate the highlighted option
                else
                    O.setOnOpen();
                break;
            case 9: //tab
                if (!settings.okCancelInMulti)
                    O.hideOpts();
                return;
            case 27: // esc
                if (settings.okCancelInMulti) O._cnbtn(); // revert to the saved selection
                O.hideOpts();
                return;
            default:
                return; // exit this handler for other keys
        }
        e.preventDefault(); // prevent the default action (scroll / move caret)
    });
    $(window).on('resize.sumo', function () {
        O.floatingList();
    });
},
// Attaches the click behaviour to one option <li>: toggles its
// selection, keeps the selected counter and the max-cap in sync, and
// mirrors the state onto the underlying real <option>.
onOptClick: function (li) {
    var O = this;
    li.click(function () {
        var li = $(this);
        if (li.hasClass('disabled')) return;
        if (O.is_multi) {
            li.toggleClass('selected');
            li.data('opt')[0].selected = li.hasClass('selected'); // mirror onto the real <option>
            if (li.data('opt')[0].selected === false) {
                O.lastUnselected = li.data('opt')[0].textContent; // remembered for consumers of the widget
                O.selectedCount--;
            } else {
                O.selectedCount++;
            }
            if (settings.max) {
                O._handleMax(); // enforce the selection cap
            }
            O.selAllState();
        }
        else {
            li.parent().find('li.selected').removeClass('selected'); //if not multiselect then remove all selections from this list
            li.toggleClass('selected');
            li.data('opt')[0].selected = true;
            O.selectedCount = 1;
        }
        //branch for combined change event.
        // When triggerChangeCombined applies, the change fires once on OK
        // instead of per click; otherwise update caption + fire now.
        if (!(O.is_multi && settings.triggerChangeCombined && (O.is_floating || settings.okCancelInMulti))) {
            O.setText();
            O.callChange();
        }
        if (!O.is_multi) O.hideOpts(); //if its not a multiselect then hide on single select.
    });
},
// fixed some variables that were not explicitly typed (michc)
// Computes and renders the caption: a comma-separated list of selected
// labels, a "{n} selected" summary once csvDispCount is exceeded, or
// the placeholder when nothing is selected. Also mirrors the value into
// the hidden CSV field when present. Returns the text shown.
setText: function () {
    var O = this;
    O.placeholder = "";
    if (O.is_multi) {
        var sels = O.E.find(':checked').not(':disabled'); //selected options.
        if (settings.csvDispCount && sels.length > settings.csvDispCount) {
            // Too many to list individually: show a count summary.
            if (sels.length === O.E.find('option').length && settings.captionFormatAllSelected) {
                O.placeholder = settings.captionFormatAllSelected.replace(/\{0\}/g, sels.length);
            }
            else {
                O.placeholder = settings.captionFormat.replace(/\{0\}/g, sels.length);
            }
        }
        else {
            O.placeholder = sels.toArray().map(selected => selected.innerText).join(', ');
        }
    }
    else {
        O.placeholder = O.E.find(':checked').not(':disabled').text();
    }
    var is_placeholder = false;
    if (!O.placeholder) {
        is_placeholder = true;
        O.placeholder = O.E.attr('placeholder');
        if (!O.placeholder) //if placeholder is there then set it
            O.placeholder = O.E.find('option:disabled:checked').text(); // fall back to a disabled selected option's label
    }
    O.placeholder = O.placeholder ? (settings.prefix + ' ' + O.placeholder) : settings.placeholder;
    //set display text
    O.caption.text(O.placeholder);
    if (settings.showTitle) O.CaptionCont.attr('title', O.placeholder);
    //set the hidden field if post as csv is true.
    var csvField = O.select.find('input.HEMANT123');
    if (csvField.length) csvField.val(O.getSelStr());
    //add class placeholder if its a placeholder text.
    if (is_placeholder) O.caption.addClass('placeholder'); else O.caption.removeClass('placeholder');
    return O.placeholder;
},
isMobile: function () {
// Adapted from http://www.detectmobilebrowsers.com
var ua = navigator.userAgent || navigator.vendor || window.opera;
// Checks for iOs, Android, Blackberry, Opera Mini, and Windows mobile devices
for (var i = 0; i < settings.nativeOnDevice.length; i++) if (ua.toString().toLowerCase().indexOf(settings.nativeOnDevice[i].toLowerCase()) > 0) return settings.nativeOnDevice[i];
return false;
},
setNativeMobile: function () {
var O = this;
O.E.addClass('SelectClass');//.css('height', O.select.outerHeight());
O.mob = true;
O.E.change(function () {
O.setText();
});
},
floatingList: function () {
var O = this;
//called on init and also on resize.
//O.is_floating = true if window width is < specified float width
O.is_floating = $(window).width() <= settings.floatWidth;
//set class isFloating
O.optDiv.toggleClass('isFloating', O.is_floating);
//remove height if not floating
if (!O.is_floating) O.optDiv.css('height', '');
//toggle class according to okCancelInMulti flag only when it is not floating
O.optDiv.toggleClass('okCancelInMulti', settings.okCancelInMulti && !O.is_floating);
},
//HELPERS FOR OUTSIDERS
// validates range of given item operations
vRange: function (i) {
var O = this;
var opts = O.E.find('option');
if (opts.length <= i || i < 0) throw "index out of bounds";
return O;
},
//toggles selection on c as boolean.
// `i` may be a numeric index (range-checked via vRange) or an option
// *value*. Missing or disabled options are silently ignored; selecting
// respects settings.max, unselecting is always allowed.
toggSel: function (c, i) {
    var O = this;
    var opt;
    if (typeof (i) === "number") {
        O.vRange(i);
        opt = O.E.find('option')[i];
    }
    else {
        opt = O.E.find('option[value="' + i + '"]')[0] || 0; // lookup by value; 0 = not found
    }
    if (!opt || opt.disabled)
        return;
    if (opt.selected !== c) {
        // Allow the flip when: unselecting, or selecting below the cap,
        // or no cap is configured.
        if ((settings.max && !opt.selected && O.selectedCount < settings.max) || opt.selected || (!settings.max && !opt.selected)) {
            opt.selected = c;
            if (!O.mob) $(opt).data('li').toggleClass('selected', c); // sync the rendered <li>
            O.callChange();
            O.setPstate();
            O.setText();
            O.selAllState();
        }
    }
},
//toggles disabled on c as boolean.
// Disabling an option also clears its selection (both on the real
// <option> and the rendered <li>).
toggDis: function (c, i) {
    var O = this.vRange(i); // throws on out-of-range index
    O.E.find('option')[i].disabled = c;
    if (c) O.E.find('option')[i].selected = false;
    if (!O.mob) O.optDiv.find('ul.options li.opt').eq(i).toggleClass('disabled', c).removeClass('selected');
    O.setText();
},
// toggle disable/enable on complete select control
// val=true disables the widget, val=false enables it.
// NOTE(review): `O.enabled = val` stores the *disabled* flag under the
// name "enabled" (enable() passes false here) — looks inverted, but
// external code may read it as-is; confirm before changing.
toggSumo: function (val) {
    var O = this;
    O.enabled = val;
    O.select.toggleClass('disabled', val);
    if (val) {
        O.E.attr('disabled', 'disabled');
        O.select.removeAttr('tabindex'); // drop from the tab order while disabled
    }
    else {
        O.E.removeAttr('disabled');
        O.select.attr('tabindex', '0');
    }
    return O;
},
// toggles all option on c as boolean.
// set direct=false/0 bypasses okCancelInMulti behaviour.
toggSelAll: function (c, direct) {
    var O = this;
    // Detach the original select's handlers so the bulk update below does
    // not fire one change event per option; they are restored afterwards.
    var cloneOriginalEvents = $.extend(true, {}, $._data(O.E.get(0), "events")); // clone original select elements events
    O.E.off(); // unbind original select elements events because we do not want the following clicks to trigger change on it
    if(O.is_multi){
        // Select all
        if(!!c){
            O.E.find('option:not(:checked):not(:disabled):not(:hidden)').toArray().forEach(option => {
                if(!$(option).data('li').hasClass('hidden')){ // skip search-filtered options
                    option.selected = true;
                    $(option).data('li').toggleClass('selected', true);
                }
            });
        }else{
            // Unselect all
            O.E.find('option:checked:not(:disabled):not(:hidden)').toArray().forEach(option => {
                if(!$(option).data('li').hasClass('hidden')){
                    option.selected = false;
                    $(option).data('li').toggleClass('selected', false);
                }
            });
        }
    }else{
        // Single select: only "unselect all" is meaningful.
        if(!c) O.E[0].selectedIndex = -1;
        else console.warn('You called `SelectAll` on a non-multiple select');
    }
    // rebind original select elements events
    $.each(cloneOriginalEvents, function (_, e) {
        $.each(e, function (_, e) {
            O.E.on(e.type, e.handler);
        });
    });
    O.callChange(); // call change on original select element
    if (!direct) {
        // Update the header checkbox, caption, and saved selection state.
        if (!O.mob && O.selAll) O.selAll.removeClass('partial').toggleClass('selected', !!c);
        O.setText();
        O.setPstate();
    }
},
/* outside accessibility options
which can be accessed from the element instance.
*/
reload: function () {
var elm = this.unload();
return $(elm).SumoSelect(settings);
},
// Tears the widget down: restores the original <select> (and its name,
// if it had been moved onto the hidden CSV field), removes the
// generated markup, and returns the raw element wrapper.
unload: function () {
    var O = this;
    O.select.before(O.E);
    O.E.show();
    O.E[0].classList.remove('SumoUnder');
    if (settings.outputAsCSV && O.is_multi && O.select.find('input.HEMANT123').length) {
        O.E.attr('name', O.select.find('input.HEMANT123').attr('name')); // restore the name;
    }
    O.select.remove();
    delete selObj.sumo; // drop the instance reference from the element
    return selObj;
},
//## add a new option to select at a given index.
// `val` is required. `txt` defaults to `val`; passing a number as `txt`
// treats it as the index. `attr` is an optional attribute map.
// NOTE(review): negative indexes are not rejected; eq(-1) would insert
// relative to the last option — confirm whether that is intended.
add: function (val, txt, i, attr) {
    if (typeof val === "undefined") throw "No value to add";
    var O = this;
    var opts = O.E.find('option');
    if (typeof txt === "number") { i = txt; txt = val; }
    if (typeof txt === "undefined") { txt = val; }
    var opt = $("<option></option>").val(val).html(txt);
    if (attr && typeof attr == "object") {
        $.each(attr, function (i, v) {
            opt.attr(i, v);
        });
    }
    if (opts.length < i) throw "index out of bounds";
    if (typeof i === "undefined" || opts.length === i) { // add it to the last if given index is last no or no index provides.
        O.E.append(opt);
        if (!O.mob) O.ul.append(O.createLi(opt)); // mirror into the rendered list
    }
    else {
        opts.eq(i).before(opt);
        if (!O.mob) O.ul.find('li.opt').eq(i).before(O.createLi(opt));
    }
    return selObj;
},
//## removes an item at a given index.
// Removes both the real <option> and its rendered <li>, then refreshes
// the caption text.
remove: function (i) {
    var O = this.vRange(i); // throws on out-of-range index
    O.E.find('option').eq(i).remove();
    if (!O.mob) O.optDiv.find('ul.options li.opt').eq(i).remove();
    O.setText();
},
// removes all but the selected one
removeAll: function () {
var O = this;
var options = O.E.find('option');
for (var x = (options.length - 1); x >= 0; x--) {
if (options[x].selected !== true) {
O.remove(x);
}
}
},
find: function (val) {
var O = this;
var options = O.E.find('option');
for (var x in options) {
if (options[x].value === val) {
return parseInt(x);
}
}
return -1;
},
//## Select an item at a given index.
selectItem: function (i) { this.toggSel(true, i); },
//## UnSelect an item at a given index.
unSelectItem: function (i) { this.toggSel(false, i); },
//## Select all items of the select.
selectAll: function () { this.toggSelAll(true); },
//## UnSelect all items of the select.
unSelectAll: function () { this.toggSelAll(false); },
//## Disable an item at a given index.
disableItem: function (i) { this.toggDis(true, i); },
//## Removes the disabled state from an item at a given index.
enableItem: function (i) { this.toggDis(false, i); },
//## New simple methods as getter and setter are not working fine in ie8-
//## variable to check state of control if enabled or disabled.
enabled: true,
//## Enables the control
enable: function () { return this.toggSumo(false); },
//## Disables the control
disable: function () { return this.toggSumo(true); },
init: function () {
var O = this;
O.createElems();
O.setText();
return O;
}
};
selObj.sumo.init();
});
return ret.length === 1 ? ret[0] : ret;
};
});
|
mit
|
cdnjs/cdnjs
|
ajax/libs/deepstream.io-client-js/2.3.4/lib/constants/constants.js
|
3937
|
'use strict';
exports.CONNECTION_STATE = {};
exports.CONNECTION_STATE.CLOSED = 'CLOSED';
exports.CONNECTION_STATE.AWAITING_CONNECTION = 'AWAITING_CONNECTION';
exports.CONNECTION_STATE.CHALLENGING = 'CHALLENGING';
exports.CONNECTION_STATE.AWAITING_AUTHENTICATION = 'AWAITING_AUTHENTICATION';
exports.CONNECTION_STATE.AUTHENTICATING = 'AUTHENTICATING';
exports.CONNECTION_STATE.OPEN = 'OPEN';
exports.CONNECTION_STATE.ERROR = 'ERROR';
exports.CONNECTION_STATE.RECONNECTING = 'RECONNECTING';
exports.MESSAGE_SEPERATOR = String.fromCharCode(30); // ASCII Record Seperator 1E
exports.MESSAGE_PART_SEPERATOR = String.fromCharCode(31); // ASCII Unit Separator 1F
exports.TYPES = {};
exports.TYPES.STRING = 'S';
exports.TYPES.OBJECT = 'O';
exports.TYPES.NUMBER = 'N';
exports.TYPES.NULL = 'L';
exports.TYPES.TRUE = 'T';
exports.TYPES.FALSE = 'F';
exports.TYPES.UNDEFINED = 'U';
exports.TOPIC = {};
exports.TOPIC.CONNECTION = 'C';
exports.TOPIC.AUTH = 'A';
exports.TOPIC.ERROR = 'X';
exports.TOPIC.EVENT = 'E';
exports.TOPIC.RECORD = 'R';
exports.TOPIC.RPC = 'P';
exports.TOPIC.PRESENCE = 'U';
exports.TOPIC.PRIVATE = 'PRIVATE/';
exports.EVENT = {};
exports.EVENT.CONNECTION_ERROR = 'connectionError';
exports.EVENT.CONNECTION_STATE_CHANGED = 'connectionStateChanged';
exports.EVENT.MAX_RECONNECTION_ATTEMPTS_REACHED = 'MAX_RECONNECTION_ATTEMPTS_REACHED';
exports.EVENT.CONNECTION_AUTHENTICATION_TIMEOUT = 'CONNECTION_AUTHENTICATION_TIMEOUT';
exports.EVENT.ACK_TIMEOUT = 'ACK_TIMEOUT';
exports.EVENT.NO_RPC_PROVIDER = 'NO_RPC_PROVIDER';
exports.EVENT.RESPONSE_TIMEOUT = 'RESPONSE_TIMEOUT';
exports.EVENT.DELETE_TIMEOUT = 'DELETE_TIMEOUT';
exports.EVENT.UNSOLICITED_MESSAGE = 'UNSOLICITED_MESSAGE';
exports.EVENT.MESSAGE_DENIED = 'MESSAGE_DENIED';
exports.EVENT.MESSAGE_PARSE_ERROR = 'MESSAGE_PARSE_ERROR';
exports.EVENT.VERSION_EXISTS = 'VERSION_EXISTS';
exports.EVENT.NOT_AUTHENTICATED = 'NOT_AUTHENTICATED';
exports.EVENT.MESSAGE_PERMISSION_ERROR = 'MESSAGE_PERMISSION_ERROR';
exports.EVENT.LISTENER_EXISTS = 'LISTENER_EXISTS';
exports.EVENT.NOT_LISTENING = 'NOT_LISTENING';
exports.EVENT.TOO_MANY_AUTH_ATTEMPTS = 'TOO_MANY_AUTH_ATTEMPTS';
exports.EVENT.INVALID_AUTH_MSG = 'INVALID_AUTH_MSG';
exports.EVENT.IS_CLOSED = 'IS_CLOSED';
exports.EVENT.RECORD_NOT_FOUND = 'RECORD_NOT_FOUND';
exports.EVENT.NOT_SUBSCRIBED = 'NOT_SUBSCRIBED';
exports.ACTIONS = {};
exports.ACTIONS.PING = 'PI';
exports.ACTIONS.PONG = 'PO';
exports.ACTIONS.ACK = 'A';
exports.ACTIONS.REDIRECT = 'RED';
exports.ACTIONS.CHALLENGE = 'CH';
exports.ACTIONS.CHALLENGE_RESPONSE = 'CHR';
exports.ACTIONS.READ = 'R';
exports.ACTIONS.CREATE = 'C';
exports.ACTIONS.UPDATE = 'U';
exports.ACTIONS.PATCH = 'P';
exports.ACTIONS.DELETE = 'D';
exports.ACTIONS.SUBSCRIBE = 'S';
exports.ACTIONS.UNSUBSCRIBE = 'US';
exports.ACTIONS.HAS = 'H';
exports.ACTIONS.HEAD = 'HD';
exports.ACTIONS.SNAPSHOT = 'SN';
exports.ACTIONS.INVOKE = 'I';
exports.ACTIONS.SUBSCRIPTION_FOR_PATTERN_FOUND = 'SP';
exports.ACTIONS.SUBSCRIPTION_FOR_PATTERN_REMOVED = 'SR';
exports.ACTIONS.SUBSCRIPTION_HAS_PROVIDER = 'SH';
exports.ACTIONS.LISTEN = 'L';
exports.ACTIONS.UNLISTEN = 'UL';
exports.ACTIONS.LISTEN_ACCEPT = 'LA';
exports.ACTIONS.LISTEN_REJECT = 'LR';
exports.ACTIONS.PROVIDER_UPDATE = 'PU';
exports.ACTIONS.QUERY = 'Q';
exports.ACTIONS.CREATEORREAD = 'CR';
exports.ACTIONS.CREATEANDUPDATE = 'CU';
exports.ACTIONS.EVENT = 'EVT';
exports.ACTIONS.ERROR = 'E';
exports.ACTIONS.REQUEST = 'REQ';
exports.ACTIONS.RESPONSE = 'RES';
exports.ACTIONS.REJECTION = 'REJ';
exports.ACTIONS.PRESENCE_JOIN = 'PNJ';
exports.ACTIONS.PRESENCE_LEAVE = 'PNL';
exports.ACTIONS.WRITE_ACKNOWLEDGEMENT = 'WA';
exports.CALL_STATE = {};
exports.CALL_STATE.INITIAL = 'INITIAL';
exports.CALL_STATE.CONNECTING = 'CONNECTING';
exports.CALL_STATE.ESTABLISHED = 'ESTABLISHED';
exports.CALL_STATE.ACCEPTED = 'ACCEPTED';
exports.CALL_STATE.DECLINED = 'DECLINED';
exports.CALL_STATE.ENDED = 'ENDED';
exports.CALL_STATE.ERROR = 'ERROR';
|
mit
|
dfdx2/ancfinder
|
ancfindersite/static/jquery-utils.js
|
7428
|
// Some of my own utilities.
// Modernizing placeholder="...".
//
// Display a default value in text fields with a "default" class until
// user focuses the field, at which point the field is cleared and
// the "default" class is removed. If the user leaves the field and
// it's empty, the default text is replaced.
//
// If value is null then it works differently: the existing text in the
// field is taken to be its existing/default value. The "default" class
// is applied. When the field takes focus, the text is left unchanged
// so the user can edit the existing value but the default class is
// removed. When the user leaves the field, if it has the same value
// as its original value the default class is put back. So, the user
// can see if he has made a change.
jQuery.fn.input_default = function(value) {
    return this.each(function(){
        var default_value = value;
        var clear_on_focus = true; // placeholder mode vs. existing-value mode
        if (!default_value) {
            // If no value is specified, the default is whatever is currently
            // set in field but we don't do a clear-on-focus.
            default_value = jQuery(this).val();
            jQuery(this).addClass("default");
            clear_on_focus = false;
        } else if (jQuery(this).val() == "" || jQuery(this).val() == default_value) {
            // Otherwise, if the field is empty, replace it with the default.
            // If the field already has the default text (e.g. navigating back to
            // the page), make sure it has the default class.
            jQuery(this).val(default_value);
            jQuery(this).addClass("default");
        }
        // On focus: in placeholder mode, clear the default text; always
        // drop the "default" styling while the user edits.
        jQuery(this).focus(function() {
            if (jQuery(this).val() == default_value && clear_on_focus)
                jQuery(this).val("");
            jQuery(this).removeClass("default");
        });
        // On blur: restore the default text (placeholder mode) or the
        // "default" class (existing-value mode, when unchanged).
        jQuery(this).blur(function() {
            if (clear_on_focus) {
                if (jQuery(this).val() == "") {
                    jQuery(this).val(default_value);
                    jQuery(this).addClass("default");
                }
            } else {
                if (jQuery(this).val() == default_value) {
                    jQuery(this).addClass("default");
                }
            }
        });
    });
};
// Blanks every form control that is still showing its default
// (placeholder-style) text before the form is submitted.
function clear_default_fields(form) {
    $(form.elements).each(function () {
        if ($(this).hasClass('default')) $(this).val('');
    });
}
// This provides a delayed keyup event that fires once
// even if there are multiple keyup events between the
// first and the time the event handler is called.
jQuery.fn.keyup_delayed = function(callback, delay) {
    if (!delay) delay = 500; // default debounce window in milliseconds
    return this.each(function(){
        var last_press = null; // timestamp of the most recent keyup
        jQuery(this).keyup(function() {
            last_press = (new Date()).getTime();
            jQuery(this).delay(delay);
            // After the delay, fire only if no newer keyup reset the clock
            // (the *.75 factor tolerates timer jitter); then mark as handled.
            jQuery(this).queue(function(next) { if (last_press != null && ((new Date()).getTime() - last_press > delay*.75)) { callback(); last_press = null; } next(); } );
        });
    });
};
// This provides a callback for the enter keypress.
// Invokes `callback` whenever the Enter key is pressed inside any of
// the matched elements.
jQuery.fn.keydown_enter = function(callback) {
    return this.each(function() {
        jQuery(this).keydown(function(e) {
            if (e.keyCode == 13) callback();
        });
    });
};
// Tabs that work via the window hash. Call this method over a node set
// of <a href="#tabname"> elements, and have corresponding <div id="tabname">
// elements. Requires jquery.ba-bbq.min.js.
jQuery.fn.tabs = function(panes, subordinate_to) {
    // Resolve the '#tabname' fragment from a tab link.
    function get_href(elem) {
        // In IE7, getAttribute('href') always returns an absolute URL
        // even if that's not what is specified in the HTML source. Doh.
        var href = elem.getAttribute('href');
        var h = href.indexOf('#');
        if (h > 0) href = href.substring(h);
        return href;
    }
    var tabs = this;
    var default_tab = get_href(tabs[0]); // first tab is the fallback
    panes = $(panes);
    // make a list of valid hrefs
    var tab_links = { };
    tabs.each(function() { tab_links[get_href(this)] = 1; });
    // Nested tab groups are namespaced as '#parent/child'.
    if (subordinate_to)
        subordinate_to += "/";
    else
        subordinate_to = "";
    var current_tab = null;
    // What happens when the page hash changes?
    function activate_tab(is_initial) {
        var p = location.hash;
        // for top-level tabs, act on only the top part of the tab structure
        if (subordinate_to == "") p = p.replace(/\/.*/, "");
        if (!(p in tab_links)) p = default_tab;
        // the event fires twice?
        if (p == current_tab) return;
        current_tab = p;
        // get the height of the current tab
        var cur_height = panes.filter(":visible").height();
        // activate the new tab pane
        panes.each(function() {
            if ("#" + subordinate_to + this.getAttribute('id') == p || "#" + this.getAttribute('tab') == p) {
                // Show it immediately if this is on page load, otherwise fade it in fast.
                if (is_initial) $(this).show(); else $(this).fadeIn("fast");
                // Set a min-height so that the window height doesn't go down,
                // which can cause the page to scroll up and confuse the user.
                if (cur_height) $(this).css({ "min-height": cur_height + "px" });
                if (!is_initial) {
                    // Scroll to the tab if we are far away.
                    if ( (tabs.offset().top > $(window).scrollTop() + $(window).height()/3)
                        || (tabs.offset().top < $(window).scrollTop())
                        ) {
                        $("html, body").animate({ scrollTop: tabs.offset().top - $(window).height()/7 });
                    }
                }
            }
        });
        // hide the old tab pane
        // Do this after showing the new pane to prevent the window
        // height from decreasing (because no tabs are shown) which
        // could cause the page to scroll up, which would confuse the user.
        panes.each(function() {
            if (!("#" + subordinate_to + this.getAttribute('id') == p || "#" + this.getAttribute('tab') == p)) {
                $(this).hide();
            }
        });
        // set the link to .active
        tabs.removeClass('active');
        tabs.each(function() {
            if (get_href(this) == p)
                $(this).addClass('active');
        });
    }
    // On first load, load the tab corresponding to the page hash.
    activate_tab(true);
    // Register the hash change handler.
    $(window).on("hashchange", function() { activate_tab(false); });
};
// Smart ellipsis.
//
// Truncate text with an ellipsis so that it fits exactly within its
// max-width/max-height CSS properties. Only works on elements that
// contain only text and no child elements.
//
// Also, works well in Chrome but not quite right in FF/IE, although
// the result in presentable.
jQuery.fn.truncate_text = function(callback) {
    var elem = $(this);
    // elem's width/height are equal to its max-width/height. Wrap
    // elem in a new div with those dimensions, and remove the
    // max-width/height from elem.
    var w = elem.width();
    var h = elem.height();
    elem.css({ "max-width": "", "max-height": "", "overflow": "" });
    var remaining = elem.text(); // text still displayed
    var chopped = null;          // text removed so far (null until first cut)
    function do_cut() {
        // Cut words from elem until it fits, or no text is left.
        while (elem.height() > h || elem.width() > w) {
            var idx = remaining.lastIndexOf(" ");
            if (idx <= 0) break; // single word left: stop
            if (chopped == null) chopped = "";
            chopped = remaining.substring(idx) + chopped;
            remaining = remaining.substring(0, idx);
            elem.text(remaining + " ...");
        }
        // Report the kept and removed portions to the caller.
        if (callback)
            callback(remaining, chopped);
    }
    do_cut();
    // In FF and IE, the dimensions of the element may change. Perhaps
    // this is due to font loading. So we should repeat once the document
    // is loaded. We should do the ellipsis early to get things layed out
    // as early as possible.
    var w1 = elem.width();
    var h1 = elem.height();
    $(function() {
        // have the dimensions changed?
        if (elem.width() != w1 || elem.height() != h1) {
            // reset text
            elem.text(remaining + (chopped ? chopped : ""));
            // re-do ellipsis
            do_cut();
        }
    });
}
|
cc0-1.0
|
BackFront/BT_EstruturaDados
|
src/aulaRecursividade/Pessoa.java
|
320
|
package aulaRecursividade;
/**
 * Simple value holder for a person's name and height.
 */
public class Pessoa {

	// Fields renamed to Java camelCase convention (were capitalized
	// "Nome"/"Altura"); private, so the change is interface-safe.
	private String nome;
	private double altura;

	/** @return the person's name */
	public String getNome() {
		return nome;
	}

	/** @param nome the person's name */
	public void setNome(String nome) {
		this.nome = nome;
	}

	/** @return the person's height (unit not specified by callers — presumably metres) */
	public double getAltura() {
		return altura;
	}

	/** @param altura the person's height */
	public void setAltura(double altura) {
		this.altura = altura;
	}
}
|
cc0-1.0
|
jacksongomesbr/livro-desenvolvimento-software-web
|
angularjs/app-click/app.js
|
276
|
(function(){
    // Minimal AngularJS demo app: a controller exposing a city list and
    // a function to append a new city from the view.
    angular.module('appsimples', [])
        .controller('HomeController', function($scope) {
            // Initial list rendered by the template.
            $scope.cidades = ['Araguaína', 'Gurupi', 'Palmas', 'Paraíso', 'Porto Nacional'];
            // Bound to a click in the template; pushes `cidade` onto the list.
            $scope.salvar = function(cidade) {
                $scope.cidades.push(cidade);
            }
        });
})();
|
cc0-1.0
|
mtov/Truck-Factor
|
gittruckfactor/src/aserg/gtf/util/Alias.java
|
2894
|
package aserg.gtf.util;
import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import com.google.common.io.LineReader;
public class Alias {
private String repository;
private String dev1;
private String dev2;
public Alias(String repository, String dev1, String dev2) {
super();
this.repository = repository;
this.dev1 = dev1;
this.dev2 = dev2;
}
public static List<Alias> getAliasFromFile(String fileName) throws IOException{
List<Alias> fileAliases = new ArrayList<Alias>();
BufferedReader br = new BufferedReader(new FileReader(fileName));
LineReader lineReader = new LineReader(br);
String sCurrentLine;
String[] values;
int countcfs = 0;
while ((sCurrentLine = lineReader.readLine()) != null) {
values = sCurrentLine.split(";");
if (values.length<3)
System.err.println("Erro na linha " + countcfs);
String rep = values[0];
String dev1 = values[1];
String dev2 = values[2];
fileAliases.add(new Alias(rep, dev1, dev2));
countcfs++;
}
return fileAliases;
}
public static boolean isAlias(String repository, String dev1, String dev2){
if (notAliases == null)
try {
notAliases = readFile("notalias.txt");
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
Alias newNot = new Alias(repository, dev1, dev2);
for (int i = 0; i < notAliases.length; i++) {
if (notAliases[i].equals(newNot))
return false;
}
return true;
}
@Override
public boolean equals(Object obj) {
Alias other = (Alias)obj;
if(this.repository.equals(other.repository) && this.dev1.equals(other.dev1) && this.dev2.equals(other.dev2))
return true;
return false;
}
private static Alias[] readFile(String fileName) throws IOException{
List<Alias> fileAliases = new ArrayList<Alias>();
BufferedReader br = new BufferedReader(new FileReader(fileName));
LineReader lineReader = new LineReader(br);
String sCurrentLine;
String[] values;
int countcfs = 0;
while ((sCurrentLine = lineReader.readLine()) != null) {
values = sCurrentLine.split(";");
if (values.length<3)
System.err.println("Erro na linha " + countcfs);
String rep = values[0];
String dev1 = values[1];
String dev2 = values[2];
fileAliases.add(new Alias(rep, dev1, dev2));
countcfs++;
}
return fileAliases.toArray(new Alias[0]);
}
public String getRepository() {
return repository;
}
public String getDev1() {
return dev1;
}
public String getDev2() {
return dev2;
}
private static Alias[] notAliases = null;
// {new NotAlias("rails/rails","Nick", "rick"), new NotAlias("rails/rails","Nick", "Nico"),
// new NotAlias("ruby/ruby","kou", "knu"), new NotAlias("ruby/ruby","kou", "ko1"),new NotAlias("ruby/ruby","nahi", "nari"),
// new NotAlias("ruby/ruby","eban", "evan")};
}
|
cc0-1.0
|
sazgin/elexis-3-core
|
ch.rgw.utility/src/ch/rgw/compress/CompEx.java
|
5817
|
/*******************************************************************************
* Copyright (c) 2005-2011, G. Weirich and Elexis
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* G. Weirich - initial implementation
*
* TODO To change the template for this generated file go to
* Window - Preferences - Java - Code Style - Code Templates
*******************************************************************************/
package ch.rgw.compress;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
import java.util.zip.ZipOutputStream;
import org.apache.commons.compress.bzip2.CBZip2InputStream;
import org.apache.commons.compress.bzip2.CBZip2OutputStream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import ch.rgw.tools.BinConverter;
import ch.rgw.tools.ExHandler;
import ch.rgw.tools.StringTool;
/**
* Compressor/Expander
*/
public class CompEx {
public static final Logger log = LoggerFactory.getLogger(CompEx.class);
public static final int NONE = 0;
public static final int GLZ = 1 << 29;
public static final int RLL = 2 << 29;
public static final int HUFF = 3 << 29;
public static final int BZIP2 = 4 << 29;
public static final int ZIP = 5 << 29;
public static final byte[] Compress(String in, int mode){
if (StringTool.isNothing(in)) {
return null;
}
try {
return Compress(in.getBytes(StringTool.getDefaultCharset()), mode);
} catch (Exception ex) {
ExHandler.handle(ex);
return null;
}
}
public static final byte[] Compress(byte[] in, int mode){
if (in == null) {
return null;
}
ByteArrayInputStream bais = new ByteArrayInputStream(in);
return Compress(bais, mode);
}
public static final byte[] Compress(InputStream in, int mode){
try {
switch (mode) {
case GLZ:
return CompressGLZ(in);
case BZIP2:
return CompressBZ2(in);
case ZIP:
return CompressZIP(in);
// case HUFF: return CompressHuff(in);
}
} catch (Exception ex) {
ExHandler.handle(ex);
}
return null;
}
public static byte[] CompressGLZ(InputStream in) throws IOException{
ByteArrayOutputStream baos = new ByteArrayOutputStream();
byte[] buf = new byte[4];
// BinConverter.intToByteArray(0,buf,0);
baos.write(buf);
GLZ glz = new GLZ();
long total = glz.compress(in, baos);
byte[] ret = baos.toByteArray();
total &= 0x1fffffff;
total |= GLZ;
BinConverter.intToByteArray((int) total, ret, 0);
return ret;
}
public static byte[] CompressBZ2(InputStream in) throws Exception{
ByteArrayOutputStream baos = new ByteArrayOutputStream();
byte[] buf = new byte[8192];
baos.write(buf, 0, 4); // Länge des Originalstroms
CBZip2OutputStream bzo = new CBZip2OutputStream(baos);
int l;
int total = 0;
;
while ((l = in.read(buf, 0, buf.length)) != -1) {
bzo.write(buf, 0, l);
total += l;
}
bzo.close();
byte[] ret = baos.toByteArray();
// Die höchstwertigen 3 Bit als Typmarker setzen
total &= 0x1fffffff;
total |= BZIP2;
BinConverter.intToByteArray(total, ret, 0);
return ret;
}
/**
 * Compresses an input stream with the ZIP (deflate) algorithm, storing
 * the payload in a single archive entry named "Data".
 * The first four bytes of the result hold the length of the original
 * stream, with the three most significant bits used as the ZIP type
 * marker.
 *
 * @param in stream to compress
 * @return compressed bytes prefixed with the marked length word
 * @throws Exception on I/O failure
 */
public static byte[] CompressZIP(InputStream in) throws Exception{
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    byte[] buf = new byte[8192];
    baos.write(buf, 0, 4); // placeholder for the original stream length
    ZipOutputStream zo = new ZipOutputStream(baos);
    zo.putNextEntry(new ZipEntry("Data"));
    int l;
    long total = 0;
    while ((l = in.read(buf, 0, buf.length)) != -1) {
        zo.write(buf, 0, l);
        total += l;
    }
    zo.close();
    byte[] ret = baos.toByteArray();
    // set the three most significant bits as the type marker
    total &= 0x1fffffff;
    total |= ZIP;
    BinConverter.intToByteArray((int) total, ret, 0);
    return ret;
}
/**
 * Expands a previously compressed byte array.
 *
 * @param in compressed bytes (length word plus payload); may be null
 * @return the expanded bytes, or null if the input was null
 */
public static byte[] expand(byte[] in){
    return (in == null) ? null : expand(new ByteArrayInputStream(in));
}
/**
 * Expands a stream that was produced by one of the Compress methods.
 * The first four bytes hold the original length; the three most
 * significant bits of that word select the algorithm (GLZ, BZIP2,
 * HUFF or ZIP).
 *
 * @param in stream positioned at the four-byte length/marker word
 * @return the expanded bytes, or null on error or empty input
 */
public static byte[] expand(InputStream in){
    ByteArrayOutputStream baos;
    byte[] siz = new byte[4];
    try {
        int read = in.read(siz);
        // if we can not read the first 4 bytes, we can not expand
        // NOTE(review): a partial read (1-3 bytes) is not detected here — confirm callers always supply full headers
        if (read == -1) {
            return null;
        }
        long size = BinConverter.byteArrayToInt(siz, 0);
        // split the marker bits (top three) from the payload length (low 29 bits)
        long typ = size & ~0x1fffffff;
        size &= 0x1fffffff;
        // more than 100 MB: refuse to allocate and return a readable error text instead
        if (size > 100000000) {
            log.warn("Given InputStream exceeds 100 MB please check DB");
            String empty = "... Text nicht lesbar. \nBitte Datenbankeintrag prüfen!";
            return empty.getBytes();
        }
        byte[] ret = new byte[(int) size];
        switch ((int) typ) {
        case BZIP2:
            CBZip2InputStream bzi = new CBZip2InputStream(in);
            // loop because a single read may return fewer bytes than requested
            int off = 0;
            int l = 0;
            while ((l = bzi.read(ret, off, ret.length - off)) > 0) {
                off += l;
            }
            bzi.close();
            in.close();
            return ret;
        case GLZ:
            GLZ glz = new GLZ();
            baos = new ByteArrayOutputStream();
            glz.expand(in, baos);
            return baos.toByteArray();
        case HUFF:
            // off and l are reused here: switch cases share one scope in Java
            HuffmanInputStream hin = new HuffmanInputStream(in);
            off = 0;
            l = 0;
            while ((l = hin.read(ret, off, ret.length - off)) > 0) {
                off += l;
            }
            hin.close();
            return ret;
        case ZIP:
            ZipInputStream zi = new ZipInputStream(in);
            // the payload lives in the single entry written by CompressZIP
            zi.getNextEntry();
            off = 0;
            l = 0;
            while ((l = zi.read(ret, off, ret.length - off)) > 0) {
                off += l;
            }
            zi.close();
            return ret;
        default:
            throw new Exception("Invalid compress format");
        }
    } catch (Exception ex) {
        ExHandler.handle(ex);
        return null;
    }
}
}
|
epl-1.0
|
kgibm/open-liberty
|
dev/com.ibm.ws.security.wim.scim.2.0/test/com/ibm/ws/security/wim/scim20/model/users/EntitlementImplTest.java
|
1959
|
/*******************************************************************************
* Copyright (c) 2018 IBM Corporation and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* IBM Corporation - initial API and implementation
*******************************************************************************/
package com.ibm.ws.security.wim.scim20.model.users;
import static org.junit.Assert.assertEquals;
import org.junit.Test;
import com.ibm.websphere.security.wim.scim20.model.users.Entitlement;
import com.ibm.ws.security.wim.scim20.SCIMUtil;
/**
 * Serialization round-trip test for {@code EntitlementImpl}.
 */
public class EntitlementImplTest {

    @Test
    public void serialize() throws Exception {
        // Renamed local from "address" (a copy/paste leftover from the
        // address test) to match the type actually under test.
        EntitlementImpl entitlement = getTestInstance();

        /*
         * The expected serialized JSON string.
         */
        StringBuilder expected = new StringBuilder();
        expected.append("{");
        expected.append("\"value\":\"value\",");
        expected.append("\"display\":\"display\",");
        expected.append("\"type\":\"type\",");
        expected.append("\"primary\":false");
        expected.append("}");

        /*
         * Serialize.
         */
        String serialized = SCIMUtil.serialize(entitlement);
        assertEquals(expected.toString(), serialized);

        /*
         * Deserialize and verify the round trip is lossless.
         */
        Entitlement deserialized = SCIMUtil.deserialize(serialized, Entitlement.class);
        assertEquals(entitlement, deserialized);
    }

    /**
     * Creates a fully-populated instance for serialization round-trips.
     *
     * @return a test entitlement with every field set
     */
    public static EntitlementImpl getTestInstance() {
        EntitlementImpl entitlement = new EntitlementImpl();
        entitlement.setDisplay("display");
        entitlement.setPrimary(false);
        entitlement.setType("type");
        entitlement.setValue("value");
        return entitlement;
    }
}
|
epl-1.0
|
kgibm/open-liberty
|
dev/com.ibm.rls.jdbc/src/com/ibm/ws/recoverylog/custom/jdbc/impl/SQLNonTransactionalDataSource.java
|
6382
|
/*******************************************************************************
* Copyright (c) 2012, 2021 IBM Corporation and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* IBM Corporation - initial API and implementation
*******************************************************************************/
package com.ibm.ws.recoverylog.custom.jdbc.impl;
import javax.sql.DataSource;
import com.ibm.websphere.ras.Tr;
import com.ibm.websphere.ras.TraceComponent;
import com.ibm.ws.ffdc.annotation.FFDCIgnore;
import com.ibm.ws.recoverylog.spi.CustomLogProperties;
import com.ibm.ws.recoverylog.spi.InternalLogException;
import com.ibm.ws.recoverylog.spi.TraceConstants;
import com.ibm.wsspi.resource.ResourceFactory;
//------------------------------------------------------------------------------
// Class: SQLNonTransactionalDataSource
//------------------------------------------------------------------------------
/**
* <p>
* The SQLNonTransactionalDataSource class provides a wrapper for the java.sql.DataSource
* object that represents the special non-transactional data source that has been defined
* by an administrator for storing Transaction Logs.
* </p>
*
* <p>
* The Liberty implementation relies on Declarative Services to coordinate the initialisation
* of the Transaction and DataSource (com.ibm.ws.jdbc) components.
* </p>
*/
public class SQLNonTransactionalDataSource {
    /**
     * WebSphere RAS TraceComponent registration.
     */
    private static final TraceComponent tc = Tr.register(SQLNonTransactionalDataSource.class,
                                                         TraceConstants.TRACE_GROUP, TraceConstants.NLS_FILE);

    //private NonTransactionalDataSource nonTranDataSource;
    DataSource nonTranDataSource = null;

    private CustomLogProperties _customLogProperties = null;

    //------------------------------------------------------------------------------
    // Method: SQLNonTransactionalDataSource.SQLNonTransactionalDataSource
    //------------------------------------------------------------------------------
    /**
     * <p> Constructor for the creation of
     * SQLNonTransactionalDataSource objects.
     * </p>
     *
     * @param dsName The name of the Data Source.
     * @param customLogProperties The custom properties of the log.
     */
    public SQLNonTransactionalDataSource(String dsName, CustomLogProperties customLogProperties) {
        _customLogProperties = customLogProperties;

        if (tc.isDebugEnabled())
            // separator space added so the properties do not run into the text
            Tr.debug(tc, "Setting CustomLogProperties in constructor " + customLogProperties);
    }

    //------------------------------------------------------------------------------
    // Method: SQLNonTransactionalDataSource.getDataSource
    //------------------------------------------------------------------------------
    /**
     * Locates a DataSource in config.
     *
     * @return The DataSource.
     *
     * @throws InternalLogException if no resource factory is available or
     *             resource creation fails (original failure attached as cause)
     */
    @FFDCIgnore(Exception.class)
    public DataSource getDataSource() throws Exception {
        if (tc.isEntryEnabled())
            Tr.entry(tc, "getDataSource");

        // Retrieve the data source factory from the CustomLogProperties. This Factory should be set in the JTMConfigurationProvider
        // by the jdbc component using DeclarativeServices. TxRecoveryAgentImpl gets the factory from the ConfigurationProvider and
        // then sets it into CustomLogProperties.
        ResourceFactory dataSourceFactory = _customLogProperties.resourceFactory();

        if (dataSourceFactory != null) {
            if (tc.isDebugEnabled())
                Tr.debug(tc, "Using DataSourceFactory " + dataSourceFactory);
        } else {
            if (tc.isEntryEnabled())
                Tr.exit(tc, "getDataSource", "Null ResourceFactory InternalLogException");
            throw new InternalLogException("Failed to locate DataSource, null Resourcefactory", null);
        }

        try {
            nonTranDataSource = (DataSource) dataSourceFactory.createResource(null);
        } catch (Exception e) {
            if (tc.isEntryEnabled())
                Tr.exit(tc, "getDataSource", "Caught exception " + e + "throw InternalLogException");
            // pass the original exception as the cause instead of dropping it,
            // so diagnostics retain the underlying failure
            throw new InternalLogException("Failed to locate DataSource, caught exception ", e);
        }

        /*
         * TEMPORARY This is waiting on fixes to DeclarativeServices which impact the jdbc component. At present it is
         * possible that the DataSource will have been set but that its associated jdbc driver service will still be initialising
         */
        // boolean refSet = false;
        // while (!refSet)
        // {
        // if (tc.isDebugEnabled())
        // Tr.debug(tc, "getDataSource after sleep");
        // try {
        //
        // nonTranDataSource = (DataSource) dataSourceFactory.createResource(null);
        // if (tc.isDebugEnabled())
        // Tr.debug(tc, "Non Tran dataSource is " + nonTranDataSource);
        // Connection conn = nonTranDataSource.getConnection();
        // if (tc.isDebugEnabled())
        // Tr.debug(tc, "Established connection " + conn);
        //
        // DatabaseMetaData mdata = conn.getMetaData();
        //
        // String dbName = mdata.getDatabaseProductName();
        // if (tc.isDebugEnabled())
        // Tr.debug(tc, "Database name " + dbName);
        //
        // String dbVersion = mdata.getDatabaseProductVersion();
        // if (tc.isDebugEnabled())
        // Tr.debug(tc, "Database version " + dbVersion);
        // refSet = true;
        // } catch (Exception e) {
        // // We will catch an exception if the DataSource is not yet fully formed
        // if (tc.isDebugEnabled())
        // Tr.debug(tc, "Caught exception: " + e);
        // }
        //
        // if (!refSet)
        // Thread.sleep(200);
        // }
        // eof TEMPORARY

        if (tc.isEntryEnabled())
            Tr.exit(tc, "getDataSource", nonTranDataSource);
        return nonTranDataSource;
    }
}
|
epl-1.0
|
kgibm/open-liberty
|
dev/com.ibm.ws.microprofile.config.1.1/test/src/com/ibm/ws/microprofile/config/converter/test/StringConverter101b.java
|
915
|
/*******************************************************************************
* Copyright (c) 2017 IBM Corporation and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* IBM Corporation - initial API and implementation
*******************************************************************************/
package com.ibm.ws.microprofile.config.converter.test;
import javax.annotation.Priority;
import org.eclipse.microprofile.config.spi.Converter;
/**
 * Test converter registered at priority 101 which tags converted values
 * with a "101b=" prefix so tests can tell which converter ran.
 */
@Priority(101)
public class StringConverter101b implements Converter<String> {

    /** {@inheritDoc} */
    @Override
    public String convert(String value) throws IllegalArgumentException {
        // String.valueOf keeps the original "+"-concatenation behavior
        // for null input (yields "101b=null").
        final String prefix = "101b=";
        return prefix.concat(String.valueOf(value));
    }
}
|
epl-1.0
|
alastrina123/debrief
|
org.mwc.debrief.satc.core/tests/com/planetmayo/debrief/satc/model/contributions/BearingMeasurementContributionTest.java
|
6432
|
/*
* Debrief - the Open Source Maritime Analysis Application
* http://debrief.info
*
* (C) 2000-2014, PlanetMayo Ltd
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the Eclipse Public License v1.0
* (http://www.eclipse.org/legal/epl-v10.html)
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
*/
package com.planetmayo.debrief.satc.model.contributions;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import org.junit.Test;
import com.planetmayo.debrief.satc.model.ModelTestBase;
import com.planetmayo.debrief.satc.model.legs.StraightRoute;
import com.planetmayo.debrief.satc.model.states.BoundedState;
import com.planetmayo.debrief.satc.model.states.ProblemSpace;
import com.planetmayo.debrief.satc.support.TestSupport;
import com.planetmayo.debrief.satc.util.GeoSupport;
import com.planetmayo.debrief.satc.util.ObjectUtils;
import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.Geometry;
import com.vividsolutions.jts.geom.Point;
// Uses the deprecated Date(year, month, ...) constructors on purpose for terse fixtures.
@SuppressWarnings("deprecation")
public class BearingMeasurementContributionTest extends ModelTestBase
{
    // Paths to the bearing fixtures (full and shortened data sets).
    public static final String THE_PATH = "tests/com/planetmayo/debrief/satc/model/contributions/data/bearing_measurement_data.txt";
    public static final String THE_SHORT_PATH = "tests/com/planetmayo/debrief/satc/model/contributions/data/short_bearing_measurement_data.txt";

    // Contribution under test; populated by testLoadFrom().
    private BearingMeasurementContribution bmc;

    /**
     * Loads the short data set and checks dates and bearing error are parsed.
     */
    @Test
    public void testLoadFrom() throws Exception
    {
        bmc = new BearingMeasurementContribution();
        assertFalse("should be empty", bmc.hasData());
        bmc.loadFrom(TestSupport.getShortData());
        assertTrue("should not be empty", bmc.hasData());
        // deprecated Date ctor: year is offset from 1900, month is 0-based
        assertEquals("correct start date", new Date(110, 00, 12, 12, 13, 29),
                bmc.getStartDate());
        assertEquals("correct finish date", new Date(110, 00, 12, 12, 24, 29),
                bmc.getFinishDate());
        assertEquals(Math.toRadians(3d), bmc.getBearingError(), EPS);
    }

    /**
     * Applies the contribution to an empty problem space and checks the
     * resulting bounded states carry polygon geometry.
     */
    @Test
    public void testActUpon() throws Exception
    {
        testLoadFrom();
        ProblemSpace ps = new ProblemSpace();
        bmc.actUpon(ps);
        for (BoundedState state : ps.states())
        {
            Geometry geo = state.getLocation().getGeometry();
            Coordinate[] coords = geo.getCoordinates();
            for (int i = 0; i <= 4; i++)
            {
                Coordinate coordinate = coords[i];
                assertNotNull("we should have a coordinate", coordinate);
            }
        }
        assertEquals("read in all lines", 5, ps.size());
    }

    /**
     * Exercises calculateErrorScoreFor() across the inactive/zero-weight
     * states and with good/bad straight routes.
     */
    @Test
    public void testErrorCalc()
    {
        bmc = new BearingMeasurementContribution();
        bmc.loadFrom(TestSupport.getLongData());

        double score;

        // ok, create a well-performing route to use
        Point startP = GeoSupport.getFactory().createPoint(new Coordinate(-30.005, 0.010));
        Date startT = ObjectUtils.safeParseDate(new SimpleDateFormat("yyMMdd HHmmss"),
                "100112 121300");
        Point endP = GeoSupport.getFactory().createPoint(new Coordinate(-30.075, 0.010));
        Date endT = ObjectUtils.safeParseDate(new SimpleDateFormat("yyMMdd HHmmss"),
                "100112 122836");
        StraightRoute goodRoute = new StraightRoute("rName", startP, startT, endP,
                endT);

        // and a performing route to use
        startP = GeoSupport.getFactory().createPoint(new Coordinate(-30.003, -0.05));
        startT = ObjectUtils.safeParseDate(new SimpleDateFormat("yyMMdd HHmmss"),
                "100112 121300");
        endP = GeoSupport.getFactory().createPoint(new Coordinate(-30.075, 0.010));
        endT = ObjectUtils.safeParseDate(new SimpleDateFormat("yyMMdd HHmmss"), "100112 122836");
        StraightRoute badRoute = new StraightRoute("rName", startP, startT, endP,
                endT);

        // we'll need some states, so the route can correctly segment itself
        // NOTE(review): parseDate presumably comes from ModelTestBase — confirm
        ArrayList<BoundedState> states = new ArrayList<BoundedState>();

        // inject some early states for which there isn't a measurement
        states.add(new BoundedState(parseDate("yyMMdd HHmmss", "100112 121300")));
        states.add(new BoundedState(parseDate("yyMMdd HHmmss", "100112 121301")));

        // now for our real states
        states.add(new BoundedState(parseDate("yyMMdd HHmmss", "100112 121459")));
        states.add(new BoundedState(parseDate("yyMMdd HHmmss", "100112 121629")));
        states.add(new BoundedState(parseDate("yyMMdd HHmmss", "100112 121814")));

        // inject a state for which there isn't a measurement
        states.add(new BoundedState(parseDate("yyMMdd HHmmss", "100112 122300")));

        // and carry on
        states.add(new BoundedState(parseDate("yyMMdd HHmmss", "100112 122329")));
        states.add(new BoundedState(parseDate("yyMMdd HHmmss", "100112 122829")));

        // inject some late states for which there isn't a measurement
        states.add(new BoundedState(parseDate("yyMMdd HHmmss", "100112 122832")));
        states.add(new BoundedState(parseDate("yyMMdd HHmmss", "100112 122836")));

        // test when we shouldn't run
        bmc.setActive(false);
        bmc.setWeight(0);
        score = bmc.calculateErrorScoreFor(null);
        assertEquals("should not be able to calc", 0, score, 0.0001);

        // ok, make it active
        bmc.setActive(true);
        score = bmc.calculateErrorScoreFor(null);
        assertEquals("still should not be able to calc", 0, score, 0.0001);

        // ok, let it start
        bmc.setWeight(1);
        score = bmc.calculateErrorScoreFor(null);
        assertEquals("still should not be able to calc", 0, score, 0.0001);
        score = bmc.calculateErrorScoreFor(goodRoute);
        assertEquals("still should not be able to calc", 0, score, 0.0001);

        // ok, flesh out the routes
        goodRoute.generateSegments(states);
        badRoute.generateSegments(states);
        score = bmc.calculateErrorScoreFor(goodRoute);

        // NOTE: BMC no longer produces an error score. The bearing isn't a forecast, it's an absolute measurement.
        // assertTrue("still should not be able to calc", 0 != score);
        // System.out.println("good score:" + score);
        //
        // score = bmc.calculateErrorScoreFor(badRoute);
        // assertTrue("still should not be able to calc", 0 != score);
        // System.out.println("bad score:" + score);
    }
}
|
epl-1.0
|
rkadle/Tank
|
web/web_support/src/main/java/com/intuit/tank/converter/ReplaceModeConverter.java
|
1302
|
/**
* Copyright 2011 Intuit Inc. All Rights Reserved
*/
package com.intuit.tank.converter;
/*
* #%L
* JSF Support Beans
* %%
* Copyright (C) 2011 - 2015 Intuit Inc.
* %%
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
* #L%
*/
import javax.faces.component.UIComponent;
import javax.faces.context.FacesContext;
import javax.faces.convert.Converter;
import javax.faces.convert.FacesConverter;
import com.intuit.tank.script.replace.ReplaceMode;
/**
* ReplaceModeConverter
*
* @author pquinn
*
*/
/**
 * JSF converter between {@link ReplaceMode} enum values and their names.
 *
 * @author pquinn
 */
@FacesConverter(value = "tsReplaceModeConverter")
public class ReplaceModeConverter implements Converter {

    /**
     * {@inheritDoc}
     *
     * Returns null for a null or blank submitted value, as the JSF
     * converter contract expects, instead of letting valueOf throw a
     * NullPointerException.
     */
    @Override
    public Object getAsObject(FacesContext facesContext, UIComponent uiComponent, String value) {
        if (value == null || value.trim().isEmpty()) {
            return null;
        }
        return ReplaceMode.valueOf(value);
    }

    /**
     * {@inheritDoc}
     *
     * Returns an empty string for a null model value, per the JSF
     * converter contract, instead of throwing a NullPointerException.
     */
    @Override
    public String getAsString(FacesContext facesContext, UIComponent uiComponent, Object obj) {
        if (obj == null) {
            return "";
        }
        if (obj instanceof ReplaceMode) {
            return ((ReplaceMode) obj).name();
        }
        return obj.toString();
    }
}
|
epl-1.0
|
Mirage20/che
|
core/che-core-api-jdbc/src/main/java/org/eclipse/che/api/core/jdbc/jpa/DuplicateKeyException.java
|
979
|
/*******************************************************************************
* Copyright (c) 2012-2016 Codenvy, S.A.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Codenvy, S.A. - initial API and implementation
*******************************************************************************/
package org.eclipse.che.api.core.jdbc.jpa;
import org.eclipse.che.api.core.jdbc.DBErrorCode;
/**
 * Thrown when data couldn't be updated/stored due to unique constraint violation.
 *
 * @author Yevhenii Voevodin
 * @see DBErrorCode#DUPLICATE_KEY
 */
public class DuplicateKeyException extends DetailedRollbackException {

    /**
     * Creates the exception, tagging it with {@link DBErrorCode#DUPLICATE_KEY}.
     *
     * @param message description of the constraint violation
     * @param cause   the underlying persistence-layer failure
     */
    public DuplicateKeyException(String message, Throwable cause) {
        super(message, cause, DBErrorCode.DUPLICATE_KEY);
    }
}
|
epl-1.0
|
kstkelvin/vpm-kelvin-st
|
src/view/menu/RelatorioMenu.java
|
1092
|
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package view.menu;
/**
 * Static helper that renders the report ("Relatório") sub-menu and
 * declares the numeric codes for each menu option.
 *
 * @author kelvin
 */
public class RelatorioMenu {

    // Menu option codes, matching the numbers printed by getOpcoes().
    public static final int OP_PRODUTOS_VENDIDOS = 1;
    public static final int OP_COMPRAS_FEITAS__CLIENTE = 2;
    public static final int OP_CLIENTES_MAIS_COMPRAM = 3;
    public static final int OP_CLIENTES_OPERACOES = 4;
    public static final int OP_RETORNAR = 0;

    /**
     * Builds the textual menu shown to the operator.
     *
     * @return the menu text, ready to be printed
     */
    public static String getOpcoes() {
        StringBuilder menu = new StringBuilder();
        menu.append("\n----------------------------------\n");
        menu.append("Histórico: \n");
        menu.append("\n1- Produtos vendidos\n");
        menu.append("2- Compras feitas pelo cliente\n");
        menu.append("3- Clientes que mais compram\n");
        menu.append("4- Clientes que mais realizam operações de monetização\n");
        menu.append("0- Retornar ao menu principal\n");
        menu.append("\nDigite sua Opção: ");
        menu.append("\n----------------------------------\n");
        return menu.toString();
    }
}
|
epl-1.0
|
openhab/openhab
|
bundles/binding/org.openhab.binding.insteonplm/src/main/java/org/openhab/binding/insteonplm/internal/message/DataType.java
|
1921
|
/**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.binding.insteonplm.internal.message;
import java.util.HashMap;
/**
 * Defines the data types that can be used in the fields of a message.
 *
 * @author Daniel Pfrommer
 * @since 1.5.0
 */
public enum DataType {
    BYTE("byte", 1),
    INT("int", 4),
    FLOAT("float", 4),
    ADDRESS("address", 3),
    INVALID("INVALID", -1);

    // Lookup table from type name to constant. INVALID is deliberately
    // left out so s_getDataType("INVALID") yields null, exactly as the
    // original explicit registrations behaved.
    private static HashMap<String, DataType> s_typeMap = new HashMap<String, DataType>();

    private int m_size = -1; // number of bytes consumed
    private String m_name = "";

    static {
        for (DataType dataType : DataType.values()) {
            if (dataType != INVALID) {
                s_typeMap.put(dataType.getName(), dataType);
            }
        }
    }

    /**
     * Constructor
     *
     * @param name the name of the data type
     * @param size the size (in bytes) of this data type
     */
    DataType(String name, int size) {
        m_size = size;
        m_name = name;
    }

    /**
     * @return the size (in bytes) of this data type
     */
    public int getSize() {
        return m_size;
    }

    /**
     * @return clear text string with the name
     */
    public String getName() {
        return m_name;
    }

    /**
     * Turns a string into the corresponding data type
     *
     * @param name the string to translate to a type
     * @return the data type corresponding to the name string, or null if not found
     */
    public static DataType s_getDataType(String name) {
        return s_typeMap.get(name);
    }
}
|
epl-1.0
|
TypeFox/che
|
dashboard/src/components/api/che-team.factory.ts
|
11617
|
/*
* Copyright (c) 2015-2017 Red Hat, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Red Hat, Inc. - initial API and implementation
*/
'use strict';
import {CheTeamRoles} from './che-team-roles';
import {CheResourceLimits} from './che-resource-limits';
import {CheTeamEventsManager} from './che-team-events-manager.factory';
/**
 * Shape of the $resource client for the team API: the standard resource
 * class plus the custom `findTeam` GET action (lookup by team name).
 */
interface ITeamsResource<T> extends ng.resource.IResourceClass<T> {
  findTeam(data: { teamName: string }): ng.resource.IResource<T>;
}
/**
 * This class is handling the interactions with Team management API.
 *
 * Teams are backed by the organization API: a "team" is an organization
 * that has a parent, while the organization whose qualified name equals
 * the current user's name is treated as the personal account.
 *
 * @author Ann Shumilova
 */
export class CheTeam implements che.api.ICheTeam {
  /**
   * Angular Resource service.
   */
  private $resource: ng.resource.IResourceService;
  /**
   * Angular promise service.
   */
  private $q: ng.IQService;
  /**
   * Lodash library.
   */
  private lodash: any;
  /**
   * Teams map by team's id.
   */
  private teamsMap: Map<string, any> = new Map();
  /**
   * Array of teams.
   */
  private teams: any = [];
  /**
   * The registry for managing available namespaces.
   */
  private cheNamespaceRegistry: any;
  /**
   * The user API.
   */
  private cheUser : any;
  /**
   * The Che Team notifications.
   */
  private teamEventsManager: che.api.ICheTeamEventsManager;
  /**
   * User's personal account.
   */
  private personalAccount: any;
  /**
   * Client for requesting Team API.
   */
  private remoteTeamAPI: ITeamsResource<any>;
  /**
   * Deferred object which will be resolved when teams are fetched
   */
  private fetchTeamsDefer: ng.IDeferred<any>;
  /**
   * The Che Organization Service.
   */
  private cheOrganization: che.api.ICheOrganization;
  /**
   * The Che resources API.
   */
  private cheResourcesDistribution: che.api.ICheResourcesDistribution;

  /**
   * Default constructor that is using resource
   * @ngInject for Dependency injection
   */
  constructor($resource: ng.resource.IResourceService, $q: ng.IQService, lodash: any, cheNamespaceRegistry: any, cheUser: any,
              cheOrganization: che.api.ICheOrganization, cheTeamEventsManager: CheTeamEventsManager, cheResourcesDistribution: che.api.ICheResourcesDistribution) {
    this.$resource = $resource;
    this.$q = $q;
    this.lodash = lodash;
    this.cheNamespaceRegistry = cheNamespaceRegistry;
    this.cheUser = cheUser;
    this.teamEventsManager = cheTeamEventsManager;
    this.cheOrganization = cheOrganization;
    this.cheResourcesDistribution = cheResourcesDistribution;

    this.remoteTeamAPI = <ITeamsResource<any>>$resource('/api/organization', {}, {
      findTeam: {method: 'GET', url: '/api/organization/find?name=:teamName'}
    });

    // namespaces become available only once the first team fetch resolves:
    this.fetchTeamsDefer = this.$q.defer();
    const fetchTeamsPromise = this.fetchTeamsDefer.promise;
    this.cheNamespaceRegistry.setFetchPromise(fetchTeamsPromise);

    // re-fetch the team list whenever a team is renamed, deleted or created:
    cheTeamEventsManager.addRenameHandler(() => {
      this.fetchTeams();
    });

    cheTeamEventsManager.addDeleteHandler(() => {
      this.fetchTeams();
    });

    cheTeamEventsManager.addNewTeamHandler(() => {
      this.fetchTeams();
    });
  }

  /**
   * Request the list of available teams.
   *
   * @returns {ng.IPromise<any>}
   */
  fetchTeams(): ng.IPromise<any> {
    let defer = this.$q.defer();

    this.cheOrganization.fetchOrganizations().then((teams: any[]) => {
      this.processTeams(teams, this.cheUser.getUser());
      defer.resolve();
    }, (error: any) => {
      // 304 means the cached list is still valid — treat as success:
      if (error.status === 304) {
        defer.resolve();
      } else {
        defer.reject(error);
      }
    });

    return defer.promise.then(() => {
      this.fetchTeamsDefer.resolve();
    }, (error: any) => {
      this.fetchTeamsDefer.reject();
      return this.$q.reject(error);
    });
  }

  /**
   * Process teams to retrieve personal account (name of the organization === current user's name) and
   * teams (organization with parent). Also rebuilds the namespace registry entries.
   *
   * @param organizations {che.IOrganization}
   * @param user {che.IUser}
   */
  processTeams(organizations: Array<che.IOrganization>, user: any): void {
    this.teamsMap = new Map();
    this.teams = [];
    this.cheNamespaceRegistry.getNamespaces().length = 0;

    let name = user.name;
    // detection personal account (organization which name equals to current user's name):
    this.personalAccount = this.lodash.find(organizations, (organization: che.IOrganization) => {
      return organization.qualifiedName === name;
    });

    if (this.personalAccount) {
      // display personal account as "personal" on UI, namespace(id) stays the same for API interactions:
      this.cheNamespaceRegistry.getNamespaces().push({id: this.personalAccount.qualifiedName, label: 'personal', location: '/billing'});
      this.cheNamespaceRegistry.setCaption('Team');
    } else {
      this.cheNamespaceRegistry.setCaption('Organization');
      // todo add back, when API is ready: this.cheNamespaceRegistry.setEmptyMessage('You are not member of any organization and not able to create workspace. Please, contact your administrator.');
      this.processOrganizationInfoRetriever(organizations);
    }

    organizations.forEach((organization: che.IOrganization) => {
      this.teamsMap.set(organization.id, organization);
      // team has to have parent (root organizations are skipped):
      if (organization.parent) {
        this.teams.push(organization);
        this.teamEventsManager.subscribeTeamNotifications(organization.id);
      }

      if (this.personalAccount) {
        if (organization.id !== this.personalAccount.id) {
          this.cheNamespaceRegistry.getNamespaces().push({
            id: organization.qualifiedName,
            label: organization.qualifiedName,
            location: '/team/' + organization.qualifiedName
          });
        }
      } else {
        this.cheNamespaceRegistry.getNamespaces().push({id: organization.qualifiedName, label: organization.qualifiedName, location: '/organization/' + organization.qualifiedName});
      }
    });
  }

  /**
   * Process organization information retriever: registers a callback that
   * resolves the available RAM for a namespace's organization.
   *
   * @param organizations
   */
  processOrganizationInfoRetriever(organizations: Array<che.IOrganization>): void {
    this.cheNamespaceRegistry.setGetAdditionalInfo((namespaceId: string) => {
      let organization = this.lodash.find(organizations, (organization: che.IOrganization) => {
        return organization.qualifiedName === namespaceId;
      });

      if (!organization) {
        return null;
      }

      return this.cheResourcesDistribution.fetchAvailableOrganizationResources(organization.id).then(() => {
        // RAM amount is reported in MB — converted to GB for display:
        let resource = this.cheResourcesDistribution.getOrganizationAvailableResourceByType(organization.id, CheResourceLimits.RAM);
        return resource ? 'Available RAM: ' + (resource.amount / 1024) + 'GB' : null;
      });
    });
  }

  /**
   * Return current user's personal account.
   *
   * @returns {any} personal account
   */
  getPersonalAccount(): any {
    return this.personalAccount;
  }

  /**
   * Returns the array of teams.
   *
   * @returns {Array<any>} the array of teams
   */
  getTeams(): Array<any> {
    return this.teams;
  }

  /**
   * Requests team by it's id.
   *
   * @param id {string} the team's Id
   * @returns {ng.IPromise<any>} result promise
   */
  fetchTeamById(id: string): ng.IPromise<any> {
    let promise = this.cheOrganization.fetchOrganizationById(id);
    let resultPromise = promise.then((organization: che.IOrganization) => {
      this.teamsMap.set(id, organization);
      return organization;
    }, (error: any) => {
      // 304: cached copy is still valid — return it:
      if (error.status === 304) {
        return this.teamsMap.get(id);
      }
      return this.$q.reject();
    });

    return resultPromise;
  }

  /**
   * Requests team by it's name.
   *
   * @param name {string} the team's name
   * @returns {ng.IPromise<any>} result promise
   */
  fetchTeamByName(name: string): ng.IPromise<any> {
    let promise = this.cheOrganization.fetchOrganizationByName(name);
    return promise;
  }

  /**
   * Returns team by it's name. Checks the personal account first, then the
   * organization cache, then the local team list.
   *
   * @param name team's name
   * @returns {any} team or <code>null</code> if not found
   */
  getTeamByName(name: string): any {
    if (this.personalAccount && this.personalAccount.qualifiedName === name) {
      return this.personalAccount;
    }

    const team = this.cheOrganization.getOrganizationByName(name);
    if (angular.isDefined(team)) {
      return team;
    }

    for (let i = 0; i < this.teams.length; i++) {
      if (this.teams[i].qualifiedName === name) {
        return this.teams[i];
      }
    }

    return null;
  }

  /**
   * Returns team by it's id.
   *
   * @param id {string} team's id
   * @returns {any} team or <code>null</code> if not found
   */
  getTeamById(id: string): any {
    return this.teamsMap.get(id);
  }

  /**
   * Creates new team with pointed name, parented to the personal account.
   *
   * @param name the name of the team to be created
   * @returns {ng.IPromise<any>} result promise
   */
  createTeam(name: string): ng.IPromise<any> {
    return this.cheOrganization.createOrganization(name, this.personalAccount.id);
  }

  /**
   * Delete team by pointed id.
   *
   * @param id team's id to be deleted
   * @returns {ng.IPromise<any>} result promise
   */
  deleteTeam(id: string): ng.IPromise<any> {
    return this.cheOrganization.deleteOrganization(id);
  }

  /**
   * Update team's info.
   *
   * @param team the team info to be updated
   * @returns {ng.IPromise<any>} result promise
   */
  updateTeam(team: any): ng.IPromise<any> {
    return this.cheOrganization.updateOrganization(team);
  }

  /**
   * Forms the list of roles based on the list of actions
   *
   * @param actions array of actions
   * @returns {Array<any>} array of roles
   */
  getRolesFromActions(actions: Array<string>): Array<any> {
    let roles = [];
    let teamRoles = CheTeamRoles.getValues();
    teamRoles.forEach((role: any) => {
      // a role matches when all of its actions are present:
      if (this.lodash.difference(role.actions, actions).length === 0) {
        roles.push(role);
      }
    });

    // avoid roles intake (filter if any role's action is subset of any other):
    roles = this.lodash.filter(roles, (role: any) => {
      return !this._checkIsSubset(role, roles);
    });

    return roles;
  }

  /**
   * Checks the actions in provided role to be part (subset) of any other role's actions.
   *
   * @param role role to be checked
   * @param roles list of roles
   * @returns {boolean} <code>true</code> if subset
   * @private
   */
  _checkIsSubset(role: any, roles: Array<any>): boolean {
    let isSubset = false;
    for (let i = 0; i < roles.length; i++) {
      let r = roles[i];
      // checks provided role's action is subset of any other role's actions in the roles list:
      if (role.actions.length === this.lodash.intersection(role.actions, r.actions).length && role.actions.length !== r.actions.length) {
        return true;
      }
    }

    return isSubset;
  }

  /**
   * Forms the list actions based on the list of roles.
   *
   * @param roles array of roles
   * @returns {Array<string>} actions array
   */
  getActionsFromRoles(roles: Array<any>): Array<string> {
    let actions = [];
    roles.forEach((role: any) => {
      actions = actions.concat(role.actions);
    });

    return actions;
  }

  /**
   * Returns the display name for a team: the short name when unique,
   * otherwise the fully qualified name to disambiguate.
   *
   * @param team the team to display
   * @returns {string} display name
   */
  getTeamDisplayName(team: any): string {
    let teamNames = this.lodash.pluck(this.teams, 'name');
    let size = this.lodash.pull(teamNames, team.name).length;
    return (this.teams.length - size) > 1 ? team.qualifiedName : team.name;
  }
}
|
epl-1.0
|
braverokmc79/macaronics-spring-one
|
web04 - Finish/src/main/java/com/example/wbe04/controller/memo/MemoController.java
|
2094
|
package com.example.wbe04.controller.memo;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.inject.Inject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.bind.annotation.RestController;
import com.example.wbe04.model.memo.dao.MemoDAO;
import com.example.wbe04.model.memo.dto.MemoDTO;
@Controller
@RequestMapping(value="/memo")
public class MemoController {

    private static final Logger logger = LoggerFactory.getLogger(MemoController.class);

    @Inject
    private MemoDAO memoDao;

    /** Renders the memo page view. */
    @RequestMapping("/list")
    public String list() {
        logger.info(" memo list 호출");
        return "memo/memo";
    }

    /**
     * Returns the full memo list as JSON.
     *
     * @return 200 with a {"list": [...]} body on success, 400 on failure
     */
    @ResponseBody
    @RequestMapping(value="/memo_list", method=RequestMethod.GET)
    public ResponseEntity<Map<String, Object>> memo_list() {
        ResponseEntity<Map<String, Object>> entity = null;
        try {
            List<MemoDTO> list = memoDao.memoList();
            Map<String, Object> map = new HashMap<>();
            map.put("list", list);
            entity = new ResponseEntity<Map<String, Object>>(map, HttpStatus.OK);
        } catch (Exception e) {
            // BUG FIX: e.getStackTrace() only *returned* the trace without
            // recording it anywhere, silently swallowing the failure.
            logger.error("failed to load memo list", e);
            entity = new ResponseEntity<Map<String, Object>>(HttpStatus.BAD_REQUEST);
        }
        return entity;
    }

    /**
     * Adds a memo posted as a JSON request body.
     *
     * @param dto the memo to store
     * @return 200 with "SUCCESS" on success, 400 on failure
     */
    @ResponseBody
    @RequestMapping(value="/memo_add", method=RequestMethod.POST)
    public ResponseEntity<String> memo_add(@RequestBody MemoDTO dto) {
        ResponseEntity<String> entity = null;
        try {
            memoDao.memoAdd(dto);
            entity = new ResponseEntity<String>("SUCCESS", HttpStatus.OK);
        } catch (Exception e) {
            // Previously the exception was swallowed with only a TODO comment.
            logger.error("failed to add memo", e);
            entity = new ResponseEntity<String>(HttpStatus.BAD_REQUEST);
        }
        return entity;
    }
}
|
epl-1.0
|
zzzzgc/ZGC4Obj
|
transfer/src/main/java/com/xinxing/transfer/common/util/CommonUtils.java
|
1315
|
package com.xinxing.transfer.common.util;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import com.xinxing.boss.business.api.domain.SendOrderInfo;
import com.xinxing.boss.interaction.pojo.customer.OrderInfo;
public class CommonUtils {
/**
 * Summarizes a list of send-order records as just their order ids so log
 * statements stay concise.
 *
 * @param list orders to summarize (may be null)
 * @return string form of the id list, e.g. "[a, b]"
 */
public static String getLogParamListId(List<SendOrderInfo> list) {
    List<String> ids = new ArrayList<>();
    if (list != null) {
        // Iterating an empty list is a no-op, so no explicit size check is needed.
        for (SendOrderInfo order : list) {
            ids.add(order.getOrderId());
        }
    }
    return ids.toString();
}
/**
 * Summarizes a list of orders as just their numeric ids so log statements
 * stay concise.
 *
 * @param list orders to summarize (may be null)
 * @return string form of the id list, e.g. "[1, 2]"
 */
public static String getLogParamList(List<OrderInfo> list) {
    List<Integer> ids = new ArrayList<>();
    if (list != null) {
        // Iterating an empty list is a no-op, so no explicit size check is needed.
        for (OrderInfo order : list) {
            ids.add(order.getId());
        }
    }
    return ids.toString();
}
/**
 * Joins the map's entries into a "key=value&key=value" string.
 *
 * NOTE(review): despite its name this method does NOT sort the keys — it
 * follows the map's iteration order. Pass a TreeMap/LinkedHashMap if a
 * deterministic key order matters (e.g. for request signing).
 *
 * @param map entries to join (may be null or empty)
 * @return joined string, or "" for a null/empty map
 */
public static String sort(Map<String, String> map) {
    // BUG FIX: substring(1) threw StringIndexOutOfBoundsException when the
    // map was empty; guard first. StringBuilder replaces the needlessly
    // synchronized StringBuffer.
    if (map == null || map.isEmpty()) {
        return "";
    }
    StringBuilder sb = new StringBuilder();
    for (Map.Entry<String, String> entry : map.entrySet()) {
        sb.append('&').append(entry.getKey()).append('=').append(entry.getValue());
    }
    // Drop the leading '&' introduced by the first append.
    return sb.substring(1);
}
}
|
epl-1.0
|
kgibm/open-liberty
|
dev/com.ibm.ws.security.javaeesec_fat/test-bundles/security.jaspic.user.feature.test/src/com/ibm/ws/security/jaspi/test/AuthModule.java
|
24211
|
/*******************************************************************************
* Copyright (c) 2014 IBM Corporation and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* IBM Corporation - initial API and implementation
*******************************************************************************/
package com.ibm.ws.security.jaspi.test;
import java.io.IOException;
import java.security.Principal;
import java.util.HashMap;
import java.util.Hashtable;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.security.auth.Subject;
import javax.security.auth.callback.Callback;
import javax.security.auth.callback.CallbackHandler;
import javax.security.auth.message.AuthException;
import javax.security.auth.message.AuthStatus;
import javax.security.auth.message.MessageInfo;
import javax.security.auth.message.MessagePolicy;
import javax.security.auth.message.MessagePolicy.ProtectionPolicy;
import javax.security.auth.message.MessagePolicy.TargetPolicy;
import javax.security.auth.message.callback.CallerPrincipalCallback;
import javax.security.auth.message.callback.GroupPrincipalCallback;
import javax.security.auth.message.callback.PasswordValidationCallback;
import javax.security.auth.message.module.ServerAuthModule;
import javax.servlet.RequestDispatcher;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletRequestWrapper;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpServletResponseWrapper;
import com.ibm.ws.common.internal.encoder.Base64Coder;
import com.ibm.wsspi.security.token.AttributeNameConstants;
/*
* This JASPI authentication module performs basic and form auth when validateRequest is called,
* depending on the request's authType.
*/
public class AuthModule implements ServerAuthModule {
private static Logger log = Logger.getLogger(AuthModule.class.getName());
private static Class[] supportedMessageTypes = new Class[] { HttpServletRequest.class, HttpServletResponse.class };
private static final String IS_MANDATORY_POLICY = "javax.security.auth.message.MessagePolicy.isMandatory";
private static final String REGISTER_SESSION = "javax.servlet.http.registerSession";
private static final String JASPI_USER = "com.ibm.websphere.jaspi.user";
private static final String JASPI_PASSWORD = "com.ibm.websphere.jaspi.password";
private static final String JASPI_WEB_REQUEST = "com.ibm.websphere.jaspi.request";
private MessagePolicy requestPolicy;
private CallbackHandler handler;
private Map<String, String> options;
private String cpcbType = null;
private enum CBvalues {
YES, NO, MANUAL
};
@Override
public Class[] getSupportedMessageTypes() {
    // This module only handles HTTP servlet request/response message exchanges.
    return supportedMessageTypes;
}
/**
 * Test-only request wrapper: answers "true" solely for the synthetic
 * "hasWrapper" header so servlets can detect that wrapping occurred.
 */
class JASPIHttpServletRequestWrapper extends HttpServletRequestWrapper {

    public JASPIHttpServletRequestWrapper(HttpServletRequest request) {
        super(request);
    }

    @Override
    public String getHeader(String name) {
        return "hasWrapper".equals(name) ? "true" : "false";
    }
}
/** Minimal Principal implementation used when tests request a custom principal type. */
class JASPIPrincipal implements Principal {

    // Immutable after construction; holds the authenticated user name.
    private String name = null;

    public JASPIPrincipal(String name) {
        this.name = name;
    }

    @Override
    public String getName() {
        return name;
    }
}
/**
 * Test-only response wrapper: answers "true" solely for the synthetic
 * "hasWrapper" header so servlets can detect that wrapping occurred.
 */
class JASPIHttpServletResponseWrapper extends HttpServletResponseWrapper {

    public JASPIHttpServletResponseWrapper(HttpServletResponse response) {
        super(response);
    }

    @Override
    public String getHeader(String name) {
        return "hasWrapper".equals(name) ? "true" : "false";
    }
}
/**
 * JASPI lifecycle entry point: captures the request policy, callback handler
 * and provider options for later use by validateRequest/secureResponse.
 * A defensive copy of the options map is taken.
 */
@Override
public void initialize(MessagePolicy requestPolicy, MessagePolicy responsePolicy, CallbackHandler handler, Map options) throws AuthException {
    this.requestPolicy = requestPolicy;
    this.handler = handler;
    // Copy rather than alias: the caller may reuse/mutate its map.
    this.options = new HashMap<String, String>();
    if (options != null) {
        this.options.putAll(options);
    }
    log.info("initialize " + AuthModule.class.getSimpleName() + " requestPolicy=" + requestPolicy + ", responsePolicy=" + responsePolicy + ", handler=" + handler
             + ", options=" + this.options);
    // Trace the protection policy of each request target, if any are configured.
    if (requestPolicy != null && requestPolicy.getTargetPolicies() != null) {
        for (TargetPolicy target : requestPolicy.getTargetPolicies()) {
            ProtectionPolicy protectionPolicy = target.getProtectionPolicy();
            if (protectionPolicy != null) {
                log.info("target request ProtectionPolicy=" + protectionPolicy.getID());
            }
        }
    }
}
/** No-op for this test module; only logs that it was invoked. */
@Override
public void cleanSubject(MessageInfo messageInfo, Subject subject) throws AuthException {
    log.info("cleanSubject");
}
/**
 * Marks the response as processed by this provider: writes a marker line the
 * FAT tests look for, then reports SEND_SUCCESS. Write failures are tolerated
 * (e.g. the response may already be committed) and only logged.
 */
@Override
public AuthStatus secureResponse(MessageInfo messageInfo, Subject serviceSubject) throws AuthException {
    log.log(Level.FINE, "enter secureResponse", new Object[] { messageInfo, serviceSubject });
    HttpServletResponse rsp = (HttpServletResponse) messageInfo.getResponseMessage();
    try {
        rsp.getWriter().println("JASPI secureResponse called with auth provider=" + options.get("provider.name"));
    } catch (Exception e) {
        log.info(this.getClass().getName() + " failed to write to response object.");
    }
    log.log(Level.FINE, "exit secureResponse");
    return AuthStatus.SEND_SUCCESS;
}
/**
 * Main JASPI authentication entry point. Dispatches on how the request
 * arrived — programmatic login(), programmatic authenticate(), test-driven
 * request wrapping, session re-use, basic auth, or form auth — and drives
 * the matching callback sequence. Callback usage (PasswordValidation /
 * CallerPrincipal / GroupPrincipal) can be overridden per request via a
 * "PVCB..." query string or the j_description form field.
 *
 * @return SUCCESS, SEND_CONTINUE (challenge/redirect) or SEND_FAILURE
 * @throws AuthException for unsupported (certificate) auth types
 */
@Override
public AuthStatus validateRequest(MessageInfo messageInfo, Subject clientSubject, Subject serviceSubject) throws AuthException {
    log.log(Level.FINE, "enter validateRequest", new Object[] { messageInfo, clientSubject });
    // Nothing to protect: no policy or no message exchange.
    if (requestPolicy == null || messageInfo == null) {
        return AuthStatus.SUCCESS;
    }
    Map<String, String> msgMap = messageInfo.getMap();
    log.info("MessageInfo Map: " + msgMap);
    // The container flags programmatic HttpServletRequest.authenticate()/login() calls in the map.
    boolean isAuthenticate = "authenticate".equalsIgnoreCase(msgMap.get(JASPI_WEB_REQUEST));
    boolean isLogin = "login".equalsIgnoreCase(msgMap.get(JASPI_WEB_REQUEST));
    AuthStatus status = AuthStatus.SEND_FAILURE;
    HttpServletRequest req = (HttpServletRequest) messageInfo.getRequestMessage();
    HttpServletResponse rsp = (HttpServletResponse) messageInfo.getResponseMessage();
    String authType = req.getAuthType();
    log.info("AuthType: " + authType);
    // userPrincipal will be set by the runtime if a previous request resulted in registerSession=true
    Principal userPrincipal = req.getUserPrincipal();
    if (userPrincipal != null) {
        log.info("userPrincipal.getName: " + userPrincipal.getName());
    } else {
        log.info("UserPrincipal is null ");
    }
    String authHeader = req.getHeader("Authorization");
    log.info("Authorization=[" + authHeader + "]");
    String methodName = req.getParameter("method");
    log.info("Request parameter: method=" + methodName);
    cpcbType = req.getParameter("cpcbType");
    log.info("Request parameter: cpcbType=" + cpcbType);
    // By default, use all callbacks -- PasswordValidation, CallerPrincipal, GroupPrincipal.
    String[] useCallbacks = { "YES", "YES", "YES" };
    // Cache key used when manually populating the subject's credential hashtable.
    String cacheKey = "Jaspi:JASPIee5EAR:default_host" + req.getContextPath().substring(1);
    // If test method is registerSession, ask the runtime to remember the authenticated session.
    if (methodName != null && methodName.equalsIgnoreCase("registerSession")) {
        log.info("Set registerSession=true so that provider will set javax.servlet.http.registerSession=true in msgMap");
        msgMap.put(REGISTER_SESSION, Boolean.TRUE.toString().toLowerCase());
    }
    if (isLogin) {
        // Programmatic login(): credentials arrive in the MessageInfo map.
        log.info("request is for method login()");
        String username = msgMap.get(JASPI_USER);
        String password = msgMap.get(JASPI_PASSWORD);
        status = handleUserPassword(username, password, rsp, msgMap, clientSubject, useCallbacks, cacheKey);
    } else if (isAuthenticate) {
        // Programmatic authenticate(): challenge if no Authorization header yet.
        log.info("request is for method authenticate()");
        if (authHeader == null) {
            status = setChallengeAuthorizationHeader(rsp);
        } else {
            status = handleAuthorizationHeader(authHeader, rsp, msgMap, clientSubject, useCallbacks, cacheKey);
        }
    } else if (methodName != null && methodName.equalsIgnoreCase("wrap")) {
        log.info("Wrap the request and response");
        // Wrap the request so the invoked servlet can invoke a method on the wrapper
        messageInfo.setRequestMessage(new JASPIHttpServletRequestWrapper((HttpServletRequest) messageInfo.getRequestMessage()));
        // Wrap the response so the invoked servlet can invoke a method on the wrapper
        messageInfo.setResponseMessage(new JASPIHttpServletResponseWrapper((HttpServletResponse) messageInfo.getResponseMessage()));
        status = AuthStatus.SUCCESS;
    } else if (methodName != null && methodName.equalsIgnoreCase("processRegisteredSession")) {
        if (userPrincipal != null) {
            log.info("If userPrincipal already set by runtime, then process callerPrincipal callback to establish subject and return AuthStatus.SUCCESS");
            // Only the CallerPrincipal callback is needed; skip password validation and groups.
            useCallbacks[0] = "NO";
            useCallbacks[2] = "NO";
            handleCallbacks(clientSubject, userPrincipal.getName(), useCallbacks);
            status = AuthStatus.SUCCESS;
        }
    } else {
        // Standard container-driven BASIC or FORM authentication.
        String queryString = req.getQueryString();
        // "PVCB...&...&..." query strings override the three callback switches.
        if (queryString != null && queryString.startsWith("PVCB")) {
            String[] queryInfo = queryString.split("&");
            if (queryInfo.length == 3) {
                for (int i = 0; i < queryInfo.length; i++)
                    useCallbacks[i] = queryInfo[i].substring(5);
                log.info("Overriding Callback Settings:\n PasswordValidation-" + useCallbacks[0] + "\nCallerPrincipal" + useCallbacks[1] + "\nGroupPrincipal" + useCallbacks[2]);
            }
        }
        if ("BASIC".equals(authType) || authType == null) {
            if (authHeader == null) {
                // If isMandatory=false, the servlet is unprotected and the provider will not authenticate and will return SUCCESS
                if (msgMap.get(IS_MANDATORY_POLICY).equalsIgnoreCase("FALSE")) {
                    log.info("BasicAuth request with isMandatory=false does not require JASPI authentication and returns success");
                    status = AuthStatus.SUCCESS;
                } else {
                    // Protected servlet: must challenge when the basic auth header is absent.
                    status = setChallengeAuthorizationHeader(rsp);
                }
            } else {
                status = handleAuthorizationHeader(authHeader, rsp, msgMap, clientSubject, useCallbacks, cacheKey);
            }
        } else if ("FORM".equals(authType)) {
            log.info("requestURL=" + req.getRequestURL() + ", requestURI=" + req.getRequestURI());
            String username = req.getParameter("j_username");
            String password = req.getParameter("j_password");
            log.info("j_username=" + username);
            // j_description carries callback overrides for the 2nd validateRequest call.
            String description = req.getParameter("j_description");
            if (description != null && description.startsWith("PVCB")) {
                // BUG FIX: previously logged `username` under the j_description label.
                log.info("j_description=" + description);
                String[] callbackInfo = description.split("&");
                if (callbackInfo.length == 3) {
                    for (int i = 0; i < callbackInfo.length; i++)
                        useCallbacks[i] = callbackInfo[i].substring(5);
                }
            }
            if (username != null && password != null) {
                status = handleUserPassword(username, password, rsp, msgMap, clientSubject, useCallbacks, cacheKey);
            } else {
                // No credentials yet: optionally exercise RequestDispatcher forward/include, then redirect to the login form.
                if (methodName != null && (methodName.equalsIgnoreCase("forward") || methodName.equalsIgnoreCase("include"))) {
                    log.info("Acquiring a RequestDispatcher.");
                    RequestDispatcher rd = req.getRequestDispatcher("loginJaspi.jsp");
                    try {
                        if (methodName.equalsIgnoreCase("include")) {
                            log.info("RequestDispatcher is including a loginJaspi.jsp");
                            rd.include(req, rsp);
                        } else {
                            log.info("RequestDispatcher is forwarding to loginJaspi.jsp");
                            rd.forward(req, rsp);
                        }
                    } catch (ServletException e) {
                        log.info("Exception caught including loginJaspi.jsp " + e);
                    } catch (IOException e) {
                        log.info("Exception caught including loginJaspi.jsp " + e);
                    }
                }
                status = AuthStatus.SEND_CONTINUE;
                rsp.setStatus(HttpServletResponse.SC_MOVED_TEMPORARILY);
            }
        } else {
            throw new AuthException("Certificate Authentication is not supported by this module.");
        }
    }
    // Marker line the FAT tests look for; tolerate a committed/unwritable response.
    try {
        rsp.getWriter().println("JASPI validateRequest called with auth provider=" + options.get("provider.name"));
    } catch (Exception e) {
        log.info(this.getClass().getName() + " failed to write to response object.");
    }
    log.log(Level.FINE, "exit validateRequest", status);
    return status;
}
/**
 * Validates an explicit user/password pair, and on success runs the
 * caller/group callbacks and records the JASPI auth type in the message map.
 * Sets 200 on success, 403 on failure, on the response.
 */
private AuthStatus handleUserPassword(String user, String password, HttpServletResponse rsp, Map<String, String> msgMap, Subject clientSubject, String[] useCallbacks,
                                      String cacheKey)
        throws AuthException {
    log.log(Level.FINE, "enter handleUserPassword", new Object[] { user, password, msgMap, clientSubject, useCallbacks, cacheKey });
    int rspStatus = HttpServletResponse.SC_OK;
    log.info("Authenticating user=" + user);
    AuthStatus status = validateUserAndPassword(clientSubject, user, password, useCallbacks, cacheKey);
    if (status == AuthStatus.SUCCESS) {
        // Populate the subject (caller/group principals) and tag the auth type.
        handleCallbacks(clientSubject, user, useCallbacks);
        msgMap.put("javax.servlet.http.authType", "JASPI_AUTH");
    } else {
        rspStatus = HttpServletResponse.SC_FORBIDDEN;
        log.info("Invalid user or password");
    }
    rsp.setStatus(rspStatus);
    log.log(Level.FINE, "exit handleUserPassword", status);
    return status;
}
/**
 * Authenticates from a "Basic ..." Authorization header: decodes the
 * base64 user:password pair, validates it, and on success runs the
 * callbacks and tags the auth type. Sets 200 on success, 403 otherwise.
 */
private AuthStatus handleAuthorizationHeader(String authHeader, HttpServletResponse rsp, Map<String, String> msgMap, Subject clientSubject, String[] useCallbacks,
                                             String cacheKey)
        throws AuthException {
    log.log(Level.FINE, "enter handleAuthorizationHeader", new Object[] { authHeader, msgMap, clientSubject, useCallbacks, cacheKey });
    AuthStatus status = AuthStatus.SEND_FAILURE;
    int rspStatus = HttpServletResponse.SC_FORBIDDEN;
    if (authHeader.startsWith("Basic ")) {
        // Strip the "Basic " prefix and base64-decode to "user:password".
        String basicAuthHeader = decodeCookieString(authHeader.substring(6));
        String uid = getUserName(basicAuthHeader);
        String pw = getPassword(basicAuthHeader);
        log.info("user=" + uid);
        if (isAuthorizationHeaderValid(basicAuthHeader)) {
            status = validateUserAndPassword(clientSubject, uid, pw, useCallbacks, cacheKey);
            if (status == AuthStatus.SUCCESS) {
                rspStatus = HttpServletResponse.SC_OK;
                handleCallbacks(clientSubject, uid, useCallbacks);
                msgMap.put("javax.servlet.http.authType", "JASPI_AUTH");
            } else {
                log.info("Invalid user or password");
            }
        } else {
            log.info("Both user and password must be non-null and non-empty.");
        }
    } else {
        log.info("Authorization header does not begin with \"Basic \"");
    }
    rsp.setStatus(rspStatus);
    log.log(Level.FINE, "exit handleAuthorizationHeader", status);
    return status;
}
/**
 * Issues a basic-auth challenge: sets the WWW-Authenticate header for the
 * configured realm and a 401 status, and tells the runtime to continue the
 * exchange (SEND_CONTINUE).
 */
private AuthStatus setChallengeAuthorizationHeader(HttpServletResponse rsp) {
    log.log(Level.FINE, "enter setChallengeAuthorizationHeader");
    String realmName = options.get("realm.name");
    rsp.setHeader("WWW-Authenticate", "Basic realm=\"" + realmName + "\"");
    log.info("Challenge WWW-Authenticate header = Basic realm=\"" + realmName + "\"");
    rsp.setStatus(HttpServletResponse.SC_UNAUTHORIZED); // 401
    log.log(Level.FINE, "exit setChallengeAuthorizationHeader");
    return AuthStatus.SEND_CONTINUE;
}
/**
 * Extracts the user id (text before the first ':') from a decoded
 * "user:password" basic-auth string.
 *
 * @param basicAuthHeader decoded header, may be null/invalid
 * @return the user id, or null when the header is not valid
 */
private String getUserName(String basicAuthHeader) {
    log.log(Level.FINE, "enter getUserName", basicAuthHeader);
    String uid = null;
    if (isAuthorizationHeaderValid(basicAuthHeader)) {
        int index = basicAuthHeader.indexOf(':');
        uid = basicAuthHeader.substring(0, index);
    } else {
        // BUG FIX: the header value was accidentally concatenated twice
        // (once as a bare prefix before the message text).
        log.info("Authorization header is not valid: " + basicAuthHeader);
    }
    log.log(Level.FINE, "exit getUserName", uid);
    return uid;
}
/**
 * Extracts the password (text after the first ':') from a decoded
 * "user:password" basic-auth string.
 *
 * @param basicAuthHeader decoded header, may be null/invalid
 * @return the password, or null when the header is not valid
 */
private String getPassword(String basicAuthHeader) {
    log.log(Level.FINE, "enter getPassword", basicAuthHeader);
    String pw = null;
    if (isAuthorizationHeaderValid(basicAuthHeader)) {
        int index = basicAuthHeader.indexOf(':');
        pw = basicAuthHeader.substring(index + 1);
    } else {
        // BUG FIX: the header value was accidentally concatenated twice
        // (once as a bare prefix before the message text).
        log.info("Authorization header is not valid: " + basicAuthHeader);
    }
    log.log(Level.FINE, "exit getPassword", pw);
    return pw;
}
/**
 * A decoded basic-auth string is valid when it is non-null, non-empty and
 * contains a ':' with at least one character on each side.
 */
private boolean isAuthorizationHeaderValid(String basicAuthHeader) {
    log.log(Level.FINE, "enter isAuthorizationHeaderValid", basicAuthHeader);
    boolean valid = false;
    if (basicAuthHeader != null && !basicAuthHeader.isEmpty()) {
        int index = basicAuthHeader.indexOf(':');
        // ':' must not be the first or last character, and must exist.
        valid = index > 0 && index < basicAuthHeader.length() - 1;
    }
    log.log(Level.FINE, "exit isAuthorizationHeaderValid", valid);
    return valid;
}
/**
 * Runs the CallerPrincipal and (optionally) GroupPrincipal callbacks to
 * populate the client subject. useCallbacks[1] controls the caller
 * principal, useCallbacks[2] the group principal; "NO" skips that callback.
 * The callbacks array is sized 2 and filled compactly via `index`, then
 * trimmed before being handed to the runtime's handler.
 */
private void handleCallbacks(Subject clientSubject, String userName, String[] useCallbacks) throws AuthException {
    log.log(Level.FINE, "enter handleCallbacks", new Object[] { clientSubject, userName, useCallbacks });
    log.log(Level.FINE, "handleCallbacks cpcbType: " + cpcbType);
    Callback[] callbacks = new Callback[2];
    int index = 0;
    // caller principal
    switch (CBvalues.valueOf(useCallbacks[1])) {
        case NO:
            break; // skip
        default:
            CallerPrincipalCallback cpcb;
            // Tests can request a custom Principal implementation via cpcbType.
            if (cpcbType != null && cpcbType.equals("JASPI_PRINCIPAL"))
                cpcb = new CallerPrincipalCallback(clientSubject, new JASPIPrincipal(userName));
            else
                cpcb = new CallerPrincipalCallback(clientSubject, userName);
            callbacks[index] = cpcb;
            index++;
            log.log(Level.FINE, "added callback", cpcb);
            break;
    }
    // group principal
    switch (CBvalues.valueOf(useCallbacks[2])) {
        case NO:
            break; // skip
        default:
            // Group name comes from the provider configuration options.
            GroupPrincipalCallback gpcb = new GroupPrincipalCallback(clientSubject, new String[] { options.get("group.name") });
            callbacks[index] = gpcb;
            index++;
            log.log(Level.FINE, "added callback", gpcb);
            break;
    }
    if (index > 0) {
        // Trim the fixed-size array down to the callbacks actually added.
        Callback[] cbs = new Callback[index];
        for (int i = 0; i < index; i++) {
            cbs[i] = callbacks[i];
        }
        try {
            log.log(Level.FINE, "handling callbacks: ", cbs);
            handler.handle(cbs);
        } catch (Exception e) {
            e.printStackTrace();
            throw new AuthException(e.toString());
        }
    }
    log.log(Level.FINE, "exit handleCallbacks");
}
/**
 * Validates credentials according to useCallbacks[0]:
 *  NO     - skip validation entirely (trust the caller),
 *  MANUAL - skip validation and stuff user/password/cacheKey directly into
 *           the subject's credential hashtable,
 *  YES    - run the runtime's PasswordValidationCallback against the registry.
 */
private AuthStatus validateUserAndPassword(Subject clientSubject, String user, String password, String[] useCallbacks, String cacheKey) throws AuthException {
    log.log(Level.FINE, "enter validateUserAndPassword", new Object[] { user, password, clientSubject, useCallbacks, cacheKey });
    AuthStatus status = AuthStatus.SEND_FAILURE;
    switch (CBvalues.valueOf(useCallbacks[0])) {
        // skip password validation
        case NO:
            status = AuthStatus.SUCCESS;
            break;
        // manually add user and password to HashTable in subject
        case MANUAL:
            status = AuthStatus.SUCCESS;
            manualAddUserAndPassword(clientSubject, user, password, cacheKey);
            break;
        // use JaspiCallbackHandler for PasswordValidationCallback
        default:
            log.info("validate password for user=" + user + " clientSubject=" + clientSubject);
            if (handler != null) {
                PasswordValidationCallback pwcb = new PasswordValidationCallback(clientSubject, user, password.toCharArray());
                try {
                    handler.handle(new Callback[] { pwcb });
                    // The callback records the registry's verdict; read it back.
                    boolean isValidPassword = pwcb.getResult();
                    log.info("isValidPassword? " + isValidPassword);
                    if (isValidPassword) {
                        status = AuthStatus.SUCCESS;
                    }
                } catch (Exception e) {
                    throw new AuthException(e.toString());
                }
            }
            break;
    }
    log.log(Level.FINE, "exit validateUserAndPassword", status);
    return status;
}
/**
 * Bypasses registry validation by placing the cache key, user id and
 * password directly into the subject's private-credential hashtable, the
 * form the WebSphere security runtime consumes.
 */
private void manualAddUserAndPassword(Subject clientSubject, String user, String password, String cacheKey) {
    log.log(Level.FINE, "enter manualAddUserAndPassword", new Object[] { user, password, clientSubject, cacheKey });
    Hashtable<String, Object> credentials = new Hashtable<String, Object>();
    credentials.put(AttributeNameConstants.WSCREDENTIAL_CACHE_KEY, cacheKey);
    credentials.put(AttributeNameConstants.WSCREDENTIAL_USERID, user);
    credentials.put(AttributeNameConstants.WSCREDENTIAL_PASSWORD, password);
    clientSubject.getPrivateCredentials().add(credentials);
}
/** Base64-decodes the value; malformed input is deliberately treated as "no value" (null). */
private String decodeCookieString(String cookieString) {
    try {
        return Base64Coder.base64Decode(cookieString);
    } catch (Exception e) {
        return null;
    }
}
}
|
epl-1.0
|
kgibm/open-liberty
|
dev/com.ibm.ws.webcontainer/src/com/ibm/ws/webcontainer/osgi/collaborator/CollaboratorHelperImpl.java
|
14661
|
/*******************************************************************************
* Copyright (c) 1997, 2008 IBM Corporation and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* IBM Corporation - initial API and implementation
*******************************************************************************/
package com.ibm.ws.webcontainer.osgi.collaborator;
/*
* LIBERTY overrides:
* 1. Get registered collaborators from CollaboratorService
* 2. Support only the spi generic WebAppInvocationCollaborators
* 3. Leave all the transaction work to the transaction collaborator
*/
import java.io.IOException;
import java.util.Dictionary;
import java.util.EnumSet;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.servlet.ServletContext;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import com.ibm.websphere.servlet.error.ServletErrorReport;
import com.ibm.ws.container.DeployedModule;
import com.ibm.ws.ffdc.annotation.FFDCIgnore;
import com.ibm.ws.runtime.metadata.ComponentMetaData;
import com.ibm.ws.threadContext.ComponentMetaDataAccessorImpl;
import com.ibm.ws.webcontainer.collaborator.WebAppSecurityCollaborator;
import com.ibm.ws.webcontainer.webapp.WebAppDispatcherContext;
import com.ibm.ws.webcontainer.spiadapter.collaborator.IInvocationCollaborator;
import com.ibm.ws.webcontainer.webapp.WebApp;
import com.ibm.ws.webcontainer.osgi.webapp.WebAppConfiguration;
import com.ibm.ws.webcontainer.webapp.WebAppErrorReport;
import com.ibm.wsspi.webcontainer.RequestProcessor;
import com.ibm.wsspi.webcontainer.collaborator.CollaboratorHelper;
import com.ibm.wsspi.webcontainer.collaborator.CollaboratorInvocationEnum;
import com.ibm.wsspi.webcontainer.collaborator.ICollaboratorHelper;
import com.ibm.wsspi.webcontainer.collaborator.ICollaboratorMetaData;
import com.ibm.wsspi.webcontainer.collaborator.IWebAppSecurityCollaborator;
import com.ibm.wsspi.webcontainer.collaborator.WebAppInvocationCollaborator;
import com.ibm.wsspi.webcontainer.logging.LoggerFactory;
import com.ibm.wsspi.webcontainer.metadata.WebComponentMetaData;
import com.ibm.wsspi.webcontainer.metadata.WebModuleMetaData;
import com.ibm.wsspi.webcontainer.security.SecurityViolationException;
public class CollaboratorHelperImpl extends CollaboratorHelper
{
private Set<WebAppInvocationCollaborator> webAppInvCollabs;
private String securityDomainForApp = null;
protected static final Logger logger = LoggerFactory.getInstance().getLogger("com.ibm.ws.webcontainer.collaborator");
/**
 * Wires up the Liberty collaborators for this web app from the
 * CollaboratorService registry; also captures the app's
 * IBM-Authorization-Roles bundle header (if any) so the matching security
 * collaborator can be looked up later.
 */
public CollaboratorHelperImpl(WebApp webApp, DeployedModule deployedMod)
{
    super(webApp);
    if (webApp != null)
    {
        nameSpaceCollaborator = new WebAppNameSpaceCollaboratorImpl();
        transactionCollaborator = CollaboratorServiceImpl.getWebAppTransactionCollaborator();
        // IBM-Authorization-Roles header used to map security collaborator - may be null
        Dictionary<String,String> headers = ((WebAppConfiguration)webApp.getConfiguration()).getBundleHeaders();
        if (headers != null)
            securityDomainForApp = headers.get("IBM-Authorization-Roles");
        connectionCollaborator = CollaboratorServiceImpl.getWebAppConnectionCollaborator();
        webAppInvCollabs = CollaboratorServiceImpl.getWebAppInvocationCollaborators();
    }
}
/*
* Returns the security collaborator that has been registered (by security services) for the
* SecurityDomain specified by the application. If no collaborator has been registered for
* that domain then the super class provides a default security collaborator implementation.
*/
private static WebAppSecurityCollaborator staticDefaultSecurityCollaborator = new WebAppSecurityCollaborator();
/**
 * Returns the security collaborator for this app's security domain.
 * The security service can come and go dynamically, so the registry is
 * consulted on every call; when nothing is registered, a shared no-op
 * default is used. The result is also cached in {@code securityCollaborator}
 * for the superclass's pre/postInvoke processing.
 */
@Override
public IWebAppSecurityCollaborator getSecurityCollaborator() {
    IWebAppSecurityCollaborator live = CollaboratorServiceImpl.getWebAppSecurityCollaborator(securityDomainForApp);
    securityCollaborator = (live != null) ? live : staticDefaultSecurityCollaborator;
    return securityCollaborator;
}
/*
 * LIBERTY: the collaborators passed in by the web app are ignored; the set
 * managed by this class (from the CollaboratorService) is used instead.
 */
public void doInvocationCollaboratorsPreInvoke(IInvocationCollaborator[] webAppInvocationCollaborators, WebComponentMetaData cmd,
                                               ServletRequest request, ServletResponse response)
{
    if (webAppInvCollabs == null) {
        return;
    }
    // Iterating an empty set is a no-op, so no explicit isEmpty check is needed.
    for (WebAppInvocationCollaborator collaborator : webAppInvCollabs) {
        collaborator.preInvoke(cmd, request, response);
    }
}
/** LIBERTY: runs postInvoke on the locally managed collaborator set; the passed-in array is ignored. */
public void doInvocationCollaboratorsPostInvoke(IInvocationCollaborator[] webAppInvocationCollaborators, WebComponentMetaData cmd,
                                                ServletRequest request, ServletResponse response)
{
    if (webAppInvCollabs == null) {
        return;
    }
    for (WebAppInvocationCollaborator collaborator : webAppInvCollabs) {
        collaborator.postInvoke(cmd, request, response);
    }
}
/** Metadata-only preInvoke variant over the locally managed collaborator set. */
public void doInvocationCollaboratorsPreInvoke(IInvocationCollaborator[] webAppInvocationCollaborators, com.ibm.wsspi.webcontainer.metadata.WebComponentMetaData cmd)
{
    if (webAppInvCollabs == null) {
        return;
    }
    for (WebAppInvocationCollaborator collaborator : webAppInvCollabs) {
        collaborator.preInvoke(cmd);
    }
}
/** Metadata-only postInvoke variant over the locally managed collaborator set. */
public void doInvocationCollaboratorsPostInvoke(IInvocationCollaborator[] webAppInvocationCollaborators, com.ibm.wsspi.webcontainer.metadata.WebComponentMetaData cmd)
{
    if (webAppInvCollabs == null) {
        return;
    }
    for (WebAppInvocationCollaborator collaborator : webAppInvCollabs) {
        collaborator.postInvoke(cmd);
    }
}
// The following 3 methods are concrete implementations of the abstract ones in
// CollaboratorHelper. Eventually those abstract methods should be removed.
/** Intentional no-op in Liberty: transaction checking lives in the transaction collaborator. */
protected void checkTransaction(Object tx1)
{
    /**
     * LIBERTY: This function moved into the transaction collaborator
     */
}
/** Intentional no-op in Liberty: rollback checking lives in the transaction collaborator. */
protected void checkForRollback()
{
    /**
     * LIBERTY: This function moved into the transaction collaborator
     */
}
/** Intentional no-op in Liberty: transaction access lives in the transaction collaborator; always returns null. */
@Override
protected Object getTransaction() throws Exception
{
    /**
     * LIBERTY: This function moved into the transaction collaborator
     */
    return null;
}
/**
 * Converts a security preInvoke violation into the appropriate client
 * response. For 403 a custom error page (if defined) takes precedence over
 * the security collaborator's handling; for 401 the collaborator runs first
 * (it adds the required challenge headers) and then any custom error page is
 * sent; any other status is delegated to the collaborator alone.
 *
 * @return the web security context carried by the violation
 * @throws ServletErrorReport when the collaborator's own handling fails
 */
public Object processSecurityPreInvokeException(SecurityViolationException sve, RequestProcessor requestProcessor, HttpServletRequest request,
                                                HttpServletResponse response, WebAppDispatcherContext dispatchContext, WebApp context, String name) throws ServletErrorReport {
    Object secObject = sve.getWebSecurityContext();
    int sc = sve.getStatusCode();
    Throwable cause = sve.getCause();
    if (sc == HttpServletResponse.SC_FORBIDDEN) {
        if (context.isErrorPageDefined(sc)) {
            // Custom 403 page wins over the collaborator's default handling.
            sendDefinedErrorPage(context, request, response, cause, sc);
        } else {
            handleViaSecurityCollaborator(requestProcessor, request, response, cause, name);
        }
    } else if (sc == HttpServletResponse.SC_UNAUTHORIZED) {
        // The collaborator must run first: it adds the challenge headers.
        handleViaSecurityCollaborator(requestProcessor, request, response, cause, name);
        if (context.isErrorPageDefined(sc)) {
            sendDefinedErrorPage(context, request, response, cause, sc);
        }
    } else {
        // Unexpected status code ... not SC_UNAUTHORIZED or SC_FORBIDDEN
        handleViaSecurityCollaborator(requestProcessor, request, response, cause, name);
    }
    return secObject;
}

/** Sends the application-defined error page for the given status code. */
private void sendDefinedErrorPage(WebApp context, HttpServletRequest request, HttpServletResponse response, Throwable cause, int sc) throws ServletErrorReport {
    WebAppErrorReport wErrorReport = new WebAppErrorReport(cause);
    wErrorReport.setErrorCode(sc);
    context.sendError(request, response, wErrorReport);
}

/**
 * Delegates to the security collaborator's exception handling, wrapping any
 * failure in a ServletErrorReport (previously this try/catch was copied
 * verbatim into three branches).
 */
private void handleViaSecurityCollaborator(RequestProcessor requestProcessor, HttpServletRequest request, HttpServletResponse response,
                                           Throwable cause, String name) throws ServletErrorReport {
    try {
        securityCollaborator.handleException(request, response, cause);
    } catch (Exception ex) {
        if (requestProcessor != null) {
            throw WebAppErrorReport.constructErrorReport(ex, requestProcessor);
        }
        throw WebAppErrorReport.constructErrorReport(ex, name);
    }
}
// Pre-invoke hook: re-resolves the security collaborator before delegating to the
// superclass pre-invoke chain. The refresh happens purely via the side effect of
// getSecurityCollaborator(); its return value is intentionally ignored.
// NOTE(review): presumably the collaborator registered for this app's security
// domain can be swapped at runtime (hence "dynamic") — confirm against the
// CollaboratorService registration lifecycle.
@Override
public void preInvokeCollaborators(ICollaboratorMetaData collabMetaData, EnumSet<CollaboratorInvocationEnum> colEnum) throws ServletException,
IOException, Exception {
// refresh dynamic collaborators before using
getSecurityCollaborator();
super.preInvokeCollaborators(collabMetaData, colEnum);
}
// Post-invoke hook: performs the same collaborator refresh as
// preInvokeCollaborators() before delegating to the superclass post-invoke chain.
@Override
public void postInvokeCollaborators(ICollaboratorMetaData collabMetaData, EnumSet<CollaboratorInvocationEnum> colEnum) throws ServletException,
IOException, Exception {
// refresh dynamic collaborators before using
getSecurityCollaborator();
super.postInvokeCollaborators(collabMetaData, colEnum);
}
/**
 * Returns the security collaborator for the web application backing the given
 * ServletContext. If the context cannot be cast to WebApp (e.g. it has been
 * wrapped by another container layer), falls back to the thread-based lookup
 * via {@link #getCurrentSecurityCollaborator()}.
 * NOTE(review): the FINE-level trace message says "returning null", but the
 * ClassCastException path actually returns the thread-based collaborator —
 * the message text appears stale (left unchanged here; it is runtime output).
 */
@FFDCIgnore(ClassCastException.class)
public static IWebAppSecurityCollaborator getCurrentSecurityCollaborator(ServletContext sc) {
IWebAppSecurityCollaborator secCollab = null;
ICollaboratorHelper instance = null;
try {
instance = ((WebApp)sc).getCollaboratorHelper();
} catch (ClassCastException cce) {
if (com.ibm.ejs.ras.TraceComponent.isAnyTracingEnabled() && logger.isLoggable(Level.FINE)) {
logger.logp(Level.FINE, "CollaboratorHelperImpl", "getCurrentSecurityCollaborator", "ClassCastException on ServletContext - returning null");
}
//check if the security information was added during preInvoke
return CollaboratorHelperImpl.getCurrentSecurityCollaborator();
}
if (instance != null)
secCollab = instance.getSecurityCollaborator();
return secCollab;
}
/*
 * Looks up the security collaborator of the web application that is active on the
 * current thread. Safe to call while a request for that application is being
 * processed. Returns null when no web component is currently active.
 */
public static IWebAppSecurityCollaborator getCurrentSecurityCollaborator()
{
    ICollaboratorHelper helper = getCurrentInstance();
    if (helper == null)
    {
        return null;
    }
    return helper.getSecurityCollaborator();
}
/*
 * Indicates whether a 'real' collaborator is registered for the currently active
 * web application (based on the SecurityDomain specified by the application).
 * Returns false when no web component is active on this thread.
 */
public static boolean getCurrentSecurityEnabled()
{
    ICollaboratorHelper helper = getCurrentInstance();
    if (helper == null)
    {
        return false;
    }
    return ((CollaboratorHelperImpl) helper).isSecurityEnabled();
}
/*
 * Resolves the collaborator helper instance for the web application active on the
 * current thread. Returns null when no component is currently active (i.e. no
 * component metadata is associated with the thread).
 */
private static ICollaboratorHelper getCurrentInstance()
{
    ComponentMetaData componentMD = ComponentMetaDataAccessorImpl.getComponentMetaDataAccessor().getComponentMetaData();
    if (componentMD == null)
    {
        return null;
    }
    WebModuleMetaData moduleMD = (WebModuleMetaData) componentMD.getModuleMetaData();
    WebAppConfiguration webAppConfig = (WebAppConfiguration) moduleMD.getConfiguration();
    return webAppConfig.getWebApp().getCollaboratorHelper();
}
/*
 * Indicates whether a 'real' collaborator is registered for this application,
 * based on the SecurityDomain the application specified.
 */
public boolean isSecurityEnabled()
{
    IWebAppSecurityCollaborator collaborator = CollaboratorServiceImpl.getWebAppSecurityCollaborator(securityDomainForApp);
    return collaborator != null;
}

/*
 * Indicates whether a collaborator is registered for the given security domain.
 */
public static boolean isSecurityDomainEnabled(String secDomain)
{
    IWebAppSecurityCollaborator collaborator = CollaboratorServiceImpl.getWebAppSecurityCollaborator(secDomain);
    return collaborator != null;
}
}
|
epl-1.0
|
yashkulkarni/Om
|
code/om/language/operation/back_pull_term_operation.cpp
|
2426
|
/*!
\file
\brief
<a href="http://github.com/sparist/Om">Om</a> source file.
\version
0.1.3
\date
2012-2014
\copyright
Copyright (c) <a href="http://sparist.com">Sparist</a>. All rights reserved. This program and the accompanying materials are made available under the terms of the <a href="http://www.eclipse.org/legal/epl-v10.html">Eclipse Public License, Version 1.0</a>, which accompanies this distribution.
\author
Jason Erb
*/
#ifndef Om_Language_Operation_BackPullTermOperation_
#include "om/language/operation/back_pull_term_operation.hpp"
#ifdef Om_Macro_Test_
#include "om/language/system.hpp"
#ifndef Om_Macro_Precompilation_
#include "boost/test/unit_test.hpp"
#endif
namespace Om {
namespace Language {
namespace Operation {
BOOST_AUTO_TEST_SUITE(BackPullTermOperationTest)
// Verifies that the "[terms]->" operation is discoverable in the system and
// round-trips through evaluation in its normalized braced form.
BOOST_AUTO_TEST_CASE(DefinitionTest) {
BOOST_CHECK_EQUAL(
"{[terms]->}",
System::Get().Evaluate("drop find {[terms]->} system")
);
}
// Exercises the back-pull behavior: the LAST term of the operand is produced
// first, followed by the remaining (front) portion of the operand. Includes
// empty and single-term operands, and a bare operator with no operand (which
// evaluates to itself).
BOOST_AUTO_TEST_CASE(GeneralTest) {
BOOST_CHECK_EQUAL(
"{3}{1{2}}",
System::Get().Evaluate("[terms]-> {1{2}3}")
);
BOOST_CHECK_EQUAL(
"{2}{1}",
System::Get().Evaluate("[terms]-> {1 2}")
);
// Empty operand: both the pulled term and the remainder are empty.
BOOST_CHECK_EQUAL(
"{}{}",
System::Get().Evaluate("[terms]-> {}")
);
BOOST_CHECK_EQUAL(
"{only}{}",
System::Get().Evaluate("[terms]-> {only}")
);
// No operand: the operator is left unevaluated.
BOOST_CHECK_EQUAL(
"[terms]->",
System::Get().Evaluate("[terms]->")
);
BOOST_CHECK_EQUAL(
"{{c}}{a{b}}",
System::Get().Evaluate("[terms]->{a{b}{c}}")
);
}
BOOST_AUTO_TEST_SUITE_END()
}
}
}
#endif
#else
#include "om/language/expression.hpp"
#include "om/language/operation/pull_operation.hpp"
// MARK: - Om::Language::Operation::BackPullTermOperation
#define Type_ \
Om::Language::Operation::BackPullTermOperation
// MARK: public (static)
// Returns the operation's registered name (expanded from the project-wide
// name macro for this operation).
inline char const * Type_::GetName() {
return Om_Language_Operation_BackPullTermOperation_GetName_();
}
// Hands a freshly allocated PullOperation (specialized for pulling a term from
// the back of an Expression) to the evaluation, which takes ownership via the
// auto_ptr. (std::auto_ptr is deprecated in modern C++, but it is the ownership
// idiom used throughout this 2012-era codebase — left as-is.)
inline void Type_::Give(Evaluation & theEvaluation) {
theEvaluation.TakeOperation(
std::auto_ptr<IncompleteOperation>(
new PullOperation<
Expression,
BackPullTermOperation
>
)
);
}
// Pull policy hook used by PullOperation: removes the back term from the
// expression and gives it to the consumer.
template <typename TheConsumer>
inline void Type_::Pull(
Expression & theExpression,
TheConsumer & theConsumer
) {
theExpression.BackGiveTerm(theConsumer);
}
#undef Type_
#endif
|
epl-1.0
|
jerr/jbossforge-core
|
git/impl/src/main/java/org/jboss/forge/addon/git/ui/GitCloneCommandImpl.java
|
3119
|
/**
* Copyright 2016 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Eclipse Public License version 1.0, available at
* http://www.eclipse.org/legal/epl-v10.html
*/
package org.jboss.forge.addon.git.ui;
import org.eclipse.jgit.api.CloneCommand;
import org.eclipse.jgit.api.Git;
import org.jboss.forge.addon.resource.DirectoryResource;
import org.jboss.forge.addon.ui.context.UIBuilder;
import org.jboss.forge.addon.ui.context.UIContext;
import org.jboss.forge.addon.ui.context.UIExecutionContext;
import org.jboss.forge.addon.ui.context.UIValidationContext;
import org.jboss.forge.addon.ui.input.UIInput;
import org.jboss.forge.addon.ui.metadata.UICommandMetadata;
import org.jboss.forge.addon.ui.result.Result;
import org.jboss.forge.addon.ui.result.Results;
import org.jboss.forge.addon.ui.util.Metadata;
/**
* @author <a href="mailto:lincolnbaxter@gmail.com">Lincoln Baxter, III</a>
* @author <a href="mailto:jevgeni.zelenkov@gmail.com">Jevgeni Zelenkov</a>
*
*/
public class GitCloneCommandImpl extends AbstractGitCommand implements GitCloneCommand
{
   private UIInput<String> uri;
   private UIInput<DirectoryResource> targetDirectory;

   /**
    * Supplies the command name and description shown in the UI.
    */
   @Override
   public UICommandMetadata getMetadata(UIContext context)
   {
      UICommandMetadata inherited = super.getMetadata(context);
      return Metadata.from(inherited, this.getClass())
               .name("Git: Clone")
               .description("Clone a GIT repository");
   }

   /**
    * Builds the two required inputs: the repository URI and the clone target directory.
    */
   @Override
   public void initializeUI(UIBuilder builder) throws Exception
   {
      uri = getInputComponentFactory().createInput("uri", String.class)
               .setLabel("URI")
               .setDescription("Git repository URI")
               .setRequired(true);
      targetDirectory = getInputComponentFactory()
               .createInput("targetDirectory", DirectoryResource.class)
               .setLabel("Target directory")
               .setRequired(true);
      builder.add(uri).add(targetDirectory);
   }

   /**
    * Clones the repository at the configured URI into the target directory,
    * reporting progress through the execution context's monitor. On completion
    * the cloned folder becomes the current UI selection.
    */
   @Override
   public Result execute(UIExecutionContext context) throws Exception
   {
      DirectoryResource cloneFolder = targetDirectory.getValue();
      if (!cloneFolder.exists())
      {
         cloneFolder.mkdirs();
      }
      Git clone = null;
      try
      {
         CloneCommand cloneCommand = Git.cloneRepository()
                  .setURI(uri.getValue())
                  .setDirectory(cloneFolder.getUnderlyingResourceObject());
         cloneCommand.setProgressMonitor(new ProgressMonitorAdapter(context.getProgressMonitor()));
         clone = cloneCommand.call();
      }
      finally
      {
         // Always release the repository handle, even if the clone failed.
         getGitUtils().close(clone);
      }
      context.getUIContext().setSelection(cloneFolder);
      return Results.success();
   }

   /**
    * Rejects a target that is missing, an existing non-directory, or a non-empty directory.
    */
   @Override
   public void validate(UIValidationContext validator)
   {
      DirectoryResource folder = targetDirectory.getValue();
      boolean unusable = (folder == null);
      if (!unusable && folder.exists())
      {
         unusable = !folder.isDirectory() || !folder.listResources().isEmpty();
      }
      if (unusable)
      {
         validator.addValidationError(targetDirectory,
                  "The specified target directory should not exist or should be empty directory");
      }
   }

   @Override
   protected boolean isProjectRequired()
   {
      // Cloning creates a new working tree; no project needs to be selected first.
      return false;
   }
}
|
epl-1.0
|
jerr/jbossforge-core
|
shell/tests/src/test/java/org/jboss/forge/addon/shell/parser/CoreCommandTest.java
|
3703
|
/**
* Copyright 2016 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Eclipse Public License version 1.0, available at
* http://www.eclipse.org/legal/epl-v10.html
*/
package org.jboss.forge.addon.shell.parser;
import static org.hamcrest.CoreMatchers.nullValue;
import java.io.File;
import java.util.concurrent.TimeUnit;
import javax.inject.Inject;
import org.hamcrest.CoreMatchers;
import org.jboss.arquillian.container.test.api.Deployment;
import org.jboss.arquillian.junit.Arquillian;
import org.jboss.forge.addon.resource.DirectoryResource;
import org.jboss.forge.addon.resource.FileResource;
import org.jboss.forge.addon.resource.ResourceFactory;
import org.jboss.forge.addon.shell.Shell;
import org.jboss.forge.addon.shell.mock.command.Career;
import org.jboss.forge.addon.shell.mock.command.FooCommand;
import org.jboss.forge.addon.shell.test.ShellTest;
import org.jboss.forge.addon.ui.result.Result;
import org.jboss.forge.arquillian.AddonDeployment;
import org.jboss.forge.arquillian.AddonDeployments;
import org.jboss.forge.arquillian.archive.AddonArchive;
import org.jboss.forge.furnace.repositories.AddonDependencyEntry;
import org.jboss.forge.furnace.util.OperatingSystemUtils;
import org.jboss.shrinkwrap.api.ShrinkWrap;
import org.junit.After;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
/**
*
* @author <a href="ggastald@redhat.com">George Gastaldi</a>
*/
@RunWith(Arquillian.class)
public class CoreCommandTest
{
   @Deployment
   @AddonDeployments({
            @AddonDeployment(name = "org.jboss.forge.addon:shell-test-harness")
   })
   public static AddonArchive getDeployment()
   {
      AddonArchive archive = ShrinkWrap.create(AddonArchive.class)
               .addClasses(FooCommand.class, Career.class)
               .addBeansXML()
               .addAsAddonDependencies(
                        AddonDependencyEntry.create("org.jboss.forge.addon:shell-test-harness"),
                        AddonDependencyEntry.create("org.jboss.forge.furnace.container:cdi"));
      return archive;
   }

   @Inject
   private ShellTest test;

   @Inject
   private ResourceFactory resourceFactory;

   @After
   public void tearDown() throws Exception
   {
      test.close();
   }

   /**
    * "cd" with backslash-escaped spaces must navigate into a directory whose
    * name contains spaces.
    */
   @Test
   public void testEscapes() throws Exception
   {
      assertCdIntoDirectoryWithSpaces("cd Forge\\ 2\\ Escape");
   }

   /**
    * "cd" with a double-quoted argument must navigate into a directory whose
    * name contains spaces.
    */
   @Test
   public void testQuotes() throws Exception
   {
      assertCdIntoDirectoryWithSpaces("cd \"Forge 2 Escape\"");
   }

   /**
    * Shared fixture for both quoting styles (the two tests previously duplicated
    * this block line-for-line): creates a temp working directory containing a
    * child named "Forge 2 Escape", runs the given cd command, and verifies that
    * the shell's current resource moved to that child with no error message.
    */
   private void assertCdIntoDirectoryWithSpaces(String cdCommand) throws Exception
   {
      File tempDir = OperatingSystemUtils.createTempDir();
      tempDir.deleteOnExit();
      DirectoryResource currentResource = resourceFactory.create(DirectoryResource.class, tempDir);
      Shell shell = test.getShell();
      shell.setCurrentResource(currentResource);
      DirectoryResource child = currentResource.getChildDirectory("Forge 2 Escape");
      child.mkdir();
      child.deleteOnExit();
      Result result = test.execute(cdCommand, 10, TimeUnit.SECONDS);
      // Consistently use the statically imported matcher (testEscapes previously
      // used CoreMatchers.nullValue() while testQuotes used the static import).
      Assert.assertThat(result.getMessage(), nullValue());
      Assert.assertEquals(shell.getCurrentResource(), child);
      currentResource.delete(true);
   }
}
|
epl-1.0
|
paulianttila/openhab2
|
bundles/org.openhab.binding.heos/src/main/java/org/openhab/binding/heos/internal/resources/HeosSendCommand.java
|
3589
|
/**
* Copyright (c) 2010-2021 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.binding.heos.internal.resources;
import java.io.IOException;
import org.openhab.binding.heos.internal.json.HeosJsonParser;
import org.openhab.binding.heos.internal.json.dto.HeosResponseObject;
import org.openhab.binding.heos.internal.resources.Telnet.ReadException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* The {@link HeosSendCommand} is responsible to send a command
* to the HEOS bridge
*
* @author Johannes Einig - Initial contribution
*/
public class HeosSendCommand {
    private final Logger logger = LoggerFactory.getLogger(HeosSendCommand.class);

    private final Telnet client;
    private final HeosJsonParser parser = new HeosJsonParser();

    public HeosSendCommand(Telnet client) {
        this.client = client;
    }

    /**
     * Sends a command to the HEOS bridge and parses the reply.
     *
     * @param command the raw command string to send
     * @param clazz the expected payload type; when null the reply is not read and null is returned
     * @return the parsed response; retried up to 3 times (15 s read timeout each) until it reports finished
     * @throws IOException if the client is not connected, no input is received, or the
     *             response never reaches a finished state after the retries
     * @throws ReadException on a read failure from the underlying telnet client
     */
    public <T> HeosResponseObject<T> send(String command, Class<T> clazz) throws IOException, ReadException {
        HeosResponseObject<T> result;
        int attempt = 0;
        boolean send = client.send(command);
        if (clazz == null) {
            // Caller does not want the response parsed; fire-and-forget.
            return null;
        } else if (send) {
            String line = client.readLine();
            if (line == null) {
                throw new IOException("No valid input was received");
            }
            result = parser.parseResponse(line, clazz);
            // The bridge may answer with interim (unfinished) responses; poll a
            // bounded number of times before giving up.
            while (!result.isFinished() && attempt < 3) {
                attempt++;
                logger.trace("Retrying \"{}\" (attempt {})", command, attempt);
                line = client.readLine(15000);
                if (line != null) {
                    result = parser.parseResponse(line, clazz);
                }
            }
            if (attempt >= 3 && !result.isFinished()) {
                throw new IOException("No valid input was received after multiple attempts");
            }
            return result;
        } else {
            throw new IOException("Not connected");
        }
    }

    public boolean isHostReachable() {
        return client.isHostReachable();
    }

    public boolean isConnected() {
        return client.isConnected();
    }

    /**
     * Stops the event-line listener and, if still connected, tells the bridge to
     * stop sending change events. Failures during the final send are logged only
     * (best effort during shutdown).
     */
    public void stopInputListener(String registerChangeEventOFF) {
        logger.debug("Stopping HEOS event line listener");
        client.stopInputListener();
        if (client.isConnected()) {
            try {
                client.send(registerChangeEventOFF);
            } catch (IOException e) {
                logger.debug("Failure during closing connection to HEOS with message: {}", e.getMessage());
            }
        }
    }

    /**
     * Closes the telnet connection to the bridge if it is open.
     */
    public void disconnect() {
        // BUGFIX: the guard was inverted ("if (client.isConnected()) return;"),
        // which made disconnect() a no-op on a live connection and only attempted
        // to disconnect when the client was ALREADY disconnected. Bail out only
        // when there is nothing to close.
        if (!client.isConnected()) {
            return;
        }
        try {
            logger.debug("Disconnecting HEOS command line");
            client.disconnect();
        } catch (IOException e) {
            logger.debug("Failure during closing connection to HEOS with message: {}", e.getMessage());
        }
        logger.debug("Connection to HEOS system closed");
    }

    /**
     * Registers for change events and, if the bridge acknowledges success,
     * starts the client's input listener.
     */
    public void startInputListener(String command) throws IOException, ReadException {
        HeosResponseObject<Void> response = send(command, Void.class);
        if (response.result) {
            client.startInputListener();
        }
    }
}
|
epl-1.0
|
HeliumProject/ThreadBuildingBlocks
|
src/tbb/dynamic_link.cpp
|
22652
|
/*
Copyright 2005-2013 Intel Corporation. All Rights Reserved.
This file is part of Threading Building Blocks.
Threading Building Blocks is free software; you can redistribute it
and/or modify it under the terms of the GNU General Public License
version 2 as published by the Free Software Foundation.
Threading Building Blocks is distributed in the hope that it will be
useful, but WITHOUT ANY WARRANTY; without even the implied warranty
of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Threading Building Blocks; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
As a special exception, you may use this file as part of a free software
library without restriction. Specifically, if other files instantiate
templates or use macros or inline functions from this file, or you compile
this file and link it with other files to produce an executable, this
file does not by itself cause the resulting executable to be covered by
the GNU General Public License. This exception does not however
invalidate any other reasons why the executable file might be covered by
the GNU General Public License.
*/
#include "dynamic_link.h"
#include "tbb/tbb_config.h"
/*
This file is used by both TBB and OpenMP RTL. Do not use __TBB_ASSERT() macro
and runtime_warning() function because they are not available in OpenMP. Use
LIBRARY_ASSERT and DYNAMIC_LINK_WARNING instead.
*/
#include <cstdarg> // va_list etc.
#if _WIN32
#include <malloc.h>
// Unify system calls
#define dlopen( name, flags ) LoadLibrary( name )
#define dlsym( handle, name ) GetProcAddress( handle, name )
#define dlclose( handle ) ( ! FreeLibrary( handle ) )
#define dlerror() GetLastError()
#ifndef PATH_MAX
#define PATH_MAX MAX_PATH
#endif
#else /* _WIN32 */
#include <dlfcn.h>
#include <string.h>
#include <unistd.h>
#include <limits.h>
#include <stdlib.h>
#endif /* _WIN32 */
#if __TBB_WEAK_SYMBOLS_PRESENT
//TODO: use function attribute for weak symbols instead of the pragma.
#pragma weak dlopen
#pragma weak dlsym
#pragma weak dlclose
#pragma weak dlerror
#pragma weak dladdr
#endif /* __TBB_WEAK_SYMBOLS_PRESENT */
#include "tbb/tbb_misc.h"
#define __USE_TBB_ATOMICS ( !(__linux__&&__ia64__) || __TBB_BUILD )
#define __USE_STATIC_DL_INIT (!__ANDROID__)
#if !__USE_TBB_ATOMICS
#include <pthread.h>
#endif
/*
dynamic_link is a common interface for searching for required symbols in an
executable and dynamic libraries.
dynamic_link provides certain guarantees:
1. Either all or none of the requested symbols are resolved. Moreover, if
symbols are not resolved, the dynamic_link_descriptor table is not modified;
2. All returned symbols have secured life time: this means that none of them
can be invalidated until dynamic_unlink is called;
3. Any loaded library is loaded only via the full path. The full path is that
from which the runtime itself was loaded. (This is done to avoid security
issues caused by loading libraries from insecure paths).
dynamic_link searches for the requested symbols in three stages, stopping as
soon as all of the symbols have been resolved.
1. Search the global scope:
a. On Windows: dynamic_link tries to obtain the handle of the requested
library and if it succeeds it resolves the symbols via that handle.
b. On Linux: dynamic_link tries to search for the symbols in the global
scope via the main program handle. If the symbols are present in the global
scope their life time is not guaranteed (since dynamic_link does not know
anything about the library from which they are exported). Therefore it
tries to "pin" the symbols by obtaining the library name and reopening it.
dlopen may fail to reopen the library in two cases:
i. The symbols are exported from the executable. Currently dynamic _link
cannot handle this situation, so it will not find these symbols in this
step.
ii. The necessary library has been unloaded and cannot be reloaded. It
seems there is nothing that can be done in this case. No symbols are
returned.
2. Dynamic load: an attempt is made to load the requested library via the
full path.
The full path used is that from which the runtime itself was loaded. If the
library can be loaded, then an attempt is made to resolve the requested
symbols in the newly loaded library.
If the symbols are not found the library is unloaded.
3. Weak symbols: if weak symbols are available they are returned.
*/
OPEN_INTERNAL_NAMESPACE
#if __TBB_WEAK_SYMBOLS_PRESENT || __TBB_DYNAMIC_LOAD_ENABLED
#if !defined(DYNAMIC_LINK_WARNING) && !__TBB_WIN8UI_SUPPORT
// Report runtime errors and continue.
#define DYNAMIC_LINK_WARNING dynamic_link_warning
// Default warning sink: intentionally ignores the error code (and varargs).
// TBB/OpenMP builds may override DYNAMIC_LINK_WARNING with a real reporter.
static void dynamic_link_warning( dynamic_link_error_t code, ... ) {
    (void) code;
} // library_warning
#endif /* DYNAMIC_LINK_WARNING */
// Resolves all `required` descriptors against `module` via dlsym. All-or-nothing:
// handler slots in `descriptors` are written only after EVERY symbol resolved,
// so a partial failure leaves the table untouched (guarantee #1 in the file
// header comment).
static bool resolve_symbols( dynamic_link_handle module, const dynamic_link_descriptor descriptors[], size_t required )
{
    LIBRARY_ASSERT( module != NULL, "Module handle is NULL" );
    if ( module == NULL )
        return false;
#if __TBB_WEAK_SYMBOLS_PRESENT
    // dlsym itself is a weak symbol here; it may be absent at runtime.
    if ( !dlsym ) return false;
#endif /* __TBB_WEAK_SYMBOLS_PRESENT */
    const size_t n_desc=20; // Usually we don't have more than 20 descriptors per library
    LIBRARY_ASSERT( required <= n_desc, "Too many descriptors is required" );
    if ( required > n_desc ) return false;
    // Stage resolved addresses locally first...
    pointer_to_handler h[n_desc];
    for ( size_t k = 0; k < required; ++k ) {
        dynamic_link_descriptor const & desc = descriptors[k];
        pointer_to_handler addr = (pointer_to_handler)dlsym( module, desc.name );
        if ( !addr ) {
            return false;
        }
        h[k] = addr;
    }
    // Commit the entry points.
    // Cannot use memset here, because the writes must be atomic.
    for( size_t k = 0; k < required; ++k )
        *descriptors[k].handler = h[k];
    return true;
}
#if __TBB_WIN8UI_SUPPORT
// Windows 8 UI (Metro/WinRT) variant: only LoadPackagedLibrary is permitted,
// so there is no global-scope search and no unloading. The output handle
// parameter is ignored (unnamed).
bool dynamic_link( const char* library, const dynamic_link_descriptor descriptors[], size_t required, dynamic_link_handle*, int flags ) {
    dynamic_link_handle tmp_handle = NULL;
    TCHAR wlibrary[256];
    // Library names are ANSI/UTF-8; the packaged-library API wants wide chars.
    if ( MultiByteToWideChar(CP_UTF8, 0, library, -1, wlibrary, 255) == 0 ) return false;
    if ( flags & DYNAMIC_LINK_LOAD )
        tmp_handle = LoadPackagedLibrary( wlibrary, 0 );
    if (tmp_handle != NULL){
        return resolve_symbols(tmp_handle, descriptors, required);
    }else{
        return false;
    }
}
// Unloading is a no-op in the Win8 UI environment; loaded packages stay pinned.
void dynamic_unlink( dynamic_link_handle ) {
}
void dynamic_unlink_all() {
}
#else
/*
There is a security issue on Windows: LoadLibrary() may load and execute malicious code.
See http://www.microsoft.com/technet/security/advisory/2269637.mspx for details.
To avoid the issue, we have to pass full path (not just library name) to LoadLibrary. This
function constructs full path to the specified library (it is assumed the library located
side-by-side with the tbb.dll.
The function constructs absolute path for given relative path. Important: Base directory is not
current one, it is the directory tbb.dll loaded from.
Example:
Let us assume "tbb.dll" is located in "c:\program files\common\intel\" directory, e. g.
absolute path of tbb library is "c:\program files\common\intel\tbb.dll". Absolute path for
"tbbmalloc.dll" would be "c:\program files\common\intel\tbbmalloc.dll". Absolute path for
"malloc\tbbmalloc.dll" would be "c:\program files\common\intel\malloc\tbbmalloc.dll".
*/
// Struct handle_storage is used by dynamic_link routine to store handles of
// all loaded or pinned dynamic libraries. When TBB is shut down, it calls
// dynamic_unlink_all() that unloads modules referenced by handle_storage.
// This struct should not have any constructors since it may be used before
// the constructor is called.
#define MAX_LOADED_MODULES 8 // The number of maximum possible modules which can be loaded
// Registry of every module handle this translation unit loaded or pinned, so
// dynamic_unlink_all() can release them at shutdown. Must stay constructor-free:
// it can be used before static constructors run (see comment above). my_size is
// either a tbb::atomic or a plain size_t protected by a pthread spinlock,
// depending on __USE_TBB_ATOMICS.
struct handle_storage {
#if __USE_TBB_ATOMICS
::tbb::atomic<size_t> my_size;
#else
size_t my_size;
pthread_spinlock_t my_lock;
#endif
dynamic_link_handle my_handles[MAX_LOADED_MODULES];
// Reserves a slot (atomically or under the spinlock) and records the handle.
void add_handle(const dynamic_link_handle &handle) {
#if !__USE_TBB_ATOMICS
int res = pthread_spin_lock( &my_lock );
LIBRARY_ASSERT( res==0, "pthread_spin_lock failed" );
#endif
const size_t ind = my_size++;
#if !__USE_TBB_ATOMICS
res = pthread_spin_unlock( &my_lock );
LIBRARY_ASSERT( res==0, "pthread_spin_unlock failed" );
#endif
LIBRARY_ASSERT( ind < MAX_LOADED_MODULES, "Too many modules are loaded" );
my_handles[ind] = handle;
}
// Unloads every recorded handle. Snapshot of my_size taken once; no locking —
// presumably called only during single-threaded shutdown (TODO confirm).
void free_handles() {
const size_t size = my_size;
for (size_t i=0; i<size; ++i)
dynamic_unlink( my_handles[i] );
}
};
handle_storage handles;
// atomic_once: run `func` exactly once, via TBB's do-once state machine or
// pthread_once, matching the atomics choice above.
#if __USE_TBB_ATOMICS
static void atomic_once ( void (*func) (void), tbb::atomic< tbb::internal::do_once_state > &once_state ) {
tbb::internal::atomic_do_once( func, once_state );
}
#define ATOMIC_ONCE_DECL( var ) tbb::atomic< tbb::internal::do_once_state > var
#else
static void atomic_once ( void (*func) (), pthread_once_t &once_state ) {
pthread_once( &once_state, func );
}
#define ATOMIC_ONCE_DECL( var ) pthread_once_t var = PTHREAD_ONCE_INIT
#endif
ATOMIC_ONCE_DECL( init_dl_data_state );
// Cached absolute directory (and its length) of the module containing this
// code; filled in once by init_ap_data().
static struct _ap_data {
char _path[PATH_MAX+1];
size_t _len;
} ap_data;
// Computes the directory containing this module (trailing separator included)
// into ap_data. On any failure ap_data._len stays 0, which callers treat as
// "absolute path unavailable". Windows: GetModuleHandleEx/GetModuleFileName.
// POSIX: dladdr on one of our own functions; a relative dli_fname is made
// absolute by prefixing the current working directory.
static void init_ap_data() {
#if _WIN32
// Get handle of our DLL first.
HMODULE handle;
BOOL brc = GetModuleHandleEx(
GET_MODULE_HANDLE_EX_FLAG_FROM_ADDRESS | GET_MODULE_HANDLE_EX_FLAG_UNCHANGED_REFCOUNT,
(LPCSTR)( & dynamic_link ), // any function inside the library can be used for the address
& handle
);
if ( !brc ) { // Error occurred.
int err = GetLastError();
DYNAMIC_LINK_WARNING( dl_sys_fail, "GetModuleHandleEx", err );
return;
}
// Now get path to our DLL.
DWORD drc = GetModuleFileName( handle, ap_data._path, static_cast< DWORD >( PATH_MAX ) );
if ( drc == 0 ) { // Error occurred.
int err = GetLastError();
DYNAMIC_LINK_WARNING( dl_sys_fail, "GetModuleFileName", err );
return;
}
if ( drc >= PATH_MAX ) { // Buffer too short.
DYNAMIC_LINK_WARNING( dl_buff_too_small );
return;
}
// Find the position of the last backslash.
char *backslash = strrchr( ap_data._path, '\\' );
if ( !backslash ) { // Backslash not found.
LIBRARY_ASSERT( backslash!=NULL, "Unbelievable.");
return;
}
LIBRARY_ASSERT( backslash >= ap_data._path, "Unbelievable.");
// Truncate right after the final backslash, keeping only the directory part.
ap_data._len = (size_t)(backslash - ap_data._path) + 1;
*(backslash+1) = 0;
#else
// Get the library path
#if __TBB_WEAK_SYMBOLS_PRESENT
// dladdr/dlerror are weak; bail out if the dynamic loader is absent.
if ( !dladdr || !dlerror ) return;
#endif /* __TBB_WEAK_SYMBOLS_PRESENT */
Dl_info dlinfo;
int res = dladdr( (void*)&dynamic_link, &dlinfo ); // any function inside the library can be used for the address
if ( !res ) {
char const * err = dlerror();
DYNAMIC_LINK_WARNING( dl_sys_fail, "dladdr", err );
return;
} else {
LIBRARY_ASSERT( dlinfo.dli_fname!=NULL, "Unbelievable." );
}
// fname_len counts the directory part of dli_fname, including the slash.
char const *slash = strrchr( dlinfo.dli_fname, '/' );
size_t fname_len=0;
if ( slash ) {
LIBRARY_ASSERT( slash >= dlinfo.dli_fname, "Unbelievable.");
fname_len = (size_t)(slash - dlinfo.dli_fname) + 1;
}
// rc = offset in ap_data._path at which to append dli_fname's directory part.
size_t rc;
if ( dlinfo.dli_fname[0]=='/' ) {
// The library path is absolute
rc = 0;
ap_data._len = 0;
} else {
// The library path is relative so get the current working directory
if ( !getcwd( ap_data._path, sizeof(ap_data._path)/sizeof(ap_data._path[0]) ) ) {
DYNAMIC_LINK_WARNING( dl_buff_too_small );
return;
}
ap_data._len = strlen( ap_data._path );
ap_data._path[ap_data._len++]='/';
rc = ap_data._len;
}
if ( fname_len>0 ) {
if ( ap_data._len>PATH_MAX ) {
DYNAMIC_LINK_WARNING( dl_buff_too_small );
ap_data._len=0;
return;
}
strncpy( ap_data._path+rc, dlinfo.dli_fname, fname_len );
ap_data._len += fname_len;
ap_data._path[ap_data._len]=0;
}
#endif /* _WIN32 */
}
// One-time initializer run via atomic_once: caches the module directory and,
// in the spinlock build, initializes the handle-registry lock.
static void init_dl_data() {
init_ap_data();
#if !__USE_TBB_ATOMICS
int res;
res = pthread_spin_init( &handles.my_lock, PTHREAD_PROCESS_SHARED );
LIBRARY_ASSERT( res==0, "pthread_spin_init failed" );
#endif
}
// ap_data structure is initialized with current directory on Linux.
// So it should be initialized as soon as possible since the current directory may be changed.
// static_init_ap_data object provides this initialization during library loading.
static class _static_init_dl_data {
public:
_static_init_dl_data() {
#if __USE_STATIC_DL_INIT
// Eagerly capture the cwd-based path at library-load time (see note above);
// disabled on Android via __USE_STATIC_DL_INIT.
atomic_once( &init_dl_data, init_dl_data_state );
#endif
}
#if !__USE_TBB_ATOMICS
// Spinlock builds must tear the registry lock down at static destruction.
~_static_init_dl_data() {
int res;
res = pthread_spin_destroy( &handles.my_lock );
LIBRARY_ASSERT( res==0, "pthread_spin_destroy failed" );
}
#endif
} static_init_dl_data;
/*
The function constructs absolute path for given relative path. Important: Base directory is not
current one, it is the directory libtbb.so loaded from.
Arguments:
in name -- Name of a file (may be with relative path; it must not be an absolute one).
out path -- Buffer to save result (absolute path) to.
in len -- Size of buffer.
ret -- 0 -- Error occurred.
> len -- Buffer too short, required size returned.
otherwise -- Ok, number of characters (not counting terminating null) written to
buffer.
*/
#if __TBB_DYNAMIC_LOAD_ENABLED
// Builds "<module directory>/<name>" into `path` (see contract in the comment
// block above: returns 0 on error, the required size if `len` is too small,
// else the number of characters written). Triggers lazy initialization of
// ap_data on first use.
static size_t abs_path( char const * name, char * path, size_t len ) {
atomic_once( &init_dl_data, init_dl_data_state );
if ( !ap_data._len )
return 0;
size_t name_len = strlen( name );
size_t full_len = name_len+ap_data._len;
// Only write when the result (plus terminator) fits; otherwise just report
// the required length so the caller can detect dl_buff_too_small.
if ( full_len < len ) {
strncpy( path, ap_data._path, ap_data._len );
strncpy( path+ap_data._len, name, name_len );
path[full_len] = 0;
}
return full_len;
}
#endif // __TBB_DYNAMIC_LOAD_ENABLED
// Stage 3 of the lookup (see file header): accept weak symbols if the linker
// provided all of them; otherwise (or when weak symbols are unsupported) fail.
#if __TBB_WEAK_SYMBOLS_PRESENT
static bool weak_symbol_link( const dynamic_link_descriptor descriptors[], size_t required )
{
// Check if the required entries are present in what was loaded into our process.
for ( size_t k = 0; k < required; ++k )
if ( !descriptors[k].ptr )
return false;
// Commit the entry points.
for ( size_t k = 0; k < required; ++k )
*descriptors[k].handler = (pointer_to_handler) descriptors[k].ptr;
return true;
}
#else
static bool weak_symbol_link( const dynamic_link_descriptor[], size_t ) {
return false;
}
#endif /* __TBB_WEAK_SYMBOLS_PRESENT */
// Releases one module handle. When dynamic loading is compiled out, the handle
// is simply dropped (nothing was dlopen'ed). NULL handles are ignored.
void dynamic_unlink( dynamic_link_handle handle ) {
if ( handle ) {
#if __TBB_WEAK_SYMBOLS_PRESENT
LIBRARY_ASSERT( dlclose != NULL, "dlopen is present but dlclose is NOT present!?" );
#endif /* __TBB_WEAK_SYMBOLS_PRESENT */
#if __TBB_DYNAMIC_LOAD_ENABLED
dlclose( handle );
#endif /* __TBB_DYNAMIC_LOAD_ENABLED */
}
}
// Releases every handle recorded in the global registry (shutdown path).
void dynamic_unlink_all() {
handles.free_handles();
}
#if _WIN32
// Stage 1 (Windows): if the library is already loaded in this process, resolve
// against it; GetModuleHandleEx without UNCHANGED_REFCOUNT bumps the refcount,
// pinning the module, so a failed resolve must FreeLibrary to undo the pin.
static dynamic_link_handle global_symbols_link( const char* library, const dynamic_link_descriptor descriptors[], size_t required ) {
dynamic_link_handle library_handle;
if ( GetModuleHandleEx( 0, library, &library_handle ) ) {
if ( resolve_symbols( library_handle, descriptors, required ) )
return library_handle;
else
FreeLibrary( library_handle );
}
return 0;
}
#else /* _WIN32 */
// It is supposed that all symbols are from the only one library
static dynamic_link_handle pin_symbols( dynamic_link_descriptor desc, const dynamic_link_descriptor descriptors[], size_t required ) {
// The library has been loaded by another module and contains at least one requested symbol.
// But after we obtained the symbol the library can be unloaded by another thread
// invalidating our symbol. Therefore we need to pin the library in memory.
dynamic_link_handle library_handle;
Dl_info info;
// Get library's name from earlier found symbol
if ( dladdr( (void*)*desc.handler, &info ) ) {
// Pin the library
library_handle = dlopen( info.dli_fname, RTLD_LAZY );
if ( library_handle ) {
// If original library was unloaded before we pinned it
// and then another module loaded in its place, the earlier
// found symbol would become invalid. So revalidate them.
if ( !resolve_symbols( library_handle, descriptors, required ) ) {
// Wrong library.
dynamic_unlink(library_handle);
library_handle = 0;
}
} else {
char const * err = dlerror();
DYNAMIC_LINK_WARNING( dl_lib_not_found, info.dli_fname, err );
}
}
else {
// The library have been unloaded by another thread
library_handle = 0;
}
return library_handle;
}
// Stage 1 (POSIX): probe the global scope through the main program handle,
// using only the FIRST symbol; on success, pin_symbols() locates the owning
// library and re-resolves the full set against it (library name is unused).
static dynamic_link_handle global_symbols_link( const char*, const dynamic_link_descriptor descriptors[], size_t required ) {
#if __TBB_WEAK_SYMBOLS_PRESENT
if ( !dlopen ) return 0;
#endif /* __TBB_WEAK_SYMBOLS_PRESENT */
dynamic_link_handle library_handle = dlopen( NULL, RTLD_LAZY );
// Check existence of only the first symbol, then use it to find the library and load all necessary symbols
pointer_to_handler handler;
dynamic_link_descriptor desc = { descriptors[0].name, &handler };
if ( resolve_symbols( library_handle, &desc, 1 ) )
return pin_symbols( desc, descriptors, required );
return 0;
}
#endif /* _WIN32 */
// Records a loaded handle either in the caller-provided slot or, when the
// caller passed none, in the global registry so dynamic_unlink_all() frees it.
static void save_library_handle( dynamic_link_handle src, dynamic_link_handle *dst ) {
if ( dst )
*dst = src;
else
handles.add_handle( src );
}
// Stage 2: load the library by the FULL side-by-side path (see the security
// note above about LoadLibrary search-path attacks) and resolve all required
// symbols, unloading again if any are missing. Returns 0 when dynamic loading
// is disabled, the path could not be built, loading failed, or symbols were
// incomplete.
dynamic_link_handle dynamic_load( const char* library, const dynamic_link_descriptor descriptors[], size_t required ) {
#if __TBB_DYNAMIC_LOAD_ENABLED
#if _XBOX
return LoadLibrary (library);
#else /* _XBOX */
size_t const len = PATH_MAX + 1;
char path[ len ];
size_t rc = abs_path( library, path, len );
if ( 0 < rc && rc < len ) {
#if _WIN32
// Prevent Windows from displaying silly message boxes if it fails to load library
// (e.g. because of MS runtime problems - one of those crazy manifest related ones)
UINT prev_mode = SetErrorMode (SEM_FAILCRITICALERRORS);
#endif /* _WIN32 */
#if __TBB_WEAK_SYMBOLS_PRESENT
if ( !dlopen ) return 0;
#endif /* __TBB_WEAK_SYMBOLS_PRESENT */
dynamic_link_handle library_handle = dlopen( path, RTLD_LAZY );
#if _WIN32
SetErrorMode (prev_mode);
#endif /* _WIN32 */
if( library_handle ) {
if( !resolve_symbols( library_handle, descriptors, required ) ) {
// The loaded library does not contain all the expected entry points
dynamic_unlink( library_handle );
library_handle = NULL;
}
} else
DYNAMIC_LINK_WARNING( dl_lib_not_found, path, dlerror() );
return library_handle;
} else if ( rc>=len )
DYNAMIC_LINK_WARNING( dl_buff_too_small );
// rc == 0 means failing of init_ap_data so the warning has already been issued.
#endif /* _XBOX */
#endif /* __TBB_DYNAMIC_LOAD_ENABLED */
return 0;
}
// Links the given descriptors using the strategies requested in `flags`, in
// order of preference:
//   DYNAMIC_LINK_GLOBAL - symbols already present in the process,
//   DYNAMIC_LINK_LOAD   - explicitly load `library` from disk,
//   DYNAMIC_LINK_WEAK   - weak symbols resolved at build time.
// On success the owning library handle is stored via `handle` (or in the
// global registry when `handle` is null) and true is returned.
// Fix: the previous version returned true and saved a null handle even when
// every strategy had failed, so callers could not detect the failure.
bool dynamic_link( const char* library, const dynamic_link_descriptor descriptors[], size_t required, dynamic_link_handle *handle, int flags ) {
    // TODO: May global_symbols_link find weak symbols?
    dynamic_link_handle library_handle = ( flags & DYNAMIC_LINK_GLOBAL ) ? global_symbols_link( library, descriptors, required ) : 0;
    if ( !library_handle && ( flags & DYNAMIC_LINK_LOAD ) )
        library_handle = dynamic_load( library, descriptors, required );
    if ( !library_handle && ( flags & DYNAMIC_LINK_WEAK ) )
        return weak_symbol_link( descriptors, required );
    if ( !library_handle ) {
        if ( handle )
            *handle = 0; // do not leave the caller's slot uninitialized
        return false;
    }
    save_library_handle( library_handle, handle );
    return true;
}
#endif /*__TBB_WIN8UI_SUPPORT*/
#else /* __TBB_WEAK_SYMBOLS_PRESENT || __TBB_DYNAMIC_LOAD_ENABLED */
// Fallback implementations used when neither weak symbols nor dynamic
// loading are available: linking always fails (reporting a null handle) and
// the unlink routines are no-ops.
bool dynamic_link( const char*, const dynamic_link_descriptor*, size_t, dynamic_link_handle *handle, int ) {
if ( handle )
*handle=0;
return false;
}
void dynamic_unlink( dynamic_link_handle ) {
}
void dynamic_unlink_all() {
}
#endif /* __TBB_WEAK_SYMBOLS_PRESENT || __TBB_DYNAMIC_LOAD_ENABLED */
CLOSE_INTERNAL_NAMESPACE
|
gpl-2.0
|
wkurniawan07/repo
|
src/main/java/com/google/appengine/logging/v1/SourceReferenceOrBuilder.java
|
1392
|
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/appengine/logging/v1/request_log.proto
package com.google.appengine.logging.v1;
/**
 * Accessor contract for {@code google.appengine.logging.v1.SourceReference}
 * messages: a (repository URI, revision id) pair identifying deployed source.
 * Generated by protoc from {@code request_log.proto}; do not edit by hand —
 * regeneration will overwrite changes.
 */
public interface SourceReferenceOrBuilder extends
    // @@protoc_insertion_point(interface_extends:google.appengine.logging.v1.SourceReference)
    com.google.protobuf.MessageOrBuilder {
  /**
   * <pre>
   * Optional. A URI string identifying the repository.
   * Example: "https://github.com/GoogleCloudPlatform/kubernetes.git"
   * </pre>
   *
   * <code>string repository = 1;</code>
   */
  java.lang.String getRepository();
  /**
   * <pre>
   * Optional. A URI string identifying the repository.
   * Example: "https://github.com/GoogleCloudPlatform/kubernetes.git"
   * </pre>
   *
   * <code>string repository = 1;</code>
   */
  com.google.protobuf.ByteString
      getRepositoryBytes();
  /**
   * <pre>
   * The canonical and persistent identifier of the deployed revision.
   * Example (git): "0035781c50ec7aa23385dc841529ce8a4b70db1b"
   * </pre>
   *
   * <code>string revision_id = 2;</code>
   */
  java.lang.String getRevisionId();
  /**
   * <pre>
   * The canonical and persistent identifier of the deployed revision.
   * Example (git): "0035781c50ec7aa23385dc841529ce8a4b70db1b"
   * </pre>
   *
   * <code>string revision_id = 2;</code>
   */
  com.google.protobuf.ByteString
      getRevisionIdBytes();
}
|
gpl-2.0
|
skyHALud/codenameone
|
Ports/iOSPort/xmlvm/apache-harmony-6.0-src-r991881/classlib/modules/awt/src/main/java/common/java/awt/datatransfer/FlavorEvent.java
|
1101
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package java.awt.datatransfer;
import java.util.EventObject;
/**
 * Event whose source is a {@link Clipboard}; per the java.awt.datatransfer
 * API this is delivered when the set of available data flavors on that
 * clipboard changes (the class itself carries no state beyond its source).
 */
public class FlavorEvent extends EventObject {
    // Fixed serialization id for cross-version compatibility.
    private static final long serialVersionUID = -5842664112252414548L;

    /**
     * @param source the clipboard this event originates from
     */
    public FlavorEvent(Clipboard source) {
        super(source);
    }
}
|
gpl-2.0
|
xclouder/godbattle
|
Assets/Plugins/BehaviourMachine/Source/Nodes/UnityGUI/Drawable/GUIBox.cs
|
983
|
//----------------------------------------------
// Behaviour Machine
// Copyright © 2014 Anderson Campos Cardoso
//----------------------------------------------
using UnityEngine;
using System.Collections;
namespace BehaviourMachine {

    /// <summary>
    /// Make an auto-layout box.
    /// </summary>
    [NodeInfo( category = "UnityGUI/Drawable/GUI/",
                icon = "GUIText",
                description = "Make an auto-layout box",
                url = "http://docs.unity3d.com/Documentation/ScriptReference/GUI.Box.html")]
    public class GUIBox : GUIContentNode {

        /// <summary>
        /// Draws the box; returns Error when called outside the OnGUI pass.
        /// </summary>
        public override Status Update () {
            // IMGUI calls are only valid while a GUI event is being dispatched.
            if (Event.current == null)
                return Status.Error;

            var rect = this.GetRect();
            var content = GetGUIContent();

            if (guiStyle.isNone)
                GUI.Box(rect, content);
            else
                GUI.Box(rect, content, guiStyle.Value);

            return Status.Success;
        }
    }
}
|
gpl-2.0
|
jolay/ayansa
|
cache/mod_vvisit_counter/860aea6b5aac75573e8d7d8ebc839c97-cache-mod_vvisit_counter-16fd84fd48a2334ea0406362f60e47b2.php
|
86
|
<?php die("Access Denied"); ?>#x#a:2:{s:6:"output";s:0:"";s:6:"result";s:7:"1459634";}
|
gpl-2.0
|
alexnunes2015/ZED-OS
|
Qualcomm Augmented Reality/Scripts/QCARBehaviour.cs
|
808
|
/*==============================================================================
Copyright (c) 2010-2013 Qualcomm Connected Experiences, Inc.
All Rights Reserved.
Confidential and Proprietary - Qualcomm Connected Experiences, Inc.
==============================================================================*/
using System;
using System.Collections.Generic;
using System.Runtime.InteropServices;
using System.Text.RegularExpressions;
using UnityEngine;
/// <summary>
/// The QCARBehaviour class handles tracking and triggers native video
/// background rendering. The class updates all Trackables in the scene.
/// </summary>
[RequireComponent(typeof(Camera))]
public class QCARBehaviour : QCARAbstractBehaviour
{
    // The constructor only installs the Android-specific Unity player bridge;
    // tracking and video-background behavior live in the base class
    // (QCARAbstractBehaviour), which is not visible in this file.
    QCARBehaviour()
    {
        mAndroidUnityPlayer = new AndroidUnityPlayer();
    }
}
|
gpl-2.0
|
gsnerf/Emby
|
MediaBrowser.WebDashboard/dashboard-ui/components/categorysyncbuttons.js
|
1432
|
define(['itemHelper'], function (itemHelper) {
    'use strict';

    // Wires up every ".categorySyncButton" in the view: each button gets a
    // click handler and is shown or hidden depending on whether the current
    // user is allowed to sync.
    function initSyncButtons(view) {
        var apiClient = window.ApiClient;

        // No connection or no signed-in user: leave the buttons untouched.
        if (!apiClient || !apiClient.getCurrentUserId()) {
            return;
        }

        apiClient.getCurrentUser().then(function (user) {
            // Generic syncable item used purely to probe the permission check.
            var probeItem = {
                SupportsSync: true
            };

            var buttons = view.querySelectorAll('.categorySyncButton');
            for (var i = 0; i < buttons.length; i++) {
                var button = buttons[i];
                button.addEventListener('click', onCategorySyncButtonClick);
                if (itemHelper.canSync(user, probeItem)) {
                    button.classList.remove('hide');
                } else {
                    button.classList.add('hide');
                }
            }
        });
    }

    // Click handler: opens the sync dialog scoped to the button's category
    // and the currently displayed library folder.
    function onCategorySyncButtonClick(e) {
        var category = this.getAttribute('data-category');
        var parentId = LibraryMenu.getTopParentId();

        require(['syncDialog'], function (syncDialog) {
            syncDialog.showMenu({
                ParentId: parentId,
                Category: category,
                serverId: ApiClient.serverId()
            });
        });
    }

    return {
        init: function (view) {
            initSyncButtons(view);
        }
    };
});
|
gpl-2.0
|
jrsix/cms
|
src/JR.Cms/Repository/SiteRepository.cs
|
9243
|
using System;
using System.Collections.Generic;
using System.Data.Common;
using System.Linq;
using System.Text.RegularExpressions;
using JR.Cms.Domain.Interface.Site;
using JR.Cms.Domain.Interface.Site.Category;
using JR.Cms.Domain.Interface.Site.Extend;
using JR.Cms.Domain.Interface.Site.Link;
using JR.Cms.Domain.Interface.Site.Template;
using JR.Cms.Domain.Interface.User;
using JR.Cms.Domain.Site;
using JR.Cms.Infrastructure;
using JR.Cms.Library.DataAccess.DAL;
namespace JR.Cms.Repository
{
    /// <summary>
    /// Repository for site aggregates and their friend links. Reads go
    /// through a process-wide cache (RepositoryDataCache); writes invalidate
    /// that cache so the next read reloads from the database.
    /// </summary>
    public class SiteRepository : BaseSiteRepository, ISiteRepo
    {
        // Data-access objects shared by all repository instances.
        private static readonly SiteDal siteDal = new SiteDal();
        private static readonly LinkDal linkDal = new LinkDal();
        // Collaborating repositories handed to the site factory on creation.
        private readonly IExtendFieldRepository _extendFieldRepository;
        private readonly ICategoryRepo _categoryRep;
        private readonly ITemplateRepo _tempRep;
        private readonly IUserRepository _userRep;

        /// <summary>
        /// Captures the collaborating repositories used when materializing
        /// site aggregates.
        /// </summary>
        public SiteRepository(
            IExtendFieldRepository extendFieldRepository,
            ICategoryRepo categoryRepository,
            ITemplateRepo tempRep,
            IUserRepository userRep
        )
        {
            _extendFieldRepository = extendFieldRepository;
            _categoryRep = categoryRepository;
            _tempRep = tempRep;
            _userRep = userRep;
        }

        /// <summary>
        /// Wraps a raw site entity into an ISite aggregate wired to this
        /// repository and its collaborators.
        /// </summary>
        public ISite CreateSite(CmsSiteEntity site)
        {
            return base.CreateSite(this,
                _extendFieldRepository,
                _categoryRep,
                _tempRep,
                _userRep,
                site);
        }

        /// <summary>
        /// Inserts (aggregate id <= 0) or updates an existing site, then
        /// invalidates the site/category caches. Returns the (possibly newly
        /// assigned) site id. Throws ArgumentException on DAL failure.
        /// </summary>
        public int SaveSite(ISite site)
        {
            var siteId = site.GetAggregateRootId();
            if (site.GetAggregateRootId() <= 0)
            {
                siteId = siteDal.CreateSite(site);
                if (siteId <= 0) throw new ArgumentException("创建站点失败");
            }
            else
            {
                // UpdateSite returns affected rows; anything but 1 means the site is missing.
                if (siteDal.UpdateSite(site) != 1) throw new ArgumentException("站点不存在,保存失败");
            }
            // Clear caches so subsequent reads refetch fresh data.
            RepositoryDataCache._siteDict = null;
            RepositoryDataCache._categories = null;
            return siteId;
        }

        /// <summary>
        /// Returns all sites, loading and caching them on first access.
        /// NOTE(review): the lazy fill of RepositoryDataCache._siteDict is not
        /// synchronized — appears to assume single-threaded initialization;
        /// confirm before relying on it under concurrent requests.
        /// </summary>
        public IList<ISite> GetSites()
        {
            if (RepositoryDataCache._siteDict == null)
            {
                RepositoryDataCache._siteDict = new Dictionary<int, ISite>();
                siteDal.ReadSites(rd =>
                {
                    while (rd.Read())
                    {
                        var site = new CmsSiteEntity();
                        site.SiteId = Convert.ToInt32(rd["site_id"]);
                        site.Name = rd["name"].ToString();
                        //ISite site = this.CreateSite(Convert.ToInt32(rd["site_id"]), );
                        //rd.CopyToEntity<ISite>(site);
                        site.AppPath = rd["app_name"].ToString();
                        site.Tpl = rd["tpl"].ToString();
                        site.State = int.Parse(rd["state"].ToString());
                        site.Location = rd["location"].ToString();
                        // Normalize whitespace-only locations to the empty string.
                        if (site.Location != null && site.Location.Trim() == "") site.Location = "";
                        site.Domain = rd["domain"].ToString();
                        site.ProAddress = rd["pro_address"].ToString();
                        site.ProEmail = rd["pro_email"].ToString();
                        site.ProFax = rd["pro_fax"].ToString();
                        site.ProPost = rd["pro_post"].ToString();
                        site.Note = rd["note"].ToString();
                        site.ProNotice = rd["pro_notice"].ToString();
                        site.ProPhone = rd["pro_phone"].ToString();
                        site.ProIm = rd["pro_im"].ToString();
                        site.SeoTitle = rd["seo_title"].ToString();
                        site.SeoKeywords = rd["seo_keywords"].ToString();
                        site.SeoDescription = rd["seo_description"].ToString();
                        site.ProSlogan = rd["pro_slogan"].ToString();
                        site.ProTel = rd["pro_tel"].ToString();
                        site.Language = int.Parse(rd["language"].ToString());
                        var ist = CreateSite(site);
                        RepositoryDataCache._siteDict.Add(site.SiteId, ist);
                    }
                });
            }
            return RepositoryDataCache._siteDict.Values.ToList();
        }

        /// <summary>
        /// Looks a site up by id via binary search. Throws when no sites exist.
        /// NOTE(review): BinarySearch.IntSearch presumably requires the list to
        /// be ordered by aggregate-root id — confirm ReadSites returns rows
        /// ordered by site_id.
        /// </summary>
        public ISite GetSiteById(int siteId)
        {
            var sites = GetSites();
            if (sites.Count == 0) throw new Exception("Missing site");
            return BinarySearch.IntSearch(sites, 0, sites.Count, siteId, a => a.GetAggregateRootId());
        }

        /// <summary>
        /// Resolves a site for the given host/path; falls back first to the
        /// site with neither domain nor app path bound, then to the first site.
        /// </summary>
        public ISite GetSingleOrDefaultSite(string host, string appPath)
        {
            var site = GetSiteByUri(host, appPath);
            if (site != null) return site;
            var sites = GetSites();
            if (sites.Count == 0) throw new Exception("Missing site");
            // Prefer the site whose domain and app path are both empty.
            foreach (var _site in sites)
                //if (_site.Id == 7)
                //{
                //    return _site;
                //}
                if (_site.Get().Domain == "" && _site.Get().AppPath == "")
                    return _site;
            return sites[0];
        }

        /// <summary>
        /// Matches a site by request host (port stripped) and optional virtual
        /// directory name. Domain-bound matches win over directory-bound ones;
        /// the matched site is tagged with its run type (stand-alone vs.
        /// virtual directory). Returns null when nothing matches.
        /// </summary>
        public ISite GetSiteByUri(string host, string appPath)
        {
            var sites = GetSites();
            var i = host.IndexOf(":");
            if (i != -1) host = host.Substring(0, i);
            var appName = appPath != null && appPath.StartsWith("/") ? appPath.Substring(1) : appPath;
            //todo:
            // site = sites[0];
            //return sites;
            // Pattern matching the host either exactly or as a whitespace-separated
            // entry inside the stored Domain field (which may hold several hosts).
            var _hostName = string.Concat(
                "^", host.Replace(".", "\\."),
                "$|\\s+", host.Replace(".", "\\."),
                "\\s*|\\s*", host.Replace(".", "\\."), "\\s+");
            ISite curr = null;
            foreach (var s in sites)
            {
                if (string.IsNullOrEmpty(s.Get().Domain)) continue;
                // Does the site's domain match the request host?
                if (Regex.IsMatch(s.Get().Domain, _hostName, RegexOptions.IgnoreCase))
                {
                    s.SetRunType(SiteRunType.Stand);
                    if (string.IsNullOrEmpty(appName)) return s;
                    // Same domain AND same virtual directory name: exact match.
                    if (string.Compare(s.Get().AppPath, appName, true) == 0)
                    {
                        s.SetRunType(SiteRunType.VirtualDirectory);
                        return s;
                    }
                    curr = s;
                }
            }
            if (curr != null) return curr;
            // Fall back to sites bound only by virtual directory name.
            if (!string.IsNullOrEmpty(appName))
                foreach (var s in sites)
                    if (string.Compare(s.Get().AppPath, appName, true) == 0)
                    {
                        s.SetRunType(SiteRunType.VirtualDirectory);
                        return s;
                    }
            return null;
        }

        /// <summary>
        /// Creates a friend-link aggregate bound to the given site.
        /// </summary>
        public ISiteLink CreateLink(ISite site, int id, string text)
        {
            return base.CreateLink(this, site, id, text);
        }

        /// <summary>
        /// Inserts a new link (domain id <= 0) or updates an existing one;
        /// returns the DAL result value.
        /// </summary>
        public int SaveSiteLink(int siteId, ISiteLink link)
        {
            if (link.GetDomainId() <= 0) return linkDal.AddSiteLink(siteId, link);
            return linkDal.UpdateSiteLink(siteId, link);
        }

        /// <summary>
        /// Materializes an ISiteLink from the current data-reader row.
        /// </summary>
        public ISiteLink ConvertToILink(int siteId, DbDataReader reader)
        {
            var link = CreateLink(
                GetSiteById(siteId),
                int.Parse(reader["id"].ToString()),
                reader["text"].ToString()
            );
            link.Bind = reader["bind"].ToString();
            link.ImgUrl = reader["img_url"].ToString();
            link.SortNumber = int.Parse(reader["sort_number"].ToString());
            link.Pid = int.Parse(reader["pid"].ToString());
            link.Target = reader["target"].ToString();
            link.Type = (SiteLinkType) int.Parse(reader["type"].ToString());
            link.Uri = reader["uri"].ToString();
            link.Visible = Convert.ToBoolean(reader["visible"]);
            return link;
        }

        /// <summary>
        /// Deletes one link; true when exactly one row was removed.
        /// </summary>
        public bool DeleteSiteLink(int siteId, int linkId)
        {
            return linkDal.DeleteSiteLink(siteId, linkId) == 1;
        }

        /// <summary>
        /// Fetches a single link by id, or null when it does not exist.
        /// </summary>
        public ISiteLink GetSiteLinkById(int siteId, int linkId)
        {
            ISiteLink link = null;
            linkDal.GetSiteLinkById(siteId, linkId, rd =>
            {
                if (rd.Read()) link = ConvertToILink(siteId, rd);
            });
            return link;
        }

        /// <summary>
        /// Returns all links of a given type for the site.
        /// </summary>
        public IEnumerable<ISiteLink> GetSiteLinks(int siteId, SiteLinkType type)
        {
            IList<ISiteLink> links = new List<ISiteLink>();
            linkDal.GetAllSiteLinks(siteId, type, rd =>
            {
                while (rd.Read()) links.Add(ConvertToILink(siteId, rd));
            });
            return links;
        }
    }
}
|
gpl-2.0
|
bfay/maniacal-kitten
|
wp-content/themes/smartbox-theme-1.01/javascripts/jquery.form.js
|
39061
|
/*!
* jQuery Form Plugin
* version: 3.18 (28-SEP-2012)
* @requires jQuery v1.5 or later
*
* Examples and documentation at: http://malsup.com/jquery/form/
* Project repository: https://github.com/malsup/form
* Dual licensed under the MIT and GPL licenses:
* http://malsup.github.com/mit-license.txt
* http://malsup.github.com/gpl-license-v2.txt
*/
/*global ActiveXObject alert */
;(function($) {
"use strict";
/*
Usage Note:
-----------
Do not use both ajaxSubmit and ajaxForm on the same form. These
functions are mutually exclusive. Use ajaxSubmit if you want
to bind your own submit handler to the form. For example,
$(document).ready(function() {
$('#myForm').on('submit', function(e) {
e.preventDefault(); // <-- important
$(this).ajaxSubmit({
target: '#output'
});
});
});
Use ajaxForm when you want the plugin to manage all the event binding
for you. For example,
$(document).ready(function() {
$('#myForm').ajaxForm({
target: '#output'
});
});
You can also use ajaxForm with delegation (requires jQuery v1.7+), so the
form does not have to exist when you invoke ajaxForm:
$('#myForm').ajaxForm({
delegation: true,
target: '#output'
});
When using ajaxForm, the ajaxSubmit function will be invoked for you
at the appropriate time.
*/
/**
 * Browser capability probes used later to pick the upload transport:
 * File API support and FormData (XHR Level 2) support.
 */
var feature = {
    fileapi: $("<input type='file'/>").get(0).files !== undefined,
    formdata: window.FormData !== undefined
};
/**
* ajaxSubmit() provides a mechanism for immediately submitting
* an HTML form using AJAX.
*/
$.fn.ajaxSubmit = function(options) {
/*jshint scripturl:true */
// fast fail if nothing selected (http://dev.jquery.com/ticket/2752)
if (!this.length) {
log('ajaxSubmit: skipping submit process - no element selected');
return this;
}
var method, action, url, $form = this;
if (typeof options == 'function') {
options = { success: options };
}
method = this.attr('method');
action = this.attr('action');
url = (typeof action === 'string') ? $.trim(action) : '';
url = url || window.location.href || '';
if (url) {
// clean url (don't include hash vaue)
url = (url.match(/^([^#]+)/)||[])[1];
}
options = $.extend(true, {
url: url,
success: $.ajaxSettings.success,
type: method || 'GET',
iframeSrc: /^https/i.test(window.location.href || '') ? 'javascript:false' : 'about:blank'
}, options);
// hook for manipulating the form data before it is extracted;
// convenient for use with rich editors like tinyMCE or FCKEditor
var veto = {};
this.trigger('form-pre-serialize', [this, options, veto]);
if (veto.veto) {
log('ajaxSubmit: submit vetoed via form-pre-serialize trigger');
return this;
}
// provide opportunity to alter form data before it is serialized
if (options.beforeSerialize && options.beforeSerialize(this, options) === false) {
log('ajaxSubmit: submit aborted via beforeSerialize callback');
return this;
}
var traditional = options.traditional;
if ( traditional === undefined ) {
traditional = $.ajaxSettings.traditional;
}
var elements = [];
var qx, a = this.formToArray(options.semantic, elements);
if (options.data) {
options.extraData = options.data;
qx = $.param(options.data, traditional);
}
// give pre-submit callback an opportunity to abort the submit
if (options.beforeSubmit && options.beforeSubmit(a, this, options) === false) {
log('ajaxSubmit: submit aborted via beforeSubmit callback');
return this;
}
// fire vetoable 'validate' event
this.trigger('form-submit-validate', [a, this, options, veto]);
if (veto.veto) {
log('ajaxSubmit: submit vetoed via form-submit-validate trigger');
return this;
}
var q = $.param(a, traditional);
if (qx) {
q = ( q ? (q + '&' + qx) : qx );
}
if (options.type.toUpperCase() == 'GET') {
options.url += (options.url.indexOf('?') >= 0 ? '&' : '?') + q;
options.data = null; // data is null for 'get'
}
else {
options.data = q; // data is the query string for 'post'
}
var callbacks = [];
if (options.resetForm) {
callbacks.push(function() { $form.resetForm(); });
}
if (options.clearForm) {
callbacks.push(function() { $form.clearForm(options.includeHidden); });
}
// perform a load on the target only if dataType is not provided
if (!options.dataType && options.target) {
var oldSuccess = options.success || function(){};
callbacks.push(function(data) {
var fn = options.replaceTarget ? 'replaceWith' : 'html';
$(options.target)[fn](data).each(oldSuccess, arguments);
});
}
else if (options.success) {
callbacks.push(options.success);
}
options.success = function(data, status, xhr) { // jQuery 1.4+ passes xhr as 3rd arg
var context = options.context || this ; // jQuery 1.4+ supports scope context
for (var i=0, max=callbacks.length; i < max; i++) {
callbacks[i].apply(context, [data, status, xhr || $form, $form]);
}
};
// are there files to upload?
var fileInputs = $('input:file:enabled[value]', this); // [value] (issue #113)
var hasFileInputs = fileInputs.length > 0;
var mp = 'multipart/form-data';
var multipart = ($form.attr('enctype') == mp || $form.attr('encoding') == mp);
var fileAPI = feature.fileapi && feature.formdata;
log("fileAPI :" + fileAPI);
var shouldUseFrame = (hasFileInputs || multipart) && !fileAPI;
var jqxhr;
// options.iframe allows user to force iframe mode
// 06-NOV-09: now defaulting to iframe mode if file input is detected
if (options.iframe !== false && (options.iframe || shouldUseFrame)) {
// hack to fix Safari hang (thanks to Tim Molendijk for this)
// see: http://groups.google.com/group/jquery-dev/browse_thread/thread/36395b7ab510dd5d
if (options.closeKeepAlive) {
$.get(options.closeKeepAlive, function() {
jqxhr = fileUploadIframe(a);
});
}
else {
jqxhr = fileUploadIframe(a);
}
}
else if ((hasFileInputs || multipart) && fileAPI) {
jqxhr = fileUploadXhr(a);
}
else {
jqxhr = $.ajax(options);
}
$form.removeData('jqxhr').data('jqxhr', jqxhr);
// clear element array
for (var k=0; k < elements.length; k++)
elements[k] = null;
// fire 'notify' event
this.trigger('form-submit-notify', [this, options]);
return this;
// utility fn for deep serialization
function deepSerialize(extraData){
var serialized = $.param(extraData).split('&');
var len = serialized.length;
var result = {};
var i, part;
for (i=0; i < len; i++) {
part = serialized[i].split('=');
result[decodeURIComponent(part[0])] = decodeURIComponent(part[1]);
}
return result;
}
// XMLHttpRequest Level 2 file uploads (big hat tip to francois2metz)
function fileUploadXhr(a) {
var formdata = new FormData();
for (var i=0; i < a.length; i++) {
formdata.append(a[i].name, a[i].value);
}
if (options.extraData) {
var serializedData = deepSerialize(options.extraData);
for (var p in serializedData)
if (serializedData.hasOwnProperty(p))
formdata.append(p, serializedData[p]);
}
options.data = null;
var s = $.extend(true, {}, $.ajaxSettings, options, {
contentType: false,
processData: false,
cache: false,
type: method || 'POST'
});
if (options.uploadProgress) {
// workaround because jqXHR does not expose upload property
s.xhr = function() {
var xhr = jQuery.ajaxSettings.xhr();
if (xhr.upload) {
xhr.upload.onprogress = function(event) {
var percent = 0;
var position = event.loaded || event.position; /*event.position is deprecated*/
var total = event.total;
if (event.lengthComputable) {
percent = Math.ceil(position / total * 100);
}
options.uploadProgress(event, position, total, percent);
};
}
return xhr;
};
}
s.data = null;
var beforeSend = s.beforeSend;
s.beforeSend = function(xhr, o) {
o.data = formdata;
if(beforeSend)
beforeSend.call(this, xhr, o);
};
return $.ajax(s);
}
// private function for handling file uploads (hat tip to YAHOO!)
function fileUploadIframe(a) {
var form = $form[0], el, i, s, g, id, $io, io, xhr, sub, n, timedOut, timeoutHandle;
var useProp = !!$.fn.prop;
var deferred = $.Deferred();
if ($(':input[name=submit],:input[id=submit]', form).length) {
// if there is an input with a name or id of 'submit' then we won't be
// able to invoke the submit fn on the form (at least not x-browser)
alert('Error: Form elements must not have name or id of "submit".');
deferred.reject();
return deferred;
}
if (a) {
// ensure that every serialized input is still enabled
for (i=0; i < elements.length; i++) {
el = $(elements[i]);
if ( useProp )
el.prop('disabled', false);
else
el.removeAttr('disabled');
}
}
s = $.extend(true, {}, $.ajaxSettings, options);
s.context = s.context || s;
id = 'jqFormIO' + (new Date().getTime());
if (s.iframeTarget) {
$io = $(s.iframeTarget);
n = $io.attr('name');
if (!n)
$io.attr('name', id);
else
id = n;
}
else {
$io = $('<iframe name="' + id + '" src="'+ s.iframeSrc +'" />');
$io.css({ position: 'absolute', top: '-1000px', left: '-1000px' });
}
io = $io[0];
xhr = { // mock object
aborted: 0,
responseText: null,
responseXML: null,
status: 0,
statusText: 'n/a',
getAllResponseHeaders: function() {},
getResponseHeader: function() {},
setRequestHeader: function() {},
abort: function(status) {
var e = (status === 'timeout' ? 'timeout' : 'aborted');
log('aborting upload... ' + e);
this.aborted = 1;
// #214
if (io.contentWindow.document.execCommand) {
try { // #214
io.contentWindow.document.execCommand('Stop');
} catch(ignore) {}
}
$io.attr('src', s.iframeSrc); // abort op in progress
xhr.error = e;
if (s.error)
s.error.call(s.context, xhr, e, status);
if (g)
$.event.trigger("ajaxError", [xhr, s, e]);
if (s.complete)
s.complete.call(s.context, xhr, e);
}
};
g = s.global;
// trigger ajax global events so that activity/block indicators work like normal
if (g && 0 === $.active++) {
$.event.trigger("ajaxStart");
}
if (g) {
$.event.trigger("ajaxSend", [xhr, s]);
}
if (s.beforeSend && s.beforeSend.call(s.context, xhr, s) === false) {
if (s.global) {
$.active--;
}
deferred.reject();
return deferred;
}
if (xhr.aborted) {
deferred.reject();
return deferred;
}
// add submitting element to data if we know it
sub = form.clk;
if (sub) {
n = sub.name;
if (n && !sub.disabled) {
s.extraData = s.extraData || {};
s.extraData[n] = sub.value;
if (sub.type == "image") {
s.extraData[n+'.x'] = form.clk_x;
s.extraData[n+'.y'] = form.clk_y;
}
}
}
var CLIENT_TIMEOUT_ABORT = 1;
var SERVER_ABORT = 2;
function getDoc(frame) {
var doc = frame.contentWindow ? frame.contentWindow.document : frame.contentDocument ? frame.contentDocument : frame.document;
return doc;
}
// Rails CSRF hack (thanks to Yvan Barthelemy)
var csrf_token = $('meta[name=csrf-token]').attr('content');
var csrf_param = $('meta[name=csrf-param]').attr('content');
if (csrf_param && csrf_token) {
s.extraData = s.extraData || {};
s.extraData[csrf_param] = csrf_token;
}
// take a breath so that pending repaints get some cpu time before the upload starts
function doSubmit() {
// make sure form attrs are set
var t = $form.attr('target'), a = $form.attr('action');
// update form attrs in IE friendly way
form.setAttribute('target',id);
if (!method) {
form.setAttribute('method', 'POST');
}
if (a != s.url) {
form.setAttribute('action', s.url);
}
// ie borks in some cases when setting encoding
if (! s.skipEncodingOverride && (!method || /post/i.test(method))) {
$form.attr({
encoding: 'multipart/form-data',
enctype: 'multipart/form-data'
});
}
// support timout
if (s.timeout) {
timeoutHandle = setTimeout(function() { timedOut = true; cb(CLIENT_TIMEOUT_ABORT); }, s.timeout);
}
// look for server aborts
function checkState() {
try {
var state = getDoc(io).readyState;
log('state = ' + state);
if (state && state.toLowerCase() == 'uninitialized')
setTimeout(checkState,50);
}
catch(e) {
log('Server abort: ' , e, ' (', e.name, ')');
cb(SERVER_ABORT);
if (timeoutHandle)
clearTimeout(timeoutHandle);
timeoutHandle = undefined;
}
}
// add "extra" data to form if provided in options
var extraInputs = [];
try {
if (s.extraData) {
for (var n in s.extraData) {
if (s.extraData.hasOwnProperty(n)) {
// if using the $.param format that allows for multiple values with the same name
if($.isPlainObject(s.extraData[n]) && s.extraData[n].hasOwnProperty('name') && s.extraData[n].hasOwnProperty('value')) {
extraInputs.push(
$('<input type="hidden" name="'+s.extraData[n].name+'">').attr('value',s.extraData[n].value)
.appendTo(form)[0]);
} else {
extraInputs.push(
$('<input type="hidden" name="'+n+'">').attr('value',s.extraData[n])
.appendTo(form)[0]);
}
}
}
}
if (!s.iframeTarget) {
// add iframe to doc and submit the form
$io.appendTo('body');
if (io.attachEvent)
io.attachEvent('onload', cb);
else
io.addEventListener('load', cb, false);
}
setTimeout(checkState,15);
form.submit();
}
finally {
// reset attrs and remove "extra" input elements
form.setAttribute('action',a);
if(t) {
form.setAttribute('target', t);
} else {
$form.removeAttr('target');
}
$(extraInputs).remove();
}
}
if (s.forceSync) {
doSubmit();
}
else {
setTimeout(doSubmit, 10); // this lets dom updates render
}
var data, doc, domCheckCount = 50, callbackProcessed;
function cb(e) {
if (xhr.aborted || callbackProcessed) {
return;
}
try {
doc = getDoc(io);
}
catch(ex) {
log('cannot access response document: ', ex);
e = SERVER_ABORT;
}
if (e === CLIENT_TIMEOUT_ABORT && xhr) {
xhr.abort('timeout');
deferred.reject(xhr, 'timeout');
return;
}
else if (e == SERVER_ABORT && xhr) {
xhr.abort('server abort');
deferred.reject(xhr, 'error', 'server abort');
return;
}
if (!doc || doc.location.href == s.iframeSrc) {
// response not received yet
if (!timedOut)
return;
}
if (io.detachEvent)
io.detachEvent('onload', cb);
else
io.removeEventListener('load', cb, false);
var status = 'success', errMsg;
try {
if (timedOut) {
throw 'timeout';
}
var isXml = s.dataType == 'xml' || doc.XMLDocument || $.isXMLDoc(doc);
log('isXml='+isXml);
if (!isXml && window.opera && (doc.body === null || !doc.body.innerHTML)) {
if (--domCheckCount) {
// in some browsers (Opera) the iframe DOM is not always traversable when
// the onload callback fires, so we loop a bit to accommodate
log('requeing onLoad callback, DOM not available');
setTimeout(cb, 250);
return;
}
// let this fall through because server response could be an empty document
//log('Could not access iframe DOM after mutiple tries.');
//throw 'DOMException: not available';
}
//log('response detected');
var docRoot = doc.body ? doc.body : doc.documentElement;
xhr.responseText = docRoot ? docRoot.innerHTML : null;
xhr.responseXML = doc.XMLDocument ? doc.XMLDocument : doc;
if (isXml)
s.dataType = 'xml';
xhr.getResponseHeader = function(header){
var headers = {'content-type': s.dataType};
return headers[header];
};
// support for XHR 'status' & 'statusText' emulation :
if (docRoot) {
xhr.status = Number( docRoot.getAttribute('status') ) || xhr.status;
xhr.statusText = docRoot.getAttribute('statusText') || xhr.statusText;
}
var dt = (s.dataType || '').toLowerCase();
var scr = /(json|script|text)/.test(dt);
if (scr || s.textarea) {
// see if user embedded response in textarea
var ta = doc.getElementsByTagName('textarea')[0];
if (ta) {
xhr.responseText = ta.value;
// support for XHR 'status' & 'statusText' emulation :
xhr.status = Number( ta.getAttribute('status') ) || xhr.status;
xhr.statusText = ta.getAttribute('statusText') || xhr.statusText;
}
else if (scr) {
// account for browsers injecting pre around json response
var pre = doc.getElementsByTagName('pre')[0];
var b = doc.getElementsByTagName('body')[0];
if (pre) {
xhr.responseText = pre.textContent ? pre.textContent : pre.innerText;
}
else if (b) {
xhr.responseText = b.textContent ? b.textContent : b.innerText;
}
}
}
else if (dt == 'xml' && !xhr.responseXML && xhr.responseText) {
xhr.responseXML = toXml(xhr.responseText);
}
try {
data = httpData(xhr, dt, s);
}
catch (e) {
status = 'parsererror';
xhr.error = errMsg = (e || status);
}
}
catch (e) {
log('error caught: ',e);
status = 'error';
xhr.error = errMsg = (e || status);
}
if (xhr.aborted) {
log('upload aborted');
status = null;
}
if (xhr.status) { // we've set xhr.status
status = (xhr.status >= 200 && xhr.status < 300 || xhr.status === 304) ? 'success' : 'error';
}
// ordering of these callbacks/triggers is odd, but that's how $.ajax does it
if (status === 'success') {
if (s.success)
s.success.call(s.context, data, 'success', xhr);
deferred.resolve(xhr.responseText, 'success', xhr);
if (g)
$.event.trigger("ajaxSuccess", [xhr, s]);
}
else if (status) {
if (errMsg === undefined)
errMsg = xhr.statusText;
if (s.error)
s.error.call(s.context, xhr, status, errMsg);
deferred.reject(xhr, 'error', errMsg);
if (g)
$.event.trigger("ajaxError", [xhr, s, errMsg]);
}
if (g)
$.event.trigger("ajaxComplete", [xhr, s]);
if (g && ! --$.active) {
$.event.trigger("ajaxStop");
}
if (s.complete)
s.complete.call(s.context, xhr, status);
callbackProcessed = true;
if (s.timeout)
clearTimeout(timeoutHandle);
// clean up
setTimeout(function() {
if (!s.iframeTarget)
$io.remove();
xhr.responseXML = null;
}, 100);
}
var toXml = $.parseXML || function(s, doc) { // use parseXML if available (jQuery 1.5+)
if (window.ActiveXObject) {
doc = new ActiveXObject('Microsoft.XMLDOM');
doc.async = 'false';
doc.loadXML(s);
}
else {
doc = (new DOMParser()).parseFromString(s, 'text/xml');
}
return (doc && doc.documentElement && doc.documentElement.nodeName != 'parsererror') ? doc : null;
};
var parseJSON = $.parseJSON || function(s) {
/*jslint evil:true */
return window['eval']('(' + s + ')');
};
var httpData = function( xhr, type, s ) { // mostly lifted from jq1.4.4
var ct = xhr.getResponseHeader('content-type') || '',
xml = type === 'xml' || !type && ct.indexOf('xml') >= 0,
data = xml ? xhr.responseXML : xhr.responseText;
if (xml && data.documentElement.nodeName === 'parsererror') {
if ($.error)
$.error('parsererror');
}
if (s && s.dataFilter) {
data = s.dataFilter(data, type);
}
if (typeof data === 'string') {
if (type === 'json' || !type && ct.indexOf('json') >= 0) {
data = parseJSON(data);
} else if (type === "script" || !type && ct.indexOf("javascript") >= 0) {
$.globalEval(data);
}
}
return data;
};
return deferred;
}
};
/**
* ajaxForm() provides a mechanism for fully automating form submission.
*
* The advantages of using this method instead of ajaxSubmit() are:
*
* 1: This method will include coordinates for <input type="image" /> elements (if the element
* is used to submit the form).
* 2. This method will include the submit element's name/value data (for the element that was
* used to submit the form).
* 3. This method binds the submit() method to the form for you.
*
* The options argument for ajaxForm works exactly as it does for ajaxSubmit. ajaxForm merely
* passes the options argument along after properly binding events for submit elements and
* the form itself.
*/
// Binds submit/click handlers so matched forms are submitted via ajaxSubmit.
// Supports event delegation (jQuery 1.7+) so forms that do not yet exist can
// still be wired up; otherwise handlers are bound directly, re-queuing the
// call if the DOM is not ready and the selector is known.
$.fn.ajaxForm = function(options) {
    options = options || {};
    // Delegation requires jQuery's .on(); silently fall back to direct binding.
    options.delegation = options.delegation && $.isFunction($.fn.on);
    // in jQuery 1.3+ we can fix mistakes with the ready state
    if (!options.delegation && this.length === 0) {
        var o = { s: this.selector, c: this.context };
        if (!$.isReady && o.s) {
            // DOM not ready yet: retry with the same selector once it is.
            log('DOM not ready, queuing ajaxForm');
            $(function() {
                $(o.s,o.c).ajaxForm(options);
            });
            return this;
        }
        // is your DOM ready? http://docs.jquery.com/Tutorials:Introducing_$(document).ready()
        log('terminating; zero elements found by selector' + ($.isReady ? '' : ' (DOM not ready)'));
        return this;
    }
    if ( options.delegation ) {
        // Re-register document-level delegated handlers (off first so repeated
        // ajaxForm calls do not stack duplicate handlers).
        $(document)
            .off('submit.form-plugin', this.selector, doAjaxSubmit)
            .off('click.form-plugin', this.selector, captureSubmittingElement)
            .on('submit.form-plugin', this.selector, options, doAjaxSubmit)
            .on('click.form-plugin', this.selector, options, captureSubmittingElement);
        return this;
    }
    // Direct binding: clear any previous plugin handlers, then bind fresh ones.
    return this.ajaxFormUnbind()
        .bind('submit.form-plugin', options, doAjaxSubmit)
        .bind('click.form-plugin', options, captureSubmittingElement);
};
// private event handlers
// Submit handler installed by ajaxForm(): suppresses the browser's native
// form submission and routes the data through ajaxSubmit() with the
// options that were bound as event data at setup time.
function doAjaxSubmit(e) {
    /*jshint validthis:true */
    var boundOptions = e.data;
    if (e.isDefaultPrevented()) {
        return; // a prior handler canceled the event; respect that
    }
    e.preventDefault();
    $(this).ajaxSubmit(boundOptions);
}
// Click handler installed by ajaxForm(): records which submit control was
// clicked (form.clk) and, for <input type="image">, the click coordinates
// (form.clk_x / form.clk_y), so ajaxSubmit can include them in the payload.
function captureSubmittingElement(e) {
    /*jshint validthis:true */
    var target = e.target;
    var $el = $(target);
    if (!($el.is(":submit,input:image"))) {
        // is this a child element of the submit el? (ex: a span within a button)
        var t = $el.closest(':submit');
        if (t.length === 0) {
            return; // click did not originate from a submit control; ignore
        }
        target = t[0];
    }
    var form = this;
    form.clk = target;
    if (target.type == 'image') {
        if (e.offsetX !== undefined) {
            // offsetX/offsetY are already relative to the image
            form.clk_x = e.offsetX;
            form.clk_y = e.offsetY;
        } else if (typeof $.fn.offset == 'function') {
            // derive image-relative coordinates from the page position
            var offset = $el.offset();
            form.clk_x = e.pageX - offset.left;
            form.clk_y = e.pageY - offset.top;
        } else {
            // last resort: offsetLeft/offsetTop are relative to the
            // offsetParent, so this is only an approximation
            form.clk_x = e.pageX - target.offsetLeft;
            form.clk_y = e.pageY - target.offsetTop;
        }
    }
    // clear form vars shortly after submit processing has had a chance to run
    setTimeout(function() { form.clk = form.clk_x = form.clk_y = null; }, 100);
}
// ajaxFormUnbind removes the namespaced submit/click handlers that ajaxForm
// bound directly on the matched elements. Note: handlers installed in
// delegated mode (on document) are not affected by this method.
$.fn.ajaxFormUnbind = function() {
    return this.unbind('submit.form-plugin click.form-plugin');
};
/**
 * formToArray() gathers form element data into an array of objects that can
 * be passed to any of the following ajax functions: $.get, $.post, or load.
 * Each object in the array has both a 'name' and 'value' property. An example of
 * an array for a simple login form might be:
 *
 * [ { name: 'username', value: 'jresig' }, { name: 'password', value: 'secret' } ]
 *
 * It is this array that is passed to pre-submit callback functions provided to the
 * ajaxSubmit() and ajaxForm() methods.
 *
 * @param {boolean} semantic  walk ALL elements in document order (slower)
 *                            instead of the form.elements collection
 * @param {Array}   [elements] optional out-param; collects the DOM elements
 *                             that contributed values
 */
$.fn.formToArray = function(semantic, elements) {
    var a = [];
    if (this.length === 0) {
        return a; // empty jQuery set -> nothing to serialize
    }
    var form = this[0];
    var els = semantic ? form.getElementsByTagName('*') : form.elements;
    if (!els) {
        return a;
    }
    var i,j,n,v,el,max,jmax;
    for(i=0, max=els.length; i < max; i++) {
        el = els[i];
        n = el.name;
        if (!n) {
            continue; // unnamed controls are never successful
        }
        if (semantic && form.clk && el.type == "image") {
            // handle image inputs on the fly when semantic == true
            if(!el.disabled && form.clk == el) {
                a.push({name: n, value: $(el).val(), type: el.type });
                a.push({name: n+'.x', value: form.clk_x}, {name: n+'.y', value: form.clk_y});
            }
            continue;
        }
        v = $.fieldValue(el, true);
        if (v && v.constructor == Array) {
            // multi-valued control (e.g. select-multiple): one entry per value
            if (elements)
                elements.push(el);
            for(j=0, jmax=v.length; j < jmax; j++) {
                a.push({name: n, value: v[j]});
            }
        }
        else if (feature.fileapi && el.type == 'file' && !el.disabled) {
            if (elements)
                elements.push(el);
            var files = el.files;
            if (files.length) {
                for (j=0; j < files.length; j++) {
                    a.push({name: n, value: files[j], type: el.type});
                }
            }
            else {
                // #180: include empty file inputs so the server still sees the field
                a.push({ name: n, value: '', type: el.type });
            }
        }
        else if (v !== null && typeof v != 'undefined') {
            if (elements)
                elements.push(el);
            a.push({name: n, value: v, type: el.type, required: el.required});
        }
    }
    if (!semantic && form.clk) {
        // input type=='image' are not found in elements array! handle it here
        var $input = $(form.clk), input = $input[0];
        n = input.name;
        if (n && !input.disabled && input.type == 'image') {
            a.push({name: n, value: $input.val()});
            a.push({name: n+'.x', value: form.clk_x}, {name: n+'.y', value: form.clk_y});
        }
    }
    return a;
};
/**
 * Serializes form data into a 'submittable' query string of the form:
 * name1=value1&name2=value2
 */
$.fn.formSerialize = function(semantic) {
    var pairs = this.formToArray(semantic);
    // jQuery.param performs the proper URL encoding
    return $.param(pairs);
};
/**
 * Serializes all field elements in the jQuery object into a query string
 * of the form: name1=value1&name2=value2
 */
$.fn.fieldSerialize = function(successful) {
    var pairs = [];
    this.each(function() {
        var fieldName = this.name;
        if (!fieldName) {
            return; // unnamed fields cannot be submitted
        }
        var fieldVal = $.fieldValue(this, successful);
        if (fieldVal && fieldVal.constructor == Array) {
            // multi-valued field: emit one pair per value
            for (var k = 0, len = fieldVal.length; k < len; k++) {
                pairs.push({name: fieldName, value: fieldVal[k]});
            }
        }
        else if (fieldVal !== null && typeof fieldVal != 'undefined') {
            pairs.push({name: fieldName, value: fieldVal});
        }
    });
    // jQuery.param performs the proper URL encoding
    return $.param(pairs);
};
/**
* Returns the value(s) of the element in the matched set. For example, consider the following form:
*
* <form><fieldset>
* <input name="A" type="text" />
* <input name="A" type="text" />
* <input name="B" type="checkbox" value="B1" />
* <input name="B" type="checkbox" value="B2"/>
* <input name="C" type="radio" value="C1" />
* <input name="C" type="radio" value="C2" />
* </fieldset></form>
*
* var v = $(':text').fieldValue();
* // if no values are entered into the text inputs
* v == ['','']
* // if values entered into the text inputs are 'foo' and 'bar'
* v == ['foo','bar']
*
* var v = $(':checkbox').fieldValue();
* // if neither checkbox is checked
* v === undefined
* // if both checkboxes are checked
* v == ['B1', 'B2']
*
* var v = $(':radio').fieldValue();
* // if neither radio is checked
* v === undefined
* // if first radio is checked
* v == ['C1']
*
* The successful argument controls whether or not the field element must be 'successful'
* (per http://www.w3.org/TR/html4/interact/forms.html#successful-controls).
* The default value of the successful argument is true. If this value is false the value(s)
* for each element is returned.
*
* Note: This method *always* returns an array. If no valid value can be determined the
* array will be empty, otherwise it will contain one or more values.
*/
$.fn.fieldValue = function(successful) {
    var collected = [];
    for (var idx = 0, count = this.length; idx < count; idx++) {
        var element = this[idx];
        var value = $.fieldValue(element, successful);
        // skip nulls, undefineds, and empty arrays
        var skip = value === null || typeof value == 'undefined' ||
            (value.constructor == Array && !value.length);
        if (skip) {
            continue;
        }
        if (value.constructor == Array) {
            $.merge(collected, value); // flatten multi-valued fields
        }
        else {
            collected.push(value);
        }
    }
    return collected;
};
/**
 * Returns the value of the field element.
 * When `successful` is true (the default), returns null for controls that
 * would not be submitted with the form (unnamed, disabled, unchecked, ...).
 * select-one yields a scalar; select-multiple yields an array.
 */
$.fieldValue = function(el, successful) {
    var n = el.name, t = el.type, tag = el.tagName.toLowerCase();
    if (successful === undefined) {
        successful = true;
    }
    // unsuccessful controls per HTML4: no name, disabled, reset/button,
    // unchecked checkbox/radio, submit/image that was not the clicked
    // control, or a select with no selection
    if (successful && (!n || el.disabled || t == 'reset' || t == 'button' ||
        (t == 'checkbox' || t == 'radio') && !el.checked ||
        (t == 'submit' || t == 'image') && el.form && el.form.clk != el ||
        tag == 'select' && el.selectedIndex == -1)) {
        return null;
    }
    if (tag == 'select') {
        var index = el.selectedIndex;
        if (index < 0) {
            return null; // nothing selected
        }
        var a = [], ops = el.options;
        var one = (t == 'select-one');
        // single selects only need to inspect the selected option
        var max = (one ? index+1 : ops.length);
        for(var i=(one ? index : 0); i < max; i++) {
            var op = ops[i];
            if (op.selected) {
                var v = op.value;
                if (!v) { // extra pain for IE...
                    v = (op.attributes && op.attributes['value'] && !(op.attributes['value'].specified)) ? op.text : op.value;
                }
                if (one) {
                    return v; // scalar for select-one
                }
                a.push(v);
            }
        }
        return a; // array for select-multiple
    }
    return $(el).val();
};
/**
 * Clears the form data. Takes the following actions on the form's input fields:
 *  - input text fields will have their 'value' property set to the empty string
 *  - select elements will have their 'selectedIndex' property set to -1
 *  - checkbox and radio inputs will have their 'checked' property set to false
 *  - inputs of type submit, button, reset, and hidden will *not* be effected
 *  - button elements will *not* be effected
 */
$.fn.clearForm = function(includeHidden) {
    return this.each(function() {
        var controls = $('input,select,textarea', this);
        controls.clearFields(includeHidden);
    });
};
/**
 * Clears the selected form elements: text-like inputs and textareas are
 * emptied, checkboxes/radios unchecked, and selects deselected.
 */
$.fn.clearFields = $.fn.clearInputs = function(includeHidden) {
    // 'hidden' is deliberately absent from this list
    var clearable = /^(?:color|date|datetime|email|month|number|password|range|search|tel|text|time|url|week)$/i;
    return this.each(function() {
        var type = this.type;
        var tagName = this.tagName.toLowerCase();
        if (clearable.test(type) || tagName == 'textarea') {
            this.value = '';
            return;
        }
        if (type == 'checkbox' || type == 'radio') {
            this.checked = false;
            return;
        }
        if (tagName == 'select') {
            this.selectedIndex = -1;
            return;
        }
        if (includeHidden) {
            // includeHidden may be the value true, or a selector string
            // indicating a special test; for example:
            //   $('#myForm').clearForm('.special:hidden')
            // the above would clean hidden inputs with the class 'special'
            var matches = (includeHidden === true && /hidden/.test(type)) ||
                (typeof includeHidden == 'string' && $(this).is(includeHidden));
            if (matches) {
                this.value = '';
            }
        }
    });
};
/**
 * Resets the form data: every matched form is reset to its original values.
 */
$.fn.resetForm = function() {
    return this.each(function() {
        // guard against an input named 'reset' shadowing the form's
        // reset() method; note that IE reports the real reset function
        // as typeof 'object'
        var resetFn = this.reset;
        var callable = typeof resetFn == 'function' ||
            (typeof resetFn == 'object' && !resetFn.nodeType);
        if (callable) {
            this.reset();
        }
    });
};
/**
 * Enables (b !== false) or disables (b === false) all matched elements.
 * Called with no argument, it enables.
 */
$.fn.enable = function(b) {
    var enabled = (b === undefined) ? true : b;
    return this.each(function() {
        this.disabled = !enabled;
    });
};
/**
 * Checks/unchecks matched checkboxes or radio buttons and
 * selects/deselects matched option elements.
 */
$.fn.selected = function(select) {
    var state = (select === undefined) ? true : select;
    return this.each(function() {
        var type = this.type;
        if (type == 'checkbox' || type == 'radio') {
            this.checked = state;
            return;
        }
        if (this.tagName.toLowerCase() == 'option') {
            var $parent = $(this).parent('select');
            if (state && $parent[0] && $parent[0].type == 'select-one') {
                // a single-select allows only one choice; clear the rest first
                $parent.find('option').selected(false);
            }
            this.selected = state;
        }
    });
};
// expose debug var
$.fn.ajaxSubmit.debug = false;
// Helper for console logging; only emits when $.fn.ajaxSubmit.debug is set.
function log() {
    if (!$.fn.ajaxSubmit.debug) {
        return;
    }
    var joined = Array.prototype.join.call(arguments, '');
    var msg = '[jquery.form] ' + joined;
    if (window.console && window.console.log) {
        window.console.log(msg);
    }
    else if (window.opera && window.opera.postError) {
        // Opera <12 fallback
        window.opera.postError(msg);
    }
}
})(jQuery);
|
gpl-2.0
|
kingvuplus/boom
|
lib/python/Tools/Alternatives.py
|
742
|
from enigma import eServiceCenter, eServiceReference
def getAlternativeChannels(service):
    # Resolve `service` (a service reference string) through the enigma
    # service center and return the group's content as a list of service
    # reference strings ('S' = string format), or a false-y value when
    # the listing is unavailable.
    alternativeServices = eServiceCenter.getInstance().list(eServiceReference(service))
    return alternativeServices and alternativeServices.getContent('S', True)
def CompareWithAlternatives(serviceA, serviceB):
    # Truthy when the two references denote the same channel; a '1:134:'
    # alternatives-group reference matches any of its member services.
    # Mirrors the short-circuit semantics of a chained and/or expression:
    # a false-y operand is returned as-is.
    if not serviceA or not serviceB:
        return serviceA and serviceB
    if serviceA == serviceB:
        return True
    if serviceA.startswith('1:134:') and serviceB in getAlternativeChannels(serviceA):
        return True
    if serviceB.startswith('1:134:') and serviceA in getAlternativeChannels(serviceB):
        return True
    return False
def GetWithAlternative(service):
    # Resolve an alternatives-group reference ('1:134:' prefix) to its
    # first member; any other reference is returned unchanged, as is a
    # group whose member list cannot be fetched.
    if not service.startswith('1:134:'):
        return service
    channels = getAlternativeChannels(service)
    return channels[0] if channels else service
|
gpl-2.0
|
enoordeh/Pangloss
|
Magnifier.py
|
15272
|
#!/usr/bin/env python
# ======================================================================
import pangloss
import sys,getopt,cPickle,numpy,glob
import matplotlib.pyplot as plt
from scipy.stats.kde import gaussian_kde
from math import pi
from astropy.io import ascii
# ======================================================================
def Magnifier(argv):
"""
NAME
Magnifier.py
PURPOSE
Read in a simulation lightcone (or list of lightcones) and compute all
quantities needed to estimate mu, the magnification along the LoS to a
background source. This file create p(mu) for all lines of sight, and
also p(mu) for observed fields which are matched by overdensity (or
simply for LoS with a given overdensity, not necessarily real ones)
Requires extra inputs in the config file, detailed below
COMMENTS
The config file contains the list of lightcones to be
reconstructed, in the form of either a directory or a single
instance. If a directory is specified, one also gives a number
("batchsize") of lightcones to be reconstructed at a time.
FLAGS
-h Print this message [0]
-c, --contributions Plot cumulative contributions
INPUTS
configfile Plain text file containing Pangloss configuration
additional configfile fields:
FieldName Plain text or csv file containing list of field
names, can be None
FieldOverdensity Plain text or csv file containing list of field
overdensities. Must be in same order as field
names, can be None
If FieldName is none, FieldOverdensity can be a list
of overdensities, e.g. [0.75, 1.0, 1.25] for testing
OUTPUTS
stdout Useful information
samples Catalog(s) of samples from Pr(kappah|D)
EXAMPLE
Magnifier.py --contributions example.config
BUGS
Plotting contributions is SLOW
AUTHORS
This file is part of the Pangloss project, distributed under the
GPL v2, by Tom Collett (IoA) and Phil Marshall (Oxford). This
Magnifier code was developed by Charlotte Mason (UCSB)
Please cite: Collett et al. 2013, http://arxiv.org/abs/1303.6564
Mason et al. 2015, http://arxiv.org/abs/1502.03795
HISTORY
2013-03-21 started Collett & Marshall (Oxford)
2014-04-09 modified for BoRG, C Mason (UCSB)
"""
# --------------------------------------------------------------------
try:
opts, args = getopt.getopt(argv,"hc",["help","contributions"])
except getopt.GetoptError, err:
print str(err) # will print something like "option -a not recognized"
print Magnifier.__doc__ # will print the big comment above.
return
for o,a in opts:
if o in ("-h", "--help"):
print "HELP!"
print Magnifier.__doc__
return
elif o in ("-c", "--contributions"):
print "Magnifier: plotting cumulative contributions"
plot_contributions = True
else:
assert False, "unhandled option"
# Check for setup file in array args:
print args
print opts
if len(args) == 1:
configfile = args[0]
print pangloss.doubledashedline
print pangloss.hello
print pangloss.doubledashedline
print "Magnifier: generating magnification PDFs for lightcones of given density"
print "Magnifier: taking instructions from",configfile
else:
print "Magnifier: are you sure you put the options before the config file?"
print Magnifier.__doc__
return
# ==============================================================
# Read in configuration, and extract the ones we need:
# ==============================================================
experiment = pangloss.Configuration(configfile)
# Get the experiment name from the configfile name instead?
EXP_NAME = experiment.parameters['ExperimentName']
CALIB_DIR = experiment.parameters['CalibrationFolder'][0]
mag = experiment.parameters['MagName']
Ncats = 21
field_name = experiment.parameters['FieldName']
field_overdensity = experiment.parameters['FieldOverdensity']
if field_name == None:
field_name = ''
else:
field_name = numpy.genfromtxt(str(field_name), comments='#', usecols=0, dtype='S30')
if len(field_overdensity)==1: field_overdensity = numpy.genfromtxt(str(field_overdensity), comments='#')
Rc = experiment.parameters['LightconeRadius'] # in arcmin
zd = experiment.parameters['StrongLensRedshift']
zs = experiment.parameters['SourceRedshift']
# --------------------------------------------------------------------
# Load the lightcone pickles
calpickles = []
Nc = experiment.parameters['NCalibrationLightcones'] * Ncats ### should be 24
paths = '%s/*_lightcone.pickle' % (CALIB_DIR)
found = glob.glob(paths)
if len(found) > 0: calpickles = found
print "Magnifier: found the lightcones..."
# Ray tracing:
RTscheme = experiment.parameters['RayTracingScheme']
# Reconstruct calibration lines of sight?
DoCal = experiment.parameters['ReconstructCalibrations']
# --------------------------------------------------------------------
# Make redshift grid:
grid = pangloss.Grid(zd,zs,nplanes=100)
# --------------------------------------------------------------------
# Read in lightcones from pickles:
calcones = []
for i in xrange(Nc):
calcones.append(pangloss.readPickle(calpickles[i]))
if i==0: print calpickles[i]
if DoCal=="False": #must be string type
calcones=[]
calpickles=[]
allcones = calcones
allconefiles = calpickles
# ==============================================================
# Find the overdensity of lightcones cut at m<22 in F125W
# ==============================================================
print "Magnifier: finding the distribution of lightcones with density..."
lc_dens = []
lc_galaxies = []
total_galaxies = 0
# Sort into lightcones for each field
for i in xrange(Nc):
lc = pangloss.readPickle(calpickles[i])
num_galaxies = lc.numberWithin(radius=Rc,cut=[16,22],band=mag,units="arcmin")
lc_galaxies.append(num_galaxies)
# Add to the total number of galaxies
total_galaxies += num_galaxies
del lc
lc_dens = [Nc * float(x)/float(total_galaxies) for x in lc_galaxies]
numpy.savetxt(CALIB_DIR+"/lc_density.txt", lc_dens)
print 'Mean overdensity in all fields = %.3f (this should =1)' % numpy.mean(lc_density)
print 'Lightcone overdensities saved to file'
print pangloss.dashedline
del lc_dens
del lc_density
# ==============================================================
# Sample all lightcones to make the pdfs
# ==============================================================
# --------------------------------------------------------------
# Set up overdensity range
density = field_overdensity
drange = 0.02 # ~0.02 is what Zach used
# --------------------------------------------------------------------
# Find contribution to total kappa and mass at redshift intervals
zmax = zs+0.1
zbin = 25
zbins = numpy.linspace(0.0,zmax,zbin)
kappa_cont = numpy.zeros(Nc, zbin)
Mh_cont = numpy.zeros(Nc, zbin)
Mstell_cont = numpy.zeros(Nc, zbin)
# --------------------------------------------------------------------
# Select ALL lightcones and find their convergences
pk = []
pmu =[]
for j in xrange(Nc):
# Get lightcone
lc = pangloss.readPickle(calpickles[j])
# --------------------------------------------------------------------
# Calculate mu and kappa for all lightcones
# Redshift scaffolding:
lc.defineSystem(zd,zs)
lc.loadGrid(grid)
# Figure out data quality etc:
lc.configureForSurvey(experiment)
if j % 1000 == 0 and j !=0:
print ("Magnifier: ...on lightcone %i out of %i..." % (j,Nc))
lc.snapToGrid(grid)
# Draw c from Mhalo:
lc.drawConcentrations(errors=True)
# Compute each halo's contribution to the convergence:
lc.makeKappas(truncationscale=5)
k_add=lc.combineKappas()
mu_add=lc.combineMus(weakapprox=False)
# Add magnification and convergence to global PDF
pmu.append(lc.mu_add_total)
pk.append(lc.kappa_add_total)
if plot_contributions is True:
kappa_cont[j:,] = lc.findContributions('kappa')
Mh_cont[j:,] = lc.findContributions('mass')
Mstell_cont[j:,] = lc.findContributions('stellarmass')
# Make a nice visualisation of one of the lightcones
if j ==0:
lc.plots('kappa', output=CALIB_DIR+"/example_snapshot_kappa_uncalib_z=1.4.png")
lc.plots('mu', output=CALIB_DIR+"/example_snapshot_mu_uncalibz=1.4.png")
del lc
pk = numpy.array(pk)
pmu = numpy.array(pmu)
# --------------------------------------------------------------------
# Write PDFs to pickles
pangloss.writePickle(pk,CALIB_DIR+"/Pofk_z="+str(zs)+".pickle")
pangloss.writePickle(pmu,CALIB_DIR+"/PofMu_z="+str(zs)+".pickle")
del pk
del pmu
print "Magnifier: saved PofMu to "+CALIB_DIR+"/Pofk_z="+str(zs)+".pickle"
pmu = pangloss.readPickle(CALIB_DIR+"/PofMu_z="+str(zs)+".pickle")
pk = pangloss.readPickle(CALIB_DIR+"/Pofk_z="+str(zs)+".pickle")
# --------------------------------------------------------------------
# Plot contributions to total kappa and mass at redshifts
if plot_contributions:
mean_kappa_cont = numpy.mean(kappa_cont, axis=0)
mean_Mh_cont = numpy.mean(Mh_cont, axis=0)
mean_Mstell_cont = numpy.mean(Mstell_cont, axis=0)
plt.clf()
plt.figure()
ax1 = plt.subplot(2,1,1)
ax1.plot(zbins, mean_Mh_cont)
ax1.set_ylabel(r'Cumulative Sum of $M_h/M_{\odot}$')
ax2 = plt.subplot(2,1,2)
ax2.plot(zbins, mean_kappa_cont)
ax2.set_ylabel(r'Cumulative Sum of $\kappa_h$')
plt.tight_layout()
plt.xlabel('Redshift, z')
plt.savefig("figs/"+EXP_NAME+"contribution_z.pdf",dpi=300)
# --------------------------------------------------------------------
# Calculate the smooth components
kappa_smooth = numpy.mean(pk)
mu_smooth = numpy.mean(pmu)
print ' uncalibrated: <kappa> =',kappa_smooth, '<mu> =',mu_smooth
pmu = pmu - mu_smooth + 1.
pk = pk - kappa_smooth + 0.
print ' mean mu now = ', numpy.mean(pmu)
params = [{'param':'Mu', 'name':r'$\mu$', 'lc':pmu, 'smooth':mu_smooth, 'mean':1.0, 'height':30, 'min':0.4, 'max':2.0}]
lc_density = numpy.genfromtxt(CALIB_DIR+"/lc_density.txt", comments="#")
# =====================================================================
# For only <=4 values of density
# =====================================================================
for k in range(len(params)):
var = params[k]
full_pdf = var['lc']
name = var['name']
print 'Magnifier: Old min and max:', full_pdf.min(), full_pdf.max()
# --------------------------------------------------------------------
# Remove outliers !!! I think this is only important for v. high overdensity
if var['param']=='Kappa':
mask = numpy.where(full_pdf==full_pdf)
if var['param']=='Mu':
mask = numpy.where((full_pdf >= 0.) & (full_pdf < 2.))
par = full_pdf[mask]
new_density = lc_density[mask]
print ' Removing Outliers...'
print ' New min and max:', par.min(), par.max()
# Recalculate means and the calibrated pdf
smooth_new = numpy.mean(par)
new_pdf = par - smooth_new + var['mean']
par_mean = numpy.mean(new_pdf)
print ' New mean (this should be',var['mean'],'):', par_mean
# --------------------------------------------------------------------
# Plot all lines of sight
print pangloss.dashedline
print "Magnifier: constructing PDF for", var['param'],"..."
outputfile = CALIB_DIR+"/"+EXP_NAME+"_Pof"+var['param']+"_"+"_z="+str(zs)+"_allLoS.txt"
numpy.savetxt(outputfile, sub_pdf)
print "Magnifier: saved all LoS PDFs to",outputfile
# --------------------------------------------------------------------
# Select only lightcones within certain number density limits
# Need to mask out the outliers
means, fieldname = [], []
for i in range(len(density)):
sub_pdf = []
for j in xrange(len(new_pdf)):
if density[i] - drange <= round(new_density[j],2) <= density[i] + drange:
sub_pdf.append(new_pdf[j])
sub_pdf = numpy.array(sub_pdf)
Nlos = len(sub_pdf)
if Nlos == 0:
print "Magnifier: %s - there are NO LoS with number density ~ %.2f the average" % (field_name[i], density[i])
print "Magnifier: %s - no PDF will be made for this field" % (field_name[i])
else:
sub_mean = numpy.mean(sub_pdf)
print "Magnifier: %s - sampling %i LoS with number density ~ %.2f the average, mean mu=%.2f" % (field_name[i], Nlos, density[i], sub_mean)
if var['param']=='Mu':
numpy.savetxt(CALIB_DIR+"/"+EXP_NAME+str(field_name[i])+"_PofMu.txt", sub_pdf)
means.append(sub_mean)
fieldname.append(field_name[i])
meanmu_table = numpy.array([fieldname, means]).T
ascii.write(meanmu_table, CALIB_DIR+"/"+EXP_NAME+"_table_meanmu.txt", names=['#field','mean_mu'])
print " Mean mu of all the fields = ",numpy.mean(means)
print "Magnifier: saved PDFs to",outputfile
print pangloss.doubledashedline
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
print pangloss.doubledashedline
return
# ======================================================================
if __name__ == '__main__':
    # Command-line entry point: strip the program name and hand the
    # remaining arguments to Magnifier().
    Magnifier(sys.argv[1:])
# ======================================================================
|
gpl-2.0
|
holzgeist/Piwigo
|
language/tr_TR/common.lang.php
|
25079
|
<?php
// +-----------------------------------------------------------------------+
// | Piwigo - a PHP based photo gallery |
// +-----------------------------------------------------------------------+
// | Copyright(C) 2008-2014 Piwigo Team http://piwigo.org |
// | Copyright(C) 2003-2008 PhpWebGallery Team http://phpwebgallery.net |
// | Copyright(C) 2002-2003 Pierrick LE GALL http://le-gall.net/pierrick |
// +-----------------------------------------------------------------------+
// | This program is free software; you can redistribute it and/or modify |
// | it under the terms of the GNU General Public License as published by |
// | the Free Software Foundation |
// | |
// | This program is distributed in the hope that it will be useful, but |
// | WITHOUT ANY WARRANTY; without even the implied warranty of |
// | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU |
// | General Public License for more details. |
// | |
// | You should have received a copy of the GNU General Public License |
// | along with this program; if not, write to the Free Software |
// | Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, |
// | USA. |
// +-----------------------------------------------------------------------+
/*
Language Name: Türkçe [TR]
Version: 2.7.0
Language URI: http://piwigo.org/ext/extension_view.php?eid=436
Author: Piwigo team
Author URI: http://piwigo.org
*/
// Langage informations
$lang_info['language_name'] = 'Turkish';
$lang_info['country'] = 'Turkey';
$lang_info['direction'] = 'ltr';
$lang_info['code'] = 'tr';
$lang_info['zero_plural'] = true;
$lang['... or browse your empty gallery'] = '... yada boş galerinizde gezin';
$lang['... or please deactivate this message, I will find my way by myself'] = '... yada lütfen bu mesajı etkisizleştirin, kendi yolumu kendim bulacağım';
$lang['%d album updated'] = '%d albüm güncellendi';
$lang['%d albums updated'] = '%d albüm güncellendi';
$lang['%d comment to validate'] = '%d onaylanacak yorum';
$lang['%d comment'] = '%d yorum';
$lang['%d comments to validate'] = '%d onaylanacak yorum ';
$lang['%d comments'] = '%d yorum';
$lang['%d hit'] = '%d ziyaret';
$lang['%d hits'] = '%d ziyaret';
$lang['%d Kb'] = '%d Kilobayt';
$lang['%d new comment'] = '%d yeni yorum';
$lang['%d new comments'] = '%d yeni yorum';
$lang['%d new photo'] = '%d yeni fotoğraf';
$lang['%d new photos'] = '%d yeni fotoğraf';
$lang['%d new user'] = '%d yeni kullanıcı';
$lang['%d new users'] = '%d yeni kullanıcı';
$lang['%d photo is also linked to current tags'] = '%d fotoğraf aynı zamanda bu etiketlere sahip';
$lang['%d photo'] = '%d fotoğraf';
$lang['%d photos are also linked to current tags'] = '%d fotoğraf aynı zamanda bu etiketlere sahip';
$lang['%d photos'] = '%d fotoğraf';
$lang['%d rates'] = '%d oy';
$lang['(!) This comment requires validation'] = '(!) Bu yoruma onay gerekmektedir.';
$lang['A comment on your site'] = 'Sitenize bir yorum yapıldı';
$lang['About Piwigo'] = 'Piwigo Hakkında';
$lang['About'] = 'Hakkında';
$lang['Accelerate diaporama speed'] = 'Slayt gösterisi hızını yükselt';
$lang['Add a comment'] = 'Bir yorum ekle';
$lang['add this photo to your favorites'] = 'Bu fotoğrafı favorilerinize ekleyin';
$lang['Admin: %s'] = 'Yönetici: %s';
$lang['Administration'] = 'Yönetim';
$lang['Album results for'] = 'Albüm sonuçları';
$lang['Album'] = 'Albüm';
$lang['Album: %s'] = 'Albüm: %s';
$lang['Albums'] = 'Albümler';
$lang['All tags'] = 'Tüm Etiketler';
$lang['All'] = 'Tümü';
$lang['all'] = 'tümü';
$lang['An administrator must authorize your comment before it is visible.'] = 'Yorumunuzun görünür olabilmesi için yönetici tarafindan onaylanmasi gerekir.';
$lang['Anti-flood system : please wait for a moment before trying to post another comment'] = 'Anti-flood sistemi : lütfen bir müddet bekledikten sonra ikinci yorumu gönderiniz';
$lang['Any tag'] = 'Herhangi bir etiket';
$lang['Are you sure?'] = 'Emin misiniz?';
$lang['ascending'] = 'Artan';
$lang['At least one listed rule must be satisfied.'] = 'Listelenen kriterlerden en az biri yerine getirilmelidir.';
$lang['Author'] = 'Yayımcı';
$lang['author(s) : %s'] = 'Yazar(lar) : %s';
$lang['Author: %s'] = 'Yayımcı: %s';
$lang['Auto login'] = 'Beni hatırla';
$lang['available for administrators only'] = 'yalnızca yöneticilere için';
$lang['Bad status for user "guest", using default status. Please notify the webmaster.'] = 'Kullanıcı için kötü statü "ziyaretçi", varsayılan statü kullanılıyor. Lütfen webmaster\'a bildirin';
$lang['Best rated'] = 'En çok oylanan';
$lang['Browser: %s'] = 'Tarayıcı: %s';
$lang['Calendar'] = 'Takvim';
$lang['Change my password'] = 'Şifremi değiştir';
$lang['Check your email for the confirmation link'] = 'Onaylama bağlantısı için e-postanızı kontrol ediniz.';
$lang['chronology_monthly_calendar'] = 'Aylık takvim';
$lang['chronology_monthly_list'] = 'Aylık liste';
$lang['chronology_weekly_list'] = 'Haftalık liste';
$lang['Click here if your browser does not automatically forward you'] = 'Eğer otomatik olarak yönlendirilmediyseniz buraya tıklayınız.';
$lang['Click on the photo to see it in high definition'] = 'Fotoğrafı yüksek çözünürlükte görmek için üzerine tıklayın';
$lang['Close this window'] = 'Pencereyi kapat';
$lang['cloud'] = 'küme';
$lang['Comment by %s'] = 'Yorumlayan %s';
$lang['comment date'] = 'yorum tarihi';
$lang['Comment'] = 'Yorum';
$lang['Comment: %s'] = 'Yorum: %s';
$lang['Comments'] = 'Yorumlar';
$lang['Complete RSS feed (photos, comments)'] = 'RSS besleme tamamla (resimler, yorumlar)';
$lang['Confirm Password'] = 'Şifreyi Onayla';
$lang['Connected user: %s'] = 'Bağlanan kullanıcı: %s';
$lang['Connection settings'] = 'Bağlantı ayarları';
$lang['Contact webmaster'] = 'Webmaster ile bağlantı kur';
$lang['Contact'] = 'İletişim';
$lang['Cookies are blocked or not supported by your browser. You must enable cookies to connect.'] = 'Çerezler engelli yada web tarayıcınız tarafından desteklenmiyor. Oturum açmak için çerezlere izin verin.';
$lang['Create a new account'] = 'Yeni hesap oluştur';
$lang['created after %s (%s)'] = '%s (%s) \'den sonra oluşturulanlar';
$lang['created before %s (%s)'] = '%s (%s) \'den önce oluşturulanlar';
$lang['created between %s (%s) and %s (%s)'] = '%s (%s) ile %s (%s) arasında oluşturulanlar';
$lang['created on %s'] = '%s \'de oluşturulanlar';
$lang['Created on'] = 'Oluşturuldu';
$lang['Creation date'] = 'Oluşturulma Tarihi';
$lang['Current password is wrong'] = 'Şimdiki parola yanlış';
$lang['customize the appareance of the gallery'] = 'galeri görünümünü kişiselleştir';
$lang['Customize'] = 'Kişiselleştir';
$lang['Date'] = 'Tarih';
$lang['day'][0] = 'Pazar';
$lang['day'][1] = 'Pazartesi';
$lang['day'][2] = 'Salı';
$lang['day'][3] = 'Çarşamba';
$lang['day'][4] = 'Perşembe';
$lang['day'][5] = 'Cuma';
$lang['day'][6] = 'Cumartesi';
$lang['Default'] = 'Varsayılan';
$lang['delete all photos from your favorites'] = 'favorilerinizdeki tüm fotoğrafları silin';
$lang['delete this photo from your favorites'] = 'bu fotoğrafı favorilerinizden çıkarın';
$lang['Delete'] = 'Sil';
$lang['descending'] = 'Azalan';
$lang['Dimensions'] = 'Boyutlar';
$lang['display a calendar by creation date'] = 'oluşturma tarihine göre bir takvim göster';
$lang['display a calendar by posted date'] = 'gönderme gününü takvimde göster';
$lang['display a set of random photos'] = 'rastgele birkaç fotoğraf göster';
$lang['display all photos in all sub-albums'] = 'tüm alt albümlerdeki tüm fotoğrafları göster';
$lang['display available tags'] = 'uygun etiketleri göster';
$lang['display best rated photos'] = 'Ençok oy alan fotoğrafları göster';
$lang['display each day with photos, month per month'] = 'ay ay her günü fotoğraflarla göster';
$lang['display last user comments'] = 'Son kullanıcı yorumlarını göster';
$lang['display most recent photos'] = 'En son eklenen fotoğrafları göster';
$lang['display most visited photos'] = 'en çok görüntülenen fotoğrafları göster';
$lang['display only recently posted photos'] = 'sadece yakın zamanda gönderilen fotoğrafları göster';
$lang['display photos linked to this tag'] = 'sadece bu etikete sahip fotoğrafları göster';
$lang['display recently updated albums'] = 'Son güncellenen albümleri göster';
$lang['display this album'] = 'bu albümü göster';
$lang['display your favorites photos'] = 'Favori fotoğraflarımı göster';
$lang['Display'] = 'Görünüm';
$lang['Download this file'] = 'Bu dosyayı indir';
$lang['Download'] = 'İndir';
$lang['Each listed rule must be satisfied.'] = 'Listelenen her bir kriter yerine getirilmelidir.';
$lang['Edit a comment'] = 'Yorumu düzenle';
$lang['Edit'] = 'Düzenle';
$lang['Email address is missing. Please specify an email address.'] = 'e-Posta adresi yok. Lütfen bir e-Posta adresi belirtiniz.';
$lang['Email address'] = 'e-Posta adresi';
$lang['Email: %s'] = 'e-Posta: %s';
$lang['Empty query. No criteria has been entered.'] = 'Boş sorgulama. Arama kriteri girilmedi.';
$lang['End-Date'] = 'Bitiş tarihi';
$lang['Enter your new password below.'] = 'Yeni şifrenizi alt kısma giriniz';
$lang['Enter your personnal informations'] = 'Kişisel bilgilerinizi giriniz';
$lang['Error sending email'] = 'e-Posta göndermede hata';
$lang['excluded'] = 'çıkartıldı';
$lang['EXIF Metadata'] = 'EXIF Meta verisi';
$lang['Expand all albums'] = 'Tüm albümleri genişlet';
$lang['Favorites'] = 'Favoriler';
$lang['File name'] = 'Dosya adı';
$lang['File'] = 'Dosya';
$lang['Filesize'] = 'Dosya boyutu';
$lang['Filter and display'] = 'Filtrele ve Görüntüle';
$lang['Filter'] = 'Filtrele';
$lang['First'] = 'İlk';
$lang['Forgot your password?'] = 'Şifrenizi mi unuttunuz?';
$lang['from %s to %s'] = 'şundan %s buna %s';
$lang['Go through the gallery as a visitor'] = 'Galeride ziyaretçi olarak gezin';
$lang['group by letters'] = 'harflere göre grupla';
$lang['guest'] = 'Misafir';
$lang['Hello %s, your Piwigo photo gallery is empty!'] = 'Merhaba %s, Piwigo fotoğraf galeriniz boş!';
$lang['Hello'] = 'Merhaba';
$lang['Help'] = 'Yardım';
$lang['Home'] = 'Anasayfa';
$lang['html tags are not allowed in login'] = 'girişte html etiketlerine izin verilmez';
$lang['I want to add photos'] = 'Fotoğraf eklemek istiyorum';
$lang['Identification'] = 'Kullanıcı Girişi';
$lang['If this was a mistake, just ignore this email and nothing will happen.'] = 'Eğer bu bir hata ise, bu e-Postayı görmezden geliniz.';
$lang['in %d sub-album'] = '%d alt albümde';
$lang['in %d sub-albums'] = '%d alt albümde';
$lang['in this album'] = 'bu albümde';
$lang['included'] = 'dahil edildi';
$lang['Invalid key'] = 'Hatalı anahtar';
$lang['Invalid password!'] = 'Yanlış şifre!';
$lang['Invalid username or email'] = 'Hatalı kullanıcı adı veya e-Posta';
$lang['IP: %s'] = 'IP:%s';
$lang['IPTC Metadata'] = 'IPTC Meta verisi';
$lang['Keyword'] = 'Anahtar Kelime';
$lang['Kind of date'] = 'tarih çeşidi';
$lang['Language']='Dil';
$lang['large'] = 'L - büyük';
$lang['last %d days'] = 'son %d gün';
$lang['Last'] = 'Son';
$lang['letters'] = 'harfler';
$lang['Links'] = 'Bağlantılar';
$lang['login mustn\'t end with a space character'] = 'kullanıcı adı boşluk karakteri ile bitemez';
$lang['login mustn\'t start with a space character'] = 'kullanıcı adı boşluk karakteri ile başlayamaz';
$lang['Login'] = 'Oturum aç';
$lang['Logout'] = 'Çıkış yap';
$lang['mail address must be like xxx@yyy.eee (example : jack@altern.org)'] = 'e-Posta adresi xxx@yyy.eee (örnek : jack@altern.org) gibi olmalıdır';
$lang['Manage this user comment: %s'] = 'Bu kullanıcı yorumunu yönet: %s';
$lang['medium'] = 'M - orta';
$lang['Menu'] = 'Menü';
$lang['month'][1] = "Ocak";
$lang['month'][2] = "Şubat";
$lang['month'][3] = "Mart";
$lang['month'][4] = "Nisan";
$lang['month'][5] = "Mayıs";
$lang['month'][6] = "Haziran";
$lang['month'][7] = "Temmuz";
$lang['month'][8] = "Ağustos";
$lang['month'][9] = "Eylül";
$lang['month'][10] = "Ekim";
$lang['month'][11] = "Kasım";
$lang['month'][12] = "Aralık";
$lang['Most visited'] = 'En çok ziyaret edilenler';
$lang['N/A'] = 'N/A';
$lang['New on %s'] = 'Yeni %s';
$lang['New password'] = 'Yeni şifre';
$lang['Next'] = 'Sonraki';
$lang['no rate'] = 'puan verilmemiş';
$lang['No'] = 'Hayır';
$lang['Not repeat the slideshow'] = 'Slayt gösterisini tekrar etme';
$lang['Notification'] = 'Bildiri';
$lang['Number of items'] = 'Öğe adedi';
$lang['Number of photos per page'] = 'Sayfa başına fotoğraf sayısı';
$lang['obligatory'] = 'zorunlu';
$lang['Original dimensions'] = 'Orijinal boyutlar';
$lang['Original'] = "asıl";
$lang['Page generated in'] = 'Sayfa oluşturuldu';
$lang['Password forgotten'] = 'Unutulmuş şifre';
$lang['Password reset is not allowed for this user'] = 'Bu kullanıcı için şifre yenilemeye izin verilmiyor';
$lang['Password Reset'] = 'Şifreyi Sıfırla';
$lang['password updated'] = 'şifre güncellendi';
$lang['Password'] = 'Şifre';
$lang['Pause of slideshow'] = 'Slayt gösterisini dondur (Pause)';
$lang['photo'] = 'fotoğraf';
$lang['Photos only RSS feed'] = 'Yalnızca fotoğraflar için RSS beslemesi';
$lang['photos posted during the last %d days'] = 'son %d günde gösterilen fotoğraflar';
$lang['Photos posted within the last %d day.'] = 'Sadece son %d gün içinde yüklenen fotoğraflar.';
$lang['Photos posted within the last %d days.'] = 'Sadece son %d gün içinde yüklenen fotoğraflar.';
$lang['Piwigo Help'] = 'Piwigo yardım';
$lang['Play of slideshow'] = 'Slayt gösterisini yürüt';
$lang['Please enter your username or email address.'] = 'Lütfen kullanıcı adınızı veya e-posta adresinizi giriniz.';
$lang['Please, enter a login'] = 'Lütfen kullanıcı adınızı giriniz';
$lang['Post date'] = 'Gönderim tarihi';
$lang['posted after %s (%s)'] = '%s (%s) \'den sonra gönderilenler';
$lang['posted before %s (%s)'] = '%s (%s) \'den önce gönderilenler';
$lang['posted between %s (%s) and %s (%s)'] = '%s (%s) ile %s (%s) arasında gönderilenler';
$lang['posted on %s'] = '%s \'de gönderilenler';
$lang['Posted on'] = 'Gönderim saati';
$lang['Powered by'] = 'Destekleyen';
$lang['Preferences'] = 'Özellikler';
$lang['Previous'] = 'Önceki';
$lang['Profile'] = 'Profil';
$lang['Quick connect'] = 'Kullanıcı Girişi';
$lang['Quick search'] = 'Hızlı arama';
$lang['Random photos'] = 'Karışık fotoğraflar';
$lang['Rank'] = 'Sıralama';
$lang['Rate this photo'] = 'Bu fotoğrafa puan ver';
$lang['Rating score'] = 'Oy sayısı';
$lang['Recent albums'] = 'Son albümler';
$lang['Recent period must be a positive integer value'] = 'Yakın dönem pozitif bir tam sayı değerine sahip olmalıdır';
$lang['Recent period'] = 'Yakın dönem';
$lang['Recent photos'] = 'Son eklenenler';
$lang['Redirection...'] = 'Yönlendirme...';
$lang['Reduce diaporama speed'] = 'Slayt gösterisi hızını düşür';
$lang['Register'] = 'Kayıt Ol';
$lang['Registration of %s'] = 'Tescil Eden: %s';
$lang['Registration'] = 'Kayıt';
$lang['Related tags'] = 'İlgili Etiketler';
$lang['remove this tag from the list'] = 'listedeki işaretlemeyi kaldır';
$lang['Repeat the slideshow'] = 'Slayt gösterisini tekrar et';
$lang['representative'] = 'albüm pul resmi';
$lang['Reset to default values'] = 'Varsayılan ayarlara döndür';
$lang['Reset'] = 'Sıfırla';
$lang['Retrieve password'] = 'Şifre al';
$lang['Return to home page'] = 'Anasayfaya dön';
$lang['return to normal view mode'] = 'normal görüntü moduna geri dön';
$lang['return to the display of all photos'] = 'tüm fotoğraf görünümüne geri dön';
$lang['RSS feed'] = 'RSS Beslemesi';
$lang['Search by date'] = 'Tarihe göre ara';
$lang['Search for all terms'] = 'Tüm ifadelerde ara';
$lang['Search for any term'] = 'Herhangi bir ifadede ara';
$lang['Search for Author'] = 'Yayımcı adıyla ara';
$lang['Search for words'] = 'Kelimelerde ara';
$lang['Search in albums'] = 'Albümlerde ara';
$lang['Search in sub-albums'] = 'Alt albümlerde ara';
$lang['Search results'] = 'Arama sonuçları';
$lang['Search rules'] = 'Arama kriteri';
$lang['Search tags'] = 'Arama etiketi';
$lang['Search'] = 'Ara';
$lang['search'] = 'Arama';
$lang['searched words : %s'] = 'aranan kelimeler : %s';
$lang['Sent by'] = 'Gönderen';
$lang['set as album representative'] = 'albüm pul resmi olarak ayarla';
$lang['Show file metadata'] = 'Dosyanin meta verilerini göster';
$lang['Show number of comments'] = 'Yorumların sayısını göster';
$lang['Show number of hits'] = 'Ziyaret sayısını göster';
$lang['show tag cloud'] = 'etiket kümesini göster';
$lang['Since'] = 'Şu tarihten beri:';
$lang['slideshow'] = 'slayt gösterisi';
$lang['small'] = 'S - küçük';
$lang['Someone requested that the password be reset for the following user account:'] = 'Birisi belirtilen kullanıcı için şifre sıfırlaması istedi';
$lang['Sort by'] = 'Sırala';
$lang['Sort order'] = 'Sıralama düzeni';
$lang['Specials'] = 'Hızlı Gezinti';
$lang['SQL queries in'] = 'SQL Sorgulaması';
$lang['square'] = "kare";
$lang['stop the slideshow'] = 'slayt gösterisini durdur';
$lang['Submit'] = 'Gönder';
$lang['Tag results for'] = 'Etiket sonuçları';
$lang['Tag'] = 'Etiket';
$lang['Tags'] = 'Etiketler';
$lang['the beginning'] = 'başlangıç';
$lang['The gallery is locked for maintenance. Please, come back later.'] = 'Bakım çalışmaları nedeniyle kapalıyız. Daha sonra tekrar ziyaret ediniz.';
$lang['The number of photos per page must be a not null scalar'] = 'Sayfa başına fotoğraf sayısı sıfırdan farklı bir tam sayı olmalıdır';
$lang['The passwords do not match'] = 'Şifreler eşleşmiyor';
$lang['The RSS notification feed provides notification on news from this website : new photos, updated albums, new comments. Use a RSS feed reader.'] = 'Bu RSS beslemesi bu siteden yeni resimler, güncellenen albümler, yeni yorumlar içerir. Bir RSS okuyucu ile kullanılabilir';
$lang['the username must be given'] = 'kullanıcı adı gerekmektedir';
$lang['This author modified following comment:'] = 'Bu yayımcı bu yorumu değiştirdi:';
$lang['This author removed the comment with id %d'] = 'Bu yayımcı %d id numaralı yorumu silmiştir';
$lang['this email address is already in use'] = 'bu e-posta adresi başka bir kullanıcı tarafından kullanılmaktadır.';
$lang['This login is already used by another user'] = 'Bu kullanıcı adı başka biri tarafından kullanılıyor';
$lang['this login is already used'] = 'kullanıcı adı zaten kullanılmakta';
$lang['thumb'] = 'Pul resmi';
$lang['Thumbnails'] = 'Küçük resimler';
$lang['To reset your password, visit the following address:'] = 'Şifrenizi yenilemek için bu adrese gidiniz:';
$lang['today'] = 'bugün';
$lang['Unknown feed identifier'] = 'Bilinmeyen besleme kimliği';
$lang['Unknown identifier'] = 'Bilinmeyen kimlik';
$lang['Update your rating'] = 'Puanlamanızı güncelleyin';
$lang['useful when password forgotten'] = 'şifrenizi unuttuğunuz zamanlarda kullanışlıdır';
$lang['User "%s" has no email address, password reset is not possible'] = 'Kullanıcı "%s" e-posta adresi belirtmemiş, yeniden şifre alması imkansız';
$lang['User comments'] = 'Ziyaretçi yorumları';
$lang['User: %s'] = 'Kullanıcı: %s';
$lang['Username "%s" on gallery %s'] = 'Kullanici adi "%s" zaten galeride %s';
$lang['Username or email'] = 'Kullanıcı adı veya e-posta';
$lang['Username'] = 'Kullanıcı Adı';
$lang['View'] = 'Görünüm';
$lang['Visits'] = 'Görüntülenme';
$lang['Webmaster'] = 'Site Yöneticisi';
$lang['Week %d'] = 'Hafta %d';
$lang['Welcome to your Piwigo photo gallery!'] = 'Piwigo fotoğraf galerinize hoş geldiniz!';
$lang['Welcome'] = 'Hoş geldiniz';
$lang['wrong date'] = 'Yanlış Tarih';
$lang['xlarge'] = 'XL - ekstra geniş';
$lang['xxlarge'] = 'XXL - devasa';
$lang['Yes'] = 'Evet';
$lang['You are not authorized to access the requested page'] = 'İstenen sayfaya erişim yetkiniz yok';
$lang['You will receive a link to create a new password via email.'] = 'E-posta yoluyla yeni bir şifre oluşturmak için bir bağlantı alacaksınız.';
$lang['Your comment has been registered'] = 'Yorumunuz kaydedildi';
$lang['Your comment has NOT been registered because it did not pass the validation rules'] = 'Doğrulama kriterlerini geçemedigi için yorumunuz kayit edilmedi';
$lang['Your favorites'] = 'Favorilerim';
$lang['Your Gallery Customization'] = 'Galeri Özelleştirmesi ';
$lang['Your password has been reset'] = 'Şifreniz yenilendi';
$lang['Hello %s,'] = 'Merhaba %s,';
$lang['Send my connection settings by email'] = 'Bağlantı ayarlarımı e-posta yoluyla gönder';
$lang['Show latest comments first'] = 'Yeni yorumları başta göster';
$lang['Show oldest comments first'] = 'Eski yorumları başta göster'; // fixed typo: "baştar" is not a Turkish word; "başta" ("first/at the top") matches the sibling entry above
$lang['Successfully registered, you will soon receive an email with your connection settings. Welcome!'] = 'Başarılı olarak kayıt oldunuz, yakında bağlantı ayarlarını içeren bir e-posta alacaksınız. Hoş geldiniz.';
$lang['Username modification'] = 'Kullanıcı adı değişikliği';
$lang['edit'] = 'düzenle';
$lang['2small'] = 'XXS - minicik';
$lang['Thank you for registering at %s!'] = '%s de kayıt için teşekkür ederiz!';
$lang['Your username has been successfully changed to : %s'] = 'Kullanıcı adınız başarılı şekilde %s olarak değiştirildi.';
$lang['xsmall'] = 'XS - çok küçük';
$lang['Here are your connection settings'] = 'Bağlantı ayarlarınız bunlardır';
$lang['If you think you\'ve received this email in error, please contact us at %s'] = 'Bu e-Postayı yanlışlıkla aldığınızı düşünüyorsanız bizimle %s ^\'den bağlantı kurabilirsiniz.';
$lang['Password: %s'] = 'Şifre: %s';
$lang['Username: %s'] = 'Kullanıcı adı: %s';
$lang['Date created, new → old'] = 'Oluşturulan tarih, yeni → eski';
$lang['Date created, old → new'] = 'Oluşturulan tarih, eski → yeni';
$lang['Date posted, new → old'] = 'Gönderilme tarihi, yeni → eski';
$lang['Date posted, old → new'] = 'Gönderilme tarihi, eski → yeni';
$lang['Desktop'] = 'Masaüstü';
$lang['File name, A → Z'] = 'Dosya adı, A → Z';
$lang['File name, Z → A'] = 'Dosya adı, Z → A';
$lang['Manual sort order'] = 'Elle sıralama';
$lang['Mobile'] = 'Mobil';
$lang['Numeric identifier, 1 → 9'] = 'Sayısal Tanımlayıcı (1-9)';
$lang['Numeric identifier, 9 → 1'] = 'Sayısal Tanımlayıcı (9-1)';
$lang['Photo sizes'] = 'Fotoğraf boyutları';
$lang['Photo title, A → Z'] = 'Fotoğraf başlığı, A → Z';
$lang['Photo title, Z → A'] = 'Fotoğraf başlığı, Z → A';
$lang['Rating score, high → low'] = 'Puan, yüksek → düşük';
$lang['Rating score, low → high'] = 'Puan, düşük → yüksek';
$lang['View in'] = 'Görüntüle';
$lang['Visits, high → low'] = 'Görüntülenme, yüksek → düşük';
$lang['Visits, low → high'] = 'Görüntülenme, düşük → yüksek';
$lang['%d rate'] = '%d oy';
$lang['Bad request'] = 'Geçersiz istek';
$lang['Forbidden'] = 'Yasaklanmış';
$lang['Page not found'] = 'Sayfa bulunamadı';
$lang['Permalink for album not found'] = 'Albüm kalıcı bağlantısı bulunamadı';
$lang['Piwigo encountered a non recoverable error'] = 'Piwigo düzeltilemeyen bir hata ile karşılaştı';
$lang['Requested album does not exist'] = 'Talep edilen albüm yok';
$lang['Requested tag does not exist'] = 'Talep edilen etiket yok';
$lang['Email address is mandatory'] = 'e-Posta adresi mecburidir';
$lang['Username is mandatory'] = 'Kullanıcı adı mecburidir';
$lang['mandatory'] = 'zorunlu';
$lang['Website'] = 'Web sitesi';
$lang['Your website URL is invalid'] = 'Web sitenizin URL adresi geçersiz';
$lang['Email'] = 'E-posta';
$lang['First Page'] = 'İlk sayfa';
$lang['Go back to the album'] = 'Albüme geri dön';
$lang['Last Page'] = 'Son sayfa';
$lang['Password is missing. Please enter the password.'] = 'Şifre eksik. Lütfen şifrenizi girin.';
$lang['Password confirmation is missing. Please confirm the chosen password.'] = 'Şifre doğrulama eksik. Lütfen seçtiğiniz şifreyi doğrulayın.';
$lang['%d photos per page'] = 'sayfa başına %d fotoğraf';
$lang['Theme'] = 'Tema';
$lang['No results for'] = 'Sonuç bulunamadı:';
$lang['Photo description'] = 'Fotoğraf açıklamaları';
$lang['Photo title'] = 'Fotoğraf başlığı';
$lang['Apply on properties'] = 'Niteliklerine uygula';
$lang['Album name, A → Z'] = 'Albüm adı, A → Z';
// The original literal was split across two source lines, so the translated
// value started with a stray "\n"; keep it on one line like every sibling entry.
$lang['Album name, Z → A'] = 'Albüm adı, Z → A';
$lang['Link: %s'] = 'Bağlantı: %s';
|
gpl-2.0
|
VytautasBoznis/l2.skilas.lt
|
aCis_gameserver/java/net/sf/l2j/gameserver/datatables/PetDataTable.java
|
7749
|
/*
* This program is free software: you can redistribute it and/or modify it under
* the terms of the GNU General Public License as published by the Free Software
* Foundation, either version 3 of the License, or (at your option) any later
* version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
* details.
*
* You should have received a copy of the GNU General Public License along with
* this program. If not, see <http://www.gnu.org/licenses/>.
*/
package net.sf.l2j.gameserver.datatables;
import java.io.File;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Map;
import java.util.logging.Logger;
import net.sf.l2j.L2DatabaseFactory;
import net.sf.l2j.gameserver.model.L2PetData;
import net.sf.l2j.gameserver.model.L2PetData.L2PetLevelData;
import net.sf.l2j.gameserver.model.actor.instance.L2PetInstance;
import net.sf.l2j.gameserver.model.item.kind.Item;
import net.sf.l2j.gameserver.model.item.type.EtcItemType;
import net.sf.l2j.gameserver.xmlfactory.XMLDocumentFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
/**
 * Loads pet templates from ./data/xml/pets_stats.xml and exposes them by pet
 * npc id: per-level stats, the accepted food item ids, plus static helpers to
 * classify pet npc ids and pet-related item ids.
 */
public class PetDataTable
{
	// NOTE(review): the original logger was created with L2PetInstance's class
	// name, which misattributed every record of this class; use our own name.
	private static final Logger _log = Logger.getLogger(PetDataTable.class.getName());
	
	// Pet npc id -> parsed template (food list + one stat line per level).
	private static final Map<Integer, L2PetData> _petTable = new HashMap<>();
	
	public static PetDataTable getInstance()
	{
		return SingletonHolder._instance;
	}
	
	protected PetDataTable()
	{
		load();
	}
	
	/** Drops all cached templates and re-reads the XML file. */
	public void reload()
	{
		_petTable.clear();
		load();
	}
	
	/**
	 * Parses ./data/xml/pets_stats.xml. Each &lt;pet&gt; node carries the pet id and
	 * a "food" attribute (semicolon-separated item ids); each child &lt;stat&gt; node
	 * describes the pet at one level.
	 */
	public void load()
	{
		try
		{
			File f = new File("./data/xml/pets_stats.xml");
			Document doc = XMLDocumentFactory.getInstance().loadDocument(f);
			
			Node n = doc.getFirstChild();
			for (Node d = n.getFirstChild(); d != null; d = d.getNextSibling())
			{
				// General behavior of the pet (currently, petId / foodId)
				if (d.getNodeName().equalsIgnoreCase("pet"))
				{
					int petId = Integer.parseInt(d.getAttributes().getNamedItem("id").getNodeValue());
					L2PetData petData = new L2PetData();
					
					String[] values = d.getAttributes().getNamedItem("food").getNodeValue().split(";");
					int[] food = new int[values.length];
					for (int i = 0; i < values.length; i++)
						food[i] = Integer.parseInt(values[i]);
					
					petData.setFood(food);
					
					// Then check particular stats (each line equals one different level)
					for (Node p = d.getFirstChild(); p != null; p = p.getNextSibling())
					{
						if (p.getNodeName().equals("stat"))
						{
							int petLevel = Integer.parseInt(p.getAttributes().getNamedItem("level").getNodeValue());
							
							L2PetLevelData stat = new L2PetLevelData();
							stat.setPetMaxExp(Integer.parseInt(p.getAttributes().getNamedItem("expMax").getNodeValue()));
							stat.setPetMaxHP(Integer.parseInt(p.getAttributes().getNamedItem("hpMax").getNodeValue()));
							stat.setPetMaxMP(Integer.parseInt(p.getAttributes().getNamedItem("mpMax").getNodeValue()));
							stat.setPetPAtk(Integer.parseInt(p.getAttributes().getNamedItem("patk").getNodeValue()));
							stat.setPetPDef(Integer.parseInt(p.getAttributes().getNamedItem("pdef").getNodeValue()));
							stat.setPetMAtk(Integer.parseInt(p.getAttributes().getNamedItem("matk").getNodeValue()));
							stat.setPetMDef(Integer.parseInt(p.getAttributes().getNamedItem("mdef").getNodeValue()));
							stat.setPetMaxFeed(Integer.parseInt(p.getAttributes().getNamedItem("feedMax").getNodeValue()));
							stat.setPetFeedNormal(Integer.parseInt(p.getAttributes().getNamedItem("feednormal").getNodeValue()));
							stat.setPetFeedBattle(Integer.parseInt(p.getAttributes().getNamedItem("feedbattle").getNodeValue()));
							stat.setPetRegenHP(Integer.parseInt(p.getAttributes().getNamedItem("hpregen").getNodeValue()));
							stat.setPetRegenMP(Integer.parseInt(p.getAttributes().getNamedItem("mpregen").getNodeValue()));
							stat.setOwnerExpTaken(Float.valueOf(p.getAttributes().getNamedItem("owner_exp_taken").getNodeValue()));
							
							// Create a line with pet level as "cursor"
							petData.addNewStat(petLevel, stat);
						}
					}
					// Attach this stat line to the pet
					_petTable.put(petId, petData);
				}
			}
		}
		catch (Exception e)
		{
			_log.warning("L2PetDataTable: Error while creating table" + e);
		}
		_log.info("PetDataTable: Loaded " + _petTable.size() + " pets.");
	}
	
	/**
	 * @param petID the pet npc id.
	 * @param petLevel the level to look up.
	 * @return the stat line of that pet at that level, or null for an unknown
	 *         pet id (the original code threw NullPointerException instead).
	 */
	public L2PetLevelData getPetLevelData(int petID, int petLevel)
	{
		final L2PetData petData = _petTable.get(petID);
		return (petData == null) ? null : petData.getPetLevelData(petLevel);
	}
	
	/** @return the full template for the given pet npc id, or null if unknown. */
	public L2PetData getPetData(int petID)
	{
		return _petTable.get(petID);
	}
	
	/*
	 * Pets stuffs
	 */
	public static boolean isWolf(int npcId)
	{
		return npcId == 12077;
	}
	
	public static boolean isSinEater(int npcId)
	{
		return npcId == 12564;
	}
	
	public static boolean isHatchling(int npcId)
	{
		return npcId > 12310 && npcId < 12314;
	}
	
	public static boolean isStrider(int npcId)
	{
		return npcId > 12525 && npcId < 12529;
	}
	
	public static boolean isWyvern(int npcId)
	{
		return npcId == 12621;
	}
	
	public static boolean isBaby(int npcId)
	{
		return npcId > 12779 && npcId < 12783;
	}
	
	/** @return true if the given item id is one of the hardcoded pet food items. */
	public static boolean isPetFood(int itemId)
	{
		switch (itemId)
		{
			case 2515:
			case 4038:
			case 5168:
			case 5169:
			case 6316:
			case 7582:
				return true;
				
			default:
				return false;
		}
	}
	
	/** @return true if the item template exists and is of type PET_COLLAR. */
	public static boolean isPetCollar(int itemId)
	{
		Item item = ItemTable.getInstance().getTemplate(itemId);
		return item != null && item.getItemType() == EtcItemType.PET_COLLAR;
	}
	
	/**
	 * @param npcId a pet npc id.
	 * @return the collar item id(s) that summon this pet, or {0} for unknown ids.
	 */
	public static int[] getPetItemsAsNpc(int npcId)
	{
		switch (npcId)
		{
			case 12077:// wolf pet a
				return new int[]
				{
					2375
				};
			case 12564:// Sin Eater
				return new int[]
				{
					4425
				};
				
			case 12311:// hatchling of wind
			case 12312:// hatchling of star
			case 12313:// hatchling of twilight
				return new int[]
				{
					3500,
					3501,
					3502
				};
				
			case 12526:// wind strider
			case 12527:// Star strider
			case 12528:// Twilight strider
				return new int[]
				{
					4422,
					4423,
					4424
				};
				
			case 12621:// Wyvern
				return new int[]
				{
					8663
				};
				
			case 12780:// Baby Buffalo
			case 12782:// Baby Cougar
			case 12781:// Baby Kookaburra
				return new int[]
				{
					6648,
					6649,
					6650
				};
				
			// unknown item id.. should never happen
			default:
				return new int[]
				{
					0
				};
		}
	}
	
	public static boolean isMountable(int npcId)
	{
		return npcId == 12526 // wind strider
			|| npcId == 12527 // star strider
			|| npcId == 12528 // twilight strider
			|| npcId == 12621; // wyvern
	}
	
	/**
	 * Checks whether a pet with the given name already exists for any of the
	 * collar item ids of the given pet npc.
	 * <p>
	 * The original code joined all collar ids into one comma-separated string
	 * and bound it to a single "IN (?)" placeholder; JDBC then compared the
	 * integer item_id column against the whole string (e.g. "3500, 3501"),
	 * which can never match. We now generate one placeholder per id and bind
	 * each as an int.
	 * @param name the pet name to test.
	 * @param petNpcId the pet npc id whose collar items restrict the search.
	 * @return true if the name is taken, or if the query failed (fail-closed).
	 */
	public boolean doesPetNameExist(String name, int petNpcId)
	{
		boolean result = true;
		
		final int[] collarIds = getPetItemsAsNpc(petNpcId);
		
		// One "?" per collar item id.
		final StringBuilder placeholders = new StringBuilder();
		for (int i = 0; i < collarIds.length; i++)
		{
			if (i > 0)
				placeholders.append(", ");
			placeholders.append('?');
		}
		
		// try-with-resources guarantees the statement/resultset are closed even
		// when executeQuery throws (the original leaked them on error).
		try (Connection con = L2DatabaseFactory.getInstance().getConnection();
			PreparedStatement statement = con.prepareStatement("SELECT name FROM pets p, items i WHERE p.item_obj_id = i.object_id AND name=? AND i.item_id IN (" + placeholders + ")"))
		{
			statement.setString(1, name);
			for (int i = 0; i < collarIds.length; i++)
				statement.setInt(i + 2, collarIds[i]);
			
			try (ResultSet rset = statement.executeQuery())
			{
				result = rset.next();
			}
		}
		catch (SQLException e)
		{
			_log.warning("could not check existing petname:" + e.getMessage());
		}
		return result;
	}
	
	private static class SingletonHolder
	{
		protected static final PetDataTable _instance = new PetDataTable();
	}
}
|
gpl-2.0
|
shannah/cn1
|
Ports/iOSPort/xmlvm/apache-harmony-6.0-src-r991881/classlib/modules/awt/src/test/impl/boot/java/awt/AccessibleAWTWindowTest.java
|
3497
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* @author Dmitry A. Durnev
*/
package java.awt;
import java.awt.Window.AccessibleAWTWindow;
import javax.accessibility.AccessibleContext;
import javax.accessibility.AccessibleRole;
import javax.accessibility.AccessibleState;
import javax.accessibility.AccessibleStateSet;
import junit.framework.TestCase;
/**
* AccessibleAWTWindowTest
*/
/**
 * AccessibleAWTWindowTest
 *
 * Exercises {@link Window.AccessibleAWTWindow}: the reported accessible role,
 * the accessible state set of a focused window, and the concrete type of the
 * accessible context returned by a plain (frame-owned) Window.
 */
public class AccessibleAWTWindowTest extends TestCase {
    private Window window;

    AccessibleContext ac;

    private Frame frame;

    private Robot robot;

    @Override
    protected void setUp() throws Exception {
        super.setUp();
        // A Window always needs an owner, so create a throwaway Frame first.
        frame = new Frame();
        window = new Window(frame);
        ac = window.getAccessibleContext();
        robot = new Robot();
    }

    @Override
    protected void tearDown() throws Exception {
        super.tearDown();
        // Release the native peer only if one was ever created.
        if ((frame == null) || !frame.isDisplayable()) {
            return;
        }
        frame.dispose();
    }

    public final void testGetAccessibleRole() {
        final AccessibleRole role = ac.getAccessibleRole();
        assertSame(AccessibleRole.WINDOW, role);
    }

    @SuppressWarnings("deprecation")
    public final void testGetAccessibleStateSet() {
        frame.show();
        window.setFocusable(true);
        window.setVisible(true);
        waitFocus();
        assertTrue(window.isFocusOwner());

        AccessibleStateSet states = ac.getAccessibleStateSet();
        assertTrue("accessible window is active",
                   states.contains(AccessibleState.ACTIVE));
        assertTrue("accessible window is showing",
                   states.contains(AccessibleState.SHOWING));
        assertTrue("accessible window is focusable",
                   states.contains(AccessibleState.FOCUSABLE));
        assertTrue("accessible window is focused",
                   states.contains(AccessibleState.FOCUSED));
        assertFalse("accessible window is NOT resizable",
                    states.contains(AccessibleState.RESIZABLE));

        // Focus moved to the child window, so the owner frame must not report
        // ACTIVE on its own accessible context.
        assertTrue(frame.isActive());
        states = frame.getAccessibleContext().getAccessibleStateSet();
        assertFalse("accessible frame is NOT active",
                    states.contains(AccessibleState.ACTIVE));
    }

    public final void testAccessibleAWTWindow() {
        // The context of a plain Window is the AccessibleAWTWindow inner class.
        assertTrue(ac instanceof AccessibleAWTWindow);
    }

    /**
     * Polls until the window owns the focus, sleeping with an exponential
     * backoff (32ms, 64ms, ...) and giving up after roughly one minute.
     */
    private void waitFocus() {
        final int thresholdMs = 60000;
        for (int waited = 0, delayMs = 32;
             !window.isFocused() && (waited < thresholdMs);
             delayMs <<= 1) {
            robot.delay(delayMs);
            waited += delayMs;
        }
    }
}
|
gpl-2.0
|
samskivert/ikvm-openjdk
|
build/linux-amd64/impsrc/com/sun/tools/internal/xjc/reader/dtd/Messages.java
|
2633
|
/*
* Copyright (c) 2005, 2006, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package com.sun.tools.internal.xjc.reader.dtd;
import java.text.MessageFormat;
import java.util.ResourceBundle;
/**
* Formats error messages.
*/
class Messages
{
    /**
     * Looks up the message pattern for {@code property} in this package's
     * {@code MessageBundle} resource and interpolates {@code args} into it
     * via {@link MessageFormat}.
     *
     * @param property resource bundle key (one of the constants below)
     * @param args     positional arguments for the message pattern
     * @return the formatted, localized message
     */
    static String format( String property, Object... args ) {
        final String bundleName =
            Messages.class.getPackage().getName() + ".MessageBundle";
        final String pattern =
            ResourceBundle.getBundle(bundleName).getString(property);
        return MessageFormat.format(pattern, args);
    }

    // Message keys. The trailing comment documents the argument count
    // expected by each pattern.
    public static final String ERR_NO_ROOT_ELEMENT = // arg:0
        "TDTDReader.NoRootElement";
    public static final String ERR_UNDEFINED_ELEMENT_IN_BINDINFO = // arg:1
        "TDTDReader.UndefinedElementInBindInfo";
    public static final String ERR_CONVERSION_FOR_NON_VALUE_ELEMENT = // arg:1
        "TDTDReader.ConversionForNonValueElement";
    public static final String ERR_CONTENT_PROPERTY_PARTICLE_MISMATCH = // arg:1
        "TDTDReader.ContentProperty.ParticleMismatch";
    public static final String ERR_CONTENT_PROPERTY_DECLARATION_TOO_SHORT = // arg:1
        "TDTDReader.ContentProperty.DeclarationTooShort";
    public static final String ERR_BINDINFO_NON_EXISTENT_ELEMENT_DECLARATION = // arg:1
        "TDTDReader.BindInfo.NonExistentElementDeclaration";
    public static final String ERR_BINDINFO_NON_EXISTENT_INTERFACE_MEMBER = // arg:1
        "TDTDReader.BindInfo.NonExistentInterfaceMember";
}
|
gpl-2.0
|
cpinfold/eventum
|
docs/examples/partner/class.example.php
|
2572
|
<?php
/*
* This file is part of the Eventum (Issue Tracking System) package.
*
* @copyright (c) Eventum Team
* @license GNU General Public License, version 2 or later (GPL-2+)
*
* For the full copyright and license information,
* please see the COPYING and AUTHORS files
* that were distributed with this source code.
*/
/**
* Example class for partner business logic.
*/
/**
 * Example partner backend: echoes a short line for every event it receives
 * and denies access to most per-issue features and sections.
 */
class Example_Partner_Backend extends Abstract_Partner_Backend
{
    public function __construct()
    {
        // setup the backend
    }

    /** Human readable name of this partner backend. */
    public function getName()
    {
        return 'Example';
    }

    public function issueAdded($iss_id)
    {
        echo "partner: issue $iss_id added for " . $this->getName();
    }

    public function issueRemoved($iss_id)
    {
        echo "partner: issue $iss_id removed for " . $this->getName();
    }

    public function handleNewEmail($iss_id, $sup_id)
    {
        echo "partner: new email $sup_id on issue $iss_id";
    }

    public function handleNewNote($iss_id, $not_id)
    {
        echo "partner: new note $not_id on $iss_id";
    }

    public function handleIssueChange($iss_id, $usr_id, $old_details, $changes)
    {
        echo "partner: issue $iss_id changed";
    }

    public function getIssueMessage($iss_id)
    {
        return 'foo blah blah';
    }

    public static function canUserAccessFeature($usr_id, $feature)
    {
        // Explicitly denied features; anything else falls through and
        // yields null, exactly like the original switch without default.
        // in_array() uses loose comparison, matching switch semantics.
        $denied = array(
            'create_issue',
            'associate_emails',
            'reports',
        );
        if (in_array($feature, $denied)) {
            return false;
        }
    }

    public static function canUserAccessIssueSection($usr_id, $section)
    {
        // All of these sections are hidden from partner users; unknown
        // sections fall through to an implicit null, as before.
        $denied = array(
            'partners',
            'drafts',
            'time',
            'notes',
            'phone',
            'files',
            'history',
            'notification_list',
            'authorized_repliers',
            'change_reporter',
            'change_status',
            'convert_note',
        );
        if (in_array($section, $denied)) {
            return false;
        }
    }

    public static function canUpdateIssue($issue_id, $usr_id)
    {
        return false;
    }
}
|
gpl-2.0
|
fberst/xcsoar
|
src/Screen/Memory/Buffer.hpp
|
3311
|
/*
Copyright_License {
XCSoar Glide Computer - http://www.xcsoar.org/
Copyright (C) 2000-2016 The XCSoar Project
A detailed list of copyright holders can be found in the file "AUTHORS".
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
}
*/
#ifndef XCSOAR_SCREEN_BUFFER_HPP
#define XCSOAR_SCREEN_BUFFER_HPP
#include "Util/Compiler.h"
/**
* A reference to an image buffer (or a portion of it) that we can
* write to. This class does not allocate or free any memory, it
* refers to a buffer owned by somebody else.
*/
template<typename PixelTraits>
struct WritableImageBuffer {
  typedef typename PixelTraits::pointer pointer;
  typedef typename PixelTraits::rpointer rpointer;
  typedef typename PixelTraits::const_pointer const_pointer;

  /* start of the pixel data; not owned by this object in general,
     though Allocate()/Free() below can manage it for the caller */
  rpointer data;

  /* pitch is in bytes per row; width and height are in pixels */
  unsigned pitch, width, height;

  /* a null buffer of zero size */
  static constexpr WritableImageBuffer<PixelTraits> Empty() {
    return { nullptr, 0, 0, 0 };
  }

  /* allocate a new pixel array for the given size; the caller is
     responsible for eventually calling Free() */
  void Allocate(unsigned _width, unsigned _height) {
    unsigned i = PixelTraits::CalcIncrement(_width);
    data = new typename PixelTraits::color_type[i * _height];
    pitch = i * sizeof(typename PixelTraits::color_type);
    width = _width;
    height = _height;
  }

  /* release the array previously created by Allocate() */
  void Free() {
    delete[] data;
    data = nullptr;
  }

  /* is (x, y) inside the buffer bounds? */
  constexpr bool Check(unsigned x, unsigned y) const {
    return x < width && y < height;
  }

  /* address of the pixel at (x, y); no bounds checking */
  constexpr pointer At(unsigned x, unsigned y) {
    return PixelTraits::At(data, pitch, x, y);
  }

  constexpr const_pointer At(unsigned x, unsigned y) const {
    return PixelTraits::At(data, pitch, x, y);
  }
};
/**
* A reference to an image buffer (or a portion of it) that is
* read-only. This class does not allocate or free any memory, it
* refers to a buffer owned by somebody else.
*/
template<typename PixelTraits>
struct ConstImageBuffer {
  typedef typename PixelTraits::const_pointer pointer;
  typedef typename PixelTraits::const_rpointer rpointer;

  /* start of the (read-only) pixel data; not owned by this object */
  rpointer data;

  /* pitch is in bytes per row; width and height are in pixels */
  unsigned pitch, width, height;

  ConstImageBuffer() = default;

  constexpr ConstImageBuffer(rpointer _data, unsigned _pitch,
                             unsigned _width, unsigned _height)
    :data(_data), pitch(_pitch), width(_width), height(_height) {}

  /* implicit conversion from the writable variant */
  constexpr ConstImageBuffer(WritableImageBuffer<PixelTraits> other)
    :data(other.data), pitch(other.pitch),
     width(other.width), height(other.height) {}

  /* a null buffer of zero size.  Fixed: this used to return
     WritableImageBuffer<PixelTraits> (an apparent copy&paste from the
     struct above); callers still work because Writable converts to
     Const implicitly, but the natural type is ConstImageBuffer. */
  static constexpr ConstImageBuffer<PixelTraits> Empty() {
    return { nullptr, 0, 0, 0 };
  }

  /* is (x, y) inside the buffer bounds? */
  constexpr bool Check(unsigned x, unsigned y) const {
    return x < width && y < height;
  }

  /* address of the pixel at (x, y); no bounds checking */
  constexpr pointer At(unsigned x, unsigned y) const {
    return PixelTraits::At(data, pitch, x, y);
  }
};
#endif
|
gpl-2.0
|
HiT-Hi-FiT-Hai/dcplusplus
|
boost/boost/archive/binary_wiarchive.hpp
|
1946
|
#ifndef BOOST_ARCHIVE_BINARY_WIARCHIVE_HPP
#define BOOST_ARCHIVE_BINARY_WIARCHIVE_HPP
// MS compatible compilers support #pragma once
#if defined(_MSC_VER) && (_MSC_VER >= 1020)
# pragma once
#endif
/////////1/////////2/////////3/////////4/////////5/////////6/////////7/////////8
// binary_wiarchive.hpp
// (C) Copyright 2002 Robert Ramey - http://www.rrsd.com .
// Use, modification and distribution is subject to the Boost Software
// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
// See http://www.boost.org for updates, documentation, and revision history.
#include <boost/config.hpp>
#ifdef BOOST_NO_STD_WSTREAMBUF
#error "wide char i/o not supported on this platform"
#else
#include <istream> // wistream
#include <boost/archive/binary_iarchive_impl.hpp>
namespace boost {
namespace archive {
// do not derive from this class. If you want to extend this functionality
// via inhertance, derived from binary_iarchive_impl instead. This will
// preserve correct static polymorphism.
// Wide-character binary input archive.  Thin facade over
// binary_iarchive_impl instantiated for std::wistream's character and
// traits types; construction is the only thing it adds.
class binary_wiarchive :
    public binary_iarchive_impl<
        binary_wiarchive, std::wistream::char_type, std::wistream::traits_type
    >
{
public:
    // Read from a wide input stream; flags are passed through to the
    // archive implementation.
    binary_wiarchive(std::wistream & is, unsigned int flags = 0) :
        binary_iarchive_impl<
            binary_wiarchive, std::wistream::char_type, std::wistream::traits_type
        >(is, flags)
    {}
    // Read directly from a wide stream buffer.
    binary_wiarchive(std::wstreambuf & bsb, unsigned int flags = 0) :
        binary_iarchive_impl<
            binary_wiarchive, std::wistream::char_type, std::wistream::traits_type
        >(bsb, flags)
    {}
};
} // namespace archive
} // namespace boost
// required by smart_cast for compilers not implementing
// partial template specialization
BOOST_BROKEN_COMPILER_TYPE_TRAITS_SPECIALIZATION(boost::archive::binary_wiarchive)
#endif // BOOST_NO_STD_WSTREAMBUF
#endif // BOOST_ARCHIVE_BINARY_WIARCHIVE_HPP
|
gpl-2.0
|
wassemgtk/OpenX-using-S3-and-CloudFront
|
lib/pear/Image/Graph/Grid/Polar.php
|
3443
|
<?php
/* vim: set expandtab tabstop=4 shiftwidth=4 softtabstop=4: */
/**
* Image_Graph - PEAR PHP OO Graph Rendering Utility.
*
* PHP versions 4 and 5
*
* LICENSE: This library is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation; either version 2.1 of the License, or (at your
* option) any later version. This library is distributed in the hope that it
* will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty
* of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser
* General Public License for more details. You should have received a copy of
* the GNU Lesser General Public License along with this library; if not, write
* to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
* 02111-1307 USA
*
* @category Images
* @package Image_Graph
* @subpackage Grid
* @author Jesper Veggerby <pear.nosey@veggerby.dk>
* @copyright Copyright (C) 2003, 2004 Jesper Veggerby Hansen
* @license http://www.gnu.org/copyleft/lesser.html LGPL License 2.1
* @version CVS: $Id: Polar.php 47481 2009-12-15 20:29:37Z chris.nutting $
* @link http://pear.php.net/package/Image_Graph
* @since File available since Release 0.3.0dev2
*/
/**
* Include file Image/Graph/Grid.php
*/
require_once 'Image/Graph/Grid.php';
/**
* Display a line grid on the plotarea.
*
* {@link Image_Graph_Grid}
*
* @category Images
* @package Image_Graph
* @subpackage Grid
* @author Jesper Veggerby <pear.nosey@veggerby.dk>
* @copyright Copyright (C) 2003, 2004 Jesper Veggerby Hansen
* @license http://www.gnu.org/copyleft/lesser.html LGPL License 2.1
* @version Release: 0.7.2
* @link http://pear.php.net/package/Image_Graph
* @since Class available since Release 0.3.0dev2
*/
class Image_Graph_Grid_Polar extends Image_Graph_Grid
{

    /**
     * GridLines [Constructor]
     *
     * Defaults the grid line style to 'lightgrey'.
     */
    function Image_Graph_Grid_Polar()
    {
        parent::__construct();
        $this->_lineStyle = 'lightgrey';
    }

    /**
     * Output the grid
     *
     * Draws one concentric circle (ellipse with rx == ry) per label of the
     * primary axis, centered on the plot area's minimum corner.  The radius
     * is scaled so that the axis maximum maps onto the full axis length.
     *
     * @return bool Was the output 'good' (true) or 'bad' (false).
     * @access private
     */
    function _done()
    {
        // Let the parent render first; abort if it failed.
        if (parent::_done() === false) {
            return false;
        }

        // A primary axis is required to know the label positions.
        if (!$this->_primaryAxis) {
            return false;
        }

        $this->_canvas->startGroup(get_class($this));

        $value = false;
        // Center of the polar grid: the (min, min) corner of the plot area.
        $p0 = array ('X' => '#min#', 'Y' => '#min#');
        // Full radius: the pixel length of the primary axis.
        if ($this->_primaryAxis->_type == IMAGE_GRAPH_AXIS_Y) {
            $p1 = array ('X' => '#min#', 'Y' => '#max#');
            $r0 = abs($this->_pointY($p1) - $this->_pointY($p0));
        } else {
            $p1 = array ('X' => '#max#', 'Y' => '#min#');
            $r0 = abs($this->_pointX($p1) - $this->_pointX($p0));
        }

        $cx = $this->_pointX($p0);
        $cy = $this->_pointY($p0);
        $span = $this->_primaryAxis->_axisSpan;

        // One circle per axis label, radius proportional to the value.
        while (($value = $this->_primaryAxis->_getNextLabel($value)) !== false) {
            $r = $r0 * ($value - $this->_primaryAxis->_getMinimum()) / $span;
            $this->_getLineStyle();
            $this->_canvas->ellipse(array('x' => $cx, 'y' => $cy, 'rx' => $r, 'ry' => $r));
        }

        $this->_canvas->endGroup();

        return true;
    }

}
?>
|
gpl-2.0
|
rlugojr/rekall
|
rekall-core/rekall/args.py
|
18423
|
#!/usr/bin/python
# Rekall Memory Forensics
# Copyright (C) 2012 Michael Cohen <scudette@gmail.com>
# Copyright 2013 Google Inc. All Rights Reserved.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or (at
# your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
"""This module manages the command line parsing logic.
Rekall uses the argparse module for command line parsing, however this module
contains so many bugs it might be worth to implement our own parser in future.
"""
__author__ = "Michael Cohen <scudette@gmail.com>"
import argparse
import logging
import re
import os
import sys
import zipfile
from rekall import config
from rekall import constants
from rekall import plugin
from rekall import utils
# Framework-wide options (as opposed to plugin-declared ones).  They are
# registered with the config system here and attached to the command line
# parser later via ConfigureCommandLineParser(config.OPTIONS, ...).
config.DeclareOption("--plugin", default=[], type="ArrayStringParser",
                     help="Load user provided plugin bundle.")

config.DeclareOption(
    "-h", "--help", default=False, type="Boolean",
    help="Show help about global paramters.")
class RekallHelpFormatter(argparse.RawDescriptionHelpFormatter):
def add_argument(self, action):
# Allow us to suppress an arg from the --help output for those options
# which do not make sense on the command line.
if action.dest != "SUPPRESS":
super(RekallHelpFormatter, self).add_argument(action)
class RekallArgParser(argparse.ArgumentParser):
    """An ArgumentParser bound to a Rekall session.

    When ignore_errors is set (via parse_known_args(force=True)) the
    parser becomes silent: errors, help output and exits are suppressed
    so a partial/forced parse can never terminate the process.
    """

    ignore_errors = False

    def __init__(self, session=None, **kwargs):
        kwargs["formatter_class"] = RekallHelpFormatter
        if session is None:
            raise RuntimeError("Session must be set")
        self.session = session
        super(RekallArgParser, self).__init__(**kwargs)

    def error(self, message):
        # Stay quiet during a forced parse.
        if self.ignore_errors:
            return

        # We trap this error especially since we launch the volshell.
        if message == "too few arguments":
            return

        super(RekallArgParser, self).error(message)

    def parse_known_args(self, args=None, namespace=None, force=False, **_):
        # force=True makes all subsequent error/help/exit calls no-ops.
        self.ignore_errors = force
        return super(RekallArgParser, self).parse_known_args(
            args=args, namespace=namespace)

    def print_help(self, file=None):
        if self.ignore_errors:
            return None
        return super(RekallArgParser, self).print_help(file=file)

    def exit(self, *args, **kwargs):
        if self.ignore_errors:
            return None
        return super(RekallArgParser, self).exit(*args, **kwargs)
def LoadPlugins(paths=None):
    """Load user-provided plugin bundles from the given filesystem paths.

    Each path may be a single python module (.py/.pyo/.pyc) or a .zip
    archive of modules.  Modules are imported for their side effects
    (registering plugins); import errors are logged and skipped, never
    raised.  sys.path is temporarily prepended with the containing
    directory (or the zip itself) and restored afterwards.
    """
    PYTHON_EXTENSIONS = [".py", ".pyo", ".pyc"]

    for path in paths:
        if not os.access(path, os.R_OK):
            logging.error("Unable to find %s", path)
            continue

        path = os.path.abspath(path)
        directory, filename = os.path.split(path)
        module_name, ext = os.path.splitext(filename)

        # Its a python file.
        if ext in PYTHON_EXTENSIONS:
            # Make sure python can find the file.
            sys.path.insert(0, directory)

            try:
                logging.info("Loading user plugin %s", path)
                __import__(module_name)
            except Exception as e:
                # Best effort: a broken plugin must not kill startup.
                logging.error("Error loading user plugin %s: %s", path, e)
            finally:
                # Restore sys.path even if the import blew up.
                sys.path.pop(0)

        elif ext == ".zip":
            zfile = zipfile.ZipFile(path)

            # Make sure python can find the file.
            sys.path.insert(0, path)
            try:
                logging.info("Loading user plugin archive %s", path)
                for name in zfile.namelist():
                    # Change from filename to python package name.
                    module_name, ext = os.path.splitext(name)
                    if ext in PYTHON_EXTENSIONS:
                        module_name = module_name.replace("/", ".").replace(
                            "\\", ".")

                        try:
                            __import__(module_name.strip("\\/"))
                        except Exception as e:
                            logging.error("Error loading user plugin %s: %s",
                                          path, e)
            finally:
                sys.path.pop(0)

        else:
            logging.error("Plugin %s has incorrect extension.", path)
def _TruncateARGV(argv):
    """Truncate the argv list at the first sign of a plugin name.

    At this stage we do not know which module is valid, or its options. The
    syntax of the command line is:

        rekal -x -y -z plugin_name -a -b -c

    Where -x -y -z are global options, and -a -b -c are plugin options. We
    only want to parse up to (and excluding) the plugin name.
    """
    # Collect the known plugin names once instead of rescanning the
    # registry for every argv item.
    known_plugins = set(
        plugin_cls.name for plugin_cls in plugin.Command.classes.values())

    truncated = [argv[0]]
    for arg in argv[1:]:
        if arg in known_plugins:
            break
        truncated.append(arg)

    return truncated
# Argparser stupidly matches short options for options it does not know yet
# (with parse_known_args()). This list allows us to declare placeholders to
# avoid the partial option matching behaviour in some cases.
# NOTE(review): consumers of this list are outside this chunk - verify
# before removing entries.
DISAMBIGUATE_OPTIONS = [
    "profile",
]
def ParseGlobalArgs(parser, argv, user_session):
    """Parse some session wide args which must be done before anything else.

    Registers config.OPTIONS on the parser, parses what it recognizes out
    of argv, copies the parsed values into the session state, then loads
    any --plugin bundles (which may declare further options).

    Returns:
        The (known_args, unknown_args) pair from parse_known_args().
    """
    # Register global args.
    ConfigureCommandLineParser(config.OPTIONS, parser)

    # Parse the known args.
    known_args, unknown_args = parser.parse_known_args(args=argv)

    with user_session.state as state:
        for arg, value in vars(known_args).items():
            # Argparser erroneously parses flags as str objects, but they are
            # really unicode objects.
            if isinstance(value, str):
                value = utils.SmartUnicode(value)

            # Argparse tries to interpolate defaults into the parsed data in the
            # event that the args are not present - even when calling
            # parse_known_args. Before we get to this point, the config system
            # has already set the state from the config file, so if we allow
            # argparse to set the default we would override the config file
            # (with the defaults). We solve this by never allowing argparse
            # itself to handle the defaults. We always set default=None, when
            # configuring the parser, and rely on the
            # config.MergeConfigOptions() to set the defaults.
            if value is not None:
                state.Set(arg, value)

        # Enforce the appropriate logging level if user supplies the --verbose
        # or --quiet command line flags.
        verbose_flag = getattr(known_args, "verbose", None)
        quiet_flag = getattr(known_args, "quiet", None)

        if verbose_flag and quiet_flag:
            raise ValueError("Cannot set both --verbose and --quiet!")

        if verbose_flag:
            state.Set("logging_level", "DEBUG")
        elif quiet_flag:
            state.Set("logging_level", "CRITICAL")

    # Now load the third party user plugins. These may introduce additional
    # plugins with args.
    if user_session.state.plugin:
        LoadPlugins(user_session.state.plugin)

        # External files might have introduced new plugins - rebuild the plugin
        # DB.
        user_session.plugins.plugin_db.Rebuild()

    return known_args, unknown_args
def FindPlugin(argv=None, user_session=None):
    """Search the argv for the first occurrence of a valid plugin name.

    Returns a (plugin_name, remaining_argv) pair where the plugin name has
    been removed from the argv copy.  If no known plugin appears in argv
    the plugin is assumed to be "shell" (the interactive session).

    This maintains backwards compatibility with the old global/plugin
    specific options: both

        rekal pslist -v -f foo.elf --pid 4
        rekal -v -f foo.elf pslist --pid 4

    are accepted.
    """
    remaining = list(argv)
    for position, candidate in enumerate(argv):
        if candidate in user_session.plugins.plugin_db.db:
            remaining.pop(position)
            return candidate, remaining

    # No plugin mentioned - drop into the interactive shell.
    return "shell", remaining
def ConfigureCommandLineParser(command_metadata, parser, critical=False):
    """Apply the plugin configuration to an argparse parser.

    This method is the essential glue between the abstract plugin metadata and
    argparse.

    The main intention is to de-couple the plugin's args definition from arg
    parser's specific implementation. The plugin then conveys semantic meanings
    about its arguments rather than argparse implementation specific
    details. Note that args are parsed through other mechanisms in a number of
    cases so this gives us flexibility to implement arbitrary parsing:

    - Directly provided to the plugin in the constructor.
    - Parsed from json from the web console.

    When critical=True, only args marked critical are added (these select
    the plugin implementation); otherwise only the non-critical ones are.
    """
    # This is used to allow the user to break the command line arbitrarily.
    parser.add_argument('-', dest='__dummy', action="store_true",
                        help="A do nothing arg. Useful to separate options "
                        "which take multiple args from positional. Can be "
                        "specified many times.")

    # Argument groups are cached on the parser itself so repeated calls
    # reuse them.
    try:
        groups = parser.groups
    except AttributeError:
        groups = parser.groups = {
            "None": parser.add_argument_group("Global options")
        }

    if command_metadata.plugin_cls:
        groups[command_metadata.plugin_cls.name] = parser.add_argument_group(
            "Plugin %s options" % command_metadata.plugin_cls.name)

    for name, options in command_metadata.args.iteritems():
        # We need to modify options to feed into argparse.
        options = options.copy()

        # Skip this option since it is hidden.
        if options.pop("hidden", None):
            options["help"] = argparse.SUPPRESS

        # Prevent None getting into the kwargs because it upsets argparser.
        kwargs = dict((k, v) for k, v in options.items() if v is not None)
        name = kwargs.pop("name", None) or name

        # If default is specified we assume the parameter is not required.
        # However, defaults are not passed on to argparse in most cases, and
        # instead applied separately through ApplyDefaults. For exceptions,
        # see below.
        default = kwargs.pop("default", None)
        try:
            required = kwargs.pop("required")
        except KeyError:
            required = default is None

        group_name = kwargs.pop("group", None)
        if group_name is None and command_metadata.plugin_cls:
            group_name = command_metadata.plugin_cls.name

        group = groups.get(group_name)
        if group is None:
            groups[group_name] = group = parser.add_argument_group(group_name)

        positional_args = []

        short_opt = kwargs.pop("short_opt", None)

        # A positional arg is allowed to be specified without a flag.
        if kwargs.pop("positional", None):
            positional_args.append(name)

            # If a position arg is optional we need to specify nargs=?
            if not required:
                kwargs["nargs"] = "?"

        # Otherwise argparse wants to have - in front of the arg.
        else:
            if short_opt:
                positional_args.append("-" + short_opt)

            positional_args.append("--" + name)

        arg_type = kwargs.pop("type", None)
        choices = kwargs.pop("choices", [])
        if callable(choices):
            choices = choices()

        # Map the abstract arg types onto argparse actions.
        if arg_type == "ArrayIntParser":
            kwargs["action"] = ArrayIntParser
            kwargs["nargs"] = "+" if required else "*"

        if arg_type in ["ArrayString", "ArrayStringParser"]:
            kwargs["action"] = ArrayStringParser
            kwargs["nargs"] = "+" if required else "*"

        elif arg_type == "IntParser":
            kwargs["action"] = IntParser

        elif arg_type == "Float":
            kwargs["type"] = float

        elif arg_type == "Boolean" or arg_type == "Bool":
            # Argparse will assume default False for flags and not return
            # None, which is required by ApplyDefaults to recognize an unset
            # argument. To solve this issue, we just pass the default on.
            kwargs["default"] = default
            kwargs["action"] = "store_true"

        # Multiple entries of choices (requires a choices paramter).
        elif arg_type == "ChoiceArray":
            kwargs["nargs"] = "+" if required else "*"
            kwargs["choices"] = list(choices)

        elif arg_type == "Choices":
            kwargs["choices"] = list(choices)

        # Skip option if not critical.
        critical_arg = kwargs.pop("critical", False)
        if critical and critical_arg:
            group.add_argument(*positional_args, **kwargs)
            continue

        if not (critical or critical_arg):
            group.add_argument(*positional_args, **kwargs)
def parse_args(argv=None, user_session=None, global_arg_cb=None):
    """Parse the args from the command line argv.

    Args:
      argv: The args to process.
      user_session: The session we work with.
      global_arg_cb: A callback that will be used to process global
         args. Global args are those which affect the state of the
         Rekall framework and must be processed prior to any plugin
         specific args. In essence these flags control which plugins
         can be available.

    Returns:
        A (plugin_cls, result) tuple: the selected plugin class and the
        parsed arguments as a utils.AttributeDict.

    Raises:
        plugin.PluginError: if the named plugin is not available for the
            current configuration.
    """
    if argv is None:
        argv = sys.argv[1:]

    parser = RekallArgParser(
        description=constants.BANNER,
        conflict_handler='resolve',
        add_help=True,
        session=user_session,
        epilog="When no module is provided, drops into interactive mode",
        formatter_class=RekallHelpFormatter)

    # Parse the global and critical args from the command line.
    global_flags, unknown_flags = ParseGlobalArgs(parser, argv, user_session)
    if global_arg_cb:
        global_arg_cb(global_flags, unknown_flags)

    # The plugin name is taken from the command line, but it is not enough to
    # know which specific implementation will be used. For example there are 3
    # classes implementing the pslist plugin WinPsList, LinPsList and OSXPsList.
    plugin_name, argv = FindPlugin(argv, user_session)

    # Add all critical parameters. Critical parameters are those which are
    # common to all implementations of a certain plugin and are required in
    # order to choose from these implementations. For example, the profile or
    # filename are usually used to select the specific implementation of a
    # plugin.
    for metadata in user_session.plugins.plugin_db.MetadataByName(plugin_name):
        ConfigureCommandLineParser(metadata, parser, critical=True)

    # Parse the global and critical args from the command line.
    ParseGlobalArgs(parser, argv, user_session)

    # Find the specific implementation of the plugin that applies here. For
    # example, we have 3 different pslist implementations depending on the
    # specific profile loaded.
    command_metadata = user_session.plugins.Metadata(plugin_name)
    if not command_metadata:
        raise plugin.PluginError(
            "Plugin %s is not available for this configuration" % plugin_name)

    # Configure the arg parser for this command's options.
    plugin_cls = command_metadata.plugin_cls
    ConfigureCommandLineParser(command_metadata, parser)

    # We handle help especially.
    if global_flags.help:
        parser.print_help()
        sys.exit(-1)

    # Parse the final command line.
    result = parser.parse_args(argv)

    # Apply the defaults to the parsed args.
    result = utils.AttributeDict(vars(result))
    result.pop("__dummy", None)

    command_metadata.ApplyDefaults(result)

    return plugin_cls, result
## Parser for special args.
class IntParser(argparse.Action):
    """argparse action accepting integers in decimal or 0x-hex notation,
    with an optional size suffix (k/kb, m/mb, g/gb as matched by the
    regex below)."""

    # Lower-cased suffix -> byte multiplier.
    _MULTIPLIERS = {
        "g": 1024 * 1024 * 1024,
        "gb": 1024 * 1024 * 1024,
        "m": 1024 * 1024,
        "mb": 1024 * 1024,
        "k": 1024,
        "kb": 1024,
    }

    def parse_int(self, value):
        """Convert a string like "0x1000", "64k" or "2Mb" to an int.

        Raises:
            argparse.ArgumentError: if the numeric part is not parseable.
        """
        # Support suffixes
        multiplier = 1
        m = re.search("(.*)(Mb|mb|kb|m|M|k|g|G|Gb)", value)
        if m:
            value = m.group(1)
            multiplier = self._MULTIPLIERS.get(m.group(2).lower(), 1)

        base = 16 if value.startswith("0x") else 10
        try:
            return int(value, base) * multiplier
        except ValueError:
            raise argparse.ArgumentError(self, "Invalid integer value")

    def __call__(self, parser, namespace, values, option_string=None):
        # A single string gets parsed; anything else is stored untouched.
        if isinstance(values, basestring):
            values = self.parse_int(values)

        setattr(namespace, self.dest, values)
class ArrayIntParser(IntParser):
    """Parse input as a comma separated list of integers.

    We support input in the following forms:

      --pid 1,2,3,4,5
      --pid 1 2 3 4 5
      --pid 0x1 0x2 0x3
    """

    def Validate(self, value):
        # Each token goes through the normal single-int parsing.
        return self.parse_int(value)

    def __call__(self, parser, namespace, values, option_string=None):
        if isinstance(values, basestring):
            values = [values]

        parsed = []
        for chunk in values:
            parsed.extend(self.Validate(token) for token in chunk.split(","))

        # An empty result is normalized to None.
        setattr(namespace, self.dest, parsed or None)
class ArrayStringParser(argparse.Action):
    """argparse action splitting comma separated strings into a flat list."""

    def __call__(self, parser, namespace, values, option_string=None):
        # A single string is treated like a one-element list.
        if isinstance(values, basestring):
            values = [values]

        tokens = []
        for chunk in values:
            tokens.extend(chunk.split(","))

        setattr(namespace, self.dest, tokens)
|
gpl-2.0
|
videoP/jaPRO
|
codeJK2/cgame/cg_predict.cpp
|
19807
|
/*
This file is part of Jedi Knight 2.
Jedi Knight 2 is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 2 of the License, or
(at your option) any later version.
Jedi Knight 2 is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Jedi Knight 2. If not, see <http://www.gnu.org/licenses/>.
*/
// Copyright 2001-2013 Raven Software
// cg_predict.c -- this file generates cg.predicted_player_state by either
// interpolating between snapshots from the server or locally predicting
// ahead the client's movement
#include "cg_local.h"
#include "../game/g_local.h"
#include "cg_media.h"
static pmove_t cg_pmove;
static int cg_numSolidEntities;
static centity_t *cg_solidEntities[MAX_ENTITIES_IN_SNAPSHOT];
#if MEM_DEBUG
#include "../smartheap/heapagnt.h"
#define CG_TRACE_PROFILE (0)
#endif
/*
====================
CG_BuildSolidList
When a new cg.snap has been set, this function builds a sublist
of the entities that are actually solid, to make for more
efficient collision detection
====================
*/
void CG_BuildSolidList( void )
{
	cg_numSolidEntities = 0;

	if ( !cg.snap )
	{
		return;
	}

	// Collect every snapshot entity that is both a real entity (not the
	// world) and marked solid on its game entity.
	for ( int i = 0 ; i < cg.snap->numEntities ; i++ )
	{
		const int entNum = cg.snap->entities[ i ].number;
		if ( entNum >= ENTITYNUM_WORLD )
		{
			continue;
		}

		centity_t *cent = &cg_entities[ entNum ];
		if ( cent->gent == NULL || !cent->gent->s.solid )
		{
			continue;
		}

		cg_solidEntities[cg_numSolidEntities++] = cent;
	}
}
/*
====================
CG_ClipMoveToEntities
====================
*/
// Clips the given box trace against every cached solid entity, tightening
// *tr whenever an entity is hit closer than the current result.
// FIX: the imported source had "&cent" HTML-mangled into "¢" (both in
// code and in one comment); restored here.
void CG_ClipMoveToEntities ( const vec3_t start, const vec3_t mins, const vec3_t maxs, const vec3_t end,
							int skipNumber, int mask, trace_t *tr ) {
	int			i, x, zd, zu;
	trace_t		trace;
	entityState_t	*ent;
	clipHandle_t	cmodel;
	vec3_t		bmins, bmaxs;
	vec3_t		origin, angles;
	centity_t	*cent;

	for ( i = 0 ; i < cg_numSolidEntities ; i++ ) {
		cent = cg_solidEntities[ i ];
		ent = &cent->currentState;

		if ( ent->number == skipNumber ) {
			continue;
		}

		// Triggers are not physically solid for traces.
		if ( ent->eType == ET_PUSH_TRIGGER ) {
			continue;
		}
		if ( ent->eType == ET_TELEPORT_TRIGGER ) {
			continue;
		}

		if ( ent->solid == SOLID_BMODEL ) {
			// special value for bmodel
			cmodel = cgi_CM_InlineModel( ent->modelindex );
			VectorCopy( cent->lerpAngles, angles );
			//Hmm... this would cause traces against brush movers to snap at 20fps (as with the third person camera)...
			//Let's use the lerpOrigin for now and see if it breaks anything...
			//EvaluateTrajectory( &cent->currentState.pos, cg.snap->serverTime, origin );
			VectorCopy( cent->lerpOrigin, origin );
		} else {
			// encoded bbox: x/y extent in the low byte, down/up extents
			// in the next two bytes (up is biased by 32).
			x = (ent->solid & 255);
			zd = ((ent->solid>>8) & 255);
			zu = ((ent->solid>>16) & 255) - 32;

			bmins[0] = bmins[1] = -x;
			bmaxs[0] = bmaxs[1] = x;
			bmins[2] = -zd;
			bmaxs[2] = zu;

			cmodel = cgi_CM_TempBoxModel( bmins, bmaxs );//, cent->gent->contents );
			VectorCopy( vec3_origin, angles );
			VectorCopy( cent->lerpOrigin, origin );
		}

		cgi_CM_TransformedBoxTrace ( &trace, start, end,
			mins, maxs, cmodel, mask, origin, angles);

		if (trace.allsolid || trace.fraction < tr->fraction) {
			trace.entityNum = ent->number;
			*tr = trace;
		} else if (trace.startsolid) {
			tr->startsolid = qtrue;
		}

		// Fully inside a solid - no closer hit is possible.
		if ( tr->allsolid ) {
			return;
		}
	}
}
/*
================
CG_Trace
================
*/
// Traces a box from start to end: first against the collision world,
// then against the cached solid entity list.  The result's entityNum is
// ENTITYNUM_WORLD on a world hit, ENTITYNUM_NONE on a clean miss, or the
// hit entity's number.
void CG_Trace( trace_t *result, const vec3_t start, const vec3_t mins, const vec3_t maxs, const vec3_t end,
			const int skipNumber, const int mask, const EG2_Collision eG2TraceType/*=G2_NOCOLLIDE*/, const int useLod/*=0*/) {
	trace_t	t;

#if CG_TRACE_PROFILE
#if MEM_DEBUG
	// Heap checkpoint marker so trace-time allocations can be attributed
	// when memory debugging is enabled.
	{
		int old=dbgMemSetCheckpoint(2004);
		malloc(1);
		dbgMemSetCheckpoint(old);
	}
#endif
#endif
	cgi_CM_BoxTrace ( &t, start, end, mins, maxs, 0, mask);
	t.entityNum = t.fraction != 1.0 ? ENTITYNUM_WORLD : ENTITYNUM_NONE;
	// check all other solid models
	CG_ClipMoveToEntities (start, mins, maxs, end, skipNumber, mask, &t);

	*result = t;
}
/*
================
CG_PointContents
================
*/
/*
================
CG_PointContents

Returns the OR of the world contents at 'point' plus the contents of any
solid brush-model entity (except passEntityNum) containing the point.
(Fixes the HTML-entity mangling that turned "&cent->" into "¢->".)
================
*/
int CG_PointContents( const vec3_t point, int passEntityNum ) {
	int			i;
	entityState_t	*ent;
	centity_t	*cent;
	clipHandle_t cmodel;
	int			contents;
#if CG_TRACE_PROFILE
#if MEM_DEBUG
	{
		// profiling hook: tags a tiny allocation so contents queries show
		// up in the memory-checkpoint report
		int old=dbgMemSetCheckpoint(2005);
		malloc(1);
		dbgMemSetCheckpoint(old);
	}
#endif
#endif
	contents = cgi_CM_PointContents (point, 0);
	for ( i = 0 ; i < cg_numSolidEntities ; i++ ) {
		cent = cg_solidEntities[ i ];
		ent = &cent->currentState;
		if ( ent->number == passEntityNum ) {
			continue;
		}
		if (ent->solid != SOLID_BMODEL) { // special value for bmodel
			continue;
		}
		cmodel = cgi_CM_InlineModel( ent->modelindex );
		if ( !cmodel ) {
			continue;
		}
		contents |= cgi_CM_TransformedPointContents( point, cmodel, ent->origin, ent->angles );
	}
	return contents;
}
// Forces the client's view angles -- predicted state, snapshot state, and the
// stored usercmd angles -- to 'angles', zeroing delta_angles so the engine
// does not re-apply an offset.  Skipped while looking through a viewEntity
// unless overrideViewEnt is set.
void CG_SetClientViewAngles( vec3_t angles, qboolean overrideViewEnt )
{
	if ( cg.snap->ps.viewEntity <= 0 || cg.snap->ps.viewEntity >= ENTITYNUM_WORLD || overrideViewEnt )
	{//don't clamp angles when looking through a viewEntity
		for( int i = 0; i < 3; i++ )
		{
			// FIX: copy each axis into its own slot.  The original wrote
			// every axis into viewangles[PITCH], so YAW/ROLL were never set
			// and PITCH ended up holding the ROLL component.
			cg.predicted_player_state.viewangles[i] = angles[i];
			cg.predicted_player_state.delta_angles[i] = 0;
			cg.snap->ps.viewangles[i] = angles[i];
			cg.snap->ps.delta_angles[i] = 0;
			g_entities[0].client->pers.cmd_angles[i] = ANGLE2SHORT(angles[i]);
		}
		cgi_SetUserCmdAngles( angles[PITCH], angles[YAW], angles[ROLL] );
	}
}
extern qboolean PM_AdjustAnglesToGripper( gentity_t *gent, usercmd_t *cmd );
extern qboolean PM_AdjustAngleForWallRun( gentity_t *ent, usercmd_t *ucmd, qboolean doMove );
extern qboolean PM_AdjustAnglesForSpinningFlip( gentity_t *ent, usercmd_t *ucmd, qboolean anglesOnly );
extern qboolean PM_AdjustAnglesForBackAttack( gentity_t *ent, usercmd_t *ucmd );
extern qboolean PM_AdjustAnglesForSaberLock( gentity_t *ent, usercmd_t *ucmd );
extern qboolean PM_AdjustAnglesForKnockdown( gentity_t *ent, usercmd_t *ucmd, qboolean angleClampOnly );
extern qboolean G_CheckClampUcmd( gentity_t *ent, usercmd_t *ucmd );
/*
=================
CG_CheckModifyUCmd

Gives cgame a chance to override the player's usercmd and view angles before
prediction: wipes input while controlling a viewEntity, restricts input in
vehicle flight mode, then applies the PM_Adjust* angle clamps.  'viewangles'
may be NULL when the caller only wants the cmd filtered; returns qtrue when
angles were written into 'viewangles'.
=================
*/
qboolean CG_CheckModifyUCmd( usercmd_t *cmd, vec3_t viewangles )
{
	qboolean overridAngles = qfalse;
	if ( cg.snap->ps.viewEntity > 0 && cg.snap->ps.viewEntity < ENTITYNUM_WORLD )
	{//controlling something else -- wipe all player input
		memset( cmd, 0, sizeof( usercmd_t ) );
		/*
		//to keep pointing in same dir, need to set cmd.angles
		cmd->angles[PITCH] = ANGLE2SHORT( cg.snap->ps.viewangles[PITCH] ) - cg.snap->ps.delta_angles[PITCH];
		cmd->angles[YAW] = ANGLE2SHORT( cg.snap->ps.viewangles[YAW] ) - cg.snap->ps.delta_angles[YAW];
		cmd->angles[ROLL] = 0;
		*/
		// NOTE(review): this branch dereferences viewangles unconditionally;
		// callers on this path appear to always pass a real array -- confirm.
		VectorCopy( g_entities[0].pos4, viewangles );
		overridAngles = qtrue;
		//CG_SetClientViewAngles( g_entities[cg.snap->ps.viewEntity].client->ps.viewangles, qtrue );
	}
	else if ( cg.snap->ps.vehicleModel != 0 )
	{//in vehicle flight mode
		float speed = VectorLength( cg.snap->ps.velocity );
		if ( !speed || cg.snap->ps.groundEntityNum != ENTITYNUM_NONE )
		{//stopped or on the ground: no strafing, level the pitch
			cmd->rightmove = 0;
			cmd->angles[PITCH] = 0;
			cmd->angles[YAW] = ANGLE2SHORT( cg.snap->ps.viewangles[YAW] ) - cg.snap->ps.delta_angles[YAW];
			CG_SetClientViewAngles( cg.snap->ps.viewangles, qfalse );
		}
	}
	// FIX: dropped the "&g_entities[0] &&" half of the old condition -- the
	// address of an array element can never be NULL, so it was always true
	// (and triggers -Waddress on modern compilers).
	if ( g_entities[0].client )
	{
		if ( !PM_AdjustAnglesToGripper( &g_entities[0], cmd ) )
		{
			if ( PM_AdjustAnglesForSpinningFlip( &g_entities[0], cmd, qtrue ) )
			{
				CG_SetClientViewAngles( g_entities[0].client->ps.viewangles, qfalse );
				if ( viewangles )
				{
					VectorCopy( g_entities[0].client->ps.viewangles, viewangles );
					overridAngles = qtrue;
				}
			}
		}
		else
		{
			CG_SetClientViewAngles( g_entities[0].client->ps.viewangles, qfalse );
			if ( viewangles )
			{
				VectorCopy( g_entities[0].client->ps.viewangles, viewangles );
				overridAngles = qtrue;
			}
		}
		if ( G_CheckClampUcmd( &g_entities[0], cmd ) )
		{
			CG_SetClientViewAngles( g_entities[0].client->ps.viewangles, qfalse );
			if ( viewangles )
			{
				VectorCopy( g_entities[0].client->ps.viewangles, viewangles );
				overridAngles = qtrue;
			}
		}
	}
	return overridAngles;
}
/*
Returns qtrue when the player in 'ps' is standing on a mover that is
currently in motion: a timed mover ( *_STOP trajectory ) that has not yet
finished its move, or a constant mover with a nonzero velocity.
*/
qboolean CG_OnMovingPlat( playerState_t *ps )
{
	if ( ps->groundEntityNum == ENTITYNUM_NONE )
	{//airborne -- can't be riding anything
		return qfalse;
	}

	entityState_t *es = &cg_entities[ps->groundEntityNum].currentState;

	if ( es->eType != ET_MOVER || es->pos.trType == TR_STATIONARY )
	{//not a mover, or the mover is parked
		return qfalse;
	}

	if ( es->pos.trType == TR_LINEAR_STOP || es->pos.trType == TR_NONLINEAR_STOP )
	{//timed mover: still moving only while inside its move duration
		return ( es->pos.trTime + es->pos.trDuration > cg.time ) ? qtrue : qfalse;
	}

	//constant mover: moving as long as its delta is nonzero
	return VectorCompare( vec3_origin, es->pos.trDelta ) ? qfalse : qtrue;
}
/*
========================
CG_InterpolatePlayerState
Generates cg.predicted_player_state by interpolating between
cg.snap->player_state and cg.nextFrame->player_state
========================
*/
// Builds cg.predicted_player_state by copying the current snapshot and then
// interpolating origin/velocity (and optionally viewangles) toward the next
// snapshot; finally applies optional positional smoothing, with a special
// path for standing on moving platforms.  grabAngles: take view angles from
// the latest local usercmd instead of interpolating them.
void CG_InterpolatePlayerState( qboolean grabAngles ) {
	float			f;
	int				i;
	playerState_t	*out;
	snapshot_t		*prev, *next;
	qboolean		skip = qfalse;
	vec3_t			oldOrg;
	out = &cg.predicted_player_state;
	prev = cg.snap;
	next = cg.nextSnap;
	// remember last frame's predicted origin for the smoothing passes below
	VectorCopy(out->origin,oldOrg);
	*out = cg.snap->ps;
	// if we are still allowing local input, short circuit the view angles
	if ( grabAngles ) {
		usercmd_t	cmd;
		int			cmdNum;
		cmdNum = cgi_GetCurrentCmdNumber();
		cgi_GetUserCmd( cmdNum, &cmd );
		// cgame may override the cmd/angles (viewEntity, vehicle, clamps)
		skip = CG_CheckModifyUCmd( &cmd, out->viewangles );
		if ( !skip )
		{
			//NULL so that it doesn't execute a block of code that must be run from game
			PM_UpdateViewAngles( out, &cmd, NULL );
		}
	}
	// if the next frame is a teleport, we can't lerp to it
	if ( cg.nextFrameTeleport )
	{
		return;
	}
	// only interpolate when a strictly newer next snapshot exists
	if (!( !next || next->serverTime <= prev->serverTime ) )
	{
		// fraction of the way from prev snapshot to next snapshot
		f = (float)( cg.time - prev->serverTime ) / ( next->serverTime - prev->serverTime );
		i = next->ps.bobCycle;
		if ( i < prev->ps.bobCycle )
		{
			i += 256;		// handle wraparound
		}
		out->bobCycle = prev->ps.bobCycle + f * ( i - prev->ps.bobCycle );
		for ( i = 0 ; i < 3 ; i++ )
		{
			out->origin[i] = prev->ps.origin[i] + f * (next->ps.origin[i] - prev->ps.origin[i] );
			if ( !grabAngles )
			{
				// angles lerp along the shortest arc, not linearly
				out->viewangles[i] = LerpAngle(
					prev->ps.viewangles[i], next->ps.viewangles[i], f );
			}
			out->velocity[i] = prev->ps.velocity[i] +
				f * (next->ps.velocity[i] - prev->ps.velocity[i] );
		}
	}
	// detect standing on a mover: smoothing works differently there
	bool onPlat=false;
	centity_t *pent=0;
	if (out->groundEntityNum>0)
	{
		pent=&cg_entities[out->groundEntityNum];
		if (pent->currentState.eType == ET_MOVER )
		{
			onPlat=true;
		}
	}
	if (
		cg.validPPS &&
		cg_smoothPlayerPos.value>0.0f &&
		cg_smoothPlayerPos.value<1.0f &&
		!onPlat
		)
	{
		// exponential smoothing toward last frame's origin
		// 0 = no smoothing, 1 = no movement
		for (i=0;i<3;i++)
		{
			out->origin[i]=cg_smoothPlayerPos.value*(oldOrg[i]-out->origin[i])+out->origin[i];
		}
	}
	else if (onPlat&&cg_smoothPlayerPlat.value>0.0f&&cg_smoothPlayerPlat.value<1.0f)
	{
//		if (cg.frametime<150)
//		{
		assert(pent);
		vec3_t p1,p2,vel;
		float lerpTime;
		// estimate the platform's per-frame displacement from its trajectory
		EvaluateTrajectory( &pent->currentState.pos,cg.snap->serverTime, p1 );
		if ( cg.nextSnap &&cg.nextSnap->serverTime > cg.snap->serverTime)
		{
			EvaluateTrajectory( &pent->nextState.pos,cg.nextSnap->serverTime, p2 );
			lerpTime=float(cg.nextSnap->serverTime - cg.snap->serverTime);
		}
		else
		{
			// no usable next snapshot: extrapolate 50ms ahead instead
			EvaluateTrajectory( &pent->currentState.pos,cg.snap->serverTime+50, p2 );
			lerpTime=50.0f;
		}
		// scale by frame time, clamped so a hitch can't fling the player
		float accel=cg_smoothPlayerPlatAccel.value*cg.frametime/lerpTime;
		if (accel>20.0f)
		{
			accel=20.0f;
		}
		for (i=0;i<3;i++)
		{
			vel[i]=accel*(p2[i]-p1[i]);
		}
		// carry the player along with the platform, then smooth as above
		VectorAdd(out->origin,vel,out->origin);
		if (cg.validPPS &&
			cg_smoothPlayerPlat.value>0.0f &&
			cg_smoothPlayerPlat.value<1.0f
			)
		{
			// 0 = no smoothing, 1 = no movement
			for (i=0;i<3;i++)
			{
				out->origin[i]=cg_smoothPlayerPlat.value*(oldOrg[i]-out->origin[i])+out->origin[i];
			}
		}
//		}
	}
}
/*
===================
CG_TouchItem
===================
*/
/*
===================
CG_TouchItem

Predicted item pickup: when the predicted player overlaps a grabbable item,
locally add the pickup event, hide the item, and pre-grant predicted ammo so
weapon autoswitch responds without waiting for the server.  (Fixes the
HTML-entity mangling that turned "&cent->" into "¢->".)
===================
*/
void CG_TouchItem( centity_t *cent ) {
	gitem_t		*item;
	// never pick an item up twice in a prediction
	if ( cent->miscTime == cg.time ) {
		return;
	}
	if ( !BG_PlayerTouchesItem( &cg.predicted_player_state, &cent->currentState, cg.time ) ) {
		return;
	}
	if ( !BG_CanItemBeGrabbed( &cent->currentState, &cg.predicted_player_state ) ) {
		return;		// can't hold it
	}
	item = &bg_itemlist[ cent->currentState.modelindex ];
	// grab it
	AddEventToPlayerstate( EV_ITEM_PICKUP, cent->currentState.modelindex , &cg.predicted_player_state);
	// remove it from the frame so it won't be drawn
	cent->currentState.eFlags |= EF_NODRAW;
	// don't touch it again this prediction
	cent->miscTime = cg.time;
	// if its a weapon, give them some predicted ammo so the autoswitch will work
	if ( item->giType == IT_WEAPON ) {
		int ammotype = weaponData[item->giTag].ammoIndex;
		cg.predicted_player_state.stats[ STAT_WEAPONS ] |= 1 << item->giTag;
		if ( !cg.predicted_player_state.ammo[ ammotype] ) {
			cg.predicted_player_state.ammo[ ammotype ] = 1;
		}
	}
}
/*
=========================
CG_TouchTriggerPrediction
Predict push triggers and items
Only called for the last command
=========================
*/
/*
=========================
CG_TouchTriggerPrediction

Predict push triggers and items.  Only called for the last command.
(Fixes the HTML-entity mangling that turned "&cent->" into "¢->".)
=========================
*/
void CG_TouchTriggerPrediction( void ) {
	int			i;
	trace_t		trace;
	entityState_t	*ent;
	clipHandle_t cmodel;
	centity_t	*cent;
	qboolean	spectator;
	// dead clients don't activate triggers
	if ( cg.predicted_player_state.stats[STAT_HEALTH] <= 0 ) {
		return;
	}
	spectator = ( cg.predicted_player_state.pm_type == PM_SPECTATOR );
	if ( cg.predicted_player_state.pm_type != PM_NORMAL && !spectator ) {
		return;
	}
	for ( i = 0 ; i < cg.snap->numEntities ; i++ ) {
		cent = &cg_entities[ cg.snap->entities[ i ].number ];
		ent = &cent->currentState;
		if ( ent->eType == ET_ITEM && !spectator ) {
			CG_TouchItem( cent );
			continue;
		}
		if ( ent->eType != ET_PUSH_TRIGGER && ent->eType != ET_TELEPORT_TRIGGER ) {
			continue;
		}
		if ( ent->solid != SOLID_BMODEL ) {
			continue;
		}
		cmodel = cgi_CM_InlineModel( ent->modelindex );
		if ( !cmodel ) {
			continue;
		}
		// zero-length trace: are we inside the trigger volume right now?
		cgi_CM_BoxTrace( &trace, cg.predicted_player_state.origin, cg.predicted_player_state.origin,
			cg_pmove.mins, cg_pmove.maxs, cmodel, -1 );
		if ( !trace.startsolid ) {
			continue;
		}
		if ( ent->eType == ET_TELEPORT_TRIGGER ) {
			cg.hyperspace = qtrue;
		} else {
			// we hit this push trigger
			if ( spectator ) {
				continue;
			}
			VectorCopy( ent->origin2, cg.predicted_player_state.velocity );
		}
	}
}
/*
=================
CG_PredictPlayerState
Generates cg.predicted_player_state for the current cg.time
cg.predicted_player_state is guaranteed to be valid after exiting.
For normal gameplay, it will be the result of predicted usercmd_t on
top of the most recent playerState_t received from the server.
Each new refdef will usually have exactly one new usercmd over the last,
but we have to simulate all unacknowledged commands since the last snapshot
received. This means that on an internet connection, quite a few
pmoves may be issued each frame.
OPTIMIZE: don't re-simulate unless the newly arrived snapshot playerState_t
differs from the predicted one.
We detect prediction errors and allow them to be decayed off over several frames
to ease the jerk.
=================
*/
extern qboolean player_locked;
// Generates cg.predicted_player_state for the current cg.time.  In this
// build the "if ( 1 )" below always takes the interpolation path, so the
// full client-side prediction loop underneath is dead code kept for
// reference.
void CG_PredictPlayerState( void ) {
	int			cmdNum, current;
	playerState_t	oldPlayerState;
	cg.hyperspace = qfalse;	// will be set if touching a trigger_teleport
	// if this is the first frame we must guarantee
	// predicted_player_state is valid even if there is some
	// other error condition
	if ( !cg.validPPS ) {
		cg.validPPS = qtrue;
		cg.predicted_player_state = cg.snap->ps;
	}
	if ( 1 )//cg_timescale.value >= 1.0f )
	{
		// demo playback just copies the moves
		/*
		if ( (cg.snap->ps.pm_flags & PMF_FOLLOW) ) {
			CG_InterpolatePlayerState( qfalse );
			return;
		}
		*/
		// non-predicting local movement will grab the latest angles
		CG_InterpolatePlayerState( qtrue );
		return;
	}
	// ---- everything below is currently unreachable (see "if ( 1 )") ----
	// prepare for pmove
	//FIXME: is this bad???
	cg_pmove.gent = NULL;
	cg_pmove.ps = &cg.predicted_player_state;
	cg_pmove.trace = CG_Trace;
	cg_pmove.pointcontents = CG_PointContents;
	cg_pmove.tracemask = MASK_PLAYERSOLID;
	cg_pmove.noFootsteps = 0;//( cgs.dmflags & DF_NO_FOOTSTEPS ) > 0;
	// save the state before the pmove so we can detect transitions
	oldPlayerState = cg.predicted_player_state;
	// if we are too far out of date, just freeze
	cmdNum = cg.snap->cmdNum;
	current = cgi_GetCurrentCmdNumber();
	if ( current - cmdNum >= CMD_BACKUP ) {
		return;
	}
	// get the most recent information we have
	cg.predicted_player_state = cg.snap->ps;
	// we should always be predicting at least one frame
	if ( cmdNum >= current ) {
		return;
	}
	// run cmds
	do {
		// check for a prediction error from last frame
		// on a lan, this will often be the exact value
		// from the snapshot, but on a wan we will have
		// to predict several commands to get to the point
		// we want to compare
		if ( cmdNum == current - 1 ) {
			vec3_t	delta;
			float	len;
			if ( cg.thisFrameTeleport ) {
				// a teleport will not cause an error decay
				VectorClear( cg.predictedError );
				cg.thisFrameTeleport = qfalse;
			} else {
				vec3_t	adjusted;
				// compare against the old prediction, corrected for any
				// mover the player was riding
				CG_AdjustPositionForMover( cg.predicted_player_state.origin,
					cg.predicted_player_state.groundEntityNum, cg.oldTime, adjusted );
				VectorSubtract( oldPlayerState.origin, adjusted, delta );
				len = VectorLength( delta );
				if ( len > 0.1 ) {
					if ( cg_errorDecay.integer ) {
						// decay the accumulated error over cg_errorDecay ms
						int		t;
						float	f;
						t = cg.time - cg.predictedErrorTime;
						f = ( cg_errorDecay.value - t ) / cg_errorDecay.value;
						if ( f < 0 ) {
							f = 0;
						}
						VectorScale( cg.predictedError, f, cg.predictedError );
					} else {
						VectorClear( cg.predictedError );
					}
					VectorAdd( delta, cg.predictedError, cg.predictedError );
					cg.predictedErrorTime = cg.oldTime;
				}
			}
		}
		// if the command can't be gotten because it is
		// too far out of date, the frame is invalid
		// this should never happen, because we check ranges at
		// the top of the function
		cmdNum++;
		if ( !cgi_GetUserCmd( cmdNum, &cg_pmove.cmd ) ) {
			break;
		}
		gentity_t *ent = &g_entities[0];//cheating and dirty, I know, but this is a SP game so prediction can cheat
		if ( player_locked ||
			(ent && !ent->s.number&&ent->aimDebounceTime>level.time) ||
			(ent && ent->client && ent->client->ps.pm_time && (ent->client->ps.pm_flags&PMF_TIME_KNOCKBACK)) ||
			(ent && ent->forcePushTime > level.time) )
		{//lock out player control unless dead
			//VectorClear( cg_pmove.cmd.angles );
			cg_pmove.cmd.forwardmove = 0;
			cg_pmove.cmd.rightmove = 0;
			cg_pmove.cmd.buttons = 0;
			cg_pmove.cmd.upmove = 0;
		}
		CG_CheckModifyUCmd( &cg_pmove.cmd, NULL );
		//FIXME: prediction on clients in timescale results in jerky positional translation
		Pmove( &cg_pmove );
		// add push trigger movement effects
		CG_TouchTriggerPrediction();
	} while ( cmdNum < current );
	// adjust for the movement of the groundentity
	CG_AdjustPositionForMover( cg.predicted_player_state.origin,
		cg.predicted_player_state.groundEntityNum,
		cg.time, cg.predicted_player_state.origin );
	// fire events and other transition triggered things
	CG_TransitionPlayerState( &cg.predicted_player_state, &oldPlayerState );
}
|
gpl-2.0
|
devBPI/src_libraryfind
|
app/controllers/query_controller.rb
|
3276
|
# LibraryFind - Quality find done better.
# Copyright (C) 2007 Oregon State University
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation; either version 2 of the License, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 59 Temple
# Place, Suite 330, Boston, MA 02111-1307 USA
#
# Questions or comments on this program may be addressed to:
#
# LibraryFind
# 121 The Valley Library
# Corvallis OR 97331-4501
#
# http://libraryfind.org
# Thin RPC facade over MetaSearch: every public action builds a fresh
# MetaSearch instance and delegates straight to the matching call.
class QueryController < ApplicationController
  # web_service_dispatching_mode :direct
  # web_service_api QueryApi
  # wsdl_service_name 'query'
  # web_service_scaffold :invoke

  def get_id(_id)
    metasearch.GetId(_id)
  end

  def simple_search(_sets, _qtype, _arg, _start, _max)
    metasearch.Search(_sets, [_qtype], [_arg], _start, _max, nil, nil, nil, true)
  end

  def search_async(_sets, _qtype, _arg, _start, _max)
    metasearch.SearchAsync(_sets, _qtype, _arg, _start, _max)
  end

  def simple_search_async(_sets, _qtype, _arg, _start, _max)
    metasearch.SearchAsync(_sets, [_qtype], [_arg], _start, _max)
  end

  def search(_sets, _qtype, _arg, _start, _max)
    metasearch.Search(_sets, _qtype, _arg, _start, _max, nil, nil, nil, true)
  end

  def search_ex(_sets, _qtype, _arg, _start, _max, session_id, action_type, data)
    # NOTE(review): 'data' is accepted but not forwarded (nil is passed in
    # its position), matching the original behavior -- confirm intent.
    metasearch.Search(_sets, _qtype, _arg, _start, _max, session_id, action_type, nil, true)
  end

  def list_collections
    metasearch.ListCollections()
  end

  def list_groups
    metasearch.ListGroups()
  end

  def list_alpha
    metasearch.ListAlpha()
  end

  def get_group_members(name)
    metasearch.GetGroupMembers(name)
  end

  def check_job_status(_id)
    metasearch.CheckJobStatus(_id)
  end

  def check_jobs_status(_ids)
    metasearch.CheckJobStatus(_ids)
  end

  def kill_thread(_job_id, _thread_id)
    metasearch.KillThread(_job_id, _thread_id)
  end

  def get_job_record(_id, _max)
    metasearch.GetJobRecord(_id, _max)
  end

  def get_jobs_records(_ids, _max)
    metasearch.GetJobsRecords(_ids, _max)
  end

  private

  # Every request works against its own MetaSearch instance.
  def metasearch
    MetaSearch.new
  end
end
|
gpl-2.0
|
windskyer/nova
|
nova/tests/unit/db/test_db_api.py
|
413946
|
# encoding=UTF8
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Unit tests for the DB API."""
import copy
import datetime
import uuid as stdlib_uuid
import iso8601
import mock
import netaddr
from oslo_config import cfg
from oslo_db import api as oslo_db_api
from oslo_db import exception as db_exc
from oslo_db.sqlalchemy import test_base
from oslo_db.sqlalchemy import update_match
from oslo_db.sqlalchemy import utils as sqlalchemyutils
from oslo_serialization import jsonutils
from oslo_utils import timeutils
from oslo_utils import uuidutils
import six
from six.moves import range
from sqlalchemy import Column
from sqlalchemy.dialects import sqlite
from sqlalchemy.exc import OperationalError
from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy import inspect
from sqlalchemy import Integer
from sqlalchemy import MetaData
from sqlalchemy.orm import query
from sqlalchemy import sql
from sqlalchemy import Table
from nova import block_device
from nova.compute import arch
from nova.compute import task_states
from nova.compute import vm_states
from nova import context
from nova import db
from nova.db.sqlalchemy import api as sqlalchemy_api
from nova.db.sqlalchemy import models
from nova.db.sqlalchemy import types as col_types
from nova.db.sqlalchemy import utils as db_utils
from nova import exception
from nova import objects
from nova.objects import fields
from nova import quota
from nova import test
from nova.tests.unit import matchers
from nova import utils
CONF = cfg.CONF
CONF.import_opt('reserved_host_memory_mb', 'nova.compute.resource_tracker')
CONF.import_opt('reserved_host_disk_mb', 'nova.compute.resource_tracker')
get_engine = sqlalchemy_api.get_engine
get_session = sqlalchemy_api.get_session
def _reservation_get(context, uuid):
    """Fetch a single non-deleted Reservation row by uuid.

    :raises: exception.ReservationNotFound if no row matches.
    """
    reservation = (sqlalchemy_api.model_query(context, models.Reservation,
                                              read_deleted="no")
                   .filter_by(uuid=uuid)
                   .first())
    if not reservation:
        raise exception.ReservationNotFound(uuid=uuid)
    return reservation
def _quota_reserve(context, project_id, user_id):
    """Create sample Quota, QuotaUsage and Reservation objects.

    There is no method db.quota_usage_create(), so we have to use
    db.quota_reserve() for creating QuotaUsage objects.

    Returns reservations uuids.
    """
    def get_sync(resource, usage):
        # Factory so each fake sync closure captures its own
        # resource/usage pair.
        def sync(elevated, project_id, user_id, session):
            return {resource: usage}
        return sync
    quotas = {}
    user_quotas = {}
    resources = {}
    deltas = {}
    for i in range(3):
        resource = 'resource%d' % i
        if i == 2:
            # test for project level resources
            resource = 'fixed_ips'
            quotas[resource] = db.quota_create(context,
                                               project_id,
                                               resource, i + 2).hard_limit
            user_quotas[resource] = quotas[resource]
        else:
            quotas[resource] = db.quota_create(context,
                                               project_id,
                                               resource, i + 1).hard_limit
            user_quotas[resource] = db.quota_create(context, project_id,
                                                    resource, i + 1,
                                                    user_id=user_id).hard_limit
        sync_name = '_sync_%s' % resource
        resources[resource] = quota.ReservableResource(
            resource, sync_name, 'quota_res_%d' % i)
        deltas[resource] = i
        # Register the fake sync function on the module (and in the sync
        # registry) so quota_reserve can resolve it by name.
        setattr(sqlalchemy_api, sync_name, get_sync(resource, i))
        sqlalchemy_api.QUOTA_SYNC_FUNCTIONS[sync_name] = getattr(
            sqlalchemy_api, sync_name)
    return db.quota_reserve(context, resources, quotas, user_quotas, deltas,
                            timeutils.utcnow(), CONF.until_refresh,
                            datetime.timedelta(days=1), project_id, user_id)
class DbTestCase(test.TestCase):
    """Base class for DB API tests.

    Provides a fake request context plus helpers for creating instances
    and attaching metadata to them.
    """

    def setUp(self):
        super(DbTestCase, self).setUp()
        self.user_id = 'fake'
        self.project_id = 'fake'
        self.context = context.RequestContext(self.user_id, self.project_id)

    def create_instance_with_args(self, **kwargs):
        """Create an instance, merging kwargs over sensible defaults.

        A 'context' kwarg, when supplied, is used for the create call and
        also determines the project_id.
        """
        values = {'reservation_id': 'a', 'image_ref': 1, 'host': 'host1',
                  'node': 'node1', 'project_id': self.project_id,
                  'vm_state': 'fake'}
        if 'context' in kwargs:
            ctxt = kwargs.pop('context')
            values['project_id'] = ctxt.project_id
        else:
            ctxt = self.context
        values.update(kwargs)
        return db.instance_create(ctxt, values)

    def fake_metadata(self, content):
        """Build ten 'fooN' -> descriptive-string metadata entries."""
        return {"foo%i" % i: "this is %s item %i" % (content, i)
                for i in range(0, 10)}

    def create_metadata_for_instance(self, instance_uuid):
        """Attach fake user and system metadata to the given instance."""
        meta = self.fake_metadata('metadata')
        db.instance_metadata_update(self.context, instance_uuid, meta, False)
        sys_meta = self.fake_metadata('system_metadata')
        db.instance_system_metadata_update(self.context, instance_uuid,
                                           sys_meta, False)
        return meta, sys_meta
class DecoratorTestCase(test.TestCase):
    """Verifies that DB API decorators preserve function metadata."""

    def _test_decorator_wraps_helper(self, decorator):
        def test_func():
            """Test docstring."""

        wrapped = decorator(test_func)
        self.assertEqual(test_func.__name__, wrapped.__name__)
        self.assertEqual(test_func.__doc__, wrapped.__doc__)
        self.assertEqual(test_func.__module__, wrapped.__module__)

    def test_require_context_decorator_wraps_functions_properly(self):
        self._test_decorator_wraps_helper(sqlalchemy_api.require_context)

    def test_require_deadlock_retry_wraps_functions_properly(self):
        self._test_decorator_wraps_helper(
            oslo_db_api.wrap_db_retry(max_retries=5, retry_on_deadlock=True))
def _get_fake_aggr_values():
return {'name': 'fake_aggregate'}
def _get_fake_aggr_metadata():
return {'fake_key1': 'fake_value1',
'fake_key2': 'fake_value2',
'availability_zone': 'fake_avail_zone'}
def _get_fake_aggr_hosts():
return ['foo.openstack.org']
def _create_aggregate(context=None, values=None, metadata=None):
    """Create a test aggregate with optional overrides.

    FIX: the original signature evaluated its defaults
    (``context.get_admin_context()`` and the fake dict factories) once at
    import time, sharing a single admin context and mutable dicts across
    every call.  Defaults are now resolved per call via None sentinels,
    which is backward-compatible for all keyword callers.
    """
    if context is None:
        # local alias: the 'context' parameter shadows the module
        from nova import context as context_module
        context = context_module.get_admin_context()
    if values is None:
        values = _get_fake_aggr_values()
    if metadata is None:
        metadata = _get_fake_aggr_metadata()
    return db.aggregate_create(context, values, metadata)
def _create_aggregate_with_hosts(context=None, values=None,
                                 metadata=None, hosts=None):
    """Create a test aggregate and add the given hosts to it.

    FIX: the original signature evaluated its defaults (admin context,
    fake dicts, host list) once at import time, sharing mutable state
    across calls.  Defaults are now resolved per call via None sentinels;
    behavior is otherwise unchanged.
    """
    if context is None:
        # local alias: the 'context' parameter shadows the module
        from nova import context as context_module
        context = context_module.get_admin_context()
    if values is None:
        values = _get_fake_aggr_values()
    if metadata is None:
        metadata = _get_fake_aggr_metadata()
    if hosts is None:
        hosts = _get_fake_aggr_hosts()
    result = _create_aggregate(context=context,
                               values=values, metadata=metadata)
    for host in hosts:
        db.aggregate_host_add(context, result['id'], host)
    return result
@mock.patch.object(sqlalchemy_api, '_get_regexp_op_for_connection',
                   return_value='LIKE')
class UnsupportedDbRegexpTestCase(DbTestCase):
    """Exercises instance listing/sorting/pagination with the regexp
    operator forced down to SQL LIKE, i.e. the fallback path for
    databases without native REGEXP support."""

    def test_instance_get_all_by_filters_paginate(self, mock_get_regexp):
        # Three matching instances; paging past each marker shrinks the
        # result set by one, and an unknown marker raises MarkerNotFound.
        test1 = self.create_instance_with_args(display_name='test1')
        test2 = self.create_instance_with_args(display_name='test2')
        test3 = self.create_instance_with_args(display_name='test3')
        result = db.instance_get_all_by_filters(self.context,
                                                {'display_name': '%test%'},
                                                marker=None)
        self.assertEqual(3, len(result))
        result = db.instance_get_all_by_filters(self.context,
                                                {'display_name': '%test%'},
                                                sort_dir="asc",
                                                marker=test1['uuid'])
        self.assertEqual(2, len(result))
        result = db.instance_get_all_by_filters(self.context,
                                                {'display_name': '%test%'},
                                                sort_dir="asc",
                                                marker=test2['uuid'])
        self.assertEqual(1, len(result))
        result = db.instance_get_all_by_filters(self.context,
                                                {'display_name': '%test%'},
                                                sort_dir="asc",
                                                marker=test3['uuid'])
        self.assertEqual(0, len(result))
        self.assertRaises(exception.MarkerNotFound,
                          db.instance_get_all_by_filters,
                          self.context, {'display_name': '%test%'},
                          marker=str(stdlib_uuid.uuid4()))

    def _assert_equals_inst_order(self, correct_order, filters,
                                  sort_keys=None, sort_dirs=None,
                                  limit=None, marker=None,
                                  match_keys=['uuid', 'vm_state',
                                              'display_name', 'id']):
        # NOTE: the mutable default for match_keys is shared across calls
        # but only ever read, so it is harmless here.
        '''Retrieves instances based on the given filters and sorting
        information and verifies that the instances are returned in the
        correct sorted order by ensuring that the supplied keys match.
        '''
        result = db.instance_get_all_by_filters_sort(
            self.context, filters, limit=limit, marker=marker,
            sort_keys=sort_keys, sort_dirs=sort_dirs)
        self.assertEqual(len(correct_order), len(result))
        for inst1, inst2 in zip(result, correct_order):
            for key in match_keys:
                self.assertEqual(inst1.get(key), inst2.get(key))
        return result

    def test_instance_get_all_by_filters_sort_keys(self, mock_get_regexp):
        '''Verifies sort order and direction for multiple instances.'''
        # Instances that will reply to the query
        test1_active = self.create_instance_with_args(
            display_name='test1',
            vm_state=vm_states.ACTIVE)
        test1_error = self.create_instance_with_args(
            display_name='test1',
            vm_state=vm_states.ERROR)
        test1_error2 = self.create_instance_with_args(
            display_name='test1',
            vm_state=vm_states.ERROR)
        test2_active = self.create_instance_with_args(
            display_name='test2',
            vm_state=vm_states.ACTIVE)
        test2_error = self.create_instance_with_args(
            display_name='test2',
            vm_state=vm_states.ERROR)
        test2_error2 = self.create_instance_with_args(
            display_name='test2',
            vm_state=vm_states.ERROR)
        # Other instances in the DB, will not match name filter
        other_error = self.create_instance_with_args(
            display_name='other',
            vm_state=vm_states.ERROR)
        other_active = self.create_instance_with_args(
            display_name='other',
            vm_state=vm_states.ACTIVE)
        filters = {'display_name': '%test%'}
        # Verify different sort key/direction combinations
        sort_keys = ['display_name', 'vm_state', 'created_at']
        sort_dirs = ['asc', 'asc', 'asc']
        correct_order = [test1_active, test1_error, test1_error2,
                         test2_active, test2_error, test2_error2]
        self._assert_equals_inst_order(correct_order, filters,
                                       sort_keys=sort_keys,
                                       sort_dirs=sort_dirs)
        sort_dirs = ['asc', 'desc', 'asc']
        correct_order = [test1_error, test1_error2, test1_active,
                         test2_error, test2_error2, test2_active]
        self._assert_equals_inst_order(correct_order, filters,
                                       sort_keys=sort_keys,
                                       sort_dirs=sort_dirs)
        sort_dirs = ['desc', 'desc', 'asc']
        correct_order = [test2_error, test2_error2, test2_active,
                         test1_error, test1_error2, test1_active]
        self._assert_equals_inst_order(correct_order, filters,
                                       sort_keys=sort_keys,
                                       sort_dirs=sort_dirs)
        # created_at is added by default if not supplied, descending order
        sort_keys = ['display_name', 'vm_state']
        sort_dirs = ['desc', 'desc']
        correct_order = [test2_error2, test2_error, test2_active,
                         test1_error2, test1_error, test1_active]
        self._assert_equals_inst_order(correct_order, filters,
                                       sort_keys=sort_keys,
                                       sort_dirs=sort_dirs)
        # Now created_at should be in ascending order (defaults to the first
        # sort dir direction)
        sort_dirs = ['asc', 'asc']
        correct_order = [test1_active, test1_error, test1_error2,
                         test2_active, test2_error, test2_error2]
        self._assert_equals_inst_order(correct_order, filters,
                                       sort_keys=sort_keys,
                                       sort_dirs=sort_dirs)
        # Remove name filter, get all instances
        correct_order = [other_active, other_error,
                         test1_active, test1_error, test1_error2,
                         test2_active, test2_error, test2_error2]
        self._assert_equals_inst_order(correct_order, {},
                                       sort_keys=sort_keys,
                                       sort_dirs=sort_dirs)
        # Default sorting, 'created_at' then 'id' in desc order
        correct_order = [other_active, other_error,
                         test2_error2, test2_error, test2_active,
                         test1_error2, test1_error, test1_active]
        self._assert_equals_inst_order(correct_order, {})

    def test_instance_get_all_by_filters_sort_keys_paginate(self,
                                                            mock_get_regexp):
        '''Verifies sort order with pagination.'''
        # Instances that will reply to the query
        test1_active = self.create_instance_with_args(
            display_name='test1',
            vm_state=vm_states.ACTIVE)
        test1_error = self.create_instance_with_args(
            display_name='test1',
            vm_state=vm_states.ERROR)
        test1_error2 = self.create_instance_with_args(
            display_name='test1',
            vm_state=vm_states.ERROR)
        test2_active = self.create_instance_with_args(
            display_name='test2',
            vm_state=vm_states.ACTIVE)
        test2_error = self.create_instance_with_args(
            display_name='test2',
            vm_state=vm_states.ERROR)
        test2_error2 = self.create_instance_with_args(
            display_name='test2',
            vm_state=vm_states.ERROR)
        # Other instances in the DB, will not match name filter
        self.create_instance_with_args(display_name='other')
        self.create_instance_with_args(display_name='other')
        filters = {'display_name': '%test%'}
        # Common sort information for every query
        sort_keys = ['display_name', 'vm_state', 'created_at']
        sort_dirs = ['asc', 'desc', 'asc']
        # Overall correct instance order based on the sort keys
        correct_order = [test1_error, test1_error2, test1_active,
                         test2_error, test2_error2, test2_active]
        # Limits of 1, 2, and 3, verify that the instances returned are in the
        # correct sorted order, update the marker to get the next correct page
        for limit in range(1, 4):
            marker = None
            # Include the maximum number of instances (ie, 6) to ensure that
            # the last query (with marker pointing to the last instance)
            # returns 0 servers
            for i in range(0, 7, limit):
                if i == len(correct_order):
                    correct = []
                else:
                    correct = correct_order[i:i + limit]
                insts = self._assert_equals_inst_order(
                    correct, filters,
                    sort_keys=sort_keys, sort_dirs=sort_dirs,
                    limit=limit, marker=marker)
                if correct:
                    marker = insts[-1]['uuid']
                    self.assertEqual(correct[-1]['uuid'], marker)

    def test_instance_get_deleted_by_filters_sort_keys_paginate(self,
                                                                mock_get_regexp):
        '''Verifies sort order with pagination for deleted instances.'''
        ctxt = context.get_admin_context()
        # Instances that will reply to the query
        test1_active = self.create_instance_with_args(
            display_name='test1',
            vm_state=vm_states.ACTIVE)
        db.instance_destroy(ctxt, test1_active['uuid'])
        test1_error = self.create_instance_with_args(
            display_name='test1',
            vm_state=vm_states.ERROR)
        db.instance_destroy(ctxt, test1_error['uuid'])
        test1_error2 = self.create_instance_with_args(
            display_name='test1',
            vm_state=vm_states.ERROR)
        db.instance_destroy(ctxt, test1_error2['uuid'])
        test2_active = self.create_instance_with_args(
            display_name='test2',
            vm_state=vm_states.ACTIVE)
        db.instance_destroy(ctxt, test2_active['uuid'])
        test2_error = self.create_instance_with_args(
            display_name='test2',
            vm_state=vm_states.ERROR)
        db.instance_destroy(ctxt, test2_error['uuid'])
        test2_error2 = self.create_instance_with_args(
            display_name='test2',
            vm_state=vm_states.ERROR)
        db.instance_destroy(ctxt, test2_error2['uuid'])
        # Other instances in the DB, will not match name filter
        self.create_instance_with_args(display_name='other')
        self.create_instance_with_args(display_name='other')
        filters = {'display_name': '%test%', 'deleted': True}
        # Common sort information for every query
        sort_keys = ['display_name', 'vm_state', 'created_at']
        sort_dirs = ['asc', 'desc', 'asc']
        # Overall correct instance order based on the sort keys
        correct_order = [test1_error, test1_error2, test1_active,
                         test2_error, test2_error2, test2_active]
        # Limits of 1, 2, and 3, verify that the instances returned are in the
        # correct sorted order, update the marker to get the next correct page
        for limit in range(1, 4):
            marker = None
            # Include the maximum number of instances (ie, 6) to ensure that
            # the last query (with marker pointing to the last instance)
            # returns 0 servers
            for i in range(0, 7, limit):
                if i == len(correct_order):
                    correct = []
                else:
                    correct = correct_order[i:i + limit]
                insts = self._assert_equals_inst_order(
                    correct, filters,
                    sort_keys=sort_keys, sort_dirs=sort_dirs,
                    limit=limit, marker=marker)
                if correct:
                    marker = insts[-1]['uuid']
                    self.assertEqual(correct[-1]['uuid'], marker)
class ModelQueryTestCase(DbTestCase):
    """Tests for sqlalchemy_api.model_query: argument validation and how it
    chooses a session (slave vs. master, explicit session, context session).
    """
    def test_model_query_invalid_arguments(self):
        """Invalid read_deleted values and non-model arguments must raise."""
        # read_deleted shouldn't accept invalid values
        self.assertRaises(ValueError, sqlalchemy_api.model_query,
                          self.context, models.Instance, read_deleted=False)
        self.assertRaises(ValueError, sqlalchemy_api.model_query,
                          self.context, models.Instance, read_deleted="foo")
        # Check model is a valid model
        self.assertRaises(TypeError, sqlalchemy_api.model_query,
                          self.context, "")
    @mock.patch.object(sqlalchemy_api, 'get_session')
    def test_model_query_use_slave_false(self, mock_get_session):
        """use_slave=False is forwarded verbatim to get_session()."""
        sqlalchemy_api.model_query(self.context, models.Instance,
                                   use_slave=False)
        mock_get_session.assert_called_once_with(use_slave=False)
    @mock.patch.object(sqlalchemy_api, 'get_session')
    def test_model_query_use_slave_no_slave_connection(self, mock_get_session):
        """use_slave=True degrades to the master when no slave is configured."""
        self.flags(slave_connection='', group='database')
        sqlalchemy_api.model_query(self.context, models.Instance,
                                   use_slave=True)
        mock_get_session.assert_called_once_with(use_slave=False)
    @mock.patch.object(sqlalchemy_api, 'get_session')
    def test_model_query_use_slave_true(self, mock_get_session):
        """use_slave=True is honoured when a slave connection is configured."""
        self.flags(slave_connection='foo://bar', group='database')
        sqlalchemy_api.model_query(self.context, models.Instance,
                                   use_slave=True)
        mock_get_session.assert_called_once_with(use_slave=True)
    @mock.patch.object(sqlalchemy_api, 'get_session')
    def test_model_query_lazy_session_default(self, mock_get_session):
        """Passing an explicit session suppresses the get_session() call."""
        sqlalchemy_api.model_query(self.context, models.Instance,
                                   session=mock.MagicMock())
        self.assertFalse(mock_get_session.called)
    @mock.patch.object(sqlalchemy_api, 'get_session')
    @mock.patch.object(sqlalchemyutils, 'model_query')
    def test_model_query_use_context_session(self, mock_model_query,
                                             mock_get_session):
        """Inside a @reader block, model_query reuses the context's session
        instead of opening a new one via get_session().
        """
        @sqlalchemy_api.main_context_manager.reader
        def fake_method(context):
            session = context.session
            sqlalchemy_api.model_query(context, models.Instance)
            return session
        session = fake_method(self.context)
        # No new session was created; the context session was handed through.
        self.assertFalse(mock_get_session.called)
        mock_model_query.assert_called_once_with(models.Instance, session,
                                                 None, deleted=False)
class EngineFacadeTestCase(DbTestCase):
    """Tests that nested @main_context_manager.writer/.reader decorations
    share a single session on the request context rather than each creating
    (or overwriting with) a new one.
    """
    @mock.patch.object(sqlalchemy_api, 'get_session')
    def test_use_single_context_session_writer(self, mock_get_session):
        # Checks that session in context would not be overwritten by
        # annotation @sqlalchemy_api.main_context_manager.writer if annotation
        # is used twice.
        @sqlalchemy_api.main_context_manager.writer
        def fake_parent_method(context):
            session = context.session
            return fake_child_method(context), session
        @sqlalchemy_api.main_context_manager.writer
        def fake_child_method(context):
            session = context.session
            sqlalchemy_api.model_query(context, models.Instance)
            return session
        parent_session, child_session = fake_parent_method(self.context)
        # get_session() must never be consulted; both levels see one session.
        self.assertFalse(mock_get_session.called)
        self.assertEqual(parent_session, child_session)
    @mock.patch.object(sqlalchemy_api, 'get_session')
    def test_use_single_context_session_reader(self, mock_get_session):
        # Checks that session in context would not be overwritten by
        # annotation @sqlalchemy_api.main_context_manager.reader if annotation
        # is used twice.
        @sqlalchemy_api.main_context_manager.reader
        def fake_parent_method(context):
            session = context.session
            return fake_child_method(context), session
        @sqlalchemy_api.main_context_manager.reader
        def fake_child_method(context):
            session = context.session
            sqlalchemy_api.model_query(context, models.Instance)
            return session
        parent_session, child_session = fake_parent_method(self.context)
        # Same invariant as the writer case, for the reader decorator.
        self.assertFalse(mock_get_session.called)
        self.assertEqual(parent_session, child_session)
class AggregateDBApiTestCase(test.TestCase):
    """Tests for the aggregate DB API: create/get/update/delete plus the
    per-aggregate metadata and host-membership operations.
    """
    def setUp(self):
        super(AggregateDBApiTestCase, self).setUp()
        self.user_id = 'fake'
        self.project_id = 'fake'
        self.context = context.RequestContext(self.user_id, self.project_id)
    def test_aggregate_create_no_metadata(self):
        """An aggregate can be created with no metadata at all."""
        result = _create_aggregate(metadata=None)
        self.assertEqual(result['name'], 'fake_aggregate')
    def test_aggregate_create_avoid_name_conflict(self):
        """A deleted aggregate's name may be reused by a new aggregate."""
        r1 = _create_aggregate(metadata=None)
        db.aggregate_delete(context.get_admin_context(), r1['id'])
        values = {'name': r1['name']}
        metadata = {'availability_zone': 'new_zone'}
        r2 = _create_aggregate(values=values, metadata=metadata)
        self.assertEqual(r2['name'], values['name'])
        self.assertEqual(r2['availability_zone'],
                         metadata['availability_zone'])
    def test_aggregate_create_raise_exist_exc(self):
        """Creating two live aggregates with the same name must fail."""
        _create_aggregate(metadata=None)
        self.assertRaises(exception.AggregateNameExists,
                          _create_aggregate, metadata=None)
    def test_aggregate_get_raise_not_found(self):
        """aggregate_get on an unknown id raises AggregateNotFound."""
        ctxt = context.get_admin_context()
        # this does not exist!
        aggregate_id = 1
        self.assertRaises(exception.AggregateNotFound,
                          db.aggregate_get,
                          ctxt, aggregate_id)
    def test_aggregate_metadata_get_raise_not_found(self):
        """aggregate_metadata_get on an unknown id raises AggregateNotFound."""
        ctxt = context.get_admin_context()
        # this does not exist!
        aggregate_id = 1
        self.assertRaises(exception.AggregateNotFound,
                          db.aggregate_metadata_get,
                          ctxt, aggregate_id)
    def test_aggregate_create_with_metadata(self):
        """Metadata supplied at create time is retrievable afterwards."""
        ctxt = context.get_admin_context()
        result = _create_aggregate(context=ctxt)
        expected_metadata = db.aggregate_metadata_get(ctxt, result['id'])
        self.assertThat(expected_metadata,
                        matchers.DictMatches(_get_fake_aggr_metadata()))
    def test_aggregate_create_delete_create_with_metadata(self):
        # test for bug 1052479
        ctxt = context.get_admin_context()
        result = _create_aggregate(context=ctxt)
        expected_metadata = db.aggregate_metadata_get(ctxt, result['id'])
        self.assertThat(expected_metadata,
                        matchers.DictMatches(_get_fake_aggr_metadata()))
        db.aggregate_delete(ctxt, result['id'])
        # Recreating after delete must not resurrect the old metadata.
        result = _create_aggregate(metadata={'availability_zone':
                                             'fake_avail_zone'})
        expected_metadata = db.aggregate_metadata_get(ctxt, result['id'])
        self.assertEqual(expected_metadata, {'availability_zone':
                                             'fake_avail_zone'})
    def test_aggregate_get(self):
        """aggregate_get returns both hosts and metadetails."""
        ctxt = context.get_admin_context()
        result = _create_aggregate_with_hosts(context=ctxt)
        expected = db.aggregate_get(ctxt, result['id'])
        self.assertEqual(_get_fake_aggr_hosts(), expected['hosts'])
        self.assertEqual(_get_fake_aggr_metadata(), expected['metadetails'])
    def test_aggregate_get_by_host(self):
        """Only aggregates currently containing the host are returned."""
        ctxt = context.get_admin_context()
        values2 = {'name': 'fake_aggregate2'}
        values3 = {'name': 'fake_aggregate3'}
        values4 = {'name': 'fake_aggregate4'}
        values5 = {'name': 'fake_aggregate5'}
        a1 = _create_aggregate_with_hosts(context=ctxt)
        a2 = _create_aggregate_with_hosts(context=ctxt, values=values2)
        # a3 has no hosts and should not be in the results.
        _create_aggregate(context=ctxt, values=values3)
        # a4 has no matching hosts.
        _create_aggregate_with_hosts(context=ctxt, values=values4,
                                     hosts=['foo4.openstack.org'])
        # a5 has no matching hosts after deleting the only matching host.
        a5 = _create_aggregate_with_hosts(context=ctxt, values=values5,
                hosts=['foo5.openstack.org', 'foo.openstack.org'])
        db.aggregate_host_delete(ctxt, a5['id'],
                                 'foo.openstack.org')
        r1 = db.aggregate_get_by_host(ctxt, 'foo.openstack.org')
        self.assertEqual([a1['id'], a2['id']], [x['id'] for x in r1])
    def test_aggregate_get_by_host_with_key(self):
        """The key argument restricts results to aggregates whose metadata
        has a matching value for that key.
        """
        ctxt = context.get_admin_context()
        values2 = {'name': 'fake_aggregate2'}
        values3 = {'name': 'fake_aggregate3'}
        values4 = {'name': 'fake_aggregate4'}
        a1 = _create_aggregate_with_hosts(context=ctxt,
                                          metadata={'goodkey': 'good'})
        _create_aggregate_with_hosts(context=ctxt, values=values2)
        _create_aggregate(context=ctxt, values=values3)
        _create_aggregate_with_hosts(context=ctxt, values=values4,
                hosts=['foo4.openstack.org'], metadata={'goodkey': 'bad'})
        # filter result by key
        r1 = db.aggregate_get_by_host(ctxt, 'foo.openstack.org', key='goodkey')
        self.assertEqual([a1['id']], [x['id'] for x in r1])
    def test_aggregate_metadata_get_by_host(self):
        """Metadata is merged across every aggregate containing the host."""
        ctxt = context.get_admin_context()
        values = {'name': 'fake_aggregate2'}
        values2 = {'name': 'fake_aggregate3'}
        _create_aggregate_with_hosts(context=ctxt)
        _create_aggregate_with_hosts(context=ctxt, values=values)
        _create_aggregate_with_hosts(context=ctxt, values=values2,
                hosts=['bar.openstack.org'], metadata={'badkey': 'bad'})
        r1 = db.aggregate_metadata_get_by_host(ctxt, 'foo.openstack.org')
        # Values come back as sets; metadata of unrelated hosts is excluded.
        self.assertEqual(r1['fake_key1'], set(['fake_value1']))
        self.assertNotIn('badkey', r1)
    def test_aggregate_metadata_get_by_host_with_key(self):
        """With a key filter, only that key's values are collected, and a
        deleted key disappears from subsequent lookups.
        """
        ctxt = context.get_admin_context()
        values2 = {'name': 'fake_aggregate12'}
        values3 = {'name': 'fake_aggregate23'}
        a2_hosts = ['foo1.openstack.org', 'foo2.openstack.org']
        a2_metadata = {'good': 'value12', 'bad': 'badvalue12'}
        a3_hosts = ['foo2.openstack.org', 'foo3.openstack.org']
        a3_metadata = {'good': 'value23', 'bad': 'badvalue23'}
        _create_aggregate_with_hosts(context=ctxt)
        _create_aggregate_with_hosts(context=ctxt, values=values2,
                hosts=a2_hosts, metadata=a2_metadata)
        a3 = _create_aggregate_with_hosts(context=ctxt, values=values3,
                hosts=a3_hosts, metadata=a3_metadata)
        # foo2 belongs to both a2 and a3, so both 'good' values show up.
        r1 = db.aggregate_metadata_get_by_host(ctxt, 'foo2.openstack.org',
                                               key='good')
        self.assertEqual(r1['good'], set(['value12', 'value23']))
        self.assertNotIn('fake_key1', r1)
        self.assertNotIn('bad', r1)
        # Delete metadata
        db.aggregate_metadata_delete(ctxt, a3['id'], 'good')
        r2 = db.aggregate_metadata_get_by_host(ctxt, 'foo3.openstack.org',
                                               key='good')
        self.assertNotIn('good', r2)
    def test_aggregate_get_by_host_not_found(self):
        """Unknown host yields an empty list, not an error."""
        ctxt = context.get_admin_context()
        _create_aggregate_with_hosts(context=ctxt)
        self.assertEqual([], db.aggregate_get_by_host(ctxt, 'unknown_host'))
    def test_aggregate_delete_raise_not_found(self):
        """Deleting an unknown aggregate raises AggregateNotFound."""
        ctxt = context.get_admin_context()
        # this does not exist!
        aggregate_id = 1
        self.assertRaises(exception.AggregateNotFound,
                          db.aggregate_delete,
                          ctxt, aggregate_id)
    def test_aggregate_delete(self):
        """Delete is a soft delete: hidden from get_all, visible with
        read_deleted='yes', and the 'deleted' column is set to the row id.
        """
        ctxt = context.get_admin_context()
        result = _create_aggregate(context=ctxt, metadata=None)
        db.aggregate_delete(ctxt, result['id'])
        expected = db.aggregate_get_all(ctxt)
        self.assertEqual(0, len(expected))
        aggregate = db.aggregate_get(ctxt.elevated(read_deleted='yes'),
                                     result['id'])
        self.assertEqual(aggregate['deleted'], result['id'])
    def test_aggregate_update(self):
        """aggregate_update can change the availability zone."""
        ctxt = context.get_admin_context()
        result = _create_aggregate(context=ctxt, metadata={'availability_zone':
            'fake_avail_zone'})
        self.assertEqual(result['availability_zone'], 'fake_avail_zone')
        new_values = _get_fake_aggr_values()
        new_values['availability_zone'] = 'different_avail_zone'
        updated = db.aggregate_update(ctxt, result['id'], new_values)
        self.assertNotEqual(result['availability_zone'],
                            updated['availability_zone'])
    def test_aggregate_update_with_metadata(self):
        """Updating with a 'metadata' key sets the metadata and the
        availability zone together.
        """
        ctxt = context.get_admin_context()
        result = _create_aggregate(context=ctxt, metadata=None)
        values = _get_fake_aggr_values()
        values['metadata'] = _get_fake_aggr_metadata()
        values['availability_zone'] = 'different_avail_zone'
        expected_metadata = copy.deepcopy(values['metadata'])
        expected_metadata['availability_zone'] = values['availability_zone']
        db.aggregate_update(ctxt, result['id'], values)
        metadata = db.aggregate_metadata_get(ctxt, result['id'])
        updated = db.aggregate_get(ctxt, result['id'])
        self.assertThat(metadata,
                        matchers.DictMatches(expected_metadata))
        self.assertNotEqual(result['availability_zone'],
                            updated['availability_zone'])
    def test_aggregate_update_with_existing_metadata(self):
        """An update overwrites existing metadata keys with new values."""
        ctxt = context.get_admin_context()
        result = _create_aggregate(context=ctxt)
        values = _get_fake_aggr_values()
        values['metadata'] = _get_fake_aggr_metadata()
        values['metadata']['fake_key1'] = 'foo'
        expected_metadata = copy.deepcopy(values['metadata'])
        db.aggregate_update(ctxt, result['id'], values)
        metadata = db.aggregate_metadata_get(ctxt, result['id'])
        self.assertThat(metadata, matchers.DictMatches(expected_metadata))
    def test_aggregate_update_zone_with_existing_metadata(self):
        """Changing only the zone keeps the other metadata keys intact."""
        ctxt = context.get_admin_context()
        result = _create_aggregate(context=ctxt)
        new_zone = {'availability_zone': 'fake_avail_zone_2'}
        metadata = _get_fake_aggr_metadata()
        metadata.update(new_zone)
        db.aggregate_update(ctxt, result['id'], new_zone)
        expected = db.aggregate_metadata_get(ctxt, result['id'])
        self.assertThat(metadata, matchers.DictMatches(expected))
    def test_aggregate_update_raise_not_found(self):
        """Updating an unknown aggregate raises AggregateNotFound."""
        ctxt = context.get_admin_context()
        # this does not exist!
        aggregate_id = 1
        new_values = _get_fake_aggr_values()
        self.assertRaises(exception.AggregateNotFound,
                          db.aggregate_update, ctxt, aggregate_id, new_values)
    def test_aggregate_update_raise_name_exist(self):
        """Renaming onto another live aggregate's name must fail."""
        ctxt = context.get_admin_context()
        _create_aggregate(context=ctxt, values={'name': 'test1'},
                          metadata={'availability_zone': 'fake_avail_zone'})
        _create_aggregate(context=ctxt, values={'name': 'test2'},
                          metadata={'availability_zone': 'fake_avail_zone'})
        aggregate_id = 1
        new_values = {'name': 'test2'}
        self.assertRaises(exception.AggregateNameExists,
                          db.aggregate_update, ctxt, aggregate_id, new_values)
    def test_aggregate_get_all(self):
        """get_all returns every created (non-deleted) aggregate."""
        ctxt = context.get_admin_context()
        counter = 3
        for c in range(counter):
            _create_aggregate(context=ctxt,
                              values={'name': 'fake_aggregate_%d' % c},
                              metadata=None)
        results = db.aggregate_get_all(ctxt)
        self.assertEqual(len(results), counter)
    def test_aggregate_get_all_non_deleted(self):
        """Soft-deleted aggregates are excluded from get_all."""
        ctxt = context.get_admin_context()
        add_counter = 5
        remove_counter = 2
        aggregates = []
        for c in range(1, add_counter):
            values = {'name': 'fake_aggregate_%d' % c}
            aggregates.append(_create_aggregate(context=ctxt,
                                                values=values, metadata=None))
        for c in range(1, remove_counter):
            db.aggregate_delete(ctxt, aggregates[c - 1]['id'])
        results = db.aggregate_get_all(ctxt)
        self.assertEqual(len(results), add_counter - remove_counter)
    def test_aggregate_metadata_add(self):
        """aggregate_metadata_add stores new key/value pairs."""
        ctxt = context.get_admin_context()
        result = _create_aggregate(context=ctxt, metadata=None)
        metadata = _get_fake_aggr_metadata()
        db.aggregate_metadata_add(ctxt, result['id'], metadata)
        expected = db.aggregate_metadata_get(ctxt, result['id'])
        self.assertThat(metadata, matchers.DictMatches(expected))
    def test_aggregate_metadata_add_empty_metadata(self):
        """Adding an empty dict is a no-op and does not error."""
        ctxt = context.get_admin_context()
        result = _create_aggregate(context=ctxt, metadata=None)
        metadata = {}
        db.aggregate_metadata_add(ctxt, result['id'], metadata)
        expected = db.aggregate_metadata_get(ctxt, result['id'])
        self.assertThat(metadata, matchers.DictMatches(expected))
    def test_aggregate_metadata_add_and_update(self):
        """metadata_add both updates existing keys and inserts new ones."""
        ctxt = context.get_admin_context()
        result = _create_aggregate(context=ctxt)
        metadata = _get_fake_aggr_metadata()
        key = list(metadata.keys())[0]
        new_metadata = {key: 'foo',
                        'fake_new_key': 'fake_new_value'}
        metadata.update(new_metadata)
        db.aggregate_metadata_add(ctxt, result['id'], new_metadata)
        expected = db.aggregate_metadata_get(ctxt, result['id'])
        self.assertThat(metadata, matchers.DictMatches(expected))
    def test_aggregate_metadata_add_retry(self):
        """On persistent duplicate-entry errors, metadata_add retries exactly
        max_retries times and then re-raises DBDuplicateEntry.
        """
        ctxt = context.get_admin_context()
        result = _create_aggregate(context=ctxt, metadata=None)
        def counted():
            # Stub query factory that counts calls and always raises a
            # duplicate-entry error to force the retry path.
            def get_query(context, id, session, read_deleted):
                get_query.counter += 1
                raise db_exc.DBDuplicateEntry
            get_query.counter = 0
            return get_query
        get_query = counted()
        self.stubs.Set(sqlalchemy_api,
                       '_aggregate_metadata_get_query', get_query)
        self.assertRaises(db_exc.DBDuplicateEntry, sqlalchemy_api.
                          aggregate_metadata_add, ctxt, result['id'], {},
                          max_retries=5)
        self.assertEqual(get_query.counter, 5)
    def test_aggregate_metadata_update(self):
        """A key can be deleted and then re-added with a new value."""
        ctxt = context.get_admin_context()
        result = _create_aggregate(context=ctxt)
        metadata = _get_fake_aggr_metadata()
        key = list(metadata.keys())[0]
        db.aggregate_metadata_delete(ctxt, result['id'], key)
        new_metadata = {key: 'foo'}
        db.aggregate_metadata_add(ctxt, result['id'], new_metadata)
        expected = db.aggregate_metadata_get(ctxt, result['id'])
        metadata[key] = 'foo'
        self.assertThat(metadata, matchers.DictMatches(expected))
    def test_aggregate_metadata_delete(self):
        """Deleting one key leaves the remaining keys untouched."""
        ctxt = context.get_admin_context()
        result = _create_aggregate(context=ctxt, metadata=None)
        metadata = _get_fake_aggr_metadata()
        db.aggregate_metadata_add(ctxt, result['id'], metadata)
        db.aggregate_metadata_delete(ctxt, result['id'],
                                     list(metadata.keys())[0])
        expected = db.aggregate_metadata_get(ctxt, result['id'])
        del metadata[list(metadata.keys())[0]]
        self.assertThat(metadata, matchers.DictMatches(expected))
    def test_aggregate_remove_availability_zone(self):
        """Deleting the availability_zone key clears the aggregate's zone."""
        ctxt = context.get_admin_context()
        result = _create_aggregate(context=ctxt, metadata={'availability_zone':
            'fake_avail_zone'})
        db.aggregate_metadata_delete(ctxt, result['id'], 'availability_zone')
        expected = db.aggregate_metadata_get(ctxt, result['id'])
        aggregate = db.aggregate_get(ctxt, result['id'])
        self.assertIsNone(aggregate['availability_zone'])
        self.assertThat({}, matchers.DictMatches(expected))
    def test_aggregate_metadata_delete_raise_not_found(self):
        """Deleting an unknown key raises AggregateMetadataNotFound."""
        ctxt = context.get_admin_context()
        result = _create_aggregate(context=ctxt)
        self.assertRaises(exception.AggregateMetadataNotFound,
                          db.aggregate_metadata_delete,
                          ctxt, result['id'], 'foo_key')
    def test_aggregate_host_add(self):
        """Hosts added at create time are returned by host_get_all."""
        ctxt = context.get_admin_context()
        result = _create_aggregate_with_hosts(context=ctxt, metadata=None)
        expected = db.aggregate_host_get_all(ctxt, result['id'])
        self.assertEqual(_get_fake_aggr_hosts(), expected)
    def test_aggregate_host_re_add(self):
        """A host removed from an aggregate can be added back."""
        ctxt = context.get_admin_context()
        result = _create_aggregate_with_hosts(context=ctxt, metadata=None)
        host = _get_fake_aggr_hosts()[0]
        db.aggregate_host_delete(ctxt, result['id'], host)
        db.aggregate_host_add(ctxt, result['id'], host)
        expected = db.aggregate_host_get_all(ctxt, result['id'])
        self.assertEqual(len(expected), 1)
    def test_aggregate_host_add_duplicate_works(self):
        """The same host may belong to two different aggregates."""
        ctxt = context.get_admin_context()
        r1 = _create_aggregate_with_hosts(context=ctxt, metadata=None)
        r2 = _create_aggregate_with_hosts(ctxt,
                values={'name': 'fake_aggregate2'},
                metadata={'availability_zone': 'fake_avail_zone2'})
        h1 = db.aggregate_host_get_all(ctxt, r1['id'])
        h2 = db.aggregate_host_get_all(ctxt, r2['id'])
        self.assertEqual(h1, h2)
    def test_aggregate_host_add_duplicate_raise_exist_exc(self):
        """Adding a host twice to one aggregate raises AggregateHostExists."""
        ctxt = context.get_admin_context()
        result = _create_aggregate_with_hosts(context=ctxt, metadata=None)
        self.assertRaises(exception.AggregateHostExists,
                          db.aggregate_host_add,
                          ctxt, result['id'], _get_fake_aggr_hosts()[0])
    def test_aggregate_host_add_raise_not_found(self):
        """Adding a host to an unknown aggregate raises AggregateNotFound."""
        ctxt = context.get_admin_context()
        # this does not exist!
        aggregate_id = 1
        host = _get_fake_aggr_hosts()[0]
        self.assertRaises(exception.AggregateNotFound,
                          db.aggregate_host_add,
                          ctxt, aggregate_id, host)
    def test_aggregate_host_delete(self):
        """Deleting the only host leaves the aggregate with none."""
        ctxt = context.get_admin_context()
        result = _create_aggregate_with_hosts(context=ctxt, metadata=None)
        db.aggregate_host_delete(ctxt, result['id'],
                                 _get_fake_aggr_hosts()[0])
        expected = db.aggregate_host_get_all(ctxt, result['id'])
        self.assertEqual(0, len(expected))
    def test_aggregate_host_delete_raise_not_found(self):
        """Deleting a host not in the aggregate raises AggregateHostNotFound."""
        ctxt = context.get_admin_context()
        result = _create_aggregate(context=ctxt)
        self.assertRaises(exception.AggregateHostNotFound,
                          db.aggregate_host_delete,
                          ctxt, result['id'], _get_fake_aggr_hosts()[0])
class SqlAlchemyDbApiNoDbTestCase(test.NoDBTestCase):
    """No-DB test class for simple test cases that do not require a backend."""
    def test_manual_join_columns_immutable_list(self):
        # Tests that _manual_join_columns doesn't modify the list passed in.
        columns_to_join = ['system_metadata', 'test']
        manual_joins, columns_to_join2 = (
            sqlalchemy_api._manual_join_columns(columns_to_join))
        self.assertEqual(['system_metadata'], manual_joins)
        self.assertEqual(['test'], columns_to_join2)
        # The caller's list must be returned unchanged.
        self.assertEqual(['system_metadata', 'test'], columns_to_join)
    def test_convert_objects_related_datetimes(self):
        """Only the named keys are converted; tz-aware values lose their
        tzinfo while unnamed keys are left exactly as passed in.
        """
        t1 = timeutils.utcnow()
        t2 = t1 + datetime.timedelta(seconds=10)
        t3 = t2 + datetime.timedelta(hours=1)
        t2_utc = t2.replace(tzinfo=iso8601.iso8601.Utc())
        t3_utc = t3.replace(tzinfo=iso8601.iso8601.Utc())
        datetime_keys = ('created_at', 'deleted_at')
        test1 = {'created_at': t1, 'deleted_at': t2, 'updated_at': t3}
        expected_dict = {'created_at': t1, 'deleted_at': t2, 'updated_at': t3}
        sqlalchemy_api.convert_objects_related_datetimes(test1, *datetime_keys)
        self.assertEqual(test1, expected_dict)
        test2 = {'created_at': t1, 'deleted_at': t2_utc, 'updated_at': t3}
        expected_dict = {'created_at': t1, 'deleted_at': t2, 'updated_at': t3}
        sqlalchemy_api.convert_objects_related_datetimes(test2, *datetime_keys)
        self.assertEqual(test2, expected_dict)
        # 'updated_at' is not listed in datetime_keys, so it keeps its tz.
        test3 = {'deleted_at': t2_utc, 'updated_at': t3_utc}
        expected_dict = {'deleted_at': t2, 'updated_at': t3_utc}
        sqlalchemy_api.convert_objects_related_datetimes(test3, *datetime_keys)
        self.assertEqual(test3, expected_dict)
    def test_convert_objects_related_datetimes_with_strings(self):
        """String timestamps are parsed (ISO or strtime format) and made
        naive; with no keys given, all values are converted.
        """
        t1 = '2015-05-28T17:15:53.000000'
        t2 = '2012-04-21T18:25:43-05:00'
        t3 = '2012-04-23T18:25:43.511Z'
        datetime_keys = ('created_at', 'deleted_at', 'updated_at')
        test1 = {'created_at': t1, 'deleted_at': t2, 'updated_at': t3}
        expected_dict = {
        'created_at': timeutils.parse_strtime(t1).replace(tzinfo=None),
        'deleted_at': timeutils.parse_isotime(t2).replace(tzinfo=None),
        'updated_at': timeutils.parse_isotime(t3).replace(tzinfo=None)}
        sqlalchemy_api.convert_objects_related_datetimes(test1)
        self.assertEqual(test1, expected_dict)
        # Converting an already-converted dict is idempotent.
        sqlalchemy_api.convert_objects_related_datetimes(test1, *datetime_keys)
        self.assertEqual(test1, expected_dict)
    def test_get_regexp_op_for_database_sqlite(self):
        """SQLite connections use the REGEXP operator."""
        op = sqlalchemy_api._get_regexp_op_for_connection('sqlite:///')
        self.assertEqual('REGEXP', op)
    def test_get_regexp_op_for_database_mysql(self):
        """MySQL connections use the REGEXP operator."""
        op = sqlalchemy_api._get_regexp_op_for_connection(
            'mysql+pymysql://root@localhost')
        self.assertEqual('REGEXP', op)
    def test_get_regexp_op_for_database_postgresql(self):
        """PostgreSQL connections use the '~' operator."""
        op = sqlalchemy_api._get_regexp_op_for_connection(
            'postgresql://localhost')
        self.assertEqual('~', op)
    def test_get_regexp_op_for_database_unknown(self):
        """Unrecognized backends fall back to LIKE."""
        op = sqlalchemy_api._get_regexp_op_for_connection('notdb:///')
        self.assertEqual('LIKE', op)
    @mock.patch.object(sqlalchemy_api.main_context_manager._factory,
                       'get_legacy_facade')
    def test_get_engine(self, mock_create_facade):
        """get_engine uses the main facade, defaulting to the master."""
        mock_facade = mock.MagicMock()
        mock_create_facade.return_value = mock_facade
        sqlalchemy_api.get_engine()
        mock_create_facade.assert_called_once_with()
        mock_facade.get_engine.assert_called_once_with(use_slave=False)
    @mock.patch.object(sqlalchemy_api.api_context_manager._factory,
                       'get_legacy_facade')
    def test_get_api_engine(self, mock_create_facade):
        """get_api_engine uses the API-database facade."""
        mock_facade = mock.MagicMock()
        mock_create_facade.return_value = mock_facade
        sqlalchemy_api.get_api_engine()
        mock_create_facade.assert_called_once_with()
        mock_facade.get_engine.assert_called_once_with()
    @mock.patch.object(sqlalchemy_api.main_context_manager._factory,
                       'get_legacy_facade')
    def test_get_session(self, mock_create_facade):
        """get_session uses the main facade, defaulting to the master."""
        mock_facade = mock.MagicMock()
        mock_create_facade.return_value = mock_facade
        sqlalchemy_api.get_session()
        mock_create_facade.assert_called_once_with()
        mock_facade.get_session.assert_called_once_with(use_slave=False)
    @mock.patch.object(sqlalchemy_api.api_context_manager._factory,
                       'get_legacy_facade')
    def test_get_api_session(self, mock_create_facade):
        """get_api_session uses the API-database facade."""
        mock_facade = mock.MagicMock()
        mock_create_facade.return_value = mock_facade
        sqlalchemy_api.get_api_session()
        mock_create_facade.assert_called_once_with()
        mock_facade.get_session.assert_called_once_with()
    @mock.patch.object(sqlalchemy_api, '_instance_get_by_uuid')
    @mock.patch.object(sqlalchemy_api, '_instances_fill_metadata')
    @mock.patch('oslo_db.sqlalchemy.utils.paginate_query')
    def test_instance_get_all_by_filters_paginated_allows_deleted_marker(
            self, mock_paginate, mock_fill, mock_get):
        """The pagination marker is looked up with read_deleted='yes' so a
        deleted instance can still serve as the marker.
        """
        ctxt = mock.MagicMock()
        ctxt.elevated.return_value = mock.sentinel.elevated
        sqlalchemy_api.instance_get_all_by_filters_sort(ctxt, {}, marker='foo')
        mock_get.assert_called_once_with(mock.sentinel.elevated,
                                         'foo', session=mock.ANY)
        ctxt.elevated.assert_called_once_with(read_deleted='yes')
class SqlAlchemyDbApiTestCase(DbTestCase):
    """Tests exercising the sqlalchemy_api instance query helpers."""
    def test_instance_get_all_by_host(self):
        """instance_get_all_by_host returns only instances on that host."""
        ctxt = context.get_admin_context()
        self.create_instance_with_args()
        self.create_instance_with_args()
        self.create_instance_with_args(host='host2')
        # Call the public per-host query this test is named for; the private
        # UUID variant was being called here by mistake, which made this test
        # an exact duplicate of test_instance_get_all_uuids_by_host below.
        result = sqlalchemy_api.instance_get_all_by_host(ctxt, 'host1')
        self.assertEqual(2, len(result))
    def test_instance_get_all_uuids_by_host(self):
        """_instance_get_all_uuids_by_host returns the uuids as text."""
        ctxt = context.get_admin_context()
        self.create_instance_with_args()
        self.create_instance_with_args()
        self.create_instance_with_args(host='host2')
        result = sqlalchemy_api._instance_get_all_uuids_by_host(ctxt, 'host1')
        self.assertEqual(2, len(result))
        self.assertEqual(six.text_type, type(result[0]))
    def test_instance_get_active_by_window_joined(self):
        """Instances active in a [begin, end] window are returned, with the
        columns_to_join argument controlling which relations are loaded.
        """
        now = datetime.datetime(2013, 10, 10, 17, 16, 37, 156701)
        start_time = now - datetime.timedelta(minutes=10)
        now1 = now + datetime.timedelta(minutes=1)
        now2 = now + datetime.timedelta(minutes=2)
        now3 = now + datetime.timedelta(minutes=3)
        ctxt = context.get_admin_context()
        # used for testing columns_to_join
        network_info = jsonutils.dumps({'ckey': 'cvalue'})
        sample_data = {
            'metadata': {'mkey1': 'mval1', 'mkey2': 'mval2'},
            'system_metadata': {'smkey1': 'smval1', 'smkey2': 'smval2'},
            'info_cache': {'network_info': network_info},
        }
        self.create_instance_with_args(launched_at=now, **sample_data)
        self.create_instance_with_args(launched_at=now1, terminated_at=now2,
                                       **sample_data)
        self.create_instance_with_args(launched_at=now2, terminated_at=now3,
                                       **sample_data)
        self.create_instance_with_args(launched_at=now3, terminated_at=None,
                                       **sample_data)
        result = sqlalchemy_api.instance_get_active_by_window_joined(
            ctxt, begin=now)
        self.assertEqual(4, len(result))
        # verify that all default columns are joined
        meta = utils.metadata_to_dict(result[0]['metadata'])
        self.assertEqual(sample_data['metadata'], meta)
        sys_meta = utils.metadata_to_dict(result[0]['system_metadata'])
        self.assertEqual(sample_data['system_metadata'], sys_meta)
        self.assertIn('info_cache', result[0])
        result = sqlalchemy_api.instance_get_active_by_window_joined(
            ctxt, begin=now3, columns_to_join=['info_cache'])
        self.assertEqual(2, len(result))
        # verify that only info_cache is loaded
        meta = utils.metadata_to_dict(result[0]['metadata'])
        self.assertEqual({}, meta)
        self.assertIn('info_cache', result[0])
        result = sqlalchemy_api.instance_get_active_by_window_joined(
            ctxt, begin=start_time, end=now)
        self.assertEqual(0, len(result))
        result = sqlalchemy_api.instance_get_active_by_window_joined(
            ctxt, begin=start_time, end=now2,
            columns_to_join=['system_metadata'])
        self.assertEqual(2, len(result))
        # verify that only system_metadata is loaded
        meta = utils.metadata_to_dict(result[0]['metadata'])
        self.assertEqual({}, meta)
        sys_meta = utils.metadata_to_dict(result[0]['system_metadata'])
        self.assertEqual(sample_data['system_metadata'], sys_meta)
        self.assertNotIn('info_cache', result[0])
        result = sqlalchemy_api.instance_get_active_by_window_joined(
            ctxt, begin=now2, end=now3,
            columns_to_join=['metadata', 'info_cache'])
        self.assertEqual(2, len(result))
        # verify that only metadata and info_cache are loaded
        meta = utils.metadata_to_dict(result[0]['metadata'])
        self.assertEqual(sample_data['metadata'], meta)
        sys_meta = utils.metadata_to_dict(result[0]['system_metadata'])
        self.assertEqual({}, sys_meta)
        self.assertIn('info_cache', result[0])
        self.assertEqual(network_info, result[0]['info_cache']['network_info'])
    @mock.patch('nova.db.sqlalchemy.api.instance_get_all_by_filters_sort')
    def test_instance_get_all_by_filters_calls_sort(self,
                                                    mock_get_all_filters_sort):
        '''Verifies instance_get_all_by_filters calls the sort function.'''
        # sort parameters should be wrapped in a list, all other parameters
        # should be passed through
        ctxt = context.get_admin_context()
        sqlalchemy_api.instance_get_all_by_filters(ctxt, {'foo': 'bar'},
            'sort_key', 'sort_dir', limit=100, marker='uuid',
            columns_to_join='columns', use_slave=True)
        mock_get_all_filters_sort.assert_called_once_with(ctxt, {'foo': 'bar'},
            limit=100, marker='uuid', columns_to_join='columns',
            use_slave=True, sort_keys=['sort_key'], sort_dirs=['sort_dir'])
    def test_instance_get_all_by_filters_sort_key_invalid(self):
        '''InvalidSortKey raised if an invalid key is given.'''
        for keys in [['foo'], ['uuid', 'foo']]:
            self.assertRaises(exception.InvalidSortKey,
                              db.instance_get_all_by_filters_sort,
                              self.context,
                              filters={},
                              sort_keys=keys)
class ProcessSortParamTestCase(test.TestCase):
def test_process_sort_params_defaults(self):
'''Verifies default sort parameters.'''
sort_keys, sort_dirs = sqlalchemy_api.process_sort_params([], [])
self.assertEqual(['created_at', 'id'], sort_keys)
self.assertEqual(['asc', 'asc'], sort_dirs)
sort_keys, sort_dirs = sqlalchemy_api.process_sort_params(None, None)
self.assertEqual(['created_at', 'id'], sort_keys)
self.assertEqual(['asc', 'asc'], sort_dirs)
def test_process_sort_params_override_default_keys(self):
'''Verifies that the default keys can be overridden.'''
sort_keys, sort_dirs = sqlalchemy_api.process_sort_params(
[], [], default_keys=['key1', 'key2', 'key3'])
self.assertEqual(['key1', 'key2', 'key3'], sort_keys)
self.assertEqual(['asc', 'asc', 'asc'], sort_dirs)
def test_process_sort_params_override_default_dir(self):
'''Verifies that the default direction can be overridden.'''
sort_keys, sort_dirs = sqlalchemy_api.process_sort_params(
[], [], default_dir='dir1')
self.assertEqual(['created_at', 'id'], sort_keys)
self.assertEqual(['dir1', 'dir1'], sort_dirs)
def test_process_sort_params_override_default_key_and_dir(self):
'''Verifies that the default key and dir can be overridden.'''
sort_keys, sort_dirs = sqlalchemy_api.process_sort_params(
[], [], default_keys=['key1', 'key2', 'key3'],
default_dir='dir1')
self.assertEqual(['key1', 'key2', 'key3'], sort_keys)
self.assertEqual(['dir1', 'dir1', 'dir1'], sort_dirs)
sort_keys, sort_dirs = sqlalchemy_api.process_sort_params(
[], [], default_keys=[], default_dir='dir1')
self.assertEqual([], sort_keys)
self.assertEqual([], sort_dirs)
def test_process_sort_params_non_default(self):
'''Verifies that non-default keys are added correctly.'''
sort_keys, sort_dirs = sqlalchemy_api.process_sort_params(
['key1', 'key2'], ['asc', 'desc'])
self.assertEqual(['key1', 'key2', 'created_at', 'id'], sort_keys)
# First sort_dir in list is used when adding the default keys
self.assertEqual(['asc', 'desc', 'asc', 'asc'], sort_dirs)
def test_process_sort_params_default(self):
'''Verifies that default keys are added correctly.'''
sort_keys, sort_dirs = sqlalchemy_api.process_sort_params(
['id', 'key2'], ['asc', 'desc'])
self.assertEqual(['id', 'key2', 'created_at'], sort_keys)
self.assertEqual(['asc', 'desc', 'asc'], sort_dirs)
# Include default key value, rely on default direction
sort_keys, sort_dirs = sqlalchemy_api.process_sort_params(
['id', 'key2'], [])
self.assertEqual(['id', 'key2', 'created_at'], sort_keys)
self.assertEqual(['asc', 'asc', 'asc'], sort_dirs)
def test_process_sort_params_default_dir(self):
'''Verifies that the default dir is applied to all keys.'''
# Direction is set, ignore default dir
sort_keys, sort_dirs = sqlalchemy_api.process_sort_params(
['id', 'key2'], ['desc'], default_dir='dir')
self.assertEqual(['id', 'key2', 'created_at'], sort_keys)
self.assertEqual(['desc', 'desc', 'desc'], sort_dirs)
# But should be used if no direction is set
sort_keys, sort_dirs = sqlalchemy_api.process_sort_params(
['id', 'key2'], [], default_dir='dir')
self.assertEqual(['id', 'key2', 'created_at'], sort_keys)
self.assertEqual(['dir', 'dir', 'dir'], sort_dirs)
def test_process_sort_params_unequal_length(self):
'''Verifies that a sort direction list is applied correctly.'''
sort_keys, sort_dirs = sqlalchemy_api.process_sort_params(
['id', 'key2', 'key3'], ['desc'])
self.assertEqual(['id', 'key2', 'key3', 'created_at'], sort_keys)
self.assertEqual(['desc', 'desc', 'desc', 'desc'], sort_dirs)
# Default direction is the first key in the list
sort_keys, sort_dirs = sqlalchemy_api.process_sort_params(
['id', 'key2', 'key3'], ['desc', 'asc'])
self.assertEqual(['id', 'key2', 'key3', 'created_at'], sort_keys)
self.assertEqual(['desc', 'asc', 'desc', 'desc'], sort_dirs)
sort_keys, sort_dirs = sqlalchemy_api.process_sort_params(
['id', 'key2', 'key3'], ['desc', 'asc', 'asc'])
self.assertEqual(['id', 'key2', 'key3', 'created_at'], sort_keys)
self.assertEqual(['desc', 'asc', 'asc', 'desc'], sort_dirs)
def test_process_sort_params_extra_dirs_lengths(self):
'''InvalidInput raised if more directions are given.'''
self.assertRaises(exception.InvalidInput,
sqlalchemy_api.process_sort_params,
['key1', 'key2'],
['asc', 'desc', 'desc'])
def test_process_sort_params_invalid_sort_dir(self):
'''InvalidInput raised if invalid directions are given.'''
for dirs in [['foo'], ['asc', 'foo'], ['asc', 'desc', 'foo']]:
self.assertRaises(exception.InvalidInput,
sqlalchemy_api.process_sort_params,
['key'],
dirs)
class MigrationTestCase(test.TestCase):
    """Tests for db.api.migration_* methods."""
    def setUp(self):
        # Seed a fixture of migrations: two in progress between host1/host2,
        # four in terminal states, and three more spanning other host pairs.
        # Individual tests filter against this shared state.
        super(MigrationTestCase, self).setUp()
        self.ctxt = context.get_admin_context()
        self._create()
        self._create()
        self._create(status='reverted')
        self._create(status='confirmed')
        self._create(status='error')
        self._create(status='accepted')
        self._create(source_compute='host2', source_node='b',
                     dest_compute='host1', dest_node='a')
        self._create(source_compute='host2', dest_compute='host3')
        self._create(source_compute='host3', dest_compute='host4')
    def _create(self, status='migrating', source_compute='host1',
                source_node='a', dest_compute='host2', dest_node='b',
                system_metadata=None, migration_type=None):
        # Create an instance homed on the source host plus a migration row
        # referencing it, optionally attaching system metadata.
        values = {'host': source_compute}
        instance = db.instance_create(self.ctxt, values)
        if system_metadata:
            db.instance_system_metadata_update(self.ctxt, instance['uuid'],
                                               system_metadata, False)
        values = {'status': status, 'source_compute': source_compute,
                  'source_node': source_node, 'dest_compute': dest_compute,
                  'dest_node': dest_node, 'instance_uuid': instance['uuid'],
                  'migration_type': migration_type}
        db.migration_create(self.ctxt, values)
    def _assert_in_progress(self, migrations):
        # "In progress" means the migration is in none of the terminal
        # states checked below.
        for migration in migrations:
            self.assertNotEqual('confirmed', migration['status'])
            self.assertNotEqual('reverted', migration['status'])
            self.assertNotEqual('error', migration['status'])
            self.assertNotEqual('accepted', migration['status'])
    def test_migration_get_in_progress_joins(self):
        # The returned migrations have the instance (and its system
        # metadata) joined in.
        self._create(source_compute='foo', system_metadata={'foo': 'bar'})
        migrations = db.migration_get_in_progress_by_host_and_node(self.ctxt,
                                                                   'foo', 'a')
        system_metadata = migrations[0]['instance']['system_metadata'][0]
        self.assertEqual(system_metadata['key'], 'foo')
        self.assertEqual(system_metadata['value'], 'bar')
    def test_in_progress_host1_nodea(self):
        migrations = db.migration_get_in_progress_by_host_and_node(self.ctxt,
                'host1', 'a')
        # 2 as source + 1 as dest
        self.assertEqual(3, len(migrations))
        self._assert_in_progress(migrations)
    def test_in_progress_host1_nodeb(self):
        migrations = db.migration_get_in_progress_by_host_and_node(self.ctxt,
                'host1', 'b')
        # some migrations are to/from host1, but none with a node 'b'
        self.assertEqual(0, len(migrations))
    def test_in_progress_host2_nodeb(self):
        migrations = db.migration_get_in_progress_by_host_and_node(self.ctxt,
                'host2', 'b')
        # 2 as dest, 1 as source
        self.assertEqual(3, len(migrations))
        self._assert_in_progress(migrations)
    def test_instance_join(self):
        migrations = db.migration_get_in_progress_by_host_and_node(self.ctxt,
                'host2', 'b')
        for migration in migrations:
            instance = migration['instance']
            self.assertEqual(migration['instance_uuid'], instance['uuid'])
    def test_get_migrations_by_filters(self):
        # The 'host' filter matches either the source or dest compute.
        filters = {"status": "migrating", "host": "host3",
                   "migration_type": None, "hidden": False}
        migrations = db.migration_get_all_by_filters(self.ctxt, filters)
        self.assertEqual(2, len(migrations))
        for migration in migrations:
            self.assertEqual(filters["status"], migration['status'])
            hosts = [migration['source_compute'], migration['dest_compute']]
            self.assertIn(filters["host"], hosts)
    def test_get_migrations_by_filters_with_type(self):
        self._create(status="special", source_compute="host9",
                     migration_type="evacuation")
        self._create(status="special", source_compute="host9",
                     migration_type="live-migration")
        filters = {"status": "special", "host": "host9",
                   "migration_type": "evacuation", "hidden": False}
        migrations = db.migration_get_all_by_filters(self.ctxt, filters)
        self.assertEqual(1, len(migrations))
    def test_get_migrations_by_filters_source_compute(self):
        filters = {'source_compute': 'host2'}
        migrations = db.migration_get_all_by_filters(self.ctxt, filters)
        self.assertEqual(2, len(migrations))
        sources = [x['source_compute'] for x in migrations]
        self.assertEqual(['host2', 'host2'], sources)
        dests = [x['dest_compute'] for x in migrations]
        self.assertEqual(['host1', 'host3'], dests)
    def test_migration_get_unconfirmed_by_dest_compute(self):
        # Ensure no migrations are returned.
        results = db.migration_get_unconfirmed_by_dest_compute(self.ctxt, 10,
                'fake_host')
        self.assertEqual(0, len(results))
        # Ensure no migrations are returned.
        results = db.migration_get_unconfirmed_by_dest_compute(self.ctxt, 10,
                'fake_host2')
        self.assertEqual(0, len(results))
        updated_at = datetime.datetime(2000, 1, 1, 12, 0, 0)
        values = {"status": "finished", "updated_at": updated_at,
                  "dest_compute": "fake_host2"}
        migration = db.migration_create(self.ctxt, values)
        # Ensure different host is not returned
        results = db.migration_get_unconfirmed_by_dest_compute(self.ctxt, 10,
                'fake_host')
        self.assertEqual(0, len(results))
        # Ensure one migration older than 10 seconds is returned.
        results = db.migration_get_unconfirmed_by_dest_compute(self.ctxt, 10,
                'fake_host2')
        self.assertEqual(1, len(results))
        db.migration_update(self.ctxt, migration['id'],
                            {"status": "CONFIRMED"})
        # Ensure the new migration is not returned.
        updated_at = timeutils.utcnow()
        values = {"status": "finished", "updated_at": updated_at,
                  "dest_compute": "fake_host2"}
        migration = db.migration_create(self.ctxt, values)
        results = db.migration_get_unconfirmed_by_dest_compute(self.ctxt, 10,
                "fake_host2")
        self.assertEqual(0, len(results))
        db.migration_update(self.ctxt, migration['id'],
                            {"status": "CONFIRMED"})
    def test_migration_update_not_found(self):
        self.assertRaises(exception.MigrationNotFound,
                          db.migration_update, self.ctxt, 42, {})
class ModelsObjectComparatorMixin(object):
    """Helper assertions for comparing DB model objects and dicts.

    Intended to be mixed into a TestCase: the ``_assertEqual*`` helpers
    rely on ``assertEqual``/``assertIn`` being provided by the host class.
    """

    def _dict_from_object(self, obj, ignored_keys):
        """Return ``obj`` as a plain dict with ``ignored_keys`` dropped.

        ``obj`` may be a dict or a model object exposing ``iteritems()``
        (as the model instances used by these tests do).
        """
        if ignored_keys is None:
            ignored_keys = []
        if isinstance(obj, dict):
            obj_items = obj.items()
        else:
            # Non-dict model objects expose an iteritems() method.
            obj_items = obj.iteritems()
        return {k: v for k, v in obj_items
                if k not in ignored_keys}

    def _assertEqualObjects(self, obj1, obj2, ignored_keys=None):
        """Assert two objects expose the same keys and values."""
        obj1 = self._dict_from_object(obj1, ignored_keys)
        obj2 = self._dict_from_object(obj2, ignored_keys)
        self.assertEqual(len(obj1),
                         len(obj2),
                         "Keys mismatch: %s" %
                         str(set(obj1.keys()) ^ set(obj2.keys())))
        for key, value in obj1.items():
            self.assertEqual(value, obj2[key])

    def _assertEqualListsOfObjects(self, objs1, objs2, ignored_keys=None):
        """Assert two lists contain equal objects, ignoring list order."""
        # PEP 8 (E731): use nested defs rather than lambda assignments.
        def _to_dict(o):
            return self._dict_from_object(o, ignored_keys)

        def _sort_key(d):
            # Order by values taken in sorted-key order so that dicts with
            # identical contents compare as equal positions.
            return [d[k] for k in sorted(d)]

        def _conv_and_sort(objs):
            return sorted((_to_dict(o) for o in objs), key=_sort_key)

        self.assertEqual(_conv_and_sort(objs1), _conv_and_sort(objs2))

    def _assertEqualOrderedListOfObjects(self, objs1, objs2,
                                         ignored_keys=None):
        """Assert two lists contain equal objects in the same order."""
        conv1 = [self._dict_from_object(obj, ignored_keys) for obj in objs1]
        conv2 = [self._dict_from_object(obj, ignored_keys) for obj in objs2]
        self.assertEqual(conv1, conv2)

    def _assertEqualListsOfPrimitivesAsSets(self, primitives1, primitives2):
        """Assert two lists hold the same elements, ignoring order.

        Gives set semantics without requiring the elements to be hashable.
        """
        self.assertEqual(len(primitives1), len(primitives2))
        for primitive in primitives1:
            self.assertIn(primitive, primitives2)
        for primitive in primitives2:
            self.assertIn(primitive, primitives1)
class InstanceSystemMetadataTestCase(test.TestCase):
    """Tests for db.api.instance_system_metadata_* methods."""
    def setUp(self):
        # One instance with a single system-metadata pair as the fixture.
        super(InstanceSystemMetadataTestCase, self).setUp()
        values = {'host': 'h1', 'project_id': 'p1',
                  'system_metadata': {'key': 'value'}}
        self.ctxt = context.get_admin_context()
        self.instance = db.instance_create(self.ctxt, values)
    def test_instance_system_metadata_get(self):
        metadata = db.instance_system_metadata_get(self.ctxt,
                                                   self.instance['uuid'])
        self.assertEqual(metadata, {'key': 'value'})
    def test_instance_system_metadata_update_new_pair(self):
        # delete=False: existing pairs are preserved, new ones added.
        db.instance_system_metadata_update(
                    self.ctxt, self.instance['uuid'],
                    {'new_key': 'new_value'}, False)
        metadata = db.instance_system_metadata_get(self.ctxt,
                                                   self.instance['uuid'])
        self.assertEqual(metadata, {'key': 'value', 'new_key': 'new_value'})
    def test_instance_system_metadata_update_existent_pair(self):
        db.instance_system_metadata_update(
                    self.ctxt, self.instance['uuid'],
                    {'key': 'new_value'}, True)
        metadata = db.instance_system_metadata_get(self.ctxt,
                                                   self.instance['uuid'])
        self.assertEqual(metadata, {'key': 'new_value'})
    def test_instance_system_metadata_update_delete_true(self):
        # delete=True: pairs not present in the update are removed.
        db.instance_system_metadata_update(
                    self.ctxt, self.instance['uuid'],
                    {'new_key': 'new_value'}, True)
        metadata = db.instance_system_metadata_get(self.ctxt,
                                                   self.instance['uuid'])
        self.assertEqual(metadata, {'new_key': 'new_value'})
    @test.testtools.skip("bug 1189462")
    def test_instance_system_metadata_update_nonexistent(self):
        self.assertRaises(exception.InstanceNotFound,
                          db.instance_system_metadata_update,
                          self.ctxt, 'nonexistent-uuid',
                          {'key': 'value'}, True)
class ReservationTestCase(test.TestCase, ModelsObjectComparatorMixin):
    """Tests for db.api.reservation_* methods."""
    def setUp(self):
        # _quota_reserve sets up quota usages and returns reservation uuids
        # for project1/user1; tests then commit/rollback/expire them.
        super(ReservationTestCase, self).setUp()
        self.ctxt = context.get_admin_context()
        self.reservations = _quota_reserve(self.ctxt, 'project1', 'user1')
        usage = db.quota_usage_get(self.ctxt, 'project1', 'resource1', 'user1')
        self.values = {'uuid': 'sample-uuid',
                       'project_id': 'project1',
                       'user_id': 'user1',
                       'resource': 'resource1',
                       'delta': 42,
                       'expire': timeutils.utcnow() + datetime.timedelta(days=1),
                       'usage': {'id': usage.id}}
    def test_reservation_commit(self):
        # Committing moves the reserved amounts into in_use and deletes
        # the reservation rows.
        expected = {'project_id': 'project1', 'user_id': 'user1',
                    'resource0': {'reserved': 0, 'in_use': 0},
                    'resource1': {'reserved': 1, 'in_use': 1},
                    'fixed_ips': {'reserved': 2, 'in_use': 2}}
        self.assertEqual(expected, db.quota_usage_get_all_by_project_and_user(
                                            self.ctxt, 'project1', 'user1'))
        _reservation_get(self.ctxt, self.reservations[0])
        db.reservation_commit(self.ctxt, self.reservations, 'project1',
                              'user1')
        self.assertRaises(exception.ReservationNotFound,
            _reservation_get, self.ctxt, self.reservations[0])
        expected = {'project_id': 'project1', 'user_id': 'user1',
                    'resource0': {'reserved': 0, 'in_use': 0},
                    'resource1': {'reserved': 0, 'in_use': 2},
                    'fixed_ips': {'reserved': 0, 'in_use': 4}}
        self.assertEqual(expected, db.quota_usage_get_all_by_project_and_user(
                                            self.ctxt, 'project1', 'user1'))
    def test_reservation_rollback(self):
        # Rolling back releases the reserved amounts without touching
        # in_use, and deletes the reservation rows.
        expected = {'project_id': 'project1', 'user_id': 'user1',
                    'resource0': {'reserved': 0, 'in_use': 0},
                    'resource1': {'reserved': 1, 'in_use': 1},
                    'fixed_ips': {'reserved': 2, 'in_use': 2}}
        self.assertEqual(expected, db.quota_usage_get_all_by_project_and_user(
                                            self.ctxt, 'project1', 'user1'))
        _reservation_get(self.ctxt, self.reservations[0])
        db.reservation_rollback(self.ctxt, self.reservations, 'project1',
                                'user1')
        self.assertRaises(exception.ReservationNotFound,
            _reservation_get, self.ctxt, self.reservations[0])
        expected = {'project_id': 'project1', 'user_id': 'user1',
                    'resource0': {'reserved': 0, 'in_use': 0},
                    'resource1': {'reserved': 0, 'in_use': 1},
                    'fixed_ips': {'reserved': 0, 'in_use': 2}}
        self.assertEqual(expected, db.quota_usage_get_all_by_project_and_user(
                                            self.ctxt, 'project1', 'user1'))
    def test_reservation_expire(self):
        # Expiring behaves like a rollback for the reserved counts.
        db.reservation_expire(self.ctxt)
        expected = {'project_id': 'project1', 'user_id': 'user1',
                    'resource0': {'reserved': 0, 'in_use': 0},
                    'resource1': {'reserved': 0, 'in_use': 1},
                    'fixed_ips': {'reserved': 0, 'in_use': 2}}
        self.assertEqual(expected, db.quota_usage_get_all_by_project_and_user(
                                            self.ctxt, 'project1', 'user1'))
class SecurityGroupRuleTestCase(test.TestCase, ModelsObjectComparatorMixin):
    """Tests for db.api.security_group_rule_* methods."""
    def setUp(self):
        super(SecurityGroupRuleTestCase, self).setUp()
        self.ctxt = context.get_admin_context()
    def _get_base_values(self):
        # Default values for a security group row.
        return {
            'name': 'fake_sec_group',
            'description': 'fake_sec_group_descr',
            'user_id': 'fake',
            'project_id': 'fake',
            'instances': []
            }
    def _get_base_rule_values(self):
        # Default values for a security group rule row.
        return {
            'protocol': "tcp",
            'from_port': 80,
            'to_port': 8080,
            'cidr': None,
            'deleted': 0,
            'deleted_at': None,
            'grantee_group': None,
            'updated_at': None
            }
    def _create_security_group(self, values):
        # Create a security group from base values overridden by `values`.
        v = self._get_base_values()
        v.update(values)
        return db.security_group_create(self.ctxt, v)
    def _create_security_group_rule(self, values):
        # Create a security group rule from base values overridden by
        # `values`.
        v = self._get_base_rule_values()
        v.update(values)
        return db.security_group_rule_create(self.ctxt, v)
    def test_security_group_rule_create(self):
        security_group_rule = self._create_security_group_rule({})
        self.assertIsNotNone(security_group_rule['id'])
        for key, value in self._get_base_rule_values().items():
            self.assertEqual(value, security_group_rule[key])
    def _test_security_group_rule_get_by_security_group(self, columns=None):
        # With columns_to_join=None the grantee group (and its instances'
        # system metadata) is joined in; with an empty list it is not.
        instance = db.instance_create(self.ctxt,
                                      {'system_metadata': {'foo': 'bar'}})
        security_group = self._create_security_group({
                'instances': [instance]})
        security_group_rule = self._create_security_group_rule(
            {'parent_group': security_group, 'grantee_group': security_group})
        security_group_rule1 = self._create_security_group_rule(
            {'parent_group': security_group, 'grantee_group': security_group})
        found_rules = db.security_group_rule_get_by_security_group(
            self.ctxt, security_group['id'], columns_to_join=columns)
        self.assertEqual(len(found_rules), 2)
        rules_ids = [security_group_rule['id'], security_group_rule1['id']]
        for rule in found_rules:
            if columns is None:
                self.assertIn('grantee_group', dict(rule))
                self.assertIn('instances',
                              dict(rule.grantee_group))
                self.assertIn(
                    'system_metadata',
                    dict(rule.grantee_group.instances[0]))
                self.assertIn(rule['id'], rules_ids)
            else:
                self.assertNotIn('grantee_group', dict(rule))
    def test_security_group_rule_get_by_security_group(self):
        self._test_security_group_rule_get_by_security_group()
    def test_security_group_rule_get_by_security_group_no_joins(self):
        self._test_security_group_rule_get_by_security_group(columns=[])
    def test_security_group_rule_destroy(self):
        self._create_security_group({'name': 'fake1'})
        self._create_security_group({'name': 'fake2'})
        security_group_rule1 = self._create_security_group_rule({})
        security_group_rule2 = self._create_security_group_rule({})
        db.security_group_rule_destroy(self.ctxt, security_group_rule1['id'])
        self.assertRaises(exception.SecurityGroupNotFound,
                          db.security_group_rule_get,
                          self.ctxt, security_group_rule1['id'])
        self._assertEqualObjects(db.security_group_rule_get(self.ctxt,
                                        security_group_rule2['id']),
                                 security_group_rule2, ['grantee_group'])
    def test_security_group_rule_destroy_not_found_exception(self):
        self.assertRaises(exception.SecurityGroupNotFound,
                          db.security_group_rule_destroy, self.ctxt, 100500)
    def test_security_group_rule_get(self):
        security_group_rule1 = (
                self._create_security_group_rule({}))
        self._create_security_group_rule({})
        real_security_group_rule = db.security_group_rule_get(self.ctxt,
                                        security_group_rule1['id'])
        self._assertEqualObjects(security_group_rule1,
                                 real_security_group_rule, ['grantee_group'])
    def test_security_group_rule_get_not_found_exception(self):
        self.assertRaises(exception.SecurityGroupNotFound,
                          db.security_group_rule_get, self.ctxt, 100500)
    def test_security_group_rule_count_by_group(self):
        # Ten rules per group; destroying one from sg1 leaves 9 vs 10.
        sg1 = self._create_security_group({'name': 'fake1'})
        sg2 = self._create_security_group({'name': 'fake2'})
        rules_by_group = {sg1: [], sg2: []}
        for group in rules_by_group:
            rules = rules_by_group[group]
            for i in range(0, 10):
                rules.append(
                    self._create_security_group_rule({'parent_group_id':
                                                    group['id']}))
        db.security_group_rule_destroy(self.ctxt,
                                       rules_by_group[sg1][0]['id'])
        counted_groups = [db.security_group_rule_count_by_group(self.ctxt,
                                                                group['id'])
                          for group in [sg1, sg2]]
        expected = [9, 10]
        self.assertEqual(counted_groups, expected)
class SecurityGroupTestCase(test.TestCase, ModelsObjectComparatorMixin):
    """Tests for db.api.security_group_* methods."""
    def setUp(self):
        super(SecurityGroupTestCase, self).setUp()
        self.ctxt = context.get_admin_context()
    def _get_base_values(self):
        # Default values for a security group row.
        return {
            'name': 'fake_sec_group',
            'description': 'fake_sec_group_descr',
            'user_id': 'fake',
            'project_id': 'fake',
            'instances': []
            }
    def _create_security_group(self, values):
        # Create a security group from base values overridden by `values`.
        v = self._get_base_values()
        v.update(values)
        return db.security_group_create(self.ctxt, v)
    def test_security_group_create(self):
        security_group = self._create_security_group({})
        self.assertIsNotNone(security_group['id'])
        for key, value in self._get_base_values().items():
            self.assertEqual(value, security_group[key])
    def test_security_group_destroy(self):
        security_group1 = self._create_security_group({})
        security_group2 = \
            self._create_security_group({'name': 'fake_sec_group2'})
        db.security_group_destroy(self.ctxt, security_group1['id'])
        self.assertRaises(exception.SecurityGroupNotFound,
                          db.security_group_get,
                          self.ctxt, security_group1['id'])
        self._assertEqualObjects(db.security_group_get(
                                    self.ctxt, security_group2['id'],
                                    columns_to_join=['instances']),
                                 security_group2)
    def test_security_group_get(self):
        security_group1 = self._create_security_group({})
        self._create_security_group({'name': 'fake_sec_group2'})
        real_security_group = db.security_group_get(self.ctxt,
                                              security_group1['id'],
                                              columns_to_join=['instances'])
        self._assertEqualObjects(security_group1,
                                 real_security_group)
    def test_security_group_get_with_instance_columns(self):
        # Joining 'instances.system_metadata' loads the nested relation.
        instance = db.instance_create(self.ctxt,
                                      {'system_metadata': {'foo': 'bar'}})
        secgroup = self._create_security_group({'instances': [instance]})
        secgroup = db.security_group_get(
            self.ctxt, secgroup['id'],
            columns_to_join=['instances.system_metadata'])
        inst = secgroup.instances[0]
        self.assertIn('system_metadata', dict(inst).keys())
    def test_security_group_get_no_instances(self):
        # 'instances' is only loaded when explicitly joined.
        instance = db.instance_create(self.ctxt, {})
        sid = self._create_security_group({'instances': [instance]})['id']
        security_group = db.security_group_get(self.ctxt, sid,
                                               columns_to_join=['instances'])
        self.assertIn('instances', security_group.__dict__)
        security_group = db.security_group_get(self.ctxt, sid)
        self.assertNotIn('instances', security_group.__dict__)
    def test_security_group_get_not_found_exception(self):
        self.assertRaises(exception.SecurityGroupNotFound,
                          db.security_group_get, self.ctxt, 100500)
    def test_security_group_get_by_name(self):
        security_group1 = self._create_security_group({'name': 'fake1'})
        security_group2 = self._create_security_group({'name': 'fake2'})
        real_security_group1 = db.security_group_get_by_name(
                                self.ctxt,
                                security_group1['project_id'],
                                security_group1['name'],
                                columns_to_join=None)
        real_security_group2 = db.security_group_get_by_name(
                                self.ctxt,
                                security_group2['project_id'],
                                security_group2['name'],
                                columns_to_join=None)
        self._assertEqualObjects(security_group1, real_security_group1)
        self._assertEqualObjects(security_group2, real_security_group2)
    def test_security_group_get_by_project(self):
        security_group1 = self._create_security_group(
                {'name': 'fake1', 'project_id': 'fake_proj1'})
        security_group2 = self._create_security_group(
                {'name': 'fake2', 'project_id': 'fake_proj2'})
        real1 = db.security_group_get_by_project(
                               self.ctxt,
                               security_group1['project_id'])
        real2 = db.security_group_get_by_project(
                               self.ctxt,
                               security_group2['project_id'])
        expected1, expected2 = [security_group1], [security_group2]
        self._assertEqualListsOfObjects(expected1, real1,
                                        ignored_keys=['instances'])
        self._assertEqualListsOfObjects(expected2, real2,
                                        ignored_keys=['instances'])
    def test_security_group_get_by_instance(self):
        instance = db.instance_create(self.ctxt, dict(host='foo'))
        values = [
            {'name': 'fake1', 'instances': [instance]},
            {'name': 'fake2', 'instances': [instance]},
            {'name': 'fake3', 'instances': []},
        ]
        security_groups = [self._create_security_group(vals)
                           for vals in values]
        real = db.security_group_get_by_instance(self.ctxt,
                                                 instance['uuid'])
        expected = security_groups[:2]
        self._assertEqualListsOfObjects(expected, real,
                                        ignored_keys=['instances'])
    def test_security_group_get_all(self):
        values = [
            {'name': 'fake1', 'project_id': 'fake_proj1'},
            {'name': 'fake2', 'project_id': 'fake_proj2'},
        ]
        security_groups = [self._create_security_group(vals)
                           for vals in values]
        real = db.security_group_get_all(self.ctxt)
        self._assertEqualListsOfObjects(security_groups, real,
                                        ignored_keys=['instances'])
    def test_security_group_in_use(self):
        instance = db.instance_create(self.ctxt, dict(host='foo'))
        values = [
            {'instances': [instance],
             'name': 'fake_in_use'},
            {'instances': []},
        ]
        security_groups = [self._create_security_group(vals)
                           for vals in values]
        real = []
        for security_group in security_groups:
            in_use = db.security_group_in_use(self.ctxt,
                                              security_group['id'])
            real.append(in_use)
        expected = [True, False]
        self.assertEqual(expected, real)
    def test_security_group_ensure_default(self):
        # Creating the default group also bumps the quota usage counter.
        self.ctxt.project_id = 'fake'
        self.ctxt.user_id = 'fake'
        self.assertEqual(0, len(db.security_group_get_by_project(
                                    self.ctxt,
                                    self.ctxt.project_id)))
        db.security_group_ensure_default(self.ctxt)
        security_groups = db.security_group_get_by_project(
                            self.ctxt,
                            self.ctxt.project_id)
        self.assertEqual(1, len(security_groups))
        self.assertEqual("default", security_groups[0]["name"])
        usage = db.quota_usage_get(self.ctxt,
                                   self.ctxt.project_id,
                                   'security_groups',
                                   self.ctxt.user_id)
        self.assertEqual(1, usage.in_use)
    def test_security_group_ensure_default_until_refresh(self):
        self.flags(until_refresh=2)
        self.ctxt.project_id = 'fake'
        self.ctxt.user_id = 'fake'
        db.security_group_ensure_default(self.ctxt)
        usage = db.quota_usage_get(self.ctxt,
                                   self.ctxt.project_id,
                                   'security_groups',
                                   self.ctxt.user_id)
        self.assertEqual(2, usage.until_refresh)
    @mock.patch.object(db.sqlalchemy.api, '_security_group_get_by_names')
    def test_security_group_ensure_default_called_concurrently(self, sg_mock):
        # make sure NotFound is always raised here to trick Nova to insert the
        # duplicate security group entry
        sg_mock.side_effect = exception.NotFound
        # create the first db entry
        self.ctxt.project_id = 1
        db.security_group_ensure_default(self.ctxt)
        security_groups = db.security_group_get_by_project(
                            self.ctxt,
                            self.ctxt.project_id)
        self.assertEqual(1, len(security_groups))
        # create the second one and ensure the exception is handled properly
        default_group = db.security_group_ensure_default(self.ctxt)
        self.assertEqual('default', default_group.name)
    def test_security_group_update(self):
        security_group = self._create_security_group({})
        new_values = {
                    'name': 'sec_group1',
                    'description': 'sec_group_descr1',
                    'user_id': 'fake_user1',
                    'project_id': 'fake_proj1',
        }
        updated_group = db.security_group_update(self.ctxt,
                                    security_group['id'],
                                    new_values,
                                    columns_to_join=['rules.grantee_group'])
        for key, value in new_values.items():
            self.assertEqual(updated_group[key], value)
        self.assertEqual(updated_group['rules'], [])
    def test_security_group_update_to_duplicate(self):
        # Same name is allowed across projects but not within one project.
        self._create_security_group(
                {'name': 'fake1', 'project_id': 'fake_proj1'})
        security_group2 = self._create_security_group(
                {'name': 'fake1', 'project_id': 'fake_proj2'})
        self.assertRaises(exception.SecurityGroupExists,
                          db.security_group_update,
                          self.ctxt, security_group2['id'],
                          {'project_id': 'fake_proj1'})
class InstanceTestCase(test.TestCase, ModelsObjectComparatorMixin):
    """Tests for db.api.instance_* methods."""
    # Default values used by create_instance_with_args; individual tests
    # override fields via keyword arguments.
    sample_data = {
        'project_id': 'project1',
        'hostname': 'example.com',
        'host': 'h1',
        'node': 'n1',
        'metadata': {'mkey1': 'mval1', 'mkey2': 'mval2'},
        'system_metadata': {'smkey1': 'smval1', 'smkey2': 'smval2'},
        'info_cache': {'ckey': 'cvalue'},
    }
    def setUp(self):
        super(InstanceTestCase, self).setUp()
        self.ctxt = context.get_admin_context()
    def _assertEqualInstances(self, instance1, instance2):
        # Compare instances ignoring joined/derived fields.
        self._assertEqualObjects(instance1, instance2,
                ignored_keys=['metadata', 'system_metadata', 'info_cache',
                              'extra'])
    def _assertEqualListsOfInstances(self, list1, list2):
        # Order-insensitive comparison, ignoring joined/derived fields.
        self._assertEqualListsOfObjects(list1, list2,
                ignored_keys=['metadata', 'system_metadata', 'info_cache',
                              'extra'])
    def create_instance_with_args(self, **kwargs):
        # Create an instance from sample_data with per-test overrides;
        # an alternate request context may be passed via 'context'.
        if 'context' in kwargs:
            context = kwargs.pop('context')
        else:
            context = self.ctxt
        args = self.sample_data.copy()
        args.update(kwargs)
        return db.instance_create(context, args)
    def test_instance_create(self):
        instance = self.create_instance_with_args()
        self.assertTrue(uuidutils.is_uuid_like(instance['uuid']))
    def test_instance_create_with_object_values(self):
        # netaddr IP objects and tz-aware datetimes are accepted and come
        # back as plain strings / naive datetimes.
        values = {
            'access_ip_v4': netaddr.IPAddress('1.2.3.4'),
            'access_ip_v6': netaddr.IPAddress('::1'),
            }
        dt_keys = ('created_at', 'deleted_at', 'updated_at',
                   'launched_at', 'terminated_at')
        dt = timeutils.utcnow()
        dt_utc = dt.replace(tzinfo=iso8601.iso8601.Utc())
        for key in dt_keys:
            values[key] = dt_utc
        inst = db.instance_create(self.ctxt, values)
        self.assertEqual(inst['access_ip_v4'], '1.2.3.4')
        self.assertEqual(inst['access_ip_v6'], '::1')
        for key in dt_keys:
            self.assertEqual(inst[key], dt)
    def test_instance_update_with_object_values(self):
        # Same conversions as above, but on update.
        values = {
            'access_ip_v4': netaddr.IPAddress('1.2.3.4'),
            'access_ip_v6': netaddr.IPAddress('::1'),
            }
        dt_keys = ('created_at', 'deleted_at', 'updated_at',
                   'launched_at', 'terminated_at')
        dt = timeutils.utcnow()
        dt_utc = dt.replace(tzinfo=iso8601.iso8601.Utc())
        for key in dt_keys:
            values[key] = dt_utc
        inst = db.instance_create(self.ctxt, {})
        inst = db.instance_update(self.ctxt, inst['uuid'], values)
        self.assertEqual(inst['access_ip_v4'], '1.2.3.4')
        self.assertEqual(inst['access_ip_v6'], '::1')
        for key in dt_keys:
            self.assertEqual(inst[key], dt)
    def test_instance_update_no_metadata_clobber(self):
        # Updating with metadata dicts replaces them rather than erroring
        # or merging incorrectly.
        meta = {'foo': 'bar'}
        sys_meta = {'sfoo': 'sbar'}
        values = {
            'metadata': meta,
            'system_metadata': sys_meta,
            }
        inst = db.instance_create(self.ctxt, {})
        inst = db.instance_update(self.ctxt, inst['uuid'], values)
        self.assertEqual(meta, utils.metadata_to_dict(inst['metadata']))
        self.assertEqual(sys_meta,
                         utils.metadata_to_dict(inst['system_metadata']))
    def test_instance_get_all_with_meta(self):
        self.create_instance_with_args()
        for inst in db.instance_get_all(self.ctxt):
            meta = utils.metadata_to_dict(inst['metadata'])
            self.assertEqual(meta, self.sample_data['metadata'])
            sys_meta = utils.metadata_to_dict(inst['system_metadata'])
            self.assertEqual(sys_meta, self.sample_data['system_metadata'])
    def test_instance_update(self):
        instance = self.create_instance_with_args()
        metadata = {'host': 'bar', 'key2': 'wuff'}
        system_metadata = {'original_image_ref': 'baz'}
        # Update the metadata
        db.instance_update(self.ctxt, instance['uuid'], {'metadata': metadata,
                           'system_metadata': system_metadata})
        # Retrieve the user-provided metadata to ensure it was successfully
        # updated
        self.assertEqual(metadata,
                db.instance_metadata_get(self.ctxt, instance['uuid']))
        self.assertEqual(system_metadata,
                db.instance_system_metadata_get(self.ctxt, instance['uuid']))
    def test_instance_update_bad_str_dates(self):
        # Non-parsable date strings are rejected.
        instance = self.create_instance_with_args()
        values = {'created_at': '123'}
        self.assertRaises(ValueError,
                          db.instance_update,
                          self.ctxt, instance['uuid'], values)
    def test_instance_update_good_str_dates(self):
        # ISO-formatted date strings are parsed into datetimes.
        instance = self.create_instance_with_args()
        values = {'created_at': '2011-01-31T00:00:00.0'}
        actual = db.instance_update(self.ctxt, instance['uuid'], values)
        expected = datetime.datetime(2011, 1, 31)
        self.assertEqual(expected, actual["created_at"])
    def test_create_instance_unique_hostname(self):
        context1 = context.RequestContext('user1', 'p1')
        context2 = context.RequestContext('user2', 'p2')
        self.create_instance_with_args(hostname='h1', project_id='p1')
        # With scope 'global' any duplicate should fail, be it this project:
        self.flags(osapi_compute_unique_server_name_scope='global')
        self.assertRaises(exception.InstanceExists,
                          self.create_instance_with_args,
                          context=context1,
                          hostname='h1', project_id='p3')
        # or another:
        self.assertRaises(exception.InstanceExists,
                          self.create_instance_with_args,
                          context=context2,
                          hostname='h1', project_id='p2')
        # With scope 'project' a duplicate in the project should fail:
        self.flags(osapi_compute_unique_server_name_scope='project')
        self.assertRaises(exception.InstanceExists,
                          self.create_instance_with_args,
                          context=context1,
                          hostname='h1', project_id='p1')
        # With scope 'project' a duplicate in a different project should work:
        self.flags(osapi_compute_unique_server_name_scope='project')
        self.create_instance_with_args(context=context2, hostname='h2')
        self.flags(osapi_compute_unique_server_name_scope=None)
    def test_instance_get_all_by_filters_empty_list_filter(self):
        # An empty uuid list matches nothing.
        filters = {'uuid': []}
        instances = db.instance_get_all_by_filters_sort(self.ctxt, filters)
        self.assertEqual([], instances)
    @mock.patch('nova.db.sqlalchemy.api.undefer')
    @mock.patch('nova.db.sqlalchemy.api.joinedload')
    def test_instance_get_all_by_filters_extra_columns(self,
                                                       mock_joinedload,
                                                       mock_undefer):
        # 'extra.*' columns use undefer; plain relations use joinedload.
        db.instance_get_all_by_filters_sort(
            self.ctxt, {},
            columns_to_join=['info_cache', 'extra.pci_requests'])
        mock_joinedload.assert_called_once_with('info_cache')
        mock_undefer.assert_called_once_with('extra.pci_requests')
    @mock.patch('nova.db.sqlalchemy.api.undefer')
    @mock.patch('nova.db.sqlalchemy.api.joinedload')
    def test_instance_get_active_by_window_extra_columns(self,
                                                         mock_joinedload,
                                                         mock_undefer):
        now = datetime.datetime(2013, 10, 10, 17, 16, 37, 156701)
        db.instance_get_active_by_window_joined(
            self.ctxt, now,
            columns_to_join=['info_cache', 'extra.pci_requests'])
        mock_joinedload.assert_called_once_with('info_cache')
        mock_undefer.assert_called_once_with('extra.pci_requests')
    def test_instance_get_all_by_filters_with_meta(self):
        self.create_instance_with_args()
        for inst in db.instance_get_all_by_filters(self.ctxt, {}):
            meta = utils.metadata_to_dict(inst['metadata'])
            self.assertEqual(meta, self.sample_data['metadata'])
            sys_meta = utils.metadata_to_dict(inst['system_metadata'])
            self.assertEqual(sys_meta, self.sample_data['system_metadata'])
    def test_instance_get_all_by_filters_without_meta(self):
        # With no columns joined, metadata comes back empty.
        self.create_instance_with_args()
        result = db.instance_get_all_by_filters(self.ctxt, {},
                                                columns_to_join=[])
        for inst in result:
            meta = utils.metadata_to_dict(inst['metadata'])
            self.assertEqual(meta, {})
            sys_meta = utils.metadata_to_dict(inst['system_metadata'])
            self.assertEqual(sys_meta, {})
    def test_instance_get_all_by_filters(self):
        instances = [self.create_instance_with_args() for i in range(3)]
        filtered_instances = db.instance_get_all_by_filters(self.ctxt, {})
        self._assertEqualListsOfInstances(instances, filtered_instances)
    def test_instance_get_all_by_filters_zero_limit(self):
        self.create_instance_with_args()
        instances = db.instance_get_all_by_filters(self.ctxt, {}, limit=0)
        self.assertEqual([], instances)
    def test_instance_metadata_get_multi(self):
        uuids = [self.create_instance_with_args()['uuid'] for i in range(3)]
        meta = sqlalchemy_api._instance_metadata_get_multi(self.ctxt, uuids)
        for row in meta:
            self.assertIn(row['instance_uuid'], uuids)
    def test_instance_metadata_get_multi_no_uuids(self):
        # No query filter should be issued when the uuid list is empty.
        self.mox.StubOutWithMock(query.Query, 'filter')
        self.mox.ReplayAll()
        sqlalchemy_api._instance_metadata_get_multi(self.ctxt, [])
    def test_instance_system_system_metadata_get_multi(self):
        uuids = [self.create_instance_with_args()['uuid'] for i in range(3)]
        sys_meta = sqlalchemy_api._instance_system_metadata_get_multi(
            self.ctxt, uuids)
        for row in sys_meta:
            self.assertIn(row['instance_uuid'], uuids)
    def test_instance_system_metadata_get_multi_no_uuids(self):
        # No query filter should be issued when the uuid list is empty.
        self.mox.StubOutWithMock(query.Query, 'filter')
        self.mox.ReplayAll()
        sqlalchemy_api._instance_system_metadata_get_multi(self.ctxt, [])
    def test_instance_get_all_by_filters_regex(self):
        # display_name filters are treated as regular expressions.
        i1 = self.create_instance_with_args(display_name='test1')
        i2 = self.create_instance_with_args(display_name='teeeest2')
        self.create_instance_with_args(display_name='diff')
        result = db.instance_get_all_by_filters(self.ctxt,
                                                {'display_name': 't.*st.'})
        self._assertEqualListsOfInstances(result, [i1, i2])
    def test_instance_get_all_by_filters_changes_since(self):
        # 'changes-since' matches instances updated at or after the given
        # time, and also returns deleted instances.
        i1 = self.create_instance_with_args(updated_at=
                                            '2013-12-05T15:03:25.000000')
        i2 = self.create_instance_with_args(updated_at=
                                            '2013-12-05T15:03:26.000000')
        changes_since = iso8601.parse_date('2013-12-05T15:03:25.000000')
        result = db.instance_get_all_by_filters(self.ctxt,
                                                {'changes-since':
                                                 changes_since})
        self._assertEqualListsOfInstances([i1, i2], result)
        changes_since = iso8601.parse_date('2013-12-05T15:03:26.000000')
        result = db.instance_get_all_by_filters(self.ctxt,
                                                {'changes-since':
                                                 changes_since})
        self._assertEqualListsOfInstances([i2], result)
        db.instance_destroy(self.ctxt, i1['uuid'])
        filters = {}
        filters['changes-since'] = changes_since
        filters['marker'] = i1['uuid']
        result = db.instance_get_all_by_filters(self.ctxt,
                                                filters)
        self._assertEqualListsOfInstances([i2], result)
    def test_instance_get_all_by_filters_exact_match(self):
        # 'host' filters by exact value, not by prefix.
        instance = self.create_instance_with_args(host='host1')
        self.create_instance_with_args(host='host12')
        result = db.instance_get_all_by_filters(self.ctxt,
                                                {'host': 'host1'})
        self._assertEqualListsOfInstances([instance], result)
    def test_instance_get_all_by_filters_metadata(self):
        instance = self.create_instance_with_args(metadata={'foo': 'bar'})
        self.create_instance_with_args()
        result = db.instance_get_all_by_filters(self.ctxt,
                                                {'metadata': {'foo': 'bar'}})
        self._assertEqualListsOfInstances([instance], result)
    def test_instance_get_all_by_filters_system_metadata(self):
        instance = self.create_instance_with_args(
                system_metadata={'foo': 'bar'})
        self.create_instance_with_args()
        result = db.instance_get_all_by_filters(self.ctxt,
                {'system_metadata': {'foo': 'bar'}})
        self._assertEqualListsOfInstances([instance], result)
def test_instance_get_all_by_filters_unicode_value(self):
i1 = self.create_instance_with_args(display_name=u'test♥')
i2 = self.create_instance_with_args(display_name=u'test')
i3 = self.create_instance_with_args(display_name=u'test♥test')
self.create_instance_with_args(display_name='diff')
result = db.instance_get_all_by_filters(self.ctxt,
{'display_name': u'test'})
self._assertEqualListsOfInstances([i1, i2, i3], result)
result = db.instance_get_all_by_filters(self.ctxt,
{'display_name': u'test♥'})
self._assertEqualListsOfInstances(result, [i1, i3])
def test_instance_get_all_by_filters_tags(self):
instance = self.create_instance_with_args(
metadata={'foo': 'bar'})
self.create_instance_with_args()
# For format 'tag-'
result = db.instance_get_all_by_filters(
self.ctxt, {'filter': [
{'name': 'tag-key', 'value': 'foo'},
{'name': 'tag-value', 'value': 'bar'},
]})
self._assertEqualListsOfInstances([instance], result)
# For format 'tag:'
result = db.instance_get_all_by_filters(
self.ctxt, {'filter': [
{'name': 'tag:foo', 'value': 'bar'},
]})
self._assertEqualListsOfInstances([instance], result)
# For non-existent tag
result = db.instance_get_all_by_filters(
self.ctxt, {'filter': [
{'name': 'tag:foo', 'value': 'barred'},
]})
self.assertEqual([], result)
# Confirm with deleted tags
db.instance_metadata_delete(self.ctxt, instance['uuid'], 'foo')
# For format 'tag-'
result = db.instance_get_all_by_filters(
self.ctxt, {'filter': [
{'name': 'tag-key', 'value': 'foo'},
]})
self.assertEqual([], result)
result = db.instance_get_all_by_filters(
self.ctxt, {'filter': [
{'name': 'tag-value', 'value': 'bar'}
]})
self.assertEqual([], result)
# For format 'tag:'
result = db.instance_get_all_by_filters(
self.ctxt, {'filter': [
{'name': 'tag:foo', 'value': 'bar'},
]})
self.assertEqual([], result)
def test_instance_get_by_uuid(self):
inst = self.create_instance_with_args()
result = db.instance_get_by_uuid(self.ctxt, inst['uuid'])
self._assertEqualInstances(inst, result)
def test_instance_get_by_uuid_join_empty(self):
inst = self.create_instance_with_args()
result = db.instance_get_by_uuid(self.ctxt, inst['uuid'],
columns_to_join=[])
meta = utils.metadata_to_dict(result['metadata'])
self.assertEqual(meta, {})
sys_meta = utils.metadata_to_dict(result['system_metadata'])
self.assertEqual(sys_meta, {})
def test_instance_get_by_uuid_join_meta(self):
inst = self.create_instance_with_args()
result = db.instance_get_by_uuid(self.ctxt, inst['uuid'],
columns_to_join=['metadata'])
meta = utils.metadata_to_dict(result['metadata'])
self.assertEqual(meta, self.sample_data['metadata'])
sys_meta = utils.metadata_to_dict(result['system_metadata'])
self.assertEqual(sys_meta, {})
def test_instance_get_by_uuid_join_sys_meta(self):
inst = self.create_instance_with_args()
result = db.instance_get_by_uuid(self.ctxt, inst['uuid'],
columns_to_join=['system_metadata'])
meta = utils.metadata_to_dict(result['metadata'])
self.assertEqual(meta, {})
sys_meta = utils.metadata_to_dict(result['system_metadata'])
self.assertEqual(sys_meta, self.sample_data['system_metadata'])
def test_instance_get_all_by_filters_deleted(self):
inst1 = self.create_instance_with_args()
inst2 = self.create_instance_with_args(reservation_id='b')
db.instance_destroy(self.ctxt, inst1['uuid'])
result = db.instance_get_all_by_filters(self.ctxt, {})
self._assertEqualListsOfObjects([inst1, inst2], result,
ignored_keys=['metadata', 'system_metadata',
'deleted', 'deleted_at', 'info_cache',
'pci_devices', 'extra'])
def test_instance_get_all_by_filters_deleted_and_soft_deleted(self):
inst1 = self.create_instance_with_args()
inst2 = self.create_instance_with_args(vm_state=vm_states.SOFT_DELETED)
self.create_instance_with_args()
db.instance_destroy(self.ctxt, inst1['uuid'])
result = db.instance_get_all_by_filters(self.ctxt,
{'deleted': True})
self._assertEqualListsOfObjects([inst1, inst2], result,
ignored_keys=['metadata', 'system_metadata',
'deleted', 'deleted_at', 'info_cache',
'pci_devices', 'extra'])
def test_instance_get_all_by_filters_deleted_no_soft_deleted(self):
inst1 = self.create_instance_with_args()
self.create_instance_with_args(vm_state=vm_states.SOFT_DELETED)
self.create_instance_with_args()
db.instance_destroy(self.ctxt, inst1['uuid'])
result = db.instance_get_all_by_filters(self.ctxt,
{'deleted': True,
'soft_deleted': False})
self._assertEqualListsOfObjects([inst1], result,
ignored_keys=['deleted', 'deleted_at', 'metadata',
'system_metadata', 'info_cache', 'pci_devices',
'extra'])
def test_instance_get_all_by_filters_alive_and_soft_deleted(self):
inst1 = self.create_instance_with_args()
inst2 = self.create_instance_with_args(vm_state=vm_states.SOFT_DELETED)
inst3 = self.create_instance_with_args()
db.instance_destroy(self.ctxt, inst1['uuid'])
result = db.instance_get_all_by_filters(self.ctxt,
{'deleted': False,
'soft_deleted': True})
self._assertEqualListsOfInstances([inst2, inst3], result)
def test_instance_get_all_by_filters_not_deleted(self):
inst1 = self.create_instance_with_args()
self.create_instance_with_args(vm_state=vm_states.SOFT_DELETED)
inst3 = self.create_instance_with_args()
inst4 = self.create_instance_with_args(vm_state=vm_states.ACTIVE)
db.instance_destroy(self.ctxt, inst1['uuid'])
result = db.instance_get_all_by_filters(self.ctxt,
{'deleted': False})
self.assertIsNone(inst3.vm_state)
self._assertEqualListsOfInstances([inst3, inst4], result)
def test_instance_get_all_by_filters_cleaned(self):
inst1 = self.create_instance_with_args()
inst2 = self.create_instance_with_args(reservation_id='b')
db.instance_update(self.ctxt, inst1['uuid'], {'cleaned': 1})
result = db.instance_get_all_by_filters(self.ctxt, {})
self.assertEqual(2, len(result))
self.assertIn(inst1['uuid'], [result[0]['uuid'], result[1]['uuid']])
self.assertIn(inst2['uuid'], [result[0]['uuid'], result[1]['uuid']])
if inst1['uuid'] == result[0]['uuid']:
self.assertTrue(result[0]['cleaned'])
self.assertFalse(result[1]['cleaned'])
else:
self.assertTrue(result[1]['cleaned'])
self.assertFalse(result[0]['cleaned'])
def test_instance_get_all_by_filters_tag_any(self):
inst1 = self.create_instance_with_args()
inst2 = self.create_instance_with_args()
inst3 = self.create_instance_with_args()
t1 = 'tag1'
t2 = 'tag2'
t3 = 'tag3'
db.instance_tag_set(self.ctxt, inst1.uuid, [t1])
db.instance_tag_set(self.ctxt, inst2.uuid, [t1, t2, t3])
db.instance_tag_set(self.ctxt, inst3.uuid, [t3])
result = db.instance_get_all_by_filters(self.ctxt,
{'tags-any': [t1, t2]})
self._assertEqualListsOfObjects([inst1, inst2], result,
ignored_keys=['deleted', 'deleted_at', 'metadata', 'extra',
'system_metadata', 'info_cache', 'pci_devices'])
def test_instance_get_all_by_filters_tag_any_empty(self):
inst1 = self.create_instance_with_args()
inst2 = self.create_instance_with_args()
t1 = 'tag1'
t2 = 'tag2'
t3 = 'tag3'
t4 = 'tag4'
db.instance_tag_set(self.ctxt, inst1.uuid, [t1])
db.instance_tag_set(self.ctxt, inst2.uuid, [t1, t2])
result = db.instance_get_all_by_filters(self.ctxt,
{'tags-any': [t3, t4]})
self.assertEqual([], result)
def test_instance_get_all_by_filters_tag(self):
inst1 = self.create_instance_with_args()
inst2 = self.create_instance_with_args()
inst3 = self.create_instance_with_args()
t1 = 'tag1'
t2 = 'tag2'
t3 = 'tag3'
db.instance_tag_set(self.ctxt, inst1.uuid, [t1, t3])
db.instance_tag_set(self.ctxt, inst2.uuid, [t1, t2])
db.instance_tag_set(self.ctxt, inst3.uuid, [t1, t2, t3])
result = db.instance_get_all_by_filters(self.ctxt,
{'tags': [t1, t2]})
self._assertEqualListsOfObjects([inst2, inst3], result,
ignored_keys=['deleted', 'deleted_at', 'metadata', 'extra',
'system_metadata', 'info_cache', 'pci_devices'])
def test_instance_get_all_by_filters_tag_empty(self):
inst1 = self.create_instance_with_args()
inst2 = self.create_instance_with_args()
t1 = 'tag1'
t2 = 'tag2'
t3 = 'tag3'
db.instance_tag_set(self.ctxt, inst1.uuid, [t1])
db.instance_tag_set(self.ctxt, inst2.uuid, [t1, t2])
result = db.instance_get_all_by_filters(self.ctxt,
{'tags': [t3]})
self.assertEqual([], result)
def test_instance_get_all_by_filters_tag_any_and_tag(self):
inst1 = self.create_instance_with_args()
inst2 = self.create_instance_with_args()
inst3 = self.create_instance_with_args()
t1 = 'tag1'
t2 = 'tag2'
t3 = 'tag3'
t4 = 'tag4'
db.instance_tag_set(self.ctxt, inst1.uuid, [t1, t2])
db.instance_tag_set(self.ctxt, inst2.uuid, [t1, t2, t4])
db.instance_tag_set(self.ctxt, inst3.uuid, [t2, t3])
result = db.instance_get_all_by_filters(self.ctxt,
{'tags': [t1, t2],
'tags-any': [t3, t4]})
self._assertEqualListsOfObjects([inst2], result,
ignored_keys=['deleted', 'deleted_at', 'metadata', 'extra',
'system_metadata', 'info_cache', 'pci_devices'])
def test_instance_get_all_by_host_and_node_no_join(self):
instance = self.create_instance_with_args()
result = db.instance_get_all_by_host_and_node(self.ctxt, 'h1', 'n1')
self.assertEqual(result[0]['uuid'], instance['uuid'])
self.assertEqual(result[0]['system_metadata'], [])
def test_instance_get_all_by_host_and_node(self):
instance = self.create_instance_with_args(
system_metadata={'foo': 'bar'})
result = db.instance_get_all_by_host_and_node(
self.ctxt, 'h1', 'n1',
columns_to_join=['system_metadata', 'extra'])
self.assertEqual(instance['uuid'], result[0]['uuid'])
self.assertEqual('bar', result[0]['system_metadata'][0]['value'])
self.assertEqual(instance['uuid'], result[0]['extra']['instance_uuid'])
@mock.patch('nova.db.sqlalchemy.api._instances_fill_metadata')
@mock.patch('nova.db.sqlalchemy.api._instance_get_all_query')
def test_instance_get_all_by_host_and_node_fills_manually(self,
mock_getall,
mock_fill):
db.instance_get_all_by_host_and_node(
self.ctxt, 'h1', 'n1',
columns_to_join=['metadata', 'system_metadata', 'extra', 'foo'])
self.assertEqual(sorted(['extra', 'foo']),
sorted(mock_getall.call_args[1]['joins']))
self.assertEqual(sorted(['metadata', 'system_metadata']),
sorted(mock_fill.call_args[1]['manual_joins']))
def _get_base_values(self):
return {
'name': 'fake_sec_group',
'description': 'fake_sec_group_descr',
'user_id': 'fake',
'project_id': 'fake',
'instances': []
}
def _get_base_rule_values(self):
return {
'protocol': "tcp",
'from_port': 80,
'to_port': 8080,
'cidr': None,
'deleted': 0,
'deleted_at': None,
'grantee_group': None,
'updated_at': None
}
def _create_security_group(self, values):
v = self._get_base_values()
v.update(values)
return db.security_group_create(self.ctxt, v)
def _create_security_group_rule(self, values):
v = self._get_base_rule_values()
v.update(values)
return db.security_group_rule_create(self.ctxt, v)
def test_instance_get_all_by_grantee_security_groups(self):
instance1 = self.create_instance_with_args()
instance2 = self.create_instance_with_args()
instance3 = self.create_instance_with_args()
secgroup1 = self._create_security_group(
{'name': 'fake-secgroup1', 'instances': [instance1]})
secgroup2 = self._create_security_group(
{'name': 'fake-secgroup2', 'instances': [instance1]})
secgroup3 = self._create_security_group(
{'name': 'fake-secgroup3', 'instances': [instance2]})
secgroup4 = self._create_security_group(
{'name': 'fake-secgroup4', 'instances': [instance2, instance3]})
self._create_security_group_rule({'grantee_group': secgroup1,
'parent_group': secgroup3})
self._create_security_group_rule({'grantee_group': secgroup2,
'parent_group': secgroup4})
group_ids = [secgroup['id'] for secgroup in [secgroup1, secgroup2]]
instances = db.instance_get_all_by_grantee_security_groups(self.ctxt,
group_ids)
instance_uuids = [instance['uuid'] for instance in instances]
self.assertEqual(len(instances), 2)
self.assertIn(instance2['uuid'], instance_uuids)
self.assertIn(instance3['uuid'], instance_uuids)
def test_instance_get_all_by_grantee_security_groups_empty_group_ids(self):
results = db.instance_get_all_by_grantee_security_groups(self.ctxt, [])
self.assertEqual([], results)
def test_instance_get_all_hung_in_rebooting(self):
# Ensure no instances are returned.
results = db.instance_get_all_hung_in_rebooting(self.ctxt, 10)
self.assertEqual([], results)
# Ensure one rebooting instance with updated_at older than 10 seconds
# is returned.
instance = self.create_instance_with_args(task_state="rebooting",
updated_at=datetime.datetime(2000, 1, 1, 12, 0, 0))
results = db.instance_get_all_hung_in_rebooting(self.ctxt, 10)
self._assertEqualListsOfObjects([instance], results,
ignored_keys=['task_state', 'info_cache', 'security_groups',
'metadata', 'system_metadata', 'pci_devices',
'extra'])
db.instance_update(self.ctxt, instance['uuid'], {"task_state": None})
# Ensure the newly rebooted instance is not returned.
self.create_instance_with_args(task_state="rebooting",
updated_at=timeutils.utcnow())
results = db.instance_get_all_hung_in_rebooting(self.ctxt, 10)
self.assertEqual([], results)
def test_instance_update_with_expected_vm_state(self):
instance = self.create_instance_with_args(vm_state='foo')
db.instance_update(self.ctxt, instance['uuid'], {'host': 'h1',
'expected_vm_state': ('foo', 'bar')})
def test_instance_update_with_unexpected_vm_state(self):
instance = self.create_instance_with_args(vm_state='foo')
self.assertRaises(exception.InstanceUpdateConflict,
db.instance_update, self.ctxt, instance['uuid'],
{'host': 'h1', 'expected_vm_state': ('spam', 'bar')})
def test_instance_update_with_instance_uuid(self):
# test instance_update() works when an instance UUID is passed.
ctxt = context.get_admin_context()
# Create an instance with some metadata
values = {'metadata': {'host': 'foo', 'key1': 'meow'},
'system_metadata': {'original_image_ref': 'blah'}}
instance = db.instance_create(ctxt, values)
# Update the metadata
values = {'metadata': {'host': 'bar', 'key2': 'wuff'},
'system_metadata': {'original_image_ref': 'baz'}}
db.instance_update(ctxt, instance['uuid'], values)
# Retrieve the user-provided metadata to ensure it was successfully
# updated
instance_meta = db.instance_metadata_get(ctxt, instance['uuid'])
self.assertEqual('bar', instance_meta['host'])
self.assertEqual('wuff', instance_meta['key2'])
self.assertNotIn('key1', instance_meta)
# Retrieve the system metadata to ensure it was successfully updated
system_meta = db.instance_system_metadata_get(ctxt, instance['uuid'])
self.assertEqual('baz', system_meta['original_image_ref'])
def test_delete_instance_metadata_on_instance_destroy(self):
ctxt = context.get_admin_context()
# Create an instance with some metadata
values = {'metadata': {'host': 'foo', 'key1': 'meow'},
'system_metadata': {'original_image_ref': 'blah'}}
instance = db.instance_create(ctxt, values)
instance_meta = db.instance_metadata_get(ctxt, instance['uuid'])
self.assertEqual('foo', instance_meta['host'])
self.assertEqual('meow', instance_meta['key1'])
db.instance_destroy(ctxt, instance['uuid'])
instance_meta = db.instance_metadata_get(ctxt, instance['uuid'])
# Make sure instance metadata is deleted as well
self.assertEqual({}, instance_meta)
def test_delete_instance_faults_on_instance_destroy(self):
ctxt = context.get_admin_context()
uuid = str(stdlib_uuid.uuid4())
# Create faults
db.instance_create(ctxt, {'uuid': uuid})
fault_values = {
'message': 'message',
'details': 'detail',
'instance_uuid': uuid,
'code': 404,
'host': 'localhost'
}
fault = db.instance_fault_create(ctxt, fault_values)
# Retrieve the fault to ensure it was successfully added
faults = db.instance_fault_get_by_instance_uuids(ctxt, [uuid])
self.assertEqual(1, len(faults[uuid]))
self._assertEqualObjects(fault, faults[uuid][0])
db.instance_destroy(ctxt, uuid)
faults = db.instance_fault_get_by_instance_uuids(ctxt, [uuid])
# Make sure instance faults is deleted as well
self.assertEqual(0, len(faults[uuid]))
def test_instance_update_and_get_original(self):
instance = self.create_instance_with_args(vm_state='building')
(old_ref, new_ref) = db.instance_update_and_get_original(self.ctxt,
instance['uuid'], {'vm_state': 'needscoffee'})
self.assertEqual('building', old_ref['vm_state'])
self.assertEqual('needscoffee', new_ref['vm_state'])
def test_instance_update_and_get_original_metadata(self):
instance = self.create_instance_with_args()
columns_to_join = ['metadata']
(old_ref, new_ref) = db.instance_update_and_get_original(
self.ctxt, instance['uuid'], {'vm_state': 'needscoffee'},
columns_to_join=columns_to_join)
meta = utils.metadata_to_dict(new_ref['metadata'])
self.assertEqual(meta, self.sample_data['metadata'])
sys_meta = utils.metadata_to_dict(new_ref['system_metadata'])
self.assertEqual(sys_meta, {})
def test_instance_update_and_get_original_metadata_none_join(self):
instance = self.create_instance_with_args()
(old_ref, new_ref) = db.instance_update_and_get_original(
self.ctxt, instance['uuid'], {'metadata': {'mk1': 'mv3'}})
meta = utils.metadata_to_dict(new_ref['metadata'])
self.assertEqual(meta, {'mk1': 'mv3'})
    def test_instance_update_and_get_original_no_conflict_on_session(self):
        # The "old" ref returned must be a detached copy so that both
        # refs can coexist without fighting over the same SQLAlchemy
        # session identity.
        session = get_session()
        # patch get_session so that we may inspect it outside of the
        # method; once enginefacade is implemented, this can be simplified
        with mock.patch("nova.db.sqlalchemy.api.get_session", lambda: session):
            instance = self.create_instance_with_args()
            (old_ref, new_ref) = db.instance_update_and_get_original(
                self.ctxt, instance['uuid'], {'metadata': {'mk1': 'mv3'}})
        # test some regular persisted fields
        self.assertEqual(old_ref.uuid, new_ref.uuid)
        self.assertEqual(old_ref.project_id, new_ref.project_id)
        # after a copy operation, we can assert:
        # 1. the two states have their own InstanceState
        old_insp = inspect(old_ref)
        new_insp = inspect(new_ref)
        self.assertNotEqual(old_insp, new_insp)
        # 2. only one of the objects is still in our Session
        self.assertIs(new_insp.session, session)
        self.assertIsNone(old_insp.session)
        # 3. The "new" object remains persistent and ready
        # for updates
        self.assertTrue(new_insp.persistent)
        # 4. the "old" object is detached from this Session.
        self.assertTrue(old_insp.detached)
def test_instance_update_and_get_original_conflict_race(self):
# Ensure that we retry if update_on_match fails for no discernable
# reason
instance = self.create_instance_with_args()
orig_update_on_match = update_match.update_on_match
# Reproduce the conditions of a race between fetching and updating the
# instance by making update_on_match fail for no discernable reason the
# first time it is called, but work normally the second time.
with mock.patch.object(update_match, 'update_on_match',
side_effect=[update_match.NoRowsMatched,
orig_update_on_match]):
db.instance_update_and_get_original(
self.ctxt, instance['uuid'], {'metadata': {'mk1': 'mv3'}})
self.assertEqual(update_match.update_on_match.call_count, 2)
def test_instance_update_and_get_original_conflict_race_fallthrough(self):
# Ensure that is update_match continuously fails for no discernable
# reason, we evantually raise UnknownInstanceUpdateConflict
instance = self.create_instance_with_args()
# Reproduce the conditions of a race between fetching and updating the
# instance by making update_on_match fail for no discernable reason.
with mock.patch.object(update_match, 'update_on_match',
side_effect=update_match.NoRowsMatched):
self.assertRaises(exception.UnknownInstanceUpdateConflict,
db.instance_update_and_get_original,
self.ctxt,
instance['uuid'],
{'metadata': {'mk1': 'mv3'}})
def test_instance_update_and_get_original_expected_host(self):
# Ensure that we allow update when expecting a host field
instance = self.create_instance_with_args()
(orig, new) = db.instance_update_and_get_original(
self.ctxt, instance['uuid'], {'host': None},
expected={'host': 'h1'})
self.assertIsNone(new['host'])
def test_instance_update_and_get_original_expected_host_fail(self):
# Ensure that we detect a changed expected host and raise
# InstanceUpdateConflict
instance = self.create_instance_with_args()
try:
db.instance_update_and_get_original(
self.ctxt, instance['uuid'], {'host': None},
expected={'host': 'h2'})
except exception.InstanceUpdateConflict as ex:
self.assertEqual(ex.kwargs['instance_uuid'], instance['uuid'])
self.assertEqual(ex.kwargs['actual'], {'host': 'h1'})
self.assertEqual(ex.kwargs['expected'], {'host': ['h2']})
else:
self.fail('InstanceUpdateConflict was not raised')
def test_instance_update_and_get_original_expected_host_none(self):
# Ensure that we allow update when expecting a host field of None
instance = self.create_instance_with_args(host=None)
(old, new) = db.instance_update_and_get_original(
self.ctxt, instance['uuid'], {'host': 'h1'},
expected={'host': None})
self.assertEqual('h1', new['host'])
def test_instance_update_and_get_original_expected_host_none_fail(self):
# Ensure that we detect a changed expected host of None and raise
# InstanceUpdateConflict
instance = self.create_instance_with_args()
try:
db.instance_update_and_get_original(
self.ctxt, instance['uuid'], {'host': None},
expected={'host': None})
except exception.InstanceUpdateConflict as ex:
self.assertEqual(ex.kwargs['instance_uuid'], instance['uuid'])
self.assertEqual(ex.kwargs['actual'], {'host': 'h1'})
self.assertEqual(ex.kwargs['expected'], {'host': [None]})
else:
self.fail('InstanceUpdateConflict was not raised')
def test_instance_update_and_get_original_expected_task_state_single_fail(self): # noqa
# Ensure that we detect a changed expected task and raise
# UnexpectedTaskStateError
instance = self.create_instance_with_args()
try:
db.instance_update_and_get_original(
self.ctxt, instance['uuid'], {
'host': None,
'expected_task_state': task_states.SCHEDULING
})
except exception.UnexpectedTaskStateError as ex:
self.assertEqual(ex.kwargs['instance_uuid'], instance['uuid'])
self.assertEqual(ex.kwargs['actual'], {'task_state': None})
self.assertEqual(ex.kwargs['expected'],
{'task_state': [task_states.SCHEDULING]})
else:
self.fail('UnexpectedTaskStateError was not raised')
def test_instance_update_and_get_original_expected_task_state_single_pass(self): # noqa
# Ensure that we allow an update when expected task is correct
instance = self.create_instance_with_args()
(orig, new) = db.instance_update_and_get_original(
self.ctxt, instance['uuid'], {
'host': None,
'expected_task_state': None
})
self.assertIsNone(new['host'])
def test_instance_update_and_get_original_expected_task_state_multi_fail(self): # noqa
# Ensure that we detect a changed expected task and raise
# UnexpectedTaskStateError when there are multiple potential expected
# tasks
instance = self.create_instance_with_args()
try:
db.instance_update_and_get_original(
self.ctxt, instance['uuid'], {
'host': None,
'expected_task_state': [task_states.SCHEDULING,
task_states.REBUILDING]
})
except exception.UnexpectedTaskStateError as ex:
self.assertEqual(ex.kwargs['instance_uuid'], instance['uuid'])
self.assertEqual(ex.kwargs['actual'], {'task_state': None})
self.assertEqual(ex.kwargs['expected'],
{'task_state': [task_states.SCHEDULING,
task_states.REBUILDING]})
else:
self.fail('UnexpectedTaskStateError was not raised')
def test_instance_update_and_get_original_expected_task_state_multi_pass(self): # noqa
# Ensure that we allow an update when expected task is in a list of
# expected tasks
instance = self.create_instance_with_args()
(orig, new) = db.instance_update_and_get_original(
self.ctxt, instance['uuid'], {
'host': None,
'expected_task_state': [task_states.SCHEDULING, None]
})
self.assertIsNone(new['host'])
def test_instance_update_and_get_original_expected_task_state_deleting(self): # noqa
# Ensure that we raise UnepectedDeletingTaskStateError when task state
# is not as expected, and it is DELETING
instance = self.create_instance_with_args(
task_state=task_states.DELETING)
try:
db.instance_update_and_get_original(
self.ctxt, instance['uuid'], {
'host': None,
'expected_task_state': task_states.SCHEDULING
})
except exception.UnexpectedDeletingTaskStateError as ex:
self.assertEqual(ex.kwargs['instance_uuid'], instance['uuid'])
self.assertEqual(ex.kwargs['actual'],
{'task_state': task_states.DELETING})
self.assertEqual(ex.kwargs['expected'],
{'task_state': [task_states.SCHEDULING]})
else:
self.fail('UnexpectedDeletingTaskStateError was not raised')
def test_instance_update_unique_name(self):
context1 = context.RequestContext('user1', 'p1')
context2 = context.RequestContext('user2', 'p2')
inst1 = self.create_instance_with_args(context=context1,
project_id='p1',
hostname='fake_name1')
inst2 = self.create_instance_with_args(context=context1,
project_id='p1',
hostname='fake_name2')
inst3 = self.create_instance_with_args(context=context2,
project_id='p2',
hostname='fake_name3')
# osapi_compute_unique_server_name_scope is unset so this should work:
db.instance_update(context1, inst1['uuid'], {'hostname': 'fake_name2'})
db.instance_update(context1, inst1['uuid'], {'hostname': 'fake_name1'})
# With scope 'global' any duplicate should fail.
self.flags(osapi_compute_unique_server_name_scope='global')
self.assertRaises(exception.InstanceExists,
db.instance_update,
context1,
inst2['uuid'],
{'hostname': 'fake_name1'})
self.assertRaises(exception.InstanceExists,
db.instance_update,
context2,
inst3['uuid'],
{'hostname': 'fake_name1'})
# But we should definitely be able to update our name if we aren't
# really changing it.
db.instance_update(context1, inst1['uuid'], {'hostname': 'fake_NAME'})
# With scope 'project' a duplicate in the project should fail:
self.flags(osapi_compute_unique_server_name_scope='project')
self.assertRaises(exception.InstanceExists, db.instance_update,
context1, inst2['uuid'], {'hostname': 'fake_NAME'})
# With scope 'project' a duplicate in a different project should work:
self.flags(osapi_compute_unique_server_name_scope='project')
db.instance_update(context2, inst3['uuid'], {'hostname': 'fake_NAME'})
def _test_instance_update_updates_metadata(self, metadata_type):
instance = self.create_instance_with_args()
def set_and_check(meta):
inst = db.instance_update(self.ctxt, instance['uuid'],
{metadata_type: dict(meta)})
_meta = utils.metadata_to_dict(inst[metadata_type])
self.assertEqual(meta, _meta)
meta = {'speed': '88', 'units': 'MPH'}
set_and_check(meta)
meta['gigawatts'] = '1.21'
set_and_check(meta)
del meta['gigawatts']
set_and_check(meta)
self.ctxt.read_deleted = 'yes'
self.assertNotIn('gigawatts',
db.instance_system_metadata_get(self.ctxt, instance.uuid))
def test_security_group_in_use(self):
db.instance_create(self.ctxt, dict(host='foo'))
def test_instance_update_updates_system_metadata(self):
# Ensure that system_metadata is updated during instance_update
self._test_instance_update_updates_metadata('system_metadata')
def test_instance_update_updates_metadata(self):
# Ensure that metadata is updated during instance_update
self._test_instance_update_updates_metadata('metadata')
def test_instance_floating_address_get_all(self):
ctxt = context.get_admin_context()
instance1 = db.instance_create(ctxt, {'host': 'h1', 'hostname': 'n1'})
instance2 = db.instance_create(ctxt, {'host': 'h2', 'hostname': 'n2'})
fixed_addresses = ['1.1.1.1', '1.1.1.2', '1.1.1.3']
float_addresses = ['2.1.1.1', '2.1.1.2', '2.1.1.3']
instance_uuids = [instance1['uuid'], instance1['uuid'],
instance2['uuid']]
for fixed_addr, float_addr, instance_uuid in zip(fixed_addresses,
float_addresses,
instance_uuids):
db.fixed_ip_create(ctxt, {'address': fixed_addr,
'instance_uuid': instance_uuid})
fixed_id = db.fixed_ip_get_by_address(ctxt, fixed_addr)['id']
db.floating_ip_create(ctxt,
{'address': float_addr,
'fixed_ip_id': fixed_id})
real_float_addresses = \
db.instance_floating_address_get_all(ctxt, instance_uuids[0])
self.assertEqual(set(float_addresses[:2]), set(real_float_addresses))
real_float_addresses = \
db.instance_floating_address_get_all(ctxt, instance_uuids[2])
self.assertEqual(set([float_addresses[2]]), set(real_float_addresses))
self.assertRaises(exception.InvalidUUID,
db.instance_floating_address_get_all,
ctxt, 'invalid_uuid')
def test_instance_stringified_ips(self):
instance = self.create_instance_with_args()
instance = db.instance_update(
self.ctxt, instance['uuid'],
{'access_ip_v4': netaddr.IPAddress('1.2.3.4'),
'access_ip_v6': netaddr.IPAddress('::1')})
self.assertIsInstance(instance['access_ip_v4'], six.string_types)
self.assertIsInstance(instance['access_ip_v6'], six.string_types)
instance = db.instance_get_by_uuid(self.ctxt, instance['uuid'])
self.assertIsInstance(instance['access_ip_v4'], six.string_types)
self.assertIsInstance(instance['access_ip_v6'], six.string_types)
@mock.patch('nova.db.sqlalchemy.api._check_instance_exists_in_project',
return_value=None)
def test_instance_destroy(self, mock_check_inst_exists):
ctxt = context.get_admin_context()
values = {
'metadata': {'key': 'value'},
'system_metadata': {'key': 'value'}
}
inst_uuid = self.create_instance_with_args(**values)['uuid']
db.instance_tag_set(ctxt, inst_uuid, ['tag1', 'tag2'])
db.instance_destroy(ctxt, inst_uuid)
self.assertRaises(exception.InstanceNotFound,
db.instance_get, ctxt, inst_uuid)
self.assertIsNone(db.instance_info_cache_get(ctxt, inst_uuid))
self.assertEqual({}, db.instance_metadata_get(ctxt, inst_uuid))
self.assertEqual([], db.instance_tag_get_by_instance_uuid(
ctxt, inst_uuid))
ctxt.read_deleted = 'yes'
self.assertEqual(values['system_metadata'],
db.instance_system_metadata_get(ctxt, inst_uuid))
def test_instance_destroy_already_destroyed(self):
ctxt = context.get_admin_context()
instance = self.create_instance_with_args()
db.instance_destroy(ctxt, instance['uuid'])
self.assertRaises(exception.InstanceNotFound,
db.instance_destroy, ctxt, instance['uuid'])
def test_check_instance_exists(self):
session = get_session()
instance = self.create_instance_with_args()
self.assertIsNone(sqlalchemy_api._check_instance_exists_in_project(
self.ctxt, session, instance['uuid']))
def test_check_instance_exists_non_existing_instance(self):
session = get_session()
self.assertRaises(exception.InstanceNotFound,
sqlalchemy_api._check_instance_exists_in_project,
self.ctxt, session, '123')
def test_check_instance_exists_from_different_tenant(self):
context1 = context.RequestContext('user1', 'project1')
context2 = context.RequestContext('user2', 'project2')
session = get_session()
instance = self.create_instance_with_args(context=context1)
self.assertIsNone(sqlalchemy_api._check_instance_exists_in_project(
context1, session, instance['uuid']))
self.assertRaises(exception.InstanceNotFound,
sqlalchemy_api._check_instance_exists_in_project,
context2, session, instance['uuid'])
def test_check_instance_exists_admin_context(self):
session = get_session()
some_context = context.RequestContext('some_user', 'some_project')
instance = self.create_instance_with_args(context=some_context)
# Check that method works correctly with admin context
self.assertIsNone(sqlalchemy_api._check_instance_exists_in_project(
self.ctxt, session, instance['uuid']))
class InstanceMetadataTestCase(test.TestCase):
    """Tests for db.api.instance_metadata_* methods."""

    def setUp(self):
        super(InstanceMetadataTestCase, self).setUp()
        self.ctxt = context.get_admin_context()

    def test_instance_metadata_get(self):
        """Metadata supplied at create time is returned by the getter."""
        instance = db.instance_create(self.ctxt,
                                      {'metadata': {'key': 'value'}})
        fetched = db.instance_metadata_get(self.ctxt, instance['uuid'])
        self.assertEqual({'key': 'value'}, fetched)

    def test_instance_metadata_delete(self):
        """Deleting one key leaves the remaining metadata intact."""
        instance = db.instance_create(
            self.ctxt, {'metadata': {'key': 'val', 'key1': 'val1'}})
        db.instance_metadata_delete(self.ctxt, instance['uuid'], 'key1')
        remaining = db.instance_metadata_get(self.ctxt, instance['uuid'])
        self.assertEqual({'key': 'val'}, remaining)

    def test_instance_metadata_update(self):
        """Update merges when delete=False and replaces when delete=True."""
        instance = db.instance_create(self.ctxt, {'host': 'h1',
                   'project_id': 'p1', 'metadata': {'key': 'value'}})
        # delete=False merges the new pair into the existing metadata.
        db.instance_metadata_update(self.ctxt, instance['uuid'],
                                    {'new_key': 'new_value'}, False)
        merged = db.instance_metadata_get(self.ctxt, instance['uuid'])
        self.assertEqual({'key': 'value', 'new_key': 'new_value'}, merged)
        # delete=True replaces the stored metadata wholesale.
        db.instance_metadata_update(self.ctxt, instance['uuid'],
                                    {'new_key': 'new_value'}, True)
        replaced = db.instance_metadata_get(self.ctxt, instance['uuid'])
        self.assertEqual({'new_key': 'new_value'}, replaced)
class InstanceExtraTestCase(test.TestCase):
    """Tests for the instance_extra side-table DB API."""

    def setUp(self):
        super(InstanceExtraTestCase, self).setUp()
        self.ctxt = context.get_admin_context()
        self.instance = db.instance_create(self.ctxt, {})

    def test_instance_extra_get_by_uuid_instance_create(self):
        """instance_create implicitly creates an instance_extra row."""
        self.assertIsNotNone(db.instance_extra_get_by_instance_uuid(
            self.ctxt, self.instance['uuid']))

    def test_instance_extra_update_by_uuid(self):
        """Updating an existing extra row persists the new value."""
        db.instance_extra_update_by_uuid(self.ctxt, self.instance['uuid'],
                                         {'numa_topology': 'changed'})
        extra = db.instance_extra_get_by_instance_uuid(
            self.ctxt, self.instance['uuid'])
        self.assertEqual('changed', extra.numa_topology)

    def test_instance_extra_update_by_uuid_and_create(self):
        """The update call recreates the extra row if it is missing."""
        # Drop the row that instance_create made for us.
        sqlalchemy_api.model_query(self.ctxt, models.InstanceExtra).\
                filter_by(instance_uuid=self.instance['uuid']).\
                delete()
        self.assertIsNone(db.instance_extra_get_by_instance_uuid(
            self.ctxt, self.instance['uuid']))
        # The update should transparently insert a fresh row.
        db.instance_extra_update_by_uuid(self.ctxt, self.instance['uuid'],
                                         {'numa_topology': 'changed'})
        extra = db.instance_extra_get_by_instance_uuid(
            self.ctxt, self.instance['uuid'])
        self.assertEqual('changed', extra.numa_topology)

    def test_instance_extra_get_with_columns(self):
        """Only requested columns are loaded; others raise on access."""
        extra = db.instance_extra_get_by_instance_uuid(
            self.ctxt, self.instance['uuid'],
            columns=['numa_topology', 'vcpu_model'])
        # 'pci_requests' was not selected, so touching it must fail.
        self.assertRaises(SQLAlchemyError,
                          extra.__getitem__, 'pci_requests')
        self.assertIn('numa_topology', extra)
        self.assertIn('vcpu_model', extra)
class ServiceTestCase(test.TestCase, ModelsObjectComparatorMixin):
    """Tests for the db.api.service_* CRUD and query helpers."""
    def setUp(self):
        super(ServiceTestCase, self).setUp()
        self.ctxt = context.get_admin_context()
    def _get_base_values(self):
        # Default service attributes; individual tests override some keys.
        return {
            'host': 'fake_host',
            'binary': 'fake_binary',
            'topic': 'fake_topic',
            'report_count': 3,
            'disabled': False,
            'forced_down': False
        }
    def _create_service(self, values):
        # Merge overrides into the defaults and persist the service.
        v = self._get_base_values()
        v.update(values)
        return db.service_create(self.ctxt, v)
    def test_service_create(self):
        """A created service gets an id and keeps the supplied values."""
        service = self._create_service({})
        self.assertIsNotNone(service['id'])
        for key, value in self._get_base_values().items():
            self.assertEqual(value, service[key])
    def test_service_create_disabled(self):
        """With enable_new_services=False, new services start disabled."""
        self.flags(enable_new_services=False)
        service = self._create_service({})
        self.assertTrue(service['disabled'])
    def test_service_destroy(self):
        """Destroying one service leaves other services untouched."""
        service1 = self._create_service({})
        service2 = self._create_service({'host': 'fake_host2'})
        db.service_destroy(self.ctxt, service1['id'])
        self.assertRaises(exception.ServiceNotFound,
                          db.service_get, self.ctxt, service1['id'])
        self._assertEqualObjects(db.service_get(self.ctxt, service2['id']),
                                 service2, ignored_keys=['compute_node'])
    def test_service_update(self):
        """service_update persists every updated field."""
        service = self._create_service({})
        new_values = {
            'host': 'fake_host1',
            'binary': 'fake_binary1',
            'topic': 'fake_topic1',
            'report_count': 4,
            'disabled': True
        }
        db.service_update(self.ctxt, service['id'], new_values)
        updated_service = db.service_get(self.ctxt, service['id'])
        for key, value in new_values.items():
            self.assertEqual(value, updated_service[key])
    def test_service_update_not_found_exception(self):
        """Updating a non-existent service id raises ServiceNotFound."""
        self.assertRaises(exception.ServiceNotFound,
                          db.service_update, self.ctxt, 100500, {})
    def test_service_update_with_set_forced_down(self):
        """forced_down can be switched on via service_update."""
        service = self._create_service({})
        db.service_update(self.ctxt, service['id'], {'forced_down': True})
        updated_service = db.service_get(self.ctxt, service['id'])
        self.assertTrue(updated_service['forced_down'])
    def test_service_update_with_unset_forced_down(self):
        """forced_down can be switched off via service_update."""
        service = self._create_service({'forced_down': True})
        db.service_update(self.ctxt, service['id'], {'forced_down': False})
        updated_service = db.service_get(self.ctxt, service['id'])
        self.assertFalse(updated_service['forced_down'])
    def test_service_get(self):
        """service_get returns exactly the requested service."""
        service1 = self._create_service({})
        self._create_service({'host': 'some_other_fake_host'})
        real_service1 = db.service_get(self.ctxt, service1['id'])
        self._assertEqualObjects(service1, real_service1,
                                 ignored_keys=['compute_node'])
    def test_service_get_minimum_version(self):
        """The minimum version skips services that are forced down."""
        # version 1 is forced down, so the expected minimum is 2.
        self._create_service({'version': 1,
                              'host': 'host3',
                              'binary': 'compute',
                              'forced_down': True})
        self._create_service({'version': 2,
                              'host': 'host1',
                              'binary': 'compute'})
        self._create_service({'version': 3,
                              'host': 'host2',
                              'binary': 'compute'})
        self.assertEqual(2, db.service_get_minimum_version(self.ctxt,
                                                           'compute'))
    def test_service_get_not_found_exception(self):
        """Fetching a non-existent service id raises ServiceNotFound."""
        self.assertRaises(exception.ServiceNotFound,
                          db.service_get, self.ctxt, 100500)
    def test_service_get_by_host_and_topic(self):
        """Lookup by (host, topic) returns the matching service only."""
        service1 = self._create_service({'host': 'host1', 'topic': 'topic1'})
        self._create_service({'host': 'host2', 'topic': 'topic2'})
        real_service1 = db.service_get_by_host_and_topic(self.ctxt,
                                                         host='host1',
                                                         topic='topic1')
        self._assertEqualObjects(service1, real_service1)
    def test_service_get_by_host_and_binary(self):
        """Lookup by (host, binary) returns the matching service only."""
        service1 = self._create_service({'host': 'host1', 'binary': 'foo'})
        self._create_service({'host': 'host2', 'binary': 'bar'})
        real_service1 = db.service_get_by_host_and_binary(self.ctxt,
                                                          host='host1',
                                                          binary='foo')
        self._assertEqualObjects(service1, real_service1)
    def test_service_get_by_host_and_binary_raises(self):
        """An unmatched (host, binary) pair raises HostBinaryNotFound."""
        self.assertRaises(exception.HostBinaryNotFound,
                          db.service_get_by_host_and_binary, self.ctxt,
                          host='host1', binary='baz')
    def test_service_get_all(self):
        """service_get_all honours the disabled filter in all three modes."""
        values = [
            {'host': 'host1', 'topic': 'topic1'},
            {'host': 'host2', 'topic': 'topic2'},
            {'disabled': True}
        ]
        services = [self._create_service(vals) for vals in values]
        disabled_services = [services[-1]]
        non_disabled_services = services[:-1]
        # (expected, actual) pairs: no filter / disabled-only / enabled-only.
        compares = [
            (services, db.service_get_all(self.ctxt)),
            (disabled_services, db.service_get_all(self.ctxt, True)),
            (non_disabled_services, db.service_get_all(self.ctxt, False))
        ]
        for comp in compares:
            self._assertEqualListsOfObjects(*comp)
    def test_service_get_all_by_topic(self):
        """Only enabled services with the given topic are returned."""
        values = [
            {'host': 'host1', 'topic': 't1'},
            {'host': 'host2', 'topic': 't1'},
            {'disabled': True, 'topic': 't1'},
            {'host': 'host3', 'topic': 't2'}
        ]
        services = [self._create_service(vals) for vals in values]
        expected = services[:2]
        real = db.service_get_all_by_topic(self.ctxt, 't1')
        self._assertEqualListsOfObjects(expected, real)
    def test_service_get_all_by_binary(self):
        """Only enabled services with the given binary are returned."""
        values = [
            {'host': 'host1', 'binary': 'b1'},
            {'host': 'host2', 'binary': 'b1'},
            {'disabled': True, 'binary': 'b1'},
            {'host': 'host3', 'binary': 'b2'}
        ]
        services = [self._create_service(vals) for vals in values]
        expected = services[:2]
        real = db.service_get_all_by_binary(self.ctxt, 'b1')
        self._assertEqualListsOfObjects(expected, real)
    def test_service_get_all_by_host(self):
        """All services on the given host are returned."""
        values = [
            {'host': 'host1', 'topic': 't11', 'binary': 'b11'},
            {'host': 'host1', 'topic': 't12', 'binary': 'b12'},
            {'host': 'host2', 'topic': 't1'},
            {'host': 'host3', 'topic': 't1'}
        ]
        services = [self._create_service(vals) for vals in values]
        expected = services[:2]
        real = db.service_get_all_by_host(self.ctxt, 'host1')
        self._assertEqualListsOfObjects(expected, real)
    def test_service_get_by_compute_host(self):
        """Lookup by compute host returns that host's nova-compute entry."""
        values = [
            {'host': 'host1', 'binary': 'nova-compute'},
            {'host': 'host2', 'binary': 'nova-scheduler'},
            {'host': 'host3', 'binary': 'nova-compute'}
        ]
        services = [self._create_service(vals) for vals in values]
        real_service = db.service_get_by_compute_host(self.ctxt, 'host1')
        self._assertEqualObjects(services[0], real_service)
        self.assertRaises(exception.ComputeHostNotFound,
                          db.service_get_by_compute_host,
                          self.ctxt, 'non-exists-host')
    def test_service_get_by_compute_host_not_found(self):
        """An unknown compute host raises ComputeHostNotFound."""
        self.assertRaises(exception.ComputeHostNotFound,
                          db.service_get_by_compute_host,
                          self.ctxt, 'non-exists-host')
    def test_service_binary_exists_exception(self):
        """Duplicate (host, binary) with a new topic raises ServiceBinaryExists."""
        db.service_create(self.ctxt, self._get_base_values())
        values = self._get_base_values()
        values.update({'topic': 'top1'})
        self.assertRaises(exception.ServiceBinaryExists, db.service_create,
                          self.ctxt, values)
    def test_service_topic_exists_exceptions(self):
        """Duplicate (host, topic) with a new binary raises ServiceTopicExists."""
        db.service_create(self.ctxt, self._get_base_values())
        values = self._get_base_values()
        values.update({'binary': 'bin1'})
        self.assertRaises(exception.ServiceTopicExists, db.service_create,
                          self.ctxt, values)
class BaseInstanceTypeTestCase(test.TestCase, ModelsObjectComparatorMixin):
    """Shared fixtures for flavor (instance type) DB API test cases."""

    def setUp(self):
        super(BaseInstanceTypeTestCase, self).setUp()
        self.ctxt = context.get_admin_context()
        self.user_ctxt = context.RequestContext('user', 'user')

    def _get_base_values(self):
        # Default flavor attributes; individual tests override these.
        return dict(name='fake_name',
                    memory_mb=512,
                    vcpus=1,
                    root_gb=10,
                    ephemeral_gb=10,
                    flavorid='fake_flavor',
                    swap=0,
                    rxtx_factor=0.5,
                    vcpu_weight=1,
                    disabled=False,
                    is_public=True)

    def _create_flavor(self, values, projects=None):
        # Merge overrides into the defaults and persist the flavor.
        base = self._get_base_values()
        base.update(values)
        return db.flavor_create(self.ctxt, base, projects)
class InstanceActionTestCase(test.TestCase, ModelsObjectComparatorMixin):
    """Tests for db.api instance action and action event methods."""
    # DB bookkeeping columns ignored when comparing supplied values
    # against rows returned by the API.
    IGNORED_FIELDS = [
        'id',
        'created_at',
        'updated_at',
        'deleted_at',
        'deleted'
    ]
    def setUp(self):
        super(InstanceActionTestCase, self).setUp()
        self.ctxt = context.get_admin_context()
    def _create_action_values(self, uuid, action='run_instance',
                              ctxt=None, extra=None):
        """Build a values dict for db.action_start.

        Also inserts the instance row for ``uuid`` as a side effect.
        ``extra`` entries override/extend the defaults.
        """
        if ctxt is None:
            ctxt = self.ctxt
        db.instance_create(ctxt, {'uuid': uuid})
        values = {
            'action': action,
            'instance_uuid': uuid,
            'request_id': ctxt.request_id,
            'user_id': ctxt.user_id,
            'project_id': ctxt.project_id,
            'start_time': timeutils.utcnow(),
            'message': 'action-message'
        }
        if extra is not None:
            values.update(extra)
        return values
    def _create_event_values(self, uuid, event='schedule',
                             ctxt=None, extra=None):
        """Build a values dict for db.action_event_start/finish."""
        if ctxt is None:
            ctxt = self.ctxt
        values = {
            'event': event,
            'instance_uuid': uuid,
            'request_id': ctxt.request_id,
            'start_time': timeutils.utcnow(),
            'host': 'fake-host',
            'details': 'fake-details',
        }
        if extra is not None:
            values.update(extra)
        return values
    def _assertActionSaved(self, action, uuid):
        """Retrieve the action to ensure it was successfully added."""
        actions = db.actions_get(self.ctxt, uuid)
        self.assertEqual(1, len(actions))
        self._assertEqualObjects(action, actions[0])
    def _assertActionEventSaved(self, event, action_id):
        # Retrieve the event to ensure it was successfully added
        events = db.action_events_get(self.ctxt, action_id)
        self.assertEqual(1, len(events))
        self._assertEqualObjects(event, events[0],
                                 ['instance_uuid', 'request_id'])
    def test_instance_action_start(self):
        """Create an instance action."""
        uuid = str(stdlib_uuid.uuid4())
        action_values = self._create_action_values(uuid)
        action = db.action_start(self.ctxt, action_values)
        ignored_keys = self.IGNORED_FIELDS + ['finish_time']
        self._assertEqualObjects(action_values, action, ignored_keys)
        self._assertActionSaved(action, uuid)
    def test_instance_action_finish(self):
        """Finish a previously started instance action."""
        uuid = str(stdlib_uuid.uuid4())
        action_values = self._create_action_values(uuid)
        db.action_start(self.ctxt, action_values)
        action_values['finish_time'] = timeutils.utcnow()
        action = db.action_finish(self.ctxt, action_values)
        self._assertEqualObjects(action_values, action, self.IGNORED_FIELDS)
        self._assertActionSaved(action, uuid)
    def test_instance_action_finish_without_started_event(self):
        """Finishing an action that was never started must fail."""
        uuid = str(stdlib_uuid.uuid4())
        action_values = self._create_action_values(uuid)
        action_values['finish_time'] = timeutils.utcnow()
        self.assertRaises(exception.InstanceActionNotFound, db.action_finish,
                          self.ctxt, action_values)
    def test_instance_actions_get_by_instance(self):
        """Ensure we can get actions by UUID."""
        uuid1 = str(stdlib_uuid.uuid4())
        expected = []
        action_values = self._create_action_values(uuid1)
        action = db.action_start(self.ctxt, action_values)
        expected.append(action)
        action_values['action'] = 'resize'
        action = db.action_start(self.ctxt, action_values)
        expected.append(action)
        # Create some extra actions
        uuid2 = str(stdlib_uuid.uuid4())
        ctxt2 = context.get_admin_context()
        action_values = self._create_action_values(uuid2, 'reboot', ctxt2)
        db.action_start(ctxt2, action_values)
        db.action_start(ctxt2, action_values)
        # Retrieve the action to ensure it was successfully added
        actions = db.actions_get(self.ctxt, uuid1)
        self._assertEqualListsOfObjects(expected, actions)
    def test_instance_actions_get_are_in_order(self):
        """Ensure retrieved actions are in order."""
        uuid1 = str(stdlib_uuid.uuid4())
        extra = {
            'created_at': timeutils.utcnow()
        }
        action_values = self._create_action_values(uuid1, extra=extra)
        action1 = db.action_start(self.ctxt, action_values)
        action_values['action'] = 'delete'
        action2 = db.action_start(self.ctxt, action_values)
        actions = db.actions_get(self.ctxt, uuid1)
        self.assertEqual(2, len(actions))
        # Most recent action is expected first.
        self._assertEqualOrderedListOfObjects([action2, action1], actions)
    def test_instance_action_get_by_instance_and_action(self):
        """Ensure we can get an action by instance UUID and request id."""
        ctxt2 = context.get_admin_context()
        uuid1 = str(stdlib_uuid.uuid4())
        uuid2 = str(stdlib_uuid.uuid4())
        action_values = self._create_action_values(uuid1)
        db.action_start(self.ctxt, action_values)
        request_id = action_values['request_id']
        # NOTE(rpodolyaka): ensure we use a different req id for the 2nd req
        action_values['action'] = 'resize'
        action_values['request_id'] = 'req-00000000-7522-4d99-7ff-111111111111'
        db.action_start(self.ctxt, action_values)
        action_values = self._create_action_values(uuid2, 'reboot', ctxt2)
        db.action_start(ctxt2, action_values)
        db.action_start(ctxt2, action_values)
        action = db.action_get_by_request_id(self.ctxt, uuid1, request_id)
        self.assertEqual('run_instance', action['action'])
        self.assertEqual(self.ctxt.request_id, action['request_id'])
    def test_instance_action_event_start(self):
        """Create an instance action event."""
        uuid = str(stdlib_uuid.uuid4())
        action_values = self._create_action_values(uuid)
        action = db.action_start(self.ctxt, action_values)
        event_values = self._create_event_values(uuid)
        event = db.action_event_start(self.ctxt, event_values)
        event_values['action_id'] = action['id']
        ignored = self.IGNORED_FIELDS + ['finish_time', 'traceback', 'result']
        self._assertEqualObjects(event_values, event, ignored)
        self._assertActionEventSaved(event, action['id'])
    def test_instance_action_event_start_without_action(self):
        """Starting an event with no parent action must fail."""
        uuid = str(stdlib_uuid.uuid4())
        event_values = self._create_event_values(uuid)
        self.assertRaises(exception.InstanceActionNotFound,
                          db.action_event_start, self.ctxt, event_values)
    def test_instance_action_event_finish_without_started_event(self):
        """Finishing an event that was never started must fail."""
        uuid = str(stdlib_uuid.uuid4())
        db.action_start(self.ctxt, self._create_action_values(uuid))
        event_values = {
            'finish_time': timeutils.utcnow() + datetime.timedelta(seconds=5),
            'result': 'Success'
        }
        event_values = self._create_event_values(uuid, extra=event_values)
        self.assertRaises(exception.InstanceActionEventNotFound,
                          db.action_event_finish, self.ctxt, event_values)
    def test_instance_action_event_finish_without_action(self):
        """Finishing an event with no parent action must fail."""
        uuid = str(stdlib_uuid.uuid4())
        event_values = {
            'finish_time': timeutils.utcnow() + datetime.timedelta(seconds=5),
            'result': 'Success'
        }
        event_values = self._create_event_values(uuid, extra=event_values)
        self.assertRaises(exception.InstanceActionNotFound,
                          db.action_event_finish, self.ctxt, event_values)
    def test_instance_action_event_finish_success(self):
        """A successful event finish must not flag the action as errored."""
        uuid = str(stdlib_uuid.uuid4())
        action = db.action_start(self.ctxt, self._create_action_values(uuid))
        db.action_event_start(self.ctxt, self._create_event_values(uuid))
        event_values = {
            'finish_time': timeutils.utcnow() + datetime.timedelta(seconds=5),
            'result': 'Success'
        }
        event_values = self._create_event_values(uuid, extra=event_values)
        event = db.action_event_finish(self.ctxt, event_values)
        self._assertActionEventSaved(event, action['id'])
        action = db.action_get_by_request_id(self.ctxt, uuid,
                                             self.ctxt.request_id)
        self.assertNotEqual('Error', action['message'])
    def test_instance_action_event_finish_error(self):
        """Finish an instance action event with an error."""
        uuid = str(stdlib_uuid.uuid4())
        action = db.action_start(self.ctxt, self._create_action_values(uuid))
        db.action_event_start(self.ctxt, self._create_event_values(uuid))
        event_values = {
            'finish_time': timeutils.utcnow() + datetime.timedelta(seconds=5),
            'result': 'Error'
        }
        event_values = self._create_event_values(uuid, extra=event_values)
        event = db.action_event_finish(self.ctxt, event_values)
        self._assertActionEventSaved(event, action['id'])
        # An errored event must propagate to the action's message.
        action = db.action_get_by_request_id(self.ctxt, uuid,
                                             self.ctxt.request_id)
        self.assertEqual('Error', action['message'])
    def test_instance_action_and_event_start_string_time(self):
        """Create an instance action and event with a string start_time."""
        uuid = str(stdlib_uuid.uuid4())
        action = db.action_start(self.ctxt, self._create_action_values(uuid))
        event_values = {'start_time': timeutils.utcnow().isoformat()}
        event_values = self._create_event_values(uuid, extra=event_values)
        event = db.action_event_start(self.ctxt, event_values)
        self._assertActionEventSaved(event, action['id'])
    def test_instance_action_events_get_are_in_order(self):
        """Ensure retrieved action events are in order."""
        uuid1 = str(stdlib_uuid.uuid4())
        action = db.action_start(self.ctxt,
                                 self._create_action_values(uuid1))
        extra1 = {
            'created_at': timeutils.utcnow()
        }
        extra2 = {
            'created_at': timeutils.utcnow() + datetime.timedelta(seconds=5)
        }
        event_val1 = self._create_event_values(uuid1, 'schedule', extra=extra1)
        event_val2 = self._create_event_values(uuid1, 'run', extra=extra1)
        event_val3 = self._create_event_values(uuid1, 'stop', extra=extra2)
        event1 = db.action_event_start(self.ctxt, event_val1)
        event2 = db.action_event_start(self.ctxt, event_val2)
        event3 = db.action_event_start(self.ctxt, event_val3)
        events = db.action_events_get(self.ctxt, action['id'])
        self.assertEqual(3, len(events))
        # Most recent event is expected first.
        self._assertEqualOrderedListOfObjects([event3, event2, event1], events,
                                              ['instance_uuid', 'request_id'])
    def test_instance_action_event_get_by_id(self):
        """Get a specific instance action event."""
        ctxt2 = context.get_admin_context()
        uuid1 = str(stdlib_uuid.uuid4())
        uuid2 = str(stdlib_uuid.uuid4())
        action = db.action_start(self.ctxt,
                                 self._create_action_values(uuid1))
        db.action_start(ctxt2,
                        self._create_action_values(uuid2, 'reboot', ctxt2))
        event = db.action_event_start(self.ctxt,
                                      self._create_event_values(uuid1))
        event_values = self._create_event_values(uuid2, 'reboot', ctxt2)
        db.action_event_start(ctxt2, event_values)
        # Retrieve the event to ensure it was successfully added
        saved_event = db.action_event_get_by_id(self.ctxt,
                                                action['id'],
                                                event['id'])
        self._assertEqualObjects(event, saved_event,
                                 ['instance_uuid', 'request_id'])
    def test_instance_action_event_start_with_different_request_id(self):
        """An event may be started under a request id other than the action's."""
        uuid = str(stdlib_uuid.uuid4())
        action_values = self._create_action_values(uuid)
        action = db.action_start(self.ctxt, action_values)
        # init_host case
        fake_admin_context = context.get_admin_context()
        event_values = self._create_event_values(uuid, ctxt=fake_admin_context)
        event = db.action_event_start(fake_admin_context, event_values)
        event_values['action_id'] = action['id']
        ignored = self.IGNORED_FIELDS + ['finish_time', 'traceback', 'result']
        self._assertEqualObjects(event_values, event, ignored)
        self._assertActionEventSaved(event, action['id'])
    def test_instance_action_event_finish_with_different_request_id(self):
        """An event may be finished under a request id other than the action's."""
        uuid = str(stdlib_uuid.uuid4())
        action = db.action_start(self.ctxt, self._create_action_values(uuid))
        # init_host case
        fake_admin_context = context.get_admin_context()
        db.action_event_start(fake_admin_context, self._create_event_values(
            uuid, ctxt=fake_admin_context))
        event_values = {
            'finish_time': timeutils.utcnow() + datetime.timedelta(seconds=5),
            'result': 'Success'
        }
        event_values = self._create_event_values(uuid, ctxt=fake_admin_context,
                                                 extra=event_values)
        event = db.action_event_finish(fake_admin_context, event_values)
        self._assertActionEventSaved(event, action['id'])
        action = db.action_get_by_request_id(self.ctxt, uuid,
                                             self.ctxt.request_id)
        self.assertNotEqual('Error', action['message'])
class InstanceFaultTestCase(test.TestCase, ModelsObjectComparatorMixin):
    """Tests for db.api.instance_fault_* methods."""
    def setUp(self):
        super(InstanceFaultTestCase, self).setUp()
        self.ctxt = context.get_admin_context()
    def _create_fault_values(self, uuid, code=404):
        # Template values for an instance fault record.
        return {
            'message': 'message',
            'details': 'detail',
            'instance_uuid': uuid,
            'code': code,
            'host': 'localhost'
        }
    def test_instance_fault_create(self):
        """Ensure we can create an instance fault."""
        uuid = str(stdlib_uuid.uuid4())
        # Ensure no faults registered for this instance
        faults = db.instance_fault_get_by_instance_uuids(self.ctxt, [uuid])
        self.assertEqual(0, len(faults[uuid]))
        # Create a fault
        fault_values = self._create_fault_values(uuid)
        db.instance_create(self.ctxt, {'uuid': uuid})
        fault = db.instance_fault_create(self.ctxt, fault_values)
        ignored_keys = ['deleted', 'created_at', 'updated_at',
                        'deleted_at', 'id']
        self._assertEqualObjects(fault_values, fault, ignored_keys)
        # Retrieve the fault to ensure it was successfully added
        faults = db.instance_fault_get_by_instance_uuids(self.ctxt, [uuid])
        self.assertEqual(1, len(faults[uuid]))
        self._assertEqualObjects(fault, faults[uuid][0])
    def test_instance_fault_get_by_instance(self):
        """Ensure we can retrieve faults for instance."""
        uuids = [str(stdlib_uuid.uuid4()), str(stdlib_uuid.uuid4())]
        fault_codes = [404, 500]
        expected = {}
        # Create faults
        for uuid in uuids:
            db.instance_create(self.ctxt, {'uuid': uuid})
            expected[uuid] = []
            for code in fault_codes:
                fault_values = self._create_fault_values(uuid, code)
                fault = db.instance_fault_create(self.ctxt, fault_values)
                expected[uuid].append(fault)
        # Ensure faults are saved
        faults = db.instance_fault_get_by_instance_uuids(self.ctxt, uuids)
        self.assertEqual(len(expected), len(faults))
        for uuid in uuids:
            self._assertEqualListsOfObjects(expected[uuid], faults[uuid])
    def test_instance_faults_get_by_instance_uuids_no_faults(self):
        """A fault-free instance maps to an empty list, not a missing key."""
        uuid = str(stdlib_uuid.uuid4())
        # None should be returned when no faults exist.
        faults = db.instance_fault_get_by_instance_uuids(self.ctxt, [uuid])
        expected = {uuid: []}
        self.assertEqual(expected, faults)
    def test_instance_faults_get_by_instance_uuids_no_uuids(self):
        """An empty uuid list short-circuits without querying the DB."""
        # Stub Query.filter with no recorded expectations: if the DB API
        # tried to build a query, mox verification would fail.
        self.mox.StubOutWithMock(query.Query, 'filter')
        self.mox.ReplayAll()
        faults = db.instance_fault_get_by_instance_uuids(self.ctxt, [])
        self.assertEqual({}, faults)
class InstanceTypeTestCase(BaseInstanceTypeTestCase):
    def test_flavor_create(self):
        """A created flavor gets an id and keeps the supplied values."""
        flavor = self._create_flavor({})
        ignored_keys = ['id', 'deleted', 'deleted_at', 'updated_at',
                        'created_at', 'extra_specs']
        self.assertIsNotNone(flavor['id'])
        self._assertEqualObjects(flavor, self._get_base_values(),
                                 ignored_keys)
    def test_flavor_create_with_projects(self):
        """Project access is stored; duplicate project ids are collapsed."""
        projects = ['fake-project1', 'fake-project2']
        # Deliberately pass a duplicate project id: only unique entries
        # should end up in the access list.
        flavor = self._create_flavor({}, projects + ['fake-project2'])
        access = db.flavor_access_get_by_flavor_id(self.ctxt,
                                                   flavor['flavorid'])
        self.assertEqual(projects, [x.project_id for x in access])
    def test_flavor_destroy(self):
        """Destroying a flavor removes it and its extra specs only."""
        specs1 = {'a': '1', 'b': '2'}
        flavor1 = self._create_flavor({'name': 'name1', 'flavorid': 'a1',
                                       'extra_specs': specs1})
        specs2 = {'c': '4', 'd': '3'}
        flavor2 = self._create_flavor({'name': 'name2', 'flavorid': 'a2',
                                       'extra_specs': specs2})
        db.flavor_destroy(self.ctxt, 'name1')
        self.assertRaises(exception.FlavorNotFound,
                          db.flavor_get, self.ctxt, flavor1['id'])
        real_specs1 = db.flavor_extra_specs_get(self.ctxt, flavor1['flavorid'])
        self._assertEqualObjects(real_specs1, {})
        # The unrelated flavor must be untouched.
        r_flavor2 = db.flavor_get(self.ctxt, flavor2['id'])
        self._assertEqualObjects(flavor2, r_flavor2, 'extra_specs')
    def test_flavor_destroy_not_found(self):
        """Destroying an unknown flavor name raises FlavorNotFound."""
        self.assertRaises(exception.FlavorNotFound,
                          db.flavor_destroy, self.ctxt, 'nonexists')
    def test_flavor_create_duplicate_name(self):
        """Reusing an existing flavor name raises FlavorExists."""
        self._create_flavor({})
        self.assertRaises(exception.FlavorExists,
                          self._create_flavor,
                          {'flavorid': 'some_random_flavor'})
    def test_flavor_create_duplicate_flavorid(self):
        """Reusing an existing flavorid raises FlavorIdExists."""
        self._create_flavor({})
        self.assertRaises(exception.FlavorIdExists,
                          self._create_flavor,
                          {'name': 'some_random_name'})
    def test_flavor_create_with_extra_specs(self):
        """Extra specs supplied at create time are stored on the flavor."""
        extra_specs = dict(a='abc', b='def', c='ghi')
        flavor = self._create_flavor({'extra_specs': extra_specs})
        ignored_keys = ['id', 'deleted', 'deleted_at', 'updated_at',
                        'created_at', 'extra_specs']
        self._assertEqualObjects(flavor, self._get_base_values(),
                                 ignored_keys)
        self._assertEqualObjects(extra_specs, flavor['extra_specs'])
    def test_flavor_get_all(self):
        """flavor_get_all applies every filter, alone and in combination."""
        # NOTE(boris-42): Remove base instance types
        for it in db.flavor_get_all(self.ctxt):
            db.flavor_destroy(self.ctxt, it['name'])
        flavors = [
            {'root_gb': 600, 'memory_mb': 100, 'disabled': True,
             'is_public': True, 'name': 'a1', 'flavorid': 'f1'},
            {'root_gb': 500, 'memory_mb': 200, 'disabled': True,
             'is_public': True, 'name': 'a2', 'flavorid': 'f2'},
            {'root_gb': 400, 'memory_mb': 300, 'disabled': False,
             'is_public': True, 'name': 'a3', 'flavorid': 'f3'},
            {'root_gb': 300, 'memory_mb': 400, 'disabled': False,
             'is_public': False, 'name': 'a4', 'flavorid': 'f4'},
            {'root_gb': 200, 'memory_mb': 500, 'disabled': True,
             'is_public': False, 'name': 'a5', 'flavorid': 'f5'},
            {'root_gb': 100, 'memory_mb': 600, 'disabled': True,
             'is_public': False, 'name': 'a6', 'flavorid': 'f6'}
        ]
        flavors = [self._create_flavor(it) for it in flavors]
        # Python-side equivalents of the DB filters, used to compute the
        # expected result set for each filter combination.
        lambda_filters = {
            'min_memory_mb': lambda it, v: it['memory_mb'] >= v,
            'min_root_gb': lambda it, v: it['root_gb'] >= v,
            'disabled': lambda it, v: it['disabled'] == v,
            'is_public': lambda it, v: (v is None or it['is_public'] == v)
        }
        mem_filts = [{'min_memory_mb': x} for x in [100, 350, 550, 650]]
        root_filts = [{'min_root_gb': x} for x in [100, 350, 550, 650]]
        disabled_filts = [{'disabled': x} for x in [True, False]]
        is_public_filts = [{'is_public': x} for x in [True, False, None]]
        def assert_multi_filter_flavor_get(filters=None):
            # Compare DB-filtered results against the locally computed set.
            if filters is None:
                filters = {}
            expected_it = flavors
            for name, value in filters.items():
                filt = lambda it: lambda_filters[name](it, value)
                expected_it = list(filter(filt, expected_it))
            real_it = db.flavor_get_all(self.ctxt, filters=filters)
            self._assertEqualListsOfObjects(expected_it, real_it)
        # no filter
        assert_multi_filter_flavor_get()
        # test only with one filter
        for filt in mem_filts:
            assert_multi_filter_flavor_get(filt)
        for filt in root_filts:
            assert_multi_filter_flavor_get(filt)
        for filt in disabled_filts:
            assert_multi_filter_flavor_get(filt)
        for filt in is_public_filts:
            assert_multi_filter_flavor_get(filt)
        # test all filters together
        for mem in mem_filts:
            for root in root_filts:
                for disabled in disabled_filts:
                    for is_public in is_public_filts:
                        filts = {}
                        for f in (mem, root, disabled, is_public):
                            filts.update(f)
                        assert_multi_filter_flavor_get(filts)
    def test_flavor_get_all_limit_sort(self):
        """Results come back sorted consistently in both directions."""
        def assert_sorted_by_key_dir(sort_key, asc=True):
            # NOTE(review): the sort_key argument is ignored here -- both the
            # DB query and the manual sort below hard-code 'name', so the
            # attribute loop never actually exercises sorting by the other
            # columns.  TODO confirm intent and fix (beware: some columns,
            # e.g. deleted_at, can be NULL, which complicates a naive fix).
            sort_dir = 'asc' if asc else 'desc'
            results = db.flavor_get_all(self.ctxt, sort_key='name',
                                        sort_dir=sort_dir)
            # Manually sort the results as we would expect them
            expected_results = sorted(results,
                                      key=lambda item: item['name'],
                                      reverse=(not asc))
            self.assertEqual(expected_results, results)
        def assert_sorted_by_key_both_dir(sort_key):
            assert_sorted_by_key_dir(sort_key, True)
            assert_sorted_by_key_dir(sort_key, False)
        for attr in ['memory_mb', 'root_gb', 'deleted_at', 'name', 'deleted',
                     'created_at', 'ephemeral_gb', 'updated_at', 'disabled',
                     'vcpus', 'swap', 'rxtx_factor', 'is_public', 'flavorid',
                     'vcpu_weight', 'id']:
            assert_sorted_by_key_both_dir(attr)
    def test_flavor_get_all_limit(self):
        """The limit parameter caps the number of returned flavors."""
        limited_flavors = db.flavor_get_all(self.ctxt, limit=2)
        self.assertEqual(2, len(limited_flavors))
    def test_flavor_get_all_list_marker(self):
        """Pagination resumes after the flavor given as marker."""
        all_flavors = db.flavor_get_all(self.ctxt)
        # Set the 3rd result as the marker
        marker_flavorid = all_flavors[2]['flavorid']
        marked_flavors = db.flavor_get_all(self.ctxt, marker=marker_flavorid)
        # We expect everything /after/ the 3rd result
        expected_results = all_flavors[3:]
        self.assertEqual(expected_results, marked_flavors)
    def test_flavor_get_all_marker_not_found(self):
        """An unknown pagination marker raises MarkerNotFound."""
        self.assertRaises(exception.MarkerNotFound,
                          db.flavor_get_all, self.ctxt, marker='invalid')
    def test_flavor_get(self):
        """Each flavor can be fetched by its primary id."""
        flavors = [{'name': 'abc', 'flavorid': '123'},
                   {'name': 'def', 'flavorid': '456'},
                   {'name': 'ghi', 'flavorid': '789'}]
        flavors = [self._create_flavor(t) for t in flavors]
        for flavor in flavors:
            flavor_by_id = db.flavor_get(self.ctxt, flavor['id'])
            self._assertEqualObjects(flavor, flavor_by_id)
    def test_flavor_get_non_public(self):
        """Non-public flavors are visible to admins and granted projects."""
        flavor = self._create_flavor({'name': 'abc', 'flavorid': '123',
                                      'is_public': False})
        # Admin can see it
        flavor_by_id = db.flavor_get(self.ctxt, flavor['id'])
        self._assertEqualObjects(flavor, flavor_by_id)
        # Regular user can not
        self.assertRaises(exception.FlavorNotFound, db.flavor_get,
                          self.user_ctxt, flavor['id'])
        # Regular user can see it after being granted access
        db.flavor_access_add(self.ctxt, flavor['flavorid'],
                             self.user_ctxt.project_id)
        flavor_by_id = db.flavor_get(self.user_ctxt, flavor['id'])
        self._assertEqualObjects(flavor, flavor_by_id)
    def test_flavor_get_by_name(self):
        """Each flavor can be fetched by its unique name."""
        flavors = [{'name': 'abc', 'flavorid': '123'},
                   {'name': 'def', 'flavorid': '456'},
                   {'name': 'ghi', 'flavorid': '789'}]
        flavors = [self._create_flavor(t) for t in flavors]
        for flavor in flavors:
            flavor_by_name = db.flavor_get_by_name(self.ctxt, flavor['name'])
            self._assertEqualObjects(flavor, flavor_by_name)
    def test_flavor_get_by_name_not_found(self):
        """An unknown flavor name raises FlavorNotFoundByName."""
        self._create_flavor({})
        self.assertRaises(exception.FlavorNotFoundByName,
                          db.flavor_get_by_name, self.ctxt, 'nonexists')
    def test_flavor_get_by_name_non_public(self):
        """Name lookup of non-public flavors honours project access."""
        flavor = self._create_flavor({'name': 'abc', 'flavorid': '123',
                                      'is_public': False})
        # Admin can see it
        flavor_by_name = db.flavor_get_by_name(self.ctxt, flavor['name'])
        self._assertEqualObjects(flavor, flavor_by_name)
        # Regular user can not
        self.assertRaises(exception.FlavorNotFoundByName,
                          db.flavor_get_by_name, self.user_ctxt,
                          flavor['name'])
        # Regular user can see it after being granted access
        db.flavor_access_add(self.ctxt, flavor['flavorid'],
                             self.user_ctxt.project_id)
        flavor_by_name = db.flavor_get_by_name(self.user_ctxt, flavor['name'])
        self._assertEqualObjects(flavor, flavor_by_name)
def test_flavor_get_by_flavor_id(self):
    """Each created flavor can be fetched again by its flavorid."""
    specs = (
        {'name': 'abc', 'flavorid': '123'},
        {'name': 'def', 'flavorid': '456'},
        {'name': 'ghi', 'flavorid': '789'},
    )
    for expected in [self._create_flavor(spec) for spec in specs]:
        fetched = db.flavor_get_by_flavor_id(self.ctxt,
                                             expected['flavorid'])
        self._assertEqualObjects(expected, fetched)
def test_flavor_get_by_flavor_not_found(self):
    """Looking up a nonexistent flavorid raises FlavorNotFound."""
    self._create_flavor({})
    self.assertRaises(exception.FlavorNotFound,
                      db.flavor_get_by_flavor_id,
                      self.ctxt, 'nonexists')
def test_flavor_get_by_flavor_id_non_public(self):
    """Lookup by flavorid of a non-public flavor honours access grants."""
    flavor = self._create_flavor({'name': 'abc', 'flavorid': '123',
                                  'is_public': False})
    # Admin can see it
    flavor_by_fid = db.flavor_get_by_flavor_id(self.ctxt,
                                               flavor['flavorid'])
    self._assertEqualObjects(flavor, flavor_by_fid)
    # Regular user can not
    self.assertRaises(exception.FlavorNotFound,
                      db.flavor_get_by_flavor_id, self.user_ctxt,
                      flavor['flavorid'])
    # Regular user can see it after being granted access
    db.flavor_access_add(self.ctxt, flavor['flavorid'],
                         self.user_ctxt.project_id)
    flavor_by_fid = db.flavor_get_by_flavor_id(self.user_ctxt,
                                               flavor['flavorid'])
    self._assertEqualObjects(flavor, flavor_by_fid)
def test_flavor_get_by_flavor_id_deleted(self):
    """A destroyed flavor is still readable with read_deleted='yes'."""
    flavor = self._create_flavor({'name': 'abc', 'flavorid': '123'})
    db.flavor_destroy(self.ctxt, 'abc')
    flavor_by_fid = db.flavor_get_by_flavor_id(self.ctxt,
                                               flavor['flavorid'],
                                               read_deleted='yes')
    self.assertEqual(flavor['id'], flavor_by_fid['id'])
def test_flavor_get_by_flavor_id_deleted_and_recreat(self):
    """After destroy + recreate, read_deleted='yes' returns the new row."""
    # NOTE(wingwj): Aims to test difference between mysql and postgresql
    # for bug 1288636
    param_dict = {'name': 'abc', 'flavorid': '123'}

    self._create_flavor(param_dict)
    db.flavor_destroy(self.ctxt, 'abc')

    # Recreate the flavor with the same params
    flavor = self._create_flavor(param_dict)

    flavor_by_fid = db.flavor_get_by_flavor_id(self.ctxt,
                                               flavor['flavorid'],
                                               read_deleted='yes')
    self.assertEqual(flavor['id'], flavor_by_fid['id'])
class InstanceTypeExtraSpecsTestCase(BaseInstanceTypeTestCase):
    """Tests for the flavor extra-specs DB API (get/delete/update_or_create)."""

    def setUp(self):
        super(InstanceTypeExtraSpecsTestCase, self).setUp()
        # Two flavors with disjoint extra-spec sets, shared by all tests below.
        values = ({'name': 'n1', 'flavorid': 'f1',
                   'extra_specs': dict(a='a', b='b', c='c')},
                  {'name': 'n2', 'flavorid': 'f2',
                   'extra_specs': dict(d='d', e='e', f='f')})

        # NOTE(boris-42): We have already tested flavor_create method
        # with extra_specs in InstanceTypeTestCase.
        self.flavors = [self._create_flavor(v) for v in values]

    def test_flavor_extra_specs_get(self):
        for it in self.flavors:
            real_specs = db.flavor_extra_specs_get(self.ctxt, it['flavorid'])
            self._assertEqualObjects(it['extra_specs'], real_specs)

    def test_flavor_extra_specs_delete(self):
        # Deleting one key removes it while leaving the remaining keys intact.
        for it in self.flavors:
            specs = it['extra_specs']
            key = list(specs.keys())[0]
            del specs[key]
            db.flavor_extra_specs_delete(self.ctxt, it['flavorid'], key)
            real_specs = db.flavor_extra_specs_get(self.ctxt, it['flavorid'])
            self._assertEqualObjects(it['extra_specs'], real_specs)

    def test_flavor_extra_specs_delete_failed(self):
        # Deleting a key that was never set raises FlavorExtraSpecsNotFound.
        for it in self.flavors:
            self.assertRaises(exception.FlavorExtraSpecsNotFound,
                              db.flavor_extra_specs_delete,
                              self.ctxt, it['flavorid'], 'dummy')

    def test_flavor_extra_specs_update_or_create(self):
        # Mixed update (b, c) and create (d, e) in one call.
        for it in self.flavors:
            current_specs = it['extra_specs']
            current_specs.update(dict(b='b1', c='c1', d='d1', e='e1'))
            params = (self.ctxt, it['flavorid'], current_specs)
            db.flavor_extra_specs_update_or_create(*params)
            real_specs = db.flavor_extra_specs_get(self.ctxt, it['flavorid'])
            self._assertEqualObjects(current_specs, real_specs)

    def test_flavor_extra_specs_update_or_create_flavor_not_found(self):
        self.assertRaises(exception.FlavorNotFound,
                          db.flavor_extra_specs_update_or_create,
                          self.ctxt, 'nonexists', {})

    def test_flavor_extra_specs_update_or_create_retry(self):
        # Stub the flavor-id lookup to always raise DBDuplicateEntry so that
        # update_or_create retries until its retry budget (5) is exhausted.
        def counted():
            def get_id(context, flavorid, session):
                get_id.counter += 1
                raise db_exc.DBDuplicateEntry
            get_id.counter = 0
            return get_id

        get_id = counted()
        self.stubs.Set(sqlalchemy_api, '_flavor_get_id_from_flavor', get_id)
        self.assertRaises(exception.FlavorExtraSpecUpdateCreateFailed,
                          sqlalchemy_api.flavor_extra_specs_update_or_create,
                          self.ctxt, 1, {}, 5)
        # One attempt per unit of retry budget.
        self.assertEqual(get_id.counter, 5)
class InstanceTypeAccessTestCase(BaseInstanceTypeTestCase):
    """Tests for the flavor project-access DB API (add/get/remove)."""

    def _create_flavor_access(self, flavor_id, project_id):
        # Grant `project_id` access to the flavor identified by `flavor_id`.
        return db.flavor_access_add(self.ctxt, flavor_id, project_id)

    def test_flavor_access_get_by_flavor_id(self):
        flavors = ({'name': 'n1', 'flavorid': 'f1'},
                   {'name': 'n2', 'flavorid': 'f2'})
        it1, it2 = tuple((self._create_flavor(v) for v in flavors))

        access_it1 = [self._create_flavor_access(it1['flavorid'], 'pr1'),
                      self._create_flavor_access(it1['flavorid'], 'pr2')]
        access_it2 = [self._create_flavor_access(it2['flavorid'], 'pr1')]

        # Each flavor reports exactly the grants created for it.
        for it, access_it in zip((it1, it2), (access_it1, access_it2)):
            params = (self.ctxt, it['flavorid'])
            real_access_it = db.flavor_access_get_by_flavor_id(*params)
            self._assertEqualListsOfObjects(access_it, real_access_it)

    def test_flavor_access_get_by_flavor_id_flavor_not_found(self):
        self.assertRaises(exception.FlavorNotFound,
                          db.flavor_get_by_flavor_id,
                          self.ctxt, 'nonexists')

    def test_flavor_access_add(self):
        flavor = self._create_flavor({'flavorid': 'f1'})
        project_id = 'p1'

        access = self._create_flavor_access(flavor['flavorid'], project_id)
        # NOTE(boris-42): Check that flavor_access_add doesn't fail and
        # returns correct value. This is enough because other
        # logic is checked by other methods.
        self.assertIsNotNone(access['id'])
        self.assertEqual(access['instance_type_id'], flavor['id'])
        self.assertEqual(access['project_id'], project_id)

    def test_flavor_access_add_to_non_existing_flavor(self):
        self.assertRaises(exception.FlavorNotFound,
                          self._create_flavor_access,
                          'nonexists', 'does_not_matter')

    def test_flavor_access_add_duplicate_project_id_flavor(self):
        # A second grant for the same (flavor, project) pair must fail.
        flavor = self._create_flavor({'flavorid': 'f1'})
        params = (flavor['flavorid'], 'p1')

        self._create_flavor_access(*params)
        self.assertRaises(exception.FlavorAccessExists,
                          self._create_flavor_access, *params)

    def test_flavor_access_remove(self):
        flavors = ({'name': 'n1', 'flavorid': 'f1'},
                   {'name': 'n2', 'flavorid': 'f2'})
        it1, it2 = tuple((self._create_flavor(v) for v in flavors))

        access_it1 = [self._create_flavor_access(it1['flavorid'], 'pr1'),
                      self._create_flavor_access(it1['flavorid'], 'pr2')]
        access_it2 = [self._create_flavor_access(it2['flavorid'], 'pr1')]

        # Remove only the second grant of flavor 1; everything else remains.
        db.flavor_access_remove(self.ctxt, it1['flavorid'],
                                access_it1[1]['project_id'])

        for it, access_it in zip((it1, it2), (access_it1[:1], access_it2)):
            params = (self.ctxt, it['flavorid'])
            real_access_it = db.flavor_access_get_by_flavor_id(*params)
            self._assertEqualListsOfObjects(access_it, real_access_it)

    def test_flavor_access_remove_flavor_not_found(self):
        self.assertRaises(exception.FlavorNotFound,
                          db.flavor_access_remove,
                          self.ctxt, 'nonexists', 'does_not_matter')

    def test_flavor_access_remove_access_not_found(self):
        flavor = self._create_flavor({'flavorid': 'f1'})
        params = (flavor['flavorid'], 'p1')
        self._create_flavor_access(*params)
        self.assertRaises(exception.FlavorAccessNotFound,
                          db.flavor_access_remove,
                          self.ctxt, flavor['flavorid'], 'p2')

    def test_flavor_access_removed_after_flavor_destroy(self):
        # Destroying a flavor must cascade-remove its access grants
        # without touching grants belonging to other flavors.
        flavor1 = self._create_flavor({'flavorid': 'f1', 'name': 'n1'})
        flavor2 = self._create_flavor({'flavorid': 'f2', 'name': 'n2'})
        values = [
            (flavor1['flavorid'], 'p1'),
            (flavor1['flavorid'], 'p2'),
            (flavor2['flavorid'], 'p3')
        ]
        for v in values:
            self._create_flavor_access(*v)

        db.flavor_destroy(self.ctxt, flavor1['name'])

        p = (self.ctxt, flavor1['flavorid'])
        self.assertEqual(0, len(db.flavor_access_get_by_flavor_id(*p)))
        p = (self.ctxt, flavor2['flavorid'])
        self.assertEqual(1, len(db.flavor_access_get_by_flavor_id(*p)))
        db.flavor_destroy(self.ctxt, flavor2['name'])
        self.assertEqual(0, len(db.flavor_access_get_by_flavor_id(*p)))
class FixedIPTestCase(BaseInstanceTypeTestCase):
    """Tests for the fixed-IP DB API: create, lookup, (dis)associate,
    pool allocation with retries, and bulk create.
    """

    def _timeout_test(self, ctxt, timeout, multi_host):
        # Seed one instance, one network and four fixed IPs; only the first
        # IP (unallocated, on the network, updated before `timeout`) should
        # be eligible for deallocation by the timeout sweep.
        instance = db.instance_create(ctxt, dict(host='foo'))
        net = db.network_create_safe(ctxt, dict(multi_host=multi_host,
                                                host='bar'))
        old = timeout - datetime.timedelta(seconds=5)
        new = timeout + datetime.timedelta(seconds=5)
        # should deallocate
        db.fixed_ip_create(ctxt, dict(allocated=False,
                                      instance_uuid=instance['uuid'],
                                      network_id=net['id'],
                                      updated_at=old))
        # still allocated
        db.fixed_ip_create(ctxt, dict(allocated=True,
                                      instance_uuid=instance['uuid'],
                                      network_id=net['id'],
                                      updated_at=old))
        # wrong network
        db.fixed_ip_create(ctxt, dict(allocated=False,
                                      instance_uuid=instance['uuid'],
                                      network_id=None,
                                      updated_at=old))
        # too new
        db.fixed_ip_create(ctxt, dict(allocated=False,
                                      instance_uuid=instance['uuid'],
                                      network_id=None,
                                      updated_at=new))

    def mock_db_query_first_to_raise_data_error_exception(self):
        # Make Query.first() raise DBError to exercise DB-error paths.
        self.mox.StubOutWithMock(query.Query, 'first')
        query.Query.first().AndRaise(db_exc.DBError())
        self.mox.ReplayAll()

    def test_fixed_ip_disassociate_all_by_timeout_single_host(self):
        now = timeutils.utcnow()
        self._timeout_test(self.ctxt, now, False)
        # Instance host 'foo' owns nothing deallocatable; network host 'bar'
        # owns the single eligible IP (single-host networks sweep by net host).
        result = db.fixed_ip_disassociate_all_by_timeout(self.ctxt, 'foo', now)
        self.assertEqual(result, 0)
        result = db.fixed_ip_disassociate_all_by_timeout(self.ctxt, 'bar', now)
        self.assertEqual(result, 1)

    def test_fixed_ip_disassociate_all_by_timeout_multi_host(self):
        now = timeutils.utcnow()
        self._timeout_test(self.ctxt, now, True)
        # With multi_host, the sweep matches the instance host 'foo' instead.
        result = db.fixed_ip_disassociate_all_by_timeout(self.ctxt, 'foo', now)
        self.assertEqual(result, 1)
        result = db.fixed_ip_disassociate_all_by_timeout(self.ctxt, 'bar', now)
        self.assertEqual(result, 0)

    def test_fixed_ip_get_by_floating_address(self):
        fixed_ip = db.fixed_ip_create(self.ctxt, {'address': '192.168.0.2'})
        values = {'address': '8.7.6.5',
                  'fixed_ip_id': fixed_ip['id']}
        floating = db.floating_ip_create(self.ctxt, values)['address']
        fixed_ip_ref = db.fixed_ip_get_by_floating_address(self.ctxt, floating)
        self._assertEqualObjects(fixed_ip, fixed_ip_ref)

    def test_fixed_ip_get_by_host(self):
        # Map of instance host -> addresses associated with instances there.
        host_ips = {
            'host1': ['1.1.1.1', '1.1.1.2', '1.1.1.3'],
            'host2': ['1.1.1.4', '1.1.1.5'],
            'host3': ['1.1.1.6']
        }

        for host, ips in host_ips.items():
            for ip in ips:
                instance_uuid = self._create_instance(host=host)
                db.fixed_ip_create(self.ctxt, {'address': ip})
                db.fixed_ip_associate(self.ctxt, ip, instance_uuid)

        for host, ips in host_ips.items():
            ips_on_host = [x['address']
                           for x in db.fixed_ip_get_by_host(self.ctxt, host)]
            self._assertEqualListsOfPrimitivesAsSets(ips_on_host, ips)

    def test_fixed_ip_get_by_network_host_not_found_exception(self):
        self.assertRaises(
            exception.FixedIpNotFoundForNetworkHost,
            db.fixed_ip_get_by_network_host,
            self.ctxt, 1, 'ignore')

    def test_fixed_ip_get_by_network_host_fixed_ip_found(self):
        db.fixed_ip_create(self.ctxt, dict(network_id=1, host='host'))

        fip = db.fixed_ip_get_by_network_host(self.ctxt, 1, 'host')

        self.assertEqual(1, fip['network_id'])
        self.assertEqual('host', fip['host'])

    def _create_instance(self, **kwargs):
        # Helper: create an instance with the given fields, return its uuid.
        instance = db.instance_create(self.ctxt, kwargs)
        return instance['uuid']

    def test_fixed_ip_get_by_instance_fixed_ip_found(self):
        instance_uuid = self._create_instance()

        FIXED_IP_ADDRESS = '192.168.1.5'
        db.fixed_ip_create(self.ctxt, dict(
            instance_uuid=instance_uuid, address=FIXED_IP_ADDRESS))

        ips_list = db.fixed_ip_get_by_instance(self.ctxt, instance_uuid)
        self._assertEqualListsOfPrimitivesAsSets([FIXED_IP_ADDRESS],
                                                 [ips_list[0].address])

    def test_fixed_ip_get_by_instance_multiple_fixed_ips_found(self):
        instance_uuid = self._create_instance()

        FIXED_IP_ADDRESS_1 = '192.168.1.5'
        db.fixed_ip_create(self.ctxt, dict(
            instance_uuid=instance_uuid, address=FIXED_IP_ADDRESS_1))
        FIXED_IP_ADDRESS_2 = '192.168.1.6'
        db.fixed_ip_create(self.ctxt, dict(
            instance_uuid=instance_uuid, address=FIXED_IP_ADDRESS_2))

        ips_list = db.fixed_ip_get_by_instance(self.ctxt, instance_uuid)
        self._assertEqualListsOfPrimitivesAsSets(
            [FIXED_IP_ADDRESS_1, FIXED_IP_ADDRESS_2],
            [ips_list[0].address, ips_list[1].address])

    def test_fixed_ip_get_by_instance_inappropriate_ignored(self):
        # IPs belonging to a different instance must not leak into the result.
        instance_uuid = self._create_instance()

        FIXED_IP_ADDRESS_1 = '192.168.1.5'
        db.fixed_ip_create(self.ctxt, dict(
            instance_uuid=instance_uuid, address=FIXED_IP_ADDRESS_1))
        FIXED_IP_ADDRESS_2 = '192.168.1.6'
        db.fixed_ip_create(self.ctxt, dict(
            instance_uuid=instance_uuid, address=FIXED_IP_ADDRESS_2))

        another_instance = db.instance_create(self.ctxt, {})
        db.fixed_ip_create(self.ctxt, dict(
            instance_uuid=another_instance['uuid'], address="192.168.1.7"))

        ips_list = db.fixed_ip_get_by_instance(self.ctxt, instance_uuid)
        self._assertEqualListsOfPrimitivesAsSets(
            [FIXED_IP_ADDRESS_1, FIXED_IP_ADDRESS_2],
            [ips_list[0].address, ips_list[1].address])

    def test_fixed_ip_get_by_instance_not_found_exception(self):
        instance_uuid = self._create_instance()

        self.assertRaises(exception.FixedIpNotFoundForInstance,
                          db.fixed_ip_get_by_instance,
                          self.ctxt, instance_uuid)

    def test_fixed_ips_by_virtual_interface_fixed_ip_found(self):
        instance_uuid = self._create_instance()

        vif = db.virtual_interface_create(
            self.ctxt, dict(instance_uuid=instance_uuid))

        FIXED_IP_ADDRESS = '192.168.1.5'
        db.fixed_ip_create(self.ctxt, dict(
            virtual_interface_id=vif.id, address=FIXED_IP_ADDRESS))

        ips_list = db.fixed_ips_by_virtual_interface(self.ctxt, vif.id)
        self._assertEqualListsOfPrimitivesAsSets([FIXED_IP_ADDRESS],
                                                 [ips_list[0].address])

    def test_fixed_ips_by_virtual_interface_multiple_fixed_ips_found(self):
        instance_uuid = self._create_instance()

        vif = db.virtual_interface_create(
            self.ctxt, dict(instance_uuid=instance_uuid))

        FIXED_IP_ADDRESS_1 = '192.168.1.5'
        db.fixed_ip_create(self.ctxt, dict(
            virtual_interface_id=vif.id, address=FIXED_IP_ADDRESS_1))
        FIXED_IP_ADDRESS_2 = '192.168.1.6'
        db.fixed_ip_create(self.ctxt, dict(
            virtual_interface_id=vif.id, address=FIXED_IP_ADDRESS_2))

        ips_list = db.fixed_ips_by_virtual_interface(self.ctxt, vif.id)
        self._assertEqualListsOfPrimitivesAsSets(
            [FIXED_IP_ADDRESS_1, FIXED_IP_ADDRESS_2],
            [ips_list[0].address, ips_list[1].address])

    def test_fixed_ips_by_virtual_interface_inappropriate_ignored(self):
        # IPs attached to a different VIF must not appear in the result.
        instance_uuid = self._create_instance()

        vif = db.virtual_interface_create(
            self.ctxt, dict(instance_uuid=instance_uuid))

        FIXED_IP_ADDRESS_1 = '192.168.1.5'
        db.fixed_ip_create(self.ctxt, dict(
            virtual_interface_id=vif.id, address=FIXED_IP_ADDRESS_1))
        FIXED_IP_ADDRESS_2 = '192.168.1.6'
        db.fixed_ip_create(self.ctxt, dict(
            virtual_interface_id=vif.id, address=FIXED_IP_ADDRESS_2))

        another_vif = db.virtual_interface_create(
            self.ctxt, dict(instance_uuid=instance_uuid))
        db.fixed_ip_create(self.ctxt, dict(
            virtual_interface_id=another_vif.id, address="192.168.1.7"))

        ips_list = db.fixed_ips_by_virtual_interface(self.ctxt, vif.id)
        self._assertEqualListsOfPrimitivesAsSets(
            [FIXED_IP_ADDRESS_1, FIXED_IP_ADDRESS_2],
            [ips_list[0].address, ips_list[1].address])

    def test_fixed_ips_by_virtual_interface_no_ip_found(self):
        instance_uuid = self._create_instance()

        vif = db.virtual_interface_create(
            self.ctxt, dict(instance_uuid=instance_uuid))

        ips_list = db.fixed_ips_by_virtual_interface(self.ctxt, vif.id)
        self.assertEqual(0, len(ips_list))

    def create_fixed_ip(self, **params):
        # Helper: create a fixed IP (default address 192.168.0.1, overridable
        # via kwargs) and return its address.
        default_params = {'address': '192.168.0.1'}
        default_params.update(params)
        return db.fixed_ip_create(self.ctxt, default_params)['address']

    def test_fixed_ip_associate_fails_if_ip_not_in_network(self):
        instance_uuid = self._create_instance()
        self.assertRaises(exception.FixedIpNotFoundForNetwork,
                          db.fixed_ip_associate,
                          self.ctxt, None, instance_uuid)

    def test_fixed_ip_associate_fails_if_ip_in_use(self):
        instance_uuid = self._create_instance()

        address = self.create_fixed_ip(instance_uuid=instance_uuid)
        self.assertRaises(exception.FixedIpAlreadyInUse,
                          db.fixed_ip_associate,
                          self.ctxt, address, instance_uuid)

    def test_fixed_ip_associate_succeeds(self):
        instance_uuid = self._create_instance()
        network = db.network_create_safe(self.ctxt, {})

        address = self.create_fixed_ip(network_id=network['id'])
        db.fixed_ip_associate(self.ctxt, address, instance_uuid,
                              network_id=network['id'])
        fixed_ip = db.fixed_ip_get_by_address(self.ctxt, address)
        self.assertEqual(fixed_ip['instance_uuid'], instance_uuid)

    def test_fixed_ip_associate_succeeds_and_sets_network(self):
        # Associating an IP created without a network also sets network_id.
        instance_uuid = self._create_instance()
        network = db.network_create_safe(self.ctxt, {})

        address = self.create_fixed_ip()
        db.fixed_ip_associate(self.ctxt, address, instance_uuid,
                              network_id=network['id'])
        fixed_ip = db.fixed_ip_get_by_address(self.ctxt, address)
        self.assertEqual(fixed_ip['instance_uuid'], instance_uuid)
        self.assertEqual(fixed_ip['network_id'], network['id'])

    def test_fixed_ip_associate_succeeds_retry_on_deadlock(self):
        # First candidate fetch deadlocks; the retry must succeed.
        instance_uuid = self._create_instance()
        network = db.network_create_safe(self.ctxt, {})

        address = self.create_fixed_ip()

        def fake_first():
            if mock_first.call_count == 1:
                raise db_exc.DBDeadlock()
            else:
                return objects.Instance(id=1, address=address, reserved=False,
                                        instance_uuid=None, network_id=None)

        with mock.patch('sqlalchemy.orm.query.Query.first',
                        side_effect=fake_first) as mock_first:
            db.fixed_ip_associate(self.ctxt, address, instance_uuid,
                                  network_id=network['id'])
            self.assertEqual(2, mock_first.call_count)

        fixed_ip = db.fixed_ip_get_by_address(self.ctxt, address)
        self.assertEqual(fixed_ip['instance_uuid'], instance_uuid)
        self.assertEqual(fixed_ip['network_id'], network['id'])

    def test_fixed_ip_associate_succeeds_retry_on_no_rows_updated(self):
        # First fetch returns a stale row (id=2) whose update matches no rows;
        # the retry fetches the real row (id=1) and succeeds.
        instance_uuid = self._create_instance()
        network = db.network_create_safe(self.ctxt, {})

        address = self.create_fixed_ip()

        def fake_first():
            if mock_first.call_count == 1:
                return objects.Instance(id=2, address=address, reserved=False,
                                        instance_uuid=None, network_id=None)
            else:
                return objects.Instance(id=1, address=address, reserved=False,
                                        instance_uuid=None, network_id=None)

        with mock.patch('sqlalchemy.orm.query.Query.first',
                        side_effect=fake_first) as mock_first:
            db.fixed_ip_associate(self.ctxt, address, instance_uuid,
                                  network_id=network['id'])
            self.assertEqual(2, mock_first.call_count)

        fixed_ip = db.fixed_ip_get_by_address(self.ctxt, address)
        self.assertEqual(fixed_ip['instance_uuid'], instance_uuid)
        self.assertEqual(fixed_ip['network_id'], network['id'])

    def test_fixed_ip_associate_succeeds_retry_limit_exceeded(self):
        # Every fetch returns a stale row, so the retry budget runs out.
        instance_uuid = self._create_instance()
        network = db.network_create_safe(self.ctxt, {})

        self.create_fixed_ip()
        address = self.create_fixed_ip()

        def fake_first():
            return objects.Instance(id=2, address=address, reserved=False,
                                    instance_uuid=None, network_id=None)

        with mock.patch('sqlalchemy.orm.query.Query.first',
                        side_effect=fake_first) as mock_first:
            self.assertRaises(exception.FixedIpAssociateFailed,
                              db.fixed_ip_associate, self.ctxt, address,
                              instance_uuid, network_id=network['id'])
            # 5 retries + initial attempt
            self.assertEqual(6, mock_first.call_count)

    def test_fixed_ip_associate_ip_not_in_network_with_no_retries(self):
        # A missing candidate row is a hard failure: no retries happen.
        instance_uuid = self._create_instance()

        with mock.patch('sqlalchemy.orm.query.Query.first',
                        return_value=None) as mock_first:
            self.assertRaises(exception.FixedIpNotFoundForNetwork,
                              db.fixed_ip_associate,
                              self.ctxt, None, instance_uuid)
            self.assertEqual(1, mock_first.call_count)

    def test_fixed_ip_associate_no_network_id_with_no_retries(self):
        # Tests that trying to associate an instance to a fixed IP on a network
        # but without specifying the network ID during associate will fail.
        instance_uuid = self._create_instance()
        network = db.network_create_safe(self.ctxt, {})
        address = self.create_fixed_ip(network_id=network['id'])

        with mock.patch('sqlalchemy.orm.query.Query.first',
                        return_value=None) as mock_first:
            self.assertRaises(exception.FixedIpNotFoundForNetwork,
                              db.fixed_ip_associate,
                              self.ctxt, address, instance_uuid)
            self.assertEqual(1, mock_first.call_count)

    def test_fixed_ip_associate_with_vif(self):
        # Supplying a virtual_interface_id marks the IP as allocated.
        instance_uuid = self._create_instance()
        network = db.network_create_safe(self.ctxt, {})
        vif = db.virtual_interface_create(self.ctxt, {})
        address = self.create_fixed_ip()

        fixed_ip = db.fixed_ip_associate(self.ctxt, address, instance_uuid,
                                         network_id=network['id'],
                                         virtual_interface_id=vif['id'])

        self.assertTrue(fixed_ip['allocated'])
        self.assertEqual(vif['id'], fixed_ip['virtual_interface_id'])

    def test_fixed_ip_associate_not_allocated_without_vif(self):
        instance_uuid = self._create_instance()
        address = self.create_fixed_ip()

        fixed_ip = db.fixed_ip_associate(self.ctxt, address, instance_uuid)

        self.assertFalse(fixed_ip['allocated'])
        self.assertIsNone(fixed_ip['virtual_interface_id'])

    def test_fixed_ip_associate_pool_invalid_uuid(self):
        instance_uuid = '123'
        self.assertRaises(exception.InvalidUUID, db.fixed_ip_associate_pool,
                          self.ctxt, None, instance_uuid)

    def test_fixed_ip_associate_pool_no_more_fixed_ips(self):
        instance_uuid = self._create_instance()
        self.assertRaises(exception.NoMoreFixedIps, db.fixed_ip_associate_pool,
                          self.ctxt, None, instance_uuid)

    def test_fixed_ip_associate_pool_succeeds(self):
        instance_uuid = self._create_instance()
        network = db.network_create_safe(self.ctxt, {})

        address = self.create_fixed_ip(network_id=network['id'])
        db.fixed_ip_associate_pool(self.ctxt, network['id'], instance_uuid)
        fixed_ip = db.fixed_ip_get_by_address(self.ctxt, address)
        self.assertEqual(fixed_ip['instance_uuid'], instance_uuid)

    def test_fixed_ip_associate_pool_succeeds_fip_ref_network_id_is_none(self):
        # Pool allocation of an IP with no network must assign the network.
        instance_uuid = self._create_instance()
        network = db.network_create_safe(self.ctxt, {})

        self.create_fixed_ip(network_id=None)
        fixed_ip = db.fixed_ip_associate_pool(self.ctxt,
                                              network['id'], instance_uuid)
        self.assertEqual(instance_uuid, fixed_ip['instance_uuid'])
        self.assertEqual(network['id'], fixed_ip['network_id'])

    def test_fixed_ip_associate_pool_succeeds_retry(self):
        # First candidate row has a bogus address so the update matches no
        # rows; the retry picks the real row and succeeds.
        instance_uuid = self._create_instance()
        network = db.network_create_safe(self.ctxt, {})

        address = self.create_fixed_ip(network_id=network['id'])

        def fake_first():
            if mock_first.call_count == 1:
                return {'network_id': network['id'], 'address': 'invalid',
                        'instance_uuid': None, 'host': None, 'id': 1}
            else:
                return {'network_id': network['id'], 'address': address,
                        'instance_uuid': None, 'host': None, 'id': 1}

        with mock.patch('sqlalchemy.orm.query.Query.first',
                        side_effect=fake_first) as mock_first:
            db.fixed_ip_associate_pool(self.ctxt, network['id'], instance_uuid)
            self.assertEqual(2, mock_first.call_count)

        fixed_ip = db.fixed_ip_get_by_address(self.ctxt, address)
        self.assertEqual(instance_uuid, fixed_ip['instance_uuid'])

    def test_fixed_ip_associate_pool_retry_limit_exceeded(self):
        # Candidate row never becomes valid, so the retry budget runs out.
        instance_uuid = self._create_instance()
        network = db.network_create_safe(self.ctxt, {})

        self.create_fixed_ip(network_id=network['id'])

        def fake_first():
            return {'network_id': network['id'], 'address': 'invalid',
                    'instance_uuid': None, 'host': None, 'id': 1}

        with mock.patch('sqlalchemy.orm.query.Query.first',
                        side_effect=fake_first) as mock_first:
            self.assertRaises(exception.FixedIpAssociateFailed,
                              db.fixed_ip_associate_pool, self.ctxt,
                              network['id'], instance_uuid)
            # 5 retries + initial attempt
            self.assertEqual(6, mock_first.call_count)

    def test_fixed_ip_create_same_address(self):
        address = '192.168.1.5'
        params = {'address': address}
        db.fixed_ip_create(self.ctxt, params)
        self.assertRaises(exception.FixedIpExists, db.fixed_ip_create,
                          self.ctxt, params)

    def test_fixed_ip_create_success(self):
        instance_uuid = self._create_instance()
        network_id = db.network_create_safe(self.ctxt, {})['id']
        param = {
            'reserved': False,
            'deleted': 0,
            'leased': False,
            'host': '127.0.0.1',
            'address': '192.168.1.5',
            'allocated': False,
            'instance_uuid': instance_uuid,
            'network_id': network_id,
            'virtual_interface_id': None
        }

        ignored_keys = ['created_at', 'id', 'deleted_at', 'updated_at']
        fixed_ip_data = db.fixed_ip_create(self.ctxt, param)
        self._assertEqualObjects(param, fixed_ip_data, ignored_keys)

    def test_fixed_ip_bulk_create_same_address(self):
        address_1 = '192.168.1.5'
        address_2 = '192.168.1.6'
        instance_uuid = self._create_instance()
        network_id_1 = db.network_create_safe(self.ctxt, {})['id']
        network_id_2 = db.network_create_safe(self.ctxt, {})['id']
        # address_2 appears twice, which must trigger FixedIpExists.
        params = [
            {'reserved': False, 'deleted': 0, 'leased': False,
             'host': '127.0.0.1', 'address': address_2, 'allocated': False,
             'instance_uuid': instance_uuid, 'network_id': network_id_1,
             'virtual_interface_id': None},
            {'reserved': False, 'deleted': 0, 'leased': False,
             'host': '127.0.0.1', 'address': address_1, 'allocated': False,
             'instance_uuid': instance_uuid, 'network_id': network_id_1,
             'virtual_interface_id': None},
            {'reserved': False, 'deleted': 0, 'leased': False,
             'host': 'localhost', 'address': address_2, 'allocated': True,
             'instance_uuid': instance_uuid, 'network_id': network_id_2,
             'virtual_interface_id': None},
        ]

        self.assertRaises(exception.FixedIpExists, db.fixed_ip_bulk_create,
                          self.ctxt, params)
        # In this case the transaction will be rolled back and none of the ips
        # will make it to the database.
        self.assertRaises(exception.FixedIpNotFoundForAddress,
                          db.fixed_ip_get_by_address, self.ctxt, address_1)
        self.assertRaises(exception.FixedIpNotFoundForAddress,
                          db.fixed_ip_get_by_address, self.ctxt, address_2)

    def test_fixed_ip_bulk_create_success(self):
        address_1 = '192.168.1.5'
        address_2 = '192.168.1.6'

        instance_uuid = self._create_instance()
        network_id_1 = db.network_create_safe(self.ctxt, {})['id']
        network_id_2 = db.network_create_safe(self.ctxt, {})['id']
        params = [
            {'reserved': False, 'deleted': 0, 'leased': False,
             'host': '127.0.0.1', 'address': address_1, 'allocated': False,
             'instance_uuid': instance_uuid, 'network_id': network_id_1,
             'virtual_interface_id': None},
            {'reserved': False, 'deleted': 0, 'leased': False,
             'host': 'localhost', 'address': address_2, 'allocated': True,
             'instance_uuid': instance_uuid, 'network_id': network_id_2,
             'virtual_interface_id': None}
        ]

        db.fixed_ip_bulk_create(self.ctxt, params)
        ignored_keys = ['created_at', 'id', 'deleted_at', 'updated_at',
                        'virtual_interface', 'network', 'floating_ips']
        fixed_ip_data = db.fixed_ip_get_by_instance(self.ctxt, instance_uuid)

        # we have no `id` in incoming data so we can not use
        # _assertEqualListsOfObjects to compare incoming data and received
        # objects
        fixed_ip_data = sorted(fixed_ip_data, key=lambda i: i['network_id'])
        params = sorted(params, key=lambda i: i['network_id'])
        for param, ip in zip(params, fixed_ip_data):
            self._assertEqualObjects(param, ip, ignored_keys)

    def test_fixed_ip_disassociate(self):
        # Disassociating clears instance_uuid and virtual_interface_id
        # while leaving the rest of the row untouched.
        address = '192.168.1.5'
        instance_uuid = self._create_instance()
        network_id = db.network_create_safe(self.ctxt, {})['id']
        values = {'address': '192.168.1.5', 'instance_uuid': instance_uuid}
        vif = db.virtual_interface_create(self.ctxt, values)
        param = {
            'reserved': False,
            'deleted': 0,
            'leased': False,
            'host': '127.0.0.1',
            'address': address,
            'allocated': False,
            'instance_uuid': instance_uuid,
            'network_id': network_id,
            'virtual_interface_id': vif['id']
        }
        db.fixed_ip_create(self.ctxt, param)

        db.fixed_ip_disassociate(self.ctxt, address)
        fixed_ip_data = db.fixed_ip_get_by_address(self.ctxt, address)
        ignored_keys = ['created_at', 'id', 'deleted_at',
                        'updated_at', 'instance_uuid',
                        'virtual_interface_id']
        self._assertEqualObjects(param, fixed_ip_data, ignored_keys)
        self.assertIsNone(fixed_ip_data['instance_uuid'])
        self.assertIsNone(fixed_ip_data['virtual_interface_id'])

    def test_fixed_ip_get_not_found_exception(self):
        self.assertRaises(exception.FixedIpNotFound,
                          db.fixed_ip_get, self.ctxt, 0)

    def test_fixed_ip_get_success2(self):
        address = '192.168.1.5'
        instance_uuid = self._create_instance()
        network_id = db.network_create_safe(self.ctxt, {})['id']
        param = {
            'reserved': False,
            'deleted': 0,
            'leased': False,
            'host': '127.0.0.1',
            'address': address,
            'allocated': False,
            'instance_uuid': instance_uuid,
            'network_id': network_id,
            'virtual_interface_id': None
        }
        fixed_ip_id = db.fixed_ip_create(self.ctxt, param)

        # Non-admin contexts are rejected before the lookup happens.
        self.ctxt.is_admin = False
        self.assertRaises(exception.Forbidden, db.fixed_ip_get,
                          self.ctxt, fixed_ip_id)

    def test_fixed_ip_get_success(self):
        address = '192.168.1.5'
        instance_uuid = self._create_instance()
        network_id = db.network_create_safe(self.ctxt, {})['id']
        param = {
            'reserved': False,
            'deleted': 0,
            'leased': False,
            'host': '127.0.0.1',
            'address': address,
            'allocated': False,
            'instance_uuid': instance_uuid,
            'network_id': network_id,
            'virtual_interface_id': None
        }
        db.fixed_ip_create(self.ctxt, param)

        fixed_ip_id = db.fixed_ip_get_by_address(self.ctxt, address)['id']
        fixed_ip_data = db.fixed_ip_get(self.ctxt, fixed_ip_id)
        ignored_keys = ['created_at', 'id', 'deleted_at', 'updated_at']
        self._assertEqualObjects(param, fixed_ip_data, ignored_keys)

    def test_fixed_ip_get_by_address(self):
        # columns_to_join=['instance'] eagerly loads the instance relation.
        instance_uuid = self._create_instance()
        db.fixed_ip_create(self.ctxt, {'address': '1.2.3.4',
                                       'instance_uuid': instance_uuid,
                                       })
        fixed_ip = db.fixed_ip_get_by_address(self.ctxt, '1.2.3.4',
                                              columns_to_join=['instance'])
        self.assertIn('instance', fixed_ip.__dict__)
        self.assertEqual(instance_uuid, fixed_ip.instance.uuid)

    def test_fixed_ip_update_not_found_for_address(self):
        self.assertRaises(exception.FixedIpNotFoundForAddress,
                          db.fixed_ip_update, self.ctxt,
                          '192.168.1.5', {})

    def test_fixed_ip_update(self):
        # Updating an existing row with a full new value set (including a
        # new address) must be reflected in a subsequent lookup.
        instance_uuid_1 = self._create_instance()
        instance_uuid_2 = self._create_instance()
        network_id_1 = db.network_create_safe(self.ctxt, {})['id']
        network_id_2 = db.network_create_safe(self.ctxt, {})['id']
        param_1 = {
            'reserved': True, 'deleted': 0, 'leased': True,
            'host': '192.168.133.1', 'address': '10.0.0.2',
            'allocated': True, 'instance_uuid': instance_uuid_1,
            'network_id': network_id_1, 'virtual_interface_id': '123',
        }

        param_2 = {
            'reserved': False, 'deleted': 0, 'leased': False,
            'host': '127.0.0.1', 'address': '10.0.0.3', 'allocated': False,
            'instance_uuid': instance_uuid_2, 'network_id': network_id_2,
            'virtual_interface_id': None
        }

        ignored_keys = ['created_at', 'id', 'deleted_at', 'updated_at']
        fixed_ip_addr = db.fixed_ip_create(self.ctxt, param_1)['address']
        db.fixed_ip_update(self.ctxt, fixed_ip_addr, param_2)
        fixed_ip_after_update = db.fixed_ip_get_by_address(self.ctxt,
                                                           param_2['address'])
        self._assertEqualObjects(param_2, fixed_ip_after_update, ignored_keys)
class FloatingIpTestCase(test.TestCase, ModelsObjectComparatorMixin):
    """Tests for the floating-IP DB API."""

    def setUp(self):
        super(FloatingIpTestCase, self).setUp()
        # All DB calls in this case run as admin unless a test overrides it.
        self.ctxt = context.get_admin_context()
def _get_base_values(self):
    """Return default field values for a test floating IP row."""
    return {
        'address': '1.1.1.1',
        'fixed_ip_id': None,
        'project_id': 'fake_project',
        'host': 'fake_host',
        'auto_assigned': False,
        'pool': 'fake_pool',
        'interface': 'fake_interface',
    }
def mock_db_query_first_to_raise_data_error_exception(self):
    """Stub Query.first() to raise DBError, to exercise DB-error paths."""
    self.mox.StubOutWithMock(query.Query, 'first')
    query.Query.first().AndRaise(db_exc.DBError())
    self.mox.ReplayAll()
def _create_floating_ip(self, values):
    """Create a floating IP from the base values overlaid with ``values``."""
    merged = self._get_base_values()
    merged.update(values or {})
    return db.floating_ip_create(self.ctxt, merged)
def test_floating_ip_get(self):
    """Each created floating IP can be fetched again by its id."""
    values = [{'address': '0.0.0.0'}, {'address': '1.1.1.1'}]
    floating_ips = [self._create_floating_ip(val) for val in values]

    for floating_ip in floating_ips:
        real_floating_ip = db.floating_ip_get(self.ctxt, floating_ip['id'])
        self._assertEqualObjects(floating_ip, real_floating_ip,
                                 ignored_keys=['fixed_ip'])
def test_floating_ip_get_not_found(self):
    """Fetching a nonexistent id raises FloatingIpNotFound."""
    self.assertRaises(exception.FloatingIpNotFound,
                      db.floating_ip_get, self.ctxt, 100500)
def test_floating_ip_get_with_long_id_not_found(self):
    """A DBError on the lookup is surfaced as InvalidID."""
    self.mock_db_query_first_to_raise_data_error_exception()
    self.assertRaises(exception.InvalidID,
                      db.floating_ip_get, self.ctxt, 123456789101112)
def test_floating_ip_get_pools(self):
    """floating_ip_get_pools returns the distinct pool names."""
    values = [
        {'address': '0.0.0.0', 'pool': 'abc'},
        {'address': '1.1.1.1', 'pool': 'abc'},
        {'address': '2.2.2.2', 'pool': 'def'},
        {'address': '3.3.3.3', 'pool': 'ghi'},
    ]
    for val in values:
        self._create_floating_ip(val)
    # Expected: one entry per distinct pool name.
    expected_pools = [{'name': x}
                      for x in set(map(lambda x: x['pool'], values))]
    real_pools = db.floating_ip_get_pools(self.ctxt)
    self._assertEqualListsOfPrimitivesAsSets(real_pools, expected_pools)
def test_floating_ip_allocate_address(self):
pools = {
'pool1': ['0.0.0.0', '1.1.1.1'],
'pool2': ['2.2.2.2'],
'pool3': ['3.3.3.3', '4.4.4.4', '5.5.5.5']
}
for pool, addresses in pools.items():
for address in addresses:
vals = {'pool': pool, 'address': address, 'project_id': None}
self._create_floating_ip(vals)
project_id = self._get_base_values()['project_id']
for pool, addresses in pools.items():
alloc_addrs = []
for i in addresses:
float_addr = db.floating_ip_allocate_address(self.ctxt,
project_id, pool)
alloc_addrs.append(float_addr)
self._assertEqualListsOfPrimitivesAsSets(alloc_addrs, addresses)
def test_floating_ip_allocate_auto_assigned(self):
addresses = ['1.1.1.1', '1.1.1.2', '1.1.1.3', '1.1.1.4']
float_ips = []
for i in range(0, 2):
float_ips.append(self._create_floating_ip(
{"address": addresses[i]}))
for i in range(2, 4):
float_ips.append(self._create_floating_ip({"address": addresses[i],
"auto_assigned": True}))
for i in range(0, 2):
float_ip = db.floating_ip_get(self.ctxt, float_ips[i].id)
self.assertFalse(float_ip.auto_assigned)
for i in range(2, 4):
float_ip = db.floating_ip_get(self.ctxt, float_ips[i].id)
self.assertTrue(float_ip.auto_assigned)
def test_floating_ip_allocate_address_no_more_floating_ips(self):
self.assertRaises(exception.NoMoreFloatingIps,
db.floating_ip_allocate_address,
self.ctxt, 'any_project_id', 'no_such_pool')
def test_floating_ip_allocate_not_authorized(self):
ctxt = context.RequestContext(user_id='a', project_id='abc',
is_admin=False)
self.assertRaises(exception.Forbidden,
db.floating_ip_allocate_address,
ctxt, 'other_project_id', 'any_pool')
def test_floating_ip_allocate_address_succeeds_retry(self):
pool = 'pool0'
address = '0.0.0.0'
vals = {'pool': pool, 'address': address, 'project_id': None}
floating_ip = self._create_floating_ip(vals)
project_id = self._get_base_values()['project_id']
def fake_first():
if mock_first.call_count == 1:
return {'pool': pool, 'project_id': None, 'fixed_ip_id': None,
'address': address, 'id': 'invalid_id'}
else:
return {'pool': pool, 'project_id': None, 'fixed_ip_id': None,
'address': address, 'id': 1}
with mock.patch('sqlalchemy.orm.query.Query.first',
side_effect=fake_first) as mock_first:
float_addr = db.floating_ip_allocate_address(self.ctxt,
project_id, pool)
self.assertEqual(address, float_addr)
self.assertEqual(2, mock_first.call_count)
float_ip = db.floating_ip_get(self.ctxt, floating_ip.id)
self.assertEqual(project_id, float_ip['project_id'])
def test_floating_ip_allocate_address_retry_limit_exceeded(self):
pool = 'pool0'
address = '0.0.0.0'
vals = {'pool': pool, 'address': address, 'project_id': None}
self._create_floating_ip(vals)
project_id = self._get_base_values()['project_id']
def fake_first():
return {'pool': pool, 'project_id': None, 'fixed_ip_id': None,
'address': address, 'id': 'invalid_id'}
with mock.patch('sqlalchemy.orm.query.Query.first',
side_effect=fake_first) as mock_first:
self.assertRaises(exception.FloatingIpAllocateFailed,
db.floating_ip_allocate_address, self.ctxt,
project_id, pool)
# 5 retries + initial attempt
self.assertEqual(6, mock_first.call_count)
def test_floating_ip_allocate_address_no_more_ips_with_no_retries(self):
with mock.patch('sqlalchemy.orm.query.Query.first',
return_value=None) as mock_first:
self.assertRaises(exception.NoMoreFloatingIps,
db.floating_ip_allocate_address,
self.ctxt, 'any_project_id', 'no_such_pool')
self.assertEqual(1, mock_first.call_count)
def _get_existing_ips(self):
return [ip['address'] for ip in db.floating_ip_get_all(self.ctxt)]
def test_floating_ip_bulk_create(self):
expected_ips = ['1.1.1.1', '1.1.1.2', '1.1.1.3', '1.1.1.4']
result = db.floating_ip_bulk_create(self.ctxt,
[{'address': x} for x in expected_ips],
want_result=False)
self.assertIsNone(result)
self._assertEqualListsOfPrimitivesAsSets(self._get_existing_ips(),
expected_ips)
def test_floating_ip_bulk_create_duplicate(self):
ips = ['1.1.1.1', '1.1.1.2', '1.1.1.3', '1.1.1.4']
prepare_ips = lambda x: {'address': x}
result = db.floating_ip_bulk_create(self.ctxt,
list(map(prepare_ips, ips)))
self.assertEqual(ips, [ip.address for ip in result])
self.assertRaises(exception.FloatingIpExists,
db.floating_ip_bulk_create,
self.ctxt,
list(map(prepare_ips, ['1.1.1.5', '1.1.1.4'])),
want_result=False)
self.assertRaises(exception.FloatingIpNotFoundForAddress,
db.floating_ip_get_by_address,
self.ctxt, '1.1.1.5')
def test_floating_ip_bulk_destroy(self):
ips_for_delete = []
ips_for_non_delete = []
def create_ips(i, j):
return [{'address': '1.1.%s.%s' % (i, k)} for k in range(1, j + 1)]
# NOTE(boris-42): Create more than 256 ip to check that
# _ip_range_splitter works properly.
for i in range(1, 3):
ips_for_delete.extend(create_ips(i, 255))
ips_for_non_delete.extend(create_ips(3, 255))
result = db.floating_ip_bulk_create(self.ctxt,
ips_for_delete + ips_for_non_delete,
want_result=False)
self.assertIsNone(result)
non_bulk_ips_for_delete = create_ips(4, 3)
non_bulk_ips_for_non_delete = create_ips(5, 3)
non_bulk_ips = non_bulk_ips_for_delete + non_bulk_ips_for_non_delete
project_id = 'fake_project'
reservations = quota.QUOTAS.reserve(self.ctxt,
floating_ips=len(non_bulk_ips),
project_id=project_id)
for dct in non_bulk_ips:
self._create_floating_ip(dct)
quota.QUOTAS.commit(self.ctxt, reservations, project_id=project_id)
self.assertEqual(db.quota_usage_get_all_by_project(
self.ctxt, project_id),
{'project_id': project_id,
'floating_ips': {'in_use': 6, 'reserved': 0}})
ips_for_delete.extend(non_bulk_ips_for_delete)
ips_for_non_delete.extend(non_bulk_ips_for_non_delete)
db.floating_ip_bulk_destroy(self.ctxt, ips_for_delete)
expected_addresses = [x['address'] for x in ips_for_non_delete]
self._assertEqualListsOfPrimitivesAsSets(self._get_existing_ips(),
expected_addresses)
self.assertEqual(db.quota_usage_get_all_by_project(
self.ctxt, project_id),
{'project_id': project_id,
'floating_ips': {'in_use': 3, 'reserved': 0}})
def test_floating_ip_create(self):
floating_ip = self._create_floating_ip({})
ignored_keys = ['id', 'deleted', 'deleted_at', 'updated_at',
'created_at']
self.assertIsNotNone(floating_ip['id'])
self._assertEqualObjects(floating_ip, self._get_base_values(),
ignored_keys)
def test_floating_ip_create_duplicate(self):
self._create_floating_ip({})
self.assertRaises(exception.FloatingIpExists,
self._create_floating_ip, {})
def _create_fixed_ip(self, params):
default_params = {'address': '192.168.0.1'}
default_params.update(params)
return db.fixed_ip_create(self.ctxt, default_params)['address']
def test_floating_ip_fixed_ip_associate(self):
float_addresses = ['1.1.1.1', '1.1.1.2', '1.1.1.3']
fixed_addresses = ['2.2.2.1', '2.2.2.2', '2.2.2.3']
project_id = self.ctxt.project_id
float_ips = [self._create_floating_ip({'address': address,
'project_id': project_id})
for address in float_addresses]
fixed_addrs = [self._create_fixed_ip({'address': address})
for address in fixed_addresses]
for float_ip, fixed_addr in zip(float_ips, fixed_addrs):
fixed_ip = db.floating_ip_fixed_ip_associate(self.ctxt,
float_ip.address,
fixed_addr, 'host')
self.assertEqual(fixed_ip.address, fixed_addr)
updated_float_ip = db.floating_ip_get(self.ctxt, float_ip.id)
self.assertEqual(fixed_ip.id, updated_float_ip.fixed_ip_id)
self.assertEqual('host', updated_float_ip.host)
fixed_ip = db.floating_ip_fixed_ip_associate(self.ctxt,
float_addresses[0],
fixed_addresses[0],
'host')
self.assertEqual(fixed_ip.address, fixed_addresses[0])
def test_floating_ip_fixed_ip_associate_float_ip_not_found(self):
self.assertRaises(exception.FixedIpNotFoundForAddress,
db.floating_ip_fixed_ip_associate,
self.ctxt, '10.10.10.10', 'some', 'some')
def test_floating_ip_associate_failed(self):
fixed_ip = self._create_fixed_ip({'address': '7.7.7.7'})
self.assertRaises(exception.FloatingIpAssociateFailed,
db.floating_ip_fixed_ip_associate,
self.ctxt, '10.10.10.10', fixed_ip, 'some')
def test_floating_ip_deallocate(self):
values = {'address': '1.1.1.1', 'project_id': 'fake', 'host': 'fake'}
float_ip = self._create_floating_ip(values)
rows_updated = db.floating_ip_deallocate(self.ctxt, float_ip.address)
self.assertEqual(1, rows_updated)
updated_float_ip = db.floating_ip_get(self.ctxt, float_ip.id)
self.assertIsNone(updated_float_ip.project_id)
self.assertIsNone(updated_float_ip.host)
self.assertFalse(updated_float_ip.auto_assigned)
def test_floating_ip_deallocate_address_not_found(self):
self.assertEqual(0, db.floating_ip_deallocate(self.ctxt, '2.2.2.2'))
def test_floating_ip_deallocate_address_associated_ip(self):
float_address = '1.1.1.1'
fixed_address = '2.2.2.1'
project_id = self.ctxt.project_id
float_ip = self._create_floating_ip({'address': float_address,
'project_id': project_id})
fixed_addr = self._create_fixed_ip({'address': fixed_address})
db.floating_ip_fixed_ip_associate(self.ctxt, float_ip.address,
fixed_addr, 'host')
self.assertEqual(0, db.floating_ip_deallocate(self.ctxt,
float_address))
def test_floating_ip_destroy(self):
addresses = ['1.1.1.1', '1.1.1.2', '1.1.1.3']
float_ips = [self._create_floating_ip({'address': addr})
for addr in addresses]
expected_len = len(addresses)
for float_ip in float_ips:
db.floating_ip_destroy(self.ctxt, float_ip.address)
self.assertRaises(exception.FloatingIpNotFound,
db.floating_ip_get, self.ctxt, float_ip.id)
expected_len -= 1
if expected_len > 0:
self.assertEqual(expected_len,
len(db.floating_ip_get_all(self.ctxt)))
else:
self.assertRaises(exception.NoFloatingIpsDefined,
db.floating_ip_get_all, self.ctxt)
def test_floating_ip_disassociate(self):
float_addresses = ['1.1.1.1', '1.1.1.2', '1.1.1.3']
fixed_addresses = ['2.2.2.1', '2.2.2.2', '2.2.2.3']
project_id = self.ctxt.project_id
float_ips = [self._create_floating_ip({'address': address,
'project_id': project_id})
for address in float_addresses]
fixed_addrs = [self._create_fixed_ip({'address': address})
for address in fixed_addresses]
for float_ip, fixed_addr in zip(float_ips, fixed_addrs):
db.floating_ip_fixed_ip_associate(self.ctxt,
float_ip.address,
fixed_addr, 'host')
for float_ip, fixed_addr in zip(float_ips, fixed_addrs):
fixed = db.floating_ip_disassociate(self.ctxt, float_ip.address)
self.assertEqual(fixed.address, fixed_addr)
updated_float_ip = db.floating_ip_get(self.ctxt, float_ip.id)
self.assertIsNone(updated_float_ip.fixed_ip_id)
self.assertIsNone(updated_float_ip.host)
def test_floating_ip_disassociate_not_found(self):
self.assertRaises(exception.FloatingIpNotFoundForAddress,
db.floating_ip_disassociate, self.ctxt,
'11.11.11.11')
def test_floating_ip_get_all(self):
addresses = ['1.1.1.1', '1.1.1.2', '1.1.1.3']
float_ips = [self._create_floating_ip({'address': addr})
for addr in addresses]
self._assertEqualListsOfObjects(float_ips,
db.floating_ip_get_all(self.ctxt),
ignored_keys="fixed_ip")
def test_floating_ip_get_all_associated(self):
instance = db.instance_create(self.ctxt, {'uuid': 'fake'})
project_id = self.ctxt.project_id
float_ip = self._create_floating_ip({'address': '1.1.1.1',
'project_id': project_id})
fixed_ip = self._create_fixed_ip({'address': '2.2.2.2',
'instance_uuid': instance.uuid})
db.floating_ip_fixed_ip_associate(self.ctxt,
float_ip.address,
fixed_ip,
'host')
float_ips = db.floating_ip_get_all(self.ctxt)
self.assertEqual(1, len(float_ips))
self.assertEqual(float_ip.address, float_ips[0].address)
self.assertEqual(fixed_ip, float_ips[0].fixed_ip.address)
self.assertEqual(instance.uuid, float_ips[0].fixed_ip.instance_uuid)
def test_floating_ip_get_all_not_found(self):
self.assertRaises(exception.NoFloatingIpsDefined,
db.floating_ip_get_all, self.ctxt)
def test_floating_ip_get_all_by_host(self):
hosts = {
'host1': ['1.1.1.1', '1.1.1.2'],
'host2': ['2.1.1.1', '2.1.1.2'],
'host3': ['3.1.1.1', '3.1.1.2', '3.1.1.3']
}
hosts_with_float_ips = {}
for host, addresses in hosts.items():
hosts_with_float_ips[host] = []
for address in addresses:
float_ip = self._create_floating_ip({'host': host,
'address': address})
hosts_with_float_ips[host].append(float_ip)
for host, float_ips in hosts_with_float_ips.items():
real_float_ips = db.floating_ip_get_all_by_host(self.ctxt, host)
self._assertEqualListsOfObjects(float_ips, real_float_ips,
ignored_keys="fixed_ip")
def test_floating_ip_get_all_by_host_not_found(self):
self.assertRaises(exception.FloatingIpNotFoundForHost,
db.floating_ip_get_all_by_host,
self.ctxt, 'non_exists_host')
def test_floating_ip_get_all_by_project(self):
projects = {
'pr1': ['1.1.1.1', '1.1.1.2'],
'pr2': ['2.1.1.1', '2.1.1.2'],
'pr3': ['3.1.1.1', '3.1.1.2', '3.1.1.3']
}
projects_with_float_ips = {}
for project_id, addresses in projects.items():
projects_with_float_ips[project_id] = []
for address in addresses:
float_ip = self._create_floating_ip({'project_id': project_id,
'address': address})
projects_with_float_ips[project_id].append(float_ip)
for project_id, float_ips in projects_with_float_ips.items():
real_float_ips = db.floating_ip_get_all_by_project(self.ctxt,
project_id)
self._assertEqualListsOfObjects(float_ips, real_float_ips,
ignored_keys='fixed_ip')
def test_floating_ip_get_all_by_project_not_authorized(self):
ctxt = context.RequestContext(user_id='a', project_id='abc',
is_admin=False)
self.assertRaises(exception.Forbidden,
db.floating_ip_get_all_by_project,
ctxt, 'other_project')
def test_floating_ip_get_by_address(self):
addresses = ['1.1.1.1', '1.1.1.2', '1.1.1.3']
float_ips = [self._create_floating_ip({'address': addr})
for addr in addresses]
for float_ip in float_ips:
real_float_ip = db.floating_ip_get_by_address(self.ctxt,
float_ip.address)
self._assertEqualObjects(float_ip, real_float_ip,
ignored_keys='fixed_ip')
def test_floating_ip_get_by_address_not_found(self):
self.assertRaises(exception.FloatingIpNotFoundForAddress,
db.floating_ip_get_by_address,
self.ctxt, '20.20.20.20')
def test_floating_ip_get_by_invalid_address(self):
self.mock_db_query_first_to_raise_data_error_exception()
self.assertRaises(exception.InvalidIpAddressError,
db.floating_ip_get_by_address,
self.ctxt, 'non_exists_host')
def test_floating_ip_get_by_fixed_address(self):
fixed_float = [
('1.1.1.1', '2.2.2.1'),
('1.1.1.2', '2.2.2.2'),
('1.1.1.3', '2.2.2.3')
]
for fixed_addr, float_addr in fixed_float:
project_id = self.ctxt.project_id
self._create_floating_ip({'address': float_addr,
'project_id': project_id})
self._create_fixed_ip({'address': fixed_addr})
db.floating_ip_fixed_ip_associate(self.ctxt, float_addr,
fixed_addr, 'some_host')
for fixed_addr, float_addr in fixed_float:
float_ip = db.floating_ip_get_by_fixed_address(self.ctxt,
fixed_addr)
self.assertEqual(float_addr, float_ip[0]['address'])
def test_floating_ip_get_by_fixed_ip_id(self):
fixed_float = [
('1.1.1.1', '2.2.2.1'),
('1.1.1.2', '2.2.2.2'),
('1.1.1.3', '2.2.2.3')
]
for fixed_addr, float_addr in fixed_float:
project_id = self.ctxt.project_id
self._create_floating_ip({'address': float_addr,
'project_id': project_id})
self._create_fixed_ip({'address': fixed_addr})
db.floating_ip_fixed_ip_associate(self.ctxt, float_addr,
fixed_addr, 'some_host')
for fixed_addr, float_addr in fixed_float:
fixed_ip = db.fixed_ip_get_by_address(self.ctxt, fixed_addr)
float_ip = db.floating_ip_get_by_fixed_ip_id(self.ctxt,
fixed_ip['id'])
self.assertEqual(float_addr, float_ip[0]['address'])
def test_floating_ip_update(self):
float_ip = self._create_floating_ip({})
values = {
'project_id': 'some_pr',
'host': 'some_host',
'auto_assigned': True,
'interface': 'some_interface',
'pool': 'some_pool'
}
floating_ref = db.floating_ip_update(self.ctxt, float_ip['address'],
values)
self.assertIsNotNone(floating_ref)
updated_float_ip = db.floating_ip_get(self.ctxt, float_ip['id'])
self._assertEqualObjects(updated_float_ip, values,
ignored_keys=['id', 'address', 'updated_at',
'deleted_at', 'created_at',
'deleted', 'fixed_ip_id',
'fixed_ip'])
def test_floating_ip_update_to_duplicate(self):
float_ip1 = self._create_floating_ip({'address': '1.1.1.1'})
float_ip2 = self._create_floating_ip({'address': '1.1.1.2'})
self.assertRaises(exception.FloatingIpExists,
db.floating_ip_update,
self.ctxt, float_ip2['address'],
{'address': float_ip1['address']})
class InstanceDestroyConstraints(test.TestCase):
    """Exercise db.instance_destroy() guarded by db.constraint() clauses."""
    def _make_instance(self, ctxt, **fields):
        # Create a throwaway instance with the given initial column values.
        return db.instance_create(ctxt, fields)
    def test_destroy_with_equal_any_constraint_met_single_value(self):
        ctxt = context.get_admin_context()
        inst = self._make_instance(ctxt, task_state='deleting')
        guard = db.constraint(task_state=db.equal_any('deleting'))
        db.instance_destroy(ctxt, inst['uuid'], guard)
        # The instance must be gone afterwards.
        self.assertRaises(exception.InstanceNotFound, db.instance_get_by_uuid,
                          ctxt, inst['uuid'])
    def test_destroy_with_equal_any_constraint_met(self):
        ctxt = context.get_admin_context()
        inst = self._make_instance(ctxt, task_state='deleting')
        guard = db.constraint(task_state=db.equal_any('deleting',
                                                      'error'))
        db.instance_destroy(ctxt, inst['uuid'], guard)
        self.assertRaises(exception.InstanceNotFound, db.instance_get_by_uuid,
                          ctxt, inst['uuid'])
    def test_destroy_with_equal_any_constraint_not_met(self):
        ctxt = context.get_admin_context()
        inst = self._make_instance(ctxt, vm_state='resize')
        guard = db.constraint(vm_state=db.equal_any('active', 'error'))
        self.assertRaises(exception.ConstraintNotMet, db.instance_destroy,
                          ctxt, inst['uuid'], guard)
        # The failed destroy must leave the instance untouched.
        survivor = db.instance_get_by_uuid(ctxt, inst['uuid'])
        self.assertFalse(survivor['deleted'])
    def test_destroy_with_not_equal_constraint_met(self):
        ctxt = context.get_admin_context()
        inst = self._make_instance(ctxt, task_state='deleting')
        guard = db.constraint(task_state=db.not_equal('error', 'resize'))
        db.instance_destroy(ctxt, inst['uuid'], guard)
        self.assertRaises(exception.InstanceNotFound, db.instance_get_by_uuid,
                          ctxt, inst['uuid'])
    def test_destroy_with_not_equal_constraint_not_met(self):
        ctxt = context.get_admin_context()
        inst = self._make_instance(ctxt, vm_state='active')
        guard = db.constraint(vm_state=db.not_equal('active', 'error'))
        self.assertRaises(exception.ConstraintNotMet, db.instance_destroy,
                          ctxt, inst['uuid'], guard)
        survivor = db.instance_get_by_uuid(ctxt, inst['uuid'])
        self.assertFalse(survivor['deleted'])
class VolumeUsageDBApiTestCase(test.TestCase):
    """Tests for vol_usage_update()/vol_get_usage_by_time() bookkeeping,
    including the curr_* vs tot_* counter rollover semantics.
    """
    def setUp(self):
        super(VolumeUsageDBApiTestCase, self).setUp()
        self.user_id = 'fake'
        self.project_id = 'fake'
        self.context = context.RequestContext(self.user_id, self.project_id)
        self.useFixture(test.TimeOverride())
    def test_vol_usage_update_no_totals_update(self):
        """Without update_totals, updates refresh curr_* counters only."""
        ctxt = context.get_admin_context()
        now = timeutils.utcnow()
        timeutils.set_time_override(now)
        start_time = now - datetime.timedelta(seconds=10)
        expected_vol_usages = {
            u'1': {'volume_id': u'1',
                   'instance_uuid': 'fake-instance-uuid1',
                   'project_id': 'fake-project-uuid1',
                   'user_id': 'fake-user-uuid1',
                   'curr_reads': 1000,
                   'curr_read_bytes': 2000,
                   'curr_writes': 3000,
                   'curr_write_bytes': 4000,
                   'curr_last_refreshed': now,
                   'tot_reads': 0,
                   'tot_read_bytes': 0,
                   'tot_writes': 0,
                   'tot_write_bytes': 0,
                   'tot_last_refreshed': None},
            u'2': {'volume_id': u'2',
                   'instance_uuid': 'fake-instance-uuid2',
                   'project_id': 'fake-project-uuid2',
                   'user_id': 'fake-user-uuid2',
                   'curr_reads': 100,
                   'curr_read_bytes': 200,
                   'curr_writes': 300,
                   'curr_write_bytes': 400,
                   'tot_reads': 0,
                   'tot_read_bytes': 0,
                   'tot_writes': 0,
                   'tot_write_bytes': 0,
                   'tot_last_refreshed': None}
        }
        def _compare(vol_usage, expected):
            # Check only the keys we enumerated above.
            for key, value in expected.items():
                self.assertEqual(vol_usage[key], value)
        vol_usages = db.vol_get_usage_by_time(ctxt, start_time)
        self.assertEqual(len(vol_usages), 0)
        db.vol_usage_update(ctxt, u'1', rd_req=10, rd_bytes=20,
                            wr_req=30, wr_bytes=40,
                            instance_id='fake-instance-uuid1',
                            project_id='fake-project-uuid1',
                            user_id='fake-user-uuid1',
                            availability_zone='fake-az')
        db.vol_usage_update(ctxt, u'2', rd_req=100, rd_bytes=200,
                            wr_req=300, wr_bytes=400,
                            instance_id='fake-instance-uuid2',
                            project_id='fake-project-uuid2',
                            user_id='fake-user-uuid2',
                            availability_zone='fake-az')
        # Second update for volume 1 replaces its curr_* values.
        db.vol_usage_update(ctxt, u'1', rd_req=1000, rd_bytes=2000,
                            wr_req=3000, wr_bytes=4000,
                            instance_id='fake-instance-uuid1',
                            project_id='fake-project-uuid1',
                            user_id='fake-user-uuid1',
                            availability_zone='fake-az')
        vol_usages = db.vol_get_usage_by_time(ctxt, start_time)
        self.assertEqual(len(vol_usages), 2)
        for usage in vol_usages:
            _compare(usage, expected_vol_usages[usage.volume_id])
    def test_vol_usage_update_totals_update(self):
        """With update_totals, curr_* counters fold into tot_* and reset."""
        ctxt = context.get_admin_context()
        # Arbitrary fixed timestamps make the time arithmetic deterministic.
        now = datetime.datetime(1, 1, 1, 1, 0, 0)
        start_time = now - datetime.timedelta(seconds=10)
        now1 = now + datetime.timedelta(minutes=1)
        now2 = now + datetime.timedelta(minutes=2)
        now3 = now + datetime.timedelta(minutes=3)
        timeutils.set_time_override(now)
        db.vol_usage_update(ctxt, u'1', rd_req=100, rd_bytes=200,
                            wr_req=300, wr_bytes=400,
                            instance_id='fake-instance-uuid',
                            project_id='fake-project-uuid',
                            user_id='fake-user-uuid',
                            availability_zone='fake-az')
        current_usage = db.vol_get_usage_by_time(ctxt, start_time)[0]
        self.assertEqual(current_usage['tot_reads'], 0)
        self.assertEqual(current_usage['curr_reads'], 100)
        timeutils.set_time_override(now1)
        db.vol_usage_update(ctxt, u'1', rd_req=200, rd_bytes=300,
                            wr_req=400, wr_bytes=500,
                            instance_id='fake-instance-uuid',
                            project_id='fake-project-uuid',
                            user_id='fake-user-uuid',
                            availability_zone='fake-az',
                            update_totals=True)
        current_usage = db.vol_get_usage_by_time(ctxt, start_time)[0]
        self.assertEqual(current_usage['tot_reads'], 200)
        self.assertEqual(current_usage['curr_reads'], 0)
        timeutils.set_time_override(now2)
        db.vol_usage_update(ctxt, u'1', rd_req=300, rd_bytes=400,
                            wr_req=500, wr_bytes=600,
                            instance_id='fake-instance-uuid',
                            project_id='fake-project-uuid',
                            availability_zone='fake-az',
                            user_id='fake-user-uuid')
        current_usage = db.vol_get_usage_by_time(ctxt, start_time)[0]
        self.assertEqual(current_usage['tot_reads'], 200)
        self.assertEqual(current_usage['curr_reads'], 300)
        timeutils.set_time_override(now3)
        db.vol_usage_update(ctxt, u'1', rd_req=400, rd_bytes=500,
                            wr_req=600, wr_bytes=700,
                            instance_id='fake-instance-uuid',
                            project_id='fake-project-uuid',
                            user_id='fake-user-uuid',
                            availability_zone='fake-az',
                            update_totals=True)
        vol_usages = db.vol_get_usage_by_time(ctxt, start_time)
        expected_vol_usages = {'volume_id': u'1',
                               'project_id': 'fake-project-uuid',
                               'user_id': 'fake-user-uuid',
                               'instance_uuid': 'fake-instance-uuid',
                               'availability_zone': 'fake-az',
                               'tot_reads': 600,
                               'tot_read_bytes': 800,
                               'tot_writes': 1000,
                               'tot_write_bytes': 1200,
                               'tot_last_refreshed': now3,
                               'curr_reads': 0,
                               'curr_read_bytes': 0,
                               'curr_writes': 0,
                               'curr_write_bytes': 0,
                               'curr_last_refreshed': now2}
        self.assertEqual(1, len(vol_usages))
        for key, value in expected_vol_usages.items():
            self.assertEqual(vol_usages[0][key], value, key)
    def test_vol_usage_update_when_blockdevicestats_reset(self):
        """Counters lower than the stored ones imply a guest reset; the
        previous curr_* values roll into tot_* before the new ones apply.
        """
        ctxt = context.get_admin_context()
        now = timeutils.utcnow()
        start_time = now - datetime.timedelta(seconds=10)
        vol_usages = db.vol_get_usage_by_time(ctxt, start_time)
        self.assertEqual(len(vol_usages), 0)
        db.vol_usage_update(ctxt, u'1',
                            rd_req=10000, rd_bytes=20000,
                            wr_req=30000, wr_bytes=40000,
                            instance_id='fake-instance-uuid1',
                            project_id='fake-project-uuid1',
                            availability_zone='fake-az',
                            user_id='fake-user-uuid1')
        # Instance rebooted or crashed. block device stats were reset and are
        # less than the previous values
        db.vol_usage_update(ctxt, u'1',
                            rd_req=100, rd_bytes=200,
                            wr_req=300, wr_bytes=400,
                            instance_id='fake-instance-uuid1',
                            project_id='fake-project-uuid1',
                            availability_zone='fake-az',
                            user_id='fake-user-uuid1')
        db.vol_usage_update(ctxt, u'1',
                            rd_req=200, rd_bytes=300,
                            wr_req=400, wr_bytes=500,
                            instance_id='fake-instance-uuid1',
                            project_id='fake-project-uuid1',
                            availability_zone='fake-az',
                            user_id='fake-user-uuid1')
        vol_usage = db.vol_get_usage_by_time(ctxt, start_time)[0]
        expected_vol_usage = {'volume_id': u'1',
                              'instance_uuid': 'fake-instance-uuid1',
                              'project_id': 'fake-project-uuid1',
                              'availability_zone': 'fake-az',
                              'user_id': 'fake-user-uuid1',
                              'curr_reads': 200,
                              'curr_read_bytes': 300,
                              'curr_writes': 400,
                              'curr_write_bytes': 500,
                              'tot_reads': 10000,
                              'tot_read_bytes': 20000,
                              'tot_writes': 30000,
                              'tot_write_bytes': 40000}
        for key, value in expected_vol_usage.items():
            self.assertEqual(vol_usage[key], value, key)
    def test_vol_usage_update_totals_update_when_blockdevicestats_reset(self):
        # This is unlikely to happen, but could when a volume is detached
        # right after an instance has rebooted / recovered and before
        # the system polled and updated the volume usage cache table.
        ctxt = context.get_admin_context()
        now = timeutils.utcnow()
        start_time = now - datetime.timedelta(seconds=10)
        vol_usages = db.vol_get_usage_by_time(ctxt, start_time)
        self.assertEqual(len(vol_usages), 0)
        db.vol_usage_update(ctxt, u'1',
                            rd_req=10000, rd_bytes=20000,
                            wr_req=30000, wr_bytes=40000,
                            instance_id='fake-instance-uuid1',
                            project_id='fake-project-uuid1',
                            availability_zone='fake-az',
                            user_id='fake-user-uuid1')
        # Instance rebooted or crashed. block device stats were reset and are
        # less than the previous values
        db.vol_usage_update(ctxt, u'1',
                            rd_req=100, rd_bytes=200,
                            wr_req=300, wr_bytes=400,
                            instance_id='fake-instance-uuid1',
                            project_id='fake-project-uuid1',
                            availability_zone='fake-az',
                            user_id='fake-user-uuid1',
                            update_totals=True)
        vol_usage = db.vol_get_usage_by_time(ctxt, start_time)[0]
        # Totals accumulate both the pre-reset and post-reset counters.
        expected_vol_usage = {'volume_id': u'1',
                              'instance_uuid': 'fake-instance-uuid1',
                              'project_id': 'fake-project-uuid1',
                              'availability_zone': 'fake-az',
                              'user_id': 'fake-user-uuid1',
                              'curr_reads': 0,
                              'curr_read_bytes': 0,
                              'curr_writes': 0,
                              'curr_write_bytes': 0,
                              'tot_reads': 10100,
                              'tot_read_bytes': 20200,
                              'tot_writes': 30300,
                              'tot_write_bytes': 40400}
        for key, value in expected_vol_usage.items():
            self.assertEqual(vol_usage[key], value, key)
class TaskLogTestCase(test.TestCase):
    """Tests for the task_log_* DB API (periodic-task audit records)."""
    def setUp(self):
        super(TaskLogTestCase, self).setUp()
        self.context = context.get_admin_context()
        reference = timeutils.utcnow()
        # A ten-second window ending five seconds in the past.
        self.begin = (reference - datetime.timedelta(seconds=10)).isoformat()
        self.end = (reference - datetime.timedelta(seconds=5)).isoformat()
        self.task_name = 'fake-task-name'
        self.host = 'fake-host'
        self.message = 'Fake task message'
        # Every test starts with one task already begun.
        db.task_log_begin_task(self.context, self.task_name, self.begin,
                               self.end, self.host, message=self.message)
    def test_task_log_get(self):
        record = db.task_log_get(self.context, self.task_name, self.begin,
                                 self.end, self.host)
        self.assertEqual(record['task_name'], self.task_name)
        # The stored period bounds come back as datetimes, not strings.
        self.assertEqual(record['period_beginning'],
                         timeutils.parse_strtime(self.begin))
        self.assertEqual(record['period_ending'],
                         timeutils.parse_strtime(self.end))
        self.assertEqual(record['host'], self.host)
        self.assertEqual(record['message'], self.message)
    def test_task_log_get_all(self):
        found = db.task_log_get_all(self.context, self.task_name, self.begin,
                                    self.end, host=self.host)
        self.assertEqual(len(found), 1)
        # Filtering on a state no record has yields nothing.
        found = db.task_log_get_all(self.context, self.task_name, self.begin,
                                    self.end, host=self.host, state='')
        self.assertEqual(len(found), 0)
    def test_task_log_begin_task(self):
        db.task_log_begin_task(self.context, 'fake', self.begin,
                               self.end, self.host, task_items=42,
                               message=self.message)
        record = db.task_log_get(self.context, 'fake', self.begin,
                                 self.end, self.host)
        self.assertEqual(record['task_name'], 'fake')
    def test_task_log_begin_task_duplicate(self):
        args = (self.context, 'fake', self.begin, self.end, self.host)
        db.task_log_begin_task(*args, message=self.message)
        # Beginning the same task twice must be rejected.
        self.assertRaises(exception.TaskAlreadyRunning,
                          db.task_log_begin_task,
                          *args, message=self.message)
    def test_task_log_end_task(self):
        error_count = 1
        db.task_log_end_task(self.context, self.task_name, self.begin,
                             self.end, self.host, error_count,
                             message=self.message)
        record = db.task_log_get(self.context, self.task_name, self.begin,
                                 self.end, self.host)
        self.assertEqual(record['errors'], 1)
    def test_task_log_end_task_task_not_running(self):
        self.assertRaises(exception.TaskNotRunning,
                          db.task_log_end_task, self.context, 'nonexistent',
                          self.begin, self.end, self.host, 42,
                          message=self.message)
class BlockDeviceMappingTestCase(test.TestCase):
    def setUp(self):
        # Each test operates against one freshly created instance row.
        super(BlockDeviceMappingTestCase, self).setUp()
        self.ctxt = context.get_admin_context()
        self.instance = db.instance_create(self.ctxt, {})
    def _create_bdm(self, values):
        """Create a block-device mapping from ``values`` plus defaults tied
        to ``self.instance`` and return the stored row (matched back by
        device_name).

        NOTE(review): mutates the caller's ``values`` dict in place via
        setdefault.
        """
        values.setdefault('instance_uuid', self.instance['uuid'])
        values.setdefault('device_name', 'fake_device')
        values.setdefault('source_type', 'volume')
        values.setdefault('destination_type', 'volume')
        block_dev = block_device.BlockDeviceDict(values)
        db.block_device_mapping_create(self.ctxt, block_dev, legacy=False)
        uuid = block_dev['instance_uuid']
        bdms = db.block_device_mapping_get_all_by_instance(self.ctxt, uuid)
        for bdm in bdms:
            if bdm['device_name'] == values['device_name']:
                return bdm
def test_scrub_empty_str_values_no_effect(self):
values = {'volume_size': 5}
expected = copy.copy(values)
sqlalchemy_api._scrub_empty_str_values(values, ['volume_size'])
self.assertEqual(values, expected)
def test_scrub_empty_str_values_empty_string(self):
values = {'volume_size': ''}
sqlalchemy_api._scrub_empty_str_values(values, ['volume_size'])
self.assertEqual(values, {})
def test_scrub_empty_str_values_empty_unicode(self):
values = {'volume_size': u''}
sqlalchemy_api._scrub_empty_str_values(values, ['volume_size'])
self.assertEqual(values, {})
def test_block_device_mapping_create(self):
bdm = self._create_bdm({})
self.assertIsNotNone(bdm)
def test_block_device_mapping_update(self):
bdm = self._create_bdm({})
result = db.block_device_mapping_update(
self.ctxt, bdm['id'], {'destination_type': 'moon'},
legacy=False)
uuid = bdm['instance_uuid']
bdm_real = db.block_device_mapping_get_all_by_instance(self.ctxt, uuid)
self.assertEqual(bdm_real[0]['destination_type'], 'moon')
# Also make sure the update call returned correct data
self.assertEqual(dict(bdm_real[0]),
dict(result))
    def test_block_device_mapping_update_or_create(self):
        """update_or_create matches on device_name: same name updates in
        place, while a null device_name always creates a new row.
        """
        values = {
            'instance_uuid': self.instance['uuid'],
            'device_name': 'fake_name',
            'source_type': 'volume',
            'destination_type': 'volume'
        }
        # check create
        db.block_device_mapping_update_or_create(self.ctxt, values,
                                                 legacy=False)
        uuid = values['instance_uuid']
        bdm_real = db.block_device_mapping_get_all_by_instance(self.ctxt, uuid)
        self.assertEqual(len(bdm_real), 1)
        self.assertEqual(bdm_real[0]['device_name'], 'fake_name')
        # check update
        values['destination_type'] = 'camelot'
        db.block_device_mapping_update_or_create(self.ctxt, values,
                                                 legacy=False)
        bdm_real = db.block_device_mapping_get_all_by_instance(self.ctxt, uuid)
        self.assertEqual(len(bdm_real), 1)
        bdm_real = bdm_real[0]
        self.assertEqual(bdm_real['device_name'], 'fake_name')
        self.assertEqual(bdm_real['destination_type'], 'camelot')
        # check create without device_name
        bdm1 = dict(values)
        bdm1['device_name'] = None
        db.block_device_mapping_update_or_create(self.ctxt, bdm1, legacy=False)
        bdms = db.block_device_mapping_get_all_by_instance(self.ctxt, uuid)
        with_device_name = [b for b in bdms if b['device_name'] is not None]
        without_device_name = [b for b in bdms if b['device_name'] is None]
        self.assertEqual(len(with_device_name), 1,
                         'expected 1 bdm with device_name, found %d' %
                         len(with_device_name))
        self.assertEqual(len(without_device_name), 1,
                         'expected 1 bdm without device_name, found %d' %
                         len(without_device_name))
        # check create multiple devices without device_name
        bdm2 = dict(values)
        bdm2['device_name'] = None
        db.block_device_mapping_update_or_create(self.ctxt, bdm2, legacy=False)
        bdms = db.block_device_mapping_get_all_by_instance(self.ctxt, uuid)
        with_device_name = [b for b in bdms if b['device_name'] is not None]
        without_device_name = [b for b in bdms if b['device_name'] is None]
        self.assertEqual(len(with_device_name), 1,
                         'expected 1 bdm with device_name, found %d' %
                         len(with_device_name))
        self.assertEqual(len(without_device_name), 2,
                         'expected 2 bdms without device_name, found %d' %
                         len(without_device_name))
def test_block_device_mapping_update_or_create_multiple_ephemeral(self):
uuid = self.instance['uuid']
values = {
'instance_uuid': uuid,
'source_type': 'blank',
'guest_format': 'myformat',
}
bdm1 = dict(values)
bdm1['device_name'] = '/dev/sdb'
db.block_device_mapping_update_or_create(self.ctxt, bdm1, legacy=False)
bdm2 = dict(values)
bdm2['device_name'] = '/dev/sdc'
db.block_device_mapping_update_or_create(self.ctxt, bdm2, legacy=False)
bdm_real = sorted(
db.block_device_mapping_get_all_by_instance(self.ctxt, uuid),
key=lambda bdm: bdm['device_name']
)
self.assertEqual(len(bdm_real), 2)
for bdm, device_name in zip(bdm_real, ['/dev/sdb', '/dev/sdc']):
self.assertEqual(bdm['device_name'], device_name)
self.assertEqual(bdm['guest_format'], 'myformat')
def test_block_device_mapping_update_or_create_check_remove_virt(self):
uuid = self.instance['uuid']
values = {
'instance_uuid': uuid,
'source_type': 'blank',
'destination_type': 'local',
'guest_format': 'swap',
}
# check that old swap bdms are deleted on create
val1 = dict(values)
val1['device_name'] = 'device1'
db.block_device_mapping_create(self.ctxt, val1, legacy=False)
val2 = dict(values)
val2['device_name'] = 'device2'
db.block_device_mapping_update_or_create(self.ctxt, val2, legacy=False)
bdm_real = db.block_device_mapping_get_all_by_instance(self.ctxt, uuid)
self.assertEqual(len(bdm_real), 1)
bdm_real = bdm_real[0]
self.assertEqual(bdm_real['device_name'], 'device2')
self.assertEqual(bdm_real['source_type'], 'blank')
self.assertEqual(bdm_real['guest_format'], 'swap')
db.block_device_mapping_destroy(self.ctxt, bdm_real['id'])
def test_block_device_mapping_get_all_by_instance_uuids(self):
uuid1 = self.instance['uuid']
uuid2 = db.instance_create(self.ctxt, {})['uuid']
bdms_values = [{'instance_uuid': uuid1,
'device_name': '/dev/vda'},
{'instance_uuid': uuid2,
'device_name': '/dev/vdb'},
{'instance_uuid': uuid2,
'device_name': '/dev/vdc'}]
for bdm in bdms_values:
self._create_bdm(bdm)
bdms = db.block_device_mapping_get_all_by_instance_uuids(
self.ctxt, [])
self.assertEqual(len(bdms), 0)
bdms = db.block_device_mapping_get_all_by_instance_uuids(
self.ctxt, [uuid2])
self.assertEqual(len(bdms), 2)
bdms = db.block_device_mapping_get_all_by_instance_uuids(
self.ctxt, [uuid1, uuid2])
self.assertEqual(len(bdms), 3)
def test_block_device_mapping_get_all_by_instance(self):
uuid1 = self.instance['uuid']
uuid2 = db.instance_create(self.ctxt, {})['uuid']
bdms_values = [{'instance_uuid': uuid1,
'device_name': '/dev/vda'},
{'instance_uuid': uuid2,
'device_name': '/dev/vdb'},
{'instance_uuid': uuid2,
'device_name': '/dev/vdc'}]
for bdm in bdms_values:
self._create_bdm(bdm)
bdms = db.block_device_mapping_get_all_by_instance(self.ctxt, uuid1)
self.assertEqual(len(bdms), 1)
self.assertEqual(bdms[0]['device_name'], '/dev/vda')
bdms = db.block_device_mapping_get_all_by_instance(self.ctxt, uuid2)
self.assertEqual(len(bdms), 2)
def test_block_device_mapping_destroy(self):
bdm = self._create_bdm({})
db.block_device_mapping_destroy(self.ctxt, bdm['id'])
bdm = db.block_device_mapping_get_all_by_instance(self.ctxt,
bdm['instance_uuid'])
self.assertEqual(len(bdm), 0)
def test_block_device_mapping_destroy_by_instance_and_volume(self):
vol_id1 = '69f5c254-1a5b-4fff-acf7-cb369904f58f'
vol_id2 = '69f5c254-1a5b-4fff-acf7-cb369904f59f'
self._create_bdm({'device_name': '/dev/vda', 'volume_id': vol_id1})
self._create_bdm({'device_name': '/dev/vdb', 'volume_id': vol_id2})
uuid = self.instance['uuid']
db.block_device_mapping_destroy_by_instance_and_volume(self.ctxt, uuid,
vol_id1)
bdms = db.block_device_mapping_get_all_by_instance(self.ctxt, uuid)
self.assertEqual(len(bdms), 1)
self.assertEqual(bdms[0]['device_name'], '/dev/vdb')
def test_block_device_mapping_destroy_by_instance_and_device(self):
self._create_bdm({'device_name': '/dev/vda'})
self._create_bdm({'device_name': '/dev/vdb'})
uuid = self.instance['uuid']
params = (self.ctxt, uuid, '/dev/vdb')
db.block_device_mapping_destroy_by_instance_and_device(*params)
bdms = db.block_device_mapping_get_all_by_instance(self.ctxt, uuid)
self.assertEqual(len(bdms), 1)
self.assertEqual(bdms[0]['device_name'], '/dev/vda')
def test_block_device_mapping_get_by_volume_id(self):
self._create_bdm({'volume_id': 'fake_id'})
bdm = db.block_device_mapping_get_by_volume_id(self.ctxt, 'fake_id')
self.assertEqual(bdm['volume_id'], 'fake_id')
def test_block_device_mapping_get_by_volume_id_join_instance(self):
self._create_bdm({'volume_id': 'fake_id'})
bdm = db.block_device_mapping_get_by_volume_id(self.ctxt, 'fake_id',
['instance'])
self.assertEqual(bdm['volume_id'], 'fake_id')
self.assertEqual(bdm['instance']['uuid'], self.instance['uuid'])
class AgentBuildTestCase(test.TestCase, ModelsObjectComparatorMixin):
    """Tests for db.api.agent_build_* methods."""

    def setUp(self):
        super(AgentBuildTestCase, self).setUp()
        self.ctxt = context.get_admin_context()

    def test_agent_build_create_and_get_all(self):
        """A freshly created agent build shows up in agent_build_get_all."""
        self.assertEqual(0, len(db.agent_build_get_all(self.ctxt)))
        created = db.agent_build_create(self.ctxt, {'os': 'GNU/HURD'})
        found = db.agent_build_get_all(self.ctxt)
        self.assertEqual(1, len(found))
        self._assertEqualObjects(created, found[0])

    def test_agent_build_get_by_triple(self):
        """Lookup by (hypervisor, os, architecture) matches only an exact
        triple; a mismatching architecture yields None.
        """
        created = db.agent_build_create(self.ctxt,
                                        {'hypervisor': 'kvm',
                                         'os': 'FreeBSD',
                                         'architecture': arch.X86_64})
        self.assertIsNone(db.agent_build_get_by_triple(self.ctxt, 'kvm',
                                                       'FreeBSD', 'i386'))
        self._assertEqualObjects(created, db.agent_build_get_by_triple(
            self.ctxt, 'kvm', 'FreeBSD', arch.X86_64))

    def test_agent_build_destroy(self):
        """Destroy removes the build from the listing."""
        created = db.agent_build_create(self.ctxt, {})
        self.assertEqual(1, len(db.agent_build_get_all(self.ctxt)))
        db.agent_build_destroy(self.ctxt, created.id)
        self.assertEqual(0, len(db.agent_build_get_all(self.ctxt)))

    def test_agent_build_update(self):
        """Update rewrites the stored fields in place."""
        created = db.agent_build_create(self.ctxt, {'os': 'HaikuOS'})
        db.agent_build_update(self.ctxt, created.id, {'os': 'ReactOS'})
        self.assertEqual('ReactOS', db.agent_build_get_all(self.ctxt)[0].os)

    def test_agent_build_destroy_destroyed(self):
        """Destroying an already-destroyed build raises
        AgentBuildNotFound.
        """
        created = db.agent_build_create(self.ctxt, {})
        db.agent_build_destroy(self.ctxt, created.id)
        self.assertRaises(exception.AgentBuildNotFound,
                          db.agent_build_destroy, self.ctxt, created.id)

    def test_agent_build_update_destroyed(self):
        """Updating a destroyed build raises AgentBuildNotFound."""
        created = db.agent_build_create(self.ctxt, {'os': 'HaikuOS'})
        db.agent_build_destroy(self.ctxt, created.id)
        self.assertRaises(exception.AgentBuildNotFound,
                          db.agent_build_update, self.ctxt, created.id,
                          {'os': 'OS/2'})

    def test_agent_build_exists(self):
        """Creating the same triple twice raises AgentBuildExists."""
        values = {'hypervisor': 'kvm', 'os': 'FreeBSD',
                  'architecture': arch.X86_64}
        db.agent_build_create(self.ctxt, values)
        self.assertRaises(exception.AgentBuildExists, db.agent_build_create,
                          self.ctxt, values)

    def test_agent_build_get_all_by_hypervisor(self):
        """agent_build_get_all can filter on the hypervisor column."""
        values = {'hypervisor': 'kvm', 'os': 'FreeBSD',
                  'architecture': arch.X86_64}
        created = db.agent_build_create(self.ctxt, values)
        actual = db.agent_build_get_all(self.ctxt, hypervisor='kvm')
        self._assertEqualListsOfObjects([created], actual)
class VirtualInterfaceTestCase(test.TestCase, ModelsObjectComparatorMixin):
    """Tests for db.api.virtual_interface_* methods."""
    def setUp(self):
        # Every test needs an admin context, one instance and one network
        # to attach interfaces to.
        super(VirtualInterfaceTestCase, self).setUp()
        self.ctxt = context.get_admin_context()
        self.instance_uuid = db.instance_create(self.ctxt, {})['uuid']
        values = {'host': 'localhost', 'project_id': 'project1'}
        self.network = db.network_create_safe(self.ctxt, values)
    def _get_base_values(self):
        """Return a minimal valid VIF record bound to the fixture
        instance/network; each call gets a fresh random uuid."""
        return {
            'instance_uuid': self.instance_uuid,
            'address': 'fake_address',
            'network_id': self.network['id'],
            'uuid': str(stdlib_uuid.uuid4())
        }
    def mock_db_query_first_to_raise_data_error_exception(self):
        # Stub Query.first() (via mox) so the very next DB read raises
        # DBError; used to exercise the address-validation error path.
        # NOTE(review): mox is deprecated in favor of mock, but the stub
        # ordering here (StubOut -> record -> ReplayAll) is load-bearing.
        self.mox.StubOutWithMock(query.Query, 'first')
        query.Query.first().AndRaise(db_exc.DBError())
        self.mox.ReplayAll()
    def _create_virt_interface(self, values):
        """Create a VIF from the base record overlaid with ``values``."""
        v = self._get_base_values()
        v.update(values)
        return db.virtual_interface_create(self.ctxt, v)
    def test_virtual_interface_create(self):
        """A created VIF gets an id and keeps all supplied fields."""
        vif = self._create_virt_interface({})
        self.assertIsNotNone(vif['id'])
        ignored_keys = ['id', 'deleted', 'deleted_at', 'updated_at',
                        'created_at', 'uuid']
        self._assertEqualObjects(vif, self._get_base_values(), ignored_keys)
    def test_virtual_interface_create_with_duplicate_address(self):
        """Reusing a VIF uuid raises VirtualInterfaceCreateException."""
        vif = self._create_virt_interface({})
        self.assertRaises(exception.VirtualInterfaceCreateException,
                          self._create_virt_interface, {"uuid": vif['uuid']})
    def test_virtual_interface_get(self):
        """Each VIF is retrievable by its id."""
        vifs = [self._create_virt_interface({'address': 'a'}),
                self._create_virt_interface({'address': 'b'})]
        for vif in vifs:
            real_vif = db.virtual_interface_get(self.ctxt, vif['id'])
            self._assertEqualObjects(vif, real_vif)
    def test_virtual_interface_get_by_address(self):
        """Each VIF is retrievable by its MAC address."""
        vifs = [self._create_virt_interface({'address': 'first'}),
                self._create_virt_interface({'address': 'second'})]
        for vif in vifs:
            real_vif = db.virtual_interface_get_by_address(self.ctxt,
                                                           vif['address'])
            self._assertEqualObjects(vif, real_vif)
    def test_virtual_interface_get_by_address_not_found(self):
        """An unknown address yields None, not an exception."""
        self.assertIsNone(db.virtual_interface_get_by_address(self.ctxt,
                          "i.nv.ali.ip"))
    def test_virtual_interface_get_by_address_data_error_exception(self):
        """A DBError from the query surfaces as InvalidIpAddressError."""
        self.mock_db_query_first_to_raise_data_error_exception()
        self.assertRaises(exception.InvalidIpAddressError,
                          db.virtual_interface_get_by_address,
                          self.ctxt,
                          "i.nv.ali.ip")
    def test_virtual_interface_get_by_uuid(self):
        """Each VIF is retrievable by its uuid."""
        vifs = [self._create_virt_interface({"address": "address_1"}),
                self._create_virt_interface({"address": "address_2"})]
        for vif in vifs:
            real_vif = db.virtual_interface_get_by_uuid(self.ctxt, vif['uuid'])
            self._assertEqualObjects(vif, real_vif)
    def test_virtual_interface_get_by_instance(self):
        """Listing by instance returns only that instance's VIFs; the
        multi-NIC instance's list preserves creation order."""
        inst_uuid2 = db.instance_create(self.ctxt, {})['uuid']
        vifs1 = [self._create_virt_interface({'address': 'fake1'}),
                 self._create_virt_interface({'address': 'fake2'})]
        # multiple nic of same instance
        vifs2 = [self._create_virt_interface({'address': 'fake3',
                                              'instance_uuid': inst_uuid2}),
                 self._create_virt_interface({'address': 'fake4',
                                              'instance_uuid': inst_uuid2})]
        vifs1_real = db.virtual_interface_get_by_instance(self.ctxt,
                                                          self.instance_uuid)
        vifs2_real = db.virtual_interface_get_by_instance(self.ctxt,
                                                          inst_uuid2)
        self._assertEqualListsOfObjects(vifs1, vifs1_real)
        self._assertEqualOrderedListOfObjects(vifs2, vifs2_real)
    def test_virtual_interface_get_by_instance_and_network(self):
        """Lookup by (instance, network) pinpoints a single VIF."""
        inst_uuid2 = db.instance_create(self.ctxt, {})['uuid']
        values = {'host': 'localhost', 'project_id': 'project2'}
        network_id = db.network_create_safe(self.ctxt, values)['id']
        vifs = [self._create_virt_interface({'address': 'fake1'}),
                self._create_virt_interface({'address': 'fake2',
                                             'network_id': network_id,
                                             'instance_uuid': inst_uuid2}),
                self._create_virt_interface({'address': 'fake3',
                                             'instance_uuid': inst_uuid2})]
        for vif in vifs:
            params = (self.ctxt, vif['instance_uuid'], vif['network_id'])
            r_vif = db.virtual_interface_get_by_instance_and_network(*params)
            self._assertEqualObjects(r_vif, vif)
    def test_virtual_interface_delete_by_instance(self):
        """Deleting by instance removes only that instance's VIFs."""
        inst_uuid2 = db.instance_create(self.ctxt, {})['uuid']
        values = [dict(address='fake1'), dict(address='fake2'),
                  dict(address='fake3', instance_uuid=inst_uuid2)]
        for vals in values:
            self._create_virt_interface(vals)
        db.virtual_interface_delete_by_instance(self.ctxt, self.instance_uuid)
        real_vifs1 = db.virtual_interface_get_by_instance(self.ctxt,
                                                          self.instance_uuid)
        real_vifs2 = db.virtual_interface_get_by_instance(self.ctxt,
                                                          inst_uuid2)
        self.assertEqual(len(real_vifs1), 0)
        self.assertEqual(len(real_vifs2), 1)
    def test_virtual_interface_get_all(self):
        """get_all returns VIFs across all instances."""
        inst_uuid2 = db.instance_create(self.ctxt, {})['uuid']
        values = [dict(address='fake1'), dict(address='fake2'),
                  dict(address='fake3', instance_uuid=inst_uuid2)]
        vifs = [self._create_virt_interface(val) for val in values]
        real_vifs = db.virtual_interface_get_all(self.ctxt)
        self._assertEqualListsOfObjects(vifs, real_vifs)
class NetworkTestCase(test.TestCase, ModelsObjectComparatorMixin):
    """Tests for db.api.network_* methods."""

    def setUp(self):
        super(NetworkTestCase, self).setUp()
        self.ctxt = context.get_admin_context()

    def _get_associated_fixed_ip(self, host, cidr, ip):
        """Create a network with ``cidr``, an instance on ``host`` and a
        fixed IP ``ip`` allocated and associated with that instance.

        Returns the (network, instance) pair for further assertions.
        """
        network = db.network_create_safe(self.ctxt,
            {'project_id': 'project1', 'cidr': cidr})
        self.assertFalse(db.network_in_use_on_host(self.ctxt, network.id,
            host))
        instance = db.instance_create(self.ctxt,
            {'project_id': 'project1', 'host': host})
        virtual_interface = db.virtual_interface_create(self.ctxt,
            {'instance_uuid': instance.uuid, 'network_id': network.id,
            'address': ip})
        db.fixed_ip_create(self.ctxt, {'address': ip,
            'network_id': network.id, 'allocated': True,
            'virtual_interface_id': virtual_interface.id})
        db.fixed_ip_associate(self.ctxt, ip, instance.uuid,
            network.id, virtual_interface_id=virtual_interface['id'])
        return network, instance

    def test_network_get_associated_default_route(self):
        """Only the first network associated with an instance is flagged
        as carrying the default route.
        """
        network, instance = self._get_associated_fixed_ip('host.net',
            '192.0.2.0/30', '192.0.2.1')
        # Attach a second network/IP to the same instance.
        network2 = db.network_create_safe(self.ctxt,
            {'project_id': 'project1', 'cidr': '192.0.3.0/30'})
        ip = '192.0.3.1'
        virtual_interface = db.virtual_interface_create(self.ctxt,
            {'instance_uuid': instance.uuid, 'network_id': network2.id,
            'address': ip})
        db.fixed_ip_create(self.ctxt, {'address': ip,
            'network_id': network2.id, 'allocated': True,
            'virtual_interface_id': virtual_interface.id})
        db.fixed_ip_associate(self.ctxt, ip, instance.uuid,
            network2.id)
        data = db.network_get_associated_fixed_ips(self.ctxt, network.id)
        self.assertEqual(1, len(data))
        self.assertTrue(data[0]['default_route'])
        data = db.network_get_associated_fixed_ips(self.ctxt, network2.id)
        self.assertEqual(1, len(data))
        self.assertFalse(data[0]['default_route'])

    def test_network_get_associated_fixed_ips(self):
        """An associated fixed IP row reports its address, vif address,
        instance uuid and allocated flag.
        """
        network, instance = self._get_associated_fixed_ip('host.net',
            '192.0.2.0/30', '192.0.2.1')
        data = db.network_get_associated_fixed_ips(self.ctxt, network.id)
        self.assertEqual(1, len(data))
        self.assertEqual('192.0.2.1', data[0]['address'])
        self.assertEqual('192.0.2.1', data[0]['vif_address'])
        self.assertEqual(instance.uuid, data[0]['instance_uuid'])
        # The key is the literal 'allocated' column of the fixed-ip row.
        # The previous code spelled it fields.PciDeviceStatus.ALLOCATED,
        # an unrelated PCI-status enum whose value merely happens to be
        # the string 'allocated'.
        self.assertTrue(data[0]['allocated'])

    def test_network_create_safe(self):
        """network_create_safe assigns a uuid and persists the values."""
        values = {'host': 'localhost', 'project_id': 'project1'}
        network = db.network_create_safe(self.ctxt, values)
        self.assertEqual(36, len(network['uuid']))
        db_network = db.network_get(self.ctxt, network['id'])
        self._assertEqualObjects(network, db_network)

    def test_network_create_with_duplicate_vlan(self):
        """Creating a second network on the same vlan raises
        DuplicateVlan.
        """
        values1 = {'host': 'localhost', 'project_id': 'project1', 'vlan': 1}
        values2 = {'host': 'something', 'project_id': 'project1', 'vlan': 1}
        db.network_create_safe(self.ctxt, values1)
        self.assertRaises(exception.DuplicateVlan,
                          db.network_create_safe, self.ctxt, values2)

    def test_network_delete_safe(self):
        """network_delete_safe refuses while an IP is allocated, then
        soft-deletes the network and its fixed IPs.
        """
        values = {'host': 'localhost', 'project_id': 'project1'}
        network = db.network_create_safe(self.ctxt, values)
        db.network_get(self.ctxt, network['id'])
        values = {'network_id': network['id'], 'address': '192.168.1.5'}
        address1 = db.fixed_ip_create(self.ctxt, values)['address']
        values = {'network_id': network['id'],
                  'address': '192.168.1.6',
                  'allocated': True}
        address2 = db.fixed_ip_create(self.ctxt, values)['address']
        # An allocated IP blocks deletion.
        self.assertRaises(exception.NetworkInUse,
                          db.network_delete_safe, self.ctxt, network['id'])
        db.fixed_ip_update(self.ctxt, address2, {'allocated': False})
        network = db.network_delete_safe(self.ctxt, network['id'])
        self.assertRaises(exception.FixedIpNotFoundForAddress,
                          db.fixed_ip_get_by_address, self.ctxt, address1)
        # The fixed IP is soft-deleted, visible with read_deleted='yes'.
        ctxt = self.ctxt.elevated(read_deleted='yes')
        fixed_ip = db.fixed_ip_get_by_address(ctxt, address1)
        self.assertTrue(fixed_ip['deleted'])

    def test_network_in_use_on_host(self):
        """A network counts as in use on a host only when an instance on
        that host holds an allocated fixed IP from it.
        """
        values = {'host': 'foo', 'hostname': 'myname'}
        instance = db.instance_create(self.ctxt, values)
        values = {'address': '192.168.1.5', 'instance_uuid': instance['uuid']}
        vif = db.virtual_interface_create(self.ctxt, values)
        values = {'address': '192.168.1.6',
                  'network_id': 1,
                  'allocated': True,
                  'instance_uuid': instance['uuid'],
                  'virtual_interface_id': vif['id']}
        db.fixed_ip_create(self.ctxt, values)
        # Idiomatic boolean asserts instead of assertEqual(x, True/False).
        self.assertTrue(db.network_in_use_on_host(self.ctxt, 1, 'foo'))
        self.assertFalse(db.network_in_use_on_host(self.ctxt, 1, 'bar'))

    def test_network_update_nonexistent(self):
        """Updating an unknown network raises NetworkNotFound."""
        self.assertRaises(exception.NetworkNotFound,
                          db.network_update, self.ctxt, 123456, {})

    def test_network_update_with_duplicate_vlan(self):
        """Updating a network onto another network's vlan raises
        DuplicateVlan.
        """
        values1 = {'host': 'localhost', 'project_id': 'project1', 'vlan': 1}
        values2 = {'host': 'something', 'project_id': 'project1', 'vlan': 2}
        network_ref = db.network_create_safe(self.ctxt, values1)
        db.network_create_safe(self.ctxt, values2)
        self.assertRaises(exception.DuplicateVlan,
                          db.network_update, self.ctxt,
                          network_ref["id"], values2)

    def test_network_update(self):
        """network_update persists changed columns."""
        network = db.network_create_safe(self.ctxt, {'project_id': 'project1',
            'vlan': 1, 'host': 'test.com'})
        db.network_update(self.ctxt, network.id, {'vlan': 2})
        network_new = db.network_get(self.ctxt, network.id)
        self.assertEqual(2, network_new.vlan)

    def test_network_set_host_nonexistent_network(self):
        """Setting the host of an unknown network raises NetworkNotFound."""
        self.assertRaises(exception.NetworkNotFound, db.network_set_host,
                          self.ctxt, 123456, 'nonexistent')

    def test_network_set_host_already_set_correct(self):
        """Setting a host that is already set to the same value is a
        no-op returning None.
        """
        values = {'host': 'example.com', 'project_id': 'project1'}
        network = db.network_create_safe(self.ctxt, values)
        self.assertIsNone(db.network_set_host(self.ctxt, network.id,
                          'example.com'))

    def test_network_set_host_already_set_incorrect(self):
        """Setting a host on a network that already has a different host
        returns None and does not steal it.
        """
        values = {'host': 'example.com', 'project_id': 'project1'}
        network = db.network_create_safe(self.ctxt, values)
        self.assertIsNone(db.network_set_host(self.ctxt, network.id,
                                              'new.example.com'))

    def test_network_set_host_with_initially_no_host(self):
        """An unset host is claimed by network_set_host."""
        values = {'project_id': 'project1'}
        network = db.network_create_safe(self.ctxt, values)
        db.network_set_host(self.ctxt, network.id, 'example.com')
        self.assertEqual('example.com',
                         db.network_get(self.ctxt, network.id).host)

    def test_network_set_host_succeeds_retry_on_deadlock(self):
        """A DBDeadlock on the first UPDATE is retried transparently."""
        values = {'project_id': 'project1'}
        network = db.network_create_safe(self.ctxt, values)

        def fake_update(params):
            if mock_update.call_count == 1:
                raise db_exc.DBDeadlock()
            else:
                return 1

        with mock.patch('sqlalchemy.orm.query.Query.update',
                        side_effect=fake_update) as mock_update:
            db.network_set_host(self.ctxt, network.id, 'example.com')
            self.assertEqual(2, mock_update.call_count)

    def test_network_set_host_succeeds_retry_on_no_rows_updated(self):
        """A zero-row UPDATE (lost race) is retried transparently."""
        values = {'project_id': 'project1'}
        network = db.network_create_safe(self.ctxt, values)

        def fake_update(params):
            if mock_update.call_count == 1:
                return 0
            else:
                return 1

        with mock.patch('sqlalchemy.orm.query.Query.update',
                        side_effect=fake_update) as mock_update:
            db.network_set_host(self.ctxt, network.id, 'example.com')
            self.assertEqual(2, mock_update.call_count)

    def test_network_set_host_failed_with_retry_on_no_rows_updated(self):
        """After exhausting retries on zero-row UPDATEs the call raises
        NetworkSetHostFailed.
        """
        values = {'project_id': 'project1'}
        network = db.network_create_safe(self.ctxt, values)
        with mock.patch('sqlalchemy.orm.query.Query.update',
                        return_value=0) as mock_update:
            self.assertRaises(exception.NetworkSetHostFailed,
                              db.network_set_host, self.ctxt, network.id,
                              'example.com')
            # 5 retries + initial attempt
            self.assertEqual(6, mock_update.call_count)

    def test_network_get_all_by_host(self):
        """A network is tied to a host via its own host column, a fixed
        IP's host, or an instance's host.
        """
        self.assertEqual([],
                         db.network_get_all_by_host(self.ctxt, 'example.com'))
        host = 'h1.example.com'
        # network with host set
        net1 = db.network_create_safe(self.ctxt, {'host': host})
        self._assertEqualListsOfObjects([net1],
                                        db.network_get_all_by_host(
                                            self.ctxt, host))
        # network with fixed ip with host set
        net2 = db.network_create_safe(self.ctxt, {})
        db.fixed_ip_create(self.ctxt, {'host': host, 'network_id': net2.id})
        db.network_get_all_by_host(self.ctxt, host)
        self._assertEqualListsOfObjects([net1, net2],
                                        db.network_get_all_by_host(
                                            self.ctxt, host))
        # network with instance with host set
        net3 = db.network_create_safe(self.ctxt, {})
        instance = db.instance_create(self.ctxt, {'host': host})
        db.fixed_ip_create(self.ctxt, {'network_id': net3.id,
                                       'instance_uuid': instance.uuid})
        self._assertEqualListsOfObjects([net1, net2, net3],
                                        db.network_get_all_by_host(
                                            self.ctxt, host))

    def test_network_get_by_cidr(self):
        """Lookup works for both the IPv4 and the IPv6 cidr columns."""
        cidr = '192.0.2.0/30'
        cidr_v6 = '2001:db8:1::/64'
        network = db.network_create_safe(self.ctxt,
            {'project_id': 'project1', 'cidr': cidr, 'cidr_v6': cidr_v6})
        self._assertEqualObjects(network,
                                 db.network_get_by_cidr(self.ctxt, cidr))
        self._assertEqualObjects(network,
                                 db.network_get_by_cidr(self.ctxt, cidr_v6))

    def test_network_get_by_cidr_nonexistent(self):
        """An unknown cidr raises NetworkNotFoundForCidr."""
        self.assertRaises(exception.NetworkNotFoundForCidr,
                          db.network_get_by_cidr, self.ctxt, '192.0.2.0/30')

    def test_network_get_by_uuid(self):
        """A network is retrievable by its uuid."""
        network = db.network_create_safe(self.ctxt,
            {'project_id': 'project_1'})
        self._assertEqualObjects(network,
                                 db.network_get_by_uuid(self.ctxt,
                                                        network.uuid))

    def test_network_get_by_uuid_nonexistent(self):
        """An unknown uuid raises NetworkNotFoundForUUID."""
        self.assertRaises(exception.NetworkNotFoundForUUID,
                          db.network_get_by_uuid, self.ctxt,
                          'non-existent-uuid')

    def test_network_get_all_by_uuids_no_networks(self):
        """No matching uuids raises NoNetworksFound."""
        self.assertRaises(exception.NoNetworksFound,
                          db.network_get_all_by_uuids, self.ctxt,
                          ['non-existent-uuid'])

    def test_network_get_all_by_uuids(self):
        """All requested uuids are returned."""
        net1 = db.network_create_safe(self.ctxt, {})
        net2 = db.network_create_safe(self.ctxt, {})
        self._assertEqualListsOfObjects([net1, net2],
                                        db.network_get_all_by_uuids(
                                            self.ctxt,
                                            [net1.uuid, net2.uuid]))

    def test_network_get_all_no_networks(self):
        """An empty table raises NoNetworksFound."""
        self.assertRaises(exception.NoNetworksFound,
                          db.network_get_all, self.ctxt)

    def test_network_get_all(self):
        """network_get_all returns every network."""
        network = db.network_create_safe(self.ctxt, {})
        network_db = db.network_get_all(self.ctxt)
        self.assertEqual(1, len(network_db))
        self._assertEqualObjects(network, network_db[0])

    def test_network_get_all_admin_user(self):
        """With project_only, an admin still sees all networks."""
        network1 = db.network_create_safe(self.ctxt, {})
        network2 = db.network_create_safe(self.ctxt,
                                          {'project_id': 'project1'})
        self._assertEqualListsOfObjects([network1, network2],
                                        db.network_get_all(self.ctxt,
                                                           project_only=True))

    def test_network_get_all_normal_user(self):
        """With project_only, a normal user sees only their project's
        networks.
        """
        normal_ctxt = context.RequestContext('fake', 'fake')
        db.network_create_safe(self.ctxt, {})
        db.network_create_safe(self.ctxt, {'project_id': 'project1'})
        network1 = db.network_create_safe(self.ctxt,
                                          {'project_id': 'fake'})
        network_db = db.network_get_all(normal_ctxt, project_only=True)
        self.assertEqual(1, len(network_db))
        self._assertEqualObjects(network1, network_db[0])

    def test_network_get(self):
        """A deleted network can no longer be fetched by id."""
        network = db.network_create_safe(self.ctxt, {})
        self._assertEqualObjects(db.network_get(self.ctxt, network.id),
                                 network)
        db.network_delete_safe(self.ctxt, network.id)
        self.assertRaises(exception.NetworkNotFound,
                          db.network_get, self.ctxt, network.id)

    def test_network_associate(self):
        """network_associate claims an unowned network for a project."""
        network = db.network_create_safe(self.ctxt, {})
        self.assertIsNone(network.project_id)
        db.network_associate(self.ctxt, "project1", network.id)
        self.assertEqual("project1", db.network_get(self.ctxt,
                                                    network.id).project_id)

    def test_network_diassociate(self):
        # NOTE(review): method name has a typo ("diassociate"); kept to
        # avoid churning test listings that reference it.
        """network_disassociate can clear project and host independently."""
        network = db.network_create_safe(self.ctxt,
            {'project_id': 'project1', 'host': 'test.net'})
        # disassociate project
        db.network_disassociate(self.ctxt, network.id, False, True)
        self.assertIsNone(db.network_get(self.ctxt, network.id).project_id)
        # disassociate host
        db.network_disassociate(self.ctxt, network.id, True, False)
        self.assertIsNone(db.network_get(self.ctxt, network.id).host)

    def test_network_count_reserved_ips(self):
        """Only fixed IPs flagged reserved are counted."""
        net = db.network_create_safe(self.ctxt, {})
        self.assertEqual(0, db.network_count_reserved_ips(self.ctxt, net.id))
        db.fixed_ip_create(self.ctxt, {'network_id': net.id,
                                       'reserved': True})
        self.assertEqual(1, db.network_count_reserved_ips(self.ctxt, net.id))
class KeyPairTestCase(test.TestCase, ModelsObjectComparatorMixin):
    """Tests for db.api.key_pair_* methods."""

    def setUp(self):
        super(KeyPairTestCase, self).setUp()
        self.ctxt = context.get_admin_context()

    def _create_key_pair(self, values):
        """Insert a keypair row built from ``values``."""
        return db.key_pair_create(self.ctxt, values)

    def test_key_pair_create(self):
        """A created keypair gets an id and keeps all supplied fields."""
        values = {
            'name': 'test_1',
            'type': 'ssh',
            'user_id': 'test_user_id_1',
            'public_key': 'test_public_key_1',
            'fingerprint': 'test_fingerprint_1'
        }
        created = self._create_key_pair(values)
        self.assertIsNotNone(created['id'])
        ignored_keys = ['deleted', 'created_at', 'updated_at',
                        'deleted_at', 'id']
        self._assertEqualObjects(created, values, ignored_keys)

    def test_key_pair_create_with_duplicate_name(self):
        """A duplicate (user_id, name) pair raises KeyPairExists."""
        values = {'name': 'test_name', 'user_id': 'test_user_id',
                  'type': 'ssh'}
        self._create_key_pair(values)
        self.assertRaises(exception.KeyPairExists, self._create_key_pair,
                          values)

    def test_key_pair_get(self):
        """Each keypair is retrievable by (user_id, name)."""
        all_values = [
            {'name': 'test_1', 'user_id': 'test_user_id_1', 'type': 'ssh'},
            {'name': 'test_2', 'user_id': 'test_user_id_2', 'type': 'ssh'},
            {'name': 'test_3', 'user_id': 'test_user_id_3', 'type': 'ssh'}
        ]
        for created in [self._create_key_pair(v) for v in all_values]:
            fetched = db.key_pair_get(self.ctxt, created['user_id'],
                                      created['name'])
            self._assertEqualObjects(created, fetched)

    def test_key_pair_get_no_results(self):
        """Fetching a missing keypair raises KeypairNotFound."""
        self.assertRaises(exception.KeypairNotFound, db.key_pair_get,
                          self.ctxt, 'test_user_id_1', 'test_1')

    def test_key_pair_get_deleted(self):
        """Destroyed keypairs are hidden unless read_deleted='yes'."""
        values = {'name': 'test_1', 'user_id': 'test_user_id_1',
                  'type': 'ssh'}
        created = self._create_key_pair(values)
        db.key_pair_destroy(self.ctxt, values['user_id'], values['name'])
        self.assertRaises(exception.KeypairNotFound, db.key_pair_get,
                          self.ctxt, values['user_id'], values['name'])
        deleted_ctxt = self.ctxt.elevated(read_deleted='yes')
        deleted = db.key_pair_get(deleted_ctxt, values['user_id'],
                                  values['name'])
        ignored_keys = ['deleted', 'created_at', 'updated_at', 'deleted_at']
        self._assertEqualObjects(deleted, created, ignored_keys)
        # Soft delete records the row's own id in its 'deleted' column.
        self.assertEqual(deleted['deleted'], deleted['id'])

    def test_key_pair_get_all_by_user(self):
        """Listing returns exactly the requested user's keypairs."""
        all_values = [
            {'name': 'test_1', 'user_id': 'test_user_id_1', 'type': 'ssh'},
            {'name': 'test_2', 'user_id': 'test_user_id_1', 'type': 'ssh'},
            {'name': 'test_3', 'user_id': 'test_user_id_2', 'type': 'ssh'}
        ]
        expected_1 = [self._create_key_pair(v) for v in all_values
                      if v['user_id'] == 'test_user_id_1']
        expected_2 = [self._create_key_pair(v) for v in all_values
                      if v['user_id'] == 'test_user_id_2']
        real_keys_1 = db.key_pair_get_all_by_user(self.ctxt,
                                                  'test_user_id_1')
        real_keys_2 = db.key_pair_get_all_by_user(self.ctxt,
                                                  'test_user_id_2')
        self._assertEqualListsOfObjects(expected_1, real_keys_1)
        self._assertEqualListsOfObjects(expected_2, real_keys_2)

    def test_key_pair_count_by_user(self):
        """Counting yields per-user totals."""
        all_values = [
            {'name': 'test_1', 'user_id': 'test_user_id_1', 'type': 'ssh'},
            {'name': 'test_2', 'user_id': 'test_user_id_1', 'type': 'ssh'},
            {'name': 'test_3', 'user_id': 'test_user_id_2', 'type': 'ssh'}
        ]
        for values in all_values:
            self._create_key_pair(values)
        self.assertEqual(db.key_pair_count_by_user(self.ctxt,
                                                   'test_user_id_1'), 2)
        self.assertEqual(db.key_pair_count_by_user(self.ctxt,
                                                   'test_user_id_2'), 1)

    def test_key_pair_destroy(self):
        """A destroyed keypair can no longer be fetched."""
        values = {'name': 'test_1', 'user_id': 'test_user_id_1',
                  'type': 'ssh'}
        self._create_key_pair(values)
        db.key_pair_destroy(self.ctxt, values['user_id'], values['name'])
        self.assertRaises(exception.KeypairNotFound, db.key_pair_get,
                          self.ctxt, values['user_id'], values['name'])

    def test_key_pair_destroy_no_such_key(self):
        """Destroying a missing keypair raises KeypairNotFound."""
        self.assertRaises(exception.KeypairNotFound,
                          db.key_pair_destroy, self.ctxt,
                          'test_user_id_1', 'test_1')
class QuotaTestCase(test.TestCase, ModelsObjectComparatorMixin):
"""Tests for db.api.quota_* methods."""
    def setUp(self):
        # Quota operations require admin rights; build an admin context.
        super(QuotaTestCase, self).setUp()
        self.ctxt = context.get_admin_context()
def test_quota_create(self):
quota = db.quota_create(self.ctxt, 'project1', 'resource', 99)
self.assertEqual(quota.resource, 'resource')
self.assertEqual(quota.hard_limit, 99)
self.assertEqual(quota.project_id, 'project1')
def test_quota_get(self):
quota = db.quota_create(self.ctxt, 'project1', 'resource', 99)
quota_db = db.quota_get(self.ctxt, 'project1', 'resource')
self._assertEqualObjects(quota, quota_db)
def test_quota_get_all_by_project(self):
for i in range(3):
for j in range(3):
db.quota_create(self.ctxt, 'proj%d' % i, 'resource%d' % j, j)
for i in range(3):
quotas_db = db.quota_get_all_by_project(self.ctxt, 'proj%d' % i)
self.assertEqual(quotas_db, {'project_id': 'proj%d' % i,
'resource0': 0,
'resource1': 1,
'resource2': 2})
def test_quota_get_all_by_project_and_user(self):
for i in range(3):
for j in range(3):
db.quota_create(self.ctxt, 'proj%d' % i, 'resource%d' % j,
j - 1, user_id='user%d' % i)
for i in range(3):
quotas_db = db.quota_get_all_by_project_and_user(self.ctxt,
'proj%d' % i,
'user%d' % i)
self.assertEqual(quotas_db, {'project_id': 'proj%d' % i,
'user_id': 'user%d' % i,
'resource0': -1,
'resource1': 0,
'resource2': 1})
def test_quota_update(self):
db.quota_create(self.ctxt, 'project1', 'resource1', 41)
db.quota_update(self.ctxt, 'project1', 'resource1', 42)
quota = db.quota_get(self.ctxt, 'project1', 'resource1')
self.assertEqual(quota.hard_limit, 42)
self.assertEqual(quota.resource, 'resource1')
self.assertEqual(quota.project_id, 'project1')
def test_quota_update_nonexistent(self):
self.assertRaises(exception.ProjectQuotaNotFound,
db.quota_update, self.ctxt, 'project1', 'resource1', 42)
def test_quota_get_nonexistent(self):
self.assertRaises(exception.ProjectQuotaNotFound,
db.quota_get, self.ctxt, 'project1', 'resource1')
    def test_quota_reserve_all_resources(self):
        """Reserve a delta for every reservable resource at once and check
        each resulting usage row reports the expected in_use/reserved
        counts.
        """
        quotas = {}
        deltas = {}
        reservable_resources = {}
        # Register a quota of 100 for every reservable resource; the delta
        # for each resource is simply its enumeration index.
        for i, resource in enumerate(quota.resources):
            if isinstance(resource, quota.ReservableResource):
                quotas[resource.name] = db.quota_create(self.ctxt, 'project1',
                                                        resource.name,
                                                        100).hard_limit
                deltas[resource.name] = i
                reservable_resources[resource.name] = resource
        # Build real usage: 3 instances => 3 instances, 6 cores, 9 MB ram.
        usages = {'instances': 3, 'cores': 6, 'ram': 9}
        instances = []
        for i in range(3):
            instances.append(db.instance_create(self.ctxt,
                             {'vcpus': 2, 'memory_mb': 3,
                             'project_id': 'project1'}))
        # Two fixed IPs associated with the first instance.
        usages['fixed_ips'] = 2
        network = db.network_create_safe(self.ctxt, {})
        for i in range(2):
            address = '192.168.0.%d' % i
            db.fixed_ip_create(self.ctxt, {'project_id': 'project1',
                                           'address': address,
                                           'network_id': network['id']})
            db.fixed_ip_associate(self.ctxt, address,
                                  instances[0].uuid, network['id'])
        # Five floating IPs, three security groups, four server groups.
        usages['floating_ips'] = 5
        for i in range(5):
            db.floating_ip_create(self.ctxt, {'project_id': 'project1'})
        usages['security_groups'] = 3
        for i in range(3):
            db.security_group_create(self.ctxt, {'project_id': 'project1'})
        usages['server_groups'] = 4
        for i in range(4):
            db.instance_group_create(self.ctxt, {'uuid': str(i),
                                                 'project_id': 'project1'})
        # Reserve the deltas across all reservable resources in one call.
        reservations_uuids = db.quota_reserve(self.ctxt, reservable_resources,
                                              quotas, quotas, deltas, None,
                                              None, None, 'project1')
        resources_names = list(reservable_resources.keys())
        # Every reservation must map back to exactly one resource, with the
        # synced in_use counts and the requested reserved deltas.
        for reservation_uuid in reservations_uuids:
            reservation = _reservation_get(self.ctxt, reservation_uuid)
            usage = db.quota_usage_get(self.ctxt, 'project1',
                                       reservation.resource)
            self.assertEqual(usage.in_use, usages[reservation.resource],
                             'Resource: %s' % reservation.resource)
            self.assertEqual(usage.reserved, deltas[reservation.resource])
            self.assertIn(reservation.resource, resources_names)
            resources_names.remove(reservation.resource)
        # No reservable resource may be left without a reservation.
        self.assertEqual(len(resources_names), 0)
def test_quota_destroy_all_by_project(self):
reservations = _quota_reserve(self.ctxt, 'project1', 'user1')
db.quota_destroy_all_by_project(self.ctxt, 'project1')
self.assertEqual(db.quota_get_all_by_project(self.ctxt, 'project1'),
{'project_id': 'project1'})
self.assertEqual(db.quota_get_all_by_project_and_user(self.ctxt,
'project1', 'user1'),
{'project_id': 'project1', 'user_id': 'user1'})
self.assertEqual(db.quota_usage_get_all_by_project(
self.ctxt, 'project1'),
{'project_id': 'project1'})
for r in reservations:
self.assertRaises(exception.ReservationNotFound,
_reservation_get, self.ctxt, r)
    def test_quota_destroy_all_by_project_and_user(self):
        # User-scoped destroy: user quotas and usages are removed, but the
        # fixed_ips usage survives — presumably because fixed_ips is
        # tracked per-project rather than per-user (see PER_PROJECT_QUOTAS).
        reservations = _quota_reserve(self.ctxt, 'project1', 'user1')
        db.quota_destroy_all_by_project_and_user(self.ctxt, 'project1',
                                                 'user1')
        self.assertEqual(db.quota_get_all_by_project_and_user(self.ctxt,
                            'project1', 'user1'),
                         {'project_id': 'project1',
                          'user_id': 'user1'})
        self.assertEqual(db.quota_usage_get_all_by_project_and_user(
                            self.ctxt, 'project1', 'user1'),
                         {'project_id': 'project1',
                          'user_id': 'user1',
                          'fixed_ips': {'in_use': 2, 'reserved': 2}})
        for r in reservations:
            self.assertRaises(exception.ReservationNotFound,
                              _reservation_get, self.ctxt, r)
    def test_quota_usage_get_nonexistent(self):
        # Usage lookup for an unknown resource must raise, not return empty.
        self.assertRaises(exception.QuotaUsageNotFound, db.quota_usage_get,
                          self.ctxt, 'p1', 'nonexitent_resource')
    def test_quota_usage_get(self):
        # _quota_reserve seeds usages; resource0 is expected to come back
        # with all-zero counters (in_use + reserved == total).
        _quota_reserve(self.ctxt, 'p1', 'u1')
        quota_usage = db.quota_usage_get(self.ctxt, 'p1', 'resource0')
        expected = {'resource': 'resource0', 'project_id': 'p1',
                    'in_use': 0, 'reserved': 0, 'total': 0}
        for key, value in expected.items():
            self.assertEqual(value, quota_usage[key])
    def test_quota_usage_get_all_by_project(self):
        # Project-level view: one dict keyed by resource name, plus the
        # identifying project_id key.
        _quota_reserve(self.ctxt, 'p1', 'u1')
        expected = {'project_id': 'p1',
                    'resource0': {'in_use': 0, 'reserved': 0},
                    'resource1': {'in_use': 1, 'reserved': 1},
                    'fixed_ips': {'in_use': 2, 'reserved': 2}}
        self.assertEqual(expected, db.quota_usage_get_all_by_project(
                            self.ctxt, 'p1'))
    def test_quota_usage_get_all_by_project_and_user(self):
        # Same as the project-level view but additionally carries user_id.
        _quota_reserve(self.ctxt, 'p1', 'u1')
        expected = {'project_id': 'p1',
                    'user_id': 'u1',
                    'resource0': {'in_use': 0, 'reserved': 0},
                    'resource1': {'in_use': 1, 'reserved': 1},
                    'fixed_ips': {'in_use': 2, 'reserved': 2}}
        self.assertEqual(expected, db.quota_usage_get_all_by_project_and_user(
                            self.ctxt, 'p1', 'u1'))
    def test_get_project_user_quota_usages_in_order(self):
        # Only verifies that the usages query applies an ORDER BY —
        # presumably so row locks are taken in a deterministic order;
        # confirm against _get_project_user_quota_usages itself.
        _quota_reserve(self.ctxt, 'p1', 'u1')
        with mock.patch.object(query.Query, 'order_by') as order_mock:
            sqlalchemy_api._get_project_user_quota_usages(
                self.ctxt, None, 'p1', 'u1')
        self.assertTrue(order_mock.called)
    def test_quota_usage_update_nonexistent(self):
        # Updating a usage row that does not exist must raise.
        self.assertRaises(exception.QuotaUsageNotFound, db.quota_usage_update,
                          self.ctxt, 'p1', 'u1', 'resource', in_use=42)
    def test_quota_usage_update(self):
        # Update in_use/reserved and verify the derived total (42 + 43 = 85).
        _quota_reserve(self.ctxt, 'p1', 'u1')
        db.quota_usage_update(self.ctxt, 'p1', 'u1', 'resource0', in_use=42,
                              reserved=43)
        quota_usage = db.quota_usage_get(self.ctxt, 'p1', 'resource0', 'u1')
        expected = {'resource': 'resource0', 'project_id': 'p1',
                    'user_id': 'u1', 'in_use': 42, 'reserved': 43, 'total': 85}
        for key, value in expected.items():
            self.assertEqual(value, quota_usage[key])
    def test_quota_create_exists(self):
        # Creating the same (project, resource) quota twice must raise.
        db.quota_create(self.ctxt, 'project1', 'resource1', 41)
        self.assertRaises(exception.QuotaExists, db.quota_create, self.ctxt,
                          'project1', 'resource1', 42)
class QuotaReserveNoDbTestCase(test.NoDBTestCase):
    """Tests quota reserve/refresh operations using mock.

    These exercise the private sqlalchemy_api helpers directly, with no
    database, so every collaborator is a mock/sentinel.
    """
    def test_create_quota_usage_if_missing_not_created(self):
        # Tests that QuotaUsage isn't created if it's already in user_usages.
        resource = 'fake-resource'
        project_id = 'fake-project'
        user_id = 'fake_user'
        session = mock.sentinel
        quota_usage = mock.sentinel
        user_usages = {resource: quota_usage}
        with mock.patch.object(sqlalchemy_api, '_quota_usage_create') as quc:
            self.assertFalse(sqlalchemy_api._create_quota_usage_if_missing(
                                user_usages, resource, None,
                                project_id, user_id, session))
        self.assertFalse(quc.called)
    def _test_create_quota_usage_if_missing_created(self, per_project_quotas):
        # Tests that the QuotaUsage is created.
        user_usages = {}
        if per_project_quotas:
            # Per-project resources are created without a user_id.
            resource = sqlalchemy_api.PER_PROJECT_QUOTAS[0]
        else:
            resource = 'fake-resource'
        project_id = 'fake-project'
        user_id = 'fake_user'
        session = mock.sentinel
        quota_usage = mock.sentinel
        with mock.patch.object(sqlalchemy_api, '_quota_usage_create',
                               return_value=quota_usage) as quc:
            self.assertTrue(sqlalchemy_api._create_quota_usage_if_missing(
                                user_usages, resource, None,
                                project_id, user_id, session))
        self.assertEqual(quota_usage, user_usages[resource])
        # Now test if the QuotaUsage was created with a user_id or not.
        if per_project_quotas:
            quc.assert_called_once_with(
                project_id, None, resource, 0, 0, None, session=session)
        else:
            quc.assert_called_once_with(
                project_id, user_id, resource, 0, 0, None, session=session)
    def test_create_quota_usage_if_missing_created_per_project_quotas(self):
        self._test_create_quota_usage_if_missing_created(True)
    def test_create_quota_usage_if_missing_created_user_quotas(self):
        self._test_create_quota_usage_if_missing_created(False)
    def test_is_quota_refresh_needed_in_use(self):
        # Tests when a quota refresh is needed based on the in_use value.
        # A negative in_use means the tracked usage has drifted and must be
        # refreshed; zero (or positive) does not.
        for in_use in range(-1, 1):
            # We have to set until_refresh=None otherwise mock will give it
            # a value which runs some code we don't want.
            quota_usage = mock.MagicMock(in_use=in_use, until_refresh=None)
            if in_use < 0:
                self.assertTrue(sqlalchemy_api._is_quota_refresh_needed(
                                            quota_usage, max_age=0))
            else:
                self.assertFalse(sqlalchemy_api._is_quota_refresh_needed(
                                            quota_usage, max_age=0))
    def test_is_quota_refresh_needed_until_refresh_none(self):
        quota_usage = mock.MagicMock(in_use=0, until_refresh=None)
        self.assertFalse(sqlalchemy_api._is_quota_refresh_needed(quota_usage,
                                                                 max_age=0))
    def test_is_quota_refresh_needed_until_refresh_not_none(self):
        # Tests different values for the until_refresh counter.
        # The helper decrements the counter; a refresh is due once it
        # reaches zero or below.
        for until_refresh in range(3):
            quota_usage = mock.MagicMock(in_use=0, until_refresh=until_refresh)
            refresh = sqlalchemy_api._is_quota_refresh_needed(quota_usage,
                                                              max_age=0)
            until_refresh -= 1
            if until_refresh <= 0:
                self.assertTrue(refresh)
            else:
                self.assertFalse(refresh)
            self.assertEqual(until_refresh, quota_usage.until_refresh)
    def test_refresh_quota_usages(self):
        # _refresh_quota_usages writes the new in_use and until_refresh
        # values straight onto the usage object.
        quota_usage = mock.Mock(spec=models.QuotaUsage)
        quota_usage.in_use = 5
        quota_usage.until_refresh = None
        sqlalchemy_api._refresh_quota_usages(quota_usage, until_refresh=5,
                                             in_use=6)
        self.assertEqual(6, quota_usage.in_use)
        self.assertEqual(5, quota_usage.until_refresh)
    def test_calculate_overquota_no_delta(self):
        # A negative delta (freeing resources) can never be over quota.
        deltas = {'foo': -1}
        user_quotas = {'foo': 10}
        overs = sqlalchemy_api._calculate_overquota({}, user_quotas, deltas,
                                                    {}, {})
        self.assertFalse(overs)
    def test_calculate_overquota_unlimited_user_quota(self):
        # -1 means unlimited, so no usage level can exceed it.
        deltas = {'foo': 1}
        project_quotas = {'foo': -1}
        user_quotas = {'foo': -1}
        project_usages = {'foo': {'total': 10}}
        user_usages = {'foo': {'total': 10}}
        overs = sqlalchemy_api._calculate_overquota(
            project_quotas, user_quotas, deltas, project_usages, user_usages)
        self.assertFalse(overs)
    def test_calculate_overquota_unlimited_project_quota(self):
        # Unlimited project quota with a satisfiable user quota: not over.
        deltas = {'foo': 1}
        project_quotas = {'foo': -1}
        user_quotas = {'foo': 1}
        project_usages = {'foo': {'total': 0}}
        user_usages = {'foo': {'total': 0}}
        overs = sqlalchemy_api._calculate_overquota(
            project_quotas, user_quotas, deltas, project_usages, user_usages)
        self.assertFalse(overs)
    def _test_calculate_overquota(self, resource, project_usages, user_usages):
        # Helper: with limits of 10 and a delta of 1, whichever usage dict
        # is already at 10 should push the resource over quota.
        deltas = {resource: 1}
        project_quotas = {resource: 10}
        user_quotas = {resource: 10}
        overs = sqlalchemy_api._calculate_overquota(
            project_quotas, user_quotas, deltas, project_usages, user_usages)
        self.assertEqual(resource, overs[0])
    def test_calculate_overquota_per_project_quota_overquota(self):
        # In this test, user quotas are fine but project quotas are over.
        resource = 'foo'
        project_usages = {resource: {'total': 10}}
        user_usages = {resource: {'total': 5}}
        self._test_calculate_overquota(resource, project_usages, user_usages)
    def test_calculate_overquota_per_user_quota_overquota(self):
        # In this test, project quotas are fine but user quotas are over.
        resource = 'foo'
        project_usages = {resource: {'total': 5}}
        user_usages = {resource: {'total': 10}}
        self._test_calculate_overquota(resource, project_usages, user_usages)
class QuotaClassTestCase(test.TestCase, ModelsObjectComparatorMixin):
    """Tests for the quota-class CRUD helpers of the db API."""

    def setUp(self):
        super(QuotaClassTestCase, self).setUp()
        self.ctxt = context.get_admin_context()

    def test_quota_class_get_default(self):
        # Limits are created as strings but must be returned as ints.
        params = {
            'test_resource1': '10',
            'test_resource2': '20',
            'test_resource3': '30',
        }
        for res, limit in params.items():
            db.quota_class_create(self.ctxt, 'default', res, limit)
        defaults = db.quota_class_get_default(self.ctxt)
        self.assertEqual(defaults, dict(class_name='default',
                                        test_resource1=10,
                                        test_resource2=20,
                                        test_resource3=30))

    def test_quota_class_create(self):
        qc = db.quota_class_create(self.ctxt, 'class name', 'resource', 42)
        self.assertEqual(qc.class_name, 'class name')
        self.assertEqual(qc.resource, 'resource')
        self.assertEqual(qc.hard_limit, 42)

    def test_quota_class_get(self):
        qc = db.quota_class_create(self.ctxt, 'class name', 'resource', 42)
        qc_db = db.quota_class_get(self.ctxt, 'class name', 'resource')
        self._assertEqualObjects(qc, qc_db)

    def test_quota_class_get_nonexistent(self):
        self.assertRaises(exception.QuotaClassNotFound, db.quota_class_get,
                          self.ctxt, 'nonexistent', 'resource')

    def test_quota_class_get_all_by_name(self):
        # Three classes, each with three resources; lookups must only see
        # their own class's resources.
        for i in range(3):
            for j in range(3):
                db.quota_class_create(self.ctxt, 'class%d' % i,
                                      'resource%d' % j, j)
        for i in range(3):
            classes = db.quota_class_get_all_by_name(self.ctxt, 'class%d' % i)
            self.assertEqual(classes, {'class_name': 'class%d' % i,
                            'resource0': 0, 'resource1': 1, 'resource2': 2})

    def test_quota_class_update(self):
        db.quota_class_create(self.ctxt, 'class name', 'resource', 42)
        db.quota_class_update(self.ctxt, 'class name', 'resource', 43)
        self.assertEqual(db.quota_class_get(self.ctxt, 'class name',
                                    'resource').hard_limit, 43)

    def test_quota_class_update_nonexistent(self):
        self.assertRaises(exception.QuotaClassNotFound, db.quota_class_update,
                          self.ctxt, 'class name', 'resource', 42)

    def test_refresh_quota_usages(self):
        # NOTE(review): this near-duplicates the spec'd-Mock test in
        # QuotaReserveNoDbTestCase but previously made no assertions at all,
        # so it could never fail.  Assert that the refresh actually writes
        # the new counters onto the usage object.
        quota_usages = mock.Mock()
        sqlalchemy_api._refresh_quota_usages(quota_usages, until_refresh=5,
                                             in_use=6)
        self.assertEqual(6, quota_usages.in_use)
        self.assertEqual(5, quota_usages.until_refresh)
class S3ImageTestCase(test.TestCase):
    """Tests for the s3_image_* id/uuid mapping db API calls."""
    def setUp(self):
        # Seed three mappings keyed by freshly generated image uuids.
        super(S3ImageTestCase, self).setUp()
        self.ctxt = context.get_admin_context()
        self.values = [uuidutils.generate_uuid() for i in range(3)]
        self.images = [db.s3_image_create(self.ctxt, uuid)
                                          for uuid in self.values]
    def test_s3_image_create(self):
        for ref in self.images:
            self.assertTrue(uuidutils.is_uuid_like(ref.uuid))
        self.assertEqual(sorted(self.values),
                         sorted([ref.uuid for ref in self.images]))
    def test_s3_image_get_by_uuid(self):
        for uuid in self.values:
            ref = db.s3_image_get_by_uuid(self.ctxt, uuid)
            self.assertTrue(uuidutils.is_uuid_like(ref.uuid))
            self.assertEqual(uuid, ref.uuid)
    def test_s3_image_get(self):
        # Round-trip: integer id back to the original uuid.
        self.assertEqual(sorted(self.values),
                         sorted([db.s3_image_get(self.ctxt, ref.id).uuid
                                 for ref in self.images]))
    def test_s3_image_get_not_found(self):
        self.assertRaises(exception.ImageNotFound, db.s3_image_get, self.ctxt,
                          100500)
    def test_s3_image_get_by_uuid_not_found(self):
        self.assertRaises(exception.ImageNotFound, db.s3_image_get_by_uuid,
                          self.ctxt, uuidutils.generate_uuid())
class ComputeNodeTestCase(test.TestCase, ModelsObjectComparatorMixin):
    """Tests for the compute_node_* db API calls.

    setUp creates one 'host1' service with a single compute node whose
    'stats' column is a JSON-encoded dict; most tests compare against
    that node (``self.item``).
    """

    _ignored_keys = ['id', 'deleted', 'deleted_at', 'created_at', 'updated_at']

    def setUp(self):
        super(ComputeNodeTestCase, self).setUp()
        self.ctxt = context.get_admin_context()
        self.service_dict = dict(host='host1', binary='nova-compute',
                                 topic=CONF.compute_topic, report_count=1,
                                 disabled=False)
        self.service = db.service_create(self.ctxt, self.service_dict)
        self.compute_node_dict = dict(vcpus=2, memory_mb=1024, local_gb=2048,
                                 vcpus_used=0, memory_mb_used=0,
                                 local_gb_used=0, free_ram_mb=1024,
                                 free_disk_gb=2048, hypervisor_type="xen",
                                 hypervisor_version=1, cpu_info="",
                                 running_vms=0, current_workload=0,
                                 service_id=self.service['id'],
                                 host=self.service['host'],
                                 disk_available_least=100,
                                 hypervisor_hostname='abracadabra104',
                                 host_ip='127.0.0.1',
                                 supported_instances='',
                                 pci_stats='',
                                 metrics='',
                                 extra_resources='',
                                 cpu_allocation_ratio=16.0,
                                 ram_allocation_ratio=1.5,
                                 stats='', numa_topology='')
        # add some random stats
        self.stats = dict(num_instances=3, num_proj_12345=2,
                     num_proj_23456=2, num_vm_building=3)
        self.compute_node_dict['stats'] = jsonutils.dumps(self.stats)
        self.flags(reserved_host_memory_mb=0)
        self.flags(reserved_host_disk_mb=0)
        self.item = db.compute_node_create(self.ctxt, self.compute_node_dict)

    def test_compute_node_create(self):
        # 'stats' is compared separately because it is stored as JSON.
        self._assertEqualObjects(self.compute_node_dict, self.item,
                                ignored_keys=self._ignored_keys + ['stats'])
        new_stats = jsonutils.loads(self.item['stats'])
        self.assertEqual(self.stats, new_stats)

    def test_compute_node_get_all(self):
        nodes = db.compute_node_get_all(self.ctxt)
        self.assertEqual(1, len(nodes))
        node = nodes[0]
        self._assertEqualObjects(self.compute_node_dict, node,
                    ignored_keys=self._ignored_keys +
                                 ['stats', 'service'])
        new_stats = jsonutils.loads(node['stats'])
        self.assertEqual(self.stats, new_stats)

    def test_compute_node_get_all_deleted_compute_node(self):
        # Create a service and compute node and ensure we can find its stats;
        # delete the service and compute node when done and loop again
        for x in range(2, 5):
            # Create a service
            service_data = self.service_dict.copy()
            service_data['host'] = 'host-%s' % x
            service = db.service_create(self.ctxt, service_data)

            # Create a compute node
            compute_node_data = self.compute_node_dict.copy()
            compute_node_data['service_id'] = service['id']
            compute_node_data['stats'] = jsonutils.dumps(self.stats.copy())
            compute_node_data['hypervisor_hostname'] = 'hypervisor-%s' % x
            node = db.compute_node_create(self.ctxt, compute_node_data)

            # Ensure the "new" compute node is found
            nodes = db.compute_node_get_all(self.ctxt)
            self.assertEqual(2, len(nodes))
            found = None
            for n in nodes:
                if n['id'] == node['id']:
                    found = n
                    break
            self.assertIsNotNone(found)
            # Now ensure the match has stats!
            self.assertNotEqual(jsonutils.loads(found['stats']), {})

            # Now delete the newly-created compute node to ensure the related
            # compute node stats are wiped in a cascaded fashion
            db.compute_node_delete(self.ctxt, node['id'])

            # Clean up the service
            db.service_destroy(self.ctxt, service['id'])

    def test_compute_node_get_all_mult_compute_nodes_one_service_entry(self):
        service_data = self.service_dict.copy()
        service_data['host'] = 'host2'
        service = db.service_create(self.ctxt, service_data)

        # NOTE(review): was self.item.iteritems(), which is Python-2-only;
        # six.iteritems (already used elsewhere in this module) dispatches
        # to iteritems()/items() as appropriate.
        existing_node = dict(six.iteritems(self.item))
        expected = [existing_node]

        for name in ['bm_node1', 'bm_node2']:
            compute_node_data = self.compute_node_dict.copy()
            compute_node_data['service_id'] = service['id']
            compute_node_data['stats'] = jsonutils.dumps(self.stats)
            compute_node_data['hypervisor_hostname'] = name
            node = db.compute_node_create(self.ctxt, compute_node_data)

            node = dict(node)

            expected.append(node)

        result = sorted(db.compute_node_get_all(self.ctxt),
                        key=lambda n: n['hypervisor_hostname'])

        self._assertEqualListsOfObjects(expected, result,
                                        ignored_keys=['stats'])

    def test_compute_node_get_all_by_host_with_distinct_hosts(self):
        # Create another service with another node
        service2 = self.service_dict.copy()
        service2['host'] = 'host2'
        db.service_create(self.ctxt, service2)
        compute_node_another_host = self.compute_node_dict.copy()
        compute_node_another_host['stats'] = jsonutils.dumps(self.stats)
        compute_node_another_host['hypervisor_hostname'] = 'node_2'
        compute_node_another_host['host'] = 'host2'

        node = db.compute_node_create(self.ctxt, compute_node_another_host)

        result = db.compute_node_get_all_by_host(self.ctxt, 'host1', False)
        self._assertEqualListsOfObjects([self.item], result)
        result = db.compute_node_get_all_by_host(self.ctxt, 'host2', False)
        self._assertEqualListsOfObjects([node], result)

    def test_compute_node_get_all_by_host_with_same_host(self):
        # Create another node on top of the same service
        compute_node_same_host = self.compute_node_dict.copy()
        compute_node_same_host['stats'] = jsonutils.dumps(self.stats)
        compute_node_same_host['hypervisor_hostname'] = 'node_3'

        node = db.compute_node_create(self.ctxt, compute_node_same_host)

        expected = [self.item, node]
        result = sorted(db.compute_node_get_all_by_host(
                        self.ctxt, 'host1', False),
                        key=lambda n: n['hypervisor_hostname'])
        self._assertEqualListsOfObjects(expected, result,
                                        ignored_keys=['stats'])

    def test_compute_node_get_all_by_host_not_found(self):
        self.assertRaises(exception.ComputeHostNotFound,
                          db.compute_node_get_all_by_host, self.ctxt, 'wrong')

    def test_compute_nodes_get_by_service_id_one_result(self):
        expected = [self.item]
        result = db.compute_nodes_get_by_service_id(
            self.ctxt, self.service['id'])
        self._assertEqualListsOfObjects(expected, result,
                                        ignored_keys=['stats'])

    def test_compute_nodes_get_by_service_id_multiple_results(self):
        # Create another node on top of the same service
        compute_node_same_host = self.compute_node_dict.copy()
        compute_node_same_host['stats'] = jsonutils.dumps(self.stats)
        compute_node_same_host['hypervisor_hostname'] = 'node_2'

        node = db.compute_node_create(self.ctxt, compute_node_same_host)

        expected = [self.item, node]
        result = sorted(db.compute_nodes_get_by_service_id(
                        self.ctxt, self.service['id']),
                        key=lambda n: n['hypervisor_hostname'])
        self._assertEqualListsOfObjects(expected, result,
                                        ignored_keys=['stats'])

    def test_compute_nodes_get_by_service_id_not_found(self):
        self.assertRaises(exception.ServiceNotFound,
                          db.compute_nodes_get_by_service_id, self.ctxt,
                          'fake')

    def test_compute_node_get_by_host_and_nodename(self):
        # Create another node on top of the same service
        compute_node_same_host = self.compute_node_dict.copy()
        compute_node_same_host['stats'] = jsonutils.dumps(self.stats)
        compute_node_same_host['hypervisor_hostname'] = 'node_2'

        node = db.compute_node_create(self.ctxt, compute_node_same_host)

        expected = node
        result = db.compute_node_get_by_host_and_nodename(
            self.ctxt, 'host1', 'node_2')

        self._assertEqualObjects(expected, result)

    def test_compute_node_get_by_host_and_nodename_not_found(self):
        self.assertRaises(exception.ComputeHostNotFound,
                          db.compute_node_get_by_host_and_nodename,
                          self.ctxt, 'host1', 'wrong')

    def test_compute_node_get(self):
        compute_node_id = self.item['id']
        node = db.compute_node_get(self.ctxt, compute_node_id)
        self._assertEqualObjects(self.compute_node_dict, node,
                ignored_keys=self._ignored_keys + ['stats', 'service'])
        new_stats = jsonutils.loads(node['stats'])
        self.assertEqual(self.stats, new_stats)

    def test_compute_node_update(self):
        compute_node_id = self.item['id']
        stats = jsonutils.loads(self.item['stats'])
        # change some values:
        stats['num_instances'] = 8
        stats['num_tribbles'] = 1
        values = {
            'vcpus': 4,
            'stats': jsonutils.dumps(stats),
        }
        item_updated = db.compute_node_update(self.ctxt, compute_node_id,
                                              values)
        self.assertEqual(4, item_updated['vcpus'])
        new_stats = jsonutils.loads(item_updated['stats'])
        self.assertEqual(stats, new_stats)

    def test_compute_node_delete(self):
        compute_node_id = self.item['id']
        db.compute_node_delete(self.ctxt, compute_node_id)
        nodes = db.compute_node_get_all(self.ctxt)
        self.assertEqual(len(nodes), 0)

    def test_compute_node_search_by_hypervisor(self):
        nodes_created = []
        new_service = copy.copy(self.service_dict)
        for i in range(3):
            # += keeps each service's binary/topic unique across iterations.
            new_service['binary'] += str(i)
            new_service['topic'] += str(i)
            service = db.service_create(self.ctxt, new_service)
            self.compute_node_dict['service_id'] = service['id']
            self.compute_node_dict['hypervisor_hostname'] = 'testhost' + str(i)
            self.compute_node_dict['stats'] = jsonutils.dumps(self.stats)
            node = db.compute_node_create(self.ctxt, self.compute_node_dict)
            nodes_created.append(node)
        nodes = db.compute_node_search_by_hypervisor(self.ctxt, 'host')
        self.assertEqual(3, len(nodes))
        self._assertEqualListsOfObjects(nodes_created, nodes,
                        ignored_keys=self._ignored_keys + ['stats', 'service'])

    def test_compute_node_statistics(self):
        stats = db.compute_node_statistics(self.ctxt)
        self.assertEqual(stats.pop('count'), 1)
        for k, v in stats.items():
            self.assertEqual(v, self.item[k])

    def test_compute_node_statistics_disabled_service(self):
        # Nodes of a disabled service must not be counted in the stats.
        serv = db.service_get_by_host_and_topic(
            self.ctxt, 'host1', CONF.compute_topic)
        db.service_update(self.ctxt, serv['id'], {'disabled': True})
        stats = db.compute_node_statistics(self.ctxt)
        self.assertEqual(stats.pop('count'), 0)

    def test_compute_node_statistics_with_old_service_id(self):
        # NOTE(sbauza): This test is only for checking backwards compatibility
        # with old versions of compute_nodes not providing host column.
        # This test could be removed once we are sure that all compute nodes
        # are populating the host field thanks to the ResourceTracker
        service2 = self.service_dict.copy()
        service2['host'] = 'host2'
        db_service2 = db.service_create(self.ctxt, service2)
        compute_node_old_host = self.compute_node_dict.copy()
        compute_node_old_host['stats'] = jsonutils.dumps(self.stats)
        compute_node_old_host['hypervisor_hostname'] = 'node_2'
        compute_node_old_host['service_id'] = db_service2['id']
        compute_node_old_host.pop('host')

        db.compute_node_create(self.ctxt, compute_node_old_host)
        stats = db.compute_node_statistics(self.ctxt)
        self.assertEqual(2, stats.pop('count'))

    def test_compute_node_statistics_with_other_service(self):
        # A service on a different (non-compute) topic/binary must not
        # affect the aggregated statistics.
        other_service = self.service_dict.copy()
        other_service['topic'] = 'fake-topic'
        other_service['binary'] = 'nova-fake'
        db.service_create(self.ctxt, other_service)

        stats = db.compute_node_statistics(self.ctxt)
        data = {'count': 1,
                'vcpus_used': 0,
                'local_gb_used': 0,
                'memory_mb': 1024,
                'current_workload': 0,
                'vcpus': 2,
                'running_vms': 0,
                'free_disk_gb': 2048,
                'disk_available_least': 100,
                'local_gb': 2048,
                'free_ram_mb': 1024,
                'memory_mb_used': 0}
        for key, value in six.iteritems(data):
            self.assertEqual(value, stats.pop(key))

    def test_compute_node_not_found(self):
        self.assertRaises(exception.ComputeHostNotFound, db.compute_node_get,
                          self.ctxt, 100500)

    def test_compute_node_update_always_updates_updated_at(self):
        item_updated = db.compute_node_update(self.ctxt,
                self.item['id'], {})
        self.assertNotEqual(self.item['updated_at'],
                            item_updated['updated_at'])

    def test_compute_node_update_override_updated_at(self):
        # Update the record once so updated_at is set.
        first = db.compute_node_update(self.ctxt, self.item['id'],
                                       {'free_ram_mb': '12'})
        self.assertIsNotNone(first['updated_at'])

        # Update a second time. Make sure that the updated_at value we send
        # is overridden.
        second = db.compute_node_update(self.ctxt, self.item['id'],
                                        {'updated_at': first.updated_at,
                                         'free_ram_mb': '13'})
        self.assertNotEqual(first['updated_at'], second['updated_at'])

    def test_service_destroy_with_compute_node(self):
        # Destroying the service cascades to its compute node.
        db.service_destroy(self.ctxt, self.service['id'])
        self.assertRaises(exception.ComputeHostNotFound,
                          db.compute_node_get, self.ctxt,
                          self.item['id'])

    def test_service_destroy_with_old_compute_node(self):
        # NOTE(sbauza): This test is only for checking backwards compatibility
        # with old versions of compute_nodes not providing host column.
        # This test could be removed once we are sure that all compute nodes
        # are populating the host field thanks to the ResourceTracker
        compute_node_old_host_dict = self.compute_node_dict.copy()
        compute_node_old_host_dict.pop('host')
        item_old = db.compute_node_create(self.ctxt,
                                          compute_node_old_host_dict)

        db.service_destroy(self.ctxt, self.service['id'])
        self.assertRaises(exception.ComputeHostNotFound,
                          db.compute_node_get, self.ctxt,
                          item_old['id'])
class ProviderFwRuleTestCase(test.TestCase, ModelsObjectComparatorMixin):
    """Tests for the provider firewall rule db API calls."""
    def setUp(self):
        # Create one rule per sample CIDR (both IPv4 and IPv6).
        super(ProviderFwRuleTestCase, self).setUp()
        self.ctxt = context.get_admin_context()
        self.values = self._get_rule_values()
        self.rules = [db.provider_fw_rule_create(self.ctxt, rule)
                      for rule in self.values]
    def _get_rule_values(self):
        # Build rule dicts with distinct protocol/port values per CIDR.
        cidr_samples = ['192.168.0.0/24', '10.1.2.3/32',
                        '2001:4f8:3:ba::/64',
                        '2001:4f8:3:ba:2e0:81ff:fe22:d1f1/128']
        values = []
        for i in range(len(cidr_samples)):
            rule = {}
            rule['protocol'] = 'foo' + str(i)
            rule['from_port'] = 9999 + i
            rule['to_port'] = 9898 + i
            rule['cidr'] = cidr_samples[i]
            values.append(rule)
        return values
    def test_provider_fw_rule_create(self):
        ignored_keys = ['id', 'deleted', 'deleted_at', 'created_at',
                        'updated_at']
        for i, rule in enumerate(self.values):
            self._assertEqualObjects(self.rules[i], rule,
                                     ignored_keys=ignored_keys)
    def test_provider_fw_rule_get_all(self):
        self._assertEqualListsOfObjects(self.rules,
                                        db.provider_fw_rule_get_all(self.ctxt))
    def test_provider_fw_rule_destroy(self):
        # Destroying every rule leaves the table empty.
        for rule in self.rules:
            db.provider_fw_rule_destroy(self.ctxt, rule.id)
        self.assertEqual([], db.provider_fw_rule_get_all(self.ctxt))
class CertificateTestCase(test.TestCase, ModelsObjectComparatorMixin):
    """Tests for the certificate db API calls."""
    def setUp(self):
        super(CertificateTestCase, self).setUp()
        self.ctxt = context.get_admin_context()
        self.created = self._certificates_create()
    def _get_certs_values(self):
        # Three cert dicts: 'user1'/'project1'/'filename1' ... up to 3,
        # so each cert has a distinct user and project.
        base_values = {
            'user_id': 'user',
            'project_id': 'project',
            'file_name': 'filename'
        }
        return [{k: v + str(x) for k, v in base_values.items()}
                for x in range(1, 4)]
    def _certificates_create(self):
        return [db.certificate_create(self.ctxt, cert)
                                      for cert in self._get_certs_values()]
    def test_certificate_create(self):
        ignored_keys = ['id', 'deleted', 'deleted_at', 'created_at',
                        'updated_at']
        for i, cert in enumerate(self._get_certs_values()):
            self._assertEqualObjects(self.created[i], cert,
                                     ignored_keys=ignored_keys)
    def test_certificate_get_all_by_project(self):
        cert = db.certificate_get_all_by_project(self.ctxt,
                                                 self.created[1].project_id)
        self._assertEqualObjects(self.created[1], cert[0])
    def test_certificate_get_all_by_user(self):
        cert = db.certificate_get_all_by_user(self.ctxt,
                                              self.created[1].user_id)
        self._assertEqualObjects(self.created[1], cert[0])
    def test_certificate_get_all_by_user_and_project(self):
        cert = db.certificate_get_all_by_user_and_project(self.ctxt,
                           self.created[1].user_id, self.created[1].project_id)
        self._assertEqualObjects(self.created[1], cert[0])
class ConsoleTestCase(test.TestCase, ModelsObjectComparatorMixin):
    """Tests for the console and console-pool db API calls."""
    def setUp(self):
        # Two console pools, one instance, and one console per pool —
        # both consoles belong to the same instance.
        super(ConsoleTestCase, self).setUp()
        self.ctxt = context.get_admin_context()
        pools_data = [
            {'address': '192.168.10.10',
             'username': 'user1',
             'password': 'passwd1',
             'console_type': 'type1',
             'public_hostname': 'public_host1',
             'host': 'host1',
             'compute_host': 'compute_host1',
            },
            {'address': '192.168.10.11',
             'username': 'user2',
             'password': 'passwd2',
             'console_type': 'type2',
             'public_hostname': 'public_host2',
             'host': 'host2',
             'compute_host': 'compute_host2',
            },
        ]
        self.console_pools = [db.console_pool_create(self.ctxt, val)
                              for val in pools_data]
        instance_uuid = uuidutils.generate_uuid()
        db.instance_create(self.ctxt, {'uuid': instance_uuid})
        self.console_data = [{'instance_name': 'name' + str(x),
                              'instance_uuid': instance_uuid,
                              'password': 'pass' + str(x),
                              'port': 7878 + x,
                              'pool_id': self.console_pools[x]['id']}
                             for x in range(len(pools_data))]
        self.consoles = [db.console_create(self.ctxt, val)
                         for val in self.console_data]
    def test_console_create(self):
        ignored_keys = ['id', 'deleted', 'deleted_at', 'created_at',
                        'updated_at']
        for console in self.consoles:
            self.assertIsNotNone(console['id'])
        self._assertEqualListsOfObjects(self.console_data, self.consoles,
                                        ignored_keys=ignored_keys)
    def test_console_get_by_id(self):
        console = self.consoles[0]
        console_get = db.console_get(self.ctxt, console['id'])
        self._assertEqualObjects(console, console_get,
                                 ignored_keys=['pool'])
    def test_console_get_by_id_uuid(self):
        # Same lookup but additionally scoped by instance uuid.
        console = self.consoles[0]
        console_get = db.console_get(self.ctxt, console['id'],
                                     console['instance_uuid'])
        self._assertEqualObjects(console, console_get,
                                 ignored_keys=['pool'])
    def test_console_get_by_pool_instance(self):
        console = self.consoles[0]
        console_get = db.console_get_by_pool_instance(self.ctxt,
                            console['pool_id'], console['instance_uuid'])
        self._assertEqualObjects(console, console_get,
                                 ignored_keys=['pool'])
    def test_console_get_all_by_instance(self):
        instance_uuid = self.consoles[0]['instance_uuid']
        consoles_get = db.console_get_all_by_instance(self.ctxt, instance_uuid)
        self._assertEqualListsOfObjects(self.consoles, consoles_get)
    def test_console_get_all_by_instance_with_pool(self):
        # With columns_to_join=['pool'] each console carries its pool row.
        instance_uuid = self.consoles[0]['instance_uuid']
        consoles_get = db.console_get_all_by_instance(self.ctxt, instance_uuid,
                                                      columns_to_join=['pool'])
        self._assertEqualListsOfObjects(self.consoles, consoles_get,
                                        ignored_keys=['pool'])
        self._assertEqualListsOfObjects([pool for pool in self.console_pools],
                                        [c['pool'] for c in consoles_get])
    def test_console_get_all_by_instance_empty(self):
        consoles_get = db.console_get_all_by_instance(self.ctxt,
                                                uuidutils.generate_uuid())
        self.assertEqual(consoles_get, [])
    def test_console_delete(self):
        console_id = self.consoles[0]['id']
        db.console_delete(self.ctxt, console_id)
        self.assertRaises(exception.ConsoleNotFound, db.console_get,
                          self.ctxt, console_id)
    def test_console_get_by_pool_instance_not_found(self):
        self.assertRaises(exception.ConsoleNotFoundInPoolForInstance,
                          db.console_get_by_pool_instance, self.ctxt,
                          self.consoles[0]['pool_id'],
                          uuidutils.generate_uuid())
    def test_console_get_not_found(self):
        self.assertRaises(exception.ConsoleNotFound, db.console_get,
                          self.ctxt, 100500)
    def test_console_get_not_found_instance(self):
        # Valid console id but a different instance uuid: a distinct error.
        self.assertRaises(exception.ConsoleNotFoundForInstance, db.console_get,
                          self.ctxt, self.consoles[0]['id'],
                          uuidutils.generate_uuid())
class CellTestCase(test.TestCase, ModelsObjectComparatorMixin):
    """Tests for the cells db API calls."""
    _ignored_keys = ['id', 'deleted', 'deleted_at', 'created_at', 'updated_at']
    def setUp(self):
        super(CellTestCase, self).setUp()
        self.ctxt = context.get_admin_context()
    def _get_cell_base_values(self):
        # A representative cell row covering str, float and bool columns.
        return {
            'name': 'myname',
            'api_url': 'apiurl',
            'transport_url': 'transporturl',
            'weight_offset': 0.5,
            'weight_scale': 1.5,
            'is_parent': True,
        }
    def _cell_value_modify(self, value, step):
        # Derive a distinct value per step, dispatching on type.
        # bool is checked before int on purpose: bool is an int subclass,
        # so the int branch would otherwise swallow booleans.
        if isinstance(value, str):
            return value + str(step)
        elif isinstance(value, float):
            return value + step + 0.6
        elif isinstance(value, bool):
            return bool(step % 2)
        elif isinstance(value, int):
            return value + step
    def _create_cells(self):
        # Create three cells with per-step-modified values and return the
        # value dicts used to create them.
        test_values = []
        for x in range(1, 4):
            modified_val = {k: self._cell_value_modify(v, x)
                      for k, v in self._get_cell_base_values().items()}
            db.cell_create(self.ctxt, modified_val)
            test_values.append(modified_val)
        return test_values
    def test_cell_create(self):
        cell = db.cell_create(self.ctxt, self._get_cell_base_values())
        self.assertIsNotNone(cell['id'])
        self._assertEqualObjects(cell, self._get_cell_base_values(),
                                 ignored_keys=self._ignored_keys)
    def test_cell_update(self):
        # Update every mutable column of an existing cell (looked up by name).
        db.cell_create(self.ctxt, self._get_cell_base_values())
        new_values = {
            'api_url': 'apiurl1',
            'transport_url': 'transporturl1',
            'weight_offset': 0.6,
            'weight_scale': 1.6,
            'is_parent': False,
        }
        test_cellname = self._get_cell_base_values()['name']
        updated_cell = db.cell_update(self.ctxt, test_cellname, new_values)
        self._assertEqualObjects(updated_cell, new_values,
                                 ignored_keys=self._ignored_keys + ['name'])
    def test_cell_delete(self):
        new_cells = self._create_cells()
        for cell in new_cells:
            test_cellname = cell['name']
            db.cell_delete(self.ctxt, test_cellname)
            self.assertRaises(exception.CellNotFound, db.cell_get, self.ctxt,
                              test_cellname)
    def test_cell_get(self):
        new_cells = self._create_cells()
        for cell in new_cells:
            cell_get = db.cell_get(self.ctxt, cell['name'])
            self._assertEqualObjects(cell_get, cell,
                                     ignored_keys=self._ignored_keys)
    def test_cell_get_all(self):
        new_cells = self._create_cells()
        cells = db.cell_get_all(self.ctxt)
        self.assertEqual(len(new_cells), len(cells))
        cells_byname = {newcell['name']: newcell
                        for newcell in new_cells}
        for cell in cells:
            self._assertEqualObjects(cell, cells_byname[cell['name']],
                                     self._ignored_keys)
    def test_cell_get_not_found(self):
        self._create_cells()
        self.assertRaises(exception.CellNotFound, db.cell_get, self.ctxt,
                          'cellnotinbase')
    def test_cell_update_not_found(self):
        self._create_cells()
        self.assertRaises(exception.CellNotFound, db.cell_update, self.ctxt,
                          'cellnotinbase', self._get_cell_base_values())
    def test_cell_create_exists(self):
        # Cell names are unique; a second create with the same name raises.
        db.cell_create(self.ctxt, self._get_cell_base_values())
        self.assertRaises(exception.CellExists, db.cell_create,
                          self.ctxt, self._get_cell_base_values())
class ConsolePoolTestCase(test.TestCase, ModelsObjectComparatorMixin):
    """Tests for the console_pool_* DB API calls."""
    def setUp(self):
        super(ConsolePoolTestCase, self).setUp()
        self.ctxt = context.get_admin_context()
        # Pools 2 and 3 deliberately share host '127.0.0.1' and
        # console_type 'type_2' so get_all_by_host_type can match both.
        self.test_console_pool_1 = {
            'address': '192.168.2.10',
            'username': 'user_1',
            'password': 'secret_123',
            'console_type': 'type_1',
            'public_hostname': 'public_hostname_123',
            'host': 'localhost',
            'compute_host': '127.0.0.1',
        }
        self.test_console_pool_2 = {
            'address': '192.168.2.11',
            'username': 'user_2',
            'password': 'secret_1234',
            'console_type': 'type_2',
            'public_hostname': 'public_hostname_1234',
            'host': '127.0.0.1',
            'compute_host': 'localhost',
        }
        self.test_console_pool_3 = {
            'address': '192.168.2.12',
            'username': 'user_3',
            'password': 'secret_12345',
            'console_type': 'type_2',
            'public_hostname': 'public_hostname_12345',
            'host': '127.0.0.1',
            'compute_host': '192.168.1.1',
        }
    def test_console_pool_create(self):
        """Creating a pool assigns an id and stores the given values."""
        console_pool = db.console_pool_create(
            self.ctxt, self.test_console_pool_1)
        self.assertIsNotNone(console_pool.get('id'))
        ignored_keys = ['deleted', 'created_at', 'updated_at',
                        'deleted_at', 'id']
        self._assertEqualObjects(
            console_pool, self.test_console_pool_1, ignored_keys)
    def test_console_pool_create_duplicate(self):
        """A second create with identical values raises ConsolePoolExists."""
        db.console_pool_create(self.ctxt, self.test_console_pool_1)
        self.assertRaises(exception.ConsolePoolExists, db.console_pool_create,
                          self.ctxt, self.test_console_pool_1)
    def test_console_pool_get_by_host_type(self):
        """Lookup by (compute_host, host, console_type) returns the pool."""
        params = [
            self.test_console_pool_1,
            self.test_console_pool_2,
        ]
        for p in params:
            db.console_pool_create(self.ctxt, p)
        ignored_keys = ['deleted', 'created_at', 'updated_at',
                        'deleted_at', 'id', 'consoles']
        cp = self.test_console_pool_1
        db_cp = db.console_pool_get_by_host_type(
            self.ctxt, cp['compute_host'], cp['host'], cp['console_type']
        )
        self._assertEqualObjects(cp, db_cp, ignored_keys)
    def test_console_pool_get_by_host_type_no_results(self):
        """An unmatched lookup raises ConsolePoolNotFoundForHostType."""
        # Fixed method-name typo ('no_resuls' -> 'no_results'); discovery
        # keys on the 'test_' prefix, so the rename is safe.
        self.assertRaises(
            exception.ConsolePoolNotFoundForHostType,
            db.console_pool_get_by_host_type, self.ctxt, 'compute_host',
            'host', 'console_type')
    def test_console_pool_get_all_by_host_type(self):
        """All pools matching (host, console_type) are returned."""
        params = [
            self.test_console_pool_1,
            self.test_console_pool_2,
            self.test_console_pool_3,
        ]
        for p in params:
            db.console_pool_create(self.ctxt, p)
        ignored_keys = ['deleted', 'created_at', 'updated_at',
                        'deleted_at', 'id', 'consoles']
        cp = self.test_console_pool_2
        db_cp = db.console_pool_get_all_by_host_type(
            self.ctxt, cp['host'], cp['console_type'])
        self._assertEqualListsOfObjects(
            db_cp, [self.test_console_pool_2, self.test_console_pool_3],
            ignored_keys)
    def test_console_pool_get_all_by_host_type_no_results(self):
        """An unmatched (host, console_type) pair yields an empty list."""
        res = db.console_pool_get_all_by_host_type(
            self.ctxt, 'cp_host', 'cp_console_type')
        self.assertEqual([], res)
class DnsdomainTestCase(test.TestCase):
    """Tests for the dnsdomain_* DB API calls."""
    def setUp(self):
        super(DnsdomainTestCase, self).setUp()
        self.ctxt = context.get_admin_context()
        self.domain = 'test.domain'
        self.testzone = 'testzone'
        self.project = 'fake'
    def test_dnsdomain_register_for_zone(self):
        """Registering for a zone stores the domain with 'private' scope."""
        db.dnsdomain_register_for_zone(self.ctxt, self.domain, self.testzone)
        stored = db.dnsdomain_get(self.ctxt, self.domain)
        self.assertEqual(stored['domain'], self.domain)
        self.assertEqual(stored['availability_zone'], self.testzone)
        self.assertEqual(stored['scope'], 'private')
    def test_dnsdomain_register_for_project(self):
        """Registering for a project stores the domain with 'public' scope."""
        db.dnsdomain_register_for_project(self.ctxt, self.domain, self.project)
        stored = db.dnsdomain_get(self.ctxt, self.domain)
        self.assertEqual(stored['domain'], self.domain)
        self.assertEqual(stored['project_id'], self.project)
        self.assertEqual(stored['scope'], 'public')
    def test_dnsdomain_unregister(self):
        """Unregistered domains are no longer returned by dnsdomain_get."""
        db.dnsdomain_register_for_zone(self.ctxt, self.domain, self.testzone)
        db.dnsdomain_unregister(self.ctxt, self.domain)
        self.assertIsNone(db.dnsdomain_get(self.ctxt, self.domain))
    def test_dnsdomain_get_all(self):
        """dnsdomain_get_all returns every registered domain."""
        expected = ['test.domain.one', 'test.domain.two']
        for name in expected:
            db.dnsdomain_register_for_zone(self.ctxt, name, 'zone')
        actual = sorted(entry.domain for entry in
                        db.dnsdomain_get_all(self.ctxt))
        self.assertEqual(sorted(expected), actual)
class BwUsageTestCase(test.TestCase, ModelsObjectComparatorMixin):
    """Tests for the bw_usage_* (per-VIF bandwidth accounting) DB API."""
    # Bookkeeping columns filled in by the DB layer; not compared by tests.
    _ignored_keys = ['id', 'deleted', 'deleted_at', 'created_at', 'updated_at']
    def setUp(self):
        super(BwUsageTestCase, self).setUp()
        self.ctxt = context.get_admin_context()
        # Freeze time so expectations built from utcnow() stay stable.
        self.useFixture(test.TimeOverride())
    def test_bw_usage_get_by_uuids(self):
        """Usage rows are retrievable by instance-uuid list + start period."""
        now = timeutils.utcnow()
        start_period = now - datetime.timedelta(seconds=10)
        start_period_str = start_period.isoformat()
        # uuid3 gets an explicitly supplied (older) last_refreshed value.
        uuid3_refreshed = now - datetime.timedelta(seconds=5)
        uuid3_refreshed_str = uuid3_refreshed.isoformat()
        expected_bw_usages = {
            'fake_uuid1': {'uuid': 'fake_uuid1',
                           'mac': 'fake_mac1',
                           'start_period': start_period,
                           'bw_in': 100,
                           'bw_out': 200,
                           'last_ctr_in': 12345,
                           'last_ctr_out': 67890,
                           'last_refreshed': now},
            'fake_uuid2': {'uuid': 'fake_uuid2',
                           'mac': 'fake_mac2',
                           'start_period': start_period,
                           'bw_in': 200,
                           'bw_out': 300,
                           'last_ctr_in': 22345,
                           'last_ctr_out': 77890,
                           'last_refreshed': now},
            'fake_uuid3': {'uuid': 'fake_uuid3',
                           'mac': 'fake_mac3',
                           'start_period': start_period,
                           'bw_in': 400,
                           'bw_out': 500,
                           'last_ctr_in': 32345,
                           'last_ctr_out': 87890,
                           'last_refreshed': uuid3_refreshed}
        }
        bw_usages = db.bw_usage_get_by_uuids(self.ctxt,
                ['fake_uuid1', 'fake_uuid2'], start_period_str)
        # No matches
        self.assertEqual(len(bw_usages), 0)
        # Add 3 entries
        db.bw_usage_update(self.ctxt, 'fake_uuid1',
                'fake_mac1', start_period_str,
                100, 200, 12345, 67890)
        db.bw_usage_update(self.ctxt, 'fake_uuid2',
                'fake_mac2', start_period_str,
                100, 200, 42, 42)
        # Test explicit refreshed time
        db.bw_usage_update(self.ctxt, 'fake_uuid3',
                'fake_mac3', start_period_str,
                400, 500, 32345, 87890,
                last_refreshed=uuid3_refreshed_str)
        # Update 2nd entry
        db.bw_usage_update(self.ctxt, 'fake_uuid2',
                'fake_mac2', start_period_str,
                200, 300, 22345, 77890)
        bw_usages = db.bw_usage_get_by_uuids(self.ctxt,
                ['fake_uuid1', 'fake_uuid2', 'fake_uuid3'], start_period_str)
        self.assertEqual(len(bw_usages), 3)
        for usage in bw_usages:
            self._assertEqualObjects(expected_bw_usages[usage['uuid']], usage,
                                     ignored_keys=self._ignored_keys)
    def _test_bw_usage_update(self, **expected_bw_usage):
        # Helper: upsert a usage row, then assert both the returned row
        # and a subsequent bw_usage_get match the expected values.
        bw_usage = db.bw_usage_update(self.ctxt, **expected_bw_usage)
        self._assertEqualObjects(expected_bw_usage, bw_usage,
                                 ignored_keys=self._ignored_keys)
        uuid = expected_bw_usage['uuid']
        mac = expected_bw_usage['mac']
        start_period = expected_bw_usage['start_period']
        bw_usage = db.bw_usage_get(self.ctxt, uuid, start_period, mac)
        self._assertEqualObjects(expected_bw_usage, bw_usage,
                                 ignored_keys=self._ignored_keys)
    def test_bw_usage_get(self):
        """bw_usage_get returns None before the row exists, the row after."""
        now = timeutils.utcnow()
        start_period = now - datetime.timedelta(seconds=10)
        start_period_str = start_period.isoformat()
        expected_bw_usage = {'uuid': 'fake_uuid1',
                             'mac': 'fake_mac1',
                             'start_period': start_period,
                             'bw_in': 100,
                             'bw_out': 200,
                             'last_ctr_in': 12345,
                             'last_ctr_out': 67890,
                             'last_refreshed': now}
        bw_usage = db.bw_usage_get(self.ctxt, 'fake_uuid1', start_period_str,
                                   'fake_mac1')
        self.assertIsNone(bw_usage)
        self._test_bw_usage_update(**expected_bw_usage)
    def test_bw_usage_update_new(self):
        """Updating a non-existent row creates it (upsert semantics)."""
        now = timeutils.utcnow()
        start_period = now - datetime.timedelta(seconds=10)
        expected_bw_usage = {'uuid': 'fake_uuid1',
                             'mac': 'fake_mac1',
                             'start_period': start_period,
                             'bw_in': 100,
                             'bw_out': 200,
                             'last_ctr_in': 12345,
                             'last_ctr_out': 67890,
                             'last_refreshed': now}
        self._test_bw_usage_update(**expected_bw_usage)
    def test_bw_usage_update_existing(self):
        """A second update for the same key overwrites the counters."""
        now = timeutils.utcnow()
        start_period = now - datetime.timedelta(seconds=10)
        expected_bw_usage = {'uuid': 'fake_uuid1',
                             'mac': 'fake_mac1',
                             'start_period': start_period,
                             'bw_in': 100,
                             'bw_out': 200,
                             'last_ctr_in': 12345,
                             'last_ctr_out': 67890,
                             'last_refreshed': now}
        self._test_bw_usage_update(**expected_bw_usage)
        expected_bw_usage['bw_in'] = 300
        expected_bw_usage['bw_out'] = 400
        expected_bw_usage['last_ctr_in'] = 23456
        expected_bw_usage['last_ctr_out'] = 78901
        self._test_bw_usage_update(**expected_bw_usage)
class Ec2TestCase(test.TestCase):
    """Tests for the EC2 <-> OpenStack id-mapping DB API calls."""
    def setUp(self):
        super(Ec2TestCase, self).setUp()
        self.ctxt = context.RequestContext('fake_user', 'fake_project')
    def test_ec2_ids_not_found_are_printable(self):
        """NotFound errors embed the offending id in their message."""
        def check_exc_format(method, value):
            try:
                method(self.ctxt, value)
            except exception.NotFound as exc:
                self.assertIn(six.text_type(value), six.text_type(exc))
        check_exc_format(db.get_instance_uuid_by_ec2_id, 123456)
        check_exc_format(db.ec2_snapshot_get_by_ec2_id, 123456)
        check_exc_format(db.ec2_snapshot_get_by_uuid, 'fake')
    def test_ec2_volume_create(self):
        """Creating a volume mapping assigns an id and stores the uuid."""
        vol = db.ec2_volume_create(self.ctxt, 'fake-uuid')
        self.assertIsNotNone(vol['id'])
        self.assertEqual(vol['uuid'], 'fake-uuid')
    def test_ec2_volume_get_by_id(self):
        vol = db.ec2_volume_create(self.ctxt, 'fake-uuid')
        vol2 = db.ec2_volume_get_by_id(self.ctxt, vol['id'])
        self.assertEqual(vol2['uuid'], vol['uuid'])
    def test_ec2_volume_get_by_uuid(self):
        vol = db.ec2_volume_create(self.ctxt, 'fake-uuid')
        vol2 = db.ec2_volume_get_by_uuid(self.ctxt, vol['uuid'])
        self.assertEqual(vol2['id'], vol['id'])
    def test_ec2_snapshot_create(self):
        """Creating a snapshot mapping assigns an id and stores the uuid."""
        snap = db.ec2_snapshot_create(self.ctxt, 'fake-uuid')
        self.assertIsNotNone(snap['id'])
        self.assertEqual(snap['uuid'], 'fake-uuid')
    def test_ec2_snapshot_get_by_ec2_id(self):
        snap = db.ec2_snapshot_create(self.ctxt, 'fake-uuid')
        snap2 = db.ec2_snapshot_get_by_ec2_id(self.ctxt, snap['id'])
        self.assertEqual(snap2['uuid'], 'fake-uuid')
    def test_ec2_snapshot_get_by_uuid(self):
        snap = db.ec2_snapshot_create(self.ctxt, 'fake-uuid')
        snap2 = db.ec2_snapshot_get_by_uuid(self.ctxt, 'fake-uuid')
        self.assertEqual(snap['id'], snap2['id'])
    def test_ec2_snapshot_get_by_ec2_id_not_found(self):
        self.assertRaises(exception.SnapshotNotFound,
                          db.ec2_snapshot_get_by_ec2_id,
                          self.ctxt, 123456)
    def test_ec2_snapshot_get_by_uuid_not_found(self):
        self.assertRaises(exception.SnapshotNotFound,
                          db.ec2_snapshot_get_by_uuid,
                          self.ctxt, 'fake-uuid')
    def test_ec2_instance_create(self):
        """Creating an instance mapping assigns an id and stores the uuid."""
        inst = db.ec2_instance_create(self.ctxt, 'fake-uuid')
        self.assertIsNotNone(inst['id'])
        self.assertEqual(inst['uuid'], 'fake-uuid')
    def test_ec2_instance_get_by_uuid(self):
        inst = db.ec2_instance_create(self.ctxt, 'fake-uuid')
        inst2 = db.ec2_instance_get_by_uuid(self.ctxt, 'fake-uuid')
        self.assertEqual(inst['id'], inst2['id'])
    def test_ec2_instance_get_by_id(self):
        inst = db.ec2_instance_create(self.ctxt, 'fake-uuid')
        inst2 = db.ec2_instance_get_by_id(self.ctxt, inst['id'])
        self.assertEqual(inst['id'], inst2['id'])
    def test_ec2_instance_get_by_uuid_not_found(self):
        self.assertRaises(exception.InstanceNotFound,
                          db.ec2_instance_get_by_uuid,
                          self.ctxt, 'uuid-not-present')
    def test_ec2_instance_get_by_id_not_found(self):
        # BUG FIX: this test previously called ec2_instance_get_by_uuid,
        # so the by-id not-found path was never actually exercised.
        self.assertRaises(exception.InstanceNotFound,
                          db.ec2_instance_get_by_id,
                          self.ctxt, 12345)
    def test_get_instance_uuid_by_ec2_id(self):
        inst = db.ec2_instance_create(self.ctxt, 'fake-uuid')
        inst_uuid = db.get_instance_uuid_by_ec2_id(self.ctxt, inst['id'])
        self.assertEqual(inst_uuid, 'fake-uuid')
    def test_get_instance_uuid_by_ec2_id_not_found(self):
        self.assertRaises(exception.InstanceNotFound,
                          db.get_instance_uuid_by_ec2_id,
                          self.ctxt, 100500)
class ArchiveTestCase(test.TestCase, ModelsObjectComparatorMixin):
    """Tests archiving of soft-deleted rows into the shadow_* tables."""
    def setUp(self):
        super(ArchiveTestCase, self).setUp()
        self.engine = get_engine()
        self.conn = self.engine.connect()
        # Main-table / shadow-table pairs used by the individual tests.
        self.instance_id_mappings = models.InstanceIdMapping.__table__
        self.shadow_instance_id_mappings = sqlalchemyutils.get_table(
            self.engine, "shadow_instance_id_mappings")
        self.dns_domains = models.DNSDomain.__table__
        self.shadow_dns_domains = sqlalchemyutils.get_table(
            self.engine, "shadow_dns_domains")
        self.consoles = models.Console.__table__
        self.shadow_consoles = sqlalchemyutils.get_table(
            self.engine, "shadow_consoles")
        self.console_pools = models.ConsolePool.__table__
        self.shadow_console_pools = sqlalchemyutils.get_table(
            self.engine, "shadow_console_pools")
        self.instances = models.Instance.__table__
        self.shadow_instances = sqlalchemyutils.get_table(
            self.engine, "shadow_instances")
        # Six random uuids; tests typically mark the first four as deleted.
        self.uuidstrs = []
        for _ in range(6):
            self.uuidstrs.append(stdlib_uuid.uuid4().hex)
    def _assert_shadow_tables_empty_except(self, *exceptions):
        """Ensure shadow tables are empty
        This method ensures that all the shadow tables in the schema,
        except for specificially named exceptions, are empty. This
        makes sure that archiving isn't moving unexpected content.
        """
        metadata = MetaData(bind=self.engine)
        metadata.reflect()
        for table in metadata.tables:
            if table.startswith("shadow_") and table not in exceptions:
                rows = self.conn.execute("select * from %s" % table).fetchall()
                self.assertEqual(rows, [], "Table %s not empty" % table)
    def test_shadow_tables(self):
        """Every main table has a matching shadow table (known exceptions
        aside), and all shadow tables start out empty."""
        metadata = MetaData(bind=self.engine)
        metadata.reflect()
        for table_name in metadata.tables:
            # NOTE(rpodolyaka): migration 209 introduced a few new tables,
            #                   which don't have shadow tables and it's
            #                   completely OK, so we should skip them here
            if table_name.startswith("dump_"):
                continue
            # NOTE(snikitin): migration 266 introduced a new table 'tags',
            #                 which have no shadow table and it's
            #                 completely OK, so we should skip it here
            if table_name == 'tags':
                continue
            if table_name.startswith("shadow_"):
                self.assertIn(table_name[7:], metadata.tables)
                continue
            self.assertTrue(db_utils.check_shadow_table(self.engine,
                                                        table_name))
        self._assert_shadow_tables_empty_except()
    def test_archive_deleted_rows(self):
        """Rows flagged deleted move to the shadow table, max_rows at a
        time, and archiving stops once no deleted rows remain."""
        # Add 6 rows to table
        for uuidstr in self.uuidstrs:
            ins_stmt = self.instance_id_mappings.insert().values(uuid=uuidstr)
            self.conn.execute(ins_stmt)
        # Set 4 to deleted
        update_statement = self.instance_id_mappings.update().\
                where(self.instance_id_mappings.c.uuid.in_(self.uuidstrs[:4]))\
                .values(deleted=1)
        self.conn.execute(update_statement)
        qiim = sql.select([self.instance_id_mappings]).where(self.
                instance_id_mappings.c.uuid.in_(self.uuidstrs))
        rows = self.conn.execute(qiim).fetchall()
        # Verify we have 6 in main
        self.assertEqual(len(rows), 6)
        qsiim = sql.select([self.shadow_instance_id_mappings]).\
                where(self.shadow_instance_id_mappings.c.uuid.in_(
                    self.uuidstrs))
        rows = self.conn.execute(qsiim).fetchall()
        # Verify we have 0 in shadow
        self.assertEqual(len(rows), 0)
        # Archive 2 rows
        results = db.archive_deleted_rows(max_rows=2)
        expected = dict(instance_id_mappings=2)
        self._assertEqualObjects(expected, results)
        rows = self.conn.execute(qiim).fetchall()
        # Verify we have 4 left in main
        self.assertEqual(len(rows), 4)
        rows = self.conn.execute(qsiim).fetchall()
        # Verify we have 2 in shadow
        self.assertEqual(len(rows), 2)
        # Archive 2 more rows
        results = db.archive_deleted_rows(max_rows=2)
        expected = dict(instance_id_mappings=2)
        self._assertEqualObjects(expected, results)
        rows = self.conn.execute(qiim).fetchall()
        # Verify we have 2 left in main
        self.assertEqual(len(rows), 2)
        rows = self.conn.execute(qsiim).fetchall()
        # Verify we have 4 in shadow
        self.assertEqual(len(rows), 4)
        # Try to archive more, but there are no deleted rows left.
        results = db.archive_deleted_rows(max_rows=2)
        expected = dict()
        self._assertEqualObjects(expected, results)
        rows = self.conn.execute(qiim).fetchall()
        # Verify we still have 2 left in main
        self.assertEqual(len(rows), 2)
        rows = self.conn.execute(qsiim).fetchall()
        # Verify we still have 4 in shadow
        self.assertEqual(len(rows), 4)
        # Ensure only deleted rows were deleted
        self._assert_shadow_tables_empty_except(
            'shadow_instance_id_mappings')
    def test_archive_deleted_rows_for_every_uuid_table(self):
        """Exercise the per-table archiver on every model table that has
        a uuid column (others are skipped by the helper)."""
        tablenames = []
        for model_class in six.itervalues(models.__dict__):
            if hasattr(model_class, "__tablename__"):
                tablenames.append(model_class.__tablename__)
        tablenames.sort()
        for tablename in tablenames:
            self._test_archive_deleted_rows_for_one_uuid_table(tablename)
    def _test_archive_deleted_rows_for_one_uuid_table(self, tablename):
        """:returns: 0 on success, 1 if no uuid column, 2 if insert failed."""
        main_table = sqlalchemyutils.get_table(self.engine, tablename)
        if not hasattr(main_table.c, "uuid"):
            # Not a uuid table, so skip it.
            return 1
        shadow_table = sqlalchemyutils.get_table(
            self.engine, "shadow_" + tablename)
        # Add 6 rows to table
        for uuidstr in self.uuidstrs:
            ins_stmt = main_table.insert().values(uuid=uuidstr)
            try:
                self.conn.execute(ins_stmt)
            except (db_exc.DBError, OperationalError):
                # This table has constraints that require a table-specific
                # insert, so skip it.
                return 2
        # Set 4 to deleted
        update_statement = main_table.update().\
                where(main_table.c.uuid.in_(self.uuidstrs[:4]))\
                .values(deleted=1)
        self.conn.execute(update_statement)
        qmt = sql.select([main_table]).where(main_table.c.uuid.in_(
                self.uuidstrs))
        rows = self.conn.execute(qmt).fetchall()
        # Verify we have 6 in main
        self.assertEqual(len(rows), 6)
        qst = sql.select([shadow_table]).\
                where(shadow_table.c.uuid.in_(self.uuidstrs))
        rows = self.conn.execute(qst).fetchall()
        # Verify we have 0 in shadow
        self.assertEqual(len(rows), 0)
        # Archive 2 rows
        sqlalchemy_api._archive_deleted_rows_for_table(tablename, max_rows=2)
        # Verify we have 4 left in main
        rows = self.conn.execute(qmt).fetchall()
        self.assertEqual(len(rows), 4)
        # Verify we have 2 in shadow
        rows = self.conn.execute(qst).fetchall()
        self.assertEqual(len(rows), 2)
        # Archive 2 more rows
        sqlalchemy_api._archive_deleted_rows_for_table(tablename, max_rows=2)
        # Verify we have 2 left in main
        rows = self.conn.execute(qmt).fetchall()
        self.assertEqual(len(rows), 2)
        # Verify we have 4 in shadow
        rows = self.conn.execute(qst).fetchall()
        self.assertEqual(len(rows), 4)
        # Try to archive more, but there are no deleted rows left.
        sqlalchemy_api._archive_deleted_rows_for_table(tablename, max_rows=2)
        # Verify we still have 2 left in main
        rows = self.conn.execute(qmt).fetchall()
        self.assertEqual(len(rows), 2)
        # Verify we still have 4 in shadow
        rows = self.conn.execute(qst).fetchall()
        self.assertEqual(len(rows), 4)
        return 0
    def test_archive_deleted_rows_no_id_column(self):
        """Archiving works for tables (dns_domains) with no 'id' column."""
        uuidstr0 = self.uuidstrs[0]
        ins_stmt = self.dns_domains.insert().values(domain=uuidstr0)
        self.conn.execute(ins_stmt)
        update_statement = self.dns_domains.update().\
                where(self.dns_domains.c.domain == uuidstr0).\
                values(deleted=True)
        self.conn.execute(update_statement)
        qdd = sql.select([self.dns_domains], self.dns_domains.c.domain ==
                            uuidstr0)
        rows = self.conn.execute(qdd).fetchall()
        self.assertEqual(len(rows), 1)
        qsdd = sql.select([self.shadow_dns_domains],
                        self.shadow_dns_domains.c.domain == uuidstr0)
        rows = self.conn.execute(qsdd).fetchall()
        self.assertEqual(len(rows), 0)
        db.archive_deleted_rows(max_rows=1)
        rows = self.conn.execute(qdd).fetchall()
        self.assertEqual(len(rows), 0)
        rows = self.conn.execute(qsdd).fetchall()
        self.assertEqual(len(rows), 1)
        self._assert_shadow_tables_empty_except(
            'shadow_dns_domains',
        )
    def test_archive_deleted_rows_fk_constraint(self):
        """Archiving respects FK ordering: child rows (consoles) must be
        archived before their parent rows (console_pools)."""
        # consoles.pool_id depends on console_pools.id
        # SQLite doesn't enforce foreign key constraints without a pragma.
        dialect = self.engine.url.get_dialect()
        if dialect == sqlite.dialect:
            # We're seeing issues with foreign key support in SQLite 3.6.20
            # SQLAlchemy doesn't support it at all with < SQLite 3.6.19
            # It works fine in SQLite 3.7.
            # So return early to skip this test if running SQLite < 3.7
            import sqlite3
            tup = sqlite3.sqlite_version_info
            if tup[0] < 3 or (tup[0] == 3 and tup[1] < 7):
                self.skipTest(
                    'sqlite version too old for reliable SQLA foreign_keys')
            self.conn.execute("PRAGMA foreign_keys = ON")
        ins_stmt = self.console_pools.insert().values(deleted=1)
        result = self.conn.execute(ins_stmt)
        id1 = result.inserted_primary_key[0]
        ins_stmt = self.consoles.insert().values(deleted=1,
                                                 pool_id=id1)
        result = self.conn.execute(ins_stmt)
        # NOTE(review): the console's primary key is not needed below;
        # this expression result is intentionally discarded.
        result.inserted_primary_key[0]
        # The first try to archive console_pools should fail, due to FK.
        num = sqlalchemy_api._archive_deleted_rows_for_table("console_pools",
                                                             max_rows=None)
        self.assertEqual(num, 0)
        # Then archiving consoles should work.
        num = sqlalchemy_api._archive_deleted_rows_for_table("consoles",
                                                             max_rows=None)
        self.assertEqual(num, 1)
        # Then archiving console_pools should work.
        num = sqlalchemy_api._archive_deleted_rows_for_table("console_pools",
                                                             max_rows=None)
        self.assertEqual(num, 1)
        self._assert_shadow_tables_empty_except(
            'shadow_console_pools',
            'shadow_consoles'
        )
    def test_archive_deleted_rows_2_tables(self):
        """max_rows acts as a combined budget shared across tables."""
        # Add 6 rows to each table
        for uuidstr in self.uuidstrs:
            ins_stmt = self.instance_id_mappings.insert().values(uuid=uuidstr)
            self.conn.execute(ins_stmt)
            ins_stmt2 = self.instances.insert().values(uuid=uuidstr)
            self.conn.execute(ins_stmt2)
        # Set 4 of each to deleted
        update_statement = self.instance_id_mappings.update().\
                where(self.instance_id_mappings.c.uuid.in_(self.uuidstrs[:4]))\
                .values(deleted=1)
        self.conn.execute(update_statement)
        update_statement2 = self.instances.update().\
                where(self.instances.c.uuid.in_(self.uuidstrs[:4]))\
                .values(deleted=1)
        self.conn.execute(update_statement2)
        # Verify we have 6 in each main table
        qiim = sql.select([self.instance_id_mappings]).where(
            self.instance_id_mappings.c.uuid.in_(self.uuidstrs))
        rows = self.conn.execute(qiim).fetchall()
        self.assertEqual(len(rows), 6)
        qi = sql.select([self.instances]).where(self.instances.c.uuid.in_(
            self.uuidstrs))
        rows = self.conn.execute(qi).fetchall()
        self.assertEqual(len(rows), 6)
        # Verify we have 0 in each shadow table
        qsiim = sql.select([self.shadow_instance_id_mappings]).\
                where(self.shadow_instance_id_mappings.c.uuid.in_(
                    self.uuidstrs))
        rows = self.conn.execute(qsiim).fetchall()
        self.assertEqual(len(rows), 0)
        qsi = sql.select([self.shadow_instances]).\
                where(self.shadow_instances.c.uuid.in_(self.uuidstrs))
        rows = self.conn.execute(qsi).fetchall()
        self.assertEqual(len(rows), 0)
        # Archive 7 rows, which should be 4 in one table and 3 in the other.
        db.archive_deleted_rows(max_rows=7)
        # Verify we have 5 left in the two main tables combined
        iim_rows = self.conn.execute(qiim).fetchall()
        i_rows = self.conn.execute(qi).fetchall()
        self.assertEqual(len(iim_rows) + len(i_rows), 5)
        # Verify we have 7 in the two shadow tables combined.
        siim_rows = self.conn.execute(qsiim).fetchall()
        si_rows = self.conn.execute(qsi).fetchall()
        self.assertEqual(len(siim_rows) + len(si_rows), 7)
        # Archive the remaining deleted rows.
        db.archive_deleted_rows(max_rows=1)
        # Verify we have 4 total left in both main tables.
        iim_rows = self.conn.execute(qiim).fetchall()
        i_rows = self.conn.execute(qi).fetchall()
        self.assertEqual(len(iim_rows) + len(i_rows), 4)
        # Verify we have 8 in shadow
        siim_rows = self.conn.execute(qsiim).fetchall()
        si_rows = self.conn.execute(qsi).fetchall()
        self.assertEqual(len(siim_rows) + len(si_rows), 8)
        # Try to archive more, but there are no deleted rows left.
        db.archive_deleted_rows(max_rows=500)
        # Verify we have 4 total left in both main tables.
        iim_rows = self.conn.execute(qiim).fetchall()
        i_rows = self.conn.execute(qi).fetchall()
        self.assertEqual(len(iim_rows) + len(i_rows), 4)
        # Verify we have 8 in shadow
        siim_rows = self.conn.execute(qsiim).fetchall()
        si_rows = self.conn.execute(qsi).fetchall()
        self.assertEqual(len(siim_rows) + len(si_rows), 8)
        self._assert_shadow_tables_empty_except(
            'shadow_instances',
            'shadow_instance_id_mappings'
        )
class InstanceGroupDBApiTestCase(test.TestCase, ModelsObjectComparatorMixin):
    """Tests for the instance_group_* DB API calls."""
    def setUp(self):
        super(InstanceGroupDBApiTestCase, self).setUp()
        self.user_id = 'fake_user'
        self.project_id = 'fake_project'
        # A second user/project pair so cross-tenant visibility (and admin
        # override) can be tested.
        self.new_user_id = 'new_user_id'
        self.new_project_id = 'new_project_id'
        self.context = context.RequestContext(self.user_id, self.project_id)
        self.new_context = context.RequestContext(self.new_user_id,
                                                  self.new_project_id)
    def _get_default_values(self):
        # Minimal valid group payload owned by self.context's tenant.
        return {'name': 'fake_name',
                'user_id': self.user_id,
                'project_id': self.project_id}
    def _get_new_default_values(self):
        # Same shape, but owned by the second (new_context) tenant.
        return {'name': 'fake_new_name',
                'user_id': self.new_user_id,
                'project_id': self.new_project_id}
    def _create_instance_group(self, context, values, policies=None,
                               members=None):
        return db.instance_group_create(context, values, policies=policies,
                                        members=members)
    def test_instance_group_create_no_key(self):
        """Creating without a uuid generates one automatically."""
        values = self._get_default_values()
        result = self._create_instance_group(self.context, values)
        ignored_keys = ['id', 'uuid', 'deleted', 'deleted_at', 'updated_at',
                        'created_at']
        self._assertEqualObjects(result, values, ignored_keys)
        self.assertTrue(uuidutils.is_uuid_like(result['uuid']))
    def test_instance_group_create_with_key(self):
        """A caller-supplied uuid is stored as given."""
        values = self._get_default_values()
        values['uuid'] = 'fake_id'
        result = self._create_instance_group(self.context, values)
        ignored_keys = ['id', 'deleted', 'deleted_at', 'updated_at',
                        'created_at']
        self._assertEqualObjects(result, values, ignored_keys)
    def test_instance_group_create_with_same_key(self):
        """A duplicate uuid raises InstanceGroupIdExists."""
        values = self._get_default_values()
        values['uuid'] = 'fake_id'
        self._create_instance_group(self.context, values)
        self.assertRaises(exception.InstanceGroupIdExists,
                          self._create_instance_group, self.context, values)
    def test_instance_group_get(self):
        values = self._get_default_values()
        result1 = self._create_instance_group(self.context, values)
        result2 = db.instance_group_get(self.context, result1['uuid'])
        self._assertEqualObjects(result1, result2)
    def test_instance_group_update_simple(self):
        values = self._get_default_values()
        result1 = self._create_instance_group(self.context, values)
        values = {'name': 'new_name'}
        db.instance_group_update(self.context, result1['uuid'],
                                 values)
        result2 = db.instance_group_get(self.context, result1['uuid'])
        self.assertEqual(result1['uuid'], result2['uuid'])
        ignored_keys = ['id', 'uuid', 'deleted', 'deleted_at', 'updated_at',
                        'created_at', 'project_id', 'user_id']
        self._assertEqualObjects(result2, values, ignored_keys)
    def test_instance_group_delete(self):
        """A deleted group cannot be deleted a second time."""
        values = self._get_default_values()
        result = self._create_instance_group(self.context, values)
        db.instance_group_delete(self.context, result['uuid'])
        self.assertRaises(exception.InstanceGroupNotFound,
                          db.instance_group_delete, self.context,
                          result['uuid'])
    def test_instance_group_get_nonexistent(self):
        self.assertRaises(exception.InstanceGroupNotFound,
                          db.instance_group_get,
                          self.context,
                          'nonexistent')
    def test_instance_group_delete_nonexistent(self):
        self.assertRaises(exception.InstanceGroupNotFound,
                          db.instance_group_delete,
                          self.context,
                          'nonexistent')
    def test_instance_group_get_all(self):
        groups = db.instance_group_get_all(self.context)
        self.assertEqual(0, len(groups))
        value = self._get_default_values()
        result1 = self._create_instance_group(self.context, value)
        groups = db.instance_group_get_all(self.context)
        self.assertEqual(1, len(groups))
        value = self._get_default_values()
        result2 = self._create_instance_group(self.context, value)
        groups = db.instance_group_get_all(self.context)
        results = [result1, result2]
        self._assertEqualListsOfObjects(results, groups)
    def test_instance_group_get_all_by_project_id(self):
        """Groups are returned per-project; each tenant sees only its own."""
        groups = db.instance_group_get_all_by_project_id(self.context,
                                                         'invalid_project_id')
        self.assertEqual(0, len(groups))
        values = self._get_default_values()
        result1 = self._create_instance_group(self.context, values)
        groups = db.instance_group_get_all_by_project_id(self.context,
                                                         self.project_id)
        self.assertEqual(1, len(groups))
        values = self._get_new_default_values()
        result2 = self._create_instance_group(self.new_context, values)
        groups = db.instance_group_get_all(self.context)
        groups.extend(db.instance_group_get_all(self.new_context))
        results = [result1, result2]
        self._assertEqualListsOfObjects(results, groups)
        projects = [{'context': self.context, 'name': self.project_id,
                     'value': [result1]},
                    {'context': self.new_context, 'name': self.new_project_id,
                     'value': [result2]}]
        for project in projects:
            groups = db.instance_group_get_all_by_project_id(
                project['context'], project['name'])
            self._assertEqualListsOfObjects(project['value'], groups)
    def test_instance_group_update(self):
        """Update covers plain fields, members, policies, and bad ids."""
        values = self._get_default_values()
        result = self._create_instance_group(self.context, values)
        ignored_keys = ['id', 'uuid', 'deleted', 'deleted_at', 'updated_at',
                        'created_at']
        self._assertEqualObjects(result, values, ignored_keys)
        self.assertTrue(uuidutils.is_uuid_like(result['uuid']))
        id = result['uuid']
        values = self._get_default_values()
        values['name'] = 'new_fake_name'
        db.instance_group_update(self.context, id, values)
        result = db.instance_group_get(self.context, id)
        self.assertEqual(result['name'], 'new_fake_name')
        # update members
        values = self._get_default_values()
        members = ['instance_id1', 'instance_id2']
        values['members'] = members
        db.instance_group_update(self.context, id, values)
        result = db.instance_group_get(self.context, id)
        self._assertEqualListsOfPrimitivesAsSets(result['members'], members)
        # update policies
        values = self._get_default_values()
        policies = ['policy1', 'policy2']
        values['policies'] = policies
        db.instance_group_update(self.context, id, values)
        result = db.instance_group_get(self.context, id)
        self._assertEqualListsOfPrimitivesAsSets(result['policies'], policies)
        # test invalid ID
        self.assertRaises(exception.InstanceGroupNotFound,
                          db.instance_group_update, self.context,
                          'invalid_id', values)
    def test_instance_group_get_by_instance(self):
        """A group can be found from any one of its member instance ids."""
        values = self._get_default_values()
        group1 = self._create_instance_group(self.context, values)
        members = ['instance_id1', 'instance_id2']
        db.instance_group_members_add(self.context, group1.uuid, members)
        group2 = db.instance_group_get_by_instance(self.context,
                                                   'instance_id1')
        self.assertEqual(group2.uuid, group1.uuid)
    def test_instance_group_get_by_other_project_user(self):
        """A different tenant cannot see another tenant's group."""
        values = self._get_default_values()
        result = self._create_instance_group(self.context, values)
        self.assertRaises(exception.InstanceGroupNotFound,
                          db.instance_group_get,
                          self.new_context, result['uuid'])
    def test_instance_group_delete_by_other_project_user(self):
        """A different tenant cannot delete another tenant's group."""
        values = self._get_default_values()
        result = self._create_instance_group(self.context, values)
        self.assertRaises(exception.InstanceGroupNotFound,
                          db.instance_group_delete,
                          self.new_context, result['uuid'])
    def test_instance_group_get_by_admin(self):
        """An admin context can read any tenant's group."""
        values = self._get_default_values()
        result = self._create_instance_group(self.context, values)
        group = db.instance_group_get(context.get_admin_context(),
                                      result['uuid'])
        self.assertEqual(result['uuid'], group.uuid)
        self.assertEqual(values['user_id'], group.user_id)
        self.assertEqual(values['project_id'], group.project_id)
    def test_instance_group_delete_by_admin(self):
        """An admin context can delete any tenant's group."""
        values = self._get_default_values()
        result = self._create_instance_group(self.context, values)
        db.instance_group_delete(context.get_admin_context(),
                                 result['uuid'])
class InstanceGroupMembersDBApiTestCase(InstanceGroupDBApiTestCase):
    """Tests for instance-group member handling (inherits base group tests)."""
    def test_instance_group_members_on_create(self):
        """Members passed at create time are stored with the group."""
        values = self._get_default_values()
        values['uuid'] = 'fake_id'
        members = ['instance_id1', 'instance_id2']
        result = self._create_instance_group(self.context, values,
                                             members=members)
        ignored_keys = ['id', 'deleted', 'deleted_at', 'updated_at',
                        'created_at']
        self._assertEqualObjects(result, values, ignored_keys)
        self._assertEqualListsOfPrimitivesAsSets(result['members'], members)
    def test_instance_group_members_add(self):
        """Members can be added to an initially-empty group."""
        values = self._get_default_values()
        values['uuid'] = 'fake_id'
        result = self._create_instance_group(self.context, values)
        id = result['uuid']
        members = db.instance_group_members_get(self.context, id)
        self.assertEqual(members, [])
        members2 = ['instance_id1', 'instance_id2']
        db.instance_group_members_add(self.context, id, members2)
        members = db.instance_group_members_get(self.context, id)
        self._assertEqualListsOfPrimitivesAsSets(members, members2)
    def test_instance_group_members_update(self):
        """Re-adding an overlapping member list merges without duplicates."""
        values = self._get_default_values()
        values['uuid'] = 'fake_id'
        result = self._create_instance_group(self.context, values)
        id = result['uuid']
        members2 = ['instance_id1', 'instance_id2']
        db.instance_group_members_add(self.context, id, members2)
        members = db.instance_group_members_get(self.context, id)
        self._assertEqualListsOfPrimitivesAsSets(members, members2)
        # check add with existing keys
        members3 = ['instance_id1', 'instance_id2', 'instance_id3']
        db.instance_group_members_add(self.context, id, members3)
        members = db.instance_group_members_get(self.context, id)
        self._assertEqualListsOfPrimitivesAsSets(members, members3)
    def test_instance_group_members_delete(self):
        """Members can be removed one at a time."""
        values = self._get_default_values()
        values['uuid'] = 'fake_id'
        result = self._create_instance_group(self.context, values)
        id = result['uuid']
        members3 = ['instance_id1', 'instance_id2', 'instance_id3']
        db.instance_group_members_add(self.context, id, members3)
        members = db.instance_group_members_get(self.context, id)
        self._assertEqualListsOfPrimitivesAsSets(members, members3)
        # Iterate over a copy because the list is mutated inside the loop.
        for instance_id in members3[:]:
            db.instance_group_member_delete(self.context, id, instance_id)
            members3.remove(instance_id)
            members = db.instance_group_members_get(self.context, id)
            self._assertEqualListsOfPrimitivesAsSets(members, members3)
    def test_instance_group_members_invalid_ids(self):
        """Unknown group or member ids raise the appropriate NotFound."""
        values = self._get_default_values()
        result = self._create_instance_group(self.context, values)
        id = result['uuid']
        self.assertRaises(exception.InstanceGroupNotFound,
                          db.instance_group_members_get,
                          self.context, 'invalid')
        self.assertRaises(exception.InstanceGroupNotFound,
                          db.instance_group_member_delete, self.context,
                          'invalidid', 'instance_id1')
        members = ['instance_id1', 'instance_id2']
        db.instance_group_members_add(self.context, id, members)
        self.assertRaises(exception.InstanceGroupMemberNotFound,
                          db.instance_group_member_delete,
                          self.context, id, 'invalid_id')
class InstanceGroupPoliciesDBApiTestCase(InstanceGroupDBApiTestCase):
def test_instance_group_policies_on_create(self):
values = self._get_default_values()
values['uuid'] = 'fake_id'
policies = ['policy1', 'policy2']
result = self._create_instance_group(self.context, values,
policies=policies)
ignored_keys = ['id', 'deleted', 'deleted_at', 'updated_at',
'created_at']
self._assertEqualObjects(result, values, ignored_keys)
self._assertEqualListsOfPrimitivesAsSets(result['policies'], policies)
class PciDeviceDBApiTestCase(test.TestCase, ModelsObjectComparatorMixin):
def setUp(self):
super(PciDeviceDBApiTestCase, self).setUp()
self.user_id = 'fake_user'
self.project_id = 'fake_project'
self.context = context.RequestContext(self.user_id, self.project_id)
self.admin_context = context.get_admin_context()
self.ignored_keys = ['id', 'deleted', 'deleted_at', 'updated_at',
'created_at']
self._compute_node = None
def _get_fake_pci_devs(self):
return {'id': 3353,
'compute_node_id': 1,
'address': '0000:0f:08.7',
'vendor_id': '8086',
'product_id': '1520',
'numa_node': 1,
'dev_type': fields.PciDeviceType.SRIOV_VF,
'dev_id': 'pci_0000:0f:08.7',
'extra_info': None,
'label': 'label_8086_1520',
'status': fields.PciDeviceStatus.AVAILABLE,
'instance_uuid': '00000000-0000-0000-0000-000000000010',
'request_id': None,
}, {'id': 3356,
'compute_node_id': 1,
'address': '0000:0f:03.7',
'vendor_id': '8083',
'product_id': '1523',
'numa_node': 0,
'dev_type': fields.PciDeviceType.SRIOV_VF,
'dev_id': 'pci_0000:0f:08.7',
'extra_info': None,
'label': 'label_8086_1520',
'status': fields.PciDeviceStatus.AVAILABLE,
'instance_uuid': '00000000-0000-0000-0000-000000000010',
'request_id': None,
}
@property
def compute_node(self):
if self._compute_node is None:
self._compute_node = db.compute_node_create(self.admin_context, {
'vcpus': 0,
'memory_mb': 0,
'local_gb': 0,
'vcpus_used': 0,
'memory_mb_used': 0,
'local_gb_used': 0,
'hypervisor_type': 'fake',
'hypervisor_version': 0,
'cpu_info': 'fake',
})
return self._compute_node
def _create_fake_pci_devs(self):
v1, v2 = self._get_fake_pci_devs()
for i in v1, v2:
i['compute_node_id'] = self.compute_node['id']
db.pci_device_update(self.admin_context, v1['compute_node_id'],
v1['address'], v1)
db.pci_device_update(self.admin_context, v2['compute_node_id'],
v2['address'], v2)
return (v1, v2)
def test_pci_device_get_by_addr(self):
v1, v2 = self._create_fake_pci_devs()
result = db.pci_device_get_by_addr(self.admin_context, 1,
'0000:0f:08.7')
self._assertEqualObjects(v1, result, self.ignored_keys)
def test_pci_device_get_by_addr_not_found(self):
self._create_fake_pci_devs()
self.assertRaises(exception.PciDeviceNotFound,
db.pci_device_get_by_addr, self.admin_context,
1, '0000:0f:08:09')
def test_pci_device_get_by_id(self):
v1, v2 = self._create_fake_pci_devs()
result = db.pci_device_get_by_id(self.admin_context, 3353)
self._assertEqualObjects(v1, result, self.ignored_keys)
def test_pci_device_get_by_id_not_found(self):
self._create_fake_pci_devs()
self.assertRaises(exception.PciDeviceNotFoundById,
db.pci_device_get_by_id,
self.admin_context, 3354)
def test_pci_device_get_all_by_node(self):
v1, v2 = self._create_fake_pci_devs()
results = db.pci_device_get_all_by_node(self.admin_context, 1)
self._assertEqualListsOfObjects(results, [v1, v2], self.ignored_keys)
def test_pci_device_get_all_by_node_empty(self):
v1, v2 = self._get_fake_pci_devs()
results = db.pci_device_get_all_by_node(self.admin_context, 9)
self.assertEqual(len(results), 0)
def test_pci_device_get_by_instance_uuid(self):
v1, v2 = self._create_fake_pci_devs()
v1['status'] = fields.PciDeviceStatus.ALLOCATED
v2['status'] = fields.PciDeviceStatus.ALLOCATED
db.pci_device_update(self.admin_context, v1['compute_node_id'],
v1['address'], v1)
db.pci_device_update(self.admin_context, v2['compute_node_id'],
v2['address'], v2)
results = db.pci_device_get_all_by_instance_uuid(
self.context,
'00000000-0000-0000-0000-000000000010')
self._assertEqualListsOfObjects(results, [v1, v2], self.ignored_keys)
def test_pci_device_get_by_instance_uuid_check_status(self):
v1, v2 = self._create_fake_pci_devs()
v1['status'] = fields.PciDeviceStatus.ALLOCATED
v2['status'] = fields.PciDeviceStatus.CLAIMED
db.pci_device_update(self.admin_context, v1['compute_node_id'],
v1['address'], v1)
db.pci_device_update(self.admin_context, v2['compute_node_id'],
v2['address'], v2)
results = db.pci_device_get_all_by_instance_uuid(
self.context,
'00000000-0000-0000-0000-000000000010')
self._assertEqualListsOfObjects(results, [v1], self.ignored_keys)
def test_pci_device_update(self):
v1, v2 = self._create_fake_pci_devs()
v1['status'] = fields.PciDeviceStatus.ALLOCATED
db.pci_device_update(self.admin_context, v1['compute_node_id'],
v1['address'], v1)
result = db.pci_device_get_by_addr(
self.admin_context, 1, '0000:0f:08.7')
self._assertEqualObjects(v1, result, self.ignored_keys)
v1['status'] = fields.PciDeviceStatus.CLAIMED
db.pci_device_update(self.admin_context, v1['compute_node_id'],
v1['address'], v1)
result = db.pci_device_get_by_addr(
self.admin_context, 1, '0000:0f:08.7')
self._assertEqualObjects(v1, result, self.ignored_keys)
def test_pci_device_destroy(self):
v1, v2 = self._create_fake_pci_devs()
results = db.pci_device_get_all_by_node(self.admin_context,
self.compute_node['id'])
self._assertEqualListsOfObjects(results, [v1, v2], self.ignored_keys)
db.pci_device_destroy(self.admin_context, v1['compute_node_id'],
v1['address'])
results = db.pci_device_get_all_by_node(self.admin_context,
self.compute_node['id'])
self._assertEqualListsOfObjects(results, [v2], self.ignored_keys)
def test_pci_device_destroy_exception(self):
v1, v2 = self._get_fake_pci_devs()
self.assertRaises(exception.PciDeviceNotFound,
db.pci_device_destroy,
self.admin_context,
v1['compute_node_id'],
v1['address'])
class RetryOnDeadlockTestCase(test.TestCase):
def test_without_deadlock(self):
@oslo_db_api.wrap_db_retry(max_retries=5,
retry_on_deadlock=True)
def call_api(*args, **kwargs):
return True
self.assertTrue(call_api())
def test_raise_deadlock(self):
self.attempts = 2
@oslo_db_api.wrap_db_retry(max_retries=5,
retry_on_deadlock=True)
def call_api(*args, **kwargs):
while self.attempts:
self.attempts = self.attempts - 1
raise db_exc.DBDeadlock("fake exception")
return True
self.assertTrue(call_api())
class TestSqlalchemyTypesRepr(test_base.DbTestCase):
def setUp(self):
super(TestSqlalchemyTypesRepr, self).setUp()
meta = MetaData(bind=self.engine)
self.table = Table(
'cidr_tbl',
meta,
Column('id', Integer, primary_key=True),
Column('addr', col_types.CIDR())
)
self.table.create()
self.addCleanup(meta.drop_all)
def test_cidr_repr(self):
addrs = [('192.168.3.0/24', '192.168.3.0/24'),
('2001:db8::/64', '2001:db8::/64'),
('192.168.3.0', '192.168.3.0/32'),
('2001:db8::', '2001:db8::/128'),
(None, None)]
with self.engine.begin() as conn:
for i in addrs:
conn.execute(self.table.insert(), {'addr': i[0]})
query = self.table.select().order_by(self.table.c.id)
result = conn.execute(query)
for idx, row in enumerate(result):
self.assertEqual(addrs[idx][1], row.addr)
class TestMySQLSqlalchemyTypesRepr(TestSqlalchemyTypesRepr,
test_base.MySQLOpportunisticTestCase):
pass
class TestPostgreSQLSqlalchemyTypesRepr(TestSqlalchemyTypesRepr,
test_base.PostgreSQLOpportunisticTestCase):
pass
class TestDBInstanceTags(test.TestCase):
sample_data = {
'project_id': 'project1',
'hostname': 'example.com',
'host': 'h1',
'node': 'n1',
'metadata': {'mkey1': 'mval1', 'mkey2': 'mval2'},
'system_metadata': {'smkey1': 'smval1', 'smkey2': 'smval2'},
'info_cache': {'ckey': 'cvalue'}
}
def setUp(self):
super(TestDBInstanceTags, self).setUp()
self.user_id = 'user1'
self.project_id = 'project1'
self.context = context.RequestContext(self.user_id, self.project_id)
def _create_instance(self):
inst = db.instance_create(self.context, self.sample_data)
return inst['uuid']
def _get_tags_from_resp(self, tag_refs):
return [(t.resource_id, t.tag) for t in tag_refs]
def test_instance_tag_add(self):
uuid = self._create_instance()
tag = 'tag'
tag_ref = db.instance_tag_add(self.context, uuid, tag)
self.assertEqual(uuid, tag_ref.resource_id)
self.assertEqual(tag, tag_ref.tag)
tag_refs = db.instance_tag_get_by_instance_uuid(self.context, uuid)
# Check the tag for the instance was added
tags = self._get_tags_from_resp(tag_refs)
self.assertEqual([(uuid, tag)], tags)
def test_instance_tag_add_duplication(self):
uuid = self._create_instance()
tag = 'tag'
for x in range(5):
db.instance_tag_add(self.context, uuid, tag)
tag_refs = db.instance_tag_get_by_instance_uuid(self.context, uuid)
# Check the only one tag for the instance was added
tags = self._get_tags_from_resp(tag_refs)
self.assertEqual([(uuid, tag)], tags)
def test_instance_tag_set(self):
uuid = self._create_instance()
tag1 = 'tag1'
tag2 = 'tag2'
tag3 = 'tag3'
tag4 = 'tag4'
# Set tags to the instance
db.instance_tag_set(self.context, uuid, [tag1, tag2])
tag_refs = db.instance_tag_get_by_instance_uuid(self.context, uuid)
# Check the tags for the instance were set
tags = self._get_tags_from_resp(tag_refs)
expected = [(uuid, tag1), (uuid, tag2)]
self.assertEqual(expected, tags)
# Set new tags to the instance
db.instance_tag_set(self.context, uuid, [tag3, tag4, tag2])
tag_refs = db.instance_tag_get_by_instance_uuid(self.context, uuid)
# Check the tags for the instance were replaced
tags = self._get_tags_from_resp(tag_refs)
expected = [(uuid, tag3), (uuid, tag4), (uuid, tag2)]
self.assertEqual(set(expected), set(tags))
@mock.patch('nova.db.sqlalchemy.models.Tag.__table__.insert',
return_value=models.Tag.__table__.insert())
def test_instance_tag_set_empty_add(self, mock_insert):
uuid = self._create_instance()
tag1 = 'tag1'
tag2 = 'tag2'
db.instance_tag_set(self.context, uuid, [tag1, tag2])
# Check insert() was called to insert 'tag1' and 'tag2'
mock_insert.assert_called_once_with()
mock_insert.reset_mock()
db.instance_tag_set(self.context, uuid, [tag1])
# Check insert() wasn't called because there are no tags for creation
mock_insert.assert_not_called()
@mock.patch('sqlalchemy.orm.query.Query.delete')
def test_instance_tag_set_empty_delete(self, mock_delete):
uuid = self._create_instance()
db.instance_tag_set(self.context, uuid, ['tag1', 'tag2'])
# Check delete() wasn't called because there are no tags for deletion
mock_delete.assert_not_called()
db.instance_tag_set(self.context, uuid, ['tag1', 'tag3'])
# Check delete() was called to delete 'tag2'
mock_delete.assert_called_once_with(synchronize_session=False)
def test_instance_tag_get_by_instance_uuid(self):
uuid1 = self._create_instance()
uuid2 = self._create_instance()
tag1 = 'tag1'
tag2 = 'tag2'
tag3 = 'tag3'
db.instance_tag_add(self.context, uuid1, tag1)
db.instance_tag_add(self.context, uuid2, tag1)
db.instance_tag_add(self.context, uuid2, tag2)
db.instance_tag_add(self.context, uuid2, tag3)
# Check the tags for the first instance
tag_refs = db.instance_tag_get_by_instance_uuid(self.context, uuid1)
tags = self._get_tags_from_resp(tag_refs)
expected = [(uuid1, tag1)]
self.assertEqual(expected, tags)
# Check the tags for the second instance
tag_refs = db.instance_tag_get_by_instance_uuid(self.context, uuid2)
tags = self._get_tags_from_resp(tag_refs)
expected = [(uuid2, tag1), (uuid2, tag2), (uuid2, tag3)]
self.assertEqual(expected, tags)
def test_instance_tag_get_by_instance_uuid_no_tags(self):
uuid = self._create_instance()
self.assertEqual([], db.instance_tag_get_by_instance_uuid(self.context,
uuid))
def test_instance_tag_delete(self):
uuid = self._create_instance()
tag1 = 'tag1'
tag2 = 'tag2'
db.instance_tag_add(self.context, uuid, tag1)
db.instance_tag_add(self.context, uuid, tag2)
tag_refs = db.instance_tag_get_by_instance_uuid(self.context, uuid)
tags = self._get_tags_from_resp(tag_refs)
expected = [(uuid, tag1), (uuid, tag2)]
# Check the tags for the instance were added
self.assertEqual(expected, tags)
db.instance_tag_delete(self.context, uuid, tag1)
tag_refs = db.instance_tag_get_by_instance_uuid(self.context, uuid)
tags = self._get_tags_from_resp(tag_refs)
expected = [(uuid, tag2)]
self.assertEqual(expected, tags)
def test_instance_tag_delete_non_existent(self):
uuid = self._create_instance()
self.assertRaises(exception.InstanceTagNotFound,
db.instance_tag_delete, self.context, uuid, 'tag')
def test_instance_tag_delete_all(self):
uuid = self._create_instance()
tag1 = 'tag1'
tag2 = 'tag2'
db.instance_tag_add(self.context, uuid, tag1)
db.instance_tag_add(self.context, uuid, tag2)
tag_refs = db.instance_tag_get_by_instance_uuid(self.context, uuid)
tags = self._get_tags_from_resp(tag_refs)
expected = [(uuid, tag1), (uuid, tag2)]
# Check the tags for the instance were added
self.assertEqual(expected, tags)
db.instance_tag_delete_all(self.context, uuid)
tag_refs = db.instance_tag_get_by_instance_uuid(self.context, uuid)
tags = self._get_tags_from_resp(tag_refs)
self.assertEqual([], tags)
def test_instance_tag_exists(self):
uuid = self._create_instance()
tag1 = 'tag1'
tag2 = 'tag2'
db.instance_tag_add(self.context, uuid, tag1)
# NOTE(snikitin): Make sure it's actually a bool
self.assertEqual(True, db.instance_tag_exists(self.context, uuid,
tag1))
self.assertEqual(False, db.instance_tag_exists(self.context, uuid,
tag2))
def test_instance_tag_add_to_non_existing_instance(self):
self._create_instance()
self.assertRaises(exception.InstanceNotFound, db.instance_tag_add,
self.context, 'fake_uuid', 'tag')
def test_instance_tag_set_to_non_existing_instance(self):
self._create_instance()
self.assertRaises(exception.InstanceNotFound, db.instance_tag_set,
self.context, 'fake_uuid', ['tag1', 'tag2'])
def test_instance_tag_get_from_non_existing_instance(self):
self._create_instance()
self.assertRaises(exception.InstanceNotFound,
db.instance_tag_get_by_instance_uuid, self.context,
'fake_uuid')
def test_instance_tag_delete_from_non_existing_instance(self):
self._create_instance()
self.assertRaises(exception.InstanceNotFound, db.instance_tag_delete,
self.context, 'fake_uuid', 'tag')
def test_instance_tag_delete_all_from_non_existing_instance(self):
self._create_instance()
self.assertRaises(exception.InstanceNotFound,
db.instance_tag_delete_all,
self.context, 'fake_uuid')
def test_instance_tag_exists_non_existing_instance(self):
self._create_instance()
self.assertRaises(exception.InstanceNotFound,
db.instance_tag_exists,
self.context, 'fake_uuid', 'tag')
|
gpl-2.0
|
jeremycline/pulp_ostree
|
plugins/pulp_ostree/plugins/importers/web.py
|
6123
|
import sys
from gettext import gettext as _
from pulp.common.config import read_json_config
from pulp.plugins.importer import Importer
from pulp.server.db.model import Repository
from pulp_ostree.common import constants
from pulp_ostree.plugins.importers.steps import Main
def entry_point():
"""
Entry point that pulp platform uses to load the importer
:return: importer class and its config
:rtype: Importer, dict
"""
config = read_json_config(constants.IMPORTER_CONFIG_FILE_PATH)
return WebImporter, config
class WebImporter(Importer):
def __init__(self):
super(WebImporter, self).__init__()
@classmethod
def metadata(cls):
"""
Used by Pulp to classify the capabilities of this importer. The
following keys must be present in the returned dictionary:
* id - Programmatic way to refer to this importer. Must be unique
across all importers. Only letters and underscores are valid.
* display_name - User-friendly identification of the importer.
* types - List of all content type IDs that may be imported using this
importer.
:return: keys and values listed above
:rtype: dict
"""
return {
'id': constants.WEB_IMPORTER_TYPE_ID,
'display_name': _('OSTree Web Importer'),
'types': [constants.OSTREE_TYPE_ID]
}
def validate_config(self, repo, config):
"""
Validate the configuration.
:param repo: metadata describing the repository
:type repo: pulp.plugins.model.Repository
:param config: plugin configuration
:type config: pulp.plugins.config.PluginCallConfiguration
"""
return True, ''
def sync_repo(self, repo, conduit, config):
"""
Synchronizes content into the given repository. This call is responsible
for adding new content units to Pulp as well as associating them to the
given repository.
While this call may be implemented using multiple threads, its execution
from the Pulp server's standpoint should be synchronous. This call should
not return until the sync is complete.
It is not expected that this call be atomic. Should an error occur, it
is not the responsibility of the importer to rollback any unit additions
or associations that have been made.
The returned report object is used to communicate the results of the
sync back to the user. Care should be taken to i18n the free text "log"
attribute in the report if applicable.
:param repo: metadata describing the repository
:type repo: pulp.plugins.model.Repository
:param conduit: provides access to relevant Pulp functionality
:type conduit: pulp.plugins.conduits.repo_sync.RepoSyncConduit
:param config: plugin configuration
:type config: pulp.plugins.config.PluginCallConfiguration
:return: report of the details of the sync
:rtype: pulp.plugins.model.SyncReport
"""
repository = Repository.objects.get(repo_id=repo.id)
step = Main(repo=repository, conduit=conduit, config=config)
report = step.process_lifecycle()
return report
def import_units(self, source, destination, conduit, config, units=None):
"""
Import content units into the given repository. This method will be
called in a number of different situations:
* A user is attempting to copy a content unit from one repository
into the repository that uses this importer
* A user is attempting to add an orphaned unit into a repository.
This call has two options for handling the requested units:
* Associate the given units with the destination repository. This will
link the repository with the existing unit directly; changes to the
unit will be reflected in all repositories that reference it.
* Create a new unit and save it to the repository. This would act as
a deep copy of sorts, creating a unique unit in the database. Keep
in mind that the unit key must change in order for the unit to
be considered different than the supplied one.
The APIs for both approaches are similar to those in the sync conduit.
In the case of a simple association, the init_unit step can be skipped
and save_unit simply called on each specified unit.
The units argument is optional. If None, all units in the source
repository should be imported. The conduit is used to query for those
units. If specified, only the units indicated should be imported (this
is the case where the caller passed a filter to Pulp).
:param source: metadata describing the repository containing the
units to import
:type source: pulp.plugins.model.Repository
:param destination: metadata describing the repository to import units
into
:type destination: pulp.plugins.model.Repository
:param conduit: provides access to relevant Pulp functionality
:type conduit: pulp.plugins.conduits.unit_import.ImportUnitConduit
:param config: plugin configuration
:type config: pulp.plugins.config.PluginCallConfiguration
:param units: optional list of pre-filtered units to import
:type units: list of pulp.plugins.model.Unit
:return: list of Unit instances that were saved to the destination repository
:rtype: list
"""
added = []
for unit in conduit.get_source_units():
conduit.associate_unit(unit)
added.append(unit)
return added
def cancel_sync_repo(self):
"""
Cancels an in-progress sync.
This call is responsible for halting a current sync by stopping any
in-progress downloads and performing any cleanup necessary to get the
system back into a stable state.
"""
sys.exit(0)
|
gpl-2.0
|
BIORIMP/biorimp
|
BIO-RIMP/test_data/code/joda/src/main/java/org/joda/beans/impl/direct/DirectMetaPropertyMap.java
|
8396
|
/*
* Copyright 2001-2014 Stephen Colebourne
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.joda.beans.impl.direct;
import java.util.AbstractCollection;
import java.util.AbstractMap;
import java.util.AbstractSet;
import java.util.Arrays;
import java.util.Collection;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import org.joda.beans.MetaProperty;
/**
* A map of name to meta-property designed for use by the code generator.
* <p>
* This meta-property map implementation is designed primarily for code-generation.
* It stores a reference to the meta-bean and the meta-properties.
* The meta-properties are accessed using {@link DirectMetaBean#metaPropertyGet(String)}.
* <p>
* This class is immutable and thread-safe.
*
* @author Stephen Colebourne
*/
@SuppressWarnings("rawtypes")
public final class DirectMetaPropertyMap implements Map<String, MetaProperty<?>> {
/** The meta-bean. */
private final DirectMetaBean metaBean;
/** The property names. */
private final Set<String> keys;
/** The meta-properties. */
private final Collection<MetaProperty<?>> values;
/** The map entries. */
private final Set<Entry<String, MetaProperty<?>>> entries;
/**
* Constructor.
*
* @param metaBean the meta-bean, not null
* @param parent the superclass parent, may be null
* @param propertyNames the property names, not null
*/
@SuppressWarnings("unchecked")
public DirectMetaPropertyMap(final DirectMetaBean metaBean, DirectMetaPropertyMap parent, String... propertyNames) {
if (metaBean == null) {
throw new NullPointerException("MetaBean must not be null");
}
this.metaBean = metaBean;
int parentSize = 0;
final Entry<String, MetaProperty<?>>[] metaProperties;
if (parent != null) {
parentSize = parent.size();
metaProperties = Arrays.copyOf(((Entries) parent.entries).metaProperties, parentSize + propertyNames.length);
} else {
metaProperties = new Entry[propertyNames.length];
}
for (int i = 0; i < propertyNames.length; i++) {
metaProperties[i + parentSize] = new AbstractMap.SimpleImmutableEntry(propertyNames[i], metaBean.metaPropertyGet(propertyNames[i]));
}
keys = new Keys(metaProperties);
values = new Values(metaProperties);
entries = new Entries(metaProperties);
}
//-----------------------------------------------------------------------
@Override
public int size() {
return keys.size();
}
@Override
public boolean isEmpty() {
return size() == 0;
}
@SuppressWarnings("unchecked")
@Override
public MetaProperty<Object> get(Object propertyName) {
if (propertyName instanceof String) {
return (MetaProperty<Object>) metaBean.metaPropertyGet((String) propertyName);
}
return null;
}
@Override
public boolean containsKey(Object propertyName) {
return propertyName instanceof String &&
metaBean.metaPropertyGet(propertyName.toString()) != null;
}
@Override
public boolean containsValue(Object value) {
return value instanceof MetaProperty &&
metaBean.metaPropertyGet(((MetaProperty<?>) value).name()) != null;
}
//-----------------------------------------------------------------------
@Override
public MetaProperty<?> put(String key, MetaProperty<?> value) {
throw new UnsupportedOperationException("DirectBean meta-property map cannot be modified");
}
@Override
public MetaProperty<?> remove(Object key) {
throw new UnsupportedOperationException("DirectBean meta-property map cannot be modified");
}
@Override
public void putAll(Map<? extends String, ? extends MetaProperty<?>> m) {
throw new UnsupportedOperationException("DirectBean meta-property map cannot be modified");
}
@Override
public void clear() {
throw new UnsupportedOperationException("DirectBean meta-property map cannot be modified");
}
//-----------------------------------------------------------------------
@Override
public Set<String> keySet() {
return keys;
}
@Override
public Collection<MetaProperty<?>> values() {
return values;
}
@Override
public Set<Entry<String, MetaProperty<?>>> entrySet() {
return entries;
}
//-----------------------------------------------------------------------
/**
* Collection implementation for the keys.
*/
private static final class Keys extends AbstractSet<String> {
private final Entry<String, MetaProperty<?>>[] metaProperties;
private Keys(Entry<String, MetaProperty<?>>[] metaProperties) {
this.metaProperties = metaProperties;
}
@Override
public Iterator<String> iterator() {
return new Iterator<String>() {
private int index;
@Override
public boolean hasNext() {
return index < metaProperties.length;
}
@Override
public String next() {
return metaProperties[index++].getKey();
}
@Override
public void remove() {
throw new UnsupportedOperationException();
}
};
}
@Override
public int size() {
return metaProperties.length;
}
}
/**
* Collection implementation for the values.
*/
private static final class Values extends AbstractCollection<MetaProperty<?>> {
private final Entry<String, MetaProperty<?>>[] metaProperties;
private Values(Entry<String, MetaProperty<?>>[] metaProperties) {
this.metaProperties = metaProperties;
}
@Override
public Iterator<MetaProperty<?>> iterator() {
return new Iterator<MetaProperty<?>>() {
private int index;
@Override
public boolean hasNext() {
return index < metaProperties.length;
}
@Override
public MetaProperty<?> next() {
return metaProperties[index++].getValue();
}
@Override
public void remove() {
throw new UnsupportedOperationException();
}
};
}
@Override
public int size() {
return metaProperties.length;
}
}
/**
* Collection implementation for the entries.
*/
private static final class Entries extends AbstractSet<Entry<String, MetaProperty<?>>> {
private final Entry<String, MetaProperty<?>>[] metaProperties;
private Entries(Entry<String, MetaProperty<?>>[] metaProperties) {
this.metaProperties = metaProperties;
}
@Override
public Iterator<Entry<String, MetaProperty<?>>> iterator() {
return new Iterator<Entry<String, MetaProperty<?>>>() {
private int index;
@Override
public boolean hasNext() {
return index < metaProperties.length;
}
@Override
public Entry<String, MetaProperty<?>> next() {
return metaProperties[index++];
}
@Override
public void remove() {
throw new UnsupportedOperationException();
}
};
}
@Override
public int size() {
return metaProperties.length;
}
}
}
|
gpl-2.0
|
toshi0607/sukessan_wp
|
wp-content/themes/stinger5ver20140902/ad.php
|
344
|
<?php if(is_mobile()) { //スマートフォンの時は300pxサイズを ?>
<?php if ( function_exists('dynamic_sidebar') && dynamic_sidebar(4) ) : else : ?>
<?php endif; ?>
<?php
}else{ //PCの時は336pxサイズを
?>
<?php if ( function_exists('dynamic_sidebar') && dynamic_sidebar(3) ) : else : ?>
<?php endif; ?>
<?php
}
?>
|
gpl-2.0
|
Ginfred/DeathCore
|
src/server/game/AI/CoreAI/PetAI.cpp
|
23729
|
/*
* Copyright (C) 2013-2015 DeathCore <http://www.noffearrdeathproject.net/>
* Copyright (C) 2005-2009 MaNGOS <http://getmangos.com/>
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the
* Free Software Foundation; either version 2 of the License, or (at your
* option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
* more details.
*
* You should have received a copy of the GNU General Public License along
* with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#include "PetAI.h"
#include "Errors.h"
#include "Pet.h"
#include "Player.h"
#include "DBCStores.h"
#include "Spell.h"
#include "ObjectAccessor.h"
#include "SpellMgr.h"
#include "Creature.h"
#include "World.h"
#include "Util.h"
#include "Group.h"
#include "SpellInfo.h"
int PetAI::Permissible(const Creature* creature)
{
if (creature->isPet())
return PERMIT_BASE_SPECIAL;
return PERMIT_BASE_NO;
}
PetAI::PetAI(Creature* c) : CreatureAI(c), i_tracker(TIME_INTERVAL_LOOK)
{
m_AllySet.clear();
UpdateAllies();
}
bool PetAI::_needToStop()
{
// This is needed for charmed creatures, as once their target was reset other effects can trigger threat
if (me->isCharmed() && me->GetVictim() == me->GetCharmer())
return true;
if (!me->IsValidAttackTarget(me->GetVictim()) || !me->canSeeOrDetect(me->GetVictim()))
return true;
return false;
}
void PetAI::_stopAttack()
{
if (!me->isAlive())
{
sLog->outDebug(LOG_FILTER_GENERAL, "Creature stoped attacking cuz his dead [guid=%u]", me->GetGUIDLow());
me->GetMotionMaster()->Clear();
me->GetMotionMaster()->MoveIdle();
me->CombatStop();
me->getHostileRefManager().deleteReferences();
return;
}
me->AttackStop();
me->InterruptNonMeleeSpells(false);
me->SendMeleeAttackStop(); // Should stop pet's attack button from flashing
me->GetCharmInfo()->SetIsCommandAttack(false);
ClearCharmInfoFlags();
HandleReturnMovement();
}
// Per-tick driver for pet AI: refreshes the ally cache, runs melee combat,
// picks a replacement target when the current one dies, evaluates autocast
// spells, and keeps the pet's movement speed synced with its owner.
void PetAI::UpdateAI(const uint32 diff)
{
    // Dead pets, or pets whose charm info is gone (mid-despawn), do nothing.
    if (!me->isAlive() || !me->GetCharmInfo())
        return;

    Unit* owner = me->GetCharmerOrOwner();

    if (m_updateAlliesTimer <= diff)
        // UpdateAllies self set update timer
        UpdateAllies();
    else
        m_updateAlliesTimer -= diff;

    if (me->GetVictim() && me->GetVictim()->isAlive())
    {
        // is only necessary to stop casting, the pet must not exit combat
        if (me->GetVictim()->HasBreakableByDamageCrowdControlAura(me))
        {
            me->InterruptNonMeleeSpells(false);
            return;
        }

        if (_needToStop())
        {
            sLog->outDebug(LOG_FILTER_GENERAL, "Pet AI stopped attacking [guid=%u]", me->GetGUIDLow());
            _stopAttack();
            return;
        }

        // Check before attacking to prevent pets from leaving stay position
        if (me->GetCharmInfo()->HasCommandState(COMMAND_STAY) || me->GetCharmInfo()->HasCommandState(COMMAND_MOVE_TO))
        {
            // Staying pets swing only when explicitly commanded, or when the
            // victim walks into melee range of the stay spot.
            if (!IsCasterPet())
                if (me->GetCharmInfo()->IsCommandAttack() || (me->GetCharmInfo()->IsAtStay() && me->IsWithinMeleeRange(me->GetVictim())))
                    DoMeleeAttackIfReady();
        }
        else if (!IsCasterPet())
            DoMeleeAttackIfReady();
    }
    else
    {
        if (me->HasReactState(REACT_AGGRESSIVE) || me->GetCharmInfo()->IsAtStay())
        {
            // Every update we need to check targets only in certain cases
            // Aggressive - Allow auto select if owner or pet don't have a target
            // Stay - Only pick from pet or owner targets / attackers so targets won't run by
            //   while chasing our owner. Don't do auto select.
            // All other cases (ie: defensive) - Targets are assigned by AttackedBy(), OwnerAttackedBy(), OwnerAttacked(), etc.
            Unit* nextTarget = SelectNextTarget(me->HasReactState(REACT_AGGRESSIVE));

            if (nextTarget)
                AttackStart(nextTarget);
            else
                HandleReturnMovement();
        }
        else
            HandleReturnMovement();
    }

    // Autocast (casted only in combat or persistent spells in any state)
    if (!me->HasUnitState(UNIT_STATE_CASTING))
    {
        // Candidate (target, spell) pairs; one is picked at random below and
        // the rest are deleted.
        typedef std::vector<std::pair<Unit*, Spell*> > TargetSpellList;
        TargetSpellList targetSpellStore;

        for (uint8 i = 0; i < me->GetPetAutoSpellSize(); ++i)
        {
            uint32 spellID = me->GetPetAutoSpellOnPos(i);
            if (!spellID)
                continue;

            SpellInfo const* spellInfo = sSpellMgr->GetSpellInfo(spellID);
            if (!spellInfo)
                continue;

            // Ghoul energy should never go below 30 when autocasting
            // NOTE(review): me->GetOwner() is dereferenced without a null
            // check here; verify GetOwner() can never be null at this point.
            if (me->getPowerType() == POWER_ENERGY && me->GetPower(me->getPowerType()) <= 70
                && me->GetOwner()->getClass() == CLASS_DEATH_KNIGHT)
                continue;

            if (me->GetCharmInfo() && me->GetCharmInfo()->GetGlobalCooldownMgr().HasGlobalCooldown(spellInfo))
                continue;

            if (spellInfo->IsPositive())
            {
                if (spellInfo->CanBeUsedInCombat())
                {
                    // check spell cooldown
                    if (me->HasSpellCooldown(spellInfo->Id))
                        continue;

                    // Check if we're in combat or commanded to attack
                    if (!me->isInCombat() && !me->GetCharmInfo()->IsCommandAttack())
                        continue;
                }

                Spell* spell = new Spell(me, spellInfo, TRIGGERED_NONE, 0);
                bool spellUsed = false;

                // Some spells can target enemy or friendly (DK Ghoul's Leap)
                // Check for enemy first (pet then owner)
                Unit* target = me->getAttackerForHelper();
                if (!target && owner)
                    target = owner->getAttackerForHelper();

                if (target)
                {
                    if (CanAttack(target) && spell->CanAutoCast(target))
                    {
                        targetSpellStore.push_back(std::make_pair(target, spell));
                        spellUsed = true;
                    }
                }

                if (spellInfo->HasEffect(SPELL_EFFECT_JUMP_DEST))
                {
                    if (!spellUsed)
                        delete spell;
                    continue; // Pets must only jump to target
                }

                // No enemy, check friendly
                if (!spellUsed)
                {
                    for (std::set<uint64>::const_iterator tar = m_AllySet.begin(); tar != m_AllySet.end(); ++tar)
                    {
                        Unit* ally = ObjectAccessor::GetUnit(*me, *tar);

                        //only buff targets that are in combat, unless the spell can only be cast while out of combat
                        if (!ally)
                            continue;

                        if (spell->CanAutoCast(ally))
                        {
                            targetSpellStore.push_back(std::make_pair(ally, spell));
                            spellUsed = true;
                            break;
                        }
                    }
                }

                // No valid targets at all
                if (!spellUsed)
                    delete spell;
            }
            else if (me->GetVictim() && CanAttack(me->GetVictim()) && spellInfo->CanBeUsedInCombat())
            {
                Spell* spell = new Spell(me, spellInfo, TRIGGERED_NONE, 0);
                if (spell->CanAutoCast(me->GetVictim()))
                    targetSpellStore.push_back(std::make_pair(me->GetVictim(), spell));
                else
                    delete spell;
            }
        }

        //found units to cast on to
        if (!targetSpellStore.empty())
        {
            // Pick one candidate at random; it is removed from the list so the
            // cleanup loop below doesn't delete the spell we are about to cast.
            uint32 index = urand(0, targetSpellStore.size() - 1);

            Spell* spell  = targetSpellStore[index].second;
            Unit*  target = targetSpellStore[index].first;

            targetSpellStore.erase(targetSpellStore.begin() + index);

            SpellCastTargets targets;
            targets.SetUnitTarget(target);

            // Face the target before casting and push the orientation update
            // to nearby players so the cast doesn't look backwards.
            if (!me->HasInArc(M_PI, target))
            {
                me->SetInFront(target);
                if (target && target->GetTypeId() == TYPEID_PLAYER)
                    me->SendUpdateToPlayer(target->ToPlayer());

                if (owner && owner->GetTypeId() == TYPEID_PLAYER)
                    me->SendUpdateToPlayer(owner->ToPlayer());
            }

            spell->prepare(&targets);
        }

        // deleted cached Spell objects
        for (TargetSpellList::const_iterator itr = targetSpellStore.begin(); itr != targetSpellStore.end(); ++itr)
            delete itr->second;
    }

    // Update speed as needed to prevent dropping too far behind and despawning
    me->UpdateSpeed(MOVE_RUN, true);
    me->UpdateSpeed(MOVE_WALK, true);
    me->UpdateSpeed(MOVE_FLIGHT, true);
}
// Rebuilds m_AllySet (the GUIDs autocast may buff): the pet itself plus
// either its owner, or the owner's same-subgroup party members when grouped.
// Re-armed to run again in 10 seconds by resetting m_updateAlliesTimer.
void PetAI::UpdateAllies()
{
    Unit* owner = me->GetCharmerOrOwner();
    Group* group = NULL;

    m_updateAlliesTimer = 10*IN_MILLISECONDS;   //update friendly targets every 10 seconds, lesser checks increase performance

    if (!owner)
        return;
    else if (owner->GetTypeId() == TYPEID_PLAYER)
        group = owner->ToPlayer()->GetGroup();

    //only pet and owner/not in group->ok
    if (m_AllySet.size() == 2 && !group)
        return;

    //owner is in group; group members filled in already (no raid -> subgroupcount = whole count)
    if (group && !group->isRaidGroup() && m_AllySet.size() == (group->GetMembersCount() + 2))
        return;

    m_AllySet.clear();
    m_AllySet.insert(me->GetGUID());

    if (group) //add group
    {
        for (GroupReference* itr = group->GetFirstMember(); itr != NULL; itr = itr->next())
        {
            Player* Target = itr->getSource();
            if (!Target || !group->SameSubGroup((Player*)owner, Target))
                continue;

            // NOTE(review): the owner's own GUID is skipped in the grouped
            // branch, yet the early-out above expects GetMembersCount() + 2
            // entries - verify the intended set contents.
            if (Target->GetGUID() == owner->GetGUID())
                continue;

            m_AllySet.insert(Target->GetGUID());
        }
    }
    else //remove group
        m_AllySet.insert(owner->GetGUID());
}
// Called from Unit::Kill() in case where pet or owner kills something.
// Clears the pet's combat state for the dead victim, then either engages the
// next valid target or returns to the owner.
void PetAI::KilledUnit(Unit* victim)
{
    // if owner killed this victim, pet may still be attacking something else
    if (me->GetVictim() && me->GetVictim() != victim)
        return;

    // Clear target just in case. May help problem where health / focus / mana
    // regen gets stuck. Also resets attack command.
    // Can't use _stopAttack() because that activates movement handlers and ignores
    // next target selection
    me->AttackStop();
    me->InterruptNonMeleeSpells(false);
    me->SendMeleeAttackStop(); // Stops the pet's 'Attack' button from flashing

    // Before returning to owner, see if there are more things to attack
    if (Unit* nextTarget = SelectNextTarget(false))
        AttackStart(nextTarget);
    else
        HandleReturnMovement(); // Return
}
// Overrides Unit::AttackStart so pet command/react states are evaluated
// before any engagement happens.
void PetAI::AttackStart(Unit* target)
{
    // Let CanAttack() apply every pet-state rule first.
    if (!CanAttack(target))
        return;

    CharmInfo* charmInfo = me->GetCharmInfo();

    // Chase unless the pet was told to hold position (stay / move-to), except
    // when the owner explicitly ordered this attack.
    bool shouldChase = (!charmInfo->HasCommandState(COMMAND_STAY) && !charmInfo->HasCommandState(COMMAND_MOVE_TO))
        || charmInfo->IsCommandAttack();

    DoAttack(target, shouldChase);
}
// Spell-cast variant of AttackStart(): same pet-state evaluation, but the
// chase movement is parameterized with the spell that triggered it.
void PetAI::AttackStart(Unit* target, uint32 spellId)
{
    // Let CanAttack() apply every pet-state rule first.
    if (!CanAttack(target))
        return;

    CharmInfo* charmInfo = me->GetCharmInfo();

    // Chase unless the pet was told to hold position (stay / move-to), except
    // when the owner explicitly ordered this attack.
    bool shouldChase = (!charmInfo->HasCommandState(COMMAND_STAY) && !charmInfo->HasCommandState(COMMAND_MOVE_TO))
        || charmInfo->IsCommandAttack();

    DoAttack(target, shouldChase, spellId);
}
// Called when the owner takes damage; keeps the pet from ignoring threats to
// its master without letting it abandon an ongoing fight.
void PetAI::OwnerAttackedBy(Unit* attacker)
{
    // Nothing to react to, and passive pets never act on their own.
    if (!attacker || me->HasReactState(REACT_PASSIVE))
        return;

    // Already fighting something that is still alive - stay on it.
    Unit* currentVictim = me->GetVictim();
    if (currentVictim && currentVictim->isAlive())
        return;

    // AttackStart() re-validates the target against all pet states.
    AttackStart(attacker);
}
// Called when the owner opens an attack, so assist-mode pets can join in.
// The target may be NULL when invoked from a spell with invalid cast targets.
void PetAI::OwnerAttacked(Unit* target)
{
    // Only assist-mode pets react, and only to a real target.
    if (!target || !me->HasReactState(REACT_ASSIST))
        return;

    // AttackStart() re-validates the target against all pet states.
    AttackStart(target);
}
// Picks the pet's next target after its current one dies. Priority order:
// pet's own attacker, owner's attacker, owner's victim, then (aggressive
// pets only, when allowed) the nearest hostile in aggro range.
Unit* PetAI::SelectNextTarget(bool allowAutoSelect) const
{
    // Provides next target selection after current target death.
    // This function should only be called internally by the AI
    // Targets are not evaluated here for being valid targets, that is done in _CanAttack()
    // The parameter: allowAutoSelect lets us disable aggressive pet auto targeting for certain situations

    // Passive pets don't do next target selection
    if (me->HasReactState(REACT_PASSIVE))
        return NULL;

    // Check pet attackers first so we don't drag a bunch of targets to the owner
    if (Unit* myAttacker = me->getAttackerForHelper())
        if (!myAttacker->HasBreakableByDamageCrowdControlAura())
            return myAttacker;

    // Not sure why we wouldn't have an owner but just in case...
    if (!me->GetCharmerOrOwner())
        return NULL;

    // Check owner attackers
    if (Unit* ownerAttacker = me->GetCharmerOrOwner()->getAttackerForHelper())
        if (!ownerAttacker->HasBreakableByDamageCrowdControlAura())
            return ownerAttacker;

    // Check owner victim
    // 3.0.2 - Pets now start attacking their owners victim in defensive mode as soon as the hunter does
    if (Unit* ownerVictim = me->GetCharmerOrOwner()->GetVictim())
        return ownerVictim;

    // Neither pet or owner had a target and aggressive pets can pick any target
    // To prevent aggressive pets from chain selecting targets and running off, we
    // only select a random target if certain conditions are met.
    if (me->HasReactState(REACT_AGGRESSIVE) && allowAutoSelect)
    {
        if (!me->GetCharmInfo()->IsReturning() || me->GetCharmInfo()->IsFollowing() || me->GetCharmInfo()->IsAtStay())
            if (Unit* nearTarget = me->ToCreature()->SelectNearestHostileUnitInAggroRange(true))
                return nearTarget;
    }

    // Default - no valid targets
    return NULL;
}
// Moves the pet back to its stay point (COMMAND_STAY / COMMAND_MOVE_TO) or
// back to following its owner (COMMAND_FOLLOW), setting the IsReturning flag
// so MovementInform() can finish the state transition on arrival.
void PetAI::HandleReturnMovement()
{
    // Handles moving the pet back to stay or owner

    // Prevent activating movement when under control of spells
    // such as "Eyes of the Beast"
    if (me->isCharmed() || me->HasUnitState(UNIT_STATE_CHARGING))
        return;

    if (me->GetCharmInfo()->HasCommandState(COMMAND_STAY) || me->GetCharmInfo()->HasCommandState(COMMAND_MOVE_TO))
    {
        if (!me->GetCharmInfo()->IsAtStay() && !me->GetCharmInfo()->IsReturning())
        {
            // Return to previous position where stay was clicked
            float x, y, z;
            me->GetCharmInfo()->GetStayPosition(x, y, z);
            ClearCharmInfoFlags();
            me->GetCharmInfo()->SetIsReturning(true);
            me->GetMotionMaster()->Clear();
            // The pet's own GUIDLow is used as the waypoint id so
            // MovementInform() can identify this as the return-to-stay move.
            me->GetMotionMaster()->MovePoint(me->GetGUIDLow(), x, y, z);
        }
    }
    else // COMMAND_FOLLOW
    {
        if (!me->GetCharmInfo()->IsFollowing() && !me->GetCharmInfo()->IsReturning())
        {
            ClearCharmInfoFlags();
            me->GetCharmInfo()->SetIsReturning(true);
            me->GetMotionMaster()->Clear();
            me->GetMotionMaster()->MoveFollow(me->GetCharmerOrOwner(), PET_FOLLOW_DIST, me->GetFollowAngle());
        }
    }
}
// True for ranged caster pets that should not melee and keep casting
// distance instead: the Imp (entry 416) and the Water Elemental (entry 510).
bool PetAI::IsCasterPet()
{
    switch (me->GetEntry())
    {
        case 416:   // Imp
        case 510:   // Water Elemental
            return true;
        default:
            return false;
    }
}
// Preferred chase distance for this pet against the given victim.
// Returns 0 for melee pets, and for caster pets when the victim is out of
// line of sight (so the pet closes in instead of idling behind a wall,
// unable to cast and never getting closer - annoying for its master).
float PetAI::GetAttackDistance(Unit* victim)
{
    if (victim && !victim->IsWithinLOSInMap(me))
        return 0.0f;

    uint32 entry = me->GetEntry();
    if (entry == 416)       // Imp - 40 yard caster; 39 keeps a little safety net
        return 39.0f;
    if (entry == 510)       // Water Elemental
        return 44.0f;

    return 0.0f;            // melee pets chase into contact range
}
// Engages the target, either chasing it (optionally tied to a spell cast via
// spellId) or standing at the stay point, and resets the charm-info flags for
// the next update / creature kill.
void PetAI::DoAttack(Unit* target, bool chase, uint32 spellId)
{
    // Handles attack with or without chase and also resets flags
    // for next update / creature kill

    if (me->Attack(target, true))
    {
        // Play sound to let the player know the pet is attacking something it picked on its own
        if (me->HasReactState(REACT_AGGRESSIVE) && !me->GetCharmInfo()->IsCommandAttack())
            me->SendPetAIReaction(me->GetGUID());

        if (chase)
        {
            bool oldCmdAttack = me->GetCharmInfo()->IsCommandAttack(); // This needs to be reset after other flags are cleared
            ClearCharmInfoFlags();
            me->GetCharmInfo()->SetIsCommandAttack(oldCmdAttack); // For passive pets commanded to attack so they will use spells
            me->GetMotionMaster()->Clear();
            me->GetMotionMaster()->MoveChase(target, GetAttackDistance(target), 0.0f, spellId);
        }
        else // (Stay && ((Aggressive || Defensive) && In Melee Range)))
        {
            ClearCharmInfoFlags();
            me->GetCharmInfo()->SetIsAtStay(true);
            me->GetMotionMaster()->Clear();
            me->GetMotionMaster()->MoveIdle();
        }
    }
}
// Movement-generator callback: finalizes the pet's state when it reaches its
// stay point (POINT_MOTION_TYPE) or catches up to its owner
// (FOLLOW_MOTION_TYPE).
void PetAI::MovementInform(uint32 moveType, uint32 data)
{
    // Receives notification when pet reaches stay or follow owner
    switch (moveType)
    {
        case POINT_MOTION_TYPE:
        {
            // Pet is returning to where stay was clicked. data should be
            // pet's GUIDLow since we set that as the waypoint ID
            if (data == me->GetGUIDLow() && me->GetCharmInfo()->IsReturning())
            {
                ClearCharmInfoFlags();
                me->GetCharmInfo()->SetIsAtStay(true);
                me->GetMotionMaster()->Clear();
                me->GetMotionMaster()->MoveIdle();
            }
            break;
        }
        case FOLLOW_MOTION_TYPE:
        {
            // If data is owner's GUIDLow then we've reached follow point,
            // otherwise we're probably chasing a creature
            if (me->GetCharmerOrOwner() && me->GetCharmInfo() && data == me->GetCharmerOrOwner()->GetGUIDLow() && me->GetCharmInfo()->IsReturning())
            {
                ClearCharmInfoFlags();
                me->GetCharmInfo()->SetIsFollowing(true);
            }
            break;
        }
        default:
            break;
    }
}
// Evaluates whether a pet can attack a specific target based on CommandState,
// ReactState and other flags.
// IMPORTANT: The order in which things are checked is important, be careful
// if you add or remove checks.
bool PetAI::CanAttack(Unit* target)
{
    // Hmmm...
    if (!target || !me->GetCharmInfo())
        return false;

    if (!target->isAlive() || !me->canSeeOrDetect(target))
    {
        // Clear target to prevent getting stuck on dead targets
        me->AttackStop();
        me->InterruptNonMeleeSpells(false);
        me->SendMeleeAttackStop();
        return false;
    }

    // Passive - passive pets can attack if told to
    if (me->HasReactState(REACT_PASSIVE))
        return me->GetCharmInfo()->IsCommandAttack();

    // CC - mobs under crowd control can be attacked if owner commanded
    if (target->HasBreakableByDamageCrowdControlAura())
        return me->GetCharmInfo()->IsCommandAttack();

    // Returning - pets ignore attacks only if owner clicked follow
    if (me->GetCharmInfo()->IsReturning())
        return !me->GetCharmInfo()->IsCommandFollow();

    // Stay - can attack if target is within range or commanded to
    if (me->GetCharmInfo()->HasCommandState(COMMAND_STAY) || me->GetCharmInfo()->HasCommandState(COMMAND_MOVE_TO))
        return (me->IsWithinMeleeRange(target) || me->GetCharmInfo()->IsCommandAttack());

    // Pets attacking something (or chasing) should only switch targets if owner tells them to
    if (me->GetVictim() && me->GetVictim() != target)
    {
        // Check if our owner selected this target and clicked "attack"
        Unit* ownerTarget = NULL;
        if (Player* owner = me->GetCharmerOrOwner()->ToPlayer())
            ownerTarget = owner->GetSelectedUnit();
        else
            ownerTarget = me->GetCharmerOrOwner()->GetVictim();

        if (ownerTarget && me->GetCharmInfo()->IsCommandAttack())
            return (target->GetGUID() == ownerTarget->GetGUID());
    }

    // Follow
    if (me->GetCharmInfo()->HasCommandState(COMMAND_FOLLOW))
        return !me->GetCharmInfo()->IsReturning();

    // default, though we shouldn't ever get here
    return false;
}
// Lets Death Knight ghouls react with an animation when their own master
// performs certain text emotes (cower / angry / glare / soothe).
// NOTE(review): the commented-out emote names on the first two cases do not
// match the emotes actually sent - confirm which is intended before editing.
void PetAI::ReceiveEmote(Player* player, uint32 emote)
{
    // Only react to emotes from this pet's own master.
    if (me->GetOwnerGUID() && me->GetOwnerGUID() == player->GetGUID())
        switch (emote)
        {
            case TEXT_EMOTE_COWER:
                if (me->isPet() && me->ToPet()->IsPetGhoul())
                    me->HandleEmoteCommand(/*EMOTE_ONESHOT_ROAR*/EMOTE_ONESHOT_OMNICAST_GHOUL);
                break;
            case TEXT_EMOTE_ANGRY:
                if (me->isPet() && me->ToPet()->IsPetGhoul())
                    me->HandleEmoteCommand(/*EMOTE_ONESHOT_COWER*/EMOTE_STATE_STUN);
                break;
            case TEXT_EMOTE_GLARE:
                if (me->isPet() && me->ToPet()->IsPetGhoul())
                    me->HandleEmoteCommand(EMOTE_STATE_STUN);
                break;
            case TEXT_EMOTE_SOOTHE:
                if (me->isPet() && me->ToPet()->IsPetGhoul())
                    me->HandleEmoteCommand(EMOTE_ONESHOT_OMNICAST_GHOUL);
                break;
        }
}
void PetAI::ClearCharmInfoFlags()
{
// Quick access to set all flags to FALSE
CharmInfo* ci = me->GetCharmInfo();
if (ci)
{
ci->SetIsAtStay(false);
ci->SetIsCommandAttack(false);
ci->SetIsCommandFollow(false);
ci->SetIsFollowing(false);
ci->SetIsReturning(false);
}
}
// Called when the pet itself takes damage; keeps the pet from ignoring aggro
// it gained directly, without letting it bounce between attackers.
void PetAI::AttackedBy(Unit* attacker)
{
    // Nothing to react to, and passive pets never act on their own.
    if (!attacker || me->HasReactState(REACT_PASSIVE))
        return;

    // Already fighting something that is still alive - stay on it.
    Unit* currentVictim = me->GetVictim();
    if (currentVictim && currentVictim->isAlive())
        return;

    // AttackStart() re-validates the target against all pet states.
    AttackStart(attacker);
}
|
gpl-2.0
|
chihchao/mngexcuse
|
language/tchinese/blocks.php
|
207
|
<?php
// Traditional Chinese ("tchinese") UI strings for the mngexcuse module's
// blocks. The Chinese values are runtime output - do not translate them.
define('_MD_MEXCS_BLOCK_INFO', '請假資訊');        // block title: leave information
define('_MD_MEXCS_BLOCK_PGSEXCUSE', '假單進度');   // block title: leave-request progress
define('_MD_MEXCS_BLOCK_CHKEXCUSE', '假單審核');   // block title: leave-request review
define('_MD_MEXCS_BLOCK_EMPTY', '無');             // placeholder text: "none"
?>
|
gpl-2.0
|
dlespiau/patchwork
|
patchwork/migrations/0027_auto_20180116_0044.py
|
452
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.9 on 2018-01-16 00:44
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    # Alters only the Series model's Meta options (newest-first default
    # ordering and the "Series" plural name); no schema change is involved.

    dependencies = [
        ('patchwork', '0026_event_series_not_mandatory'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='series',
            options={'ordering': ['-id'], 'verbose_name_plural': 'Series'},
        ),
    ]
|
gpl-2.0
|
rlhardrock/buhos
|
wp-content/plugins/wp-club-manager/includes/admin/wpcm-meta-box-functions.php
|
10509
|
<?php
/**
* WPClubManager Meta Box Functions
*
* @author ClubPress
* @category Core
* @package WPClubManager/Admin/Functions
* @version 1.0.0
*/
if ( ! defined( 'ABSPATH' ) ) exit; // Exit if accessed directly
/**
 * Output a text input box.
 *
 * @access public
 * @param array $field Field definition. Requires 'id' and 'label'; optional
 *                     keys: 'placeholder', 'class', 'wrapper_class', 'value'
 *                     (defaults to the post meta stored under 'id'), 'name'
 *                     (defaults to 'id'), 'type' (defaults to 'text'),
 *                     'description' and 'desc_tip'.
 * @return void
 */
function wpclubmanager_wp_text_input( $field ) {
    global $thepostid, $post, $wpclubmanager;

    $thepostid = empty( $thepostid ) ? $post->ID : $thepostid;

    // Fill in defaults; the value falls back to the stored post meta.
    $field['placeholder']   = isset( $field['placeholder'] ) ? $field['placeholder'] : '';
    $field['class']         = isset( $field['class'] ) ? $field['class'] : 'short';
    $field['wrapper_class'] = isset( $field['wrapper_class'] ) ? $field['wrapper_class'] : '';
    $field['value']         = isset( $field['value'] ) ? $field['value'] : get_post_meta( $thepostid, $field['id'], true );
    $field['name']          = isset( $field['name'] ) ? $field['name'] : $field['id'];
    $field['type']          = isset( $field['type'] ) ? $field['type'] : 'text';

    echo '<p class="' . esc_attr( $field['id'] ) . '_field ' . esc_attr( $field['wrapper_class'] ) . '"><label for="' . esc_attr( $field['id'] ) . '">' . wp_kses_post( $field['label'] ) . '</label><input type="' . esc_attr( $field['type'] ) . '" class="' . esc_attr( $field['class'] ) . '" name="' . esc_attr( $field['name'] ) . '" id="' . esc_attr( $field['id'] ) . '" value="' . esc_attr( $field['value'] ) . '" placeholder="' . esc_attr( $field['placeholder'] ) . '" /> ';

    // Description renders as a hover tooltip when 'desc_tip' is set
    // (and not false), otherwise as an inline span.
    if ( ! empty( $field['description'] ) ) {
        if ( isset( $field['desc_tip'] ) && false !== $field['desc_tip'] ) {
            echo '<img class="help_tip" data-tip="' . esc_attr( $field['description'] ) . '" src="' . esc_url( WPCM()->plugin_url() ) . '/assets/images/help.png" height="16" width="16" />';
        } else {
            echo '<span class="description">' . wp_kses_post( $field['description'] ) . '</span>';
        }
    }
    echo '</p>';
}
/**
 * Output a hidden input box.
 *
 * @access public
 * @param array $field Field definition. Requires 'id'; optional 'value'
 *                     (defaults to the post meta stored under 'id') and
 *                     'class' (defaults to '').
 * @return void
 */
function wpclubmanager_wp_hidden_input( $field ) {
    global $thepostid, $post;

    if ( empty( $thepostid ) ) {
        $thepostid = $post->ID;
    }
    if ( ! isset( $field['value'] ) ) {
        $field['value'] = get_post_meta( $thepostid, $field['id'], true );
    }
    if ( ! isset( $field['class'] ) ) {
        $field['class'] = '';
    }

    printf(
        '<input type="hidden" class="%s" name="%s" id="%s" value="%s" /> ',
        esc_attr( $field['class'] ),
        esc_attr( $field['id'] ),
        esc_attr( $field['id'] ),
        esc_attr( $field['value'] )
    );
}
/**
 * Output a textarea input box.
 *
 * @access public
 * @param array $field Field definition. Requires 'id' and 'label'; optional
 *                     keys: 'placeholder', 'class', 'wrapper_class', 'value'
 *                     (defaults to the post meta stored under 'id'), 'rows'
 *                     (default '4'), 'cols' (default '40'), 'description'
 *                     and 'desc_tip'.
 * @return void
 */
function wpclubmanager_wp_textarea_input( $field ) {
    global $thepostid, $post, $wpclubmanager;

    $thepostid = empty( $thepostid ) ? $post->ID : $thepostid;

    // Fill in defaults; the value falls back to the stored post meta.
    $field['placeholder']   = isset( $field['placeholder'] ) ? $field['placeholder'] : '';
    $field['class']         = isset( $field['class'] ) ? $field['class'] : 'short';
    $field['wrapper_class'] = isset( $field['wrapper_class'] ) ? $field['wrapper_class'] : '';
    $field['value']         = isset( $field['value'] ) ? $field['value'] : get_post_meta( $thepostid, $field['id'], true );
    $field['rows']          = isset( $field['rows'] ) ? $field['rows'] : '4';
    $field['cols']          = isset( $field['cols'] ) ? $field['cols'] : '40';

    echo '<p class="' . esc_attr( $field['id'] ) . '_field ' . esc_attr( $field['wrapper_class'] ) . '"><label for="' . esc_attr( $field['id'] ) . '">' . wp_kses_post( $field['label'] ) . '</label><textarea class="' . esc_attr( $field['class'] ) . '" name="' . esc_attr( $field['id'] ) . '" id="' . esc_attr( $field['id'] ) . '" placeholder="' . esc_attr( $field['placeholder'] ) . '" rows="' . esc_attr( $field['rows'] ) . '" cols="' . esc_attr( $field['cols'] ) . '">' . esc_textarea( $field['value'] ) . '</textarea> ';

    // Description renders as a hover tooltip when 'desc_tip' is set
    // (and not false), otherwise as an inline span.
    if ( ! empty( $field['description'] ) ) {
        if ( isset( $field['desc_tip'] ) && false !== $field['desc_tip'] ) {
            echo '<img class="help_tip" data-tip="' . esc_attr( $field['description'] ) . '" src="' . esc_url( WPCM()->plugin_url() ) . '/assets/images/help.png" height="16" width="16" />';
        } else {
            echo '<span class="description">' . wp_kses_post( $field['description'] ) . '</span>';
        }
    }
    echo '</p>';
}
/**
 * Output a checkbox input box.
 *
 * @access public
 * @param array $field Field definition. Requires 'id' and 'label'; optional
 *                     keys: 'class' (default 'checkbox'), 'wrapper_class',
 *                     'value' (defaults to the post meta stored under 'id'),
 *                     'cbvalue' (the checked value, default 'yes'), 'name'
 *                     (defaults to 'id') and 'description'.
 * @return void
 */
function wpclubmanager_wp_checkbox( $field ) {
    global $thepostid, $post;

    $thepostid = empty( $thepostid ) ? $post->ID : $thepostid;

    // Fill in defaults; the value falls back to the stored post meta and the
    // box renders checked when it equals 'cbvalue'.
    $field['class']         = isset( $field['class'] ) ? $field['class'] : 'checkbox';
    $field['wrapper_class'] = isset( $field['wrapper_class'] ) ? $field['wrapper_class'] : '';
    $field['value']         = isset( $field['value'] ) ? $field['value'] : get_post_meta( $thepostid, $field['id'], true );
    $field['cbvalue']       = isset( $field['cbvalue'] ) ? $field['cbvalue'] : 'yes';
    $field['name']          = isset( $field['name'] ) ? $field['name'] : $field['id'];

    echo '<p class="' . esc_attr( $field['id'] ) . '_field ' . esc_attr( $field['wrapper_class'] ) . '"><label for="' . esc_attr( $field['id'] ) . '">' . wp_kses_post( $field['label'] ) . '</label><input type="checkbox" class="' . esc_attr( $field['class'] ) . '" name="' . esc_attr( $field['name'] ) . '" id="' . esc_attr( $field['id'] ) . '" value="' . esc_attr( $field['cbvalue'] ) . '" ' . checked( $field['value'], $field['cbvalue'], false ) . ' /> ';

    if ( ! empty( $field['description'] ) ) echo '<span class="description">' . wp_kses_post( $field['description'] ) . '</span>';

    echo '</p>';
}
/**
 * Output a select input box.
 *
 * @access public
 * @param array $field Field definition. Requires 'id', 'label' and 'options'
 *                     (value => display-text map); optional keys: 'class'
 *                     (default 'chosen_select'), 'wrapper_class', 'value'
 *                     (defaults to the post meta stored under 'id'),
 *                     'description' and 'desc_tip'.
 * @return void
 */
function wpclubmanager_wp_select( $field ) {
    global $thepostid, $post, $wpclubmanager;

    $thepostid = empty( $thepostid ) ? $post->ID : $thepostid;

    // Fill in defaults; the value falls back to the stored post meta.
    $field['class']         = isset( $field['class'] ) ? $field['class'] : 'chosen_select';
    $field['wrapper_class'] = isset( $field['wrapper_class'] ) ? $field['wrapper_class'] : '';
    $field['value']         = isset( $field['value'] ) ? $field['value'] : get_post_meta( $thepostid, $field['id'], true );

    echo '<p class="form-field ' . esc_attr( $field['id'] ) . '_field ' . esc_attr( $field['wrapper_class'] ) . '"><label for="' . esc_attr( $field['id'] ) . '">' . wp_kses_post( $field['label'] ) . '</label><select id="' . esc_attr( $field['id'] ) . '" name="' . esc_attr( $field['id'] ) . '" class="' . esc_attr( $field['class'] ) . '">';

    // One <option> per entry; the stored value gets the selected attribute.
    foreach ( $field['options'] as $key => $value ) {
        echo '<option value="' . esc_attr( $key ) . '" ' . selected( esc_attr( $field['value'] ), esc_attr( $key ), false ) . '>' . esc_html( $value ) . '</option>';
    }

    echo '</select> ';

    // Description renders as a hover tooltip when 'desc_tip' is set
    // (and not false), otherwise as an inline span.
    if ( ! empty( $field['description'] ) ) {
        if ( isset( $field['desc_tip'] ) && false !== $field['desc_tip'] ) {
            echo '<img class="help_tip" data-tip="' . esc_attr( $field['description'] ) . '" src="' . esc_url( WPCM()->plugin_url() ) . '/assets/images/help.png" height="16" width="16" />';
        } else {
            echo '<span class="description">' . wp_kses_post( $field['description'] ) . '</span>';
        }
    }
    echo '</p>';
}
/**
 * Output a radio button group.
 *
 * @access public
 * @param array $field Field definition. Requires 'id', 'label' and 'options'
 *                     (value => display-text map); optional keys: 'class'
 *                     (default 'select short'), 'wrapper_class', 'value'
 *                     (defaults to the post meta stored under 'id'), 'name'
 *                     (defaults to 'id'), 'description' and 'desc_tip'.
 * @return void
 */
function wpclubmanager_wp_radio( $field ) {
    global $thepostid, $post, $wpclubmanager;

    $thepostid = empty( $thepostid ) ? $post->ID : $thepostid;

    // Fill in defaults; the value falls back to the stored post meta.
    $field['class']         = isset( $field['class'] ) ? $field['class'] : 'select short';
    $field['wrapper_class'] = isset( $field['wrapper_class'] ) ? $field['wrapper_class'] : '';
    $field['value']         = isset( $field['value'] ) ? $field['value'] : get_post_meta( $thepostid, $field['id'], true );
    $field['name']          = isset( $field['name'] ) ? $field['name'] : $field['id'];

    echo '<fieldset class="form-field ' . esc_attr( $field['id'] ) . '_field ' . esc_attr( $field['wrapper_class'] ) . '"><legend>' . wp_kses_post( $field['label'] ) . '</legend><ul class="wc-radios">';

    // One radio <li> per option; the stored value gets the checked attribute.
    foreach ( $field['options'] as $key => $value ) {
        echo '<li><label><input
                name="' . esc_attr( $field['name'] ) . '"
                value="' . esc_attr( $key ) . '"
                type="radio"
                class="' . esc_attr( $field['class'] ) . '"
                ' . checked( esc_attr( $field['value'] ), esc_attr( $key ), false ) . '
                /> ' . esc_html( $value ) . '</label>
        </li>';
    }
    echo '</ul>';

    // Description renders as a hover tooltip when 'desc_tip' is set
    // (and not false), otherwise as an inline span.
    if ( ! empty( $field['description'] ) ) {
        if ( isset( $field['desc_tip'] ) && false !== $field['desc_tip'] ) {
            echo '<img class="help_tip" data-tip="' . esc_attr( $field['description'] ) . '" src="' . esc_url( WPCM()->plugin_url() ) . '/assets/images/help.png" height="16" width="16" />';
        } else {
            echo '<span class="description">' . wp_kses_post( $field['description'] ) . '</span>';
        }
    }
    echo '</fieldset>';
}
/**
 * Output a country select box.
 *
 * The pre-selected country is the post's 'wpcm_natl' meta value, falling
 * back to the site-wide 'wpcm_default_country' option when unset.
 *
 * @access public
 * @param array $field Field definition. Requires 'id' and 'label'; optional
 *                     keys: 'class' (default 'chosen_select'),
 *                     'wrapper_class', 'value' (defaults to the post meta
 *                     stored under 'id'), 'description' and 'desc_tip'.
 * @return void
 */
function wpclubmanager_wp_country_select( $field ) {
    global $thepostid, $post, $wpclubmanager;

    // Resolve the country to pre-select: post meta first, site default second.
    $country_setting = get_post_meta( $post->ID, 'wpcm_natl', true );
    $country         = $country_setting ? $country_setting : get_option( 'wpcm_default_country' );

    $thepostid = empty( $thepostid ) ? $post->ID : $thepostid;

    // Fill in defaults; the value falls back to the stored post meta.
    $field['class']         = isset( $field['class'] ) ? $field['class'] : 'chosen_select';
    $field['wrapper_class'] = isset( $field['wrapper_class'] ) ? $field['wrapper_class'] : '';
    $field['value']         = isset( $field['value'] ) ? $field['value'] : get_post_meta( $thepostid, $field['id'], true );

    echo '<p class="form-field ' . esc_attr( $field['id'] ) . '_field ' . esc_attr( $field['wrapper_class'] ) . '"><label for="' . esc_attr( $field['id'] ) . '">' . wp_kses_post( $field['label'] ) . '</label>';
    echo '<select name="' . esc_attr( $field['id'] ) . '" data-placeholder="' . __( 'Choose a country…', 'wp-club-manager' ) . '" title="Country" class="' . esc_attr( $field['class'] ) . '">';

    // Pass the resolved country directly (the original code passed the
    // confusing no-op self-assignment "$country = $country"); an unused
    // local copy of the countries array was also removed.
    WPCM()->countries->country_dropdown_options( $country );

    echo '</select>';

    // Description renders as a hover tooltip when 'desc_tip' is set
    // (and not false), otherwise as an inline span.
    if ( ! empty( $field['description'] ) ) {
        if ( isset( $field['desc_tip'] ) && false !== $field['desc_tip'] ) {
            echo '<img class="help_tip" data-tip="' . esc_attr( $field['description'] ) . '" src="' . esc_url( WPCM()->plugin_url() ) . '/assets/images/help.png" height="16" width="16" />';
        } else {
            echo '<span class="description">' . wp_kses_post( $field['description'] ) . '</span>';
        }
    }
    echo '</p>';
}
|
gpl-2.0
|
hampelm/Ginsberg-CiviDemo
|
sites/all/modules/civicrm/CRM/Contact/BAO/GroupContactCache.php
|
12241
|
<?php
/*
+--------------------------------------------------------------------+
| CiviCRM version 3.3 |
+--------------------------------------------------------------------+
| Copyright CiviCRM LLC (c) 2004-2010 |
+--------------------------------------------------------------------+
| This file is a part of CiviCRM. |
| |
| CiviCRM is free software; you can copy, modify, and distribute it |
| under the terms of the GNU Affero General Public License |
| Version 3, 19 November 2007 and the CiviCRM Licensing Exception. |
| |
| CiviCRM is distributed in the hope that it will be useful, but |
| WITHOUT ANY WARRANTY; without even the implied warranty of |
| MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. |
| See the GNU Affero General Public License for more details. |
| |
| You should have received a copy of the GNU Affero General Public |
| License and the CiviCRM Licensing Exception along |
| with this program; if not, contact CiviCRM LLC |
| at info[AT]civicrm[DOT]org. If you have questions about the |
| GNU Affero General Public License or the licensing of CiviCRM, |
| see the CiviCRM license FAQ at http://civicrm.org/licensing |
+--------------------------------------------------------------------+
*/
/**
*
* @package CRM
* @copyright CiviCRM LLC (c) 2004-2010
* $Id$
*
*/
require_once 'CRM/Contact/DAO/GroupContactCache.php';
class CRM_Contact_BAO_GroupContactCache extends CRM_Contact_DAO_GroupContactCache {
const
NUM_CONTACTS_TO_INSERT = 200;
    /**
     * Check to see if we have cache entries for this group
     * if not, regenerate, else return
     *
     * @param int|array $groupID groupID of group that we are checking against
     *
     * @return boolean true if we did not regenerate, false if we did
     */
    static function check( $groupID ) {
        if ( empty( $groupID ) ) {
            return true;
        }

        if ( ! is_array( $groupID ) ) {
            $groupID = array( $groupID );
        }

        // note escapeString is a must here and we can't send the imploded value as second arguement to
        // the executeQuery(), since that would put single quote around the string and such a string
        // of comma separated integers would not work.
        $groupID = CRM_Core_DAO::escapeString( implode( ', ', $groupID ) );

        $config = CRM_Core_Config::singleton( );
        $smartGroupCacheTimeout =
            isset( $config->smartGroupCacheTimeout ) &&
            is_numeric( $config->smartGroupCacheTimeout ) ? $config->smartGroupCacheTimeout : 0;

        // Compute "now" in UTC so the TIMESTAMPDIFF below compares like with
        // like regardless of the PHP timezone setting.
        //make sure to give original timezone settings again.
        $originalTimezone = date_default_timezone_get( );
        date_default_timezone_set('UTC');
        $now = date('YmdHis');
        date_default_timezone_set( $originalTimezone );

        // A group needs a rebuild when it is smart (saved_search_id) or has
        // children, and its cache is missing or older than the timeout.
        $query = "
SELECT  g.id
FROM    civicrm_group g
WHERE   g.id IN ( {$groupID} ) AND ( g.saved_search_id IS NOT NULL OR g.children IS NOT NULL ) AND
        (g.cache_date IS NULL OR (TIMESTAMPDIFF(MINUTE, g.cache_date, $now) >= $smartGroupCacheTimeout))
";
        $dao =& CRM_Core_DAO::executeQuery( $query );
        $groupIDs = array( );
        while ( $dao->fetch() ) {
            $groupIDs[] = $dao->id;
        }

        if ( empty( $groupIDs ) ) {
            return true;
        } else {
            self::add( $groupIDs );
            return false;
        }
    }
static function add( $groupID ) {
// first delete the current cache
self::remove( $groupID );
if ( ! is_array( $groupID ) ) {
$groupID = array( $groupID );
}
$params['return.contact_id'] = 1;
$params['offset'] = 0;
$params['rowCount'] = 0;
$params['sort'] = null;
$params['smartGroupCache'] = false;
require_once 'api/v2/Contact.php';
$values = array( );
foreach ( $groupID as $gid ) {
$params['group'] = array( );
$params['group'][$gid] = 1;
// the below call update the cache table as a byproduct of the query
$contacts = civicrm_contact_search( $params );
}
}
    /**
     * Insert pre-built cache rows for a set of groups and stamp the groups
     * with the cache build time.
     *
     * @param array $groupID group ids the values belong to
     * @param array $values  pre-formatted SQL value tuples - each element is
     *                       interpolated directly into the REPLACE statement,
     *                       so it must already be of the form
     *                       "(groupId, contactId)". Consumed (emptied) here.
     *
     * @return void
     */
    static function store( &$groupID, &$values ) {
        $processed = false;

        // to avoid long strings, lets do NUM_CONTACTS_TO_INSERT values at a time
        while ( ! empty( $values ) ) {
            $processed = true;
            $input = array_splice( $values, 0, self::NUM_CONTACTS_TO_INSERT );
            $str   = implode( ',', $input );
            $sql   = "REPLACE INTO civicrm_group_contact_cache (group_id,contact_id) VALUES $str;";
            CRM_Core_DAO::executeQuery( $sql );
        }

        // only update cache entry if we had any values
        if ( $processed ) {
            // also update the group with cache date information
            // Compute "now" in UTC so cache_date matches the comparisons
            // done in check() / remove().
            //make sure to give original timezone settings again.
            $originalTimezone = date_default_timezone_get( );
            date_default_timezone_set('UTC');
            $now = date('YmdHis');
            date_default_timezone_set( $originalTimezone );
        } else {
            // No rows were written: clear the stamp so the cache is rebuilt.
            $now = 'null';
        }

        $groupIDs = implode( ',', $groupID );
        $sql = "
UPDATE civicrm_group
SET    cache_date = $now
WHERE  id IN ( $groupIDs )
";
        CRM_Core_DAO::executeQuery( $sql,
                                    CRM_Core_DAO::$_nullArray );
    }
    /**
     * Delete smart-group cache rows and reset the owning groups' cache_date
     * so they will be rebuilt on next use.
     *
     * Three modes, chosen by $groupID:
     *  - null:   flush every group whose cache is older than the configured
     *            smartGroupCacheTimeout (done at most once per request when
     *            $onceOnly is true)
     *  - array:  flush exactly those groups
     *  - scalar: flush a single group
     *
     * @param int|array|null $groupID  group id(s) to flush, or null for all
     * @param boolean        $onceOnly skip repeated full flushes in one request
     *
     * @return void
     */
    static function remove( $groupID = null, $onceOnly = true ) {
        static $invoked = false;

        // typically this needs to happen only once per instance
        // this is especially true in import, where we dont need
        // to do this all the time
        // this optimization is done only when no groupID is passed
        // i.e. cache is reset for all groups
        if ( $onceOnly &&
             $invoked  &&
             $groupID == null ) {
            return;
        }

        if ( $groupID == null ) {
            $invoked = true;
        }

        //when there are difference in timezones for mysql and php.
        //cache_date set null not behaving properly, CRM-6855
        //make sure to give original timezone settings again.
        $originalTimezone = date_default_timezone_get( );
        date_default_timezone_set('UTC');
        $now = date( 'YmdHis' );
        date_default_timezone_set( $originalTimezone );

        $config = CRM_Core_Config::singleton( );
        $smartGroupCacheTimeout =
            isset( $config->smartGroupCacheTimeout ) && is_numeric( $config->smartGroupCacheTimeout ) ? $config->smartGroupCacheTimeout : 0;

        if ( ! isset( $groupID ) ) {
            // Mode 1: flush all groups whose cache has passed the timeout.
            $query = "
DELETE     g
FROM       civicrm_group_contact_cache g
INNER JOIN civicrm_contact c ON c.id = g.contact_id
WHERE      g.group_id IN (
    SELECT id
    FROM   civicrm_group
    WHERE  TIMESTAMPDIFF(MINUTE, cache_date, $now) >= $smartGroupCacheTimeout
)
";
            $update = "
UPDATE civicrm_group g
SET    cache_date = null
WHERE  TIMESTAMPDIFF(MINUTE, cache_date, $now) >= $smartGroupCacheTimeout
";
            $params = array( );
        } else if ( is_array( $groupID ) ) {
            // Mode 2: flush an explicit list of groups.
            // NOTE(review): the imploded id list is bound as a single %1
            // 'String' parameter inside IN ( %1 ) - verify the DAO does not
            // quote it as one literal, which would match no rows.
            $query = "
DELETE g
FROM   civicrm_group_contact_cache g
WHERE  g.group_id IN ( %1 )
";
            $update = "
UPDATE civicrm_group g
SET    cache_date = null
WHERE  id IN ( %1 )
";
            $groupIDs = implode( ', ', $groupID );
            $params   = array( 1 => array( $groupIDs, 'String' ) );
        } else {
            // Mode 3: flush a single group.
            $query = "
DELETE g
FROM   civicrm_group_contact_cache g
WHERE  g.group_id = %1
";
            $update = "
UPDATE civicrm_group g
SET    cache_date = null
WHERE  id = %1
";
            $params = array( 1 => array( $groupID, 'Integer' ) );
        }

        CRM_Core_DAO::executeQuery( $query , $params );

        // also update the cache_date for these groups
        CRM_Core_DAO::executeQuery( $update, $params );
    }
    /**
     * load the smart group cache for a saved search
     *
     * Rebuilds civicrm_group_contact_cache rows for $group: contacts matched
     * by its saved search (excluding those explicitly 'Removed'), UNIONed with
     * contacts explicitly 'Added' to the group; members of child groups are
     * cached under this group's id as well.
     *
     * @param object $group group DAO with id, saved_search_id and children set
     */
    static function load( &$group ) {
        $groupID       = $group->id;
        $savedSearchID = $group->saved_search_id;
        $sql           = null;
        $idName        = 'id';
        $customClass   = null;

        if ( $savedSearchID ) {
            require_once 'CRM/Contact/BAO/SavedSearch.php';
            $ssParams =& CRM_Contact_BAO_SavedSearch::getSearchParams($savedSearchID);

            // saved searches created from a mapping carry their own
            // return-property list
            $returnProperties = array();
            if (CRM_Core_DAO::getFieldValue( 'CRM_Contact_DAO_SavedSearch',
                                             $savedSearchID,
                                             'mapping_id' ) ) {
                require_once "CRM/Core/BAO/Mapping.php";
                $fv =& CRM_Contact_BAO_SavedSearch::getFormValues($savedSearchID);
                $returnProperties = CRM_Core_BAO_Mapping::returnProperties( $fv );
            }

            if ( isset( $ssParams['customSearchID'] ) ) {
                // if custom search
                require_once 'CRM/Contact/BAO/SearchCustom.php';

                // we split it up and store custom class
                // so temp tables are not destroyed if they are used
                // hence customClass is defined above at top of function
                $customClass = CRM_Contact_BAO_SearchCustom::customClass( $ssParams['customSearchID'],
                                                                          $savedSearchID );
                $searchSQL = $customClass->contactIDs( );
                // custom searches expose the id column as contact_id
                $idName = 'contact_id';
            } else {
                require_once 'CRM/Contact/BAO/Query.php';
                $query = new CRM_Contact_BAO_Query($ssParams, $returnProperties, null,
                                                   false, false, 1,
                                                   true, true, false );
                $query->_useGroupBy = false;
                $searchSQL =& $query->searchQuery( 0, 0, null,
                                                   false, false,
                                                   false, true, true, null );
            }
            $groupID = CRM_Utils_Type::escape($groupID, 'Integer');
            // exclude contacts the user explicitly removed from this group
            $sql = $searchSQL .
                " AND contact_a.id NOT IN (
SELECT contact_id FROM civicrm_group_contact
WHERE civicrm_group_contact.status = 'Removed'
AND civicrm_group_contact.group_id = $groupID ) ";
        }

        if ( $sql ) {
            $sql .= " UNION ";
        }

        // lets also store the records that are explicitly added to the group
        // this allows us to skip the group contact LEFT JOIN
        $sql .= "
SELECT contact_id as $idName
FROM civicrm_group_contact
WHERE civicrm_group_contact.status = 'Added'
AND civicrm_group_contact.group_id = $groupID ";

        $dao = CRM_Core_DAO::executeQuery( $sql );

        // build "(group_id,contact_id)" tuples in the form store() expects
        $values = array( );
        while ( $dao->fetch( ) ) {
            $values[] = "({$groupID},{$dao->$idName})";
        }

        $groupIDs = array( $groupID );
        self::remove( $groupIDs );
        self::store ( $groupIDs, $values );

        // also cache child-group members under this (parent) group's id
        if ( $group->children ) {
            require_once 'CRM/Contact/BAO/Group.php';
            $childrenIDs = explode( ',', $group->children );
            foreach ( $childrenIDs as $childID ) {
                $contactIDs =& CRM_Contact_BAO_Group::getMember( $childID, false );
                $values = array( );
                foreach ( $contactIDs as $contactID => $dontCare) {
                    $values[] = "({$groupID},{$contactID})";
                }
                self::store ( $groupIDs, $values );
            }
        }
    }
}
|
gpl-2.0
|
samnajian/glowbox
|
node_modules/grunt-express-server/tasks/lib/server.js
|
3321
|
/*
* grunt-express-server
* https://github.com/ericclemmons/grunt-express-server
*
* Copyright (c) 2013 Eric Clemmons
* Licensed under the MIT license.
*/
'use strict';
module.exports = function(grunt, target) {
if (!process._servers) {
process._servers = {};
}
var backup = null;
var startdone = null;
var stopdone = null;
var server = process._servers[target]; // Store server between live reloads to close/restart express
var finished = function() {
if (startdone) {
startdone();
startdone = null;
}
};
return {
start: function(options) {
if (server) {
this.stop();
if (grunt.task.current.flags.stop) {
finished();
return;
}
}
backup = JSON.parse(JSON.stringify(process.env)); // Clone process.env
// For some weird reason, on Windows the process.env stringify produces a "Path"
// member instead of a "PATH" member, and grunt chokes when it can't find PATH.
if (!backup.PATH) {
if (backup.Path) {
backup.PATH = backup.Path;
delete backup.Path;
}
}
grunt.log.writeln('Starting '.cyan + (options.background ? 'background' : 'foreground') + ' Express server');
startdone = grunt.task.current.async();
// Set PORT for new processes
process.env.PORT = options.port;
// Set NODE_ENV for new processes
if (options.node_env) {
process.env.NODE_ENV = options.node_env;
}
// Set debug mode for node-inspector
if(options.debug) {
options.args.unshift('--debug');
}
if (options.background) {
server = process._servers[target] = grunt.util.spawn({
cmd: options.cmd,
args: options.args,
env: process.env,
fallback: options.fallback
}, function (error, result, code) {
if (stopdone) {
stopdone();
}
finished();
});
if (options.delay) {
setTimeout(finished, options.delay);
}
if (options.output) {
server.stdout.on('data', function(data) {
var message = "" + data;
var regex = new RegExp(options.output, "gi");
if (message.match(regex)) {
finished();
}
});
}
server.stdout.pipe(process.stdout);
server.stderr.pipe(process.stderr);
} else {
// Server is ran in current process
server = process._servers[target] = require(options.script);
}
process.on('exit', finished);
process.on('exit', this.stop);
},
stop: function() {
if (server && server.kill) {
grunt.log.writeln('Stopping'.red + ' Express server');
// grunt.task.current.async() is no more available if stop is called
// from the 'exit' event.
if (typeof(grunt.task.current.async) === "function") {
stopdone = grunt.task.current.async();
}
server.kill('SIGTERM');
process.removeAllListeners();
server = process._servers[target] = null;
}
// Restore original process.env
if (backup) {
process.env = JSON.parse(JSON.stringify(backup));
}
finished();
}
};
};
|
gpl-2.0
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.